Merge remote-tracking branch 'origin/master' into pullfail

Jake Moshenko 2014-09-12 10:36:38 -04:00
commit 5388633f9a
100 changed files with 2125 additions and 1008 deletions


@@ -1,11 +1,11 @@
 conf/stack
 screenshots
+tools
 test/data/registry
 venv
 .git
 .gitignore
 Bobfile
 README.md
-license.py
 requirements-nover.txt
 run-local.sh


@ -1,13 +1,13 @@
FROM phusion/baseimage:0.9.11 FROM phusion/baseimage:0.9.13
ENV DEBIAN_FRONTEND noninteractive ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root ENV HOME /root
# Install the dependencies. # Install the dependencies.
RUN apt-get update # 06AUG2014 RUN apt-get update # 10SEP2014
# New ubuntu packages should be added as their own apt-get install lines below the existing install commands # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev
# Build the python dependencies # Build the python dependencies
ADD requirements.txt requirements.txt ADD requirements.txt requirements.txt


@ -1,13 +1,13 @@
FROM phusion/baseimage:0.9.11 FROM phusion/baseimage:0.9.13
ENV DEBIAN_FRONTEND noninteractive ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root ENV HOME /root
# Install the dependencies. # Install the dependencies.
RUN apt-get update # 06AUG2014 RUN apt-get update # 10SEP2014
# New ubuntu packages should be added as their own apt-get install lines below the existing install commands # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev
# Build the python dependencies # Build the python dependencies
ADD requirements.txt requirements.txt ADD requirements.txt requirements.txt
@ -30,6 +30,7 @@ RUN cd grunt && npm install
RUN cd grunt && grunt RUN cd grunt && grunt
ADD conf/init/svlogd_config /svlogd_config ADD conf/init/svlogd_config /svlogd_config
ADD conf/init/doupdatelimits.sh /etc/my_init.d/
ADD conf/init/preplogsdir.sh /etc/my_init.d/ ADD conf/init/preplogsdir.sh /etc/my_init.d/
ADD conf/init/runmigration.sh /etc/my_init.d/ ADD conf/init/runmigration.sh /etc/my_init.d/
@ -38,9 +39,6 @@ ADD conf/init/nginx /etc/service/nginx
ADD conf/init/diffsworker /etc/service/diffsworker ADD conf/init/diffsworker /etc/service/diffsworker
ADD conf/init/notificationworker /etc/service/notificationworker ADD conf/init/notificationworker /etc/service/notificationworker
# TODO: Remove this after the prod CL push
ADD conf/init/webhookworker /etc/service/webhookworker
# Download any external libs. # Download any external libs.
RUN mkdir static/fonts static/ldn RUN mkdir static/fonts static/ldn
RUN venv/bin/python -m external_libraries RUN venv/bin/python -m external_libraries

app.py

@@ -1,8 +1,9 @@
 import logging
 import os
 import json
+import yaml

-from flask import Flask
+from flask import Flask as BaseFlask, Config as BaseConfig
 from flask.ext.principal import Principal
 from flask.ext.login import LoginManager
 from flask.ext.mail import Mail
@@ -21,11 +22,37 @@ from data.billing import Billing
 from data.buildlogs import BuildLogs
 from data.queue import WorkQueue
 from data.userevent import UserEventsBuilderModule
-from license import load_license
 from datetime import datetime

-OVERRIDE_CONFIG_FILENAME = 'conf/stack/config.py'
+
+class Config(BaseConfig):
+  """ Flask config enhanced with a `from_yamlfile` method """
+  def from_yamlfile(self, config_file):
+    with open(config_file) as f:
+      c = yaml.load(f)
+      if not c:
+        logger.debug('Empty YAML config file')
+        return
+
+      if isinstance(c, str):
+        raise Exception('Invalid YAML config file: ' + str(c))
+
+      for key in c.iterkeys():
+        if key.isupper():
+          self[key] = c[key]
+
+
+class Flask(BaseFlask):
+  """ Extends the Flask class to implement our custom Config class. """
+  def make_config(self, instance_relative=False):
+    root_path = self.instance_path if instance_relative else self.root_path
+    return Config(root_path, self.default_config)
+
+
+OVERRIDE_CONFIG_YAML_FILENAME = 'conf/stack/config.yaml'
+OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py'
+
 OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'
 LICENSE_FILENAME = 'conf/stack/license.enc'
@@ -43,22 +70,17 @@ else:
   logger.debug('Loading default config.')
   app.config.from_object(DefaultConfig())

-if os.path.exists(OVERRIDE_CONFIG_FILENAME):
-  logger.debug('Applying config file: %s', OVERRIDE_CONFIG_FILENAME)
-  app.config.from_pyfile(OVERRIDE_CONFIG_FILENAME)
+if os.path.exists(OVERRIDE_CONFIG_PY_FILENAME):
+  logger.debug('Applying config file: %s', OVERRIDE_CONFIG_PY_FILENAME)
+  app.config.from_pyfile(OVERRIDE_CONFIG_PY_FILENAME)
+
+if os.path.exists(OVERRIDE_CONFIG_YAML_FILENAME):
+  logger.debug('Applying config file: %s', OVERRIDE_CONFIG_YAML_FILENAME)
+  app.config.from_yamlfile(OVERRIDE_CONFIG_YAML_FILENAME)

 environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}'))
 app.config.update(environ_config)

-logger.debug('Applying license config from: %s', LICENSE_FILENAME)
-try:
-  app.config.update(load_license(LICENSE_FILENAME))
-except IOError:
-  raise RuntimeError('License file %s not found; please check your configuration' % LICENSE_FILENAME)
-
-if app.config.get('LICENSE_EXPIRATION', datetime.min) < datetime.utcnow():
-  raise RuntimeError('License has expired, please contact support@quay.io')
-
 features.import_features(app.config)

 Principal(app, use_sessions=False)
@@ -66,7 +88,7 @@ Principal(app, use_sessions=False)
 login_manager = LoginManager(app)
 mail = Mail(app)
 storage = Storage(app)
-userfiles = Userfiles(app)
+userfiles = Userfiles(app, storage)
 analytics = Analytics(app)
 billing = Billing(app)
 sentry = Sentry(app)
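
Note: to illustrate the new override path, here is a minimal sketch of what Config.from_yamlfile effectively does with a hypothetical conf/stack/config.yaml (the file contents are invented for the example); only uppercase keys are copied into the Flask config, and a file that parses to a bare string is rejected.

# Sketch only; the YAML contents are hypothetical, e.g.:
#   SERVER_HOSTNAME: quay.example.com
#   FEATURE_GOOGLE_LOGIN: true
import yaml

with open('conf/stack/config.yaml') as f:
  overrides = yaml.load(f) or {}          # mirrors the yaml.load call in the diff

app_config = {}
for key, value in overrides.items():
  if key.isupper():                       # lowercase keys are ignored, as in from_yamlfile
    app_config[key] = value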


@@ -1,5 +1,5 @@
 bind = 'unix:/tmp/gunicorn.sock'
-workers = 8
+workers = 16
 worker_class = 'gevent'
 timeout = 2000
 logconfig = 'conf/logging.conf'

conf/init/doupdatelimits.sh Executable file

@@ -0,0 +1,5 @@
+#! /bin/bash
+set -e
+
+# Update the connection limit
+sysctl -w net.core.somaxconn=1024


@@ -1,2 +0,0 @@
-#!/bin/sh
-exec svlogd -t /var/log/webhookworker/


@@ -1,8 +0,0 @@
-#! /bin/bash
-
-echo 'Starting webhook worker'
-
-cd /
-venv/bin/python -m workers.webhookworker
-
-echo 'Webhook worker exited'


@@ -1,4 +1,4 @@
-client_max_body_size 8G;
+client_max_body_size 20G;
 client_body_temp_path /var/log/nginx/client_body 1 2;
 server_name _;


@@ -19,7 +19,7 @@ def build_requests_session():
 CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID',
                     'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY',
                     'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE',
-                    'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT']
+                    'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID']

 def getFrontendVisibleConfig(config_dict):
@@ -89,10 +89,6 @@ class DefaultConfig(object):
   # Stripe config
   BILLING_TYPE = 'FakeStripe'

-  # Userfiles
-  USERFILES_TYPE = 'LocalUserfiles'
-  USERFILES_PATH = 'test/data/registry/userfiles'
-
   # Analytics
   ANALYTICS_TYPE = 'FakeAnalytics'
@@ -115,6 +111,13 @@ class DefaultConfig(object):
   GITHUB_LOGIN_CLIENT_ID = ''
   GITHUB_LOGIN_CLIENT_SECRET = ''

+  # Google Config.
+  GOOGLE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
+  GOOGLE_USER_URL = 'https://www.googleapis.com/oauth2/v1/userinfo'
+
+  GOOGLE_LOGIN_CLIENT_ID = ''
+  GOOGLE_LOGIN_CLIENT_SECRET = ''
+
   # Requests based HTTP client with a large request pool
   HTTPCLIENT = build_requests_session()
@@ -144,6 +147,9 @@ class DefaultConfig(object):
   # Feature Flag: Whether GitHub login is supported.
   FEATURE_GITHUB_LOGIN = False

+  # Feature Flag: Whether Google login is supported.
+  FEATURE_GOOGLE_LOGIN = False
+
   # Feature flag, whether to enable olark chat
   FEATURE_OLARK_CHAT = False
@@ -153,9 +159,16 @@ class DefaultConfig(object):
   # Feature Flag: Whether to support GitHub build triggers.
   FEATURE_GITHUB_BUILD = False

+  # Feature Flag: Dockerfile build support.
+  FEATURE_BUILD_SUPPORT = True
+
   DISTRIBUTED_STORAGE_CONFIG = {
     'local_eu': ['LocalStorage', {'storage_path': 'test/data/registry/eu'}],
     'local_us': ['LocalStorage', {'storage_path': 'test/data/registry/us'}],
   }

   DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
+
+  # Userfiles
+  USERFILES_LOCATION = 'local_us'
+  USERFILES_PATH = 'userfiles/'
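
Note: as a hedged illustration of the new knobs, a deployment's conf/stack/config.py override might enable Google login and point userfiles at a storage location like this (the client ID and secret values are placeholders):

# Hypothetical conf/stack/config.py override; values are placeholders.
FEATURE_GOOGLE_LOGIN = True
GOOGLE_LOGIN_CLIENT_ID = '1234567890-example.apps.googleusercontent.com'
GOOGLE_LOGIN_CLIENT_SECRET = 'not-a-real-secret'

# Userfiles now live inside the distributed storage engine rather than a
# dedicated LocalUserfiles/S3Userfiles backend.
USERFILES_LOCATION = 'local_us'
USERFILES_PATH = 'userfiles/'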


@@ -3,6 +3,8 @@ import stripe
 from datetime import datetime, timedelta
 from calendar import timegm

+from util.collections import AttrDict
+
 PLANS = [
   # Deprecated Plans
   {
@@ -118,20 +120,6 @@ def get_plan(plan_id):
   return None

-class AttrDict(dict):
-  def __init__(self, *args, **kwargs):
-    super(AttrDict, self).__init__(*args, **kwargs)
-    self.__dict__ = self
-
-  @classmethod
-  def deep_copy(cls, attr_dict):
-    copy = AttrDict(attr_dict)
-    for key, value in copy.items():
-      if isinstance(value, AttrDict):
-        copy[key] = cls.deep_copy(value)
-    return copy
-
 class FakeStripe(object):
   class Customer(AttrDict):
     FAKE_PLAN = AttrDict({
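
Note: the AttrDict helper removed here is now imported from util.collections; that module is not part of this excerpt, but judging from the import it presumably carries the same implementation that was deleted:

# util/collections.py (assumed location; body reconstructed from the code removed above)
class AttrDict(dict):
  def __init__(self, *args, **kwargs):
    super(AttrDict, self).__init__(*args, **kwargs)
    self.__dict__ = self            # attribute access aliases key access

  @classmethod
  def deep_copy(cls, attr_dict):
    copy = AttrDict(attr_dict)
    for key, value in copy.items():
      if isinstance(value, AttrDict):
        copy[key] = cls.deep_copy(value)
    return copy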


@@ -17,6 +17,8 @@ SCHEME_DRIVERS = {
   'mysql': MySQLDatabase,
   'mysql+pymysql': MySQLDatabase,
   'sqlite': SqliteDatabase,
+  'postgresql': PostgresqlDatabase,
+  'postgresql+psycopg2': PostgresqlDatabase,
 }

 db = Proxy()
@@ -32,7 +34,7 @@ def _db_from_url(url, db_kwargs):
   if parsed_url.username:
     db_kwargs['user'] = parsed_url.username
   if parsed_url.password:
-    db_kwargs['passwd'] = parsed_url.password
+    db_kwargs['password'] = parsed_url.password

   return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
@@ -74,6 +76,8 @@ class User(BaseModel):
   organization = BooleanField(default=False, index=True)
   robot = BooleanField(default=False, index=True)
   invoice_email = BooleanField(default=False)
+  invalid_login_attempts = IntegerField(default=0)
+  last_invalid_login = DateTimeField(default=datetime.utcnow)

 class TeamRole(BaseModel):
@@ -116,6 +120,7 @@ class FederatedLogin(BaseModel):
   user = ForeignKeyField(User, index=True)
   service = ForeignKeyField(LoginService, index=True)
   service_ident = CharField()
+  metadata_json = TextField(default='{}')

   class Meta:
     database = db
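
Note: for illustration (host and credentials below are placeholders), the new scheme entries let a deployment point its DB_URI at Postgres, and _db_from_url() builds a peewee PostgresqlDatabase from the parsed parts; the passwd-to-password rename presumably matches the keyword psycopg2 expects.

# Illustrative only; host, user, and password are placeholders.
DB_URI = 'postgresql+psycopg2://quay:secret@db.example.com:5432/quay'

# _db_from_url(DB_URI, {}) then resolves roughly to:
#   PostgresqlDatabase('quay', host='db.example.com', port=5432,
#                      user='quay', password='secret')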


@@ -8,6 +8,7 @@ from peewee import SqliteDatabase
 from data.database import all_models, db
 from app import app
 from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
+from util.collections import AttrDict

 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
@@ -23,6 +24,7 @@ fileConfig(config.config_file_name)
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
 target_metadata = gen_sqlalchemy_metadata(all_models)
+tables = AttrDict(target_metadata.tables)

 # other values from the config, defined by the needs of env.py,
 # can be acquired:
@@ -45,7 +47,7 @@ def run_migrations_offline():
   context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)

   with context.begin_transaction():
-    context.run_migrations()
+    context.run_migrations(tables=tables)

 def run_migrations_online():
   """Run migrations in 'online' mode.
@@ -72,7 +74,7 @@ def run_migrations_online():
   try:
     with context.begin_transaction():
-      context.run_migrations()
+      context.run_migrations(tables=tables)
   finally:
     connection.close()
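
Note: Alembic passes any keyword arguments given to run_migrations() through to each revision's upgrade()/downgrade(), which is how the migration scripts below receive the tables AttrDict of SQLAlchemy Table objects. A minimal sketch of a revision written against the new convention (revision IDs and the inserted row are placeholders, not real migrations):

# Hypothetical revision for illustration only.
revision = '000000000000'
down_revision = 'ffffffffffff'

from alembic import op

def upgrade(tables):
  op.bulk_insert(tables.loginservice, [{'id': 99, 'name': 'example'}])

def downgrade(tables):
  op.execute(tables.loginservice.delete()
             .where(tables.loginservice.c.name == op.inline_literal('example')))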


@@ -14,9 +14,9 @@ from alembic import op
 import sqlalchemy as sa
 ${imports if imports else ""}

-def upgrade():
+def upgrade(tables):
   ${upgrades if upgrades else "pass"}

-def downgrade():
+def downgrade(tables):
   ${downgrades if downgrades else "pass"}


@@ -0,0 +1,35 @@
+"""add metadata field to external logins
+
+Revision ID: 1594a74a74ca
+Revises: f42b0ea7a4d
+Create Date: 2014-09-04 18:17:35.205698
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '1594a74a74ca'
+down_revision = 'f42b0ea7a4d'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('federatedlogin', sa.Column('metadata_json', sa.Text(), nullable=False))
+  ### end Alembic commands ###
+
+  op.bulk_insert(tables.loginservice,
+                 [
+                   {'id':4, 'name':'google'},
+                 ])
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('federatedlogin', 'metadata_json')
+  ### end Alembic commands ###
+
+  op.execute(
+    (tables.loginservice.delete()
+     .where(tables.loginservice.c.name == op.inline_literal('google')))
+  )


@@ -14,7 +14,7 @@ from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql

-def upgrade():
+def upgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
   op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
   op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True)
@@ -34,7 +34,7 @@ def upgrade():
   ### end Alembic commands ###

-def downgrade():
+def downgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
   op.drop_index('visibility_name', table_name='visibility')
   op.create_index('visibility_name', 'visibility', ['name'], unique=False)


@@ -13,12 +13,8 @@ down_revision = '4b7ef0c7bdb2'
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
-from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from data.database import all_models

-def upgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
-
+def upgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
   op.create_table('externalnotificationmethod',
     sa.Column('id', sa.Integer(), nullable=False),
@@ -26,7 +22,7 @@ def upgrade():
     sa.PrimaryKeyConstraint('id')
   )
   op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True)
-  op.bulk_insert(schema.tables['externalnotificationmethod'],
+  op.bulk_insert(tables.externalnotificationmethod,
                  [
                    {'id':1, 'name':'quay_notification'},
                    {'id':2, 'name':'email'},
@@ -38,7 +34,7 @@ def upgrade():
     sa.PrimaryKeyConstraint('id')
   )
   op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True)
-  op.bulk_insert(schema.tables['externalnotificationevent'],
+  op.bulk_insert(tables.externalnotificationevent,
                  [
                    {'id':1, 'name':'repo_push'},
                    {'id':2, 'name':'build_queued'},
@@ -77,7 +73,7 @@ def upgrade():
   op.add_column(u'notification', sa.Column('dismissed', sa.Boolean(), nullable=False))

   # Manually add the new notificationkind types
-  op.bulk_insert(schema.tables['notificationkind'],
+  op.bulk_insert(tables.notificationkind,
                  [
                    {'id':5, 'name':'repo_push'},
                    {'id':6, 'name':'build_queued'},
@@ -87,7 +83,7 @@ def upgrade():
                  ])

   # Manually add the new logentrykind types
-  op.bulk_insert(schema.tables['logentrykind'],
+  op.bulk_insert(tables.logentrykind,
                  [
                    {'id':39, 'name':'add_repo_notification'},
                    {'id':40, 'name':'delete_repo_notification'},
@@ -97,61 +93,49 @@ def upgrade():
   ### end Alembic commands ###

-def downgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
+def downgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
   op.drop_column(u'notification', 'dismissed')
-  op.drop_index('repositorynotification_uuid', table_name='repositorynotification')
-  op.drop_index('repositorynotification_repository_id', table_name='repositorynotification')
-  op.drop_index('repositorynotification_method_id', table_name='repositorynotification')
-  op.drop_index('repositorynotification_event_id', table_name='repositorynotification')
   op.drop_table('repositorynotification')
-  op.drop_index('repositoryauthorizedemail_repository_id', table_name='repositoryauthorizedemail')
-  op.drop_index('repositoryauthorizedemail_email_repository_id', table_name='repositoryauthorizedemail')
-  op.drop_index('repositoryauthorizedemail_code', table_name='repositoryauthorizedemail')
   op.drop_table('repositoryauthorizedemail')
-  op.drop_index('externalnotificationevent_name', table_name='externalnotificationevent')
   op.drop_table('externalnotificationevent')
-  op.drop_index('externalnotificationmethod_name', table_name='externalnotificationmethod')
   op.drop_table('externalnotificationmethod')

   # Manually remove the notificationkind and logentrykind types
-  notificationkind = schema.tables['notificationkind']
   op.execute(
-    (notificationkind.delete()
-     .where(notificationkind.c.name == op.inline_literal('repo_push')))
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('repo_push')))
   )

   op.execute(
-    (notificationkind.delete()
-     .where(notificationkind.c.name == op.inline_literal('build_queued')))
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('build_queued')))
   )

   op.execute(
-    (notificationkind.delete()
-     .where(notificationkind.c.name == op.inline_literal('build_start')))
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('build_start')))
   )

   op.execute(
-    (notificationkind.delete()
-     .where(notificationkind.c.name == op.inline_literal('build_success')))
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('build_success')))
   )

   op.execute(
-    (notificationkind.delete()
-     .where(notificationkind.c.name == op.inline_literal('build_failure')))
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('build_failure')))
   )

   op.execute(
-    (logentrykind.delete()
-     .where(logentrykind.c.name == op.inline_literal('add_repo_notification')))
+    (tables.logentrykind.delete()
+     .where(tables.logentrykind.c.name == op.inline_literal('add_repo_notification')))
   )

   op.execute(
-    (logentrykind.delete()
-     .where(logentrykind.c.name == op.inline_literal('delete_repo_notification')))
+    (tables.logentrykind.delete()
+     .where(tables.logentrykind.c.name == op.inline_literal('delete_repo_notification')))
   )
   ### end Alembic commands ###


@@ -0,0 +1,29 @@
+"""add log kind for regenerating robot tokens
+
+Revision ID: 43e943c0639f
+Revises: 82297d834ad
+Create Date: 2014-08-25 17:14:42.784518
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '43e943c0639f'
+down_revision = '82297d834ad'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+  op.bulk_insert(tables.logentrykind,
+                 [
+                   {'id': 41, 'name':'regenerate_robot_token'},
+                 ])
+
+def downgrade(tables):
+  op.execute(
+    (tables.logentrykind.delete()
+     .where(tables.logentrykind.c.name == op.inline_literal('regenerate_robot_token')))
+  )


@@ -18,14 +18,14 @@ def get_id(query):
   conn = op.get_bind()
   return list(conn.execute(query, ()).fetchall())[0][0]

-def upgrade():
+def upgrade(tables):
   conn = op.get_bind()
-  event_id = get_id('Select id From externalnotificationevent Where name="repo_push" Limit 1')
-  method_id = get_id('Select id From externalnotificationmethod Where name="webhook" Limit 1')
+  event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
+  method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
   conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id))

-def downgrade():
+def downgrade(tables):
   conn = op.get_bind()
-  event_id = get_id('Select id From externalnotificationevent Where name="repo_push" Limit 1')
-  method_id = get_id('Select id From externalnotificationmethod Where name="webhook" Limit 1')
+  event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
+  method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
   conn.execute('Insert Into webhook (public_id, repository_id, parameters) Select uuid, repository_id, config_json FROM repositorynotification Where event_id=%s And method_id=%s' % (event_id, method_id))


@@ -0,0 +1,39 @@
+"""add new notification kinds
+
+Revision ID: 4a0c94399f38
+Revises: 1594a74a74ca
+Create Date: 2014-08-28 16:17:01.898269
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4a0c94399f38'
+down_revision = '1594a74a74ca'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+  op.bulk_insert(tables.externalnotificationmethod,
+                 [
+                   {'id':4, 'name':'flowdock'},
+                   {'id':5, 'name':'hipchat'},
+                   {'id':6, 'name':'slack'},
+                 ])
+
+def downgrade(tables):
+  op.execute(
+    (tables.externalnotificationmethod.delete()
+     .where(tables.externalnotificationmethod.c.name == op.inline_literal('flowdock')))
+  )
+
+  op.execute(
+    (tables.externalnotificationmethod.delete()
+     .where(tables.externalnotificationmethod.c.name == op.inline_literal('hipchat')))
+  )
+
+  op.execute(
+    (tables.externalnotificationmethod.delete()
+     .where(tables.externalnotificationmethod.c.name == op.inline_literal('slack')))
+  )


@@ -11,23 +11,18 @@ revision = '4b7ef0c7bdb2'
 down_revision = 'bcdde200a1b'

 from alembic import op
-from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from data.database import all_models
 import sqlalchemy as sa

-def upgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
-
-  op.bulk_insert(schema.tables['notificationkind'],
+def upgrade(tables):
+  op.bulk_insert(tables.notificationkind,
                  [
                    {'id':4, 'name':'maintenance'},
                  ])

-def downgrade():
-  notificationkind = schema.tables['notificationkind']
-
+def downgrade(tables):
   op.execute(
-    (notificationkind.delete()
-     .where(notificationkind.c.name == op.inline_literal('maintenance')))
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('maintenance')))
   )


@@ -0,0 +1,28 @@
+"""Add brute force prevention metadata to the user table.
+
+Revision ID: 4fdb65816b8d
+Revises: 43e943c0639f
+Create Date: 2014-09-03 12:35:33.722435
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4fdb65816b8d'
+down_revision = '43e943c0639f'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0"))
+  op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False, server_default=sa.func.now()))
+  ### end Alembic commands ###
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('user', 'last_invalid_login')
+  op.drop_column('user', 'invalid_login_attempts')
+  ### end Alembic commands ###


@@ -11,14 +11,9 @@ revision = '5a07499ce53f'
 down_revision = None

 from alembic import op
-from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from data.database import all_models
 import sqlalchemy as sa

-def upgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
-
+def upgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
   op.create_table('loginservice',
     sa.Column('id', sa.Integer(), nullable=False),
@@ -27,7 +22,7 @@ def upgrade():
     sa.PrimaryKeyConstraint('id')
   )
   op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
-  op.bulk_insert(schema.tables['loginservice'],
+  op.bulk_insert(tables.loginservice,
                  [
                    {'id':1, 'name':'github'},
                    {'id':2, 'name':'quayrobot'},
@@ -66,7 +61,7 @@ def upgrade():
   )
   op.create_index('role_name', 'role', ['name'], unique=False)
-  op.bulk_insert(schema.tables['role'],
+  op.bulk_insert(tables.role,
                  [
                    {'id':1, 'name':'admin'},
                    {'id':2, 'name':'write'},
@@ -80,7 +75,7 @@ def upgrade():
   )
   op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
-  op.bulk_insert(schema.tables['logentrykind'],
+  op.bulk_insert(tables.logentrykind,
                  [
                    {'id':1, 'name':'account_change_plan'},
                    {'id':2, 'name':'account_change_cc'},
@@ -136,7 +131,7 @@ def upgrade():
   )
   op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
-  op.bulk_insert(schema.tables['notificationkind'],
+  op.bulk_insert(tables.notificationkind,
                  [
                    {'id':1, 'name':'password_required'},
                    {'id':2, 'name':'over_private_usage'},
@@ -150,7 +145,7 @@ def upgrade():
   )
   op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
-  op.bulk_insert(schema.tables['teamrole'],
+  op.bulk_insert(tables.teamrole,
                  [
                    {'id':1, 'name':'admin'},
                    {'id':2, 'name':'creator'},
@@ -164,7 +159,7 @@ def upgrade():
   )
   op.create_index('visibility_name', 'visibility', ['name'], unique=False)
-  op.bulk_insert(schema.tables['visibility'],
+  op.bulk_insert(tables.visibility,
                  [
                    {'id':1, 'name':'public'},
                    {'id':2, 'name':'private'},
@@ -194,7 +189,7 @@ def upgrade():
   )
   op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
-  op.bulk_insert(schema.tables['buildtriggerservice'],
+  op.bulk_insert(tables.buildtriggerservice,
                  [
                    {'id':1, 'name':'github'},
                  ])
@@ -203,7 +198,7 @@
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('user_id', sa.Integer(), nullable=False),
     sa.Column('service_id', sa.Integer(), nullable=False),
-    sa.Column('service_ident', sa.String(length=255, collation='utf8_general_ci'), nullable=False),
+    sa.Column('service_ident', sa.String(length=255), nullable=False),
     sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ),
     sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
     sa.PrimaryKeyConstraint('id')
@@ -375,7 +370,7 @@
     sa.Column('command', sa.Text(), nullable=True),
     sa.Column('repository_id', sa.Integer(), nullable=False),
     sa.Column('image_size', sa.BigInteger(), nullable=True),
-    sa.Column('ancestors', sa.String(length=60535, collation='latin1_swedish_ci'), nullable=True),
+    sa.Column('ancestors', sa.String(length=60535), nullable=True),
     sa.Column('storage_id', sa.Integer(), nullable=True),
     sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
     sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
@@ -490,119 +485,34 @@
   ### end Alembic commands ###

-def downgrade():
+def downgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
-  op.drop_index('repositorybuild_uuid', table_name='repositorybuild')
-  op.drop_index('repositorybuild_trigger_id', table_name='repositorybuild')
-  op.drop_index('repositorybuild_resource_key', table_name='repositorybuild')
-  op.drop_index('repositorybuild_repository_id', table_name='repositorybuild')
-  op.drop_index('repositorybuild_pull_robot_id', table_name='repositorybuild')
-  op.drop_index('repositorybuild_access_token_id', table_name='repositorybuild')
   op.drop_table('repositorybuild')
-  op.drop_index('repositorybuildtrigger_write_token_id', table_name='repositorybuildtrigger')
-  op.drop_index('repositorybuildtrigger_service_id', table_name='repositorybuildtrigger')
-  op.drop_index('repositorybuildtrigger_repository_id', table_name='repositorybuildtrigger')
-  op.drop_index('repositorybuildtrigger_pull_robot_id', table_name='repositorybuildtrigger')
-  op.drop_index('repositorybuildtrigger_connected_user_id', table_name='repositorybuildtrigger')
   op.drop_table('repositorybuildtrigger')
-  op.drop_index('logentry_repository_id', table_name='logentry')
-  op.drop_index('logentry_performer_id', table_name='logentry')
-  op.drop_index('logentry_kind_id', table_name='logentry')
-  op.drop_index('logentry_datetime', table_name='logentry')
-  op.drop_index('logentry_account_id', table_name='logentry')
-  op.drop_index('logentry_access_token_id', table_name='logentry')
   op.drop_table('logentry')
-  op.drop_index('repositorytag_repository_id_name', table_name='repositorytag')
-  op.drop_index('repositorytag_repository_id', table_name='repositorytag')
-  op.drop_index('repositorytag_image_id', table_name='repositorytag')
   op.drop_table('repositorytag')
-  op.drop_index('permissionprototype_role_id', table_name='permissionprototype')
-  op.drop_index('permissionprototype_org_id_activating_user_id', table_name='permissionprototype')
-  op.drop_index('permissionprototype_org_id', table_name='permissionprototype')
-  op.drop_index('permissionprototype_delegate_user_id', table_name='permissionprototype')
-  op.drop_index('permissionprototype_delegate_team_id', table_name='permissionprototype')
-  op.drop_index('permissionprototype_activating_user_id', table_name='permissionprototype')
   op.drop_table('permissionprototype')
-  op.drop_index('image_storage_id', table_name='image')
-  op.drop_index('image_repository_id_docker_image_id', table_name='image')
-  op.drop_index('image_repository_id', table_name='image')
-  op.drop_index('image_ancestors', table_name='image')
   op.drop_table('image')
-  op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode')
-  op.drop_index('oauthauthorizationcode_application_id', table_name='oauthauthorizationcode')
   op.drop_table('oauthauthorizationcode')
-  op.drop_index('webhook_repository_id', table_name='webhook')
-  op.drop_index('webhook_public_id', table_name='webhook')
   op.drop_table('webhook')
-  op.drop_index('teammember_user_id_team_id', table_name='teammember')
-  op.drop_index('teammember_user_id', table_name='teammember')
-  op.drop_index('teammember_team_id', table_name='teammember')
   op.drop_table('teammember')
-  op.drop_index('oauthaccesstoken_uuid', table_name='oauthaccesstoken')
-  op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken')
-  op.drop_index('oauthaccesstoken_authorized_user_id', table_name='oauthaccesstoken')
-  op.drop_index('oauthaccesstoken_application_id', table_name='oauthaccesstoken')
-  op.drop_index('oauthaccesstoken_access_token', table_name='oauthaccesstoken')
   op.drop_table('oauthaccesstoken')
-  op.drop_index('repositorypermission_user_id_repository_id', table_name='repositorypermission')
-  op.drop_index('repositorypermission_user_id', table_name='repositorypermission')
-  op.drop_index('repositorypermission_team_id_repository_id', table_name='repositorypermission')
-  op.drop_index('repositorypermission_team_id', table_name='repositorypermission')
-  op.drop_index('repositorypermission_role_id', table_name='repositorypermission')
-  op.drop_index('repositorypermission_repository_id', table_name='repositorypermission')
   op.drop_table('repositorypermission')
-  op.drop_index('accesstoken_role_id', table_name='accesstoken')
-  op.drop_index('accesstoken_repository_id', table_name='accesstoken')
-  op.drop_index('accesstoken_code', table_name='accesstoken')
   op.drop_table('accesstoken')
-  op.drop_index('repository_visibility_id', table_name='repository')
-  op.drop_index('repository_namespace_name', table_name='repository')
   op.drop_table('repository')
-  op.drop_index('team_role_id', table_name='team')
-  op.drop_index('team_organization_id', table_name='team')
-  op.drop_index('team_name_organization_id', table_name='team')
-  op.drop_index('team_name', table_name='team')
   op.drop_table('team')
-  op.drop_index('emailconfirmation_user_id', table_name='emailconfirmation')
-  op.drop_index('emailconfirmation_code', table_name='emailconfirmation')
   op.drop_table('emailconfirmation')
-  op.drop_index('notification_uuid', table_name='notification')
-  op.drop_index('notification_target_id', table_name='notification')
-  op.drop_index('notification_kind_id', table_name='notification')
-  op.drop_index('notification_created', table_name='notification')
   op.drop_table('notification')
-  op.drop_index('oauthapplication_organization_id', table_name='oauthapplication')
-  op.drop_index('oauthapplication_client_id', table_name='oauthapplication')
   op.drop_table('oauthapplication')
-  op.drop_index('federatedlogin_user_id', table_name='federatedlogin')
-  op.drop_index('federatedlogin_service_id_user_id', table_name='federatedlogin')
-  op.drop_index('federatedlogin_service_id_service_ident', table_name='federatedlogin')
-  op.drop_index('federatedlogin_service_id', table_name='federatedlogin')
   op.drop_table('federatedlogin')
-  op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
   op.drop_table('buildtriggerservice')
-  op.drop_index('user_username', table_name='user')
-  op.drop_index('user_stripe_id', table_name='user')
-  op.drop_index('user_robot', table_name='user')
-  op.drop_index('user_organization', table_name='user')
-  op.drop_index('user_email', table_name='user')
   op.drop_table('user')
-  op.drop_index('visibility_name', table_name='visibility')
   op.drop_table('visibility')
-  op.drop_index('teamrole_name', table_name='teamrole')
   op.drop_table('teamrole')
-  op.drop_index('notificationkind_name', table_name='notificationkind')
   op.drop_table('notificationkind')
-  op.drop_index('logentrykind_name', table_name='logentrykind')
   op.drop_table('logentrykind')
-  op.drop_index('role_name', table_name='role')
   op.drop_table('role')
-  op.drop_index('queueitem_queue_name', table_name='queueitem')
-  op.drop_index('queueitem_processing_expires', table_name='queueitem')
-  op.drop_index('queueitem_available_after', table_name='queueitem')
-  op.drop_index('queueitem_available', table_name='queueitem')
   op.drop_table('queueitem')
   op.drop_table('imagestorage')
-  op.drop_index('loginservice_name', table_name='loginservice')
   op.drop_table('loginservice')
   ### end Alembic commands ###


@@ -13,24 +13,17 @@ down_revision = '47670cbeced'
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
-from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from data.database import all_models

-def upgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
-
-  op.bulk_insert(schema.tables['imagestoragelocation'],
+def upgrade(tables):
+  op.bulk_insert(tables.imagestoragelocation,
                  [
                    {'id':8, 'name':'s3_us_west_1'},
                  ])

-def downgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
-
+def downgrade(tables):
   op.execute(
-    (imagestoragelocation.delete()
-     .where(imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
+    (tables.imagestoragelocation.delete()
+     .where(tables.imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
   )


@@ -11,14 +11,10 @@ revision = 'bcdde200a1b'
 down_revision = '201d55b38649'

 from alembic import op
-from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from data.database import all_models
 import sqlalchemy as sa

-def upgrade():
-  schema = gen_sqlalchemy_metadata(all_models)
-
+def upgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
   op.create_table('imagestoragelocation',
     sa.Column('id', sa.Integer(), nullable=False),
@@ -27,7 +23,7 @@ def upgrade():
   )
   op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True)
-  op.bulk_insert(schema.tables['imagestoragelocation'],
+  op.bulk_insert(tables.imagestoragelocation,
                  [
                    {'id':1, 'name':'s3_us_east_1'},
                    {'id':2, 'name':'s3_eu_west_1'},
@@ -52,12 +48,8 @@ def upgrade():
   ### end Alembic commands ###

-def downgrade():
+def downgrade(tables):
   ### commands auto generated by Alembic - please adjust! ###
-  op.drop_index('imagestorageplacement_storage_id_location_id', table_name='imagestorageplacement')
-  op.drop_index('imagestorageplacement_storage_id', table_name='imagestorageplacement')
-  op.drop_index('imagestorageplacement_location_id', table_name='imagestorageplacement')
   op.drop_table('imagestorageplacement')
-  op.drop_index('imagestoragelocation_name', table_name='imagestoragelocation')
   op.drop_table('imagestoragelocation')
   ### end Alembic commands ###


@@ -0,0 +1,35 @@
+"""Remove the old webhooks table.
+
+Revision ID: f42b0ea7a4d
+Revises: 4fdb65816b8d
+Create Date: 2014-09-03 13:43:23.391464
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'f42b0ea7a4d'
+down_revision = '4fdb65816b8d'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_table('webhook')
+  ### end Alembic commands ###
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.create_table('webhook',
+    sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
+    sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False),
+    sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
+    sa.Column('parameters', mysql.LONGTEXT(), nullable=False),
+    sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_default_charset=u'latin1',
+    mysql_engine=u'InnoDB'
+  )
+  ### end Alembic commands ###


@@ -1,12 +1,17 @@
 import bcrypt
 import logging
-import datetime
 import dateutil.parser
 import json

+from datetime import datetime, timedelta
+
 from data.database import *
 from util.validation import *
 from util.names import format_robot_username
+from util.backoff import exponential_backoff
+
+
+EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)

 logger = logging.getLogger(__name__)
@@ -68,10 +73,15 @@ class TooManyUsersException(DataModelException):
   pass

-def is_create_user_allowed():
-  return get_active_user_count() < config.app_config['LICENSE_USER_LIMIT']
+class TooManyLoginAttemptsException(Exception):
+  def __init__(self, message, retry_after):
+    super(TooManyLoginAttemptsException, self).__init__(message)
+    self.retry_after = retry_after
+
+
+def is_create_user_allowed():
+  return True

 def create_user(username, password, email):
   """ Creates a regular user, if allowed. """
   if not validate_password(password):
@@ -181,6 +191,19 @@ def create_robot(robot_shortname, parent):
   except Exception as ex:
     raise DataModelException(ex.message)

+def get_robot(robot_shortname, parent):
+  robot_username = format_robot_username(parent.username, robot_shortname)
+  robot = lookup_robot(robot_username)
+
+  if not robot:
+    msg = ('Could not find robot with username: %s' %
+           robot_username)
+    raise InvalidRobotException(msg)
+
+  service = LoginService.get(name='quayrobot')
+  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
+
+  return robot, login.service_ident
+
 def lookup_robot(robot_username):
   joined = User.select().join(FederatedLogin).join(LoginService)
@@ -191,7 +214,6 @@ def lookup_robot(robot_username):
   return found[0]

 def verify_robot(robot_username, password):
   joined = User.select().join(FederatedLogin).join(LoginService)
   found = list(joined.where(FederatedLogin.service_ident == password,
@@ -204,6 +226,25 @@ def verify_robot(robot_username, password):
   return found[0]

+def regenerate_robot_token(robot_shortname, parent):
+  robot_username = format_robot_username(parent.username, robot_shortname)
+
+  robot = lookup_robot(robot_username)
+  if not robot:
+    raise InvalidRobotException('Could not find robot with username: %s' %
+                                robot_username)
+
+  password = random_string_generator(length=64)()
+  robot.email = password
+
+  service = LoginService.get(name='quayrobot')
+  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
+  login.service_ident = password
+
+  login.save()
+  robot.save()
+
+  return robot, password
+
 def delete_robot(robot_username):
   try:
@@ -346,7 +387,8 @@ def set_team_org_permission(team, team_role_name, set_by_username):
   return team

-def create_federated_user(username, email, service_name, service_id, set_password_notification):
+def create_federated_user(username, email, service_name, service_id,
+                          set_password_notification, metadata={}):
   if not is_create_user_allowed():
     raise TooManyUsersException()
@@ -356,7 +398,8 @@ def create_federated_user(username, email, service_name, service_id, set_passwor
   service = LoginService.get(LoginService.name == service_name)
   FederatedLogin.create(user=new_user, service=service,
-                        service_ident=service_id)
+                        service_ident=service_id,
+                        metadata_json=json.dumps(metadata))

   if set_password_notification:
     create_notification('password_required', new_user)
@@ -364,9 +407,10 @@ def create_federated_user(username, email, service_name, service_id, set_passwor
   return new_user

-def attach_federated_login(user, service_name, service_id):
+def attach_federated_login(user, service_name, service_id, metadata={}):
   service = LoginService.get(LoginService.name == service_name)
-  FederatedLogin.create(user=user, service=service, service_ident=service_id)
+  FederatedLogin.create(user=user, service=service, service_ident=service_id,
+                        metadata_json=json.dumps(metadata))

   return user
@@ -385,7 +429,7 @@ def verify_federated_login(service_name, service_id):
 def list_federated_logins(user):
   selected = FederatedLogin.select(FederatedLogin.service_ident,
-                                   LoginService.name)
+                                   LoginService.name, FederatedLogin.metadata_json)
   joined = selected.join(LoginService)
   return joined.where(LoginService.name != 'quayrobot',
                       FederatedLogin.user == user)
@@ -521,11 +565,30 @@ def verify_user(username_or_email, password):
   except User.DoesNotExist:
     return None

+  now = datetime.utcnow()
+
+  if fetched.invalid_login_attempts > 0:
+    can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
+                                       fetched.last_invalid_login)
+
+    if can_retry_at > now:
+      retry_after = can_retry_at - now
+      raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())
+
   if (fetched.password_hash and
       bcrypt.hashpw(password, fetched.password_hash) ==
       fetched.password_hash):
+
+    if fetched.invalid_login_attempts > 0:
+      fetched.invalid_login_attempts = 0
+      fetched.save()
+
     return fetched

+  fetched.invalid_login_attempts += 1
+  fetched.last_invalid_login = now
+  fetched.save()
+
   # We weren't able to authorize the user
   return None
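
Note: util/backoff.py is not shown in this diff; given how verify_user() calls it (attempt count, a timedelta scale, and the time of the last failure), exponential_backoff() presumably looks roughly like the following sketch. The doubling factor is an assumption, not confirmed by the diff.

# util/backoff.py (assumed shape; not part of this diff)
def exponential_backoff(attempts, scaling_factor, base):
  """ Return the earliest datetime at which another attempt should be allowed. """
  backoff = scaling_factor * (2 ** attempts)   # scaling_factor is a timedelta
  return base + backoff                        # base is the last failed attempt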
@@ -1007,7 +1070,8 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
                  .join(Repository)
                  .join(Visibility)
                  .switch(Repository)
-                 .join(RepositoryPermission, JOIN_LEFT_OUTER))
+                 .join(RepositoryPermission, JOIN_LEFT_OUTER)
+                 .where(ImageStorage.uploading == False))

   query = (_filter_to_repos_for_user(query, username)
            .where(Image.docker_image_id == docker_image_id))
@@ -1687,19 +1751,20 @@ def create_notification(kind_name, target, metadata={}):
 def create_unique_notification(kind_name, target, metadata={}):
   with config.app_config['DB_TRANSACTION_FACTORY'](db):
-    if list_notifications(target, kind_name).count() == 0:
+    if list_notifications(target, kind_name, limit=1).count() == 0:
       create_notification(kind_name, target, metadata)

 def lookup_notification(user, uuid):
-  results = list(list_notifications(user, id_filter=uuid, include_dismissed=True))
+  results = list(list_notifications(user, id_filter=uuid, include_dismissed=True, limit=1))
   if not results:
     return None

   return results[0]

-def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False):
+def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
+                       page=None, limit=None):
   Org = User.alias()
   AdminTeam = Team.alias()
   AdminTeamMember = TeamMember.alias()
@@ -1737,6 +1802,11 @@ def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=F
                .switch(Notification)
                .where(Notification.uuid == id_filter))

+  if page:
+    query = query.paginate(page, limit)
+  elif limit:
+    query = query.limit(limit)
+
   return query
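
Note: a short usage sketch of the new pagination arguments (the page size and kind name here are arbitrary examples):

# Illustrative only
first_page = list_notifications(user, page=1, limit=10)              # query.paginate(1, 10)
has_push   = list_notifications(user, 'repo_push', limit=1).count() > 0   # cheap existence check,
                                                                          # as in create_unique_notification()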


@@ -1,110 +1,35 @@
-import boto
 import os
 import logging
-import hashlib
 import magic

-from boto.s3.key import Key
 from uuid import uuid4
 from flask import url_for, request, send_file, make_response, abort
 from flask.views import View
+from _pyio import BufferedReader

 logger = logging.getLogger(__name__)

-class FakeUserfiles(object):
-  def prepare_for_drop(self, mime_type):
-    return ('http://fake/url', uuid4())
-
-  def store_file(self, file_like_obj, content_type):
-    raise NotImplementedError()
-
-  def get_file_url(self, file_id, expires_in=300):
-    return ('http://fake/url')
-
-  def get_file_checksum(self, file_id):
-    return 'abcdefg'
-
-class S3FileWriteException(Exception):
-  pass
-
-class S3Userfiles(object):
-  def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
-    self._initialized = False
-    self._bucket_name = bucket_name
-    self._access_key = s3_access_key
-    self._secret_key = s3_secret_key
-    self._prefix = path
-    self._s3_conn = None
-    self._bucket = None
-
-  def _initialize_s3(self):
-    if not self._initialized:
-      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
-      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
-      self._initialized = True
-
-  def prepare_for_drop(self, mime_type):
-    """ Returns a signed URL to upload a file to our bucket. """
-    self._initialize_s3()
-    logger.debug('Requested upload url with content type: %s' % mime_type)
-    file_id = str(uuid4())
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
-                         encrypt_key=True)
-    return (url, file_id)
-
-  def store_file(self, file_like_obj, content_type):
-    self._initialize_s3()
-    file_id = str(uuid4())
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    logger.debug('Setting s3 content type to: %s' % content_type)
-    k.set_metadata('Content-Type', content_type)
-    bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
-                                             rewind=True)
-
-    if bytes_written == 0:
-      raise S3FileWriteException('Unable to write file to S3')
-
-    return file_id
-
-  def get_file_url(self, file_id, expires_in=300, mime_type=None):
-    self._initialize_s3()
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    headers = None
-    if mime_type:
-      headers={'Content-Type': mime_type}
-
-    return k.generate_url(expires_in, headers=headers)
-
-  def get_file_checksum(self, file_id):
-    self._initialize_s3()
-    full_key = os.path.join(self._prefix, file_id)
-    k = self._bucket.lookup(full_key)
-    return k.etag[1:-1][:7]
-
 class UserfilesHandlers(View):
   methods = ['GET', 'PUT']

-  def __init__(self, local_userfiles):
-    self._userfiles = local_userfiles
+  def __init__(self, distributed_storage, location, files):
+    self._storage = distributed_storage
+    self._files = files
+    self._locations = {location}
     self._magic = magic.Magic(mime=True)

   def get(self, file_id):
-    path = self._userfiles.file_path(file_id)
-    if not os.path.exists(path):
+    path = self._files.get_file_id_path(file_id)
+    try:
+      file_stream = self._storage.stream_read_file(self._locations, path)
+      buffered = BufferedReader(file_stream)
+      file_header_bytes = buffered.peek(1024)
+      return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
+    except IOError:
       abort(404)

-    logger.debug('Sending path: %s' % path)
-    return send_file(path, mimetype=self._magic.from_file(path))
-
   def put(self, file_id):
     input_stream = request.stream
     if request.headers.get('transfer-encoding') == 'chunked':
@@ -112,7 +37,10 @@ class UserfilesHandlers(View):
       # encoding (Gunicorn)
       input_stream = request.environ['wsgi.input']

-    self._userfiles.store_stream(input_stream, file_id)
+    c_type = request.headers.get('Content-Type', None)
+
+    path = self._files.get_file_id_path(file_id)
+    self._storage.stream_write(self._locations, path, input_stream, c_type)

     return make_response('Okay')
@@ -123,99 +51,79 @@ class UserfilesHandlers(View):
       return self.put(file_id)

-class LocalUserfiles(object):
-  def __init__(self, app, path):
-    self._root_path = path
-    self._buffer_size = 64 * 1024  # 64 KB
+class DelegateUserfiles(object):
+  def __init__(self, app, distributed_storage, location, path, handler_name):
     self._app = app
+    self._storage = distributed_storage
+    self._locations = {location}
+    self._prefix = path
+    self._handler_name = handler_name

   def _build_url_adapter(self):
     return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
                                   script_name=self._app.config['APPLICATION_ROOT'] or '/',
                                   url_scheme=self._app.config['PREFERRED_URL_SCHEME'])

-  def prepare_for_drop(self, mime_type):
+  def get_file_id_path(self, file_id):
+    return os.path.join(self._prefix, file_id)
+
+  def prepare_for_drop(self, mime_type, requires_cors=True):
+    """ Returns a signed URL to upload a file to our bucket. """
+    logger.debug('Requested upload url with content type: %s' % mime_type)
     file_id = str(uuid4())
+    path = self.get_file_id_path(file_id)
+    url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors)
+
+    if url is None:
       with self._app.app_context() as ctx:
         ctx.url_adapter = self._build_url_adapter()
-        return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
+        return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
def file_path(self, file_id): return (url, file_id)
if '..' in file_id or file_id.startswith('/'):
raise RuntimeError('Invalid Filename')
return os.path.join(self._root_path, file_id)
def store_stream(self, stream, file_id):
path = self.file_path(file_id)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(path, 'w') as to_write:
while True:
try:
buf = stream.read(self._buffer_size)
if not buf:
break
to_write.write(buf)
except IOError:
break
def store_file(self, file_like_obj, content_type): def store_file(self, file_like_obj, content_type):
file_id = str(uuid4()) file_id = str(uuid4())
path = self.get_file_id_path(file_id)
# Rewind the file to match what s3 does self._storage.stream_write(self._locations, path, file_like_obj, content_type)
file_like_obj.seek(0, os.SEEK_SET)
self.store_stream(file_like_obj, file_id)
return file_id return file_id
def get_file_url(self, file_id, expires_in=300): def get_file_url(self, file_id, expires_in=300, requires_cors=False):
path = self.get_file_id_path(file_id)
url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors)
if url is None:
with self._app.app_context() as ctx: with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter() ctx.url_adapter = self._build_url_adapter()
return url_for('userfiles_handlers', file_id=file_id, _external=True) return url_for(self._handler_name, file_id=file_id, _external=True)
return url
def get_file_checksum(self, file_id): def get_file_checksum(self, file_id):
path = self.file_path(file_id) path = self.get_file_id_path(file_id)
sha_hash = hashlib.sha256() return self._storage.get_checksum(self._locations, path)
with open(path, 'r') as to_hash:
while True:
buf = to_hash.read(self._buffer_size)
if not buf:
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
class Userfiles(object): class Userfiles(object):
def __init__(self, app=None): def __init__(self, app=None, distributed_storage=None):
self.app = app self.app = app
if app is not None: if app is not None:
self.state = self.init_app(app) self.state = self.init_app(app, distributed_storage)
else: else:
self.state = None self.state = None
def init_app(self, app): def init_app(self, app, distributed_storage):
storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles') location = app.config.get('USERFILES_LOCATION')
path = app.config.get('USERFILES_PATH', '') path = app.config.get('USERFILES_PATH', None)
handler_name = 'userfiles_handlers'
userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
if storage_type == 'LocalUserfiles':
userfiles = LocalUserfiles(app, path)
app.add_url_rule('/userfiles/<file_id>', app.add_url_rule('/userfiles/<file_id>',
view_func=UserfilesHandlers.as_view('userfiles_handlers', view_func=UserfilesHandlers.as_view(handler_name,
local_userfiles=userfiles)) distributed_storage=distributed_storage,
location=location,
elif storage_type == 'S3Userfiles': files=userfiles))
access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
bucket = app.config.get('USERFILES_S3_BUCKET', '')
userfiles = S3Userfiles(path, access_key, secret_key, bucket)
elif storage_type == 'FakeUserfiles':
userfiles = FakeUserfiles()
else:
raise RuntimeError('Unknown userfiles type: %s' % storage_type)
# register extension with app # register extension with app
app.extensions = getattr(app, 'extensions', {}) app.extensions = getattr(app, 'extensions', {})
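
With the S3/local/fake backends gone, Userfiles only needs a distributed-storage object that can answer the calls made above; when the driver cannot produce a direct signed URL it returns None and DelegateUserfiles falls back to the /userfiles/<file_id> Flask handler. A hedged sketch of that minimal interface, reconstructed from the call sites rather than quoted from the storage package:

# Assumed interface only: method names and argument order are taken from the
# calls above (get_direct_upload_url, stream_write, ...), not from the storage
# package itself.
class NullUserfilesDriver(object):
    def get_direct_upload_url(self, locations, path, mime_type, requires_cors):
        return None  # no signed PUT URL -> prepare_for_drop() hands back the handler URL

    def get_direct_download_url(self, locations, path, expires_in, requires_cors):
        return None  # no signed GET URL -> get_file_url() hands back the handler URL

    def stream_write(self, locations, path, file_like_obj, content_type):
        raise NotImplementedError('write %s somewhere durable' % path)

    def stream_read_file(self, locations, path):
        raise NotImplementedError('return a file-like object for %s' % path)

    def get_checksum(self, locations, path):
        raise NotImplementedError('return a short checksum for %s' % path)
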

View file

@ -1,7 +1,8 @@
import logging import logging
import json import json
import datetime
from flask import Blueprint, request, make_response, jsonify from flask import Blueprint, request, make_response, jsonify, session
from flask.ext.restful import Resource, abort, Api, reqparse from flask.ext.restful import Resource, abort, Api, reqparse
from flask.ext.restful.utils.cors import crossdomain from flask.ext.restful.utils.cors import crossdomain
from werkzeug.exceptions import HTTPException from werkzeug.exceptions import HTTPException
@ -66,6 +67,11 @@ class Unauthorized(ApiException):
ApiException.__init__(self, 'insufficient_scope', 403, 'Unauthorized', payload) ApiException.__init__(self, 'insufficient_scope', 403, 'Unauthorized', payload)
class FreshLoginRequired(ApiException):
def __init__(self, payload=None):
ApiException.__init__(self, 'fresh_login_required', 401, "Requires fresh login", payload)
class ExceedsLicenseException(ApiException): class ExceedsLicenseException(ApiException):
def __init__(self, payload=None): def __init__(self, payload=None):
ApiException.__init__(self, None, 402, 'Payment Required', payload) ApiException.__init__(self, None, 402, 'Payment Required', payload)
@ -87,6 +93,14 @@ def handle_api_error(error):
return response return response
@api_bp.app_errorhandler(model.TooManyLoginAttemptsException)
@crossdomain(origin='*', headers=['Authorization', 'Content-Type'])
def handle_too_many_login_attempts(error):
response = make_response('Too many login attempts', 429)
response.headers['Retry-After'] = int(error.retry_after)
return response
def resource(*urls, **kwargs): def resource(*urls, **kwargs):
def wrapper(api_resource): def wrapper(api_resource):
if not api_resource: if not api_resource:
@ -256,6 +270,26 @@ def require_user_permission(permission_class, scope=None):
require_user_read = require_user_permission(UserReadPermission, scopes.READ_USER) require_user_read = require_user_permission(UserReadPermission, scopes.READ_USER)
require_user_admin = require_user_permission(UserAdminPermission, None) require_user_admin = require_user_permission(UserAdminPermission, None)
require_fresh_user_admin = require_user_permission(UserAdminPermission, None)
def require_fresh_login(func):
@add_method_metadata('requires_fresh_login', True)
@wraps(func)
def wrapped(*args, **kwargs):
user = get_authenticated_user()
if not user:
raise Unauthorized()
logger.debug('Checking fresh login for user %s', user.username)
last_login = session.get('login_time', datetime.datetime.min)
valid_span = datetime.datetime.now() - datetime.timedelta(minutes=10)
if not user.password_hash or last_login >= valid_span:
return func(*args, **kwargs)
raise FreshLoginRequired()
return wrapped
def require_scope(scope_object): def require_scope(scope_object):
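
require_fresh_login treats a session login_time newer than ten minutes ago as fresh, and skips the check entirely when the account has no password hash (presumably so externally-created accounts are not locked out); anything else raises FreshLoginRequired, which the new exception class above maps to a 401 with the fresh_login_required code. A standalone illustration of the freshness window:

# Standalone illustration of the window check; datetime.min mirrors the default
# used when the session has no 'login_time' key.
import datetime

def is_fresh(last_login, now=None):
    now = now or datetime.datetime.now()
    valid_span = now - datetime.timedelta(minutes=10)
    return last_login >= valid_span

now = datetime.datetime.now()
print(is_fresh(now - datetime.timedelta(minutes=3)))   # True  -> wrapped endpoint runs
print(is_fresh(now - datetime.timedelta(minutes=30)))  # False -> FreshLoginRequired (401)
print(is_fresh(datetime.datetime.min))                 # False -> never signed in this session
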

View file

@ -4,7 +4,7 @@ from flask import request
from app import billing from app import billing
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, log_action, from endpoints.api import (resource, nickname, ApiResource, validate_json_request, log_action,
related_user_resource, internal_only, Unauthorized, NotFound, related_user_resource, internal_only, Unauthorized, NotFound,
require_user_admin, show_if, hide_if) require_user_admin, show_if, hide_if, abort)
from endpoints.api.subscribe import subscribe, subscription_view from endpoints.api.subscribe import subscribe, subscription_view
from auth.permissions import AdministerOrganizationPermission from auth.permissions import AdministerOrganizationPermission
from auth.auth_context import get_authenticated_user from auth.auth_context import get_authenticated_user
@ -23,7 +23,11 @@ def get_card(user):
} }
if user.stripe_id: if user.stripe_id:
try:
cus = billing.Customer.retrieve(user.stripe_id) cus = billing.Customer.retrieve(user.stripe_id)
except stripe.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus and cus.default_card: if cus and cus.default_card:
# Find the default card. # Find the default card.
default_card = None default_card = None
@ -46,7 +50,11 @@ def get_card(user):
def set_card(user, token): def set_card(user, token):
if user.stripe_id: if user.stripe_id:
try:
cus = billing.Customer.retrieve(user.stripe_id) cus = billing.Customer.retrieve(user.stripe_id)
except stripe.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus: if cus:
try: try:
cus.card = token cus.card = token
@ -55,6 +63,8 @@ def set_card(user, token):
return carderror_response(exc) return carderror_response(exc)
except stripe.InvalidRequestError as exc: except stripe.InvalidRequestError as exc:
return carderror_response(exc) return carderror_response(exc)
except stripe.APIConnectionError as e:
return carderror_response(e)
return get_card(user) return get_card(user)
@ -75,7 +85,11 @@ def get_invoices(customer_id):
'plan': i.lines.data[0].plan.id if i.lines.data[0].plan else None 'plan': i.lines.data[0].plan.id if i.lines.data[0].plan else None
} }
try:
invoices = billing.Invoice.all(customer=customer_id, count=12) invoices = billing.Invoice.all(customer=customer_id, count=12)
except stripe.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
return { return {
'invoices': [invoice_view(i) for i in invoices.data] 'invoices': [invoice_view(i) for i in invoices.data]
} }
@ -228,7 +242,10 @@ class UserPlan(ApiResource):
private_repos = model.get_private_repo_count(user.username) private_repos = model.get_private_repo_count(user.username)
if user.stripe_id: if user.stripe_id:
try:
cus = billing.Customer.retrieve(user.stripe_id) cus = billing.Customer.retrieve(user.stripe_id)
except stripe.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus.subscription: if cus.subscription:
return subscription_view(cus.subscription, private_repos) return subscription_view(cus.subscription, private_repos)
@ -291,7 +308,10 @@ class OrganizationPlan(ApiResource):
private_repos = model.get_private_repo_count(orgname) private_repos = model.get_private_repo_count(orgname)
organization = model.get_organization(orgname) organization = model.get_organization(orgname)
if organization.stripe_id: if organization.stripe_id:
try:
cus = billing.Customer.retrieve(organization.stripe_id) cus = billing.Customer.retrieve(organization.stripe_id)
except stripe.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus.subscription: if cus.subscription:
return subscription_view(cus.subscription, private_repos) return subscription_view(cus.subscription, private_repos)
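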

View file

@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False):
} }
if can_write: if can_write:
resp['archive_url'] = user_files.get_file_url(build_obj.resource_key) resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
return resp return resp
@ -257,7 +257,7 @@ class FileDropResource(ApiResource):
def post(self): def post(self):
""" Request a URL to which a file may be uploaded. """ """ Request a URL to which a file may be uploaded. """
mime_type = request.get_json()['mimeType'] mime_type = request.get_json()['mimeType']
(url, file_id) = user_files.prepare_for_drop(mime_type) (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True)
return { return {
'url': url, 'url': url,
'file_id': str(file_id), 'file_id': str(file_id),

View file

@ -119,6 +119,11 @@ def swagger_route_data(include_internal=False, compact=False):
if internal is not None: if internal is not None:
new_operation['internal'] = True new_operation['internal'] = True
if include_internal:
requires_fresh_login = method_metadata(method, 'requires_fresh_login')
if requires_fresh_login is not None:
new_operation['requires_fresh_login'] = True
if not internal or (internal and include_internal): if not internal or (internal and include_internal):
operations.append(new_operation) operations.append(new_operation)

View file

@ -35,6 +35,14 @@ class UserRobotList(ApiResource):
@internal_only @internal_only
class UserRobot(ApiResource): class UserRobot(ApiResource):
""" Resource for managing a user's robots. """ """ Resource for managing a user's robots. """
@require_user_admin
@nickname('getUserRobot')
def get(self, robot_shortname):
""" Returns the user's robot with the specified name. """
parent = get_authenticated_user()
robot, password = model.get_robot(robot_shortname, parent)
return robot_view(robot.username, password)
@require_user_admin @require_user_admin
@nickname('createUserRobot') @nickname('createUserRobot')
def put(self, robot_shortname): def put(self, robot_shortname):
@ -79,6 +87,18 @@ class OrgRobotList(ApiResource):
@related_user_resource(UserRobot) @related_user_resource(UserRobot)
class OrgRobot(ApiResource): class OrgRobot(ApiResource):
""" Resource for managing an organization's robots. """ """ Resource for managing an organization's robots. """
@require_scope(scopes.ORG_ADMIN)
@nickname('getOrgRobot')
def get(self, orgname, robot_shortname):
""" Returns the organization's robot with the specified name. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.get_robot(robot_shortname, parent)
return robot_view(robot.username, password)
raise Unauthorized()
@require_scope(scopes.ORG_ADMIN) @require_scope(scopes.ORG_ADMIN)
@nickname('createOrgRobot') @nickname('createOrgRobot')
def put(self, orgname, robot_shortname): def put(self, orgname, robot_shortname):
@ -103,3 +123,38 @@ class OrgRobot(ApiResource):
return 'Deleted', 204 return 'Deleted', 204
raise Unauthorized() raise Unauthorized()
@resource('/v1/user/robots/<robot_shortname>/regenerate')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@internal_only
class RegenerateUserRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
@require_user_admin
@nickname('regenerateUserRobotToken')
def post(self, robot_shortname):
""" Regenerates the token for a user's robot. """
parent = get_authenticated_user()
robot, password = model.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', parent.username, {'robot': robot_shortname})
return robot_view(robot.username, password)
@resource('/v1/organization/<orgname>/robots/<robot_shortname>/regenerate')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@related_user_resource(RegenerateUserRobot)
class RegenerateOrgRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
@require_scope(scopes.ORG_ADMIN)
@nickname('regenerateOrgRobotToken')
def post(self, orgname, robot_shortname):
""" Regenerates the token for an organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', orgname, {'robot': robot_shortname})
return robot_view(robot.username, password)
raise Unauthorized()
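
Both regeneration resources are POST-only and return the same robot_view payload as the other robot endpoints. A hedged usage sketch with requests; the hostname, the /api prefix, and the authentication details are assumptions about how the blueprint is mounted, not something this diff states:

# Placeholder host and names; an authenticated session (or a token carrying the
# org.admin scope for the second call) is assumed.
import requests

session = requests.Session()
# ... sign in first so the session carries user-admin rights ...

# Regenerate a user robot's token:
resp = session.post('https://quay.example.com/api/v1/user/robots/deploybot/regenerate')

# Regenerate an organization robot's token:
resp = session.post(
    'https://quay.example.com/api/v1/organization/someorg/robots/deploybot/regenerate')

print(resp.json())  # fields come from robot_view(robot.username, password)
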

View file

@ -15,6 +15,9 @@ logger = logging.getLogger(__name__)
def carderror_response(exc): def carderror_response(exc):
return {'carderror': exc.message}, 402 return {'carderror': exc.message}, 402
def connection_response(exc):
return {'message': 'Could not contact Stripe. Please try again.'}, 503
def subscription_view(stripe_subscription, used_repos): def subscription_view(stripe_subscription, used_repos):
view = { view = {
@ -74,19 +77,29 @@ def subscribe(user, plan, token, require_business_plan):
log_action('account_change_plan', user.username, {'plan': plan}) log_action('account_change_plan', user.username, {'plan': plan})
except stripe.CardError as e: except stripe.CardError as e:
return carderror_response(e) return carderror_response(e)
except stripe.APIConnectionError as e:
return connection_response(e)
response_json = subscription_view(cus.subscription, private_repos) response_json = subscription_view(cus.subscription, private_repos)
status_code = 201 status_code = 201
else: else:
# Change the plan # Change the plan
try:
cus = billing.Customer.retrieve(user.stripe_id) cus = billing.Customer.retrieve(user.stripe_id)
except stripe.APIConnectionError as e:
return connection_response(e)
if plan_found['price'] == 0: if plan_found['price'] == 0:
if cus.subscription is not None: if cus.subscription is not None:
# We only have to cancel the subscription if they actually have one # We only have to cancel the subscription if they actually have one
try:
cus.cancel_subscription() cus.cancel_subscription()
cus.save() cus.save()
except stripe.APIConnectionError as e:
return connection_response(e)
check_repository_usage(user, plan_found) check_repository_usage(user, plan_found)
log_action('account_change_plan', user.username, {'plan': plan}) log_action('account_change_plan', user.username, {'plan': plan})
@ -101,6 +114,8 @@ def subscribe(user, plan, token, require_business_plan):
cus.save() cus.save()
except stripe.CardError as e: except stripe.CardError as e:
return carderror_response(e) return carderror_response(e)
except stripe.APIConnectionError as e:
return connection_response(e)
response_json = subscription_view(cus.subscription, private_repos) response_json = subscription_view(cus.subscription, private_repos)
check_repository_usage(user, plan_found) check_repository_usage(user, plan_found)
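
subscribe() now translates the two Stripe failure modes differently: card problems keep the existing 402 carderror payload, while connectivity problems become the 503 connection_response defined above (the resource endpoints abort(503) instead). A condensed sketch of the pattern; the two helpers are copied from this file, the save step is illustrative:

import stripe

def carderror_response(exc):
    return {'carderror': exc.message}, 402

def connection_response(exc):
    return {'message': 'Could not contact Stripe. Please try again.'}, 503

def save_customer(cus):
    # Illustrative: 'cus' stands in for a Stripe customer mid-plan-change.
    try:
        cus.save()
    except stripe.CardError as e:
        return carderror_response(e)
    except stripe.APIConnectionError as e:
        return connection_response(e)
    return None
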

View file

@ -42,24 +42,6 @@ class SuperUserLogs(ApiResource):
abort(403) abort(403)
@resource('/v1/superuser/seats')
@internal_only
@show_if(features.SUPER_USERS)
@hide_if(features.BILLING)
class SeatUsage(ApiResource):
""" Resource for managing the seats granted in the license for the system. """
@nickname('getSeatCount')
def get(self):
""" Returns the current number of seats being used in the system. """
if SuperUserPermission().can():
return {
'count': model.get_active_user_count(),
'allowed': app.config.get('LICENSE_USER_LIMIT', 0)
}
abort(403)
def user_view(user): def user_view(user):
return { return {
'username': user.username, 'username': user.username,

View file

@ -7,8 +7,9 @@ from flask.ext.principal import identity_changed, AnonymousIdentity
from app import app, billing as stripe, authentication from app import app, billing as stripe, authentication
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error, from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, NotFound, require_user_admin, log_action, internal_only, NotFound, require_user_admin, parse_args,
InvalidToken, require_scope, format_date, hide_if, show_if, license_error) query_param, InvalidToken, require_scope, format_date, hide_if, show_if,
license_error, require_fresh_login)
from endpoints.api.subscribe import subscribe from endpoints.api.subscribe import subscribe
from endpoints.common import common_login from endpoints.common import common_login
from data import model from data import model
@ -39,9 +40,15 @@ def user_view(user):
organizations = model.get_user_organizations(user.username) organizations = model.get_user_organizations(user.username)
def login_view(login): def login_view(login):
try:
metadata = json.loads(login.metadata_json)
except:
metadata = {}
return { return {
'service': login.service.name, 'service': login.service.name,
'service_identifier': login.service_ident, 'service_identifier': login.service_ident,
'metadata': metadata
} }
logins = model.list_federated_logins(user) logins = model.list_federated_logins(user)
@ -88,6 +95,7 @@ class User(ApiResource):
""" Operations related to users. """ """ Operations related to users. """
schemas = { schemas = {
'NewUser': { 'NewUser': {
'id': 'NewUser', 'id': 'NewUser',
'type': 'object', 'type': 'object',
'description': 'Fields which must be specified for a new user.', 'description': 'Fields which must be specified for a new user.',
@ -143,6 +151,7 @@ class User(ApiResource):
return user_view(user) return user_view(user)
@require_user_admin @require_user_admin
@require_fresh_login
@nickname('changeUserDetails') @nickname('changeUserDetails')
@internal_only @internal_only
@validate_json_request('UpdateUser') @validate_json_request('UpdateUser')
@ -356,6 +365,37 @@ class Signin(ApiResource):
return conduct_signin(username, password) return conduct_signin(username, password)
@resource('/v1/signin/verify')
@internal_only
class VerifyUser(ApiResource):
""" Operations for verifying the existing user. """
schemas = {
'VerifyUser': {
'id': 'VerifyUser',
'type': 'object',
'description': 'Information required to verify the signed in user.',
'required': [
'password',
],
'properties': {
'password': {
'type': 'string',
'description': 'The user\'s password',
},
},
},
}
@require_user_admin
@nickname('verifyUser')
@validate_json_request('VerifyUser')
def post(self):
""" Verifies the signed in the user with the specified credentials. """
signin_data = request.get_json()
password = signin_data['password']
return conduct_signin(get_authenticated_user().username, password)
@resource('/v1/signout') @resource('/v1/signout')
@internal_only @internal_only
class Signout(ApiResource): class Signout(ApiResource):
@ -403,11 +443,24 @@ class Recovery(ApiResource):
@internal_only @internal_only
class UserNotificationList(ApiResource): class UserNotificationList(ApiResource):
@require_user_admin @require_user_admin
@parse_args
@query_param('page', 'Offset page number. (int)', type=int, default=0)
@query_param('limit', 'Limit on the number of results (int)', type=int, default=5)
@nickname('listUserNotifications') @nickname('listUserNotifications')
def get(self): def get(self, args):
notifications = model.list_notifications(get_authenticated_user()) page = args['page']
limit = args['limit']
notifications = list(model.list_notifications(get_authenticated_user(), page=page, limit=limit + 1))
has_more = False
if len(notifications) > limit:
has_more = True
notifications = notifications[0:limit]
return { return {
'notifications': [notification_view(notification) for notification in notifications] 'notifications': [notification_view(notification) for notification in notifications],
'additional': has_more
} }
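
listUserNotifications asks the model for limit + 1 rows so it can report whether more pages exist (the 'additional' flag) without issuing a separate count query. The trick in isolation, over a plain list:

# Standalone illustration of the limit+1 trick; slicing a list stands in for
# the paginated query.
def window(items, offset, limit):
    fetched = items[offset:offset + limit + 1]  # ask for one extra row
    has_more = len(fetched) > limit
    return fetched[0:limit], has_more

rows = list(range(12))
print(window(rows, 0, 5))   # ([0, 1, 2, 3, 4], True)
print(window(rows, 10, 5))  # ([10, 11], False)
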

View file

@ -4,12 +4,14 @@ from flask import request, redirect, url_for, Blueprint
from flask.ext.login import current_user from flask.ext.login import current_user
from endpoints.common import render_page_template, common_login, route_show_if from endpoints.common import render_page_template, common_login, route_show_if
from app import app, analytics from app import app, analytics, get_app_url
from data import model from data import model
from util.names import parse_repository_name from util.names import parse_repository_name
from util.validation import generate_valid_usernames
from util.http import abort from util.http import abort
from auth.permissions import AdministerRepositoryPermission from auth.permissions import AdministerRepositoryPermission
from auth.auth import require_session_login from auth.auth import require_session_login
from peewee import IntegrityError
import features import features
@ -20,19 +22,38 @@ client = app.config['HTTPCLIENT']
callback = Blueprint('callback', __name__) callback = Blueprint('callback', __name__)
def render_ologin_error(service_name,
error_message='Could not load user data. The token may have expired.'):
return render_page_template('ologinerror.html', service_name=service_name,
error_message=error_message,
service_url=get_app_url())
def exchange_github_code_for_token(code, for_login=True): def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False,
redirect_suffix=''):
code = request.args.get('code') code = request.args.get('code')
id_config = service_name + '_LOGIN_CLIENT_ID' if for_login else service_name + '_CLIENT_ID'
secret_config = service_name + '_LOGIN_CLIENT_SECRET' if for_login else service_name + '_CLIENT_SECRET'
payload = { payload = {
'client_id': app.config['GITHUB_LOGIN_CLIENT_ID' if for_login else 'GITHUB_CLIENT_ID'], 'client_id': app.config[id_config],
'client_secret': app.config['GITHUB_LOGIN_CLIENT_SECRET' if for_login else 'GITHUB_CLIENT_SECRET'], 'client_secret': app.config[secret_config],
'code': code, 'code': code,
'grant_type': 'authorization_code',
'redirect_uri': '%s://%s/oauth2/%s/callback%s' % (app.config['PREFERRED_URL_SCHEME'],
app.config['SERVER_HOSTNAME'],
service_name.lower(),
redirect_suffix)
} }
headers = { headers = {
'Accept': 'application/json' 'Accept': 'application/json'
} }
get_access_token = client.post(app.config['GITHUB_TOKEN_URL'], if form_encode:
get_access_token = client.post(app.config[service_name + '_TOKEN_URL'],
data=payload, headers=headers)
else:
get_access_token = client.post(app.config[service_name + '_TOKEN_URL'],
params=payload, headers=headers) params=payload, headers=headers)
json_data = get_access_token.json() json_data = get_access_token.json()
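
Because exchange_code_for_token derives every config key and the redirect URI from the service name, the one helper now serves both GitHub and Google, with form_encode switching between query-parameter and form-body POSTs to the provider's token URL. How the keys come out for the Google attach flow, as a runnable snippet with placeholder host values:

# Placeholder scheme/hostname; the derivation expressions mirror the ones above.
service_name = 'GOOGLE'
for_login = True
redirect_suffix = '/attach'

id_config = service_name + '_LOGIN_CLIENT_ID' if for_login else service_name + '_CLIENT_ID'
secret_config = service_name + '_LOGIN_CLIENT_SECRET' if for_login else service_name + '_CLIENT_SECRET'
token_url_config = service_name + '_TOKEN_URL'
redirect_uri = '%s://%s/oauth2/%s/callback%s' % ('https', 'quay.example.com',
                                                 service_name.lower(), redirect_suffix)

print(id_config)         # GOOGLE_LOGIN_CLIENT_ID
print(secret_config)     # GOOGLE_LOGIN_CLIENT_SECRET
print(token_url_config)  # GOOGLE_TOKEN_URL
print(redirect_uri)      # https://quay.example.com/oauth2/google/callback/attach
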
@ -52,17 +73,82 @@ def get_github_user(token):
return get_user.json() return get_user.json()
def get_google_user(token):
token_param = {
'access_token': token,
'alt': 'json',
}
get_user = client.get(app.config['GOOGLE_USER_URL'], params=token_param)
return get_user.json()
def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
to_login = model.verify_federated_login(service_name.lower(), user_id)
if not to_login:
# try to create the user
try:
valid = next(generate_valid_usernames(username))
to_login = model.create_federated_user(valid, email, service_name.lower(),
user_id, set_password_notification=True,
metadata=metadata)
# Success, tell analytics
analytics.track(to_login.username, 'register', {'service': service_name.lower()})
state = request.args.get('state', None)
if state:
logger.debug('Aliasing with state: %s' % state)
analytics.alias(to_login.username, state)
except model.DataModelException, ex:
return render_ologin_error(service_name, ex.message)
if common_login(to_login):
return redirect(url_for('web.index'))
return render_ologin_error(service_name)
def get_google_username(user_data):
username = user_data['email']
at = username.find('@')
if at > 0:
username = username[0:at]
return username
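
get_google_username simply takes everything before the @ of the Google account's e-mail; when a new account must be created, conduct_oauth_login then runs that value through generate_valid_usernames. A worked example (function repeated verbatim so the snippet runs on its own):

def get_google_username(user_data):
    username = user_data['email']
    at = username.find('@')
    if at > 0:
        username = username[0:at]
    return username

print(get_google_username({'email': 'alice@example.com'}))  # alice
print(get_google_username({'email': 'no-at-sign'}))         # no-at-sign (unchanged)
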
@callback.route('/google/callback', methods=['GET'])
@route_show_if(features.GOOGLE_LOGIN)
def google_oauth_callback():
error = request.args.get('error', None)
if error:
return render_ologin_error('Google', error)
token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE', form_encode=True)
user_data = get_google_user(token)
if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
return render_ologin_error('Google')
username = get_google_username(user_data)
metadata = {
'service_username': user_data['email']
}
return conduct_oauth_login('Google', user_data['id'], username, user_data['email'],
metadata=metadata)
@callback.route('/github/callback', methods=['GET']) @callback.route('/github/callback', methods=['GET'])
@route_show_if(features.GITHUB_LOGIN) @route_show_if(features.GITHUB_LOGIN)
def github_oauth_callback(): def github_oauth_callback():
error = request.args.get('error', None) error = request.args.get('error', None)
if error: if error:
return render_page_template('githuberror.html', error_message=error) return render_ologin_error('GitHub', error)
token = exchange_github_code_for_token(request.args.get('code')) token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
user_data = get_github_user(token) user_data = get_github_user(token)
if not user_data: if not user_data:
return render_page_template('githuberror.html', error_message='Could not load user data') return render_ologin_error('GitHub')
username = user_data['login'] username = user_data['login']
github_id = user_data['id'] github_id = user_data['id']
@ -84,42 +170,67 @@ def github_oauth_callback():
if user_email['primary']: if user_email['primary']:
break break
to_login = model.verify_federated_login('github', github_id) metadata = {
if not to_login: 'service_username': username
# try to create the user }
return conduct_oauth_login('github', github_id, username, found_email, metadata=metadata)
@callback.route('/google/callback/attach', methods=['GET'])
@route_show_if(features.GOOGLE_LOGIN)
@require_session_login
def google_oauth_attach():
token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE',
redirect_suffix='/attach', form_encode=True)
user_data = get_google_user(token)
if not user_data or not user_data.get('id', None):
return render_ologin_error('Google')
google_id = user_data['id']
user_obj = current_user.db_user()
username = get_google_username(user_data)
metadata = {
'service_username': user_data['email']
}
try: try:
to_login = model.create_federated_user(username, found_email, 'github', model.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
github_id, set_password_notification=True) except IntegrityError:
err = 'Google account %s is already attached to a %s account' % (
username, app.config['REGISTRY_TITLE_SHORT'])
return render_ologin_error('Google', err)
# Success, tell analytics return redirect(url_for('web.user'))
analytics.track(to_login.username, 'register', {'service': 'github'})
state = request.args.get('state', None)
if state:
logger.debug('Aliasing with state: %s' % state)
analytics.alias(to_login.username, state)
except model.DataModelException, ex:
return render_page_template('githuberror.html', error_message=ex.message)
if common_login(to_login):
return redirect(url_for('web.index'))
return render_page_template('githuberror.html')
@callback.route('/github/callback/attach', methods=['GET']) @callback.route('/github/callback/attach', methods=['GET'])
@route_show_if(features.GITHUB_LOGIN) @route_show_if(features.GITHUB_LOGIN)
@require_session_login @require_session_login
def github_oauth_attach(): def github_oauth_attach():
token = exchange_github_code_for_token(request.args.get('code')) token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
user_data = get_github_user(token) user_data = get_github_user(token)
if not user_data: if not user_data:
return render_page_template('githuberror.html', error_message='Could not load user data') return render_ologin_error('GitHub')
github_id = user_data['id'] github_id = user_data['id']
user_obj = current_user.db_user() user_obj = current_user.db_user()
model.attach_federated_login(user_obj, 'github', github_id)
username = user_data['login']
metadata = {
'service_username': username
}
try:
model.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
except IntegrityError:
err = 'GitHub account %s is already attached to a %s account' % (
username, app.config['REGISTRY_TITLE_SHORT'])
return render_ologin_error('GitHub', err)
return redirect(url_for('web.user')) return redirect(url_for('web.user'))
@ -130,7 +241,8 @@ def github_oauth_attach():
def attach_github_build_trigger(namespace, repository): def attach_github_build_trigger(namespace, repository):
permission = AdministerRepositoryPermission(namespace, repository) permission = AdministerRepositoryPermission(namespace, repository)
if permission.can(): if permission.can():
token = exchange_github_code_for_token(request.args.get('code'), for_login=False) token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB',
for_login=False)
repo = model.get_repository(namespace, repository) repo = model.get_repository(namespace, repository)
if not repo: if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository) msg = 'Invalid repository: %s/%s' % (namespace, repository)

View file

@ -2,8 +2,9 @@ import logging
import urlparse import urlparse
import json import json
import string import string
import datetime
from flask import make_response, render_template, request, abort from flask import make_response, render_template, request, abort, session
from flask.ext.login import login_user, UserMixin from flask.ext.login import login_user, UserMixin
from flask.ext.principal import identity_changed from flask.ext.principal import identity_changed
from random import SystemRandom from random import SystemRandom
@ -112,6 +113,7 @@ def common_login(db_user):
logger.debug('Successfully signed in as: %s' % db_user.username) logger.debug('Successfully signed in as: %s' % db_user.username)
new_identity = QuayDeferredPermissionUser(db_user.username, 'username', {scopes.DIRECT_LOGIN}) new_identity = QuayDeferredPermissionUser(db_user.username, 'username', {scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=new_identity) identity_changed.send(app, identity=new_identity)
session['login_time'] = datetime.datetime.now()
return True return True
else: else:
logger.debug('User could not be logged in, inactive?.') logger.debug('User could not be logged in, inactive?.')

View file

@ -413,8 +413,39 @@ def put_repository_auth(namespace, repository):
@index.route('/search', methods=['GET']) @index.route('/search', methods=['GET'])
@process_auth
def get_search(): def get_search():
abort(501, 'Not Implemented', issue='not-implemented') def result_view(repo):
return {
"name": repo.namespace + '/' + repo.name,
"description": repo.description
}
query = request.args.get('q')
username = None
user = get_authenticated_user()
if user is not None:
username = user.username
if query:
matching = model.get_matching_repositories(query, username)
else:
matching = []
results = [result_view(repo) for repo in matching
if (repo.visibility.name == 'public' or
ReadRepositoryPermission(repo.namespace, repo.name).can())]
data = {
"query": query,
"num_results": len(results),
"results" : results
}
resp = make_response(json.dumps(data), 200)
resp.mimetype = 'application/json'
return resp
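
get_search now returns real JSON instead of a 501; per result_view and the data dict above, a response looks roughly like this (names and descriptions are made up):

# Illustrative payload only.
{
    "query": "postgres",
    "num_results": 2,
    "results": [
        {"name": "someuser/postgres", "description": "Vanilla PostgreSQL image"},
        {"name": "someorg/postgres-ha", "description": "Replicated PostgreSQL"},
    ],
}
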
@index.route('/_ping') @index.route('/_ping')

View file

@ -15,6 +15,13 @@ class NotificationEvent(object):
def __init__(self): def __init__(self):
pass pass
def get_level(self, event_data, notification_data):
"""
Returns a 'level' representing the severity of the event.
Valid values are: 'info', 'warning', 'error', 'primary'
"""
raise NotImplementedError
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
""" """
Returns a human readable one-line summary for the given notification data. Returns a human readable one-line summary for the given notification data.
@ -55,6 +62,9 @@ class RepoPushEvent(NotificationEvent):
def event_name(cls): def event_name(cls):
return 'repo_push' return 'repo_push'
def get_level(self, event_data, notification_data):
return 'info'
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
return 'Repository %s updated' % (event_data['repository']) return 'Repository %s updated' % (event_data['repository'])
@ -88,6 +98,9 @@ class BuildQueueEvent(NotificationEvent):
def event_name(cls): def event_name(cls):
return 'build_queued' return 'build_queued'
def get_level(self, event_data, notification_data):
return 'info'
def get_sample_data(self, repository): def get_sample_data(self, repository):
build_uuid = 'fake-build-id' build_uuid = 'fake-build-id'
@ -127,6 +140,9 @@ class BuildStartEvent(NotificationEvent):
def event_name(cls): def event_name(cls):
return 'build_start' return 'build_start'
def get_level(self, event_data, notification_data):
return 'info'
def get_sample_data(self, repository): def get_sample_data(self, repository):
build_uuid = 'fake-build-id' build_uuid = 'fake-build-id'
@ -155,6 +171,9 @@ class BuildSuccessEvent(NotificationEvent):
def event_name(cls): def event_name(cls):
return 'build_success' return 'build_success'
def get_level(self, event_data, notification_data):
return 'primary'
def get_sample_data(self, repository): def get_sample_data(self, repository):
build_uuid = 'fake-build-id' build_uuid = 'fake-build-id'
@ -183,7 +202,12 @@ class BuildFailureEvent(NotificationEvent):
def event_name(cls): def event_name(cls):
return 'build_failure' return 'build_failure'
def get_level(self, event_data, notification_data):
return 'error'
def get_sample_data(self, repository): def get_sample_data(self, repository):
build_uuid = 'fake-build-id'
return build_event_data(repository, { return build_event_data(repository, {
'build_id': build_uuid, 'build_id': build_uuid,
'build_name': 'some-fake-build', 'build_name': 'some-fake-build',

View file

@ -4,9 +4,11 @@ import os.path
import tarfile import tarfile
import base64 import base64
import json import json
import requests
import re
from flask.ext.mail import Message from flask.ext.mail import Message
from app import mail, app from app import mail, app, get_app_url
from data import model from data import model
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -187,3 +189,194 @@ class WebhookMethod(NotificationMethod):
return False return False
return True return True
class FlowdockMethod(NotificationMethod):
""" Method for sending notifications to Flowdock via the Team Inbox API:
https://www.flowdock.com/api/team-inbox
"""
@classmethod
def method_name(cls):
return 'flowdock'
def validate(self, repository, config_data):
token = config_data.get('flow_api_token', '')
if not token:
raise CannotValidateNotificationMethodException('Missing Flowdock API Token')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
token = config_data.get('flow_api_token', '')
if not token:
return False
owner = model.get_user(notification.repository.namespace)
if not owner:
# Something went wrong.
return False
url = 'https://api.flowdock.com/v1/messages/team_inbox/%s' % token
headers = {'Content-type': 'application/json'}
payload = {
'source': 'Quay',
'from_address': 'support@quay.io',
'subject': event_handler.get_summary(notification_data['event_data'], notification_data),
'content': event_handler.get_message(notification_data['event_data'], notification_data),
'from_name': owner.username,
'project': notification.repository.namespace + ' ' + notification.repository.name,
'tags': ['#' + event_handler.event_name()],
'link': notification_data['event_data']['homepage']
}
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
logger.error('%s response for flowdock to url: %s' % (resp.status_code,
url))
logger.error(resp.content)
return False
except requests.exceptions.RequestException as ex:
logger.exception('Flowdock notification could not be sent: %s' % ex.message)
return False
return True
class HipchatMethod(NotificationMethod):
""" Method for sending notifications to Hipchat via the API:
https://www.hipchat.com/docs/apiv2/method/send_room_notification
"""
@classmethod
def method_name(cls):
return 'hipchat'
def validate(self, repository, config_data):
if not config_data.get('notification_token', ''):
raise CannotValidateNotificationMethodException('Missing Hipchat Room Notification Token')
if not config_data.get('room_id', ''):
raise CannotValidateNotificationMethodException('Missing Hipchat Room ID')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
token = config_data.get('notification_token', '')
room_id = config_data.get('room_id', '')
if not token or not room_id:
return False
owner = model.get_user(notification.repository.namespace)
if not owner:
# Something went wrong.
return False
url = 'https://api.hipchat.com/v2/room/%s/notification?auth_token=%s' % (room_id, token)
level = event_handler.get_level(notification_data['event_data'], notification_data)
color = {
'info': 'gray',
'warning': 'yellow',
'error': 'red',
'primary': 'purple'
}.get(level, 'gray')
headers = {'Content-type': 'application/json'}
payload = {
'color': color,
'message': event_handler.get_message(notification_data['event_data'], notification_data),
'notify': level == 'error',
'message_format': 'html',
}
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
logger.error('%s response for hipchat to url: %s' % (resp.status_code,
url))
logger.error(resp.content)
return False
except requests.exceptions.RequestException as ex:
logger.exception('Hipchat notification could not be sent: %s' % ex.message)
return False
return True
class SlackMethod(NotificationMethod):
""" Method for sending notifications to Slack via the API:
https://api.slack.com/docs/attachments
"""
@classmethod
def method_name(cls):
return 'slack'
def validate(self, repository, config_data):
if not config_data.get('token', ''):
raise CannotValidateNotificationMethodException('Missing Slack Token')
if not config_data.get('subdomain', '').isalnum():
raise CannotValidateNotificationMethodException('Missing or invalid Slack subdomain name')
def formatForSlack(self, message):
message = message.replace('\n', '')
message = re.sub(r'\s+', ' ', message)
message = message.replace('<br>', '\n')
message = re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message)
return message
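
formatForSlack converts the HTML the event handlers emit into Slack's markup: real newlines and repeated whitespace are collapsed, <br> becomes a newline, and anchors become <url|text> links. A standalone run of the same substitutions on a made-up message:

# Mirrors the method above as a plain function; the sample HTML is invented.
import re

def format_for_slack(message):
    message = message.replace('\n', '')
    message = re.sub(r'\s+', ' ', message)
    message = message.replace('<br>', '\n')
    message = re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message)
    return message

html = ('Repository <a href="https://quay.io/repository/someorg/somerepo">someorg/somerepo</a>'
        ' updated<br>7 images pushed')
print(format_for_slack(html))
# Repository <https://quay.io/repository/someorg/somerepo|someorg/somerepo> updated
# 7 images pushed
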
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
token = config_data.get('token', '')
subdomain = config_data.get('subdomain', '')
if not token or not subdomain:
return False
owner = model.get_user(notification.repository.namespace)
if not owner:
# Something went wrong.
return False
url = 'https://%s.slack.com/services/hooks/incoming-webhook?token=%s' % (subdomain, token)
level = event_handler.get_level(notification_data['event_data'], notification_data)
color = {
'info': '#ffffff',
'warning': 'warning',
'error': 'danger',
'primary': 'good'
}.get(level, '#ffffff')
summary = event_handler.get_summary(notification_data['event_data'], notification_data)
message = event_handler.get_message(notification_data['event_data'], notification_data)
headers = {'Content-type': 'application/json'}
payload = {
'text': summary,
'username': 'quayiobot',
'attachments': [
{
'fallback': summary,
'text': self.formatForSlack(message),
'color': color
}
]
}
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
logger.error('%s response for Slack to url: %s' % (resp.status_code,
url))
logger.error(resp.content)
return False
except requests.exceptions.RequestException as ex:
logger.exception('Slack notification could not be sent: %s' % ex.message)
return False
return True

View file

@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers):
extra_headers = {} extra_headers = {}
# Add the Accept-Ranges header if the storage engine supports resumeable # Add the Accept-Ranges header if the storage engine supports resumable
# downloads. # downloads.
if store.get_supports_resumeable_downloads(repo_image.storage.locations): if store.get_supports_resumable_downloads(repo_image.storage.locations):
profile.debug('Storage supports resumeable downloads') profile.debug('Storage supports resumable downloads')
extra_headers['Accept-Ranges'] = 'bytes' extra_headers['Accept-Ranges'] = 'bytes'
resp = make_response('') resp = make_response('')

View file

@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger):
with tarfile.open(fileobj=tarball) as archive: with tarfile.open(fileobj=tarball) as archive:
tarball_subdir = archive.getnames()[0] tarball_subdir = archive.getnames()[0]
# Seek to position 0 to make boto multipart happy
tarball.seek(0)
dockerfile_id = user_files.store_file(tarball, TARBALL_MIME) dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)
logger.debug('Successfully prepared job') logger.debug('Successfully prepared job')

View file

@ -179,6 +179,8 @@ def initialize_database():
TeamRole.create(name='member') TeamRole.create(name='member')
Visibility.create(name='public') Visibility.create(name='public')
Visibility.create(name='private') Visibility.create(name='private')
LoginService.create(name='google')
LoginService.create(name='github') LoginService.create(name='github')
LoginService.create(name='quayrobot') LoginService.create(name='quayrobot')
LoginService.create(name='ldap') LoginService.create(name='ldap')
@ -229,13 +231,15 @@ def initialize_database():
LogEntryKind.create(name='delete_application') LogEntryKind.create(name='delete_application')
LogEntryKind.create(name='reset_application_client_secret') LogEntryKind.create(name='reset_application_client_secret')
# Note: These are deprecated. # Note: These next two are deprecated.
LogEntryKind.create(name='add_repo_webhook') LogEntryKind.create(name='add_repo_webhook')
LogEntryKind.create(name='delete_repo_webhook') LogEntryKind.create(name='delete_repo_webhook')
LogEntryKind.create(name='add_repo_notification') LogEntryKind.create(name='add_repo_notification')
LogEntryKind.create(name='delete_repo_notification') LogEntryKind.create(name='delete_repo_notification')
LogEntryKind.create(name='regenerate_robot_token')
ImageStorageLocation.create(name='local_eu') ImageStorageLocation.create(name='local_eu')
ImageStorageLocation.create(name='local_us') ImageStorageLocation.create(name='local_us')
@ -251,6 +255,10 @@ def initialize_database():
ExternalNotificationMethod.create(name='email') ExternalNotificationMethod.create(name='email')
ExternalNotificationMethod.create(name='webhook') ExternalNotificationMethod.create(name='webhook')
ExternalNotificationMethod.create(name='flowdock')
ExternalNotificationMethod.create(name='hipchat')
ExternalNotificationMethod.create(name='slack')
NotificationKind.create(name='repo_push') NotificationKind.create(name='repo_push')
NotificationKind.create(name='build_queued') NotificationKind.create(name='build_queued')
NotificationKind.create(name='build_start') NotificationKind.create(name='build_start')

View file

@ -1,13 +0,0 @@
import pickle
from Crypto.PublicKey import RSA
n = 24311791124264168943780535074639421876317270880681911499019414944027362498498429776192966738844514582251884695124256895677070273097239290537016363098432785034818859765271229653729724078304186025013011992335454557504431888746007324285000011384941749613875855493086506022340155196030616409545906383713728780211095701026770053812741971198465120292345817928060114890913931047021503727972067476586739126160044293621653486418983183727572502888923949587290840425930251185737996066354726953382305020440374552871209809125535533731995494145421279907938079885061852265339259634996180877443852561265066616143910755505151318370667L
e = 65537L
def load_license(license_path):
decryptor = RSA.construct((n, e))
with open(license_path, 'rb') as encrypted_license:
decrypted_data = decryptor.encrypt(encrypted_license.read(), 0)
return pickle.loads(decrypted_data[0])

Binary file not shown.

View file

@ -32,5 +32,7 @@ raven
python-ldap python-ldap
pycrypto pycrypto
logentries logentries
psycopg2
pyyaml
git+https://github.com/DevTable/aniso8601-fake.git git+https://github.com/DevTable/aniso8601-fake.git
git+https://github.com/DevTable/anunidecode.git git+https://github.com/DevTable/anunidecode.git

View file

@ -12,6 +12,7 @@ Pillow==2.5.1
PyGithub==1.25.0 PyGithub==1.25.0
PyMySQL==0.6.2 PyMySQL==0.6.2
PyPDF2==1.22 PyPDF2==1.22
PyYAML==3.11
SQLAlchemy==0.9.7 SQLAlchemy==0.9.7
Werkzeug==0.9.6 Werkzeug==0.9.6
alembic==0.6.5 alembic==0.6.5
@ -44,6 +45,7 @@ python-dateutil==2.2
python-ldap==2.4.15 python-ldap==2.4.15
python-magic==0.4.6 python-magic==0.4.6
pytz==2014.4 pytz==2014.4
psycopg2==2.5.3
raven==5.0.0 raven==5.0.0
redis==2.10.1 redis==2.10.1
reportlab==2.7 reportlab==2.7

View file

@ -21,8 +21,7 @@
#quay-logo { #quay-logo {
width: 80px; width: 100px;
margin-right: 30px;
} }
#padding-container { #padding-container {
@ -464,6 +463,22 @@ i.toggle-icon:hover {
.docker-auth-dialog .token-dialog-body .well { .docker-auth-dialog .token-dialog-body .well {
margin-bottom: 0px; margin-bottom: 0px;
position: relative;
padding-right: 24px;
}
.docker-auth-dialog .token-dialog-body .well i.fa-refresh {
position: absolute;
top: 9px;
right: 9px;
font-size: 20px;
color: gray;
transition: all 0.5s ease-in-out;
cursor: pointer;
}
.docker-auth-dialog .token-dialog-body .well i.fa-refresh:hover {
color: black;
} }
.docker-auth-dialog .token-view { .docker-auth-dialog .token-view {
@ -729,7 +744,7 @@ i.toggle-icon:hover {
} }
.user-notification.notification-animated { .user-notification.notification-animated {
width: 21px; min-width: 21px;
transform: scale(0); transform: scale(0);
-moz-transform: scale(0); -moz-transform: scale(0);
@ -2257,6 +2272,14 @@ p.editable:hover i {
position: relative; position: relative;
} }
.copy-box-element.disabled .input-group-addon {
display: none;
}
.copy-box-element.disabled input {
border-radius: 4px !important;
}
.global-zeroclipboard-container embed { .global-zeroclipboard-container embed {
cursor: pointer; cursor: pointer;
} }
@ -4559,6 +4582,27 @@ i.quay-icon {
height: 16px; height: 16px;
} }
i.flowdock-icon {
background-image: url(/static/img/flowdock.ico);
background-size: 16px;
width: 16px;
height: 16px;
}
i.hipchat-icon {
background-image: url(/static/img/hipchat.png);
background-size: 16px;
width: 16px;
height: 16px;
}
i.slack-icon {
background-image: url(/static/img/slack.ico);
background-size: 16px;
width: 16px;
height: 16px;
}
.external-notification-view-element { .external-notification-view-element {
margin: 10px; margin: 10px;
padding: 6px; padding: 6px;

View file

@ -1,4 +1,4 @@
<div class="copy-box-element"> <div class="copy-box-element" ng-class="disabled ? 'disabled' : ''">
<div class="id-container"> <div class="id-container">
<div class="input-group"> <div class="input-group">
<input type="text" class="form-control" value="{{ value }}" readonly> <input type="text" class="form-control" value="{{ value }}" readonly>

View file

@ -73,7 +73,7 @@
<tr ng-if="currentMethod.fields.length"><td colspan="2"><hr></td></tr> <tr ng-if="currentMethod.fields.length"><td colspan="2"><hr></td></tr>
<tr ng-repeat="field in currentMethod.fields"> <tr ng-repeat="field in currentMethod.fields">
<td>{{ field.title }}:</td> <td valign="top">{{ field.title }}:</td>
<td> <td>
<div ng-switch on="field.type"> <div ng-switch on="field.type">
<span ng-switch-when="email"> <span ng-switch-when="email">
@ -86,7 +86,11 @@
current-entity="currentConfig[field.name]" current-entity="currentConfig[field.name]"
ng-model="currentConfig[field.name]" ng-model="currentConfig[field.name]"
allowed-entities="['user', 'team', 'org']" allowed-entities="['user', 'team', 'org']"
ng-switch-when="entity"> ng-switch-when="entity"></div>
<div ng-if="getHelpUrl(field, currentConfig)" style="margin-top: 10px">
See: <a href="{{ getHelpUrl(field, currentConfig) }}" target="_blank">{{ getHelpUrl(field, currentConfig) }}</a>
</div>
</div> </div>
</td> </td>
</tr> </tr>

View file

@ -10,19 +10,33 @@
</div> </div>
<div class="modal-body token-dialog-body"> <div class="modal-body token-dialog-body">
<div class="alert alert-info">The docker <u>username</u> is <b>{{ username }}</b> and the <u>password</u> is the token below. You may use any value for email.</div> <div class="alert alert-info">The docker <u>username</u> is <b>{{ username }}</b> and the <u>password</u> is the token below. You may use any value for email.</div>
<div class="well well-sm">
<div class="well well-sm" ng-show="regenerating">
Regenerating Token...
<i class="fa fa-refresh fa-spin"></i>
</div>
<div class="well well-sm" ng-show="!regenerating">
<input id="token-view" class="token-view" type="text" value="{{ token }}" onClick="this.select();" readonly> <input id="token-view" class="token-view" type="text" value="{{ token }}" onClick="this.select();" readonly>
<i class="fa fa-refresh" ng-show="supportsRegenerate" ng-click="askRegenerate()"
data-title="Regenerate Token"
data-placement="left"
bs-tooltip></i>
</div> </div>
</div> </div>
<div class="modal-footer"> <div class="modal-footer" ng-show="regenerating">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div>
<div class="modal-footer" ng-show="!regenerating">
<span class="download-cfg" ng-show="isDownloadSupported()"> <span class="download-cfg" ng-show="isDownloadSupported()">
<i class="fa fa-download"></i> <i class="fa fa-download"></i>
<a href="javascript:void(0)" ng-click="downloadCfg(shownRobot)">Download .dockercfg file</a> <a href="javascript:void(0)" ng-click="downloadCfg(shownRobot)">Download .dockercfg file</a>
</span> </span>
<div id="clipboardCopied" style="display: none"> <div class="clipboard-copied-message" style="display: none">
Copied to clipboard Copied
</div> </div>
<button id="copyClipboard" type="button" class="btn btn-primary" data-clipboard-target="token-view">Copy to clipboard</button> <input type="hidden" name="command-data" id="command-data" value="{{ command }}">
<button id="copyClipboard" type="button" class="btn btn-primary" data-clipboard-target="command-data">Copy Login Command</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div> </div>
</div><!-- /.modal-content --> </div><!-- /.modal-content -->

View file

@ -2,7 +2,7 @@
<div class="current-item"> <div class="current-item">
<div class="dropdown-select-icon-transclude"></div> <div class="dropdown-select-icon-transclude"></div>
<input type="text" class="lookahead-input form-control" placeholder="{{ placeholder }}" <input type="text" class="lookahead-input form-control" placeholder="{{ placeholder }}"
ng-readonly="!lookaheadItems || !lookaheadItems.length"></input> ng-readonly="!allowCustomInput"></input>
</div> </div>
<div class="dropdown"> <div class="dropdown">
<button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown"> <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">


@ -0,0 +1,17 @@
<span class="external-login-button-element">
<span ng-if="provider == 'github'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px">
<i class="fa fa-github fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with GitHub</span>
<span ng-if="action == 'attach'">Attach to GitHub Account</span>
</a>
</span>
<span ng-if="provider == 'google'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GOOGLE_LOGIN']" ng-click="startSignin('google')">
<i class="fa fa-google fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with Google</span>
<span ng-if="action == 'attach'">Attach to Google Account</span>
</a>
</span>
</span>


@ -4,7 +4,7 @@
&equiv; &equiv;
</button> </button>
<a class="navbar-brand" href="/" target="{{ appLinkTarget() }}"> <a class="navbar-brand" href="/" target="{{ appLinkTarget() }}">
<img id="quay-logo" src="/static/img/black-horizontal.svg"> <img id="quay-logo" src="/static/img/quay-logo.png">
</a> </a>
</div> </div>
@ -37,15 +37,7 @@
<a href="javascript:void(0)" class="dropdown-toggle user-dropdown user-view" data-toggle="dropdown"> <a href="javascript:void(0)" class="dropdown-toggle user-dropdown user-view" data-toggle="dropdown">
<img src="//www.gravatar.com/avatar/{{ user.gravatar }}?s=32&d=identicon" /> <img src="//www.gravatar.com/avatar/{{ user.gravatar }}?s=32&d=identicon" />
{{ user.username }} {{ user.username }}
<span class="badge user-notification notification-animated" <span class="notifications-bubble"></span>
ng-show="notificationService.notifications.length"
ng-class="notificationService.notificationClasses"
bs-tooltip=""
data-title="User Notifications"
data-placement="left"
data-container="body">
{{ notificationService.notifications.length }}
</span>
<b class="caret"></b> <b class="caret"></b>
</a> </a>
<ul class="dropdown-menu"> <ul class="dropdown-menu">
@ -58,11 +50,7 @@
<a href="javascript:void(0)" data-template="/static/directives/notification-bar.html" <a href="javascript:void(0)" data-template="/static/directives/notification-bar.html"
data-animation="am-slide-right" bs-aside="aside" data-container="body"> data-animation="am-slide-right" bs-aside="aside" data-container="body">
Notifications Notifications
<span class="badge user-notification" <span class="notifications-bubble"></span>
ng-class="notificationService.notificationClasses"
ng-show="notificationService.notifications.length">
{{ notificationService.notifications.length }}
</span>
</a> </a>
</li> </li>
<li><a ng-href="/organizations/" target="{{ appLinkTarget() }}">Organizations</a></li> <li><a ng-href="/organizations/" target="{{ appLinkTarget() }}">Organizations</a></li>


@ -3,7 +3,10 @@
<div class="aside-content"> <div class="aside-content">
<div class="aside-header"> <div class="aside-header">
<button type="button" class="close" ng-click="$hide()">&times;</button> <button type="button" class="close" ng-click="$hide()">&times;</button>
<h4 class="aside-title">Notifications</h4> <h4 class="aside-title">
Notifications
<span class="notifications-bubble"></span>
</h4>
</div> </div>
<div class="aside-body"> <div class="aside-body">
<div ng-repeat="notification in notificationService.notifications"> <div ng-repeat="notification in notificationService.notifications">


@ -0,0 +1,7 @@
<span class="notifications-bubble-element">
<span class="badge user-notification notification-animated"
ng-show="notificationService.notifications.length"
ng-class="notificationService.notificationClasses">
{{ notificationService.notifications.length }}<span ng-if="notificationService.additionalNotifications">+</span>
</span>
</span>


@ -31,7 +31,7 @@
</div> </div>
<div class="docker-auth-dialog" username="shownRobot.name" token="shownRobot.token" <div class="docker-auth-dialog" username="shownRobot.name" token="shownRobot.token"
shown="!!shownRobot" counter="showRobotCounter"> shown="!!shownRobot" counter="showRobotCounter" supports-regenerate="true" regenerate="regenerateToken(username)">
<i class="fa fa-wrench"></i> {{ shownRobot.name }} <i class="fa fa-wrench"></i> {{ shownRobot.name }}
</div> </div>
</div> </div>


@ -4,17 +4,22 @@
placeholder="Username or E-mail Address" ng-model="user.username" autofocus> placeholder="Username or E-mail Address" ng-model="user.username" autofocus>
<input type="password" class="form-control input-lg" name="password" <input type="password" class="form-control input-lg" name="password"
placeholder="Password" ng-model="user.password"> placeholder="Password" ng-model="user.password">
<div class="alert alert-warning" ng-show="tryAgainSoon > 0">
Too many attempts have been made to login. Please try again in {{ tryAgainSoon }} second<span ng-if="tryAgainSoon != 1">s</span>.
</div>
<span ng-show="tryAgainSoon == 0">
<button class="btn btn-lg btn-primary btn-block" type="submit">Sign In</button> <button class="btn btn-lg btn-primary btn-block" type="submit">Sign In</button>
<span class="social-alternate" quay-require="['GITHUB_LOGIN']"> <span class="social-alternate" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">
<i class="fa fa-circle"></i> <i class="fa fa-circle"></i>
<span class="inner-text">OR</span> <span class="inner-text">OR</span>
</span> </span>
<a id="github-signin-link" class="btn btn-primary btn-lg btn-block" href="javascript:void(0)" ng-click="showGithub()" <div class="external-login-button" provider="github" redirect-url="redirectUrl" sign-in-started="markStarted()"></div>
quay-require="['GITHUB_LOGIN']"> <div class="external-login-button" provider="google" redirect-url="redirectUrl" sign-in-started="markStarted()"></div>
<i class="fa fa-github fa-lg"></i> Sign In with GitHub </span>
</a>
</form> </form>
<div class="alert alert-danger" ng-show="invalidCredentials">Invalid username or password.</div> <div class="alert alert-danger" ng-show="invalidCredentials">Invalid username or password.</div>


@ -18,10 +18,8 @@
<i class="fa fa-circle"></i> <i class="fa fa-circle"></i>
<span class="inner-text">OR</span> <span class="inner-text">OR</span>
</span> </span>
<a href="https://github.com/login/oauth/authorize?client_id={{ githubClientId }}&scope=user:email{{ github_state_clause }}" <div class="external-login-button" provider="github"></div>
class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']"> <div class="external-login-button" provider="google"></div>
<i class="fa fa-github fa-lg"></i> Sign In with GitHub
</a>
</div> </div>
</form> </form>
<div ng-show="registering" style="text-align: center"> <div ng-show="registering" style="text-align: center">


@ -29,7 +29,8 @@
<div class="slideinout" ng-show="currentRepo"> <div class="slideinout" ng-show="currentRepo">
<div style="margin-top: 10px">Dockerfile Location:</div> <div style="margin-top: 10px">Dockerfile Location:</div>
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation" <div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)"> lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)"
allow-custom-input="true">
<!-- Icons --> <!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i> <i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i> <i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i>

New binary file: static/img/flowdock.ico (5.4 KiB, not shown)
New binary file: static/img/hipchat.png (3.1 KiB, not shown)
New binary file: static/img/slack.ico (22 KiB, not shown)


@ -1,6 +1,46 @@
var TEAM_PATTERN = '^[a-zA-Z][a-zA-Z0-9]+$'; var TEAM_PATTERN = '^[a-zA-Z][a-zA-Z0-9]+$';
var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9]{3,29}$'; var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9]{3,29}$';
$.fn.clipboardCopy = function() {
if (zeroClipboardSupported) {
(new ZeroClipboard($(this)));
return true;
}
this.hide();
return false;
};
var zeroClipboardSupported = true;
ZeroClipboard.config({
'swfPath': 'static/lib/ZeroClipboard.swf'
});
ZeroClipboard.on("error", function(e) {
zeroClipboardSupported = false;
});
ZeroClipboard.on('aftercopy', function(e) {
var container = e.target.parentNode.parentNode.parentNode;
var message = $(container).find('.clipboard-copied-message')[0];
// Resets the animation.
var elem = message;
elem.style.display = 'none';
elem.classList.remove('animated');
// Show the notification.
setTimeout(function() {
elem.style.display = 'inline-block';
elem.classList.add('animated');
}, 10);
// Reset the notification.
setTimeout(function() {
elem.style.display = 'none';
}, 5000);
});
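The block above centralizes the ZeroClipboard wiring: $.fn.clipboardCopy now just attaches the shared client (or hides the element when ZeroClipboard reports an error), and the global 'aftercopy' handler animates the nearest .clipboard-copied-message. A minimal usage sketch, assuming a hypothetical #token-copy button whose data-clipboard-target points at the element to copy; the button id and the fallback behavior are illustrative, not part of this commit:

// Hypothetical usage of the helper defined above.
$(function() {
  var supported = $('#token-copy').clipboardCopy();
  if (!supported) {
    // ZeroClipboard could not load (e.g. no Flash): the button was hidden,
    // so leave the user a way to select the text manually.
    $('#token-view').on('click', function() { this.select(); });
  }
});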
function getRestUrl(args) { function getRestUrl(args) {
var url = ''; var url = '';
for (var i = 0; i < arguments.length; ++i) { for (var i = 0; i < arguments.length; ++i) {
@ -352,7 +392,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
var uiService = {}; var uiService = {};
uiService.hidePopover = function(elem) { uiService.hidePopover = function(elem) {
var popover = $('#signupButton').data('bs.popover'); var popover = $(elem).data('bs.popover');
if (popover) { if (popover) {
popover.hide(); popover.hide();
} }
@ -409,6 +449,29 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
var pingService = {}; var pingService = {};
var pingCache = {}; var pingCache = {};
var invokeCallback = function($scope, pings, callback) {
if (pings[0] == -1) {
setTimeout(function() {
$scope.$apply(function() {
callback(-1, false, -1);
});
}, 0);
return;
}
var sum = 0;
for (var i = 0; i < pings.length; ++i) {
sum += pings[i];
}
// Report the average ping.
setTimeout(function() {
$scope.$apply(function() {
callback(Math.floor(sum / pings.length), true, pings.length);
});
}, 0);
};
var reportPingResult = function($scope, url, ping, callback) { var reportPingResult = function($scope, url, ping, callback) {
// Lookup the cached ping data, if any. // Lookup the cached ping data, if any.
var cached = pingCache[url]; var cached = pingCache[url];
@ -421,28 +484,15 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
// If an error occurred, report it and done. // If an error occurred, report it and done.
if (ping < 0) { if (ping < 0) {
cached['pings'] = [-1]; cached['pings'] = [-1];
setTimeout(function() { invokeCallback($scope, pings, callback);
$scope.$apply(function() {
callback(-1, false, -1);
});
}, 0);
return; return;
} }
// Otherwise, add the current ping and determine the average. // Otherwise, add the current ping and determine the average.
cached['pings'].push(ping); cached['pings'].push(ping);
var sum = 0; // Invoke the callback.
for (var i = 0; i < cached['pings'].length; ++i) { invokeCallback($scope, cached['pings'], callback);
sum += cached['pings'][i];
}
// Report the average ping.
setTimeout(function() {
$scope.$apply(function() {
callback(Math.floor(sum / cached['pings'].length), true, cached['pings'].length);
});
}, 0);
// Schedule another check if we've done less than three. // Schedule another check if we've done less than three.
if (cached['pings'].length < 3) { if (cached['pings'].length < 3) {
@ -478,12 +528,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
pingService.pingUrl = function($scope, url, callback) { pingService.pingUrl = function($scope, url, callback) {
if (pingCache[url]) { if (pingCache[url]) {
cached = pingCache[url]; invokeCallback($scope, pingCache[url]['pings'], callback);
setTimeout(function() {
$scope.$apply(function() {
callback(cached.result, cached.success);
});
}, 0);
return; return;
} }
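The refactor above routes both the cached and the freshly measured paths through invokeCallback, which reports the floored average of up to three samples, or -1 when the endpoint never answered. A sketch of a consumer, assuming the factory is registered under a name like 'PingService' (the registration itself is outside this hunk) and using a made-up controller and URL:

// Illustrative controller; the service name and URL are assumptions.
quayApp.controller('HypotheticalRegionCtrl', ['$scope', 'PingService', function($scope, PingService) {
  PingService.pingUrl($scope, 'https://storage.example.com/ping', function(avgPing, success, count) {
    // success=false (with avgPing=-1 and count=-1) means the probe failed;
    // otherwise avgPing is Math.floor(sum / count) over at most three probes.
    $scope.regionPing = success ? avgPing : null;
  });
}]);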
@ -521,6 +566,41 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
$provide.factory('StringBuilderService', ['$sce', 'UtilService', function($sce, UtilService) { $provide.factory('StringBuilderService', ['$sce', 'UtilService', function($sce, UtilService) {
var stringBuilderService = {}; var stringBuilderService = {};
stringBuilderService.buildUrl = function(value_or_func, metadata) {
var url = value_or_func;
if (typeof url != 'string') {
url = url(metadata);
}
// Find the variables to be replaced.
var varNames = [];
for (var i = 0; i < url.length; ++i) {
var c = url[i];
if (c == '{') {
for (var j = i + 1; j < url.length; ++j) {
var d = url[j];
if (d == '}') {
varNames.push(url.substring(i + 1, j));
i = j;
break;
}
}
}
}
// Replace all variables found.
for (var i = 0; i < varNames.length; ++i) {
var varName = varNames[i];
if (!metadata[varName]) {
return null;
}
url = url.replace('{' + varName + '}', metadata[varName]);
}
return url;
};
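buildUrl performs a simple {variable} substitution against the supplied metadata and returns null when any variable is missing; that is what lets the Slack help_url declared further down in this commit only render once the user has entered a subdomain. A short illustration with made-up values:

// Values are illustrative; the template string is the Slack help_url added in this commit.
var template = 'https://{subdomain}.slack.com/services/new/incoming-webhook';
stringBuilderService.buildUrl(template, {'subdomain': 'mycompany'});
// => 'https://mycompany.slack.com/services/new/incoming-webhook'
stringBuilderService.buildUrl(template, {'token': 'abc'});
// => null, because {subdomain} has no value in the metadata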
stringBuilderService.buildString = function(value_or_func, metadata) { stringBuilderService.buildString = function(value_or_func, metadata) {
var fieldIcons = { var fieldIcons = {
'username': 'user', 'username': 'user',
@ -676,7 +756,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
return config; return config;
}]); }]);
$provide.factory('ApiService', ['Restangular', function(Restangular) { $provide.factory('ApiService', ['Restangular', '$q', function(Restangular, $q) {
var apiService = {}; var apiService = {};
var getResource = function(path, opt_background) { var getResource = function(path, opt_background) {
@ -773,6 +853,77 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
} }
}; };
var freshLoginFailCheck = function(opName, opArgs) {
return function(resp) {
var deferred = $q.defer();
// If the error is a fresh login required, show the dialog.
if (resp.status == 401 && resp.data['error_type'] == 'fresh_login_required') {
var verifyNow = function() {
var info = {
'password': $('#freshPassword').val()
};
$('#freshPassword').val('');
// Conduct the sign in of the user.
apiService.verifyUser(info).then(function() {
// On success, retry the operation. if it succeeds, then resolve the
// deferred promise with the result. Otherwise, reject the same.
apiService[opName].apply(apiService, opArgs).then(function(resp) {
deferred.resolve(resp);
}, function(resp) {
deferred.reject(resp);
});
}, function(resp) {
// Reject with the sign in error.
deferred.reject({'data': {'message': 'Invalid verification credentials'}});
});
};
var box = bootbox.dialog({
"message": 'It has been more than a few minutes since you last logged in, ' +
'so please verify your password to perform this sensitive operation:' +
'<form style="margin-top: 10px" action="javascript:void(0)">' +
'<input id="freshPassword" class="form-control" type="password" placeholder="Current Password">' +
'</form>',
"title": 'Please Verify',
"buttons": {
"verify": {
"label": "Verify",
"className": "btn-success",
"callback": verifyNow
},
"close": {
"label": "Cancel",
"className": "btn-default",
"callback": function() {
deferred.reject({'data': {'message': 'Verification canceled'}});
}
}
}
});
box.bind('shown.bs.modal', function(){
box.find("input").focus();
box.find("form").submit(function() {
if (!$('#freshPassword').val()) { return; }
box.modal('hide');
verifyNow();
});
});
// Return a new promise. We'll accept or reject it based on the result
// of the login.
return deferred.promise;
}
// Otherwise, we just 'raise' the error via the reject method on the promise.
return $q.reject(resp);
};
};
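freshLoginFailCheck turns a 'fresh_login_required' 401 into a password prompt and, on success, replays the original operation against the same deferred, so call sites stay unchanged. A caller-side sketch; whether this particular operation is flagged with requires_fresh_login is an assumption made for illustration:

// Caller-side sketch only.
ApiService.changeUserDetails($scope.cuser).then(function(resp) {
  // Reached either directly, or after the user re-entered their password in
  // the bootbox dialog and the retried call succeeded.
  $scope.updatingUser = false;
}, function(resp) {
  // Covers real API errors as well as the 'Verification canceled' /
  // 'Invalid verification credentials' rejections injected above.
  $scope.errorMessage = resp.data.message;
});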
var buildMethodsForOperation = function(operation, resource, resourceMap) { var buildMethodsForOperation = function(operation, resource, resourceMap) {
var method = operation['method'].toLowerCase(); var method = operation['method'].toLowerCase();
var operationName = operation['nickname']; var operationName = operation['nickname'];
@ -786,7 +937,15 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'ignoreLoadingBar': true 'ignoreLoadingBar': true
}); });
} }
return one['custom' + method.toUpperCase()](opt_options);
var opObj = one['custom' + method.toUpperCase()](opt_options);
// If the operation requires_fresh_login, then add a specialized error handler that
// will defer the operation's result if sudo is requested.
if (operation['requires_fresh_login']) {
opObj = opObj.catch(freshLoginFailCheck(operationName, arguments));
}
return opObj;
}; };
// If the method for the operation is a GET, add an operationAsResource method. // If the method for the operation is a GET, add an operationAsResource method.
@ -1084,6 +1243,54 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'title': 'Webhook URL' 'title': 'Webhook URL'
} }
] ]
},
{
'id': 'flowdock',
'title': 'Flowdock Team Notification',
'icon': 'flowdock-icon',
'fields': [
{
'name': 'flow_api_token',
'type': 'string',
'title': 'Flow API Token',
'help_url': 'https://www.flowdock.com/account/tokens'
}
]
},
{
'id': 'hipchat',
'title': 'HipChat Room Notification',
'icon': 'hipchat-icon',
'fields': [
{
'name': 'room_id',
'type': 'string',
'title': 'Room ID #'
},
{
'name': 'notification_token',
'type': 'string',
'title': 'Notification Token'
}
]
},
{
'id': 'slack',
'title': 'Slack Room Notification',
'icon': 'slack-icon',
'fields': [
{
'name': 'subdomain',
'type': 'string',
'title': 'Slack Subdomain'
},
{
'name': 'token',
'type': 'string',
'title': 'Token',
'help_url': 'https://{subdomain}.slack.com/services/new/incoming-webhook'
}
]
} }
]; ];
@ -1123,7 +1330,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'user': null, 'user': null,
'notifications': [], 'notifications': [],
'notificationClasses': [], 'notificationClasses': [],
'notificationSummaries': [] 'notificationSummaries': [],
'additionalNotifications': false
}; };
var pollTimerHandle = null; var pollTimerHandle = null;
@ -1219,7 +1427,9 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'uuid': notification.id 'uuid': notification.id
}; };
ApiService.updateUserNotification(notification, params); ApiService.updateUserNotification(notification, params, function() {
notificationService.update();
}, ApiService.errorDisplay('Could not update notification'));
var index = $.inArray(notification, notificationService.notifications); var index = $.inArray(notification, notificationService.notifications);
if (index >= 0) { if (index >= 0) {
@ -1276,6 +1486,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
ApiService.listUserNotifications().then(function(resp) { ApiService.listUserNotifications().then(function(resp) {
notificationService.notifications = resp['notifications']; notificationService.notifications = resp['notifications'];
notificationService.additionalNotifications = resp['additional'];
notificationService.notificationClasses = notificationService.getClasses(notificationService.notifications); notificationService.notificationClasses = notificationService.getClasses(notificationService.notifications);
}); });
}; };
@ -1304,10 +1515,41 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
var keyService = {} var keyService = {}
keyService['stripePublishableKey'] = Config['STRIPE_PUBLISHABLE_KEY']; keyService['stripePublishableKey'] = Config['STRIPE_PUBLISHABLE_KEY'];
keyService['githubClientId'] = Config['GITHUB_CLIENT_ID']; keyService['githubClientId'] = Config['GITHUB_CLIENT_ID'];
keyService['githubLoginClientId'] = Config['GITHUB_LOGIN_CLIENT_ID']; keyService['githubLoginClientId'] = Config['GITHUB_LOGIN_CLIENT_ID'];
keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback'); keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback');
keyService['googleLoginClientId'] = Config['GOOGLE_LOGIN_CLIENT_ID'];
keyService['googleRedirectUri'] = Config.getUrl('/oauth2/google/callback');
keyService['googleLoginUrl'] = 'https://accounts.google.com/o/oauth2/auth?response_type=code&';
keyService['githubLoginUrl'] = 'https://github.com/login/oauth/authorize?';
keyService['googleLoginScope'] = 'openid email';
keyService['githubLoginScope'] = 'user:email';
keyService.getExternalLoginUrl = function(service, action) {
var state_clause = '';
if (Config.MIXPANEL_KEY && window.mixpanel) {
if (mixpanel.get_distinct_id !== undefined) {
state_clause = "&state=" + encodeURIComponent(mixpanel.get_distinct_id());
}
}
var client_id = keyService[service + 'LoginClientId'];
var scope = keyService[service + 'LoginScope'];
var redirect_uri = keyService[service + 'RedirectUri'];
if (action == 'attach') {
redirect_uri += '/attach';
}
var url = keyService[service + 'LoginUrl'] + 'client_id=' + client_id + '&scope=' + scope +
'&redirect_uri=' + redirect_uri + state_clause;
return url;
};
return keyService; return keyService;
}]); }]);
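getExternalLoginUrl assembles the OAuth authorize URL from the per-service pieces registered just above (client id, scope, redirect URI) and appends the mixpanel distinct id as state when analytics is configured. Roughly, for the GitHub attach flow (client id and host below are placeholders):

// Shape of the generated URL, for illustration.
var url = KeyService.getExternalLoginUrl('github', 'attach');
// url ~= 'https://github.com/login/oauth/authorize?' +
//        'client_id=<GITHUB_LOGIN_CLIENT_ID>' +
//        '&scope=user:email' +
//        '&redirect_uri=<server>/oauth2/github/callback/attach' +
//        '&state=<mixpanel distinct id, only when MIXPANEL_KEY is set>'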
@ -1507,7 +1749,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
}); });
}; };
planService.changePlan = function($scope, orgname, planId, callbacks) { planService.changePlan = function($scope, orgname, planId, callbacks, opt_async) {
if (!Features.BILLING) { return; } if (!Features.BILLING) { return; }
if (callbacks['started']) { if (callbacks['started']) {
@ -1520,7 +1762,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
planService.getCardInfo(orgname, function(cardInfo) { planService.getCardInfo(orgname, function(cardInfo) {
if (plan.price > 0 && (previousSubscribeFailure || !cardInfo.last4)) { if (plan.price > 0 && (previousSubscribeFailure || !cardInfo.last4)) {
var title = cardInfo.last4 ? 'Subscribe' : 'Start Trial ({{amount}} plan)'; var title = cardInfo.last4 ? 'Subscribe' : 'Start Trial ({{amount}} plan)';
planService.showSubscribeDialog($scope, orgname, planId, callbacks, title); planService.showSubscribeDialog($scope, orgname, planId, callbacks, title, /* async */true);
return; return;
} }
@ -1593,9 +1835,34 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
return email; return email;
}; };
planService.showSubscribeDialog = function($scope, orgname, planId, callbacks, opt_title) { planService.showSubscribeDialog = function($scope, orgname, planId, callbacks, opt_title, opt_async) {
if (!Features.BILLING) { return; } if (!Features.BILLING) { return; }
// If the async parameter is true and this is a browser that does not allow async popup of the
// Stripe dialog (such as Mobile Safari or IE), show a bootbox to show the dialog instead.
var isIE = navigator.appName.indexOf("Internet Explorer") != -1;
var isMobileSafari = navigator.userAgent.match(/(iPod|iPhone|iPad)/) && navigator.userAgent.match(/AppleWebKit/);
if (opt_async && (isIE || isMobileSafari)) {
bootbox.dialog({
"message": "Please click 'Subscribe' to continue",
"buttons": {
"subscribe": {
"label": "Subscribe",
"className": "btn-primary",
"callback": function() {
planService.showSubscribeDialog($scope, orgname, planId, callbacks, opt_title, false);
}
},
"close": {
"label": "Cancel",
"className": "btn-default"
}
}
});
return;
}
if (callbacks['opening']) { if (callbacks['opening']) {
callbacks['opening'](); callbacks['opening']();
} }
@ -1688,7 +1955,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
when('/repository/:namespace/:name/build', {templateUrl: '/static/partials/repo-build.html', controller:RepoBuildCtrl, reloadOnSearch: false}). when('/repository/:namespace/:name/build', {templateUrl: '/static/partials/repo-build.html', controller:RepoBuildCtrl, reloadOnSearch: false}).
when('/repository/:namespace/:name/build/:buildid/buildpack', {templateUrl: '/static/partials/build-package.html', controller:BuildPackageCtrl, reloadOnSearch: false}). when('/repository/:namespace/:name/build/:buildid/buildpack', {templateUrl: '/static/partials/build-package.html', controller:BuildPackageCtrl, reloadOnSearch: false}).
when('/repository/', {title: 'Repositories', description: 'Public and private docker repositories list', when('/repository/', {title: 'Repositories', description: 'Public and private docker repositories list',
templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl}). templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl, reloadOnSearch: false}).
when('/user/', {title: 'Account Settings', description:'Account settings for ' + title, templateUrl: '/static/partials/user-admin.html', when('/user/', {title: 'Account Settings', description:'Account settings for ' + title, templateUrl: '/static/partials/user-admin.html',
reloadOnSearch: false, controller: UserAdminCtrl}). reloadOnSearch: false, controller: UserAdminCtrl}).
when('/superuser/', {title: 'Superuser Admin Panel', description:'Admin panel for ' + title, templateUrl: '/static/partials/super-user.html', when('/superuser/', {title: 'Superuser Admin Panel', description:'Admin panel for ' + title, templateUrl: '/static/partials/super-user.html',
@ -2114,6 +2381,8 @@ quayApp.directive('copyBox', function () {
'hoveringMessage': '=hoveringMessage' 'hoveringMessage': '=hoveringMessage'
}, },
controller: function($scope, $element, $rootScope) { controller: function($scope, $element, $rootScope) {
$scope.disabled = false;
var number = $rootScope.__copyBoxIdCounter || 0; var number = $rootScope.__copyBoxIdCounter || 0;
$rootScope.__copyBoxIdCounter = number + 1; $rootScope.__copyBoxIdCounter = number + 1;
$scope.inputId = "copy-box-input-" + number; $scope.inputId = "copy-box-input-" + number;
@ -2123,27 +2392,7 @@ quayApp.directive('copyBox', function () {
input.attr('id', $scope.inputId); input.attr('id', $scope.inputId);
button.attr('data-clipboard-target', $scope.inputId); button.attr('data-clipboard-target', $scope.inputId);
$scope.disabled = !button.clipboardCopy();
var clip = new ZeroClipboard($(button), { 'moviePath': 'static/lib/ZeroClipboard.swf' });
clip.on('complete', function(e) {
var message = $(this.parentNode.parentNode.parentNode).find('.clipboard-copied-message')[0];
// Resets the animation.
var elem = message;
elem.style.display = 'none';
elem.classList.remove('animated');
// Show the notification.
setTimeout(function() {
elem.style.display = 'inline-block';
elem.classList.add('animated');
}, 10);
// Reset the notification.
setTimeout(function() {
elem.style.display = 'none';
}, 5000);
});
} }
}; };
return directiveDefinitionObject; return directiveDefinitionObject;
@ -2185,6 +2434,41 @@ quayApp.directive('userSetup', function () {
}); });
quayApp.directive('externalLoginButton', function () {
var directiveDefinitionObject = {
priority: 0,
templateUrl: '/static/directives/external-login-button.html',
replace: false,
transclude: true,
restrict: 'C',
scope: {
'signInStarted': '&signInStarted',
'redirectUrl': '=redirectUrl',
'provider': '@provider',
'action': '@action'
},
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) {
$scope.startSignin = function(service) {
$scope.signInStarted({'service': service});
var url = KeyService.getExternalLoginUrl(service, $scope.action || 'login');
// Save the redirect URL in a cookie so that we can redirect back after the service returns to us.
var redirectURL = $scope.redirectUrl || window.location.toString();
CookieService.putPermanent('quay.redirectAfterLoad', redirectURL);
// Needed to ensure that UI work done by the started callback is finished before the location
// changes.
$timeout(function() {
document.location = url;
}, 250);
};
}
};
return directiveDefinitionObject;
});
quayApp.directive('signinForm', function () { quayApp.directive('signinForm', function () {
var directiveDefinitionObject = { var directiveDefinitionObject = {
priority: 0, priority: 0,
@ -2197,29 +2481,9 @@ quayApp.directive('signinForm', function () {
'signInStarted': '&signInStarted', 'signInStarted': '&signInStarted',
'signedIn': '&signedIn' 'signedIn': '&signedIn'
}, },
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, CookieService, Features, Config) { controller: function($scope, $location, $timeout, $interval, ApiService, KeyService, UserService, CookieService, Features, Config) {
$scope.showGithub = function() { $scope.tryAgainSoon = 0;
if (!Features.GITHUB_LOGIN) { return; } $scope.tryAgainInterval = null;
$scope.markStarted();
var mixpanelDistinctIdClause = '';
if (Config.MIXPANEL_KEY && mixpanel.get_distinct_id !== undefined) {
$scope.mixpanelDistinctIdClause = "&state=" + encodeURIComponent(mixpanel.get_distinct_id());
}
// Save the redirect URL in a cookie so that we can redirect back after GitHub returns to us.
var redirectURL = $scope.redirectUrl || window.location.toString();
CookieService.putPermanent('quay.redirectAfterLoad', redirectURL);
// Needed to ensure that UI work done by the started callback is finished before the location
// changes.
$timeout(function() {
var url = 'https://github.com/login/oauth/authorize?client_id=' + encodeURIComponent(KeyService.githubLoginClientId) +
'&scope=user:email' + mixpanelDistinctIdClause;
document.location = url;
}, 250);
};
$scope.markStarted = function() { $scope.markStarted = function() {
if ($scope.signInStarted != null) { if ($scope.signInStarted != null) {
@ -2227,8 +2491,29 @@ quayApp.directive('signinForm', function () {
} }
}; };
$scope.cancelInterval = function() {
$scope.tryAgainSoon = 0;
if ($scope.tryAgainInterval) {
$interval.cancel($scope.tryAgainInterval);
}
$scope.tryAgainInterval = null;
};
$scope.$watch('user.username', function() {
$scope.cancelInterval();
});
$scope.$on('$destroy', function() {
$scope.cancelInterval();
});
$scope.signin = function() { $scope.signin = function() {
if ($scope.tryAgainSoon > 0) { return; }
$scope.markStarted(); $scope.markStarted();
$scope.cancelInterval();
ApiService.signinUser($scope.user).then(function() { ApiService.signinUser($scope.user).then(function() {
$scope.needsEmailVerification = false; $scope.needsEmailVerification = false;
@ -2250,8 +2535,23 @@ quayApp.directive('signinForm', function () {
$location.path($scope.redirectUrl ? $scope.redirectUrl : '/'); $location.path($scope.redirectUrl ? $scope.redirectUrl : '/');
}, 500); }, 500);
}, function(result) { }, function(result) {
if (result.status == 429 /* try again later */) {
$scope.needsEmailVerification = false;
$scope.invalidCredentials = false;
$scope.cancelInterval();
$scope.tryAgainSoon = result.headers('Retry-After');
$scope.tryAgainInterval = $interval(function() {
$scope.tryAgainSoon--;
if ($scope.tryAgainSoon <= 0) {
$scope.cancelInterval();
}
}, 1000, $scope.tryAgainSoon);
} else {
$scope.needsEmailVerification = result.data.needsEmailVerification; $scope.needsEmailVerification = result.data.needsEmailVerification;
$scope.invalidCredentials = result.data.invalidCredentials; $scope.invalidCredentials = result.data.invalidCredentials;
}
}); });
}; };
} }
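The 429 branch above reads the Retry-After header and counts it down once per second with $interval, keeping the form disabled (via tryAgainSoon in the sign-in template) until it reaches zero. A framework-free sketch of the same pattern; the 30-second value stands in for the server's header and the callbacks stand in for the scope bindings:

// Minimal countdown sketch, not the directive's code.
function startLockout(retryAfterSeconds, onTick, onDone) {
  var remaining = retryAfterSeconds;
  var handle = setInterval(function() {
    remaining--;
    if (remaining <= 0) {
      clearInterval(handle);
      onDone();            // re-enable the sign-in button
    } else {
      onTick(remaining);   // update the "try again in N seconds" message
    }
  }, 1000);
  return handle;           // keep the handle so a username edit can cancel it
}

startLockout(30,
  function(n) { console.log('try again in ' + n + 's'); },
  function() { console.log('lockout over'); });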
@ -2273,15 +2573,6 @@ quayApp.directive('signupForm', function () {
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService) { controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService) {
$('.form-signup').popover(); $('.form-signup').popover();
if (Config.MIXPANEL_KEY) {
angulartics.waitForVendorApi(mixpanel, 500, function(loadedMixpanel) {
var mixpanelId = loadedMixpanel.get_distinct_id();
$scope.github_state_clause = '&state=' + mixpanelId;
});
}
$scope.githubClientId = KeyService.githubLoginClientId;
$scope.awaitingConfirmation = false; $scope.awaitingConfirmation = false;
$scope.registering = false; $scope.registering = false;
@ -2371,11 +2662,42 @@ quayApp.directive('dockerAuthDialog', function (Config) {
'username': '=username', 'username': '=username',
'token': '=token', 'token': '=token',
'shown': '=shown', 'shown': '=shown',
'counter': '=counter' 'counter': '=counter',
'supportsRegenerate': '@supportsRegenerate',
'regenerate': '&regenerate'
}, },
controller: function($scope, $element) { controller: function($scope, $element) {
var updateCommand = function() {
var escape = function(v) {
if (!v) { return v; }
return v.replace('$', '\\$');
};
$scope.command = 'docker login -e="." -u="' + escape($scope.username) +
'" -p="' + $scope.token + '" ' + Config['SERVER_HOSTNAME'];
};
$scope.$watch('username', updateCommand);
$scope.$watch('token', updateCommand);
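updateCommand keeps a ready-to-paste docker login line in $scope.command (the hidden command-data input in the dialog template above is what the copy button now targets), escaping '$' in the username so the shell does not expand it. A standalone sketch of the same construction; the robot name, token and hostname are made up:

// Standalone version of the builder above, for illustration only.
function buildLoginCommand(username, token, hostname) {
  var escape = function(v) {
    if (!v) { return v; }
    return v.replace('$', '\\$');   // same escaping as the directive applies
  };
  return 'docker login -e="." -u="' + escape(username) +
         '" -p="' + token + '" ' + hostname;
}

buildLoginCommand('myorg+deploy', 'ABC123TOKEN', 'quay.example.com');
// => 'docker login -e="." -u="myorg+deploy" -p="ABC123TOKEN" quay.example.com'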
$scope.regenerating = true;
$scope.askRegenerate = function() {
bootbox.confirm('Are you sure you want to regenerate the token? All existing login credentials will become invalid', function(resp) {
if (resp) {
$scope.regenerating = true;
$scope.regenerate({'username': $scope.username, 'token': $scope.token});
}
});
};
$scope.isDownloadSupported = function() { $scope.isDownloadSupported = function() {
try { return !!new Blob(); } catch(e){} var isSafari = /^((?!chrome).)*safari/i.test(navigator.userAgent);
if (isSafari) {
// Doesn't work properly in Safari, sadly.
return false;
}
try { return !!new Blob(); } catch(e) {}
return false; return false;
}; };
@ -2393,6 +2715,8 @@ quayApp.directive('dockerAuthDialog', function (Config) {
}; };
var show = function(r) { var show = function(r) {
$scope.regenerating = false;
if (!$scope.shown || !$scope.username || !$scope.token) { if (!$scope.shown || !$scope.username || !$scope.token) {
$('#dockerauthmodal').modal('hide'); $('#dockerauthmodal').modal('hide');
return; return;
@ -2633,6 +2957,8 @@ quayApp.directive('logsView', function () {
return 'Delete notification of event "' + eventData['title'] + '" for repository {repo}'; return 'Delete notification of event "' + eventData['title'] + '" for repository {repo}';
}, },
'regenerate_robot_token': 'Regenerated token for robot {robot}',
// Note: These are deprecated. // Note: These are deprecated.
'add_repo_webhook': 'Add webhook in repository {repo}', 'add_repo_webhook': 'Add webhook in repository {repo}',
'delete_repo_webhook': 'Delete webhook in repository {repo}' 'delete_repo_webhook': 'Delete webhook in repository {repo}'
@ -2676,6 +3002,7 @@ quayApp.directive('logsView', function () {
'reset_application_client_secret': 'Reset Client Secret', 'reset_application_client_secret': 'Reset Client Secret',
'add_repo_notification': 'Add repository notification', 'add_repo_notification': 'Add repository notification',
'delete_repo_notification': 'Delete repository notification', 'delete_repo_notification': 'Delete repository notification',
'regenerate_robot_token': 'Regenerate Robot Token',
// Note: these are deprecated. // Note: these are deprecated.
'add_repo_webhook': 'Add webhook', 'add_repo_webhook': 'Add webhook',
@ -2847,6 +3174,20 @@ quayApp.directive('robotsManager', function () {
$scope.shownRobot = null; $scope.shownRobot = null;
$scope.showRobotCounter = 0; $scope.showRobotCounter = 0;
$scope.regenerateToken = function(username) {
if (!username) { return; }
var shortName = $scope.getShortenedName(username);
ApiService.regenerateRobotToken($scope.organization, null, {'robot_shortname': shortName}).then(function(updated) {
var index = $scope.findRobotIndexByName(username);
if (index >= 0) {
$scope.robots.splice(index, 1);
$scope.robots.push(updated);
}
$scope.shownRobot = updated;
}, ApiService.errorDisplay('Cannot regenerate robot account token'));
};
$scope.showRobot = function(info) { $scope.showRobot = function(info) {
$scope.shownRobot = info; $scope.shownRobot = info;
$scope.showRobotCounter++; $scope.showRobotCounter++;
@ -3786,9 +4127,11 @@ quayApp.directive('billingOptions', function () {
var save = function() { var save = function() {
$scope.working = true; $scope.working = true;
var errorHandler = ApiService.errorDisplay('Could not change user details');
ApiService.changeDetails($scope.organization, $scope.obj).then(function(resp) { ApiService.changeDetails($scope.organization, $scope.obj).then(function(resp) {
$scope.working = false; $scope.working = false;
}); }, errorHandler);
}; };
var checkSave = function() { var checkSave = function() {
@ -3840,7 +4183,7 @@ quayApp.directive('planManager', function () {
return true; return true;
}; };
$scope.changeSubscription = function(planId) { $scope.changeSubscription = function(planId, opt_async) {
if ($scope.planChanging) { return; } if ($scope.planChanging) { return; }
var callbacks = { var callbacks = {
@ -3854,7 +4197,7 @@ quayApp.directive('planManager', function () {
} }
}; };
PlanService.changePlan($scope, $scope.organization, planId, callbacks); PlanService.changePlan($scope, $scope.organization, planId, callbacks, opt_async);
}; };
$scope.cancelSubscription = function() { $scope.cancelSubscription = function() {
@ -3917,7 +4260,7 @@ quayApp.directive('planManager', function () {
if ($scope.readyForPlan) { if ($scope.readyForPlan) {
var planRequested = $scope.readyForPlan(); var planRequested = $scope.readyForPlan();
if (planRequested && planRequested != PlanService.getFreePlan()) { if (planRequested && planRequested != PlanService.getFreePlan()) {
$scope.changeSubscription(planRequested); $scope.changeSubscription(planRequested, /* async */true);
} }
} }
}); });
@ -3948,7 +4291,7 @@ quayApp.directive('namespaceSelector', function () {
'namespace': '=namespace', 'namespace': '=namespace',
'requireCreate': '=requireCreate' 'requireCreate': '=requireCreate'
}, },
controller: function($scope, $element, $routeParams, CookieService) { controller: function($scope, $element, $routeParams, $location, CookieService) {
$scope.namespaces = {}; $scope.namespaces = {};
$scope.initialize = function(user) { $scope.initialize = function(user) {
@ -3985,6 +4328,10 @@ quayApp.directive('namespaceSelector', function () {
if (newNamespace) { if (newNamespace) {
CookieService.putPermanent('quay.namespace', newNamespace); CookieService.putPermanent('quay.namespace', newNamespace);
if ($routeParams['namespace'] && $routeParams['namespace'] != newNamespace) {
$location.search({'namespace': newNamespace});
}
} }
}; };
@ -4106,6 +4453,9 @@ quayApp.directive('dropdownSelect', function ($compile) {
'selectedItem': '=selectedItem', 'selectedItem': '=selectedItem',
'placeholder': '=placeholder', 'placeholder': '=placeholder',
'lookaheadItems': '=lookaheadItems', 'lookaheadItems': '=lookaheadItems',
'allowCustomInput': '@allowCustomInput',
'handleItemSelected': '&handleItemSelected', 'handleItemSelected': '&handleItemSelected',
'handleInput': '&handleInput', 'handleInput': '&handleInput',
@ -4864,7 +5214,7 @@ quayApp.directive('createExternalNotificationDialog', function () {
'counter': '=counter', 'counter': '=counter',
'notificationCreated': '&notificationCreated' 'notificationCreated': '&notificationCreated'
}, },
controller: function($scope, $element, ExternalNotificationData, ApiService, $timeout) { controller: function($scope, $element, ExternalNotificationData, ApiService, $timeout, StringBuilderService) {
$scope.currentEvent = null; $scope.currentEvent = null;
$scope.currentMethod = null; $scope.currentMethod = null;
$scope.status = ''; $scope.status = '';
@ -4964,6 +5314,15 @@ quayApp.directive('createExternalNotificationDialog', function () {
}, 1000); }, 1000);
}; };
$scope.getHelpUrl = function(field, config) {
var helpUrl = field['help_url'];
if (!helpUrl) {
return null;
}
return StringBuilderService.buildUrl(helpUrl, config);
};
$scope.$watch('counter', function(counter) { $scope.$watch('counter', function(counter) {
if (counter) { if (counter) {
$scope.clearCounter++; $scope.clearCounter++;
@ -5002,6 +5361,23 @@ quayApp.directive('twitterView', function () {
}); });
quayApp.directive('notificationsBubble', function () {
var directiveDefinitionObject = {
priority: 0,
templateUrl: '/static/directives/notifications-bubble.html',
replace: false,
transclude: false,
restrict: 'C',
scope: {
},
controller: function($scope, UserService, NotificationService) {
$scope.notificationService = NotificationService;
}
};
return directiveDefinitionObject;
});
quayApp.directive('notificationView', function () { quayApp.directive('notificationView', function () {
var directiveDefinitionObject = { var directiveDefinitionObject = {
priority: 0, priority: 0,
@ -5330,7 +5706,9 @@ quayApp.directive('locationView', function () {
$scope.getLocationTooltip = function(location, ping) { $scope.getLocationTooltip = function(location, ping) {
var tip = $scope.getLocationTitle(location) + '<br>'; var tip = $scope.getLocationTitle(location) + '<br>';
if (ping < 0) { if (ping == null) {
tip += '(Loading)';
} else if (ping < 0) {
tip += '<br><b>Note: Could not contact server</b>'; tip += '<br><b>Note: Could not contact server</b>';
} else { } else {
tip += 'Estimated Ping: ' + (ping ? ping + 'ms' : '(Loading)'); tip += 'Estimated Ping: ' + (ping ? ping + 'ms' : '(Loading)');
@ -5578,15 +5956,14 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
// Handle session expiration. // Handle session expiration.
Restangular.setErrorInterceptor(function(response) { Restangular.setErrorInterceptor(function(response) {
if (response.status == 401) { if (response.status == 401 && response.data['error_type'] == 'invalid_token' &&
if (response.data['session_required'] == null || response.data['session_required'] === true) { response.data['session_required'] !== false) {
$('#sessionexpiredModal').modal({}); $('#sessionexpiredModal').modal({});
return false; return false;
} }
}
if (!Features.BILLING && response.status == 402) { if (response.status == 503) {
$('#overlicenseModal').modal({}); $('#cannotContactService').modal({});
return false; return false;
} }


@ -1,25 +1,3 @@
$.fn.clipboardCopy = function() {
var clip = new ZeroClipboard($(this), { 'moviePath': 'static/lib/ZeroClipboard.swf' });
clip.on('complete', function() {
// Resets the animation.
var elem = $('#clipboardCopied')[0];
if (!elem) {
return;
}
elem.style.display = 'none';
elem.classList.remove('animated');
// Show the notification.
setTimeout(function() {
if (!elem) { return; }
elem.style.display = 'inline-block';
elem.classList.add('animated');
}, 10);
});
};
function GuideCtrl() { function GuideCtrl() {
} }
@ -545,16 +523,24 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
$scope.deleteTag = function(tagName) { $scope.deleteTag = function(tagName) {
if (!$scope.repo.can_admin) { return; } if (!$scope.repo.can_admin) { return; }
$('#confirmdeleteTagModal').modal('hide');
var params = { var params = {
'repository': namespace + '/' + name, 'repository': namespace + '/' + name,
'tag': tagName 'tag': tagName
}; };
var errorHandler = ApiService.errorDisplay('Cannot delete tag', function() {
$('#confirmdeleteTagModal').modal('hide');
$scope.deletingTag = false;
});
$scope.deletingTag = true;
ApiService.deleteFullTag(null, params).then(function() { ApiService.deleteFullTag(null, params).then(function() {
loadViewInfo(); loadViewInfo();
}, ApiService.errorDisplay('Cannot delete tag')); $('#confirmdeleteTagModal').modal('hide');
$scope.deletingTag = false;
}, errorHandler);
}; };
$scope.getImagesForTagBySize = function(tag) { $scope.getImagesForTagBySize = function(tag) {
@ -733,8 +719,6 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
// Load the builds for this repository. If none are active it will cancel the poll. // Load the builds for this repository. If none are active it will cancel the poll.
startBuildInfoTimer(repo); startBuildInfoTimer(repo);
$('#copyClipboard').clipboardCopy();
}); });
}; };
@ -1661,13 +1645,19 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
UserService.updateUserIn($scope, function(user) { UserService.updateUserIn($scope, function(user) {
$scope.cuser = jQuery.extend({}, user); $scope.cuser = jQuery.extend({}, user);
if (Features.GITHUB_LOGIN && $scope.cuser.logins) { if ($scope.cuser.logins) {
for (var i = 0; i < $scope.cuser.logins.length; i++) { for (var i = 0; i < $scope.cuser.logins.length; i++) {
if ($scope.cuser.logins[i].service == 'github') { var login = $scope.cuser.logins[i];
var githubId = $scope.cuser.logins[i].service_identifier; login.metadata = login.metadata || {};
$http.get('https://api.github.com/user/' + githubId).success(function(resp) {
$scope.githubLogin = resp.login; if (login.service == 'github') {
}); $scope.hasGithubLogin = true;
$scope.githubLogin = login.metadata['service_username'];
}
if (login.service == 'google') {
$scope.hasGoogleLogin = true;
$scope.googleLogin = login.metadata['service_username'];
} }
} }
} }
@ -1685,7 +1675,6 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
$scope.convertStep = 0; $scope.convertStep = 0;
$scope.org = {}; $scope.org = {};
$scope.githubRedirectUri = KeyService.githubRedirectUri; $scope.githubRedirectUri = KeyService.githubRedirectUri;
$scope.githubClientId = KeyService.githubLoginClientId;
$scope.authorizedApps = null; $scope.authorizedApps = null;
$scope.logsShown = 0; $scope.logsShown = 0;
@ -1715,7 +1704,6 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
}; };
$scope.loadInvoices = function() { $scope.loadInvoices = function() {
if (!$scope.hasPaidBusinessPlan) { return; }
$scope.invoicesShown++; $scope.invoicesShown++;
}; };
@ -1794,7 +1782,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
$scope.updatingUser = true; $scope.updatingUser = true;
$scope.changePasswordSuccess = false; $scope.changePasswordSuccess = false;
ApiService.changeUserDetails($scope.cuser).then(function() { ApiService.changeUserDetails($scope.cuser).then(function(resp) {
$scope.updatingUser = false; $scope.updatingUser = false;
$scope.changePasswordSuccess = true; $scope.changePasswordSuccess = true;
@ -1901,9 +1890,6 @@ function ImageViewCtrl($scope, $routeParams, $rootScope, $timeout, ApiService, I
// Fetch the image's changes. // Fetch the image's changes.
fetchChanges(); fetchChanges();
$('#copyClipboard').clipboardCopy();
return image; return image;
}); });
}; };
@ -2699,35 +2685,7 @@ function SuperUserAdminCtrl($scope, ApiService, Features, UserService) {
}, ApiService.errorDisplay('Cannot delete user')); }, ApiService.errorDisplay('Cannot delete user'));
}; };
var seatUsageLoaded = function(usage) { $scope.loadUsers();
$scope.usageLoading = false;
if (usage.count > usage.allowed) {
$scope.limit = 'over';
} else if (usage.count == usage.allowed) {
$scope.limit = 'at';
} else if (usage.count >= usage.allowed * 0.7) {
$scope.limit = 'near';
} else {
$scope.limit = 'none';
}
if (!$scope.chart) {
$scope.chart = new UsageChart();
$scope.chart.draw('seat-usage-chart');
}
$scope.chart.update(usage.count, usage.allowed);
};
var loadSeatUsage = function() {
$scope.usageLoading = true;
ApiService.getSeatCount().then(function(resp) {
seatUsageLoaded(resp);
});
};
loadSeatUsage();
} }
function TourCtrl($scope, $location) { function TourCtrl($scope, $location) {

static/lib/ZeroClipboard.min.js (vendored, Executable file → Normal file; diff suppressed because one or more lines are too long)
static/lib/ZeroClipboard.swf (binary, Executable file → Normal file; not shown)


@ -24,7 +24,7 @@
<a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">{{repository.namespace}}/{{repository.name}}</a> <a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">{{repository.namespace}}/{{repository.name}}</a>
<div class="markdown-view description" content="repository.description" first-line-only="true"></div> <div class="markdown-view description" content="repository.description" first-line-only="true"></div>
</div> </div>
<a href="/repository/?namespace={{ user.username }}">See All Repositories</a> <a href="/repository/?namespace={{ namespace }}">See All Repositories</a>
</div> </div>
<!-- No Repos --> <!-- No Repos -->


@ -34,7 +34,7 @@
<a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">{{repository.namespace}}/{{repository.name}}</a> <a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">{{repository.namespace}}/{{repository.name}}</a>
<div class="markdown-view description" content="repository.description" first-line-only="true"></div> <div class="markdown-view description" content="repository.description" first-line-only="true"></div>
</div> </div>
<a href="/repository/?namespace={{ user.username }}">See All Repositories</a> <a href="/repository/?namespace={{ namespace }}">See All Repositories</a>
</div> </div>
<!-- No Repos --> <!-- No Repos -->


@ -34,6 +34,13 @@
</span> </span>
<i class="fa fa-upload visible-lg"></i> <i class="fa fa-upload visible-lg"></i>
</div> </div>
<div class="feature">
<span class="context-tooltip" bs-tooltip="tooltip.title" data-container="body" data-placement="right"
data-title="Administrators can view and download the full invoice history for their organization">
Invoice History
</span>
<i class="fa fa-calendar visible-lg"></i>
</div>
<div class="feature"> <div class="feature">
<span class="context-tooltip" bs-tooltip="tooltip.title" data-container="body" data-placement="right" <span class="context-tooltip" bs-tooltip="tooltip.title" data-container="body" data-placement="right"
data-title="Grant subsets of users in an organization their own permissions, either on a global basis or a per-repository basis"> data-title="Grant subsets of users in an organization their own permissions, either on a global basis or a per-repository basis">
@ -48,13 +55,6 @@
</span> </span>
<i class="fa fa-bar-chart-o visible-lg"></i> <i class="fa fa-bar-chart-o visible-lg"></i>
</div> </div>
<div class="feature">
<span class="context-tooltip" bs-tooltip="tooltip.title" data-container="body" data-placement="right"
data-title="Administrators can view and download the full invoice history for their organization">
Invoice History
</span>
<i class="fa fa-calendar visible-lg"></i>
</div>
<div class="feature"> <div class="feature">
<span class="context-tooltip" bs-tooltip="tooltip.title" data-container="body" data-placement="right" <span class="context-tooltip" bs-tooltip="tooltip.title" data-container="body" data-placement="right"
data-title="All plans have a free trial"> data-title="All plans have a free trial">
@ -81,7 +81,7 @@
<div class="feature present"></div> <div class="feature present"></div>
<div class="feature present"></div> <div class="feature present"></div>
<div class="feature present"></div> <div class="feature present"></div>
<div class="feature" ng-class="plan.bus_features ? 'present' : ''"></div> <div class="feature present"></div>
<div class="feature" ng-class="plan.bus_features ? 'present' : ''"></div> <div class="feature" ng-class="plan.bus_features ? 'present' : ''"></div>
<div class="feature" ng-class="plan.bus_features ? 'present' : ''"></div> <div class="feature" ng-class="plan.bus_features ? 'present' : ''"></div>
<div class="feature present"></div> <div class="feature present"></div>
@ -93,9 +93,9 @@
<div class="feature present">SSL Encryption</div> <div class="feature present">SSL Encryption</div>
<div class="feature present">Robot accounts</div> <div class="feature present">Robot accounts</div>
<div class="feature present">Dockerfile Build</div> <div class="feature present">Dockerfile Build</div>
<div class="feature present">Invoice History</div>
<div class="feature" ng-class="plan.bus_features ? 'present' : 'notpresent'">Teams</div> <div class="feature" ng-class="plan.bus_features ? 'present' : 'notpresent'">Teams</div>
<div class="feature" ng-class="plan.bus_features ? 'present' : 'notpresent'">Logging</div> <div class="feature" ng-class="plan.bus_features ? 'present' : 'notpresent'">Logging</div>
<div class="feature" ng-class="plan.bus_features ? 'present' : 'notpresent'">Invoice History</div>
<div class="feature present">Free Trial</div> <div class="feature present">Free Trial</div>
</div> </div>


@ -18,7 +18,8 @@
<div class="col-md-2"> <div class="col-md-2">
<ul class="nav nav-pills nav-stacked"> <ul class="nav nav-pills nav-stacked">
<li class="active"><a href="javascript:void(0)" data-toggle="tab" data-target="#permissions">Permissions</a></li> <li class="active"><a href="javascript:void(0)" data-toggle="tab" data-target="#permissions">Permissions</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#trigger" ng-click="loadTriggers()">Build Triggers</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#trigger" ng-click="loadTriggers()"
quay-require="['BUILD_SUPPORT']">Build Triggers</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#badge">Status Badge</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#badge">Status Badge</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#notification" ng-click="loadNotifications()">Notifications</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#notification" ng-click="loadNotifications()">Notifications</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#publicprivate">Public/Private</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#publicprivate">Public/Private</a></li>
@ -225,7 +226,7 @@
</div> </div>
<!-- Triggers tab --> <!-- Triggers tab -->
<div id="trigger" class="tab-pane"> <div id="trigger" class="tab-pane" quay-require="['BUILD_SUPPORT']">
<div class="panel panel-default"> <div class="panel panel-default">
<div class="panel-heading">Build Triggers <div class="panel-heading">Build Triggers
<i class="info-icon fa fa-info-circle" data-placement="left" data-content="Triggers from various services (such as GitHub) which tell the repository to be built and updated."></i> <i class="info-icon fa fa-info-circle" data-placement="left" data-content="Triggers from various services (such as GitHub) which tell the repository to be built and updated."></i>


@ -8,9 +8,6 @@
<div class="col-md-2"> <div class="col-md-2">
<ul class="nav nav-pills nav-stacked"> <ul class="nav nav-pills nav-stacked">
<li class="active"> <li class="active">
<a href="javascript:void(0)" data-toggle="tab" data-target="#license">License and Usage</a>
</li>
<li>
<a href="javascript:void(0)" data-toggle="tab" data-target="#users" ng-click="loadUsers()">Users</a> <a href="javascript:void(0)" data-toggle="tab" data-target="#users" ng-click="loadUsers()">Users</a>
</li> </li>
</ul> </ul>
@ -19,19 +16,8 @@
<!-- Content --> <!-- Content -->
<div class="col-md-10"> <div class="col-md-10">
<div class="tab-content"> <div class="tab-content">
<!-- License tab -->
<div id="license" class="tab-pane active">
<div class="quay-spinner 3x" ng-show="usageLoading"></div>
<!-- Chart -->
<div>
<div id="seat-usage-chart" class="usage-chart limit-{{limit}}"></div>
<span class="usage-caption" ng-show="chart">Seat Usage</span>
</div>
</div>
<!-- Users tab --> <!-- Users tab -->
<div id="users" class="tab-pane"> <div id="users" class="tab-pane active">
<div class="quay-spinner" ng-show="!users"></div> <div class="quay-spinner" ng-show="!users"></div>
<div class="alert alert-error" ng-show="usersError"> <div class="alert alert-error" ng-show="usersError">
{{ usersError }} {{ usersError }}


@ -25,7 +25,7 @@
<li ng-show="hasPaidPlan" quay-require="['BILLING']"> <li ng-show="hasPaidPlan" quay-require="['BILLING']">
<a href="javascript:void(0)" data-toggle="tab" data-target="#billingoptions">Billing Options</a> <a href="javascript:void(0)" data-toggle="tab" data-target="#billingoptions">Billing Options</a>
</li> </li>
<li ng-show="hasPaidBusinessPlan" quay-require="['BILLING']"> <li ng-show="hasPaidPlan" quay-require="['BILLING']">
<a href="javascript:void(0)" data-toggle="tab" data-target="#billing" ng-click="loadInvoices()">Billing History</a> <a href="javascript:void(0)" data-toggle="tab" data-target="#billing" ng-click="loadInvoices()">Billing History</a>
</li> </li>
@ -33,7 +33,7 @@
<li quay-classes="{'!Features.BILLING': 'active'}"><a href="javascript:void(0)" data-toggle="tab" data-target="#email">Account E-mail</a></li> <li quay-classes="{'!Features.BILLING': 'active'}"><a href="javascript:void(0)" data-toggle="tab" data-target="#email">Account E-mail</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#robots">Robot Accounts</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#robots">Robot Accounts</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#password">Change Password</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#password">Change Password</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#github" quay-require="['GITHUB_LOGIN']">GitHub Login</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#external" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">External Logins</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#authorized" ng-click="loadAuthedApps()">Authorized Applications</a></li> <li><a href="javascript:void(0)" data-toggle="tab" data-target="#authorized" ng-click="loadAuthedApps()">Authorized Applications</a></li>
<li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan"> <li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan">
<a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a> <a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a>
@ -138,13 +138,14 @@
<!-- Change password tab --> <!-- Change password tab -->
<div id="password" class="tab-pane"> <div id="password" class="tab-pane">
<div class="loading" ng-show="updatingUser">
<div class="quay-spinner 3x"></div>
</div>
<div class="row"> <div class="row">
<div class="panel"> <div class="panel">
<div class="panel-title">Change Password</div> <div class="panel-title">Change Password</div>
<div class="loading" ng-show="updatingUser">
<div class="quay-spinner 3x"></div>
</div>
<span class="help-block" ng-show="changePasswordSuccess">Password changed successfully</span> <span class="help-block" ng-show="changePasswordSuccess">Password changed successfully</span>
<div ng-show="!updatingUser" class="panel-body"> <div ng-show="!updatingUser" class="panel-body">
@ -162,25 +163,52 @@
</div> </div>
</div> </div>
<!-- Github tab --> <!-- External Login tab -->
<div id="github" class="tab-pane" quay-require="['GITHUB_LOGIN']"> <div id="external" class="tab-pane" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">
<div class="loading" ng-show="!cuser"> <div class="loading" ng-show="!cuser">
<div class="quay-spinner 3x"></div> <div class="quay-spinner 3x"></div>
</div> </div>
<div class="row" ng-show="cuser">
<!-- Github -->
<div class="row" quay-show="cuser && Features.GITHUB_LOGIN">
<div class="panel"> <div class="panel">
<div class="panel-title">GitHub Login:</div> <div class="panel-title">GitHub Login:</div>
<div class="panel-body"> <div class="panel-body">
<div ng-show="githubLogin" class="lead col-md-8"> <div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8">
<i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i> <i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
<b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b> <b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
</div> </div>
<div ng-show="!githubLogin" class="col-md-8"> <div ng-show="hasGithubLogin && !githubLogin" class="lead col-md-8">
<a href="https://github.com/login/oauth/authorize?client_id={{ githubClientId }}&scope=user:email{{ github_state_clause }}&redirect_uri={{ githubRedirectUri }}/attach" class="btn btn-primary"><i class="fa fa-github fa-lg"></i> Connect with GitHub</a> <i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
Account attached to Github Account
</div>
<div ng-show="!hasGithubLogin" class="col-md-4">
<span class="external-login-button" provider="github" action="attach"></span>
</div> </div>
</div> </div>
</div> </div>
</div> </div>
<!-- Google -->
<div class="row" quay-show="cuser && Features.GOOGLE_LOGIN">
<div class="panel">
<div class="panel-title">Google Login:</div>
<div class="panel-body">
<div ng-show="hasGoogleLogin && googleLogin" class="lead col-md-8">
<i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
<b>{{ googleLogin }}</b>
</div>
<div ng-show="hasGoogleLogin && !googleLogin" class="lead col-md-8">
<i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
Account attached to Google Account
</div>
<div ng-show="!hasGoogleLogin" class="col-md-4">
<span class="external-login-button" provider="google" action="attach"></span>
</div>
</div>
</div>
</div>
</div> </div>
<!-- Robot accounts tab --> <!-- Robot accounts tab -->
@ -18,7 +18,7 @@
<div class="dropdown" data-placement="top" style="display: inline-block" <div class="dropdown" data-placement="top" style="display: inline-block"
bs-tooltip="" bs-tooltip=""
data-title="{{ runningBuilds.length ? 'Dockerfile Builds Running: ' + (runningBuilds.length) : 'Dockerfile Build' }}" data-title="{{ runningBuilds.length ? 'Dockerfile Builds Running: ' + (runningBuilds.length) : 'Dockerfile Build' }}"
ng-show="repo.can_write || buildHistory.length"> quay-show="Features.BUILD_SUPPORT && (repo.can_write || buildHistory.length)">
<button class="btn btn-default dropdown-toggle" data-toggle="dropdown"> <button class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-tasks fa-lg"></i> <i class="fa fa-tasks fa-lg"></i>
<span class="count" ng-class="runningBuilds.length ? 'visible' : ''"><span>{{ runningBuilds.length ? runningBuilds.length : '' }}</span></span> <span class="count" ng-class="runningBuilds.length ? 'visible' : ''"><span>{{ runningBuilds.length ? runningBuilds.length : '' }}</span></span>
@ -58,16 +58,9 @@
<span class="pull-command visible-md-inline"> <span class="pull-command visible-md-inline">
<div class="pull-container" data-title="Pull repository" bs-tooltip="tooltip.title"> <div class="pull-container" data-title="Pull repository" bs-tooltip="tooltip.title">
<div class="input-group"> <div class="input-group">
<input id="pull-text" type="text" class="form-control" value="{{ 'docker pull ' + Config.getDomain() + '/' + repo.namespace + '/' + repo.name }}" readonly> <div class="copy-box" hovering-message="true" value="'docker pull ' + Config.getDomain() + '/' + repo.namespace + '/' + repo.name"></div>
<span id="copyClipboard" class="input-group-addon" data-title="Copy to Clipboard" data-clipboard-target="pull-text">
<i class="fa fa-copy"></i>
</span>
</div> </div>
</div> </div>
<div id="clipboardCopied" class="hovering" style="display: none">
Copied to clipboard
</div>
</span> </span>
</div> </div>
</div> </div>
@ -398,7 +391,10 @@
</span>? </span>?
</h4> </h4>
</div> </div>
<div class="modal-body"> <div class="modal-body" ng-show="deletingTag">
<div class="quay-spinner"></div>
</div>
<div class="modal-body" ng-show="!deletingTag">
Are you sure you want to delete tag Are you sure you want to delete tag
<span class="label tag" ng-class="tagToDelete == currentTag.name ? 'label-success' : 'label-default'"> <span class="label tag" ng-class="tagToDelete == currentTag.name ? 'label-success' : 'label-default'">
{{ tagToDelete }} {{ tagToDelete }}
@ -408,7 +404,7 @@
The following images and any other images not referenced by a tag will be deleted: The following images and any other images not referenced by a tag will be deleted:
</div> </div>
</div> </div>
<div class="modal-footer"> <div class="modal-footer" ng-show="!deletingTag">
<button type="button" class="btn btn-primary" ng-click="deleteTag(tagToDelete)">Delete Tag</button> <button type="button" class="btn btn-primary" ng-click="deleteTag(tagToDelete)">Delete Tag</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button> <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
</div> </div>
@ -1,5 +1,5 @@
from storage.local import LocalStorage from storage.local import LocalStorage
from storage.cloud import S3Storage, GoogleCloudStorage from storage.cloud import S3Storage, GoogleCloudStorage, RadosGWStorage
from storage.fakestorage import FakeStorage from storage.fakestorage import FakeStorage
from storage.distributedstorage import DistributedStorage from storage.distributedstorage import DistributedStorage
@ -8,6 +8,7 @@ STORAGE_DRIVER_CLASSES = {
'LocalStorage': LocalStorage, 'LocalStorage': LocalStorage,
'S3Storage': S3Storage, 'S3Storage': S3Storage,
'GoogleCloudStorage': GoogleCloudStorage, 'GoogleCloudStorage': GoogleCloudStorage,
'RadosGWStorage': RadosGWStorage,
} }
@ -54,10 +54,13 @@ class BaseStorage(StoragePaths):
# Set the IO buffer to 64kB # Set the IO buffer to 64kB
buffer_size = 64 * 1024 buffer_size = 64 * 1024
def get_direct_download_url(self, path, expires_in=60): def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
return None return None
def get_supports_resumeable_downloads(self): def get_direct_upload_url(self, path, mime_type, requires_cors=True):
return None
def get_supports_resumable_downloads(self):
return False return False
def get_content(self, path): def get_content(self, path):
@ -72,7 +75,7 @@ class BaseStorage(StoragePaths):
def stream_read_file(self, path): def stream_read_file(self, path):
raise NotImplementedError raise NotImplementedError
def stream_write(self, path, fp): def stream_write(self, path, fp, content_type=None):
raise NotImplementedError raise NotImplementedError
def list_directory(self, path=None): def list_directory(self, path=None):
@ -83,3 +86,6 @@ class BaseStorage(StoragePaths):
def remove(self, path): def remove(self, path):
raise NotImplementedError raise NotImplementedError
def get_checksum(self, path):
raise NotImplementedError
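The hunks above widen the BaseStorage contract: direct download URLs gain a requires_cors flag, a direct upload URL hook is added, stream_write accepts an optional content type, and every driver must now answer get_checksum. As a rough sketch (not part of this commit; the class name and no-op behaviour are illustrative only), a minimal driver satisfying the new contract could look like this:

# Hypothetical no-op driver showing the expanded interface; not part of this commit.
from storage.basestorage import BaseStorage

class NullStorage(BaseStorage):
    def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
        return None   # no presigned-URL support

    def get_direct_upload_url(self, path, mime_type, requires_cors=True):
        return None   # callers fall back to proxied uploads

    def stream_write(self, path, fp, content_type=None):
        pass          # discard the stream

    def get_checksum(self, path):
        return '0000000'   # seven characters, matching the other drivers in this diff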
@ -7,36 +7,39 @@ import boto.gs.connection
import boto.s3.key import boto.s3.key
import boto.gs.key import boto.gs.key
from io import BufferedIOBase
from storage.basestorage import BaseStorage from storage.basestorage import BaseStorage
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class StreamReadKeyAsFile(object): class StreamReadKeyAsFile(BufferedIOBase):
def __init__(self, key): def __init__(self, key):
self._key = key self._key = key
self._finished = False
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self._key.close(fast=True)
def read(self, amt=None): def read(self, amt=None):
if self._finished: if self.closed:
return None return None
resp = self._key.read(amt) resp = self._key.read(amt)
if not resp:
self._finished = True
return resp return resp
def readable(self):
return True
@property
def closed(self):
return self._key.closed
def close(self):
self._key.close(fast=True)
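Making the wrapper a BufferedIOBase means it can be handed to anything that expects a standard file-like object. A runnable illustration of that composition (a BytesIO stands in for the boto key wrapper, purely for this sketch):

# Illustration only: a BytesIO stands in for StreamReadKeyAsFile(key).
import io
import shutil

source = io.BytesIO(b'layer bytes pulled from the bucket')
destination = io.BytesIO()
shutil.copyfileobj(source, destination, 64 * 1024)   # same 64kB buffer size as BaseStorage
assert destination.getvalue() == b'layer bytes pulled from the bucket'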
class _CloudStorage(BaseStorage): class _CloudStorage(BaseStorage):
def __init__(self, connection_class, key_class, upload_params, storage_path, access_key, def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path,
secret_key, bucket_name): access_key, secret_key, bucket_name):
self._initialized = False self._initialized = False
self._bucket_name = bucket_name self._bucket_name = bucket_name
self._access_key = access_key self._access_key = access_key
@ -45,12 +48,14 @@ class _CloudStorage(BaseStorage):
self._connection_class = connection_class self._connection_class = connection_class
self._key_class = key_class self._key_class = key_class
self._upload_params = upload_params self._upload_params = upload_params
self._connect_kwargs = connect_kwargs
self._cloud_conn = None self._cloud_conn = None
self._cloud_bucket = None self._cloud_bucket = None
def _initialize_cloud_conn(self): def _initialize_cloud_conn(self):
if not self._initialized: if not self._initialized:
self._cloud_conn = self._connection_class(self._access_key, self._secret_key) self._cloud_conn = self._connection_class(self._access_key, self._secret_key,
**self._connect_kwargs)
self._cloud_bucket = self._cloud_conn.get_bucket(self._bucket_name) self._cloud_bucket = self._cloud_conn.get_bucket(self._bucket_name)
self._initialized = True self._initialized = True
@ -87,15 +92,22 @@ class _CloudStorage(BaseStorage):
key.set_contents_from_string(content, **self._upload_params) key.set_contents_from_string(content, **self._upload_params)
return path return path
def get_supports_resumeable_downloads(self): def get_supports_resumable_downloads(self):
return True return True
def get_direct_download_url(self, path, expires_in=60): def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
self._initialize_cloud_conn() self._initialize_cloud_conn()
path = self._init_path(path) path = self._init_path(path)
k = self._key_class(self._cloud_bucket, path) k = self._key_class(self._cloud_bucket, path)
return k.generate_url(expires_in) return k.generate_url(expires_in)
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
self._initialize_cloud_conn()
path = self._init_path(path)
key = self._key_class(self._cloud_bucket, path)
url = key.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, encrypt_key=True)
return url
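A hedged sketch of how a caller might consume such a presigned PUT URL; the storage object, path and payload below are made up for illustration, and the Content-Type sent must match the one the URL was signed with:

# Hypothetical client-side upload against a presigned URL like the one generated above.
# 'storage' is assumed to be one of the drivers above (e.g. an S3Storage instance).
import requests

url = storage.get_direct_upload_url('uploads/dfa1b2c3', 'application/octet-stream')
if url is not None:
    with open('/tmp/build-context.tar.gz', 'rb') as payload:
        resp = requests.put(url, data=payload,
                            headers={'Content-Type': 'application/octet-stream'})
    resp.raise_for_status()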
def stream_read(self, path): def stream_read(self, path):
self._initialize_cloud_conn() self._initialize_cloud_conn()
path = self._init_path(path) path = self._init_path(path)
@ -116,14 +128,20 @@ class _CloudStorage(BaseStorage):
raise IOError('No such key: \'{0}\''.format(path)) raise IOError('No such key: \'{0}\''.format(path))
return StreamReadKeyAsFile(key) return StreamReadKeyAsFile(key)
def stream_write(self, path, fp): def stream_write(self, path, fp, content_type=None):
# Minimum size of upload part size on S3 is 5MB # Minimum size of upload part size on S3 is 5MB
self._initialize_cloud_conn() self._initialize_cloud_conn()
buffer_size = 5 * 1024 * 1024 buffer_size = 5 * 1024 * 1024
if self.buffer_size > buffer_size: if self.buffer_size > buffer_size:
buffer_size = self.buffer_size buffer_size = self.buffer_size
path = self._init_path(path) path = self._init_path(path)
mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params)
metadata = {}
if content_type is not None:
metadata['Content-Type'] = content_type
mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
**self._upload_params)
num_part = 1 num_part = 1
while True: while True:
try: try:
@ -179,25 +197,67 @@ class _CloudStorage(BaseStorage):
for key in self._cloud_bucket.list(prefix=path): for key in self._cloud_bucket.list(prefix=path):
key.delete() key.delete()
def get_checksum(self, path):
self._initialize_cloud_conn()
path = self._init_path(path)
key = self._key_class(self._cloud_bucket, path)
k = self._cloud_bucket.lookup(key)
return k.etag[1:-1][:7]
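The cloud checksum above is derived from the object's ETag: boto reports it wrapped in quotes, so the quotes are stripped and the value truncated to seven characters (mirroring the SHA-256-based value the local driver computes later in this diff). A runnable illustration of that string handling:

# Example ETag value; not taken from a real bucket.
etag = '"9a0364b9e99bb480dd25e1f0284c8555"'
checksum = etag[1:-1][:7]   # strip the surrounding quotes, keep seven characters
assert checksum == '9a0364b'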
class S3Storage(_CloudStorage): class S3Storage(_CloudStorage):
def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket): def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
upload_params = { upload_params = {
'encrypt_key': True, 'encrypt_key': True,
} }
connect_kwargs = {}
super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key, super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
upload_params, storage_path, s3_access_key, s3_secret_key, connect_kwargs, upload_params, storage_path, s3_access_key,
s3_bucket) s3_secret_key, s3_bucket)
class GoogleCloudStorage(_CloudStorage): class GoogleCloudStorage(_CloudStorage):
def __init__(self, storage_path, access_key, secret_key, bucket_name): def __init__(self, storage_path, access_key, secret_key, bucket_name):
super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key, {}, upload_params = {}
storage_path, access_key, secret_key, bucket_name) connect_kwargs = {}
super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key,
connect_kwargs, upload_params, storage_path,
access_key, secret_key, bucket_name)
def stream_write(self, path, fp): def stream_write(self, path, fp, content_type=None):
# Minimum size of upload part size on S3 is 5MB # Minimum size of upload part size on S3 is 5MB
self._initialize_cloud_conn() self._initialize_cloud_conn()
path = self._init_path(path) path = self._init_path(path)
key = self._key_class(self._cloud_bucket, path) key = self._key_class(self._cloud_bucket, path)
if content_type is not None:
key.set_metadata('Content-Type', content_type)
key.set_contents_from_stream(fp) key.set_contents_from_stream(fp)
class RadosGWStorage(_CloudStorage):
def __init__(self, hostname, is_secure, storage_path, access_key, secret_key, bucket_name):
upload_params = {}
connect_kwargs = {
'host': hostname,
'is_secure': is_secure,
'calling_format': boto.s3.connection.OrdinaryCallingFormat(),
}
super(RadosGWStorage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
connect_kwargs, upload_params, storage_path, access_key,
secret_key, bucket_name)
# TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
if requires_cors:
return None
return super(RadosGWStorage, self).get_direct_download_url(path, expires_in, requires_cors)
# TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
if requires_cors:
return None
return super(RadosGWStorage, self).get_direct_upload_url(path, mime_type, requires_cors)
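With the class registered in STORAGE_DRIVER_CLASSES earlier in this diff, a RadosGW-backed location can be declared by driver name. The snippet below is a hypothetical configuration sketch: the config key and layout are an assumption about how such drivers are typically wired up, and the hostname, bucket and credentials are placeholders, not values taken from this commit.

# Hypothetical storage configuration; every value here is a placeholder.
DISTRIBUTED_STORAGE_CONFIG = {
    'local_ceph': ['RadosGWStorage', {
        'hostname': 'radosgw.internal.example.com',
        'is_secure': True,
        'storage_path': '/registry',
        'access_key': 'ACCESS_KEY',
        'secret_key': 'SECRET_KEY',
        'bucket_name': 'quay-registry',
    }],
}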
@ -31,6 +31,7 @@ class DistributedStorage(StoragePaths):
self.preferred_locations = list(preferred_locations) self.preferred_locations = list(preferred_locations)
get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url) get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url)
get_direct_upload_url = _location_aware(BaseStorage.get_direct_upload_url)
get_content = _location_aware(BaseStorage.get_content) get_content = _location_aware(BaseStorage.get_content)
put_content = _location_aware(BaseStorage.put_content) put_content = _location_aware(BaseStorage.put_content)
stream_read = _location_aware(BaseStorage.stream_read) stream_read = _location_aware(BaseStorage.stream_read)
@ -39,4 +40,5 @@ class DistributedStorage(StoragePaths):
list_directory = _location_aware(BaseStorage.list_directory) list_directory = _location_aware(BaseStorage.list_directory)
exists = _location_aware(BaseStorage.exists) exists = _location_aware(BaseStorage.exists)
remove = _location_aware(BaseStorage.remove) remove = _location_aware(BaseStorage.remove)
get_supports_resumeable_downloads = _location_aware(BaseStorage.get_supports_resumeable_downloads) get_checksum = _location_aware(BaseStorage.get_checksum)
get_supports_resumable_downloads = _location_aware(BaseStorage.get_supports_resumable_downloads)
@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
def stream_read(self, path): def stream_read(self, path):
yield '' yield ''
def stream_write(self, path, fp): def stream_write(self, path, fp, content_type=None):
pass pass
def remove(self, path): def remove(self, path):
@ -22,3 +22,6 @@ class FakeStorage(BaseStorage):
def exists(self, path): def exists(self, path):
return False return False
def get_checksum(self, path):
return 'abcdefg'
@ -1,6 +1,7 @@
import os import os
import shutil import shutil
import hashlib
import io
from storage.basestorage import BaseStorage from storage.basestorage import BaseStorage
@ -40,9 +41,9 @@ class LocalStorage(BaseStorage):
def stream_read_file(self, path): def stream_read_file(self, path):
path = self._init_path(path) path = self._init_path(path)
return open(path, mode='rb') return io.open(path, mode='rb')
def stream_write(self, path, fp): def stream_write(self, path, fp, content_type=None):
# Size is mandatory # Size is mandatory
path = self._init_path(path, create=True) path = self._init_path(path, create=True)
with open(path, mode='wb') as f: with open(path, mode='wb') as f:
@ -80,3 +81,14 @@ class LocalStorage(BaseStorage):
os.remove(path) os.remove(path)
except OSError: except OSError:
pass pass
def get_checksum(self, path):
path = self._init_path(path)
sha_hash = hashlib.sha256()
with open(path, 'r') as to_hash:
while True:
buf = to_hash.read(self.buffer_size)
if not buf:
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
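A runnable illustration of the truncated digest above, using an in-memory buffer instead of a file on disk; the sample bytes are arbitrary.

# The checksum is simply the first seven hex characters of a SHA-256 digest.
import hashlib

sha_hash = hashlib.sha256()
sha_hash.update(b'example layer contents')
print(sha_hash.hexdigest()[:7])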
@ -35,23 +35,18 @@
</div><!-- /.modal-dialog --> </div><!-- /.modal-dialog -->
</div><!-- /.modal --> </div><!-- /.modal -->
{% if not has_billing %}
<!-- Modal message dialog --> <!-- Modal message dialog -->
<div class="modal fade" id="overlicenseModal" data-backdrop="static"> <div class="modal fade" id="cannotContactService" data-backdrop="static">
<div class="modal-dialog"> <div class="modal-dialog">
<div class="modal-content"> <div class="modal-content">
<div class="modal-header"> <div class="modal-header">
<h4 class="modal-title">Cannot create user</h4> <h4 class="modal-title">Cannot Contact External Service</h4>
</div> </div>
<div class="modal-body"> <div class="modal-body">
A new user cannot be created as this organization has reached its licensed seat count. Please contact your administrator. A connection to an external service has failed. Please reload the page to try again.
</div>
<div class="modal-footer">
<a href="javascript:void(0)" class="btn btn-primary" data-dismiss="modal" onclick="location = '/signin'">Sign In</a>
</div> </div>
</div><!-- /.modal-content --> </div><!-- /.modal-content -->
</div><!-- /.modal-dialog --> </div><!-- /.modal-dialog -->
</div><!-- /.modal --> </div><!-- /.modal -->
{% endif %}
{% endblock %} {% endblock %}
@ -1,22 +1,22 @@
{% extends "base.html" %} {% extends "base.html" %}
{% block title %} {% block title %}
<title>Error Logging in with GitHub · Quay.io</title> <title>Error Logging in with {{ service_name }} · Quay.io</title>
{% endblock %} {% endblock %}
{% block body_content %} {% block body_content %}
<div class="container"> <div class="container">
<div class="row"> <div class="row">
<div class="col-md-12"> <div class="col-md-12">
<h2>There was an error logging in with GitHub.</h2> <h2>There was an error logging in with {{ service_name }}.</h2>
{% if error_message %} {% if error_message %}
<div class="alert alert-danger">{{ error_message }}</div> <div class="alert alert-danger">{{ error_message }}</div>
{% endif %} {% endif %}
<div> <div>
Please register using the <a href="/">registration form</a> to continue. Please register using the <a ng-href="{{ service_url }}/signin" target="_self">registration form</a> to continue.
You will be able to connect your github account to your Quay.io account You will be able to connect your account to your Quay.io account
in the user settings. in the user settings.
</div> </div>
</div> </div>
Binary file not shown.
@ -196,7 +196,7 @@ def build_index_specs():
IndexTestSpec(url_for('index.put_repository_auth', repository=PUBLIC_REPO), IndexTestSpec(url_for('index.put_repository_auth', repository=PUBLIC_REPO),
NO_REPO, 501, 501, 501, 501).set_method('PUT'), NO_REPO, 501, 501, 501, 501).set_method('PUT'),
IndexTestSpec(url_for('index.get_search'), NO_REPO, 501, 501, 501, 501), IndexTestSpec(url_for('index.get_search'), NO_REPO, 200, 200, 200, 200),
IndexTestSpec(url_for('index.ping'), NO_REPO, 200, 200, 200, 200), IndexTestSpec(url_for('index.ping'), NO_REPO, 200, 200, 200, 200),
@ -14,14 +14,17 @@ from endpoints.api.search import FindRepositories, EntitySearch
from endpoints.api.image import RepositoryImageChanges, RepositoryImage, RepositoryImageList from endpoints.api.image import RepositoryImageChanges, RepositoryImage, RepositoryImageList
from endpoints.api.build import (FileDropResource, RepositoryBuildStatus, RepositoryBuildLogs, from endpoints.api.build import (FileDropResource, RepositoryBuildStatus, RepositoryBuildLogs,
RepositoryBuildList) RepositoryBuildList)
from endpoints.api.robot import UserRobotList, OrgRobot, OrgRobotList, UserRobot from endpoints.api.robot import (UserRobotList, OrgRobot, OrgRobotList, UserRobot,
RegenerateOrgRobot, RegenerateUserRobot)
from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, BuildTriggerSubdirs, from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, BuildTriggerSubdirs,
TriggerBuildList, ActivateBuildTrigger, BuildTrigger, TriggerBuildList, ActivateBuildTrigger, BuildTrigger,
BuildTriggerList, BuildTriggerAnalyze) BuildTriggerList, BuildTriggerAnalyze)
from endpoints.api.repoemail import RepositoryAuthorizedEmail from endpoints.api.repoemail import RepositoryAuthorizedEmail
from endpoints.api.repositorynotification import RepositoryNotification, RepositoryNotificationList from endpoints.api.repositorynotification import RepositoryNotification, RepositoryNotificationList
from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Recovery, Signout, from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Recovery, Signout,
Signin, User, UserAuthorizationList, UserAuthorization, UserNotification) Signin, User, UserAuthorizationList, UserAuthorization, UserNotification,
VerifyUser)
from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList
from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList
from endpoints.api.logs import UserLogs, OrgLogs, RepositoryLogs from endpoints.api.logs import UserLogs, OrgLogs, RepositoryLogs
@ -37,7 +40,7 @@ from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repos
from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission, from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
RepositoryTeamPermissionList, RepositoryUserPermissionList) RepositoryTeamPermissionList, RepositoryUserPermissionList)
from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement from endpoints.api.superuser import SuperUserLogs, SuperUserList, SuperUserManagement
try: try:
@ -432,6 +435,24 @@ class TestSignin(ApiTestCase):
self._run_test('POST', 403, 'devtable', {u'username': 'E9RY', u'password': 'LQ0N'}) self._run_test('POST', 403, 'devtable', {u'username': 'E9RY', u'password': 'LQ0N'})
class TestVerifyUser(ApiTestCase):
def setUp(self):
ApiTestCase.setUp(self)
self._set_url(VerifyUser)
def test_post_anonymous(self):
self._run_test('POST', 401, None, {u'password': 'LQ0N'})
def test_post_freshuser(self):
self._run_test('POST', 403, 'freshuser', {u'password': 'LQ0N'})
def test_post_reader(self):
self._run_test('POST', 403, 'reader', {u'password': 'LQ0N'})
def test_post_devtable(self):
self._run_test('POST', 200, 'devtable', {u'password': 'password'})
class TestListPlans(ApiTestCase): class TestListPlans(ApiTestCase):
def setUp(self): def setUp(self):
ApiTestCase.setUp(self) ApiTestCase.setUp(self)
@ -471,13 +492,13 @@ class TestUser(ApiTestCase):
self._run_test('PUT', 401, None, {}) self._run_test('PUT', 401, None, {})
def test_put_freshuser(self): def test_put_freshuser(self):
self._run_test('PUT', 200, 'freshuser', {}) self._run_test('PUT', 401, 'freshuser', {})
def test_put_reader(self): def test_put_reader(self):
self._run_test('PUT', 200, 'reader', {}) self._run_test('PUT', 401, 'reader', {})
def test_put_devtable(self): def test_put_devtable(self):
self._run_test('PUT', 200, 'devtable', {}) self._run_test('PUT', 401, 'devtable', {})
def test_post_anonymous(self): def test_post_anonymous(self):
self._run_test('POST', 400, None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'}) self._run_test('POST', 400, None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'})
@ -1632,6 +1653,19 @@ class TestOrgRobotBuynlargeZ7pd(ApiTestCase):
ApiTestCase.setUp(self) ApiTestCase.setUp(self)
self._set_url(OrgRobot, orgname="buynlarge", robot_shortname="Z7PD") self._set_url(OrgRobot, orgname="buynlarge", robot_shortname="Z7PD")
def test_get_anonymous(self):
self._run_test('GET', 401, None, None)
def test_get_freshuser(self):
self._run_test('GET', 403, 'freshuser', None)
def test_get_reader(self):
self._run_test('GET', 403, 'reader', None)
def test_get_devtable(self):
self._run_test('GET', 400, 'devtable', None)
def test_put_anonymous(self): def test_put_anonymous(self):
self._run_test('PUT', 401, None, None) self._run_test('PUT', 401, None, None)
@ -1644,6 +1678,7 @@ class TestOrgRobotBuynlargeZ7pd(ApiTestCase):
def test_put_devtable(self): def test_put_devtable(self):
self._run_test('PUT', 400, 'devtable', None) self._run_test('PUT', 400, 'devtable', None)
def test_delete_anonymous(self): def test_delete_anonymous(self):
self._run_test('DELETE', 401, None, None) self._run_test('DELETE', 401, None, None)
@ -3040,6 +3075,19 @@ class TestUserRobot5vdy(ApiTestCase):
ApiTestCase.setUp(self) ApiTestCase.setUp(self)
self._set_url(UserRobot, robot_shortname="robotname") self._set_url(UserRobot, robot_shortname="robotname")
def test_get_anonymous(self):
self._run_test('GET', 401, None, None)
def test_get_freshuser(self):
self._run_test('GET', 400, 'freshuser', None)
def test_get_reader(self):
self._run_test('GET', 400, 'reader', None)
def test_get_devtable(self):
self._run_test('GET', 400, 'devtable', None)
def test_put_anonymous(self): def test_put_anonymous(self):
self._run_test('PUT', 401, None, None) self._run_test('PUT', 401, None, None)
@ -3052,6 +3100,7 @@ class TestUserRobot5vdy(ApiTestCase):
def test_put_devtable(self): def test_put_devtable(self):
self._run_test('PUT', 201, 'devtable', None) self._run_test('PUT', 201, 'devtable', None)
def test_delete_anonymous(self): def test_delete_anonymous(self):
self._run_test('DELETE', 401, None, None) self._run_test('DELETE', 401, None, None)
@ -3065,6 +3114,42 @@ class TestUserRobot5vdy(ApiTestCase):
self._run_test('DELETE', 400, 'devtable', None) self._run_test('DELETE', 400, 'devtable', None)
class TestRegenerateUserRobot(ApiTestCase):
def setUp(self):
ApiTestCase.setUp(self)
self._set_url(RegenerateUserRobot, robot_shortname="robotname")
def test_post_anonymous(self):
self._run_test('POST', 401, None, None)
def test_post_freshuser(self):
self._run_test('POST', 400, 'freshuser', None)
def test_post_reader(self):
self._run_test('POST', 400, 'reader', None)
def test_post_devtable(self):
self._run_test('POST', 400, 'devtable', None)
class TestRegenerateOrgRobot(ApiTestCase):
def setUp(self):
ApiTestCase.setUp(self)
self._set_url(RegenerateOrgRobot, orgname="buynlarge", robot_shortname="robotname")
def test_post_anonymous(self):
self._run_test('POST', 401, None, None)
def test_post_freshuser(self):
self._run_test('POST', 403, 'freshuser', None)
def test_post_reader(self):
self._run_test('POST', 403, 'reader', None)
def test_post_devtable(self):
self._run_test('POST', 400, 'devtable', None)
class TestOrganizationBuynlarge(ApiTestCase): class TestOrganizationBuynlarge(ApiTestCase):
def setUp(self): def setUp(self):
ApiTestCase.setUp(self) ApiTestCase.setUp(self)
@ -16,7 +16,8 @@ from endpoints.api.tag import RepositoryTagImages, RepositoryTag
from endpoints.api.search import FindRepositories, EntitySearch from endpoints.api.search import FindRepositories, EntitySearch
from endpoints.api.image import RepositoryImage, RepositoryImageList from endpoints.api.image import RepositoryImage, RepositoryImageList
from endpoints.api.build import RepositoryBuildStatus, RepositoryBuildLogs, RepositoryBuildList from endpoints.api.build import RepositoryBuildStatus, RepositoryBuildLogs, RepositoryBuildList
from endpoints.api.robot import UserRobotList, OrgRobot, OrgRobotList, UserRobot from endpoints.api.robot import (UserRobotList, OrgRobot, OrgRobotList, UserRobot,
RegenerateUserRobot, RegenerateOrgRobot)
from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, BuildTriggerSubdirs, from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, BuildTriggerSubdirs,
TriggerBuildList, ActivateBuildTrigger, BuildTrigger, TriggerBuildList, ActivateBuildTrigger, BuildTrigger,
BuildTriggerList, BuildTriggerAnalyze) BuildTriggerList, BuildTriggerAnalyze)
@ -40,7 +41,7 @@ from endpoints.api.organization import (OrganizationList, OrganizationMember,
from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository
from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission, from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
RepositoryTeamPermissionList, RepositoryUserPermissionList) RepositoryTeamPermissionList, RepositoryUserPermissionList)
from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement from endpoints.api.superuser import SuperUserLogs, SuperUserList, SuperUserManagement
try: try:
app.register_blueprint(api_bp, url_prefix='/api') app.register_blueprint(api_bp, url_prefix='/api')
@ -327,6 +328,12 @@ class TestChangeUserDetails(ApiTestCase):
data=dict(password='newpasswordiscool')) data=dict(password='newpasswordiscool'))
self.login(READ_ACCESS_USER, password='newpasswordiscool') self.login(READ_ACCESS_USER, password='newpasswordiscool')
def test_changeeemail(self):
self.login(READ_ACCESS_USER)
self.putJsonResponse(User,
data=dict(email='test+foo@devtable.com'))
def test_changeinvoiceemail(self): def test_changeinvoiceemail(self):
self.login(READ_ACCESS_USER) self.login(READ_ACCESS_USER)
@ -1572,6 +1579,30 @@ class TestUserRobots(ApiTestCase):
robots = self.getRobotNames() robots = self.getRobotNames()
assert not NO_ACCESS_USER + '+bender' in robots assert not NO_ACCESS_USER + '+bender' in robots
def test_regenerate(self):
self.login(NO_ACCESS_USER)
# Create a robot.
json = self.putJsonResponse(UserRobot,
params=dict(robot_shortname='bender'),
expected_code=201)
token = json['token']
# Regenerate the robot.
json = self.postJsonResponse(RegenerateUserRobot,
params=dict(robot_shortname='bender'),
expected_code=200)
# Verify the token changed.
self.assertNotEquals(token, json['token'])
json2 = self.getJsonResponse(UserRobot,
params=dict(robot_shortname='bender'),
expected_code=200)
self.assertEquals(json['token'], json2['token'])
class TestOrgRobots(ApiTestCase): class TestOrgRobots(ApiTestCase):
def getRobotNames(self): def getRobotNames(self):
@ -1601,6 +1632,31 @@ class TestOrgRobots(ApiTestCase):
assert not ORGANIZATION + '+bender' in robots assert not ORGANIZATION + '+bender' in robots
def test_regenerate(self):
self.login(ADMIN_ACCESS_USER)
# Create a robot.
json = self.putJsonResponse(OrgRobot,
params=dict(orgname=ORGANIZATION, robot_shortname='bender'),
expected_code=201)
token = json['token']
# Regenerate the robot.
json = self.postJsonResponse(RegenerateOrgRobot,
params=dict(orgname=ORGANIZATION, robot_shortname='bender'),
expected_code=200)
# Verify the token changed.
self.assertNotEquals(token, json['token'])
json2 = self.getJsonResponse(OrgRobot,
params=dict(orgname=ORGANIZATION, robot_shortname='bender'),
expected_code=200)
self.assertEquals(json['token'], json2['token'])
class TestLogs(ApiTestCase): class TestLogs(ApiTestCase):
def test_user_logs(self): def test_user_logs(self):
self.login(ADMIN_ACCESS_USER) self.login(ADMIN_ACCESS_USER)
@ -46,25 +46,30 @@ class TestImageSharing(unittest.TestCase):
preferred = storage.preferred_locations[0] preferred = storage.preferred_locations[0]
image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {}, image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {},
preferred) preferred)
return image.storage.id image.storage.uploading = False
image.storage.save()
return image.storage
def assertSameStorage(self, docker_image_id, storage_id, repository=REPO, username=ADMIN_ACCESS_USER): def assertSameStorage(self, docker_image_id, existing_storage, repository=REPO,
new_storage_id = self.createStorage(docker_image_id, repository, username) username=ADMIN_ACCESS_USER):
self.assertEquals(storage_id, new_storage_id) new_storage = self.createStorage(docker_image_id, repository, username)
self.assertEquals(existing_storage.id, new_storage.id)
def assertDifferentStorage(self, docker_image_id, storage_id, repository=REPO, username=ADMIN_ACCESS_USER): def assertDifferentStorage(self, docker_image_id, existing_storage, repository=REPO,
new_storage_id = self.createStorage(docker_image_id, repository, username) username=ADMIN_ACCESS_USER):
self.assertNotEquals(storage_id, new_storage_id) new_storage = self.createStorage(docker_image_id, repository, username)
self.assertNotEquals(existing_storage.id, new_storage.id)
def test_same_user(self): def test_same_user(self):
""" The same user creates two images, each which should be shared in the same repo. This is a sanity check. """ """ The same user creates two images, each which should be shared in the same repo. This is a
sanity check. """
# Create a reference to a new docker ID => new image. # Create a reference to a new docker ID => new image.
first_storage_id = self.createStorage('first-image') first_storage = self.createStorage('first-image')
# Create a reference to the same docker ID => same image. # Create a reference to the same docker ID => same image.
self.assertSameStorage('first-image', first_storage_id) self.assertSameStorage('first-image', first_storage)
# Create a reference to another new docker ID => new image. # Create a reference to another new docker ID => new image.
second_storage_id = self.createStorage('second-image') second_storage_id = self.createStorage('second-image')
@ -73,68 +78,68 @@ class TestImageSharing(unittest.TestCase):
self.assertSameStorage('second-image', second_storage_id) self.assertSameStorage('second-image', second_storage_id)
# Make sure the images are different. # Make sure the images are different.
self.assertNotEquals(first_storage_id, second_storage_id) self.assertNotEquals(first_storage, second_storage_id)
def test_no_user_private_repo(self): def test_no_user_private_repo(self):
""" If no user is specified (token case usually), then no sharing can occur on a private repo. """ """ If no user is specified (token case usually), then no sharing can occur on a private repo. """
# Create a reference to a new docker ID => new image. # Create a reference to a new docker ID => new image.
first_storage_id = self.createStorage('the-image', username=None, repository=SHARED_REPO) first_storage = self.createStorage('the-image', username=None, repository=SHARED_REPO)
# Create a areference to the same docker ID, but since no username => new image. # Create a areference to the same docker ID, but since no username => new image.
self.assertDifferentStorage('the-image', first_storage_id, username=None, repository=RANDOM_REPO) self.assertDifferentStorage('the-image', first_storage, username=None, repository=RANDOM_REPO)
def test_no_user_public_repo(self): def test_no_user_public_repo(self):
""" If no user is specified (token case usually), then no sharing can occur on a private repo except when the image is first public. """ """ If no user is specified (token case usually), then no sharing can occur on a private repo except when the image is first public. """
# Create a reference to a new docker ID => new image. # Create a reference to a new docker ID => new image.
first_storage_id = self.createStorage('the-image', username=None, repository=PUBLIC_REPO) first_storage = self.createStorage('the-image', username=None, repository=PUBLIC_REPO)
# Create a areference to the same docker ID. Since no username, we'd expect different but the first image is public so => shaed image. # Create a areference to the same docker ID. Since no username, we'd expect different but the first image is public so => shaed image.
self.assertSameStorage('the-image', first_storage_id, username=None, repository=RANDOM_REPO) self.assertSameStorage('the-image', first_storage, username=None, repository=RANDOM_REPO)
def test_different_user_same_repo(self): def test_different_user_same_repo(self):
""" Two different users create the same image in the same repo. """ """ Two different users create the same image in the same repo. """
# Create a reference to a new docker ID under the first user => new image. # Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=PUBLIC_USER, repository=SHARED_REPO) first_storage = self.createStorage('the-image', username=PUBLIC_USER, repository=SHARED_REPO)
# Create a reference to the *same* docker ID under the second user => same image. # Create a reference to the *same* docker ID under the second user => same image.
self.assertSameStorage('the-image', first_storage_id, username=ADMIN_ACCESS_USER, repository=SHARED_REPO) self.assertSameStorage('the-image', first_storage, username=ADMIN_ACCESS_USER, repository=SHARED_REPO)
def test_different_repo_no_shared_access(self): def test_different_repo_no_shared_access(self):
""" Neither user has access to the other user's repository. """ """ Neither user has access to the other user's repository. """
# Create a reference to a new docker ID under the first user => new image. # Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=RANDOM_USER, repository=RANDOM_REPO) first_storage = self.createStorage('the-image', username=RANDOM_USER, repository=RANDOM_REPO)
# Create a reference to the *same* docker ID under the second user => new image. # Create a reference to the *same* docker ID under the second user => new image.
second_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO) second_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO)
# Verify that the users do not share storage. # Verify that the users do not share storage.
self.assertNotEquals(first_storage_id, second_storage_id) self.assertNotEquals(first_storage, second_storage_id)
def test_public_than_private(self): def test_public_than_private(self):
""" An image is created publicly then used privately, so it should be shared. """ """ An image is created publicly then used privately, so it should be shared. """
# Create a reference to a new docker ID under the first user => new image. # Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=PUBLIC_USER, repository=PUBLIC_REPO) first_storage = self.createStorage('the-image', username=PUBLIC_USER, repository=PUBLIC_REPO)
# Create a reference to the *same* docker ID under the second user => same image, since the first was public. # Create a reference to the *same* docker ID under the second user => same image, since the first was public.
self.assertSameStorage('the-image', first_storage_id, username=ADMIN_ACCESS_USER, repository=REPO) self.assertSameStorage('the-image', first_storage, username=ADMIN_ACCESS_USER, repository=REPO)
def test_private_than_public(self): def test_private_than_public(self):
""" An image is created privately then used publicly, so it should *not* be shared. """ """ An image is created privately then used publicly, so it should *not* be shared. """
# Create a reference to a new docker ID under the first user => new image. # Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO)
# Create a reference to the *same* docker ID under the second user => new image, since the first was private. # Create a reference to the *same* docker ID under the second user => new image, since the first was private.
self.assertDifferentStorage('the-image', first_storage_id, username=PUBLIC_USER, repository=PUBLIC_REPO) self.assertDifferentStorage('the-image', first_storage, username=PUBLIC_USER, repository=PUBLIC_REPO)
def test_different_repo_with_access(self): def test_different_repo_with_access(self):
@ -143,64 +148,71 @@ class TestImageSharing(unittest.TestCase):
be shared since the user has access. be shared since the user has access.
""" """
# Create the image in the shared repo => new image. # Create the image in the shared repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=SHARED_REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=SHARED_REPO)
# Create the image in the other user's repo, but since the user (PUBLIC) still has access to the shared # Create the image in the other user's repo, but since the user (PUBLIC) still has access to the shared
# repository, they should reuse the storage. # repository, they should reuse the storage.
self.assertSameStorage('the-image', first_storage_id, username=PUBLIC_USER, repository=PUBLIC_REPO) self.assertSameStorage('the-image', first_storage, username=PUBLIC_USER, repository=PUBLIC_REPO)
def test_org_access(self): def test_org_access(self):
""" An image is accessible by being a member of the organization. """ """ An image is accessible by being a member of the organization. """
# Create the new image under the org's repo => new image. # Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under the user's repo, but since the user has access to the organization => shared image. # Create an image under the user's repo, but since the user has access to the organization => shared image.
self.assertSameStorage('the-image', first_storage_id, username=ADMIN_ACCESS_USER, repository=REPO) self.assertSameStorage('the-image', first_storage, username=ADMIN_ACCESS_USER, repository=REPO)
# Ensure that the user's robot does not have access, since it is not on the permissions list for the repo. # Ensure that the user's robot does not have access, since it is not on the permissions list for the repo.
self.assertDifferentStorage('the-image', first_storage_id, username=ADMIN_ROBOT_USER, repository=SHARED_REPO) self.assertDifferentStorage('the-image', first_storage, username=ADMIN_ROBOT_USER, repository=SHARED_REPO)
def test_org_access_different_user(self): def test_org_access_different_user(self):
""" An image is accessible by being a member of the organization. """ """ An image is accessible by being a member of the organization. """
# Create the new image under the org's repo => new image. # Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under a user's repo, but since the user has access to the organization => shared image. # Create an image under a user's repo, but since the user has access to the organization => shared image.
self.assertSameStorage('the-image', first_storage_id, username=PUBLIC_USER, repository=PUBLIC_REPO) self.assertSameStorage('the-image', first_storage, username=PUBLIC_USER, repository=PUBLIC_REPO)
# Also verify for reader. # Also verify for reader.
self.assertSameStorage('the-image', first_storage_id, username=READ_ACCESS_USER, repository=PUBLIC_REPO) self.assertSameStorage('the-image', first_storage, username=READ_ACCESS_USER, repository=PUBLIC_REPO)
def test_org_no_access(self): def test_org_no_access(self):
""" An image is not accessible if not a member of the organization. """ """ An image is not accessible if not a member of the organization. """
# Create the new image under the org's repo => new image. # Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under a user's repo. Since the user is not a member of the organization => new image. # Create an image under a user's repo. Since the user is not a member of the organization => new image.
self.assertDifferentStorage('the-image', first_storage_id, username=RANDOM_USER, repository=RANDOM_REPO) self.assertDifferentStorage('the-image', first_storage, username=RANDOM_USER, repository=RANDOM_REPO)
def test_org_not_team_member_with_access(self): def test_org_not_team_member_with_access(self):
""" An image is accessible to a user specifically listed as having permission on the org repo. """ """ An image is accessible to a user specifically listed as having permission on the org repo. """
# Create the new image under the org's repo => new image. # Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under a user's repo. Since the user has read access on that repo, they can see the image => shared image. # Create an image under a user's repo. Since the user has read access on that repo, they can see the image => shared image.
self.assertSameStorage('the-image', first_storage_id, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO) self.assertSameStorage('the-image', first_storage, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO)
def test_org_not_team_member_with_no_access(self): def test_org_not_team_member_with_no_access(self):
""" A user that has access to one org repo but not another and is not a team member. """ """ A user that has access to one org repo but not another and is not a team member. """
# Create the new image under the org's repo => new image. # Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ANOTHER_ORG_REPO) first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ANOTHER_ORG_REPO)
# Create an image under a user's repo. The user doesn't have access to the repo (ANOTHER_ORG_REPO) so => new image. # Create an image under a user's repo. The user doesn't have access to the repo (ANOTHER_ORG_REPO) so => new image.
self.assertDifferentStorage('the-image', first_storage_id, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO) self.assertDifferentStorage('the-image', first_storage, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO)
def test_no_link_to_uploading(self):
still_uploading = self.createStorage('an-image', repository=PUBLIC_REPO)
still_uploading.uploading = True
still_uploading.save()
self.assertDifferentStorage('an-image', still_uploading)
@ -30,7 +30,7 @@ class TestConfig(DefaultConfig):
BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs') BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs')
BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef', False] BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef', False]
USERFILES_TYPE = 'FakeUserfiles' USERFILES_LOCATION = 'local_us'
FEATURE_SUPER_USERS = True FEATURE_SUPER_USERS = True
FEATURE_BILLING = True FEATURE_BILLING = True
@ -20,7 +20,7 @@ query = (Image
.join(ImageStorage) .join(ImageStorage)
.switch(Image) .switch(Image)
.join(Repository) .join(Repository)
.where(Repository.name == 'userportal', Repository.namespace == 'crsinc')) .where(ImageStorage.uploading == False))
bad_count = 0 bad_count = 0
good_count = 0 good_count = 0
@ -1,38 +0,0 @@
import argparse
import pickle
from Crypto.PublicKey import RSA
from datetime import datetime, timedelta
def encrypt(message, output_filename):
private_key_file = 'conf/stack/license_key'
with open(private_key_file, 'r') as private_key:
encryptor = RSA.importKey(private_key)
encrypted_data = encryptor.decrypt(message)
with open(output_filename, 'wb') as encrypted_file:
encrypted_file.write(encrypted_data)
parser = argparse.ArgumentParser(description='Create a license file.')
parser.add_argument('--users', type=int, default=20,
help='Number of users allowed by the license')
parser.add_argument('--days', type=int, default=30,
help='Number of days for which the license is valid')
parser.add_argument('--warn', type=int, default=7,
help='Number of days prior to expiration to warn users')
parser.add_argument('--output', type=str, required=True,
help='File in which to store the license')
if __name__ == "__main__":
args = parser.parse_args()
print ('Creating license for %s users for %s days in file: %s' %
(args.users, args.days, args.output))
license_data = {
'LICENSE_EXPIRATION': datetime.utcnow() + timedelta(days=args.days),
'LICENSE_USER_LIMIT': args.users,
'LICENSE_EXPIRATION_WARNING': datetime.utcnow() + timedelta(days=(args.days - args.warn)),
}
encrypt(pickle.dumps(license_data, 2), args.output)
@ -1,4 +1,4 @@
from app import stripe import stripe
from app import app from app import app
from util.invoice import renderInvoiceToHtml from util.invoice import renderInvoiceToHtml
@ -1,4 +1,3 @@
from app import stripe
from app import app from app import app
from util.useremails import send_confirmation_email from util.useremails import send_confirmation_email
@ -30,7 +30,11 @@ class SendToMixpanel(Process):
while True: while True:
mp_request = self._mp_queue.get() mp_request = self._mp_queue.get()
logger.debug('Got queued mixpanel reqeust.') logger.debug('Got queued mixpanel reqeust.')
try:
self._consumer.send(*json.loads(mp_request)) self._consumer.send(*json.loads(mp_request))
except:
# Make sure we don't crash if Mixpanel request fails.
pass
class FakeMixpanel(object): class FakeMixpanel(object):
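The change above wraps the send in a bare try/except so a failed or malformed analytics request cannot crash the forwarding process. A minimal sketch of that best-effort pattern (the function and names are illustrative, not the project's code):

# Best-effort delivery: swallow errors rather than kill the worker loop.
import json

def send_best_effort(consumer, raw_request):
    try:
        consumer.send(*json.loads(raw_request))
        return True
    except Exception:
        return False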
util/backoff.py Normal file
@ -0,0 +1,5 @@
def exponential_backoff(attempts, scaling_factor, base):
backoff = 5 * (pow(2, attempts) - 1)
backoff_time = backoff * scaling_factor
retry_at = backoff_time/10 + base
return retry_at
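A worked example of the helper above: with a scaling factor of 10, the computed retry times land 5, 15 and 35 seconds past the supplied base for the first three attempts.

# Usage sketch for util/backoff.py as added above.
import time
from util.backoff import exponential_backoff

now = time.time()
for attempts in range(1, 4):
    retry_at = exponential_backoff(attempts, 10, now)
    print('attempt %d -> retry %.0fs after base' % (attempts, retry_at - now))   # 5, 15, 35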
util/collections.py Normal file
@ -0,0 +1,12 @@
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
@classmethod
def deep_copy(cls, attr_dict):
copy = AttrDict(attr_dict)
for key, value in copy.items():
if isinstance(value, AttrDict):
copy[key] = cls.deep_copy(value)
return copy
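A short usage sketch for the helper above: keys become attributes because the instance's __dict__ is the dict itself, and deep_copy re-wraps nested AttrDicts so mutating the copy leaves the original untouched. The values below are made up.

# Usage sketch for util/collections.py as added above.
from util.collections import AttrDict

config = AttrDict({'hostname': 'example.invalid', 'limits': AttrDict({'seats': 20})})
print(config.hostname)            # attribute access onto dict keys
copied = AttrDict.deep_copy(config)
copied.limits.seats = 50
print(config.limits.seats)        # still 20; the nested AttrDict was copied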
@ -1,55 +0,0 @@
import calendar
import sys
from email.utils import formatdate
from apscheduler.schedulers.background import BackgroundScheduler
from datetime import datetime, timedelta
from data import model
class ExpirationScheduler(object):
def __init__(self, utc_create_notifications_date, utc_terminate_processes_date):
self._scheduler = BackgroundScheduler()
self._termination_date = utc_terminate_processes_date
soon = datetime.now() + timedelta(seconds=1)
if utc_create_notifications_date > datetime.utcnow():
self._scheduler.add_job(model.delete_all_notifications_by_kind, 'date', run_date=soon,
args=['expiring_license'])
local_notifications_date = self._utc_to_local(utc_create_notifications_date)
self._scheduler.add_job(self._generate_notifications, 'date',
run_date=local_notifications_date)
else:
self._scheduler.add_job(self._generate_notifications, 'date', run_date=soon)
local_termination_date = self._utc_to_local(utc_terminate_processes_date)
self._scheduler.add_job(self._terminate, 'date', run_date=local_termination_date)
@staticmethod
def _format_date(date):
""" Output an RFC822 date format. """
if date is None:
return None
return formatdate(calendar.timegm(date.utctimetuple()))
@staticmethod
def _utc_to_local(utc_dt):
# get integer timestamp to avoid precision lost
timestamp = calendar.timegm(utc_dt.timetuple())
local_dt = datetime.fromtimestamp(timestamp)
return local_dt.replace(microsecond=utc_dt.microsecond)
def _generate_notifications(self):
for user in model.get_active_users():
model.create_unique_notification('expiring_license', user,
{'expires_at': self._format_date(self._termination_date)})
@staticmethod
def _terminate():
sys.exit(1)
def start(self):
self._scheduler.start()
@ -44,9 +44,9 @@ def matches_system_error(status_str):
KNOWN_MATCHES = ['lxc-start: invalid', 'lxc-start: failed to', 'lxc-start: Permission denied'] KNOWN_MATCHES = ['lxc-start: invalid', 'lxc-start: failed to', 'lxc-start: Permission denied']
for match in KNOWN_MATCHES: for match in KNOWN_MATCHES:
# 4 because we might have a Unix control code at the start. # 10 because we might have a Unix control code at the start.
found = status_str.find(match[0:len(match) + 4]) found = status_str.find(match[0:len(match) + 10])
if found >= 0 and found <= 4: if found >= 0 and found <= 10:
return True return True
return False return False
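The widened window above exists because, as the comment notes, a build status line can arrive with a short terminal control code in front of the known lxc-start message. A runnable illustration of the check, with the match-list entry inlined so it stands alone:

# Mirrors the check above: an ANSI colour prefix still counts as a match.
status = '\x1b[0;31mlxc-start: Permission denied'
match = 'lxc-start: Permission denied'
found = status.find(match[0:len(match) + 10])   # slicing past the end just yields match
print(0 <= found <= 10)                         # True: the prefix here is seven characters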
@@ -489,6 +489,7 @@ class DockerfileBuildWorker(Worker):
container['Id'], container['Command']) container['Id'], container['Command'])
docker_cl.kill(container['Id']) docker_cl.kill(container['Id'])
self._timeout.set() self._timeout.set()
except ConnectionError as exc: except ConnectionError as exc:
raise WorkerUnhealthyException(exc.message) raise WorkerUnhealthyException(exc.message)
@@ -506,7 +507,7 @@ class DockerfileBuildWorker(Worker):
job_config = json.loads(repository_build.job_config) job_config = json.loads(repository_build.job_config)
resource_url = user_files.get_file_url(repository_build.resource_key) resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
tag_names = job_config['docker_tags'] tag_names = job_config['docker_tags']
build_subdir = job_config['build_subdir'] build_subdir = job_config['build_subdir']
repo = job_config['repository'] repo = job_config['repository']
@@ -625,6 +626,7 @@ class DockerfileBuildWorker(Worker):
except WorkerUnhealthyException as exc: except WorkerUnhealthyException as exc:
# Spawn a notification that the build has failed. # Spawn a notification that the build has failed.
log_appender('Worker has become unhealthy. Will retry shortly.', build_logs.ERROR)
spawn_failure(exc.message, event_data) spawn_failure(exc.message, event_data)
# Raise the exception to the queue. # Raise the exception to the queue.

View file

@@ -1,41 +0,0 @@
import logging
import argparse
import requests
import json

from app import webhook_queue
from workers.worker import Worker


root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)

FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
formatter = logging.Formatter(FORMAT)

logger = logging.getLogger(__name__)


class WebhookWorker(Worker):
  def process_queue_item(self, job_details):
    url = job_details['url']
    payload = job_details['payload']
    headers = {'Content-type': 'application/json'}

    try:
      resp = requests.post(url, data=json.dumps(payload), headers=headers)
      if resp.status_code/100 != 2:
        logger.error('%s response for webhook to url: %s' % (resp.status_code,
                                                             url))
        return False
    except requests.exceptions.RequestException as ex:
      logger.exception('Webhook was unable to be sent: %s' % ex.message)
      return False

    return True


logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)

worker = WebhookWorker(webhook_queue, poll_period_seconds=15,
                       reservation_seconds=3600)
worker.start()

View file

@@ -102,8 +102,8 @@ class Worker(object):
logger.debug('Running watchdog.') logger.debug('Running watchdog.')
try: try:
self.watchdog() self.watchdog()
except WorkerUnhealthyException: except WorkerUnhealthyException as exc:
logger.error('The worker has encountered an error and will not take new jobs.') logger.error('The worker has encountered an error via watchdog and will not take new jobs: %s' % exc.message)
self.mark_current_incomplete(restore_retry=True) self.mark_current_incomplete(restore_retry=True)
self._stop.set() self._stop.set()
@@ -133,10 +133,10 @@ class Worker(object):
logger.warning('An error occurred processing request: %s', current_queue_item.body) logger.warning('An error occurred processing request: %s', current_queue_item.body)
self.mark_current_incomplete(restore_retry=False) self.mark_current_incomplete(restore_retry=False)
except WorkerUnhealthyException: except WorkerUnhealthyException as exc:
logger.error('The worker has encountered an error and will not take new jobs. Job is being requeued.') logger.error('The worker has encountered an error via the job and will not take new jobs: %s' % exc.message)
self._stop.set()
self.mark_current_incomplete(restore_retry=True) self.mark_current_incomplete(restore_retry=True)
self._stop.set()
finally: finally:
# Close the db handle periodically # Close the db handle periodically
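Both Worker hunks above are about surfacing why a worker went unhealthy: the exception is now bound as exc so its message reaches the log, the in-flight job is requeued with restore_retry=True, and the stop event is set so no new work is taken. A compressed sketch of that handling path; the queue's incomplete() call and the class shape are stand-ins for the real Worker machinery, which this diff only shows in part:

import logging
from threading import Event

logger = logging.getLogger(__name__)


class WorkerUnhealthyException(Exception):
  pass


class SketchWorker(object):
  def __init__(self, queue):
    self._queue = queue
    self._stop = Event()

  def mark_current_incomplete(self, restore_retry=False):
    # Stand-in: hand the reserved item back so another worker can retry it.
    self._queue.incomplete(restore_retry=restore_retry)

  def process_queue_item(self, body):
    raise NotImplementedError

  def run_one(self, item):
    try:
      self.process_queue_item(item)
    except WorkerUnhealthyException as exc:
      logger.error('Worker is unhealthy and will stop taking jobs: %s' % exc.message)
      self.mark_current_incomplete(restore_retry=True)
      self._stop.set()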