Merge remote-tracking branch 'origin/master' into comewithmeifyouwanttowork

Conflicts:
	data/model/legacy.py
	static/js/app.js
Jake Moshenko 2014-09-12 11:03:30 -04:00
commit c5ca46a14b
70 changed files with 1566 additions and 630 deletions

View file

@@ -1,10 +1,10 @@
FROM phusion/baseimage:0.9.11
FROM phusion/baseimage:0.9.13
ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root
# Install the dependencies.
RUN apt-get update # 21AUG2014
RUN apt-get update # 10SEP2014
# New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev

View file

@@ -1,10 +1,10 @@
FROM phusion/baseimage:0.9.11
FROM phusion/baseimage:0.9.13
ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root
# Install the dependencies.
RUN apt-get update # 21AUG2014
RUN apt-get update # 10SEP2014
# New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev

app.py
View file

@@ -88,7 +88,7 @@ Principal(app, use_sessions=False)
login_manager = LoginManager(app)
mail = Mail(app)
storage = Storage(app)
userfiles = Userfiles(app)
userfiles = Userfiles(app, storage)
analytics = Analytics(app)
billing = Billing(app)
sentry = Sentry(app)

View file

@@ -1,5 +1,5 @@
bind = 'unix:/tmp/gunicorn.sock'
workers = 8
workers = 16
worker_class = 'gevent'
timeout = 2000
logconfig = 'conf/logging.conf'

View file

@@ -19,7 +19,7 @@ def build_requests_session():
CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID',
'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY',
'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE',
'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT']
'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID']
def getFrontendVisibleConfig(config_dict):
@@ -89,10 +89,6 @@ class DefaultConfig(object):
# Stripe config
BILLING_TYPE = 'FakeStripe'
# Userfiles
USERFILES_TYPE = 'LocalUserfiles'
USERFILES_PATH = 'test/data/registry/userfiles'
# Analytics
ANALYTICS_TYPE = 'FakeAnalytics'
@@ -115,6 +111,13 @@ class DefaultConfig(object):
GITHUB_LOGIN_CLIENT_ID = ''
GITHUB_LOGIN_CLIENT_SECRET = ''
# Google Config.
GOOGLE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
GOOGLE_USER_URL = 'https://www.googleapis.com/oauth2/v1/userinfo'
GOOGLE_LOGIN_CLIENT_ID = ''
GOOGLE_LOGIN_CLIENT_SECRET = ''
# Requests based HTTP client with a large request pool
HTTPCLIENT = build_requests_session()
@@ -144,6 +147,9 @@ class DefaultConfig(object):
# Feature Flag: Whether GitHub login is supported.
FEATURE_GITHUB_LOGIN = False
# Feature Flag: Whether Google login is supported.
FEATURE_GOOGLE_LOGIN = False
# Feature flag, whether to enable olark chat
FEATURE_OLARK_CHAT = False
@@ -162,3 +168,7 @@ class DefaultConfig(object):
}
DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
# Userfiles
USERFILES_LOCATION = 'local_us'
USERFILES_PATH = 'userfiles/'
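
Note: wiring up the new Google login presumably comes down to overriding these DefaultConfig values in the deployment's stack config; a hypothetical override (the credential values below are placeholders, not from this commit) might look like:

    # Hypothetical stack-config override; values are placeholders.
    FEATURE_GOOGLE_LOGIN = True
    GOOGLE_LOGIN_CLIENT_ID = '1234567890.apps.googleusercontent.com'
    GOOGLE_LOGIN_CLIENT_SECRET = 'not-a-real-secret'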

View file

@@ -3,6 +3,8 @@ import stripe
from datetime import datetime, timedelta
from calendar import timegm
from util.collections import AttrDict
PLANS = [
# Deprecated Plans
{
@@ -118,20 +120,6 @@ def get_plan(plan_id):
return None
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
@classmethod
def deep_copy(cls, attr_dict):
copy = AttrDict(attr_dict)
for key, value in copy.items():
if isinstance(value, AttrDict):
copy[key] = cls.deep_copy(value)
return copy
class FakeStripe(object):
class Customer(AttrDict):
FAKE_PLAN = AttrDict({

View file

@@ -25,7 +25,7 @@ class RedisBuildLogs(object):
"""
return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))
def append_log_message(self, build_id, log_message, log_type=None):
def append_log_message(self, build_id, log_message, log_type=None, log_data=None):
"""
Wraps the message in an envelope and pushes it to the end of the log entry
list and returns the index at which it was inserted.
@@ -37,6 +37,9 @@ class RedisBuildLogs(object):
if log_type:
log_obj['type'] = log_type
if log_data:
log_obj['data'] = log_data
return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1
def get_log_entries(self, build_id, start_index):
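
Note: a sketch of how a caller might use the widened signature, with the new log_data payload landing under the 'data' key of the envelope (the caller name, build_id, and payload here are illustrative, not part of this commit):

    # Hypothetical call site: attach structured data to one log entry.
    index = build_logs.append_log_message(build_id, 'Pushing image',
                                          log_type='phase',
                                          log_data={'current': 3, 'total': 5})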

View file

@@ -76,6 +76,8 @@ class User(BaseModel):
organization = BooleanField(default=False, index=True)
robot = BooleanField(default=False, index=True)
invoice_email = BooleanField(default=False)
invalid_login_attempts = IntegerField(default=0)
last_invalid_login = DateTimeField(default=datetime.utcnow)
class TeamRole(BaseModel):
@@ -127,6 +129,7 @@ class FederatedLogin(BaseModel):
user = ForeignKeyField(User, index=True)
service = ForeignKeyField(LoginService, index=True)
service_ident = CharField()
metadata_json = TextField(default='{}')
class Meta:
database = db

View file

@@ -8,6 +8,7 @@ from peewee import SqliteDatabase
from data.database import all_models, db
from app import app
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from util.collections import AttrDict
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -23,6 +24,7 @@ fileConfig(config.config_file_name)
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = gen_sqlalchemy_metadata(all_models)
tables = AttrDict(target_metadata.tables)
# other values from the config, defined by the needs of env.py,
# can be acquired:
@@ -45,7 +47,7 @@ def run_migrations_offline():
context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)
with context.begin_transaction():
context.run_migrations()
context.run_migrations(tables=tables)
def run_migrations_online():
"""Run migrations in 'online' mode.
@@ -72,7 +74,7 @@ def run_migrations_online():
try:
with context.begin_transaction():
context.run_migrations()
context.run_migrations(tables=tables)
finally:
connection.close()

View file

@@ -14,9 +14,9 @@ from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
def upgrade(tables):
${upgrades if upgrades else "pass"}
def downgrade():
def downgrade(tables):
${downgrades if downgrades else "pass"}

View file

@@ -0,0 +1,35 @@
"""add metadata field to external logins
Revision ID: 1594a74a74ca
Revises: f42b0ea7a4d
Create Date: 2014-09-04 18:17:35.205698
"""
# revision identifiers, used by Alembic.
revision = '1594a74a74ca'
down_revision = 'f42b0ea7a4d'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('federatedlogin', sa.Column('metadata_json', sa.Text(), nullable=False))
### end Alembic commands ###
op.bulk_insert(tables.loginservice,
[
{'id':4, 'name':'google'},
])
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('federatedlogin', 'metadata_json')
### end Alembic commands ###
op.execute(
(tables.loginservice.delete()
.where(tables.loginservice.c.name == op.inline_literal('google')))
)

View file

@@ -14,7 +14,7 @@ from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True)
@@ -34,7 +34,7 @@ def upgrade():
### end Alembic commands ###
def downgrade():
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('visibility_name', table_name='visibility')
op.create_index('visibility_name', 'visibility', ['name'], unique=False)

View file

@@ -13,12 +13,8 @@ down_revision = '4b7ef0c7bdb2'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('externalnotificationmethod',
sa.Column('id', sa.Integer(), nullable=False),
@@ -26,7 +22,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id')
)
op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True)
op.bulk_insert(schema.tables['externalnotificationmethod'],
op.bulk_insert(tables.externalnotificationmethod,
[
{'id':1, 'name':'quay_notification'},
{'id':2, 'name':'email'},
@@ -38,7 +34,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id')
)
op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True)
op.bulk_insert(schema.tables['externalnotificationevent'],
op.bulk_insert(tables.externalnotificationevent,
[
{'id':1, 'name':'repo_push'},
{'id':2, 'name':'build_queued'},
@@ -77,7 +73,7 @@ def upgrade():
op.add_column(u'notification', sa.Column('dismissed', sa.Boolean(), nullable=False))
# Manually add the new notificationkind types
op.bulk_insert(schema.tables['notificationkind'],
op.bulk_insert(tables.notificationkind,
[
{'id':5, 'name':'repo_push'},
{'id':6, 'name':'build_queued'},
@@ -87,7 +83,7 @@ def upgrade():
])
# Manually add the new logentrykind types
op.bulk_insert(schema.tables['logentrykind'],
op.bulk_insert(tables.logentrykind,
[
{'id':39, 'name':'add_repo_notification'},
{'id':40, 'name':'delete_repo_notification'},
@@ -97,61 +93,49 @@ def upgrade():
### end Alembic commands ###
def downgrade():
schema = gen_sqlalchemy_metadata(all_models)
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column(u'notification', 'dismissed')
op.drop_index('repositorynotification_uuid', table_name='repositorynotification')
op.drop_index('repositorynotification_repository_id', table_name='repositorynotification')
op.drop_index('repositorynotification_method_id', table_name='repositorynotification')
op.drop_index('repositorynotification_event_id', table_name='repositorynotification')
op.drop_table('repositorynotification')
op.drop_index('repositoryauthorizedemail_repository_id', table_name='repositoryauthorizedemail')
op.drop_index('repositoryauthorizedemail_email_repository_id', table_name='repositoryauthorizedemail')
op.drop_index('repositoryauthorizedemail_code', table_name='repositoryauthorizedemail')
op.drop_table('repositoryauthorizedemail')
op.drop_index('externalnotificationevent_name', table_name='externalnotificationevent')
op.drop_table('externalnotificationevent')
op.drop_index('externalnotificationmethod_name', table_name='externalnotificationmethod')
op.drop_table('externalnotificationmethod')
# Manually remove the notificationkind and logentrykind types
notificationkind = schema.tables['notificationkind']
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('repo_push')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('repo_push')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_queued')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_queued')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_start')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_start')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_success')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_success')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_failure')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_failure')))
)
op.execute(
(logentrykind.delete()
.where(logentrykind.c.name == op.inline_literal('add_repo_notification')))
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('add_repo_notification')))
)
op.execute(
(logentrykind.delete()
.where(logentrykind.c.name == op.inline_literal('delete_repo_notification')))
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('delete_repo_notification')))
)
### end Alembic commands ###

View file

@@ -13,25 +13,17 @@ down_revision = '82297d834ad'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
op.bulk_insert(schema.tables['logentrykind'],
def upgrade(tables):
op.bulk_insert(tables.logentrykind,
[
{'id': 41, 'name':'regenerate_robot_token'},
])
def downgrade():
schema = gen_sqlalchemy_metadata(all_models)
logentrykind = schema.tables['logentrykind']
def downgrade(tables):
op.execute(
(logentrykind.delete()
.where(logentrykind.c.name == op.inline_literal('regenerate_robot_token')))
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('regenerate_robot_token')))
)

View file

@@ -18,13 +18,13 @@ def get_id(query):
conn = op.get_bind()
return list(conn.execute(query, ()).fetchall())[0][0]
def upgrade():
def upgrade(tables):
conn = op.get_bind()
event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id))
def downgrade():
def downgrade(tables):
conn = op.get_bind()
event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')

View file

@@ -0,0 +1,39 @@
"""add new notification kinds
Revision ID: 4a0c94399f38
Revises: 1594a74a74ca
Create Date: 2014-08-28 16:17:01.898269
"""
# revision identifiers, used by Alembic.
revision = '4a0c94399f38'
down_revision = '1594a74a74ca'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
op.bulk_insert(tables.externalnotificationmethod,
[
{'id':4, 'name':'flowdock'},
{'id':5, 'name':'hipchat'},
{'id':6, 'name':'slack'},
])
def downgrade(tables):
op.execute(
(tables.externalnotificationmethod.delete()
.where(tables.externalnotificationmethod.c.name == op.inline_literal('flowdock')))
)
op.execute(
(tables.externalnotificationmethod.delete()
.where(tables.externalnotificationmethod.c.name == op.inline_literal('hipchat')))
)
op.execute(
(tables.externalnotificationmethod.delete()
.where(tables.externalnotificationmethod.c.name == op.inline_literal('slack')))
)

View file

@@ -11,23 +11,18 @@ revision = '4b7ef0c7bdb2'
down_revision = 'bcdde200a1b'
from alembic import op
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
import sqlalchemy as sa
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
op.bulk_insert(schema.tables['notificationkind'],
def upgrade(tables):
op.bulk_insert(tables.notificationkind,
[
{'id':4, 'name':'maintenance'},
])
def downgrade():
notificationkind = schema.tables['notificationkind']
def downgrade(tables):
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('maintenance')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('maintenance')))
)

View file

@@ -0,0 +1,28 @@
"""Add brute force prevention metadata to the user table.
Revision ID: 4fdb65816b8d
Revises: 43e943c0639f
Create Date: 2014-09-03 12:35:33.722435
"""
# revision identifiers, used by Alembic.
revision = '4fdb65816b8d'
down_revision = '43e943c0639f'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0"))
op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False, server_default=sa.func.now()))
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'last_invalid_login')
op.drop_column('user', 'invalid_login_attempts')
### end Alembic commands ###

View file

@@ -11,14 +11,9 @@ revision = '5a07499ce53f'
down_revision = None
from alembic import op
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
import sqlalchemy as sa
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('loginservice',
sa.Column('id', sa.Integer(), nullable=False),
@@ -27,7 +22,7 @@ def upgrade():
)
op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
op.bulk_insert(schema.tables['loginservice'],
op.bulk_insert(tables.loginservice,
[
{'id':1, 'name':'github'},
{'id':2, 'name':'quayrobot'},
@@ -66,7 +61,7 @@ def upgrade():
)
op.create_index('role_name', 'role', ['name'], unique=False)
op.bulk_insert(schema.tables['role'],
op.bulk_insert(tables.role,
[
{'id':1, 'name':'admin'},
{'id':2, 'name':'write'},
@@ -80,7 +75,7 @@ def upgrade():
)
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
op.bulk_insert(schema.tables['logentrykind'],
op.bulk_insert(tables.logentrykind,
[
{'id':1, 'name':'account_change_plan'},
{'id':2, 'name':'account_change_cc'},
@@ -136,7 +131,7 @@ def upgrade():
)
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
op.bulk_insert(schema.tables['notificationkind'],
op.bulk_insert(tables.notificationkind,
[
{'id':1, 'name':'password_required'},
{'id':2, 'name':'over_private_usage'},
@@ -150,7 +145,7 @@ def upgrade():
)
op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
op.bulk_insert(schema.tables['teamrole'],
op.bulk_insert(tables.teamrole,
[
{'id':1, 'name':'admin'},
{'id':2, 'name':'creator'},
@@ -164,7 +159,7 @@ def upgrade():
)
op.create_index('visibility_name', 'visibility', ['name'], unique=False)
op.bulk_insert(schema.tables['visibility'],
op.bulk_insert(tables.visibility,
[
{'id':1, 'name':'public'},
{'id':2, 'name':'private'},
@@ -194,7 +189,7 @@ def upgrade():
)
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
op.bulk_insert(schema.tables['buildtriggerservice'],
op.bulk_insert(tables.buildtriggerservice,
[
{'id':1, 'name':'github'},
])
@@ -490,119 +485,34 @@ def upgrade():
### end Alembic commands ###
def downgrade():
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('repositorybuild_uuid', table_name='repositorybuild')
op.drop_index('repositorybuild_trigger_id', table_name='repositorybuild')
op.drop_index('repositorybuild_resource_key', table_name='repositorybuild')
op.drop_index('repositorybuild_repository_id', table_name='repositorybuild')
op.drop_index('repositorybuild_pull_robot_id', table_name='repositorybuild')
op.drop_index('repositorybuild_access_token_id', table_name='repositorybuild')
op.drop_table('repositorybuild')
op.drop_index('repositorybuildtrigger_write_token_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_service_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_repository_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_pull_robot_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_connected_user_id', table_name='repositorybuildtrigger')
op.drop_table('repositorybuildtrigger')
op.drop_index('logentry_repository_id', table_name='logentry')
op.drop_index('logentry_performer_id', table_name='logentry')
op.drop_index('logentry_kind_id', table_name='logentry')
op.drop_index('logentry_datetime', table_name='logentry')
op.drop_index('logentry_account_id', table_name='logentry')
op.drop_index('logentry_access_token_id', table_name='logentry')
op.drop_table('logentry')
op.drop_index('repositorytag_repository_id_name', table_name='repositorytag')
op.drop_index('repositorytag_repository_id', table_name='repositorytag')
op.drop_index('repositorytag_image_id', table_name='repositorytag')
op.drop_table('repositorytag')
op.drop_index('permissionprototype_role_id', table_name='permissionprototype')
op.drop_index('permissionprototype_org_id_activating_user_id', table_name='permissionprototype')
op.drop_index('permissionprototype_org_id', table_name='permissionprototype')
op.drop_index('permissionprototype_delegate_user_id', table_name='permissionprototype')
op.drop_index('permissionprototype_delegate_team_id', table_name='permissionprototype')
op.drop_index('permissionprototype_activating_user_id', table_name='permissionprototype')
op.drop_table('permissionprototype')
op.drop_index('image_storage_id', table_name='image')
op.drop_index('image_repository_id_docker_image_id', table_name='image')
op.drop_index('image_repository_id', table_name='image')
op.drop_index('image_ancestors', table_name='image')
op.drop_table('image')
op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode')
op.drop_index('oauthauthorizationcode_application_id', table_name='oauthauthorizationcode')
op.drop_table('oauthauthorizationcode')
op.drop_index('webhook_repository_id', table_name='webhook')
op.drop_index('webhook_public_id', table_name='webhook')
op.drop_table('webhook')
op.drop_index('teammember_user_id_team_id', table_name='teammember')
op.drop_index('teammember_user_id', table_name='teammember')
op.drop_index('teammember_team_id', table_name='teammember')
op.drop_table('teammember')
op.drop_index('oauthaccesstoken_uuid', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_authorized_user_id', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_application_id', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_access_token', table_name='oauthaccesstoken')
op.drop_table('oauthaccesstoken')
op.drop_index('repositorypermission_user_id_repository_id', table_name='repositorypermission')
op.drop_index('repositorypermission_user_id', table_name='repositorypermission')
op.drop_index('repositorypermission_team_id_repository_id', table_name='repositorypermission')
op.drop_index('repositorypermission_team_id', table_name='repositorypermission')
op.drop_index('repositorypermission_role_id', table_name='repositorypermission')
op.drop_index('repositorypermission_repository_id', table_name='repositorypermission')
op.drop_table('repositorypermission')
op.drop_index('accesstoken_role_id', table_name='accesstoken')
op.drop_index('accesstoken_repository_id', table_name='accesstoken')
op.drop_index('accesstoken_code', table_name='accesstoken')
op.drop_table('accesstoken')
op.drop_index('repository_visibility_id', table_name='repository')
op.drop_index('repository_namespace_name', table_name='repository')
op.drop_table('repository')
op.drop_index('team_role_id', table_name='team')
op.drop_index('team_organization_id', table_name='team')
op.drop_index('team_name_organization_id', table_name='team')
op.drop_index('team_name', table_name='team')
op.drop_table('team')
op.drop_index('emailconfirmation_user_id', table_name='emailconfirmation')
op.drop_index('emailconfirmation_code', table_name='emailconfirmation')
op.drop_table('emailconfirmation')
op.drop_index('notification_uuid', table_name='notification')
op.drop_index('notification_target_id', table_name='notification')
op.drop_index('notification_kind_id', table_name='notification')
op.drop_index('notification_created', table_name='notification')
op.drop_table('notification')
op.drop_index('oauthapplication_organization_id', table_name='oauthapplication')
op.drop_index('oauthapplication_client_id', table_name='oauthapplication')
op.drop_table('oauthapplication')
op.drop_index('federatedlogin_user_id', table_name='federatedlogin')
op.drop_index('federatedlogin_service_id_user_id', table_name='federatedlogin')
op.drop_index('federatedlogin_service_id_service_ident', table_name='federatedlogin')
op.drop_index('federatedlogin_service_id', table_name='federatedlogin')
op.drop_table('federatedlogin')
op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
op.drop_table('buildtriggerservice')
op.drop_index('user_username', table_name='user')
op.drop_index('user_stripe_id', table_name='user')
op.drop_index('user_robot', table_name='user')
op.drop_index('user_organization', table_name='user')
op.drop_index('user_email', table_name='user')
op.drop_table('user')
op.drop_index('visibility_name', table_name='visibility')
op.drop_table('visibility')
op.drop_index('teamrole_name', table_name='teamrole')
op.drop_table('teamrole')
op.drop_index('notificationkind_name', table_name='notificationkind')
op.drop_table('notificationkind')
op.drop_index('logentrykind_name', table_name='logentrykind')
op.drop_table('logentrykind')
op.drop_index('role_name', table_name='role')
op.drop_table('role')
op.drop_index('queueitem_queue_name', table_name='queueitem')
op.drop_index('queueitem_processing_expires', table_name='queueitem')
op.drop_index('queueitem_available_after', table_name='queueitem')
op.drop_index('queueitem_available', table_name='queueitem')
op.drop_table('queueitem')
op.drop_table('imagestorage')
op.drop_index('loginservice_name', table_name='loginservice')
op.drop_table('loginservice')
### end Alembic commands ###

View file

@@ -13,24 +13,17 @@ down_revision = '47670cbeced'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
op.bulk_insert(schema.tables['imagestoragelocation'],
def upgrade(tables):
op.bulk_insert(tables.imagestoragelocation,
[
{'id':8, 'name':'s3_us_west_1'},
])
def downgrade():
schema = gen_sqlalchemy_metadata(all_models)
def downgrade(tables):
op.execute(
(imagestoragelocation.delete()
.where(imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
(tables.imagestoragelocation.delete()
.where(tables.imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
)

View file

@@ -11,14 +11,10 @@ revision = 'bcdde200a1b'
down_revision = '201d55b38649'
from alembic import op
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
import sqlalchemy as sa
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('imagestoragelocation',
sa.Column('id', sa.Integer(), nullable=False),
@@ -27,7 +23,7 @@ def upgrade():
)
op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True)
op.bulk_insert(schema.tables['imagestoragelocation'],
op.bulk_insert(tables.imagestoragelocation,
[
{'id':1, 'name':'s3_us_east_1'},
{'id':2, 'name':'s3_eu_west_1'},
@@ -52,12 +48,8 @@ def upgrade():
### end Alembic commands ###
def downgrade():
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('imagestorageplacement_storage_id_location_id', table_name='imagestorageplacement')
op.drop_index('imagestorageplacement_storage_id', table_name='imagestorageplacement')
op.drop_index('imagestorageplacement_location_id', table_name='imagestorageplacement')
op.drop_table('imagestorageplacement')
op.drop_index('imagestoragelocation_name', table_name='imagestoragelocation')
op.drop_table('imagestoragelocation')
### end Alembic commands ###

View file

@@ -0,0 +1,35 @@
"""Remove the old webhooks table.
Revision ID: f42b0ea7a4d
Revises: 4fdb65816b8d
Create Date: 2014-09-03 13:43:23.391464
"""
# revision identifiers, used by Alembic.
revision = 'f42b0ea7a4d'
down_revision = '4fdb65816b8d'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('webhook')
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('webhook',
sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False),
sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('parameters', mysql.LONGTEXT(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'),
sa.PrimaryKeyConstraint('id'),
mysql_default_charset=u'latin1',
mysql_engine=u'InnoDB'
)
### end Alembic commands ###

View file

@@ -1,12 +1,17 @@
import bcrypt
import logging
import datetime
import dateutil.parser
import json
from datetime import datetime, timedelta
from data.database import *
from util.validation import *
from util.names import format_robot_username
from util.backoff import exponential_backoff
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
logger = logging.getLogger(__name__)
@@ -75,6 +80,12 @@ class UserAlreadyInTeam(DataModelException):
pass
class TooManyLoginAttemptsException(Exception):
def __init__(self, message, retry_after):
super(TooManyLoginAttemptsException, self).__init__(message)
self.retry_after = retry_after
def is_create_user_allowed():
return True
@@ -413,7 +424,8 @@ def set_team_org_permission(team, team_role_name, set_by_username):
return team
def create_federated_user(username, email, service_name, service_id, set_password_notification):
def create_federated_user(username, email, service_name, service_id,
set_password_notification, metadata={}):
if not is_create_user_allowed():
raise TooManyUsersException()
@@ -423,7 +435,8 @@ def create_federated_user(username, email, service_name, service_id, set_passwor
service = LoginService.get(LoginService.name == service_name)
FederatedLogin.create(user=new_user, service=service,
service_ident=service_id)
service_ident=service_id,
metadata_json=json.dumps(metadata))
if set_password_notification:
create_notification('password_required', new_user)
@@ -431,9 +444,10 @@ def create_federated_user(username, email, service_name, service_id, set_passwor
return new_user
def attach_federated_login(user, service_name, service_id):
def attach_federated_login(user, service_name, service_id, metadata={}):
service = LoginService.get(LoginService.name == service_name)
FederatedLogin.create(user=user, service=service, service_ident=service_id)
FederatedLogin.create(user=user, service=service, service_ident=service_id,
metadata_json=json.dumps(metadata))
return user
@@ -452,7 +466,7 @@ def verify_federated_login(service_name, service_id):
def list_federated_logins(user):
selected = FederatedLogin.select(FederatedLogin.service_ident,
LoginService.name)
LoginService.name, FederatedLogin.metadata_json)
joined = selected.join(LoginService)
return joined.where(LoginService.name != 'quayrobot',
FederatedLogin.user == user)
@@ -588,11 +602,30 @@ def verify_user(username_or_email, password):
except User.DoesNotExist:
return None
now = datetime.utcnow()
if fetched.invalid_login_attempts > 0:
can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
fetched.last_invalid_login)
if can_retry_at > now:
retry_after = can_retry_at - now
raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())
if (fetched.password_hash and
bcrypt.hashpw(password, fetched.password_hash) ==
fetched.password_hash):
if fetched.invalid_login_attempts > 0:
fetched.invalid_login_attempts = 0
fetched.save()
return fetched
fetched.invalid_login_attempts += 1
fetched.last_invalid_login = now
fetched.save()
# We weren't able to authorize the user
return None
@@ -1078,7 +1111,8 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
.join(Repository)
.join(Visibility)
.switch(Repository)
.join(RepositoryPermission, JOIN_LEFT_OUTER))
.join(RepositoryPermission, JOIN_LEFT_OUTER)
.where(ImageStorage.uploading == False))
query = (_filter_to_repos_for_user(query, username)
.where(Image.docker_image_id == docker_image_id))
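
Note: util/backoff.py itself is not part of this diff; a minimal sketch consistent with the call site in verify_user (the exact doubling rule is an assumption) would be:

    from datetime import datetime, timedelta

    def exponential_backoff(attempts, scale, last_invalid):
        # The permitted retry time doubles with each consecutive failure,
        # anchored at the time of the last invalid login.
        return last_invalid + scale * (2 ** attempts)

    # Three failures at a 1-second scale => next attempt 8s after the last.
    print(exponential_backoff(3, timedelta(seconds=1), datetime.utcnow()))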

View file

@@ -1,110 +1,35 @@
import boto
import os
import logging
import hashlib
import magic
from boto.s3.key import Key
from uuid import uuid4
from flask import url_for, request, send_file, make_response, abort
from flask.views import View
from _pyio import BufferedReader
logger = logging.getLogger(__name__)
class FakeUserfiles(object):
def prepare_for_drop(self, mime_type):
return ('http://fake/url', uuid4())
def store_file(self, file_like_obj, content_type):
raise NotImplementedError()
def get_file_url(self, file_id, expires_in=300):
return ('http://fake/url')
def get_file_checksum(self, file_id):
return 'abcdefg'
class S3FileWriteException(Exception):
pass
class S3Userfiles(object):
def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
self._initialized = False
self._bucket_name = bucket_name
self._access_key = s3_access_key
self._secret_key = s3_secret_key
self._prefix = path
self._s3_conn = None
self._bucket = None
def _initialize_s3(self):
if not self._initialized:
self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
self._bucket = self._s3_conn.get_bucket(self._bucket_name)
self._initialized = True
def prepare_for_drop(self, mime_type):
""" Returns a signed URL to upload a file to our bucket. """
self._initialize_s3()
logger.debug('Requested upload url with content type: %s' % mime_type)
file_id = str(uuid4())
full_key = os.path.join(self._prefix, file_id)
k = Key(self._bucket, full_key)
url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
encrypt_key=True)
return (url, file_id)
def store_file(self, file_like_obj, content_type):
self._initialize_s3()
file_id = str(uuid4())
full_key = os.path.join(self._prefix, file_id)
k = Key(self._bucket, full_key)
logger.debug('Setting s3 content type to: %s' % content_type)
k.set_metadata('Content-Type', content_type)
bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
rewind=True)
if bytes_written == 0:
raise S3FileWriteException('Unable to write file to S3')
return file_id
def get_file_url(self, file_id, expires_in=300, mime_type=None):
self._initialize_s3()
full_key = os.path.join(self._prefix, file_id)
k = Key(self._bucket, full_key)
headers = None
if mime_type:
headers={'Content-Type': mime_type}
return k.generate_url(expires_in, headers=headers)
def get_file_checksum(self, file_id):
self._initialize_s3()
full_key = os.path.join(self._prefix, file_id)
k = self._bucket.lookup(full_key)
return k.etag[1:-1][:7]
class UserfilesHandlers(View):
methods = ['GET', 'PUT']
def __init__(self, local_userfiles):
self._userfiles = local_userfiles
def __init__(self, distributed_storage, location, files):
self._storage = distributed_storage
self._files = files
self._locations = {location}
self._magic = magic.Magic(mime=True)
def get(self, file_id):
path = self._userfiles.file_path(file_id)
if not os.path.exists(path):
path = self._files.get_file_id_path(file_id)
try:
file_stream = self._storage.stream_read_file(self._locations, path)
buffered = BufferedReader(file_stream)
file_header_bytes = buffered.peek(1024)
return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
except IOError:
abort(404)
logger.debug('Sending path: %s' % path)
return send_file(path, mimetype=self._magic.from_file(path))
def put(self, file_id):
input_stream = request.stream
if request.headers.get('transfer-encoding') == 'chunked':
@@ -112,7 +37,10 @@ class UserfilesHandlers(View):
# encoding (Gunicorn)
input_stream = request.environ['wsgi.input']
self._userfiles.store_stream(input_stream, file_id)
c_type = request.headers.get('Content-Type', None)
path = self._files.get_file_id_path(file_id)
self._storage.stream_write(self._locations, path, input_stream, c_type)
return make_response('Okay')
@@ -123,99 +51,79 @@ class UserfilesHandlers(View):
return self.put(file_id)
class LocalUserfiles(object):
def __init__(self, app, path):
self._root_path = path
self._buffer_size = 64 * 1024 # 64 KB
class DelegateUserfiles(object):
def __init__(self, app, distributed_storage, location, path, handler_name):
self._app = app
self._storage = distributed_storage
self._locations = {location}
self._prefix = path
self._handler_name = handler_name
def _build_url_adapter(self):
return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
script_name=self._app.config['APPLICATION_ROOT'] or '/',
url_scheme=self._app.config['PREFERRED_URL_SCHEME'])
def prepare_for_drop(self, mime_type):
def get_file_id_path(self, file_id):
return os.path.join(self._prefix, file_id)
def prepare_for_drop(self, mime_type, requires_cors=True):
""" Returns a signed URL to upload a file to our bucket. """
logger.debug('Requested upload url with content type: %s' % mime_type)
file_id = str(uuid4())
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
path = self.get_file_id_path(file_id)
url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors)
def file_path(self, file_id):
if '..' in file_id or file_id.startswith('/'):
raise RuntimeError('Invalid Filename')
return os.path.join(self._root_path, file_id)
if url is None:
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
def store_stream(self, stream, file_id):
path = self.file_path(file_id)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(path, 'w') as to_write:
while True:
try:
buf = stream.read(self._buffer_size)
if not buf:
break
to_write.write(buf)
except IOError:
break
return (url, file_id)
def store_file(self, file_like_obj, content_type):
file_id = str(uuid4())
# Rewind the file to match what s3 does
file_like_obj.seek(0, os.SEEK_SET)
self.store_stream(file_like_obj, file_id)
path = self.get_file_id_path(file_id)
self._storage.stream_write(self._locations, path, file_like_obj, content_type)
return file_id
def get_file_url(self, file_id, expires_in=300):
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return url_for('userfiles_handlers', file_id=file_id, _external=True)
def get_file_url(self, file_id, expires_in=300, requires_cors=False):
path = self.get_file_id_path(file_id)
url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors)
if url is None:
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return url_for(self._handler_name, file_id=file_id, _external=True)
return url
def get_file_checksum(self, file_id):
path = self.file_path(file_id)
sha_hash = hashlib.sha256()
with open(path, 'r') as to_hash:
while True:
buf = to_hash.read(self._buffer_size)
if not buf:
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
path = self.get_file_id_path(file_id)
return self._storage.get_checksum(self._locations, path)
class Userfiles(object):
def __init__(self, app=None):
def __init__(self, app=None, distributed_storage=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
self.state = self.init_app(app, distributed_storage)
else:
self.state = None
def init_app(self, app):
storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
path = app.config.get('USERFILES_PATH', '')
def init_app(self, app, distributed_storage):
location = app.config.get('USERFILES_LOCATION')
path = app.config.get('USERFILES_PATH', None)
if storage_type == 'LocalUserfiles':
userfiles = LocalUserfiles(app, path)
app.add_url_rule('/userfiles/<file_id>',
view_func=UserfilesHandlers.as_view('userfiles_handlers',
local_userfiles=userfiles))
handler_name = 'userfiles_handlers'
elif storage_type == 'S3Userfiles':
access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
bucket = app.config.get('USERFILES_S3_BUCKET', '')
userfiles = S3Userfiles(path, access_key, secret_key, bucket)
userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
elif storage_type == 'FakeUserfiles':
userfiles = FakeUserfiles()
else:
raise RuntimeError('Unknown userfiles type: %s' % storage_type)
app.add_url_rule('/userfiles/<file_id>',
view_func=UserfilesHandlers.as_view(handler_name,
distributed_storage=distributed_storage,
location=location,
files=userfiles))
# register extension with app
app.extensions = getattr(app, 'extensions', {})

View file

@@ -1,7 +1,8 @@
import logging
import json
import datetime
from flask import Blueprint, request, make_response, jsonify
from flask import Blueprint, request, make_response, jsonify, session
from flask.ext.restful import Resource, abort, Api, reqparse
from flask.ext.restful.utils.cors import crossdomain
from werkzeug.exceptions import HTTPException
@@ -66,6 +67,11 @@ class Unauthorized(ApiException):
ApiException.__init__(self, 'insufficient_scope', 403, 'Unauthorized', payload)
class FreshLoginRequired(ApiException):
def __init__(self, payload=None):
ApiException.__init__(self, 'fresh_login_required', 401, "Requires fresh login", payload)
class ExceedsLicenseException(ApiException):
def __init__(self, payload=None):
ApiException.__init__(self, None, 402, 'Payment Required', payload)
@@ -87,6 +93,14 @@ def handle_api_error(error):
return response
@api_bp.app_errorhandler(model.TooManyLoginAttemptsException)
@crossdomain(origin='*', headers=['Authorization', 'Content-Type'])
def handle_too_many_login_attempts(error):
response = make_response('Too many login attempts', 429)
response.headers['Retry-After'] = int(error.retry_after)
return response
def resource(*urls, **kwargs):
def wrapper(api_resource):
if not api_resource:
@@ -256,6 +270,26 @@ def require_user_permission(permission_class, scope=None):
require_user_read = require_user_permission(UserReadPermission, scopes.READ_USER)
require_user_admin = require_user_permission(UserAdminPermission, None)
require_fresh_user_admin = require_user_permission(UserAdminPermission, None)
def require_fresh_login(func):
@add_method_metadata('requires_fresh_login', True)
@wraps(func)
def wrapped(*args, **kwargs):
user = get_authenticated_user()
if not user:
raise Unauthorized()
logger.debug('Checking fresh login for user %s', user.username)
last_login = session.get('login_time', datetime.datetime.min)
valid_span = datetime.datetime.now() - datetime.timedelta(minutes=10)
if not user.password_hash or last_login >= valid_span:
return func(*args, **kwargs)
raise FreshLoginRequired()
return wrapped
def require_scope(scope_object):
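
Note: to a client, the new error handler surfaces as HTTP 429 with a Retry-After header measured in seconds; a hypothetical retry sketch (the URL and credentials are placeholders):

    import time
    import requests

    resp = requests.post('https://quay.example/api/v1/signin',
                         json={'username': 'someuser', 'password': 'badpass'})
    if resp.status_code == 429:
        # Honor the server-supplied backoff before trying again.
        time.sleep(int(resp.headers['Retry-After']))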

View file

@@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False):
}
if can_write:
resp['archive_url'] = user_files.get_file_url(build_obj.resource_key)
resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
return resp
@@ -257,7 +257,7 @@ class FileDropResource(ApiResource):
def post(self):
""" Request a URL to which a file may be uploaded. """
mime_type = request.get_json()['mimeType']
(url, file_id) = user_files.prepare_for_drop(mime_type)
(url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True)
return {
'url': url,
'file_id': str(file_id),

View file

@@ -119,6 +119,11 @@ def swagger_route_data(include_internal=False, compact=False):
if internal is not None:
new_operation['internal'] = True
if include_internal:
requires_fresh_login = method_metadata(method, 'requires_fresh_login')
if requires_fresh_login is not None:
new_operation['requires_fresh_login'] = True
if not internal or (internal and include_internal):
operations.append(new_operation)

View file

@@ -9,7 +9,7 @@ from app import app, billing as stripe, authentication
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, NotFound, require_user_admin, parse_args,
query_param, InvalidToken, require_scope, format_date, hide_if, show_if,
license_error)
license_error, require_fresh_login)
from endpoints.api.subscribe import subscribe
from endpoints.common import common_login
from endpoints.api.team import try_accept_invite
@@ -43,9 +43,15 @@ def user_view(user):
organizations = model.get_user_organizations(user.username)
def login_view(login):
try:
metadata = json.loads(login.metadata_json)
except:
metadata = {}
return {
'service': login.service.name,
'service_identifier': login.service_ident,
'metadata': metadata
}
logins = model.list_federated_logins(user)
@@ -92,6 +98,7 @@ class User(ApiResource):
""" Operations related to users. """
schemas = {
'NewUser': {
'id': 'NewUser',
'type': 'object',
'description': 'Fields which must be specified for a new user.',
@@ -147,6 +154,7 @@ class User(ApiResource):
return user_view(user)
@require_user_admin
@require_fresh_login
@nickname('changeUserDetails')
@internal_only
@validate_json_request('UpdateUser')
@@ -372,6 +380,37 @@ class Signin(ApiResource):
return conduct_signin(username, password)
@resource('/v1/signin/verify')
@internal_only
class VerifyUser(ApiResource):
""" Operations for verifying the existing user. """
schemas = {
'VerifyUser': {
'id': 'VerifyUser',
'type': 'object',
'description': 'Information required to verify the signed in user.',
'required': [
'password',
],
'properties': {
'password': {
'type': 'string',
'description': 'The user\'s password',
},
},
},
}
@require_user_admin
@nickname('verifyUser')
@validate_json_request('VerifyUser')
def post(self):
""" Verifies the signed in the user with the specified credentials. """
signin_data = request.get_json()
password = signin_data['password']
return conduct_signin(get_authenticated_user().username, password)
@resource('/v1/signout')
@internal_only
class Signout(ApiResource):
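
Note: the intended flow appears to be that a fresh-login-protected endpoint answers 401, the client re-verifies the password via /v1/signin/verify (refreshing login_time in the session), then retries; a hypothetical client sketch (URLs and payloads are placeholders):

    import requests

    s = requests.Session()
    r = s.put('https://quay.example/api/v1/user/', json={'email': 'new@example.com'})
    if r.status_code == 401:
        # Re-verify the current password, then retry the original request.
        s.post('https://quay.example/api/v1/signin/verify', json={'password': 'hunter2'})
        r = s.put('https://quay.example/api/v1/user/', json={'email': 'new@example.com'})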

View file

@@ -4,12 +4,14 @@ from flask import request, redirect, url_for, Blueprint
from flask.ext.login import current_user
from endpoints.common import render_page_template, common_login, route_show_if
from app import app, analytics
from app import app, analytics, get_app_url
from data import model
from util.names import parse_repository_name
from util.validation import generate_valid_usernames
from util.http import abort
from auth.permissions import AdministerRepositoryPermission
from auth.auth import require_session_login
from peewee import IntegrityError
import features
@@ -20,20 +22,39 @@ client = app.config['HTTPCLIENT']
callback = Blueprint('callback', __name__)
def render_ologin_error(service_name,
error_message='Could not load user data. The token may have expired.'):
return render_page_template('ologinerror.html', service_name=service_name,
error_message=error_message,
service_url=get_app_url())
def exchange_github_code_for_token(code, for_login=True):
def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False,
redirect_suffix=''):
code = request.args.get('code')
id_config = service_name + '_LOGIN_CLIENT_ID' if for_login else service_name + '_CLIENT_ID'
secret_config = service_name + '_LOGIN_CLIENT_SECRET' if for_login else service_name + '_CLIENT_SECRET'
payload = {
'client_id': app.config['GITHUB_LOGIN_CLIENT_ID' if for_login else 'GITHUB_CLIENT_ID'],
'client_secret': app.config['GITHUB_LOGIN_CLIENT_SECRET' if for_login else 'GITHUB_CLIENT_SECRET'],
'client_id': app.config[id_config],
'client_secret': app.config[secret_config],
'code': code,
'grant_type': 'authorization_code',
'redirect_uri': '%s://%s/oauth2/%s/callback%s' % (app.config['PREFERRED_URL_SCHEME'],
app.config['SERVER_HOSTNAME'],
service_name.lower(),
redirect_suffix)
}
headers = {
'Accept': 'application/json'
}
get_access_token = client.post(app.config['GITHUB_TOKEN_URL'],
params=payload, headers=headers)
if form_encode:
get_access_token = client.post(app.config[service_name + '_TOKEN_URL'],
data=payload, headers=headers)
else:
get_access_token = client.post(app.config[service_name + '_TOKEN_URL'],
params=payload, headers=headers)
json_data = get_access_token.json()
if not json_data:
@@ -52,17 +73,82 @@ def get_github_user(token):
return get_user.json()
def get_google_user(token):
token_param = {
'access_token': token,
'alt': 'json',
}
get_user = client.get(app.config['GOOGLE_USER_URL'], params=token_param)
return get_user.json()
def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
to_login = model.verify_federated_login(service_name.lower(), user_id)
if not to_login:
# try to create the user
try:
valid = next(generate_valid_usernames(username))
to_login = model.create_federated_user(valid, email, service_name.lower(),
user_id, set_password_notification=True,
metadata=metadata)
# Success, tell analytics
analytics.track(to_login.username, 'register', {'service': service_name.lower()})
state = request.args.get('state', None)
if state:
logger.debug('Aliasing with state: %s' % state)
analytics.alias(to_login.username, state)
except model.DataModelException, ex:
return render_ologin_error(service_name, ex.message)
if common_login(to_login):
return redirect(url_for('web.index'))
return render_ologin_error(service_name)
def get_google_username(user_data):
username = user_data['email']
at = username.find('@')
if at > 0:
username = username[0:at]
return username
@callback.route('/google/callback', methods=['GET'])
@route_show_if(features.GOOGLE_LOGIN)
def google_oauth_callback():
error = request.args.get('error', None)
if error:
return render_ologin_error('Google', error)
token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE', form_encode=True)
user_data = get_google_user(token)
if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
return render_ologin_error('Google')
username = get_google_username(user_data)
metadata = {
'service_username': user_data['email']
}
return conduct_oauth_login('Google', user_data['id'], username, user_data['email'],
metadata=metadata)
@callback.route('/github/callback', methods=['GET'])
@route_show_if(features.GITHUB_LOGIN)
def github_oauth_callback():
error = request.args.get('error', None)
if error:
return render_page_template('githuberror.html', error_message=error)
return render_ologin_error('GitHub', error)
token = exchange_github_code_for_token(request.args.get('code'))
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
user_data = get_github_user(token)
if not user_data:
return render_page_template('githuberror.html', error_message='Could not load user data')
return render_ologin_error('GitHub')
username = user_data['login']
github_id = user_data['id']
@@ -84,42 +170,67 @@ def github_oauth_callback():
if user_email['primary']:
break
to_login = model.verify_federated_login('github', github_id)
if not to_login:
# try to create the user
try:
to_login = model.create_federated_user(username, found_email, 'github',
github_id, set_password_notification=True)
metadata = {
'service_username': username
}
# Success, tell analytics
analytics.track(to_login.username, 'register', {'service': 'github'})
return conduct_oauth_login('github', github_id, username, found_email, metadata=metadata)
state = request.args.get('state', None)
if state:
logger.debug('Aliasing with state: %s' % state)
analytics.alias(to_login.username, state)
except model.DataModelException, ex:
return render_page_template('githuberror.html', error_message=ex.message)
@callback.route('/google/callback/attach', methods=['GET'])
@route_show_if(features.GOOGLE_LOGIN)
@require_session_login
def google_oauth_attach():
token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE',
redirect_suffix='/attach', form_encode=True)
if common_login(to_login):
return redirect(url_for('web.index'))
user_data = get_google_user(token)
if not user_data or not user_data.get('id', None):
return render_ologin_error('Google')
return render_page_template('githuberror.html')
google_id = user_data['id']
user_obj = current_user.db_user()
username = get_google_username(user_data)
metadata = {
'service_username': user_data['email']
}
try:
model.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
except IntegrityError:
err = 'Google account %s is already attached to a %s account' % (
username, app.config['REGISTRY_TITLE_SHORT'])
return render_ologin_error('Google', err)
return redirect(url_for('web.user'))
@callback.route('/github/callback/attach', methods=['GET'])
@route_show_if(features.GITHUB_LOGIN)
@require_session_login
def github_oauth_attach():
token = exchange_github_code_for_token(request.args.get('code'))
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
user_data = get_github_user(token)
if not user_data:
return render_page_template('githuberror.html', error_message='Could not load user data')
return render_ologin_error('GitHub')
github_id = user_data['id']
user_obj = current_user.db_user()
model.attach_federated_login(user_obj, 'github', github_id)
username = user_data['login']
metadata = {
'service_username': username
}
try:
model.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
except IntegrityError:
err = 'Github account %s is already attached to a %s account' % (
username, app.config['REGISTRY_TITLE_SHORT'])
return render_ologin_error('GitHub', err)
return redirect(url_for('web.user'))
@@ -130,7 +241,8 @@ def github_oauth_attach():
def attach_github_build_trigger(namespace, repository):
permission = AdministerRepositoryPermission(namespace, repository)
if permission.can():
token = exchange_github_code_for_token(request.args.get('code'), for_login=False)
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB',
for_login=False)
repo = model.get_repository(namespace, repository)
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository)

View file

@@ -2,8 +2,9 @@ import logging
import urlparse
import json
import string
import datetime
from flask import make_response, render_template, request, abort
from flask import make_response, render_template, request, abort, session
from flask.ext.login import login_user, UserMixin
from flask.ext.principal import identity_changed
from random import SystemRandom
@@ -112,6 +113,7 @@ def common_login(db_user):
logger.debug('Successfully signed in as: %s' % db_user.username)
new_identity = QuayDeferredPermissionUser(db_user.username, 'username', {scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=new_identity)
session['login_time'] = datetime.datetime.now()
return True
else:
logger.debug('User could not be logged in, inactive?.')

View file

@@ -15,6 +15,13 @@ class NotificationEvent(object):
def __init__(self):
pass
def get_level(self, event_data, notification_data):
"""
Returns a 'level' representing the severity of the event.
Valid values are: 'info', 'warning', 'error', 'primary'
"""
raise NotImplementedError
def get_summary(self, event_data, notification_data):
"""
Returns a human readable one-line summary for the given notification data.
@@ -55,6 +62,9 @@ class RepoPushEvent(NotificationEvent):
def event_name(cls):
return 'repo_push'
def get_level(self, event_data, notification_data):
return 'info'
def get_summary(self, event_data, notification_data):
return 'Repository %s updated' % (event_data['repository'])
@@ -88,6 +98,9 @@ class BuildQueueEvent(NotificationEvent):
def event_name(cls):
return 'build_queued'
def get_level(self, event_data, notification_data):
return 'info'
def get_sample_data(self, repository):
build_uuid = 'fake-build-id'
@@ -127,6 +140,9 @@ class BuildStartEvent(NotificationEvent):
def event_name(cls):
return 'build_start'
def get_level(self, event_data, notification_data):
return 'info'
def get_sample_data(self, repository):
build_uuid = 'fake-build-id'
@@ -155,6 +171,9 @@ class BuildSuccessEvent(NotificationEvent):
def event_name(cls):
return 'build_success'
def get_level(self, event_data, notification_data):
return 'primary'
def get_sample_data(self, repository):
build_uuid = 'fake-build-id'
@@ -183,6 +202,9 @@ class BuildFailureEvent(NotificationEvent):
def event_name(cls):
return 'build_failure'
def get_level(self, event_data, notification_data):
return 'error'
def get_sample_data(self, repository):
build_uuid = 'fake-build-id'
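Each event now reports a severity via get_level. A hypothetical subclass showing the minimal surface an event implements (BuildCancelEvent is illustrative and does not exist in this diff; the get_message/get_sample_data hooks are omitted):

class BuildCancelEvent(NotificationEvent):
  @classmethod
  def event_name(cls):
    return 'build_cancel'

  def get_level(self, event_data, notification_data):
    # Must be one of: 'info', 'warning', 'error', 'primary'.
    return 'warning'

  def get_summary(self, event_data, notification_data):
    return 'Build cancelled for repository %s' % event_data['repository']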

View file

@ -4,9 +4,11 @@ import os.path
import tarfile
import base64
import json
import requests
import re
from flask.ext.mail import Message
from app import mail, app
from app import mail, app, get_app_url
from data import model
logger = logging.getLogger(__name__)
@ -187,3 +189,194 @@ class WebhookMethod(NotificationMethod):
return False
return True
class FlowdockMethod(NotificationMethod):
""" Method for sending notifications to Flowdock via the Team Inbox API:
https://www.flowdock.com/api/team-inbox
"""
@classmethod
def method_name(cls):
return 'flowdock'
def validate(self, repository, config_data):
token = config_data.get('flow_api_token', '')
if not token:
raise CannotValidateNotificationMethodException('Missing Flowdock API Token')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
token = config_data.get('flow_api_token', '')
if not token:
return False
owner = model.get_user(notification.repository.namespace)
if not owner:
# Something went wrong.
return False
url = 'https://api.flowdock.com/v1/messages/team_inbox/%s' % token
headers = {'Content-type': 'application/json'}
payload = {
'source': 'Quay',
'from_address': 'support@quay.io',
'subject': event_handler.get_summary(notification_data['event_data'], notification_data),
'content': event_handler.get_message(notification_data['event_data'], notification_data),
'from_name': owner.username,
'project': notification.repository.namespace + ' ' + notification.repository.name,
'tags': ['#' + event_handler.event_name()],
'link': notification_data['event_data']['homepage']
}
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
logger.error('%s response for flowdock to url: %s' % (resp.status_code,
url))
logger.error(resp.content)
return False
except requests.exceptions.RequestException as ex:
logger.exception('Flowdock notification could not be sent: %s' % ex.message)
return False
return True
class HipchatMethod(NotificationMethod):
""" Method for sending notifications to Hipchat via the API:
https://www.hipchat.com/docs/apiv2/method/send_room_notification
"""
@classmethod
def method_name(cls):
return 'hipchat'
def validate(self, repository, config_data):
if not config_data.get('notification_token', ''):
raise CannotValidateNotificationMethodException('Missing Hipchat Room Notification Token')
if not config_data.get('room_id', ''):
raise CannotValidateNotificationMethodException('Missing Hipchat Room ID')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
token = config_data.get('notification_token', '')
room_id = config_data.get('room_id', '')
if not token or not room_id:
return False
owner = model.get_user(notification.repository.namespace)
if not owner:
# Something went wrong.
return False
url = 'https://api.hipchat.com/v2/room/%s/notification?auth_token=%s' % (room_id, token)
level = event_handler.get_level(notification_data['event_data'], notification_data)
color = {
'info': 'gray',
'warning': 'yellow',
'error': 'red',
'primary': 'purple'
}.get(level, 'gray')
headers = {'Content-type': 'application/json'}
payload = {
'color': color,
'message': event_handler.get_message(notification_data['event_data'], notification_data),
'notify': level == 'error',
'message_format': 'html',
}
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
logger.error('%s response for hipchat to url: %s' % (resp.status_code,
url))
logger.error(resp.content)
return False
except requests.exceptions.RequestException as ex:
logger.exception('Hipchat notification could not be sent: %s' % ex.message)
return False
return True
class SlackMethod(NotificationMethod):
""" Method for sending notifications to Slack via the API:
https://api.slack.com/docs/attachments
"""
@classmethod
def method_name(cls):
return 'slack'
def validate(self, repository, config_data):
if not config_data.get('token', ''):
raise CannotValidateNotificationMethodException('Missing Slack Token')
if not config_data.get('subdomain', '').isalnum():
raise CannotValidateNotificationMethodException('Missing or invalid Slack subdomain')
def formatForSlack(self, message):
message = message.replace('\n', '')
message = re.sub(r'\s+', ' ', message)
message = message.replace('<br>', '\n')
message = re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message)
return message
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
token = config_data.get('token', '')
subdomain = config_data.get('subdomain', '')
if not token or not subdomain:
return False
owner = model.get_user(notification.repository.namespace)
if not owner:
# Something went wrong.
return False
url = 'https://%s.slack.com/services/hooks/incoming-webhook?token=%s' % (subdomain, token)
level = event_handler.get_level(notification_data['event_data'], notification_data)
color = {
'info': '#ffffff',
'warning': 'warning',
'error': 'danger',
'primary': 'good'
}.get(level, '#ffffff')
summary = event_handler.get_summary(notification_data['event_data'], notification_data)
message = event_handler.get_message(notification_data['event_data'], notification_data)
headers = {'Content-type': 'application/json'}
payload = {
'text': summary,
'username': 'quayiobot',
'attachments': [
{
'fallback': summary,
'text': self.formatForSlack(message),
'color': color
}
]
}
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
logger.error('%s response for Slack to url: %s' % (resp.status_code,
url))
logger.error(resp.content)
return False
except requests.exceptions.RequestException as ex:
logger.exception('Slack notification could not be sent: %s' % ex.message)
return False
return True
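As a concrete illustration of what formatForSlack produces (assuming the input is the HTML emitted by get_message), the same transformation in standalone form with a hypothetical message:

import re

def format_for_slack(message):
  message = message.replace('\n', '')
  message = re.sub(r'\s+', ' ', message)
  message = message.replace('<br>', '\n')
  # Convert HTML anchors into Slack's <url|text> link syntax.
  return re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message)

# Hypothetical input: 'Build <a href="https://quay.io/b/1">1</a> failed<br>See logs'
# Produces:           'Build <https://quay.io/b/1|1> failed\nSee logs'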

View file

@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers):
extra_headers = {}
# Add the Accept-Ranges header if the storage engine supports resumeable
# Add the Accept-Ranges header if the storage engine supports resumable
# downloads.
if store.get_supports_resumeable_downloads(repo_image.storage.locations):
profile.debug('Storage supports resumeable downloads')
if store.get_supports_resumable_downloads(repo_image.storage.locations):
profile.debug('Storage supports resumable downloads')
extra_headers['Accept-Ranges'] = 'bytes'
resp = make_response('')

View file

@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger):
with tarfile.open(fileobj=tarball) as archive:
tarball_subdir = archive.getnames()[0]
# Seek to position 0 to make boto multipart happy
tarball.seek(0)
dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)
logger.debug('Successfully prepared job')
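The seek(0) is needed because reading the archive's member names advances the underlying file position; an upload started at EOF would store zero bytes. A runnable illustration with a stand-in stream:

import io

tarball = io.BytesIO(b'example payload')  # stand-in for the downloaded tarball
tarball.read()                   # consuming the stream, as tarfile does
assert tarball.read() == b''     # at EOF: an upload here would send nothing
tarball.seek(0)                  # rewind before handing the stream to storage
assert tarball.read() == b'example payload'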

View file

@ -179,6 +179,8 @@ def initialize_database():
TeamRole.create(name='member')
Visibility.create(name='public')
Visibility.create(name='private')
LoginService.create(name='google')
LoginService.create(name='github')
LoginService.create(name='quayrobot')
LoginService.create(name='ldap')
@ -257,6 +259,10 @@ def initialize_database():
ExternalNotificationMethod.create(name='email')
ExternalNotificationMethod.create(name='webhook')
ExternalNotificationMethod.create(name='flowdock')
ExternalNotificationMethod.create(name='hipchat')
ExternalNotificationMethod.create(name='slack')
NotificationKind.create(name='repo_push')
NotificationKind.create(name='build_queued')
NotificationKind.create(name='build_start')

View file

@ -21,8 +21,7 @@
#quay-logo {
width: 80px;
margin-right: 30px;
width: 100px;
}
#padding-container {
@ -2568,7 +2567,7 @@ p.editable:hover i {
margin-top: 10px;
}
.repo-build .build-log-error-element {
.repo-build .build-log-error-element .error-message-container {
position: relative;
display: inline-block;
margin: 10px;
@ -2578,7 +2577,7 @@ p.editable:hover i {
margin-left: 22px;
}
.repo-build .build-log-error-element i.fa {
.repo-build .build-log-error-element .error-message-container i.fa {
color: red;
position: absolute;
top: 13px;
@ -4598,6 +4597,27 @@ i.quay-icon {
height: 16px;
}
i.flowdock-icon {
background-image: url(/static/img/flowdock.ico);
background-size: 16px;
width: 16px;
height: 16px;
}
i.hipchat-icon {
background-image: url(/static/img/hipchat.png);
background-size: 16px;
width: 16px;
height: 16px;
}
i.slack-icon {
background-image: url(/static/img/slack.ico);
background-size: 16px;
width: 16px;
height: 16px;
}
.external-notification-view-element {
margin: 10px;
padding: 6px;

View file

@ -1,4 +1,23 @@
<span bindonce class="build-log-error-element">
<i class="fa fa-exclamation-triangle"></i>
<span class="error-message" bo-text="error.message"></span>
</span>
<div bindonce class="build-log-error-element">
<span class="error-message-container">
<i class="fa fa-exclamation-triangle"></i>
<span class="error-message" bo-text="error.message"></span>
<span ng-if="error.message == 'HTTP code: 403' && getLocalPullInfo().isLocal">
caused by attempting to pull private repository <a href="/repository/{{ getLocalPullInfo().repo }}">{{ getLocalPullInfo().repo }}</a>
<span ng-if="getLocalPullInfo().login">with inaccessible crdentials</span>
<span ng-if="!getLocalPullInfo().login">without credentials</span>
</span>
</span>
<div class="alert alert-danger" ng-if="error.message == 'HTTP code: 403' && getLocalPullInfo().isLocal">
<div ng-if="getLocalPullInfo().login">
Note: The credentials <b>{{ getLocalPullInfo().login.username }}</b> for registry <b>{{ getLocalPullInfo().login.registry }}</b> cannot
access repository <a href="/repository/{{ getLocalPullInfo().repo }}">{{ getLocalPullInfo().repo }}</a>.
</div>
<div ng-if="!getLocalPullInfo().login">
Note: No robot account is specified for this build. Without such credentials, this pull will always fail. Please set up a new
build trigger with a robot account that has access to <a href="/repository/{{ getLocalPullInfo().repo }}">{{ getLocalPullInfo().repo }}</a> or make that repository public.
</div>
</div>
</div>

View file

@ -73,7 +73,7 @@
<tr ng-if="currentMethod.fields.length"><td colspan="2"><hr></td></tr>
<tr ng-repeat="field in currentMethod.fields">
<td>{{ field.title }}:</td>
<td valign="top">{{ field.title }}:</td>
<td>
<div ng-switch on="field.type">
<span ng-switch-when="email">
@ -86,7 +86,11 @@
current-entity="currentConfig[field.name]"
ng-model="currentConfig[field.name]"
allowed-entities="['user', 'team', 'org']"
ng-switch-when="entity">
ng-switch-when="entity"></div>
<div ng-if="getHelpUrl(field, currentConfig)" style="margin-top: 10px">
See: <a href="{{ getHelpUrl(field, currentConfig) }}" target="_blank">{{ getHelpUrl(field, currentConfig) }}</a>
</div>
</div>
</td>
</tr>

View file

@ -2,7 +2,7 @@
<div class="current-item">
<div class="dropdown-select-icon-transclude"></div>
<input type="text" class="lookahead-input form-control" placeholder="{{ placeholder }}"
ng-readonly="!lookaheadItems || !lookaheadItems.length"></input>
ng-readonly="!allowCustomInput"></input>
</div>
<div class="dropdown">
<button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">

View file

@ -0,0 +1,17 @@
<span class="external-login-button-element">
<span ng-if="provider == 'github'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px">
<i class="fa fa-github fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with GitHub</span>
<span ng-if="action == 'attach'">Attach to GitHub Account</span>
</a>
</span>
<span ng-if="provider == 'google'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GOOGLE_LOGIN']" ng-click="startSignin('google')">
<i class="fa fa-google fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with Google</span>
<span ng-if="action == 'attach'">Attach to Google Account</span>
</a>
</span>
</span>

View file

@ -4,7 +4,7 @@
&equiv;
</button>
<a class="navbar-brand" href="/" target="{{ appLinkTarget() }}">
<img id="quay-logo" src="/static/img/black-horizontal.svg">
<img id="quay-logo" src="/static/img/quay-logo.png">
</a>
</div>

View file

@ -4,17 +4,22 @@
placeholder="Username or E-mail Address" ng-model="user.username" autofocus>
<input type="password" class="form-control input-lg" name="password"
placeholder="Password" ng-model="user.password">
<button class="btn btn-lg btn-primary btn-block" type="submit">Sign In</button>
<span class="social-alternate" quay-require="['GITHUB_LOGIN']">
<i class="fa fa-circle"></i>
<span class="inner-text">OR</span>
<div class="alert alert-warning" ng-show="tryAgainSoon > 0">
Too many attempts have been made to log in. Please try again in {{ tryAgainSoon }} second<span ng-if="tryAgainSoon != 1">s</span>.
</div>
<span ng-show="tryAgainSoon == 0">
<button class="btn btn-lg btn-primary btn-block" type="submit">Sign In</button>
<span class="social-alternate" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">
<i class="fa fa-circle"></i>
<span class="inner-text">OR</span>
</span>
<div class="external-login-button" provider="github" redirect-url="redirectUrl" sign-in-started="markStarted()"></div>
<div class="external-login-button" provider="google" redirect-url="redirectUrl" sign-in-started="markStarted()"></div>
</span>
<a id="github-signin-link" class="btn btn-primary btn-lg btn-block" href="javascript:void(0)" ng-click="showGithub()"
quay-require="['GITHUB_LOGIN']">
<i class="fa fa-github fa-lg"></i> Sign In with GitHub
</a>
</form>
<div class="alert alert-danger" ng-show="invalidCredentials">Invalid username or password.</div>

View file

@ -18,10 +18,8 @@
<i class="fa fa-circle"></i>
<span class="inner-text">OR</span>
</span>
<a href="https://github.com/login/oauth/authorize?client_id={{ githubClientId }}&scope=user:email{{ github_state_clause }}"
class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']">
<i class="fa fa-github fa-lg"></i> Sign In with GitHub
</a>
<div class="external-login-button" provider="github"></div>
<div class="external-login-button" provider="google"></div>
</div>
</form>
<div ng-show="registering" style="text-align: center">

View file

@ -29,7 +29,8 @@
<div class="slideinout" ng-show="currentRepo">
<div style="margin-top: 10px">Dockerfile Location:</div>
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)">
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)"
allow-custom-input="true">
<!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i>

BIN static/img/flowdock.ico (new binary file, 5.4 KiB, not shown)

BIN static/img/hipchat.png (new binary file, 3.1 KiB, not shown)

BIN static/img/slack.ico (new binary file, 22 KiB, not shown)

View file

@ -153,6 +153,14 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
this.currentIndex_ = 0;
}
_ViewArray.prototype.length = function() {
return this.entries.length;
};
_ViewArray.prototype.get = function(index) {
return this.entries[index];
};
_ViewArray.prototype.push = function(elem) {
this.entries.push(elem);
this.hasEntries = true;
@ -384,7 +392,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
var uiService = {};
uiService.hidePopover = function(elem) {
var popover = $('#signupButton').data('bs.popover');
var popover = $(elem).data('bs.popover');
if (popover) {
popover.hide();
}
@ -446,6 +454,29 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
var pingService = {};
var pingCache = {};
var invokeCallback = function($scope, pings, callback) {
if (pings[0] == -1) {
setTimeout(function() {
$scope.$apply(function() {
callback(-1, false, -1);
});
}, 0);
return;
}
var sum = 0;
for (var i = 0; i < pings.length; ++i) {
sum += pings[i];
}
// Report the average ping.
setTimeout(function() {
$scope.$apply(function() {
callback(Math.floor(sum / pings.length), true, pings.length);
});
}, 0);
};
var reportPingResult = function($scope, url, ping, callback) {
// Lookup the cached ping data, if any.
var cached = pingCache[url];
@ -458,28 +489,15 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
// If an error occurred, report it and done.
if (ping < 0) {
cached['pings'] = [-1];
setTimeout(function() {
$scope.$apply(function() {
callback(-1, false, -1);
});
}, 0);
invokeCallback($scope, pings, callback);
return;
}
// Otherwise, add the current ping and determine the average.
cached['pings'].push(ping);
var sum = 0;
for (var i = 0; i < cached['pings'].length; ++i) {
sum += cached['pings'][i];
}
// Report the average ping.
setTimeout(function() {
$scope.$apply(function() {
callback(Math.floor(sum / cached['pings'].length), true, cached['pings'].length);
});
}, 0);
// Invoke the callback.
invokeCallback($scope, cached['pings'], callback);
// Schedule another check if we've done less than three.
if (cached['pings'].length < 3) {
@ -515,12 +533,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
pingService.pingUrl = function($scope, url, callback) {
if (pingCache[url]) {
cached = pingCache[url];
setTimeout(function() {
$scope.$apply(function() {
callback(cached.result, cached.success);
});
}, 0);
invokeCallback($scope, pingCache[url]['pings'], callback);
return;
}
@ -558,6 +571,41 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
$provide.factory('StringBuilderService', ['$sce', 'UtilService', function($sce, UtilService) {
var stringBuilderService = {};
stringBuilderService.buildUrl = function(value_or_func, metadata) {
var url = value_or_func;
if (typeof url != 'string') {
url = url(metadata);
}
// Find the variables to be replaced.
var varNames = [];
for (var i = 0; i < url.length; ++i) {
var c = url[i];
if (c == '{') {
for (var j = i + 1; j < url.length; ++j) {
var d = url[j];
if (d == '}') {
varNames.push(url.substring(i + 1, j));
i = j;
break;
}
}
}
}
// Replace all variables found.
for (var i = 0; i < varNames.length; ++i) {
var varName = varNames[i];
if (!metadata[varName]) {
return null;
}
url = url.replace('{' + varName + '}', metadata[varName]);
}
return url;
};
stringBuilderService.buildString = function(value_or_func, metadata) {
var fieldIcons = {
'inviter': 'user',
@ -716,7 +764,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
return config;
}]);
$provide.factory('ApiService', ['Restangular', function(Restangular) {
$provide.factory('ApiService', ['Restangular', '$q', function(Restangular, $q) {
var apiService = {};
var getResource = function(path, opt_background) {
@ -830,6 +878,77 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
}
};
var freshLoginFailCheck = function(opName, opArgs) {
return function(resp) {
var deferred = $q.defer();
// If the error is a fresh login required, show the dialog.
if (resp.status == 401 && resp.data['error_type'] == 'fresh_login_required') {
var verifyNow = function() {
var info = {
'password': $('#freshPassword').val()
};
$('#freshPassword').val('');
// Conduct the sign in of the user.
apiService.verifyUser(info).then(function() {
// On success, retry the operation. If it succeeds, then resolve the
// deferred promise with the result. Otherwise, reject the same.
apiService[opName].apply(apiService, opArgs).then(function(resp) {
deferred.resolve(resp);
}, function(resp) {
deferred.reject(resp);
});
}, function(resp) {
// Reject with the sign in error.
deferred.reject({'data': {'message': 'Invalid verification credentials'}});
});
};
var box = bootbox.dialog({
"message": 'It has been more than a few minutes since you last logged in, ' +
'so please verify your password to perform this sensitive operation:' +
'<form style="margin-top: 10px" action="javascript:void(0)">' +
'<input id="freshPassword" class="form-control" type="password" placeholder="Current Password">' +
'</form>',
"title": 'Please Verify',
"buttons": {
"verify": {
"label": "Verify",
"className": "btn-success",
"callback": verifyNow
},
"close": {
"label": "Cancel",
"className": "btn-default",
"callback": function() {
deferred.reject({'data': {'message': 'Verification canceled'}});
}
}
}
});
box.bind('shown.bs.modal', function(){
box.find("input").focus();
box.find("form").submit(function() {
if (!$('#freshPassword').val()) { return; }
box.modal('hide');
verifyNow();
});
});
// Return a new promise. We'll accept or reject it based on the result
// of the login.
return deferred.promise;
}
// Otherwise, we just 'raise' the error via the reject method on the promise.
return $q.reject(resp);
};
};
var buildMethodsForOperation = function(operation, resource, resourceMap) {
var method = operation['method'].toLowerCase();
var operationName = operation['nickname'];
@ -843,7 +962,15 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'ignoreLoadingBar': true
});
}
return one['custom' + method.toUpperCase()](opt_options);
var opObj = one['custom' + method.toUpperCase()](opt_options);
// If the operation requires_fresh_login, then add a specialized error handler that
// will defer the operation's result if sudo is requested.
if (operation['requires_fresh_login']) {
opObj = opObj.catch(freshLoginFailCheck(operationName, arguments));
}
return opObj;
};
// If the method for the operation is a GET, add an operationAsResource method.
@ -1141,6 +1268,54 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'title': 'Webhook URL'
}
]
},
{
'id': 'flowdock',
'title': 'Flowdock Team Notification',
'icon': 'flowdock-icon',
'fields': [
{
'name': 'flow_api_token',
'type': 'string',
'title': 'Flow API Token',
'help_url': 'https://www.flowdock.com/account/tokens'
}
]
},
{
'id': 'hipchat',
'title': 'HipChat Room Notification',
'icon': 'hipchat-icon',
'fields': [
{
'name': 'room_id',
'type': 'string',
'title': 'Room ID #'
},
{
'name': 'notification_token',
'type': 'string',
'title': 'Notification Token'
}
]
},
{
'id': 'slack',
'title': 'Slack Room Notification',
'icon': 'slack-icon',
'fields': [
{
'name': 'subdomain',
'type': 'string',
'title': 'Slack Subdomain'
},
{
'name': 'token',
'type': 'string',
'title': 'Token',
'help_url': 'https://{subdomain}.slack.com/services/new/incoming-webhook'
}
]
}
];
@ -1396,10 +1571,41 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
var keyService = {}
keyService['stripePublishableKey'] = Config['STRIPE_PUBLISHABLE_KEY'];
keyService['githubClientId'] = Config['GITHUB_CLIENT_ID'];
keyService['githubLoginClientId'] = Config['GITHUB_LOGIN_CLIENT_ID'];
keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback');
keyService['googleLoginClientId'] = Config['GOOGLE_LOGIN_CLIENT_ID'];
keyService['googleRedirectUri'] = Config.getUrl('/oauth2/google/callback');
keyService['googleLoginUrl'] = 'https://accounts.google.com/o/oauth2/auth?response_type=code&';
keyService['githubLoginUrl'] = 'https://github.com/login/oauth/authorize?';
keyService['googleLoginScope'] = 'openid email';
keyService['githubLoginScope'] = 'user:email';
keyService.getExternalLoginUrl = function(service, action) {
var state_clause = '';
if (Config.MIXPANEL_KEY && window.mixpanel) {
if (mixpanel.get_distinct_id !== undefined) {
state_clause = "&state=" + encodeURIComponent(mixpanel.get_distinct_id());
}
}
var client_id = keyService[service + 'LoginClientId'];
var scope = keyService[service + 'LoginScope'];
var redirect_uri = keyService[service + 'RedirectUri'];
if (action == 'attach') {
redirect_uri += '/attach';
}
var url = keyService[service + 'LoginUrl'] + 'client_id=' + client_id + '&scope=' + scope +
'&redirect_uri=' + redirect_uri + state_clause;
return url;
};
return keyService;
}]);
@ -1805,7 +2011,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
when('/repository/:namespace/:name/build', {templateUrl: '/static/partials/repo-build.html', controller:RepoBuildCtrl, reloadOnSearch: false}).
when('/repository/:namespace/:name/build/:buildid/buildpack', {templateUrl: '/static/partials/build-package.html', controller:BuildPackageCtrl, reloadOnSearch: false}).
when('/repository/', {title: 'Repositories', description: 'Public and private docker repositories list',
templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl}).
templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl, reloadOnSearch: false}).
when('/user/', {title: 'Account Settings', description:'Account settings for ' + title, templateUrl: '/static/partials/user-admin.html',
reloadOnSearch: false, controller: UserAdminCtrl}).
when('/superuser/', {title: 'Superuser Admin Panel', description:'Admin panel for ' + title, templateUrl: '/static/partials/super-user.html',
@ -2339,6 +2545,45 @@ quayApp.directive('userSetup', function () {
});
quayApp.directive('externalLoginButton', function () {
var directiveDefinitionObject = {
priority: 0,
templateUrl: '/static/directives/external-login-button.html',
replace: false,
transclude: true,
restrict: 'C',
scope: {
'signInStarted': '&signInStarted',
'redirectUrl': '=redirectUrl',
'provider': '@provider',
'action': '@action'
},
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) {
var getRedirectUrl = function() {
return $scope.redirectUrl;
};
$scope.startSignin = function(service) {
$scope.signInStarted({'service': service});
var url = KeyService.getExternalLoginUrl(service, $scope.action || 'login');
// Save the redirect URL in a cookie so that we can redirect back after the service returns to us.
var redirectURL = getRedirectUrl() || window.location.toString();
CookieService.putPermanent('quay.redirectAfterLoad', redirectURL);
// Needed to ensure that UI work done by the started callback is finished before the location
// changes.
$timeout(function() {
document.location = url;
}, 250);
};
}
};
return directiveDefinitionObject;
});
quayApp.directive('signinForm', function () {
var directiveDefinitionObject = {
priority: 0,
@ -2351,33 +2596,9 @@ quayApp.directive('signinForm', function () {
'signInStarted': '&signInStarted',
'signedIn': '&signedIn'
},
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, CookieService, Features, Config) {
var getRedirectUrl = function() {
return $scope.redirectUrl;
};
$scope.showGithub = function() {
if (!Features.GITHUB_LOGIN) { return; }
$scope.markStarted();
var mixpanelDistinctIdClause = '';
if (Config.MIXPANEL_KEY && mixpanel.get_distinct_id !== undefined) {
$scope.mixpanelDistinctIdClause = "&state=" + encodeURIComponent(mixpanel.get_distinct_id());
}
// Save the redirect URL in a cookie so that we can redirect back after GitHub returns to us.
var redirectURL = getRedirectUrl() || window.location.toString();
CookieService.putPermanent('quay.redirectAfterLoad', redirectURL);
// Needed to ensure that UI work done by the started callback is finished before the location
// changes.
$timeout(function() {
var url = 'https://github.com/login/oauth/authorize?client_id=' + encodeURIComponent(KeyService.githubLoginClientId) +
'&scope=user:email' + mixpanelDistinctIdClause;
document.location = url;
}, 250);
};
controller: function($scope, $location, $timeout, $interval, ApiService, KeyService, UserService, CookieService, Features, Config) {
$scope.tryAgainSoon = 0;
$scope.tryAgainInterval = null;
$scope.markStarted = function() {
if ($scope.signInStarted != null) {
@ -2385,8 +2606,29 @@ quayApp.directive('signinForm', function () {
}
};
$scope.cancelInterval = function() {
$scope.tryAgainSoon = 0;
if ($scope.tryAgainInterval) {
$interval.cancel($scope.tryAgainInterval);
}
$scope.tryAgainInterval = null;
};
$scope.$watch('user.username', function() {
$scope.cancelInterval();
});
$scope.$on('$destroy', function() {
$scope.cancelInterval();
});
$scope.signin = function() {
if ($scope.tryAgainSoon > 0) { return; }
$scope.markStarted();
$scope.cancelInterval();
ApiService.signinUser($scope.user).then(function() {
$scope.needsEmailVerification = false;
@ -2410,8 +2652,23 @@ quayApp.directive('signinForm', function () {
window.location = (redirectUrl ? redirectUrl : '/');
}, 500);
}, function(result) {
$scope.needsEmailVerification = result.data.needsEmailVerification;
$scope.invalidCredentials = result.data.invalidCredentials;
if (result.status == 429 /* try again later */) {
$scope.needsEmailVerification = false;
$scope.invalidCredentials = false;
$scope.cancelInterval();
$scope.tryAgainSoon = result.headers('Retry-After');
$scope.tryAgainInterval = $interval(function() {
$scope.tryAgainSoon--;
if ($scope.tryAgainSoon <= 0) {
$scope.cancelInterval();
}
}, 1000, $scope.tryAgainSoon);
} else {
$scope.needsEmailVerification = result.data.needsEmailVerification;
$scope.invalidCredentials = result.data.invalidCredentials;
}
});
};
}
@ -2435,15 +2692,6 @@ quayApp.directive('signupForm', function () {
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService) {
$('.form-signup').popover();
if (Config.MIXPANEL_KEY) {
angulartics.waitForVendorApi(mixpanel, 500, function(loadedMixpanel) {
var mixpanelId = loadedMixpanel.get_distinct_id();
$scope.github_state_clause = '&state=' + mixpanelId;
});
}
$scope.githubClientId = KeyService.githubLoginClientId;
$scope.awaitingConfirmation = false;
$scope.registering = false;
@ -2545,7 +2793,11 @@ quayApp.directive('dockerAuthDialog', function (Config) {
},
controller: function($scope, $element) {
var updateCommand = function() {
$scope.command = 'docker login -e="." -u="' + $scope.username +
var escape = function(v) {
if (!v) { return v; }
return v.replace(/\$/g, '\\$');  // escape every '$', not just the first
};
$scope.command = 'docker login -e="." -u="' + escape($scope.username) +
'" -p="' + $scope.token + '" ' + Config['SERVER_HOSTNAME'];
};
@ -4045,9 +4297,11 @@ quayApp.directive('billingOptions', function () {
var save = function() {
$scope.working = true;
var errorHandler = ApiService.errorDisplay('Could not change user details');
ApiService.changeDetails($scope.organization, $scope.obj).then(function(resp) {
$scope.working = false;
});
}, errorHandler);
};
var checkSave = function() {
@ -4207,7 +4461,7 @@ quayApp.directive('namespaceSelector', function () {
'namespace': '=namespace',
'requireCreate': '=requireCreate'
},
controller: function($scope, $element, $routeParams, CookieService) {
controller: function($scope, $element, $routeParams, $location, CookieService) {
$scope.namespaces = {};
$scope.initialize = function(user) {
@ -4244,6 +4498,10 @@ quayApp.directive('namespaceSelector', function () {
if (newNamespace) {
CookieService.putPermanent('quay.namespace', newNamespace);
if ($routeParams['namespace'] && $routeParams['namespace'] != newNamespace) {
$location.search({'namespace': newNamespace});
}
}
};
@ -4288,9 +4546,48 @@ quayApp.directive('buildLogError', function () {
transclude: false,
restrict: 'C',
scope: {
'error': '=error'
'error': '=error',
'entries': '=entries'
},
controller: function($scope, $element) {
controller: function($scope, $element, Config) {
$scope.getLocalPullInfo = function() {
if ($scope.entries.__localpull !== undefined) {
return $scope.entries.__localpull;
}
var localInfo = {
'isLocal': false
};
// Find the 'pulling' phase entry, and then extract any metadata found under
// it.
for (var i = 0; i < $scope.entries.length; ++i) {
var entry = $scope.entries[i];
if (entry.type == 'phase' && entry.message == 'pulling') {
for (var j = 0; j < entry.logs.length(); ++j) {
var log = entry.logs.get(j);
if (log.data && log.data.phasestep == 'login') {
localInfo['login'] = log.data;
}
if (log.data && log.data.phasestep == 'pull') {
var repo_url = log.data['repo_url'];
var repo_and_tag = repo_url.substring(Config.SERVER_HOSTNAME.length + 1);
var tagIndex = repo_and_tag.lastIndexOf(':');
var repo = repo_and_tag.substring(0, tagIndex);
localInfo['repo_url'] = repo_url;
localInfo['repo'] = repo;
localInfo['isLocal'] = repo_url.indexOf(Config.SERVER_HOSTNAME + '/') == 0;
}
}
break;
}
}
return $scope.entries.__localpull = localInfo;
};
}
};
return directiveDefinitionObject;
@ -4326,6 +4623,9 @@ quayApp.directive('dropdownSelect', function ($compile) {
'selectedItem': '=selectedItem',
'placeholder': '=placeholder',
'lookaheadItems': '=lookaheadItems',
'allowCustomInput': '@allowCustomInput',
'handleItemSelected': '&handleItemSelected',
'handleInput': '&handleInput',
@ -5084,7 +5384,7 @@ quayApp.directive('createExternalNotificationDialog', function () {
'counter': '=counter',
'notificationCreated': '&notificationCreated'
},
controller: function($scope, $element, ExternalNotificationData, ApiService, $timeout) {
controller: function($scope, $element, ExternalNotificationData, ApiService, $timeout, StringBuilderService) {
$scope.currentEvent = null;
$scope.currentMethod = null;
$scope.status = '';
@ -5184,6 +5484,15 @@ quayApp.directive('createExternalNotificationDialog', function () {
}, 1000);
};
$scope.getHelpUrl = function(field, config) {
var helpUrl = field['help_url'];
if (!helpUrl) {
return null;
}
return StringBuilderService.buildUrl(helpUrl, config);
};
$scope.$watch('counter', function(counter) {
if (counter) {
$scope.clearCounter++;
@ -5571,7 +5880,9 @@ quayApp.directive('locationView', function () {
$scope.getLocationTooltip = function(location, ping) {
var tip = $scope.getLocationTitle(location) + '<br>';
if (ping < 0) {
if (ping == null) {
tip += '(Loading)';
} else if (ping < 0) {
tip += '<br><b>Note: Could not contact server</b>';
} else {
tip += 'Estimated Ping: ' + (ping ? ping + 'ms' : '(Loading)');
@ -5819,11 +6130,10 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
// Handle session expiration.
Restangular.setErrorInterceptor(function(response) {
if (response.status == 401) {
if (response.data['session_required'] == null || response.data['session_required'] === true) {
$('#sessionexpiredModal').modal({});
return false;
}
if (response.status == 401 && response.data['error_type'] == 'invalid_token' &&
response.data['session_required'] !== false) {
$('#sessionexpiredModal').modal({});
return false;
}
if (response.status == 503) {

View file

@ -527,16 +527,24 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
$scope.deleteTag = function(tagName) {
if (!$scope.repo.can_admin) { return; }
$('#confirmdeleteTagModal').modal('hide');
var params = {
'repository': namespace + '/' + name,
'tag': tagName
};
var errorHandler = ApiService.errorDisplay('Cannot delete tag', function() {
$('#confirmdeleteTagModal').modal('hide');
$scope.deletingTag = false;
});
$scope.deletingTag = true;
ApiService.deleteFullTag(null, params).then(function() {
loadViewInfo();
}, ApiService.errorDisplay('Cannot delete tag'));
$('#confirmdeleteTagModal').modal('hide');
$scope.deletingTag = false;
}, errorHandler);
};
$scope.getImagesForTagBySize = function(tag) {
@ -1641,13 +1649,19 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
UserService.updateUserIn($scope, function(user) {
$scope.cuser = jQuery.extend({}, user);
if (Features.GITHUB_LOGIN && $scope.cuser.logins) {
if ($scope.cuser.logins) {
for (var i = 0; i < $scope.cuser.logins.length; i++) {
if ($scope.cuser.logins[i].service == 'github') {
var githubId = $scope.cuser.logins[i].service_identifier;
$http.get('https://api.github.com/user/' + githubId).success(function(resp) {
$scope.githubLogin = resp.login;
});
var login = $scope.cuser.logins[i];
login.metadata = login.metadata || {};
if (login.service == 'github') {
$scope.hasGithubLogin = true;
$scope.githubLogin = login.metadata['service_username'];
}
if (login.service == 'google') {
$scope.hasGoogleLogin = true;
$scope.googleLogin = login.metadata['service_username'];
}
}
}
@ -1665,7 +1679,6 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
$scope.convertStep = 0;
$scope.org = {};
$scope.githubRedirectUri = KeyService.githubRedirectUri;
$scope.githubClientId = KeyService.githubLoginClientId;
$scope.authorizedApps = null;
$scope.logsShown = 0;
@ -1773,7 +1786,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
$scope.updatingUser = true;
$scope.changePasswordSuccess = false;
ApiService.changeUserDetails($scope.cuser).then(function() {
ApiService.changeUserDetails($scope.cuser).then(function(resp) {
$scope.updatingUser = false;
$scope.changePasswordSuccess = true;

View file

@ -24,7 +24,7 @@
<a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">{{repository.namespace}}/{{repository.name}}</a>
<div class="markdown-view description" content="repository.description" first-line-only="true"></div>
</div>
<a href="/repository/?namespace={{ user.username }}">See All Repositories</a>
<a href="/repository/?namespace={{ namespace }}">See All Repositories</a>
</div>
<!-- No Repos -->

View file

@ -34,7 +34,7 @@
<a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">{{repository.namespace}}/{{repository.name}}</a>
<div class="markdown-view description" content="repository.description" first-line-only="true"></div>
</div>
<a href="/repository/?namespace={{ user.username }}">See All Repositories</a>
<a href="/repository/?namespace={{ namespace }}">See All Repositories</a>
</div>
<!-- No Repos -->

View file

@ -77,7 +77,7 @@
<span class="container-content build-log-phase" phase="container"></span>
</div>
<div ng-switch-when="error">
<span class="container-content build-log-error" error="container"></span>
<span class="container-content build-log-error" error="container" entries="logEntries"></span>
</div>
<div ng-switch-when="command">
<span class="container-content build-log-command" command="container"></span>

View file

@ -33,7 +33,7 @@
<li quay-classes="{'!Features.BILLING': 'active'}"><a href="javascript:void(0)" data-toggle="tab" data-target="#email">Account E-mail</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#robots">Robot Accounts</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#password">Change Password</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#github" quay-require="['GITHUB_LOGIN']">GitHub Login</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#external" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">External Logins</a></li>
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#authorized" ng-click="loadAuthedApps()">Authorized Applications</a></li>
<li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan">
<a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a>
@ -138,13 +138,14 @@
<!-- Change password tab -->
<div id="password" class="tab-pane">
<div class="loading" ng-show="updatingUser">
<div class="quay-spinner 3x"></div>
</div>
<div class="row">
<div class="panel">
<div class="panel-title">Change Password</div>
<div class="loading" ng-show="updatingUser">
<div class="quay-spinner 3x"></div>
</div>
<span class="help-block" ng-show="changePasswordSuccess">Password changed successfully</span>
<div ng-show="!updatingUser" class="panel-body">
@ -162,25 +163,52 @@
</div>
</div>
<!-- Github tab -->
<div id="github" class="tab-pane" quay-require="['GITHUB_LOGIN']">
<!-- External Login tab -->
<div id="external" class="tab-pane" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">
<div class="loading" ng-show="!cuser">
<div class="quay-spinner 3x"></div>
</div>
<div class="row" ng-show="cuser">
<!-- Github -->
<div class="row" quay-show="cuser && Features.GITHUB_LOGIN">
<div class="panel">
<div class="panel-title">GitHub Login:</div>
<div class="panel-body">
<div ng-show="githubLogin" class="lead col-md-8">
<div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8">
<i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
<b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
</div>
<div ng-show="!githubLogin" class="col-md-8">
<a href="https://github.com/login/oauth/authorize?client_id={{ githubClientId }}&scope=user:email{{ github_state_clause }}&redirect_uri={{ githubRedirectUri }}/attach" class="btn btn-primary"><i class="fa fa-github fa-lg"></i> Connect with GitHub</a>
<div ng-show="hasGithubLogin && !githubLogin" class="lead col-md-8">
<i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
Account attached to GitHub Account
</div>
<div ng-show="!hasGithubLogin" class="col-md-4">
<span class="external-login-button" provider="github" action="attach"></span>
</div>
</div>
</div>
</div>
<!-- Google -->
<div class="row" quay-show="cuser && Features.GOOGLE_LOGIN">
<div class="panel">
<div class="panel-title">Google Login:</div>
<div class="panel-body">
<div ng-show="hasGoogleLogin && googleLogin" class="lead col-md-8">
<i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
<b>{{ googleLogin }}</b>
</div>
<div ng-show="hasGoogleLogin && !googleLogin" class="lead col-md-8">
<i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
Account attached to Google Account
</div>
<div ng-show="!hasGoogleLogin" class="col-md-4">
<span class="external-login-button" provider="google" action="attach"></span>
</div>
</div>
</div>
</div>
</div>
<!-- Robot accounts tab -->

View file

@ -391,7 +391,10 @@
</span>?
</h4>
</div>
<div class="modal-body">
<div class="modal-body" ng-show="deletingTag">
<div class="quay-spinner"></div>
</div>
<div class="modal-body" ng-show="!deletingTag">
Are you sure you want to delete tag
<span class="label tag" ng-class="tagToDelete == currentTag.name ? 'label-success' : 'label-default'">
{{ tagToDelete }}
@ -401,7 +404,7 @@
The following images and any other images not referenced by a tag will be deleted:
</div>
</div>
<div class="modal-footer">
<div class="modal-footer" ng-show="!deletingTag">
<button type="button" class="btn btn-primary" ng-click="deleteTag(tagToDelete)">Delete Tag</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
</div>

View file

@ -1,5 +1,5 @@
from storage.local import LocalStorage
from storage.cloud import S3Storage, GoogleCloudStorage
from storage.cloud import S3Storage, GoogleCloudStorage, RadosGWStorage
from storage.fakestorage import FakeStorage
from storage.distributedstorage import DistributedStorage
@ -8,6 +8,7 @@ STORAGE_DRIVER_CLASSES = {
'LocalStorage': LocalStorage,
'S3Storage': S3Storage,
'GoogleCloudStorage': GoogleCloudStorage,
'RadosGWStorage': RadosGWStorage,
}
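With RadosGWStorage registered, a deployment can select it by name in DISTRIBUTED_STORAGE_CONFIG. An illustrative (not verbatim) config entry with placeholder credentials, matching the constructor arguments introduced below:

DISTRIBUTED_STORAGE_CONFIG = {
  'local_us': ['LocalStorage', {'storage_path': '/datastorage/registry'}],
  'ceph_eu': ['RadosGWStorage', {
    'hostname': 'radosgw.internal.example.com',  # placeholder values
    'is_secure': True,
    'storage_path': '/registry',
    'access_key': 'ACCESS',
    'secret_key': 'SECRET',
    'bucket_name': 'quay-registry',
  }],
}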

View file

@ -54,10 +54,13 @@ class BaseStorage(StoragePaths):
# Set the IO buffer to 64kB
buffer_size = 64 * 1024
def get_direct_download_url(self, path, expires_in=60):
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
return None
def get_supports_resumeable_downloads(self):
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
return None
def get_supports_resumable_downloads(self):
return False
def get_content(self, path):
@ -72,7 +75,7 @@ class BaseStorage(StoragePaths):
def stream_read_file(self, path):
raise NotImplementedError
def stream_write(self, path, fp):
def stream_write(self, path, fp, content_type=None):
raise NotImplementedError
def list_directory(self, path=None):
@ -83,3 +86,6 @@ class BaseStorage(StoragePaths):
def remove(self, path):
raise NotImplementedError
def get_checksum(self, path):
raise NotImplementedError

View file

@ -7,36 +7,39 @@ import boto.gs.connection
import boto.s3.key
import boto.gs.key
from io import BufferedIOBase
from storage.basestorage import BaseStorage
logger = logging.getLogger(__name__)
class StreamReadKeyAsFile(object):
class StreamReadKeyAsFile(BufferedIOBase):
def __init__(self, key):
self._key = key
self._finished = False
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self._key.close(fast=True)
def read(self, amt=None):
if self._finished:
if self.closed:
return None
resp = self._key.read(amt)
if not resp:
self._finished = True
return resp
def readable(self):
return True
@property
def closed(self):
return self._key.closed
def close(self):
self._key.close(fast=True)
class _CloudStorage(BaseStorage):
def __init__(self, connection_class, key_class, upload_params, storage_path, access_key,
secret_key, bucket_name):
def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path,
access_key, secret_key, bucket_name):
self._initialized = False
self._bucket_name = bucket_name
self._access_key = access_key
@ -45,12 +48,14 @@ class _CloudStorage(BaseStorage):
self._connection_class = connection_class
self._key_class = key_class
self._upload_params = upload_params
self._connect_kwargs = connect_kwargs
self._cloud_conn = None
self._cloud_bucket = None
def _initialize_cloud_conn(self):
if not self._initialized:
self._cloud_conn = self._connection_class(self._access_key, self._secret_key)
self._cloud_conn = self._connection_class(self._access_key, self._secret_key,
**self._connect_kwargs)
self._cloud_bucket = self._cloud_conn.get_bucket(self._bucket_name)
self._initialized = True
@ -87,15 +92,22 @@ class _CloudStorage(BaseStorage):
key.set_contents_from_string(content, **self._upload_params)
return path
def get_supports_resumeable_downloads(self):
def get_supports_resumable_downloads(self):
return True
def get_direct_download_url(self, path, expires_in=60):
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
self._initialize_cloud_conn()
path = self._init_path(path)
k = self._key_class(self._cloud_bucket, path)
return k.generate_url(expires_in)
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
self._initialize_cloud_conn()
path = self._init_path(path)
key = self._key_class(self._cloud_bucket, path)
url = key.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, encrypt_key=True)
return url
def stream_read(self, path):
self._initialize_cloud_conn()
path = self._init_path(path)
@ -116,14 +128,20 @@ class _CloudStorage(BaseStorage):
raise IOError('No such key: \'{0}\''.format(path))
return StreamReadKeyAsFile(key)
def stream_write(self, path, fp):
def stream_write(self, path, fp, content_type=None):
# Minimum size of upload part size on S3 is 5MB
self._initialize_cloud_conn()
buffer_size = 5 * 1024 * 1024
if self.buffer_size > buffer_size:
buffer_size = self.buffer_size
path = self._init_path(path)
mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params)
metadata = {}
if content_type is not None:
metadata['Content-Type'] = content_type
mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
**self._upload_params)
num_part = 1
while True:
try:
@ -179,25 +197,67 @@ class _CloudStorage(BaseStorage):
for key in self._cloud_bucket.list(prefix=path):
key.delete()
def get_checksum(self, path):
self._initialize_cloud_conn()
path = self._init_path(path)
key = self._key_class(self._cloud_bucket, path)
k = self._cloud_bucket.lookup(key)
return k.etag[1:-1][:7]
class S3Storage(_CloudStorage):
def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
upload_params = {
'encrypt_key': True,
}
connect_kwargs = {}
super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
upload_params, storage_path, s3_access_key, s3_secret_key,
s3_bucket)
connect_kwargs, upload_params, storage_path, s3_access_key,
s3_secret_key, s3_bucket)
class GoogleCloudStorage(_CloudStorage):
def __init__(self, storage_path, access_key, secret_key, bucket_name):
super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key, {},
storage_path, access_key, secret_key, bucket_name)
upload_params = {}
connect_kwargs = {}
super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key,
connect_kwargs, upload_params, storage_path,
access_key, secret_key, bucket_name)
def stream_write(self, path, fp):
def stream_write(self, path, fp, content_type=None):
# Minimum size of upload part size on S3 is 5MB
self._initialize_cloud_conn()
path = self._init_path(path)
key = self._key_class(self._cloud_bucket, path)
if content_type is not None:
key.set_metadata('Content-Type', content_type)
key.set_contents_from_stream(fp)
class RadosGWStorage(_CloudStorage):
def __init__(self, hostname, is_secure, storage_path, access_key, secret_key, bucket_name):
upload_params = {}
connect_kwargs = {
'host': hostname,
'is_secure': is_secure,
'calling_format': boto.s3.connection.OrdinaryCallingFormat(),
}
super(RadosGWStorage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
connect_kwargs, upload_params, storage_path, access_key,
secret_key, bucket_name)
# TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
if requires_cors:
return None
return super(RadosGWStorage, self).get_direct_download_url(path, expires_in, requires_cors)
# TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
if requires_cors:
return None
return super(RadosGWStorage, self).get_direct_upload_url(path, mime_type, requires_cors)
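Because both overrides return None when CORS is required, callers must be prepared to fall back to streaming the bytes through the application. A hedged sketch of that calling pattern; the function and variable names are illustrative:

def download_or_stream(store, locations, path):
  # Prefer a signed direct-download URL when the backend can serve it.
  url = store.get_direct_download_url(locations, path, requires_cors=True)
  if url is not None:
    return ('redirect', url)
  # RadosGW (until it supports CORS) returns None, so proxy the content.
  return ('stream', store.stream_read(locations, path))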

View file

@ -31,6 +31,7 @@ class DistributedStorage(StoragePaths):
self.preferred_locations = list(preferred_locations)
get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url)
get_direct_upload_url = _location_aware(BaseStorage.get_direct_upload_url)
get_content = _location_aware(BaseStorage.get_content)
put_content = _location_aware(BaseStorage.put_content)
stream_read = _location_aware(BaseStorage.stream_read)
@ -39,4 +40,5 @@ class DistributedStorage(StoragePaths):
list_directory = _location_aware(BaseStorage.list_directory)
exists = _location_aware(BaseStorage.exists)
remove = _location_aware(BaseStorage.remove)
get_supports_resumeable_downloads = _location_aware(BaseStorage.get_supports_resumeable_downloads)
get_checksum = _location_aware(BaseStorage.get_checksum)
get_supports_resumable_downloads = _location_aware(BaseStorage.get_supports_resumable_downloads)
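For context, a minimal sketch of what a _location_aware wrapper of this kind typically does; this is an assumption based on how it is used here, not the verbatim implementation:

def _location_aware(unbound_func):
  def wrapper(self, locations, *args, **kwargs):
    # Invoke the method on the first preferred storage that holds the data.
    # self._storages is an assumed mapping of location name -> storage driver.
    for preferred in self.preferred_locations:
      if preferred in locations:
        return unbound_func(self._storages[preferred], *args, **kwargs)
    raise Exception('No storage configured for locations: %s' % locations)
  return wrapper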

View file

@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
def stream_read(self, path):
yield ''
def stream_write(self, path, fp):
def stream_write(self, path, fp, content_type=None):
pass
def remove(self, path):
@ -22,3 +22,6 @@ class FakeStorage(BaseStorage):
def exists(self, path):
return False
def get_checksum(self, path):
return 'abcdefg'

View file

@ -1,6 +1,7 @@
import os
import shutil
import hashlib
import io
from storage.basestorage import BaseStorage
@ -40,9 +41,9 @@ class LocalStorage(BaseStorage):
def stream_read_file(self, path):
path = self._init_path(path)
return open(path, mode='rb')
return io.open(path, mode='rb')
def stream_write(self, path, fp):
def stream_write(self, path, fp, content_type=None):
# Size is mandatory
path = self._init_path(path, create=True)
with open(path, mode='wb') as f:
@ -80,3 +81,14 @@ class LocalStorage(BaseStorage):
os.remove(path)
except OSError:
pass
def get_checksum(self, path):
path = self._init_path(path)
sha_hash = hashlib.sha256()
with open(path, 'r') as to_hash:
while True:
buf = to_hash.read(self.buffer_size)
if not buf:
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
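The seven-character truncation mirrors the cloud implementation's k.etag[1:-1][:7], so checksums stay comparable across backends. A small usage sketch; the constructor argument is assumed:

store = LocalStorage('/tmp/quay-storage')  # assumed root-path argument
store.put_content('sample/path', 'hello world')
print(store.get_checksum('sample/path'))   # 'b94d27b': first 7 hex chars of the SHA-256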

View file

@ -1,22 +1,22 @@
{% extends "base.html" %}
{% block title %}
<title>Error Logging in with GitHub · Quay.io</title>
<title>Error Logging in with {{ service_name }} · Quay.io</title>
{% endblock %}
{% block body_content %}
<div class="container">
<div class="row">
<div class="col-md-12">
<h2>There was an error logging in with GitHub.</h2>
<h2>There was an error logging in with {{ service_name }}.</h2>
{% if error_message %}
<div class="alert alert-danger">{{ error_message }}</div>
{% endif %}
<div>
Please register using the <a href="/">registration form</a> to continue.
You will be able to connect your github account to your Quay.io account
Please register using the <a ng-href="{{ service_url }}/signin" target="_self">registration form</a> to continue.
You will be able to connect your account to your Quay.io account
in the user settings.
</div>
</div>


View file

@ -23,7 +23,8 @@ from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, Bu
from endpoints.api.repoemail import RepositoryAuthorizedEmail
from endpoints.api.repositorynotification import RepositoryNotification, RepositoryNotificationList
from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Recovery, Signout,
Signin, User, UserAuthorizationList, UserAuthorization, UserNotification)
Signin, User, UserAuthorizationList, UserAuthorization, UserNotification,
VerifyUser)
from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList
from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList
from endpoints.api.logs import UserLogs, OrgLogs, RepositoryLogs
@ -434,6 +435,24 @@ class TestSignin(ApiTestCase):
self._run_test('POST', 403, 'devtable', {u'username': 'E9RY', u'password': 'LQ0N'})
class TestVerifyUser(ApiTestCase):
def setUp(self):
ApiTestCase.setUp(self)
self._set_url(VerifyUser)
def test_post_anonymous(self):
self._run_test('POST', 401, None, {u'password': 'LQ0N'})
def test_post_freshuser(self):
self._run_test('POST', 403, 'freshuser', {u'password': 'LQ0N'})
def test_post_reader(self):
self._run_test('POST', 403, 'reader', {u'password': 'LQ0N'})
def test_post_devtable(self):
self._run_test('POST', 200, 'devtable', {u'password': 'password'})
class TestListPlans(ApiTestCase):
def setUp(self):
ApiTestCase.setUp(self)
@ -473,13 +492,13 @@ class TestUser(ApiTestCase):
self._run_test('PUT', 401, None, {})
def test_put_freshuser(self):
self._run_test('PUT', 200, 'freshuser', {})
self._run_test('PUT', 401, 'freshuser', {})
def test_put_reader(self):
self._run_test('PUT', 200, 'reader', {})
self._run_test('PUT', 401, 'reader', {})
def test_put_devtable(self):
self._run_test('PUT', 200, 'devtable', {})
self._run_test('PUT', 401, 'devtable', {})
def test_post_anonymous(self):
self._run_test('POST', 400, None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'})

View file

@ -339,6 +339,12 @@ class TestChangeUserDetails(ApiTestCase):
data=dict(password='newpasswordiscool'))
self.login(READ_ACCESS_USER, password='newpasswordiscool')
def test_changeeemail(self):
self.login(READ_ACCESS_USER)
self.putJsonResponse(User,
data=dict(email='test+foo@devtable.com'))
def test_changeinvoiceemail(self):
self.login(READ_ACCESS_USER)

View file

@ -46,25 +46,30 @@ class TestImageSharing(unittest.TestCase):
preferred = storage.preferred_locations[0]
image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {},
preferred)
return image.storage.id
image.storage.uploading = False
image.storage.save()
return image.storage
def assertSameStorage(self, docker_image_id, storage_id, repository=REPO, username=ADMIN_ACCESS_USER):
new_storage_id = self.createStorage(docker_image_id, repository, username)
self.assertEquals(storage_id, new_storage_id)
def assertSameStorage(self, docker_image_id, existing_storage, repository=REPO,
username=ADMIN_ACCESS_USER):
new_storage = self.createStorage(docker_image_id, repository, username)
self.assertEquals(existing_storage.id, new_storage.id)
def assertDifferentStorage(self, docker_image_id, storage_id, repository=REPO, username=ADMIN_ACCESS_USER):
new_storage_id = self.createStorage(docker_image_id, repository, username)
self.assertNotEquals(storage_id, new_storage_id)
def assertDifferentStorage(self, docker_image_id, existing_storage, repository=REPO,
username=ADMIN_ACCESS_USER):
new_storage = self.createStorage(docker_image_id, repository, username)
self.assertNotEquals(existing_storage.id, new_storage.id)
def test_same_user(self):
""" The same user creates two images, each which should be shared in the same repo. This is a sanity check. """
""" The same user creates two images, each which should be shared in the same repo. This is a
sanity check. """
# Create a reference to a new docker ID => new image.
first_storage_id = self.createStorage('first-image')
first_storage = self.createStorage('first-image')
# Create a reference to the same docker ID => same image.
self.assertSameStorage('first-image', first_storage_id)
self.assertSameStorage('first-image', first_storage)
# Create a reference to another new docker ID => new image.
second_storage_id = self.createStorage('second-image')
@@ -73,68 +78,68 @@ class TestImageSharing(unittest.TestCase):
self.assertSameStorage('second-image', second_storage_id)
# Make sure the images are different.
self.assertNotEquals(first_storage_id, second_storage_id)
self.assertNotEquals(first_storage, second_storage_id)
def test_no_user_private_repo(self):
""" If no user is specified (token case usually), then no sharing can occur on a private repo. """
# Create a reference to a new docker ID => new image.
first_storage_id = self.createStorage('the-image', username=None, repository=SHARED_REPO)
first_storage = self.createStorage('the-image', username=None, repository=SHARED_REPO)
# Create a reference to the same docker ID, but since no username => new image.
self.assertDifferentStorage('the-image', first_storage_id, username=None, repository=RANDOM_REPO)
self.assertDifferentStorage('the-image', first_storage, username=None, repository=RANDOM_REPO)
def test_no_user_public_repo(self):
""" If no user is specified (token case usually), then no sharing can occur on a private repo except when the image is first public. """
# Create a reference to a new docker ID => new image.
first_storage_id = self.createStorage('the-image', username=None, repository=PUBLIC_REPO)
first_storage = self.createStorage('the-image', username=None, repository=PUBLIC_REPO)
# Create a reference to the same docker ID. Since there is no username we'd expect a different image, but the first image is public, so => shared image.
self.assertSameStorage('the-image', first_storage_id, username=None, repository=RANDOM_REPO)
self.assertSameStorage('the-image', first_storage, username=None, repository=RANDOM_REPO)
def test_different_user_same_repo(self):
""" Two different users create the same image in the same repo. """
# Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=PUBLIC_USER, repository=SHARED_REPO)
first_storage = self.createStorage('the-image', username=PUBLIC_USER, repository=SHARED_REPO)
# Create a reference to the *same* docker ID under the second user => same image.
self.assertSameStorage('the-image', first_storage_id, username=ADMIN_ACCESS_USER, repository=SHARED_REPO)
self.assertSameStorage('the-image', first_storage, username=ADMIN_ACCESS_USER, repository=SHARED_REPO)
def test_different_repo_no_shared_access(self):
""" Neither user has access to the other user's repository. """
# Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=RANDOM_USER, repository=RANDOM_REPO)
first_storage = self.createStorage('the-image', username=RANDOM_USER, repository=RANDOM_REPO)
# Create a reference to the *same* docker ID under the second user => new image.
second_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO)
# Verify that the users do not share storage.
self.assertNotEquals(first_storage_id, second_storage_id)
self.assertNotEquals(first_storage, second_storage_id)
def test_public_then_private(self):
""" An image is created publicly then used privately, so it should be shared. """
# Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=PUBLIC_USER, repository=PUBLIC_REPO)
first_storage = self.createStorage('the-image', username=PUBLIC_USER, repository=PUBLIC_REPO)
# Create a reference to the *same* docker ID under the second user => same image, since the first was public.
self.assertSameStorage('the-image', first_storage_id, username=ADMIN_ACCESS_USER, repository=REPO)
self.assertSameStorage('the-image', first_storage, username=ADMIN_ACCESS_USER, repository=REPO)
def test_private_then_public(self):
""" An image is created privately then used publicly, so it should *not* be shared. """
# Create a reference to a new docker ID under the first user => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=REPO)
# Create a reference to the *same* docker ID under the second user => new image, since the first was private.
self.assertDifferentStorage('the-image', first_storage_id, username=PUBLIC_USER, repository=PUBLIC_REPO)
self.assertDifferentStorage('the-image', first_storage, username=PUBLIC_USER, repository=PUBLIC_REPO)
def test_different_repo_with_access(self):
@@ -143,64 +148,71 @@ class TestImageSharing(unittest.TestCase):
be shared since the user has access.
"""
# Create the image in the shared repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=SHARED_REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=SHARED_REPO)
# Create the image in the other user's repo, but since the user (PUBLIC) still has access to the shared
# repository, they should reuse the storage.
self.assertSameStorage('the-image', first_storage_id, username=PUBLIC_USER, repository=PUBLIC_REPO)
self.assertSameStorage('the-image', first_storage, username=PUBLIC_USER, repository=PUBLIC_REPO)
def test_org_access(self):
""" An image is accessible by being a member of the organization. """
# Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under the user's repo, but since the user has access to the organization => shared image.
self.assertSameStorage('the-image', first_storage_id, username=ADMIN_ACCESS_USER, repository=REPO)
self.assertSameStorage('the-image', first_storage, username=ADMIN_ACCESS_USER, repository=REPO)
# Ensure that the user's robot does not have access, since it is not on the permissions list for the repo.
self.assertDifferentStorage('the-image', first_storage_id, username=ADMIN_ROBOT_USER, repository=SHARED_REPO)
self.assertDifferentStorage('the-image', first_storage, username=ADMIN_ROBOT_USER, repository=SHARED_REPO)
def test_org_access_different_user(self):
""" An image is accessible by being a member of the organization. """
# Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under a user's repo, but since the user has access to the organization => shared image.
self.assertSameStorage('the-image', first_storage_id, username=PUBLIC_USER, repository=PUBLIC_REPO)
self.assertSameStorage('the-image', first_storage, username=PUBLIC_USER, repository=PUBLIC_REPO)
# Also verify for reader.
self.assertSameStorage('the-image', first_storage_id, username=READ_ACCESS_USER, repository=PUBLIC_REPO)
self.assertSameStorage('the-image', first_storage, username=READ_ACCESS_USER, repository=PUBLIC_REPO)
def test_org_no_access(self):
""" An image is not accessible if not a member of the organization. """
# Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under a user's repo. Since the user is not a member of the organization => new image.
self.assertDifferentStorage('the-image', first_storage_id, username=RANDOM_USER, repository=RANDOM_REPO)
self.assertDifferentStorage('the-image', first_storage, username=RANDOM_USER, repository=RANDOM_REPO)
def test_org_not_team_member_with_access(self):
""" An image is accessible to a user specifically listed as having permission on the org repo. """
# Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ORG_REPO)
# Create an image under a user's repo. Since the user has read access on that repo, they can see the image => shared image.
self.assertSameStorage('the-image', first_storage_id, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO)
self.assertSameStorage('the-image', first_storage, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO)
def test_org_not_team_member_with_no_access(self):
""" A user that has access to one org repo but not another and is not a team member. """
# Create the new image under the org's repo => new image.
first_storage_id = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ANOTHER_ORG_REPO)
first_storage = self.createStorage('the-image', username=ADMIN_ACCESS_USER, repository=ANOTHER_ORG_REPO)
# Create an image under a user's repo. The user doesn't have access to the repo (ANOTHER_ORG_REPO) so => new image.
self.assertDifferentStorage('the-image', first_storage_id, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO)
self.assertDifferentStorage('the-image', first_storage, username=OUTSIDE_ORG_USER, repository=OUTSIDE_ORG_REPO)
def test_no_link_to_uploading(self):
still_uploading = self.createStorage('an-image', repository=PUBLIC_REPO)
still_uploading.uploading = True
still_uploading.save()
self.assertDifferentStorage('an-image', still_uploading)
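The test above pins down an invariant worth calling out: a storage record may only be reused once its upload has finished. A self-contained sketch of that rule follows; the class and helper are illustrative stand-ins, not code from this diff.

class FakeStorage(object):
  # Minimal stand-in for an ImageStorage row; only the field under test.
  def __init__(self, uploading):
    self.uploading = uploading

def can_link(existing):
  # Mirrors the behavior the test expects: never share a half-uploaded storage.
  return existing is not None and not existing.uploading

assert can_link(FakeStorage(uploading=False))
assert not can_link(FakeStorage(uploading=True))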

View file

@@ -30,7 +30,7 @@ class TestConfig(DefaultConfig):
BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs')
BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef', False]
USERFILES_TYPE = 'FakeUserfiles'
USERFILES_LOCATION = 'local_us'
FEATURE_SUPER_USERS = True
FEATURE_BILLING = True

View file

@@ -20,7 +20,7 @@ query = (Image
.join(ImageStorage)
.switch(Image)
.join(Repository)
.where(Repository.name == 'userportal', Repository.namespace == 'crsinc'))
.where(ImageStorage.uploading == False))
bad_count = 0
good_count = 0
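With the hard-coded repository filter gone, the query now walks every image whose storage has finished uploading. Presumably the two counters are filled in by a verification loop along these lines; the checksum criterion below is a guess for illustration, since the real check sits outside this hunk.

for image in query:
  # Hypothetical verification: treat a storage with a recorded checksum as good.
  if image.storage.checksum:
    good_count += 1
  else:
    bad_count += 1

print 'good: %s, bad: %s' % (good_count, bad_count)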

5
util/backoff.py Normal file
View file

@@ -0,0 +1,5 @@
def exponential_backoff(attempts, scaling_factor, base):
backoff = 5 * (pow(2, attempts) - 1)
backoff_time = backoff * scaling_factor
retry_at = backoff_time/10 + base
return retry_at
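The delay series this yields is 5 * (2^n - 1), scaled by scaling_factor / 10: 0, 5, 15, 35, 75, ... units for n = 0, 1, 2, 3, 4 when scaling_factor is 10. A quick sanity check, with illustrative argument values:

import time

from util.backoff import exponential_backoff

now = time.time()
for attempts in range(5):
  # With scaling_factor=10 the /10 cancels, so the offsets from `now`
  # are exactly 0, 5, 15, 35, 75 seconds.
  print attempts, exponential_backoff(attempts, 10, now) - now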

12
util/collections.py Normal file
View file

@@ -0,0 +1,12 @@
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
@classmethod
def deep_copy(cls, attr_dict):
copy = AttrDict(attr_dict)
for key, value in copy.items():
if isinstance(value, AttrDict):
copy[key] = cls.deep_copy(value)
return copy
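For reference, attribute access and key access on an AttrDict hit the same underlying dict, and deep_copy only recurses into values that are already AttrDicts (nested plain dicts stay shared). A short illustrative usage; the keys and values are made up:

config = AttrDict({'registry': AttrDict({'title': 'Quay.io'})})
assert config.registry.title == config['registry']['title']

copied = AttrDict.deep_copy(config)
copied.registry.title = 'changed'
assert config.registry.title == 'Quay.io'  # the nested AttrDict was copied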

View file

@@ -223,6 +223,13 @@ class DockerfileBuildContext(object):
if self._pull_credentials:
logger.debug('Logging in with pull credentials: %s@%s',
self._pull_credentials['username'], self._pull_credentials['registry'])
self._build_logger('Logging in with pull credentials: %s@%s' % (self._pull_credentials['username'], self._pull_credentials['registry']), log_data={
'phasestep': 'login',
'username': self._pull_credentials['username'],
'registry': self._pull_credentials['registry']
})
self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
registry=self._pull_credentials['registry'], reauth=True)
@@ -233,7 +240,12 @@ class DockerfileBuildContext(object):
raise JobException('Missing FROM command in Dockerfile')
image_and_tag = ':'.join(image_and_tag_tuple)
self._build_logger('Pulling base image: %s' % image_and_tag)
self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
'phasestep': 'pull',
'repo_url': image_and_tag
})
pull_status = self._build_cl.pull(image_and_tag, stream=True)
self.__monitor_completion(pull_status, 'Downloading', self._status, 'pull_completion')
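The new log_data argument attaches structured metadata to the human-readable message, so the build UI can key on the phase. Roughly, one emitted entry for the pull step would carry fields like the following; the values are examples, and the exact envelope _build_logger writes is defined elsewhere in the codebase.

# Illustrative shape of one structured build-log entry (example values only).
example_entry = {
  'message': 'Pulling base image: ubuntu:14.04',
  'log_data': {'phasestep': 'pull', 'repo_url': 'ubuntu:14.04'},
}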
@@ -495,7 +507,7 @@ class DockerfileBuildWorker(Worker):
job_config = json.loads(repository_build.job_config)
resource_url = user_files.get_file_url(repository_build.resource_key)
resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
tag_names = job_config['docker_tags']
build_subdir = job_config['build_subdir']
repo = job_config['repository']
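Passing requires_cors=False reflects that the worker downloads the build resource server-side, where browser CORS headers are irrelevant. A minimal sketch of that consumption, assuming the requests library as used elsewhere in this codebase:

import requests

# The worker fetches the uploaded build package itself; no browser is
# involved, so the signed URL does not need CORS headers.
response = requests.get(resource_url, stream=True)
response.raise_for_status()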