diff --git a/Dockerfile.buildworker b/Dockerfile.buildworker index 04efe38f0..159c7867c 100644 --- a/Dockerfile.buildworker +++ b/Dockerfile.buildworker @@ -1,10 +1,10 @@ -FROM phusion/baseimage:0.9.11 +FROM phusion/baseimage:0.9.13 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root # Install the dependencies. -RUN apt-get update # 21AUG2014 +RUN apt-get update # 10SEP2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev diff --git a/Dockerfile.web b/Dockerfile.web index e1d253632..b24694b42 100644 --- a/Dockerfile.web +++ b/Dockerfile.web @@ -1,10 +1,10 @@ -FROM phusion/baseimage:0.9.11 +FROM phusion/baseimage:0.9.13 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root # Install the dependencies. -RUN apt-get update # 21AUG2014 +RUN apt-get update # 10SEP2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev diff --git a/app.py b/app.py index 81c59a30c..bcc4e86d7 100644 --- a/app.py +++ b/app.py @@ -88,7 +88,7 @@ Principal(app, use_sessions=False) login_manager = LoginManager(app) mail = Mail(app) storage = Storage(app) -userfiles = Userfiles(app) +userfiles = Userfiles(app, storage) analytics = Analytics(app) billing = Billing(app) sentry = Sentry(app) diff --git a/conf/gunicorn_config.py b/conf/gunicorn_config.py index 4d9d50499..ca8ad5363 100644 --- a/conf/gunicorn_config.py +++ b/conf/gunicorn_config.py @@ -1,5 +1,5 @@ bind = 'unix:/tmp/gunicorn.sock' -workers = 8 +workers = 16 worker_class = 'gevent' timeout = 2000 logconfig = 'conf/logging.conf' diff --git a/config.py b/config.py index 3712055d2..ffcf7f79e 100644 --- a/config.py +++ b/config.py @@ -19,7 +19,7 @@ def build_requests_session(): CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID', 'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY', 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE', - 'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT'] + 'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID'] def getFrontendVisibleConfig(config_dict): @@ -89,10 +89,6 @@ class DefaultConfig(object): # Stripe config BILLING_TYPE = 'FakeStripe' - # Userfiles - USERFILES_TYPE = 'LocalUserfiles' - USERFILES_PATH = 'test/data/registry/userfiles' - # Analytics ANALYTICS_TYPE = 'FakeAnalytics' @@ -115,6 +111,13 @@ class DefaultConfig(object): GITHUB_LOGIN_CLIENT_ID = '' GITHUB_LOGIN_CLIENT_SECRET = '' + # Google Config. + GOOGLE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token' + GOOGLE_USER_URL = 'https://www.googleapis.com/oauth2/v1/userinfo' + + GOOGLE_LOGIN_CLIENT_ID = '' + GOOGLE_LOGIN_CLIENT_SECRET = '' + # Requests based HTTP client with a large request pool HTTPCLIENT = build_requests_session() @@ -144,6 +147,9 @@ class DefaultConfig(object): # Feature Flag: Whether GitHub login is supported. FEATURE_GITHUB_LOGIN = False + # Feature Flag: Whether Google login is supported. 
+ FEATURE_GOOGLE_LOGIN = False + # Feature flag, whether to enable olark chat FEATURE_OLARK_CHAT = False @@ -162,3 +168,7 @@ class DefaultConfig(object): } DISTRIBUTED_STORAGE_PREFERENCE = ['local_us'] + + # Userfiles + USERFILES_LOCATION = 'local_us' + USERFILES_PATH = 'userfiles/' diff --git a/data/billing.py b/data/billing.py index 4847dd3f8..8c604aac2 100644 --- a/data/billing.py +++ b/data/billing.py @@ -3,6 +3,8 @@ import stripe from datetime import datetime, timedelta from calendar import timegm +from util.collections import AttrDict + PLANS = [ # Deprecated Plans { @@ -118,20 +120,6 @@ def get_plan(plan_id): return None -class AttrDict(dict): - def __init__(self, *args, **kwargs): - super(AttrDict, self).__init__(*args, **kwargs) - self.__dict__ = self - - @classmethod - def deep_copy(cls, attr_dict): - copy = AttrDict(attr_dict) - for key, value in copy.items(): - if isinstance(value, AttrDict): - copy[key] = cls.deep_copy(value) - return copy - - class FakeStripe(object): class Customer(AttrDict): FAKE_PLAN = AttrDict({ diff --git a/data/buildlogs.py b/data/buildlogs.py index 2ccd03899..8f184de27 100644 --- a/data/buildlogs.py +++ b/data/buildlogs.py @@ -25,7 +25,7 @@ class RedisBuildLogs(object): """ return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - def append_log_message(self, build_id, log_message, log_type=None): + def append_log_message(self, build_id, log_message, log_type=None, log_data=None): """ Wraps the message in an envelope and push it to the end of the log entry list and returns the index at which it was inserted. @@ -37,6 +37,9 @@ class RedisBuildLogs(object): if log_type: log_obj['type'] = log_type + if log_data: + log_obj['data'] = log_data + return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1 def get_log_entries(self, build_id, start_index): @@ -106,4 +109,4 @@ class BuildLogs(object): return buildlogs def __getattr__(self, name): - return getattr(self.state, name, None) \ No newline at end of file + return getattr(self.state, name, None) diff --git a/data/database.py b/data/database.py index 72c2c3e30..fa36b2e47 100644 --- a/data/database.py +++ b/data/database.py @@ -76,6 +76,8 @@ class User(BaseModel): organization = BooleanField(default=False, index=True) robot = BooleanField(default=False, index=True) invoice_email = BooleanField(default=False) + invalid_login_attempts = IntegerField(default=0) + last_invalid_login = DateTimeField(default=datetime.utcnow) class TeamRole(BaseModel): @@ -127,6 +129,7 @@ class FederatedLogin(BaseModel): user = ForeignKeyField(User, index=True) service = ForeignKeyField(LoginService, index=True) service_ident = CharField() + metadata_json = TextField(default='{}') class Meta: database = db diff --git a/data/migrations/env.py b/data/migrations/env.py index c267c2f50..863e3d98f 100644 --- a/data/migrations/env.py +++ b/data/migrations/env.py @@ -8,6 +8,7 @@ from peewee import SqliteDatabase from data.database import all_models, db from app import app from data.model.sqlalchemybridge import gen_sqlalchemy_metadata +from util.collections import AttrDict # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
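Note: AttrDict is moving out of data/billing.py and into util/collections (the new module itself is not part of this diff) so that data/migrations/env.py can share it. Assuming the class is carried over unchanged from the removed billing.py lines above, this is the behavior the migration scripts below rely on; a minimal sketch:

class AttrDict(dict):
  def __init__(self, *args, **kwargs):
    super(AttrDict, self).__init__(*args, **kwargs)
    self.__dict__ = self

  @classmethod
  def deep_copy(cls, attr_dict):
    copy = AttrDict(attr_dict)
    for key, value in copy.items():
      if isinstance(value, AttrDict):
        copy[key] = cls.deep_copy(value)
    return copy

# Keys double as attributes, which is what lets migrations write
# tables.logentrykind instead of schema.tables['logentrykind']:
tables = AttrDict({'logentrykind': object()})
assert tables.logentrykind is tables['logentrykind']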
@@ -23,6 +24,7 @@ fileConfig(config.config_file_name) # from myapp import mymodel # target_metadata = mymodel.Base.metadata target_metadata = gen_sqlalchemy_metadata(all_models) +tables = AttrDict(target_metadata.tables) # other values from the config, defined by the needs of env.py, # can be acquired: @@ -45,7 +47,7 @@ def run_migrations_offline(): context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True) with context.begin_transaction(): - context.run_migrations() + context.run_migrations(tables=tables) def run_migrations_online(): """Run migrations in 'online' mode. @@ -72,7 +74,7 @@ def run_migrations_online(): try: with context.begin_transaction(): - context.run_migrations() + context.run_migrations(tables=tables) finally: connection.close() diff --git a/data/migrations/script.py.mako b/data/migrations/script.py.mako index 95702017e..1b92f9f48 100644 --- a/data/migrations/script.py.mako +++ b/data/migrations/script.py.mako @@ -14,9 +14,9 @@ from alembic import op import sqlalchemy as sa ${imports if imports else ""} -def upgrade(): +def upgrade(tables): ${upgrades if upgrades else "pass"} -def downgrade(): +def downgrade(tables): ${downgrades if downgrades else "pass"} diff --git a/data/migrations/versions/1594a74a74ca_add_metadata_field_to_external_logins.py b/data/migrations/versions/1594a74a74ca_add_metadata_field_to_external_logins.py new file mode 100644 index 000000000..2f6c60706 --- /dev/null +++ b/data/migrations/versions/1594a74a74ca_add_metadata_field_to_external_logins.py @@ -0,0 +1,35 @@ +"""add metadata field to external logins + +Revision ID: 1594a74a74ca +Revises: f42b0ea7a4d +Create Date: 2014-09-04 18:17:35.205698 + +""" + +# revision identifiers, used by Alembic. +revision = '1594a74a74ca' +down_revision = 'f42b0ea7a4d' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.add_column('federatedlogin', sa.Column('metadata_json', sa.Text(), nullable=False)) + ### end Alembic commands ### + + op.bulk_insert(tables.loginservice, + [ + {'id':4, 'name':'google'}, + ]) + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.drop_column('federatedlogin', 'metadata_json') + ### end Alembic commands ### + + op.execute( + (tables.loginservice.delete() + .where(tables.loginservice.c.name == op.inline_literal('google'))) + ) diff --git a/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py b/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py index ea36e3f57..d50c3a592 100644 --- a/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py +++ b/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py @@ -14,7 +14,7 @@ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql -def upgrade(): +def upgrade(tables): ### commands auto generated by Alembic - please adjust! ### op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice') op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True) @@ -34,7 +34,7 @@ def upgrade(): ### end Alembic commands ### -def downgrade(): +def downgrade(tables): ### commands auto generated by Alembic - please adjust! 
### op.drop_index('visibility_name', table_name='visibility') op.create_index('visibility_name', 'visibility', ['name'], unique=False) diff --git a/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py b/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py index 18c8bf654..e3be811b6 100644 --- a/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py +++ b/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py @@ -13,12 +13,8 @@ down_revision = '4b7ef0c7bdb2' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql -from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from data.database import all_models - -def upgrade(): - schema = gen_sqlalchemy_metadata(all_models) +def upgrade(tables): ### commands auto generated by Alembic - please adjust! ### op.create_table('externalnotificationmethod', sa.Column('id', sa.Integer(), nullable=False), @@ -26,7 +22,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id') ) op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True) - op.bulk_insert(schema.tables['externalnotificationmethod'], + op.bulk_insert(tables.externalnotificationmethod, [ {'id':1, 'name':'quay_notification'}, {'id':2, 'name':'email'}, @@ -38,7 +34,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id') ) op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True) - op.bulk_insert(schema.tables['externalnotificationevent'], + op.bulk_insert(tables.externalnotificationevent, [ {'id':1, 'name':'repo_push'}, {'id':2, 'name':'build_queued'}, @@ -77,7 +73,7 @@ def upgrade(): op.add_column(u'notification', sa.Column('dismissed', sa.Boolean(), nullable=False)) # Manually add the new notificationkind types - op.bulk_insert(schema.tables['notificationkind'], + op.bulk_insert(tables.notificationkind, [ {'id':5, 'name':'repo_push'}, {'id':6, 'name':'build_queued'}, @@ -87,7 +83,7 @@ def upgrade(): ]) # Manually add the new logentrykind types - op.bulk_insert(schema.tables['logentrykind'], + op.bulk_insert(tables.logentrykind, [ {'id':39, 'name':'add_repo_notification'}, {'id':40, 'name':'delete_repo_notification'}, @@ -97,61 +93,49 @@ def upgrade(): ### end Alembic commands ### -def downgrade(): - schema = gen_sqlalchemy_metadata(all_models) - +def downgrade(tables): ### commands auto generated by Alembic - please adjust! 
### op.drop_column(u'notification', 'dismissed') - op.drop_index('repositorynotification_uuid', table_name='repositorynotification') - op.drop_index('repositorynotification_repository_id', table_name='repositorynotification') - op.drop_index('repositorynotification_method_id', table_name='repositorynotification') - op.drop_index('repositorynotification_event_id', table_name='repositorynotification') op.drop_table('repositorynotification') - op.drop_index('repositoryauthorizedemail_repository_id', table_name='repositoryauthorizedemail') - op.drop_index('repositoryauthorizedemail_email_repository_id', table_name='repositoryauthorizedemail') - op.drop_index('repositoryauthorizedemail_code', table_name='repositoryauthorizedemail') op.drop_table('repositoryauthorizedemail') - op.drop_index('externalnotificationevent_name', table_name='externalnotificationevent') op.drop_table('externalnotificationevent') - op.drop_index('externalnotificationmethod_name', table_name='externalnotificationmethod') op.drop_table('externalnotificationmethod') # Manually remove the notificationkind and logentrykind types - notificationkind = schema.tables['notificationkind'] op.execute( - (notificationkind.delete() - .where(notificationkind.c.name == op.inline_literal('repo_push'))) + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('repo_push'))) ) op.execute( - (notificationkind.delete() - .where(notificationkind.c.name == op.inline_literal('build_queued'))) + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('build_queued'))) ) op.execute( - (notificationkind.delete() - .where(notificationkind.c.name == op.inline_literal('build_start'))) + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('build_start'))) ) op.execute( - (notificationkind.delete() - .where(notificationkind.c.name == op.inline_literal('build_success'))) + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('build_success'))) ) op.execute( - (notificationkind.delete() - .where(notificationkind.c.name == op.inline_literal('build_failure'))) + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('build_failure'))) ) op.execute( - (logentrykind.delete() - .where(logentrykind.c.name == op.inline_literal('add_repo_notification'))) + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('add_repo_notification'))) ) op.execute( - (logentrykind.delete() - .where(logentrykind.c.name == op.inline_literal('delete_repo_notification'))) + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('delete_repo_notification'))) ) ### end Alembic commands ### diff --git a/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py b/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py index 6ee041e4c..f676bf972 100644 --- a/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py +++ b/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py @@ -13,25 +13,17 @@ down_revision = '82297d834ad' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql -from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from data.database import all_models - -def upgrade(): - schema = gen_sqlalchemy_metadata(all_models) - - op.bulk_insert(schema.tables['logentrykind'], +def 
upgrade(tables): + op.bulk_insert(tables.logentrykind, [ {'id': 41, 'name':'regenerate_robot_token'}, ]) -def downgrade(): - schema = gen_sqlalchemy_metadata(all_models) - - logentrykind = schema.tables['logentrykind'] +def downgrade(tables): op.execute( - (logentrykind.delete() - .where(logentrykind.c.name == op.inline_literal('regenerate_robot_token'))) + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('regenerate_robot_token'))) ) diff --git a/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py b/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py index 726145167..eaa687c73 100644 --- a/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py +++ b/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py @@ -18,13 +18,13 @@ def get_id(query): conn = op.get_bind() return list(conn.execute(query, ()).fetchall())[0][0] -def upgrade(): +def upgrade(tables): conn = op.get_bind() event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1') method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1') conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id)) -def downgrade(): +def downgrade(tables): conn = op.get_bind() event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1') method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1') diff --git a/data/migrations/versions/4a0c94399f38_add_new_notification_kinds.py b/data/migrations/versions/4a0c94399f38_add_new_notification_kinds.py new file mode 100644 index 000000000..6b4160b19 --- /dev/null +++ b/data/migrations/versions/4a0c94399f38_add_new_notification_kinds.py @@ -0,0 +1,39 @@ +"""add new notification kinds + +Revision ID: 4a0c94399f38 +Revises: 1594a74a74ca +Create Date: 2014-08-28 16:17:01.898269 + +""" + +# revision identifiers, used by Alembic. 
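Note: the script.py.mako change above is what makes the new signature work end to end. Alembic's EnvironmentContext.run_migrations(**kw) passes extra keyword arguments through to each revision script's upgrade()/downgrade(), so env.py can hand every migration the same AttrDict of tables; a minimal sketch mirroring the env.py hunks above:

from alembic import context, op
from data.database import all_models
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from util.collections import AttrDict

# env.py builds the table map once per run...
target_metadata = gen_sqlalchemy_metadata(all_models)
tables = AttrDict(target_metadata.tables)

with context.begin_transaction():
  context.run_migrations(tables=tables)  # forwarded to every revision script

# ...and each revision file consumes it directly, as in 1594a74a74ca above:
def upgrade(tables):
  op.bulk_insert(tables.loginservice, [{'id': 4, 'name': 'google'}])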
+revision = '4a0c94399f38' +down_revision = '1594a74a74ca' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +def upgrade(tables): + op.bulk_insert(tables.externalnotificationmethod, + [ + {'id':4, 'name':'flowdock'}, + {'id':5, 'name':'hipchat'}, + {'id':6, 'name':'slack'}, + ]) + +def downgrade(tables): + op.execute( + (tables.externalnotificationmethod.delete() + .where(tables.externalnotificationmethod.c.name == op.inline_literal('flowdock'))) + ) + + op.execute( + (tables.externalnotificationmethod.delete() + .where(tables.externalnotificationmethod.c.name == op.inline_literal('hipchat'))) + ) + + op.execute( + (tables.externalnotificationmethod.delete() + .where(tables.externalnotificationmethod.c.name == op.inline_literal('slack'))) + ) diff --git a/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py b/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py index 9e5fff425..9f48ca6c6 100644 --- a/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py +++ b/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py @@ -11,23 +11,18 @@ revision = '4b7ef0c7bdb2' down_revision = 'bcdde200a1b' from alembic import op -from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from data.database import all_models import sqlalchemy as sa - -def upgrade(): - schema = gen_sqlalchemy_metadata(all_models) - op.bulk_insert(schema.tables['notificationkind'], +def upgrade(tables): + op.bulk_insert(tables.notificationkind, [ {'id':4, 'name':'maintenance'}, ]) -def downgrade(): - notificationkind = schema.tables['notificationkind'] +def downgrade(tables): op.execute( - (notificationkind.delete() - .where(notificationkind.c.name == op.inline_literal('maintenance'))) + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('maintenance'))) ) diff --git a/data/migrations/versions/4fdb65816b8d_add_brute_force_prevention_metadata_to_.py b/data/migrations/versions/4fdb65816b8d_add_brute_force_prevention_metadata_to_.py new file mode 100644 index 000000000..1ce802eca --- /dev/null +++ b/data/migrations/versions/4fdb65816b8d_add_brute_force_prevention_metadata_to_.py @@ -0,0 +1,28 @@ +"""Add brute force prevention metadata to the user table. + +Revision ID: 4fdb65816b8d +Revises: 43e943c0639f +Create Date: 2014-09-03 12:35:33.722435 + +""" + +# revision identifiers, used by Alembic. +revision = '4fdb65816b8d' +down_revision = '43e943c0639f' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0")) + op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False, server_default=sa.func.now())) + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('user', 'last_invalid_login') + op.drop_column('user', 'invalid_login_attempts') + ### end Alembic commands ### diff --git a/data/migrations/versions/5a07499ce53f_set_up_initial_database.py b/data/migrations/versions/5a07499ce53f_set_up_initial_database.py index ffc9d28e6..f67224645 100644 --- a/data/migrations/versions/5a07499ce53f_set_up_initial_database.py +++ b/data/migrations/versions/5a07499ce53f_set_up_initial_database.py @@ -11,14 +11,9 @@ revision = '5a07499ce53f' down_revision = None from alembic import op -from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from data.database import all_models import sqlalchemy as sa - -def upgrade(): - schema = gen_sqlalchemy_metadata(all_models) - +def upgrade(tables): ### commands auto generated by Alembic - please adjust! ### op.create_table('loginservice', sa.Column('id', sa.Integer(), nullable=False), @@ -27,7 +22,7 @@ def upgrade(): ) op.create_index('loginservice_name', 'loginservice', ['name'], unique=True) - op.bulk_insert(schema.tables['loginservice'], + op.bulk_insert(tables.loginservice, [ {'id':1, 'name':'github'}, {'id':2, 'name':'quayrobot'}, @@ -66,7 +61,7 @@ def upgrade(): ) op.create_index('role_name', 'role', ['name'], unique=False) - op.bulk_insert(schema.tables['role'], + op.bulk_insert(tables.role, [ {'id':1, 'name':'admin'}, {'id':2, 'name':'write'}, @@ -80,7 +75,7 @@ def upgrade(): ) op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False) - op.bulk_insert(schema.tables['logentrykind'], + op.bulk_insert(tables.logentrykind, [ {'id':1, 'name':'account_change_plan'}, {'id':2, 'name':'account_change_cc'}, @@ -136,7 +131,7 @@ def upgrade(): ) op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False) - op.bulk_insert(schema.tables['notificationkind'], + op.bulk_insert(tables.notificationkind, [ {'id':1, 'name':'password_required'}, {'id':2, 'name':'over_private_usage'}, @@ -150,7 +145,7 @@ def upgrade(): ) op.create_index('teamrole_name', 'teamrole', ['name'], unique=False) - op.bulk_insert(schema.tables['teamrole'], + op.bulk_insert(tables.teamrole, [ {'id':1, 'name':'admin'}, {'id':2, 'name':'creator'}, @@ -164,7 +159,7 @@ def upgrade(): ) op.create_index('visibility_name', 'visibility', ['name'], unique=False) - op.bulk_insert(schema.tables['visibility'], + op.bulk_insert(tables.visibility, [ {'id':1, 'name':'public'}, {'id':2, 'name':'private'}, @@ -194,7 +189,7 @@ def upgrade(): ) op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False) - op.bulk_insert(schema.tables['buildtriggerservice'], + op.bulk_insert(tables.buildtriggerservice, [ {'id':1, 'name':'github'}, ]) @@ -490,119 +485,34 @@ def upgrade(): ### end Alembic commands ### -def downgrade(): +def downgrade(tables): ### commands auto generated by Alembic - please adjust! 
### - op.drop_index('repositorybuild_uuid', table_name='repositorybuild') - op.drop_index('repositorybuild_trigger_id', table_name='repositorybuild') - op.drop_index('repositorybuild_resource_key', table_name='repositorybuild') - op.drop_index('repositorybuild_repository_id', table_name='repositorybuild') - op.drop_index('repositorybuild_pull_robot_id', table_name='repositorybuild') - op.drop_index('repositorybuild_access_token_id', table_name='repositorybuild') op.drop_table('repositorybuild') - op.drop_index('repositorybuildtrigger_write_token_id', table_name='repositorybuildtrigger') - op.drop_index('repositorybuildtrigger_service_id', table_name='repositorybuildtrigger') - op.drop_index('repositorybuildtrigger_repository_id', table_name='repositorybuildtrigger') - op.drop_index('repositorybuildtrigger_pull_robot_id', table_name='repositorybuildtrigger') - op.drop_index('repositorybuildtrigger_connected_user_id', table_name='repositorybuildtrigger') op.drop_table('repositorybuildtrigger') - op.drop_index('logentry_repository_id', table_name='logentry') - op.drop_index('logentry_performer_id', table_name='logentry') - op.drop_index('logentry_kind_id', table_name='logentry') - op.drop_index('logentry_datetime', table_name='logentry') - op.drop_index('logentry_account_id', table_name='logentry') - op.drop_index('logentry_access_token_id', table_name='logentry') op.drop_table('logentry') - op.drop_index('repositorytag_repository_id_name', table_name='repositorytag') - op.drop_index('repositorytag_repository_id', table_name='repositorytag') - op.drop_index('repositorytag_image_id', table_name='repositorytag') op.drop_table('repositorytag') - op.drop_index('permissionprototype_role_id', table_name='permissionprototype') - op.drop_index('permissionprototype_org_id_activating_user_id', table_name='permissionprototype') - op.drop_index('permissionprototype_org_id', table_name='permissionprototype') - op.drop_index('permissionprototype_delegate_user_id', table_name='permissionprototype') - op.drop_index('permissionprototype_delegate_team_id', table_name='permissionprototype') - op.drop_index('permissionprototype_activating_user_id', table_name='permissionprototype') op.drop_table('permissionprototype') - op.drop_index('image_storage_id', table_name='image') - op.drop_index('image_repository_id_docker_image_id', table_name='image') - op.drop_index('image_repository_id', table_name='image') - op.drop_index('image_ancestors', table_name='image') op.drop_table('image') - op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode') - op.drop_index('oauthauthorizationcode_application_id', table_name='oauthauthorizationcode') op.drop_table('oauthauthorizationcode') - op.drop_index('webhook_repository_id', table_name='webhook') - op.drop_index('webhook_public_id', table_name='webhook') op.drop_table('webhook') - op.drop_index('teammember_user_id_team_id', table_name='teammember') - op.drop_index('teammember_user_id', table_name='teammember') - op.drop_index('teammember_team_id', table_name='teammember') op.drop_table('teammember') - op.drop_index('oauthaccesstoken_uuid', table_name='oauthaccesstoken') - op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken') - op.drop_index('oauthaccesstoken_authorized_user_id', table_name='oauthaccesstoken') - op.drop_index('oauthaccesstoken_application_id', table_name='oauthaccesstoken') - op.drop_index('oauthaccesstoken_access_token', table_name='oauthaccesstoken') op.drop_table('oauthaccesstoken') - 
op.drop_index('repositorypermission_user_id_repository_id', table_name='repositorypermission') - op.drop_index('repositorypermission_user_id', table_name='repositorypermission') - op.drop_index('repositorypermission_team_id_repository_id', table_name='repositorypermission') - op.drop_index('repositorypermission_team_id', table_name='repositorypermission') - op.drop_index('repositorypermission_role_id', table_name='repositorypermission') - op.drop_index('repositorypermission_repository_id', table_name='repositorypermission') op.drop_table('repositorypermission') - op.drop_index('accesstoken_role_id', table_name='accesstoken') - op.drop_index('accesstoken_repository_id', table_name='accesstoken') - op.drop_index('accesstoken_code', table_name='accesstoken') op.drop_table('accesstoken') - op.drop_index('repository_visibility_id', table_name='repository') - op.drop_index('repository_namespace_name', table_name='repository') op.drop_table('repository') - op.drop_index('team_role_id', table_name='team') - op.drop_index('team_organization_id', table_name='team') - op.drop_index('team_name_organization_id', table_name='team') - op.drop_index('team_name', table_name='team') op.drop_table('team') - op.drop_index('emailconfirmation_user_id', table_name='emailconfirmation') - op.drop_index('emailconfirmation_code', table_name='emailconfirmation') op.drop_table('emailconfirmation') - op.drop_index('notification_uuid', table_name='notification') - op.drop_index('notification_target_id', table_name='notification') - op.drop_index('notification_kind_id', table_name='notification') - op.drop_index('notification_created', table_name='notification') op.drop_table('notification') - op.drop_index('oauthapplication_organization_id', table_name='oauthapplication') - op.drop_index('oauthapplication_client_id', table_name='oauthapplication') op.drop_table('oauthapplication') - op.drop_index('federatedlogin_user_id', table_name='federatedlogin') - op.drop_index('federatedlogin_service_id_user_id', table_name='federatedlogin') - op.drop_index('federatedlogin_service_id_service_ident', table_name='federatedlogin') - op.drop_index('federatedlogin_service_id', table_name='federatedlogin') op.drop_table('federatedlogin') - op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice') op.drop_table('buildtriggerservice') - op.drop_index('user_username', table_name='user') - op.drop_index('user_stripe_id', table_name='user') - op.drop_index('user_robot', table_name='user') - op.drop_index('user_organization', table_name='user') - op.drop_index('user_email', table_name='user') op.drop_table('user') - op.drop_index('visibility_name', table_name='visibility') op.drop_table('visibility') - op.drop_index('teamrole_name', table_name='teamrole') op.drop_table('teamrole') - op.drop_index('notificationkind_name', table_name='notificationkind') op.drop_table('notificationkind') - op.drop_index('logentrykind_name', table_name='logentrykind') op.drop_table('logentrykind') - op.drop_index('role_name', table_name='role') op.drop_table('role') - op.drop_index('queueitem_queue_name', table_name='queueitem') - op.drop_index('queueitem_processing_expires', table_name='queueitem') - op.drop_index('queueitem_available_after', table_name='queueitem') - op.drop_index('queueitem_available', table_name='queueitem') op.drop_table('queueitem') op.drop_table('imagestorage') - op.drop_index('loginservice_name', table_name='loginservice') op.drop_table('loginservice') ### end Alembic commands ### diff --git 
a/data/migrations/versions/82297d834ad_add_us_west_location.py b/data/migrations/versions/82297d834ad_add_us_west_location.py index 59eb1f800..b939a939e 100644 --- a/data/migrations/versions/82297d834ad_add_us_west_location.py +++ b/data/migrations/versions/82297d834ad_add_us_west_location.py @@ -13,24 +13,17 @@ down_revision = '47670cbeced' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql -from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from data.database import all_models - -def upgrade(): - schema = gen_sqlalchemy_metadata(all_models) - - op.bulk_insert(schema.tables['imagestoragelocation'], +def upgrade(tables): + op.bulk_insert(tables.imagestoragelocation, [ {'id':8, 'name':'s3_us_west_1'}, ]) -def downgrade(): - schema = gen_sqlalchemy_metadata(all_models) - +def downgrade(tables): op.execute( - (imagestoragelocation.delete() - .where(imagestoragelocation.c.name == op.inline_literal('s3_us_west_1'))) + (tables.imagestoragelocation.delete() + .where(tables.imagestoragelocation.c.name == op.inline_literal('s3_us_west_1'))) ) diff --git a/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py b/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py index eda4b2840..9fc433126 100644 --- a/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py +++ b/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py @@ -11,14 +11,10 @@ revision = 'bcdde200a1b' down_revision = '201d55b38649' from alembic import op -from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from data.database import all_models import sqlalchemy as sa -def upgrade(): - schema = gen_sqlalchemy_metadata(all_models) - +def upgrade(tables): ### commands auto generated by Alembic - please adjust! ### op.create_table('imagestoragelocation', sa.Column('id', sa.Integer(), nullable=False), @@ -27,7 +23,7 @@ def upgrade(): ) op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True) - op.bulk_insert(schema.tables['imagestoragelocation'], + op.bulk_insert(tables.imagestoragelocation, [ {'id':1, 'name':'s3_us_east_1'}, {'id':2, 'name':'s3_eu_west_1'}, @@ -52,12 +48,8 @@ def upgrade(): ### end Alembic commands ### -def downgrade(): +def downgrade(tables): ### commands auto generated by Alembic - please adjust! ### - op.drop_index('imagestorageplacement_storage_id_location_id', table_name='imagestorageplacement') - op.drop_index('imagestorageplacement_storage_id', table_name='imagestorageplacement') - op.drop_index('imagestorageplacement_location_id', table_name='imagestorageplacement') op.drop_table('imagestorageplacement') - op.drop_index('imagestoragelocation_name', table_name='imagestoragelocation') op.drop_table('imagestoragelocation') ### end Alembic commands ### diff --git a/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py b/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py new file mode 100644 index 000000000..9ceab4218 --- /dev/null +++ b/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py @@ -0,0 +1,35 @@ +"""Remove the old webhooks table. + +Revision ID: f42b0ea7a4d +Revises: 4fdb65816b8d +Create Date: 2014-09-03 13:43:23.391464 + +""" + +# revision identifiers, used by Alembic. 
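Note: the verify_user() changes in data/model/legacy.py below call exponential_backoff() from util/backoff.py, a module that is not included in this diff. A plausible sketch consistent with the call site, where scale is EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1); the real implementation may differ:

from datetime import datetime, timedelta

def exponential_backoff(attempts, scale, last_invalid_login):
  # Hypothetical: the penalty doubles with each consecutive failure,
  # measured from the most recent failed attempt. verify_user() compares
  # the returned datetime against utcnow() and raises
  # TooManyLoginAttemptsException (carrying retry_after in seconds)
  # while it lies in the future.
  return last_invalid_login + (scale * (2 ** attempts))

# e.g. after three consecutive failures with the one-second scale:
can_retry_at = exponential_backoff(3, timedelta(seconds=1), datetime.utcnow())
# can_retry_at is utcnow() + 8 seconds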
+revision = 'f42b0ea7a4d' +down_revision = '4fdb65816b8d' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.drop_table('webhook') + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.create_table('webhook', + sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), + sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False), + sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), + sa.Column('parameters', mysql.LONGTEXT(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'), + sa.PrimaryKeyConstraint('id'), + mysql_default_charset=u'latin1', + mysql_engine=u'InnoDB' + ) + ### end Alembic commands ### diff --git a/data/model/legacy.py b/data/model/legacy.py index dccff5a61..3df2669aa 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -1,12 +1,17 @@ import bcrypt import logging -import datetime import dateutil.parser import json +from datetime import datetime, timedelta + from data.database import * from util.validation import * from util.names import format_robot_username +from util.backoff import exponential_backoff + + +EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1) logger = logging.getLogger(__name__) @@ -75,6 +80,12 @@ class UserAlreadyInTeam(DataModelException): pass +class TooManyLoginAttemptsException(Exception): + def __init__(self, message, retry_after): + super(TooManyLoginAttemptsException, self).__init__(message) + self.retry_after = retry_after + + def is_create_user_allowed(): return True @@ -413,7 +424,8 @@ def set_team_org_permission(team, team_role_name, set_by_username): return team -def create_federated_user(username, email, service_name, service_id, set_password_notification): +def create_federated_user(username, email, service_name, service_id, + set_password_notification, metadata={}): if not is_create_user_allowed(): raise TooManyUsersException() @@ -423,7 +435,8 @@ def create_federated_user(username, email, service_name, service_id, set_passwor service = LoginService.get(LoginService.name == service_name) FederatedLogin.create(user=new_user, service=service, - service_ident=service_id) + service_ident=service_id, + metadata_json=json.dumps(metadata)) if set_password_notification: create_notification('password_required', new_user) @@ -431,9 +444,10 @@ def create_federated_user(username, email, service_name, service_id, set_passwor return new_user -def attach_federated_login(user, service_name, service_id): +def attach_federated_login(user, service_name, service_id, metadata={}): service = LoginService.get(LoginService.name == service_name) - FederatedLogin.create(user=user, service=service, service_ident=service_id) + FederatedLogin.create(user=user, service=service, service_ident=service_id, + metadata_json=json.dumps(metadata)) return user @@ -452,7 +466,7 @@ def verify_federated_login(service_name, service_id): def list_federated_logins(user): selected = FederatedLogin.select(FederatedLogin.service_ident, - LoginService.name) + LoginService.name, FederatedLogin.metadata_json) joined = selected.join(LoginService) return joined.where(LoginService.name != 'quayrobot', FederatedLogin.user == user) @@ -588,11 +602,30 @@ def verify_user(username_or_email, password): except User.DoesNotExist: return None + now = 
datetime.utcnow() + + if fetched.invalid_login_attempts > 0: + can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE, + fetched.last_invalid_login) + + if can_retry_at > now: + retry_after = can_retry_at - now + raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds()) + if (fetched.password_hash and bcrypt.hashpw(password, fetched.password_hash) == fetched.password_hash): + + if fetched.invalid_login_attempts > 0: + fetched.invalid_login_attempts = 0 + fetched.save() + return fetched + fetched.invalid_login_attempts += 1 + fetched.last_invalid_login = now + fetched.save() + # We weren't able to authorize the user return None @@ -1078,7 +1111,8 @@ def find_create_or_link_image(docker_image_id, repository, username, translation .join(Repository) .join(Visibility) .switch(Repository) - .join(RepositoryPermission, JOIN_LEFT_OUTER)) + .join(RepositoryPermission, JOIN_LEFT_OUTER) + .where(ImageStorage.uploading == False)) query = (_filter_to_repos_for_user(query, username) .where(Image.docker_image_id == docker_image_id)) diff --git a/data/userfiles.py b/data/userfiles.py index 79fbcb507..950c4dd60 100644 --- a/data/userfiles.py +++ b/data/userfiles.py @@ -1,110 +1,35 @@ -import boto import os import logging -import hashlib import magic -from boto.s3.key import Key from uuid import uuid4 from flask import url_for, request, send_file, make_response, abort from flask.views import View +from _pyio import BufferedReader logger = logging.getLogger(__name__) -class FakeUserfiles(object): - def prepare_for_drop(self, mime_type): - return ('http://fake/url', uuid4()) - - def store_file(self, file_like_obj, content_type): - raise NotImplementedError() - - def get_file_url(self, file_id, expires_in=300): - return ('http://fake/url') - - def get_file_checksum(self, file_id): - return 'abcdefg' - - -class S3FileWriteException(Exception): - pass - - -class S3Userfiles(object): - def __init__(self, path, s3_access_key, s3_secret_key, bucket_name): - self._initialized = False - self._bucket_name = bucket_name - self._access_key = s3_access_key - self._secret_key = s3_secret_key - self._prefix = path - self._s3_conn = None - self._bucket = None - - def _initialize_s3(self): - if not self._initialized: - self._s3_conn = boto.connect_s3(self._access_key, self._secret_key) - self._bucket = self._s3_conn.get_bucket(self._bucket_name) - self._initialized = True - - def prepare_for_drop(self, mime_type): - """ Returns a signed URL to upload a file to our bucket. 
""" - self._initialize_s3() - logger.debug('Requested upload url with content type: %s' % mime_type) - file_id = str(uuid4()) - full_key = os.path.join(self._prefix, file_id) - k = Key(self._bucket, full_key) - url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, - encrypt_key=True) - return (url, file_id) - - def store_file(self, file_like_obj, content_type): - self._initialize_s3() - file_id = str(uuid4()) - full_key = os.path.join(self._prefix, file_id) - k = Key(self._bucket, full_key) - logger.debug('Setting s3 content type to: %s' % content_type) - k.set_metadata('Content-Type', content_type) - bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True, - rewind=True) - - if bytes_written == 0: - raise S3FileWriteException('Unable to write file to S3') - - return file_id - - def get_file_url(self, file_id, expires_in=300, mime_type=None): - self._initialize_s3() - full_key = os.path.join(self._prefix, file_id) - k = Key(self._bucket, full_key) - headers = None - if mime_type: - headers={'Content-Type': mime_type} - - return k.generate_url(expires_in, headers=headers) - - def get_file_checksum(self, file_id): - self._initialize_s3() - full_key = os.path.join(self._prefix, file_id) - k = self._bucket.lookup(full_key) - return k.etag[1:-1][:7] - - class UserfilesHandlers(View): methods = ['GET', 'PUT'] - def __init__(self, local_userfiles): - self._userfiles = local_userfiles + def __init__(self, distributed_storage, location, files): + self._storage = distributed_storage + self._files = files + self._locations = {location} self._magic = magic.Magic(mime=True) def get(self, file_id): - path = self._userfiles.file_path(file_id) - if not os.path.exists(path): + path = self._files.get_file_id_path(file_id) + try: + file_stream = self._storage.stream_read_file(self._locations, path) + buffered = BufferedReader(file_stream) + file_header_bytes = buffered.peek(1024) + return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes)) + except IOError: abort(404) - logger.debug('Sending path: %s' % path) - return send_file(path, mimetype=self._magic.from_file(path)) - def put(self, file_id): input_stream = request.stream if request.headers.get('transfer-encoding') == 'chunked': @@ -112,7 +37,10 @@ class UserfilesHandlers(View): # encoding (Gunicorn) input_stream = request.environ['wsgi.input'] - self._userfiles.store_stream(input_stream, file_id) + c_type = request.headers.get('Content-Type', None) + + path = self._files.get_file_id_path(file_id) + self._storage.stream_write(self._locations, path, input_stream, c_type) return make_response('Okay') @@ -123,99 +51,79 @@ class UserfilesHandlers(View): return self.put(file_id) -class LocalUserfiles(object): - def __init__(self, app, path): - self._root_path = path - self._buffer_size = 64 * 1024 # 64 KB +class DelegateUserfiles(object): + def __init__(self, app, distributed_storage, location, path, handler_name): self._app = app + self._storage = distributed_storage + self._locations = {location} + self._prefix = path + self._handler_name = handler_name def _build_url_adapter(self): return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'], script_name=self._app.config['APPLICATION_ROOT'] or '/', url_scheme=self._app.config['PREFERRED_URL_SCHEME']) - def prepare_for_drop(self, mime_type): + def get_file_id_path(self, file_id): + return os.path.join(self._prefix, file_id) + + def prepare_for_drop(self, mime_type, requires_cors=True): + """ Returns a signed URL to upload a file to our bucket. 
""" + logger.debug('Requested upload url with content type: %s' % mime_type) file_id = str(uuid4()) - with self._app.app_context() as ctx: - ctx.url_adapter = self._build_url_adapter() - return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id) + path = self.get_file_id_path(file_id) + url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors) - def file_path(self, file_id): - if '..' in file_id or file_id.startswith('/'): - raise RuntimeError('Invalid Filename') - return os.path.join(self._root_path, file_id) + if url is None: + with self._app.app_context() as ctx: + ctx.url_adapter = self._build_url_adapter() + return (url_for(self._handler_name, file_id=file_id, _external=True), file_id) - def store_stream(self, stream, file_id): - path = self.file_path(file_id) - dirname = os.path.dirname(path) - if not os.path.exists(dirname): - os.makedirs(dirname) - - with open(path, 'w') as to_write: - while True: - try: - buf = stream.read(self._buffer_size) - if not buf: - break - to_write.write(buf) - except IOError: - break + return (url, file_id) def store_file(self, file_like_obj, content_type): file_id = str(uuid4()) - - # Rewind the file to match what s3 does - file_like_obj.seek(0, os.SEEK_SET) - - self.store_stream(file_like_obj, file_id) + path = self.get_file_id_path(file_id) + self._storage.stream_write(self._locations, path, file_like_obj, content_type) return file_id - def get_file_url(self, file_id, expires_in=300): - with self._app.app_context() as ctx: - ctx.url_adapter = self._build_url_adapter() - return url_for('userfiles_handlers', file_id=file_id, _external=True) + def get_file_url(self, file_id, expires_in=300, requires_cors=False): + path = self.get_file_id_path(file_id) + url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors) + + if url is None: + with self._app.app_context() as ctx: + ctx.url_adapter = self._build_url_adapter() + return url_for(self._handler_name, file_id=file_id, _external=True) + + return url def get_file_checksum(self, file_id): - path = self.file_path(file_id) - sha_hash = hashlib.sha256() - with open(path, 'r') as to_hash: - while True: - buf = to_hash.read(self._buffer_size) - if not buf: - break - sha_hash.update(buf) - return sha_hash.hexdigest()[:7] + path = self.get_file_id_path(file_id) + return self._storage.get_checksum(self._locations, path) class Userfiles(object): - def __init__(self, app=None): + def __init__(self, app=None, distributed_storage=None): self.app = app if app is not None: - self.state = self.init_app(app) + self.state = self.init_app(app, distributed_storage) else: self.state = None - def init_app(self, app): - storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles') - path = app.config.get('USERFILES_PATH', '') + def init_app(self, app, distributed_storage): + location = app.config.get('USERFILES_LOCATION') + path = app.config.get('USERFILES_PATH', None) - if storage_type == 'LocalUserfiles': - userfiles = LocalUserfiles(app, path) - app.add_url_rule('/userfiles/', - view_func=UserfilesHandlers.as_view('userfiles_handlers', - local_userfiles=userfiles)) + handler_name = 'userfiles_handlers' - elif storage_type == 'S3Userfiles': - access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '') - secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '') - bucket = app.config.get('USERFILES_S3_BUCKET', '') - userfiles = S3Userfiles(path, access_key, secret_key, bucket) + userfiles = DelegateUserfiles(app, distributed_storage, 
location, path, handler_name) - elif storage_type == 'FakeUserfiles': - userfiles = FakeUserfiles() - - else: - raise RuntimeError('Unknown userfiles type: %s' % storage_type) + app.add_url_rule('/userfiles/', + view_func=UserfilesHandlers.as_view(handler_name, + distributed_storage=distributed_storage, + location=location, + files=userfiles)) # register extension with app app.extensions = getattr(app, 'extensions', {}) diff --git a/endpoints/api/__init__.py b/endpoints/api/__init__.py index e8dab28dc..2f5e2045e 100644 --- a/endpoints/api/__init__.py +++ b/endpoints/api/__init__.py @@ -1,7 +1,8 @@ import logging import json +import datetime -from flask import Blueprint, request, make_response, jsonify +from flask import Blueprint, request, make_response, jsonify, session from flask.ext.restful import Resource, abort, Api, reqparse from flask.ext.restful.utils.cors import crossdomain from werkzeug.exceptions import HTTPException @@ -66,6 +67,11 @@ class Unauthorized(ApiException): ApiException.__init__(self, 'insufficient_scope', 403, 'Unauthorized', payload) +class FreshLoginRequired(ApiException): + def __init__(self, payload=None): + ApiException.__init__(self, 'fresh_login_required', 401, 'Requires fresh login', payload) + + class ExceedsLicenseException(ApiException): def __init__(self, payload=None): ApiException.__init__(self, None, 402, 'Payment Required', payload) @@ -87,6 +93,14 @@ def handle_api_error(error): return response +@api_bp.app_errorhandler(model.TooManyLoginAttemptsException) +@crossdomain(origin='*', headers=['Authorization', 'Content-Type']) +def handle_too_many_login_attempts(error): + response = make_response('Too many login attempts', 429) + response.headers['Retry-After'] = int(error.retry_after) + return response + + def resource(*urls, **kwargs): def wrapper(api_resource): if not api_resource: @@ -256,6 +270,26 @@ def require_user_permission(permission_class, scope=None): require_user_read = require_user_permission(UserReadPermission, scopes.READ_USER) require_user_admin = require_user_permission(UserAdminPermission, None) +require_fresh_user_admin = require_user_permission(UserAdminPermission, None) + +def require_fresh_login(func): + @add_method_metadata('requires_fresh_login', True) + @wraps(func) + def wrapped(*args, **kwargs): + user = get_authenticated_user() + if not user: + raise Unauthorized() + + logger.debug('Checking fresh login for user %s', user.username) + + last_login = session.get('login_time', datetime.datetime.min) + valid_span = datetime.datetime.now() - datetime.timedelta(minutes=10) + + if not user.password_hash or last_login >= valid_span: + return func(*args, **kwargs) + + raise FreshLoginRequired() + return wrapped def require_scope(scope_object): diff --git a/endpoints/api/build.py b/endpoints/api/build.py index 21d554069..74677fadb 100644 --- a/endpoints/api/build.py +++ b/endpoints/api/build.py @@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False): } if can_write: - resp['archive_url'] = user_files.get_file_url(build_obj.resource_key) + resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True) return resp @@ -257,7 +257,7 @@ class FileDropResource(ApiResource): def post(self): """ Request a URL to which a file may be uploaded. """ mime_type = request.get_json()['mimeType'] - (url, file_id) = user_files.prepare_for_drop(mime_type) + (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True) return { 'url': url, 'file_id': str(file_id), diff --git a/endpoints/api/discovery.py b/endpoints/api/discovery.py index ee8702636..1995c6b42 100644 --- a/endpoints/api/discovery.py +++ b/endpoints/api/discovery.py @@ -119,6 +119,11 @@ def swagger_route_data(include_internal=False, compact=False): if internal is not None: new_operation['internal'] = True + if include_internal: + requires_fresh_login = method_metadata(method, 'requires_fresh_login') + if requires_fresh_login is not None: + new_operation['requires_fresh_login'] = True + if not internal or (internal and include_internal): operations.append(new_operation) diff --git a/endpoints/api/user.py b/endpoints/api/user.py index cf8f05eae..0ba8a8db6 100644 --- a/endpoints/api/user.py +++ b/endpoints/api/user.py @@ -9,7 +9,7 @@ from app import app, billing as stripe, authentication from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error, log_action, internal_only, NotFound, require_user_admin, parse_args, query_param, InvalidToken, require_scope, format_date, hide_if, show_if, - license_error) + license_error, require_fresh_login) from endpoints.api.subscribe import subscribe from endpoints.common import common_login from endpoints.api.team import try_accept_invite @@ -43,9 +43,15 @@ def user_view(user): organizations = model.get_user_organizations(user.username) def login_view(login): + try: + metadata = json.loads(login.metadata_json) + except: + metadata = {} + return { 'service': login.service.name, 'service_identifier': login.service_ident, + 'metadata': metadata } logins = model.list_federated_logins(user) @@ -92,6 +98,7 @@ class User(ApiResource): """ Operations related to users. """ schemas = { 'NewUser': { + 'id': 'NewUser', 'type': 'object', 'description': 'Fields which must be specified for a new user.', @@ -147,6 +154,7 @@ class User(ApiResource): return user_view(user) @require_user_admin + @require_fresh_login @nickname('changeUserDetails') @internal_only @validate_json_request('UpdateUser') @@ -155,7 +163,7 @@ class User(ApiResource): user = get_authenticated_user() user_data = request.get_json() - try: + try: if 'password' in user_data: logger.debug('Changing password for user: %s', user.username) log_action('account_change_password', user.username) @@ -372,6 +380,37 @@ class Signin(ApiResource): return conduct_signin(username, password) +@resource('/v1/signin/verify') +@internal_only +class VerifyUser(ApiResource): + """ Operations for verifying the existing user. """ + schemas = { + 'VerifyUser': { + 'id': 'VerifyUser', + 'type': 'object', + 'description': 'Information required to verify the signed-in user.', + 'required': [ + 'password', + ], + 'properties': { + 'password': { + 'type': 'string', + 'description': 'The user\'s password', + }, + }, + }, + } + + @require_user_admin + @nickname('verifyUser') + @validate_json_request('VerifyUser') + def post(self): + """ Verifies the signed-in user with the specified credentials. 
""" + signin_data = request.get_json() + password = signin_data['password'] + return conduct_signin(get_authenticated_user().username, password) + + @resource('/v1/signout') @internal_only class Signout(ApiResource): diff --git a/endpoints/callbacks.py b/endpoints/callbacks.py index 015f3c3a7..1cbd46192 100644 --- a/endpoints/callbacks.py +++ b/endpoints/callbacks.py @@ -4,12 +4,14 @@ from flask import request, redirect, url_for, Blueprint from flask.ext.login import current_user from endpoints.common import render_page_template, common_login, route_show_if -from app import app, analytics +from app import app, analytics, get_app_url from data import model from util.names import parse_repository_name +from util.validation import generate_valid_usernames from util.http import abort from auth.permissions import AdministerRepositoryPermission from auth.auth import require_session_login +from peewee import IntegrityError import features @@ -20,20 +22,39 @@ client = app.config['HTTPCLIENT'] callback = Blueprint('callback', __name__) +def render_ologin_error(service_name, + error_message='Could not load user data. The token may have expired.'): + return render_page_template('ologinerror.html', service_name=service_name, + error_message=error_message, + service_url=get_app_url()) -def exchange_github_code_for_token(code, for_login=True): +def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False, + redirect_suffix=''): code = request.args.get('code') + id_config = service_name + '_LOGIN_CLIENT_ID' if for_login else service_name + '_CLIENT_ID' + secret_config = service_name + '_LOGIN_CLIENT_SECRET' if for_login else service_name + '_CLIENT_SECRET' + payload = { - 'client_id': app.config['GITHUB_LOGIN_CLIENT_ID' if for_login else 'GITHUB_CLIENT_ID'], - 'client_secret': app.config['GITHUB_LOGIN_CLIENT_SECRET' if for_login else 'GITHUB_CLIENT_SECRET'], + 'client_id': app.config[id_config], + 'client_secret': app.config[secret_config], 'code': code, + 'grant_type': 'authorization_code', + 'redirect_uri': '%s://%s/oauth2/%s/callback%s' % (app.config['PREFERRED_URL_SCHEME'], + app.config['SERVER_HOSTNAME'], + service_name.lower(), + redirect_suffix) } + headers = { 'Accept': 'application/json' } - get_access_token = client.post(app.config['GITHUB_TOKEN_URL'], - params=payload, headers=headers) + if form_encode: + get_access_token = client.post(app.config[service_name + '_TOKEN_URL'], + data=payload, headers=headers) + else: + get_access_token = client.post(app.config[service_name + '_TOKEN_URL'], + params=payload, headers=headers) json_data = get_access_token.json() if not json_data: @@ -52,17 +73,82 @@ def get_github_user(token): return get_user.json() +def get_google_user(token): + token_param = { + 'access_token': token, + 'alt': 'json', + } + + get_user = client.get(app.config['GOOGLE_USER_URL'], params=token_param) + return get_user.json() + +def conduct_oauth_login(service_name, user_id, username, email, metadata={}): + to_login = model.verify_federated_login(service_name.lower(), user_id) + if not to_login: + # try to create the user + try: + valid = next(generate_valid_usernames(username)) + to_login = model.create_federated_user(valid, email, service_name.lower(), + user_id, set_password_notification=True, + metadata=metadata) + + # Success, tell analytics + analytics.track(to_login.username, 'register', {'service': service_name.lower()}) + + state = request.args.get('state', None) + if state: + logger.debug('Aliasing with state: %s' % state) + 
+def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
+  to_login = model.verify_federated_login(service_name.lower(), user_id)
+  if not to_login:
+    # try to create the user
+    try:
+      valid = next(generate_valid_usernames(username))
+      to_login = model.create_federated_user(valid, email, service_name.lower(),
+                                             user_id, set_password_notification=True,
+                                             metadata=metadata)
+
+      # Success, tell analytics
+      analytics.track(to_login.username, 'register', {'service': service_name.lower()})
+
+      state = request.args.get('state', None)
+      if state:
+        logger.debug('Aliasing with state: %s' % state)
+        analytics.alias(to_login.username, state)
+
+    except model.DataModelException, ex:
+      return render_ologin_error(service_name, ex.message)
+
+  if common_login(to_login):
+    return redirect(url_for('web.index'))
+
+  return render_ologin_error(service_name)
+
+def get_google_username(user_data):
+  username = user_data['email']
+  at = username.find('@')
+  if at > 0:
+    username = username[0:at]
+
+  return username
+
+
+@callback.route('/google/callback', methods=['GET'])
+@route_show_if(features.GOOGLE_LOGIN)
+def google_oauth_callback():
+  error = request.args.get('error', None)
+  if error:
+    return render_ologin_error('Google', error)
+
+  token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE', form_encode=True)
+  user_data = get_google_user(token)
+  if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
+    return render_ologin_error('Google')
+
+  username = get_google_username(user_data)
+  metadata = {
+    'service_username': user_data['email']
+  }
+
+  return conduct_oauth_login('Google', user_data['id'], username, user_data['email'],
+                             metadata=metadata)
+
+
 @callback.route('/github/callback', methods=['GET'])
 @route_show_if(features.GITHUB_LOGIN)
 def github_oauth_callback():
   error = request.args.get('error', None)
   if error:
-    return render_page_template('githuberror.html', error_message=error)
+    return render_ologin_error('GitHub', error)

-  token = exchange_github_code_for_token(request.args.get('code'))
+  token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
   user_data = get_github_user(token)
   if not user_data:
-    return render_page_template('githuberror.html', error_message='Could not load user data')
+    return render_ologin_error('GitHub')

   username = user_data['login']
   github_id = user_data['id']
@@ -84,42 +170,67 @@ def github_oauth_callback():
     if user_email['primary']:
       break

-  to_login = model.verify_federated_login('github', github_id)
-  if not to_login:
-    # try to create the user
-    try:
-      to_login = model.create_federated_user(username, found_email, 'github',
-                                             github_id, set_password_notification=True)
+  metadata = {
+    'service_username': username
+  }

-      # Success, tell analytics
-      analytics.track(to_login.username, 'register', {'service': 'github'})
+  return conduct_oauth_login('github', github_id, username, found_email, metadata=metadata)

-      state = request.args.get('state', None)
-      if state:
-        logger.debug('Aliasing with state: %s' % state)
-        analytics.alias(to_login.username, state)

-    except model.DataModelException, ex:
-      return render_page_template('githuberror.html', error_message=ex.message)
+@callback.route('/google/callback/attach', methods=['GET'])
+@route_show_if(features.GOOGLE_LOGIN)
+@require_session_login
+def google_oauth_attach():
+  token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE',
+                                  redirect_suffix='/attach', form_encode=True)

-  if common_login(to_login):
-    return redirect(url_for('web.index'))
+  user_data = get_google_user(token)
+  if not user_data or not user_data.get('id', None):
+    return render_ologin_error('Google')

-  return render_page_template('githuberror.html')
+  google_id = user_data['id']
+  user_obj = current_user.db_user()
+
+  username = get_google_username(user_data)
+  metadata = {
+    'service_username': user_data['email']
+  }
+
+  try:
+    model.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
+  except IntegrityError:
+    err = 'Google account %s is already attached to a %s account' % (
+      username,
+      app.config['REGISTRY_TITLE_SHORT'])
+    return render_ologin_error('Google', err)
+
+  return redirect(url_for('web.user'))


 @callback.route('/github/callback/attach', methods=['GET'])
 @route_show_if(features.GITHUB_LOGIN)
 @require_session_login
 def github_oauth_attach():
-  token = exchange_github_code_for_token(request.args.get('code'))
+  token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
   user_data = get_github_user(token)
   if not user_data:
-    return render_page_template('githuberror.html', error_message='Could not load user data')
+    return render_ologin_error('GitHub')

   github_id = user_data['id']
   user_obj = current_user.db_user()
-  model.attach_federated_login(user_obj, 'github', github_id)
+
+  username = user_data['login']
+  metadata = {
+    'service_username': username
+  }
+
+  try:
+    model.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
+  except IntegrityError:
+    err = 'GitHub account %s is already attached to a %s account' % (
+      username, app.config['REGISTRY_TITLE_SHORT'])
+
+    return render_ologin_error('GitHub', err)
+
   return redirect(url_for('web.user'))

@@ -130,7 +241,8 @@ def github_oauth_attach():
 def attach_github_build_trigger(namespace, repository):
   permission = AdministerRepositoryPermission(namespace, repository)
   if permission.can():
-    token = exchange_github_code_for_token(request.args.get('code'), for_login=False)
+    token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB',
+                                    for_login=False)
     repo = model.get_repository(namespace, repository)
     if not repo:
       msg = 'Invalid repository: %s/%s' % (namespace, repository)
diff --git a/endpoints/common.py b/endpoints/common.py
index fe09104ca..52715a1d1 100644
--- a/endpoints/common.py
+++ b/endpoints/common.py
@@ -2,8 +2,9 @@ import logging
 import urlparse
 import json
 import string
+import datetime

-from flask import make_response, render_template, request, abort
+from flask import make_response, render_template, request, abort, session
 from flask.ext.login import login_user, UserMixin
 from flask.ext.principal import identity_changed
 from random import SystemRandom
@@ -112,6 +113,7 @@ def common_login(db_user):
     logger.debug('Successfully signed in as: %s' % db_user.username)
     new_identity = QuayDeferredPermissionUser(db_user.username, 'username', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=new_identity)
+    session['login_time'] = datetime.datetime.now()
     return True
   else:
     logger.debug('User could not be logged in, inactive?.')
diff --git a/endpoints/notificationevent.py b/endpoints/notificationevent.py
index e393dc134..f3f4d6a77 100644
--- a/endpoints/notificationevent.py
+++ b/endpoints/notificationevent.py
@@ -15,6 +15,13 @@ class NotificationEvent(object):
   def __init__(self):
     pass

+  def get_level(self, event_data, notification_data):
+    """
+    Returns a 'level' representing the severity of the event.
+    Valid values are: 'info', 'warning', 'error', 'primary'
+    """
+    raise NotImplementedError
+
   def get_summary(self, event_data, notification_data):
     """
     Returns a human readable one-line summary for the given notification data.
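The new `get_level` hook gives each event type a severity that notification methods can translate into presentation. A minimal sketch of the contract, using only the levels the docstring enumerates; the subclass and mapping here are illustrative, mirroring the Hipchat color table added later in this diff:

```python
class ExampleEvent(NotificationEvent):
  # Each concrete event returns one of: 'info', 'warning', 'error', 'primary'.
  def get_level(self, event_data, notification_data):
    return 'warning'

# A consumer maps levels onto a presentation, falling back to a neutral default:
COLORS = {'info': 'gray', 'warning': 'yellow', 'error': 'red', 'primary': 'purple'}
color = COLORS.get(event.get_level(event_data, notification_data), 'gray')
```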
@@ -55,6 +62,9 @@ class RepoPushEvent(NotificationEvent):
   def event_name(cls):
     return 'repo_push'

+  def get_level(self, event_data, notification_data):
+    return 'info'
+
   def get_summary(self, event_data, notification_data):
     return 'Repository %s updated' % (event_data['repository'])

@@ -87,6 +97,9 @@ class BuildQueueEvent(NotificationEvent):
   @classmethod
   def event_name(cls):
     return 'build_queued'
+
+  def get_level(self, event_data, notification_data):
+    return 'info'

   def get_sample_data(self, repository):
     build_uuid = 'fake-build-id'
@@ -127,6 +140,9 @@ class BuildStartEvent(NotificationEvent):
   def event_name(cls):
     return 'build_start'

+  def get_level(self, event_data, notification_data):
+    return 'info'
+
   def get_sample_data(self, repository):
     build_uuid = 'fake-build-id'

@@ -155,6 +171,9 @@ class BuildSuccessEvent(NotificationEvent):
   def event_name(cls):
     return 'build_success'

+  def get_level(self, event_data, notification_data):
+    return 'primary'
+
   def get_sample_data(self, repository):
     build_uuid = 'fake-build-id'

@@ -183,6 +202,9 @@ class BuildFailureEvent(NotificationEvent):
   def event_name(cls):
     return 'build_failure'

+  def get_level(self, event_data, notification_data):
+    return 'error'
+
   def get_sample_data(self, repository):
     build_uuid = 'fake-build-id'
diff --git a/endpoints/notificationmethod.py b/endpoints/notificationmethod.py
index b49055157..9650e79f6 100644
--- a/endpoints/notificationmethod.py
+++ b/endpoints/notificationmethod.py
@@ -4,9 +4,11 @@ import os.path
 import tarfile
 import base64
 import json
+import requests
+import re

 from flask.ext.mail import Message
-from app import mail, app
+from app import mail, app, get_app_url
 from data import model

 logger = logging.getLogger(__name__)
@@ -187,3 +189,194 @@ class WebhookMethod(NotificationMethod):
       return False

     return True
+
+
+class FlowdockMethod(NotificationMethod):
+  """ Method for sending notifications to Flowdock via the Team Inbox API:
+      https://www.flowdock.com/api/team-inbox
+  """
+  @classmethod
+  def method_name(cls):
+    return 'flowdock'
+
+  def validate(self, repository, config_data):
+    token = config_data.get('flow_api_token', '')
+    if not token:
+      raise CannotValidateNotificationMethodException('Missing Flowdock API Token')
+
+  def perform(self, notification, event_handler, notification_data):
+    config_data = json.loads(notification.config_json)
+    token = config_data.get('flow_api_token', '')
+    if not token:
+      return False
+
+    owner = model.get_user(notification.repository.namespace)
+    if not owner:
+      # Something went wrong.
+      return False
+
+    url = 'https://api.flowdock.com/v1/messages/team_inbox/%s' % token
+    headers = {'Content-type': 'application/json'}
+    payload = {
+      'source': 'Quay',
+      'from_address': 'support@quay.io',
+      'subject': event_handler.get_summary(notification_data['event_data'], notification_data),
+      'content': event_handler.get_message(notification_data['event_data'], notification_data),
+      'from_name': owner.username,
+      'project': notification.repository.namespace + ' ' + notification.repository.name,
+      'tags': ['#' + event_handler.event_name()],
+      'link': notification_data['event_data']['homepage']
+    }
+
+    try:
+      resp = requests.post(url, data=json.dumps(payload), headers=headers)
+      if resp.status_code/100 != 2:
+        logger.error('%s response for flowdock to url: %s' % (resp.status_code,
+                                                              url))
+        logger.error(resp.content)
+        return False
+
+    except requests.exceptions.RequestException as ex:
+      logger.exception('Flowdock method was unable to be sent: %s' % ex.message)
+      return False
+
+    return True
+
+
+class HipchatMethod(NotificationMethod):
+  """ Method for sending notifications to Hipchat via the API:
+      https://www.hipchat.com/docs/apiv2/method/send_room_notification
+  """
+  @classmethod
+  def method_name(cls):
+    return 'hipchat'
+
+  def validate(self, repository, config_data):
+    if not config_data.get('notification_token', ''):
+      raise CannotValidateNotificationMethodException('Missing Hipchat Room Notification Token')
+
+    if not config_data.get('room_id', ''):
+      raise CannotValidateNotificationMethodException('Missing Hipchat Room ID')
+
+  def perform(self, notification, event_handler, notification_data):
+    config_data = json.loads(notification.config_json)
+
+    token = config_data.get('notification_token', '')
+    room_id = config_data.get('room_id', '')
+
+    if not token or not room_id:
+      return False
+
+    owner = model.get_user(notification.repository.namespace)
+    if not owner:
+      # Something went wrong.
+      return False
+
+    url = 'https://api.hipchat.com/v2/room/%s/notification?auth_token=%s' % (room_id, token)
+
+    level = event_handler.get_level(notification_data['event_data'], notification_data)
+    color = {
+      'info': 'gray',
+      'warning': 'yellow',
+      'error': 'red',
+      'primary': 'purple'
+    }.get(level, 'gray')
+
+    headers = {'Content-type': 'application/json'}
+    payload = {
+      'color': color,
+      'message': event_handler.get_message(notification_data['event_data'], notification_data),
+      'notify': level == 'error',
+      'message_format': 'html',
+    }
+
+    try:
+      resp = requests.post(url, data=json.dumps(payload), headers=headers)
+      if resp.status_code/100 != 2:
+        logger.error('%s response for hipchat to url: %s' % (resp.status_code,
+                                                             url))
+        logger.error(resp.content)
+        return False
+
+    except requests.exceptions.RequestException as ex:
+      logger.exception('Hipchat method was unable to be sent: %s' % ex.message)
+      return False
+
+    return True
+
+
+class SlackMethod(NotificationMethod):
+  """ Method for sending notifications to Slack via the API:
+      https://api.slack.com/docs/attachments
+  """
+  @classmethod
+  def method_name(cls):
+    return 'slack'
+
+  def validate(self, repository, config_data):
+    if not config_data.get('token', ''):
+      raise CannotValidateNotificationMethodException('Missing Slack Token')
+
+    if not config_data.get('subdomain', '').isalnum():
+      raise CannotValidateNotificationMethodException('Missing Slack Subdomain Name')
+
+  def formatForSlack(self, message):
+    message = message.replace('\n', '')
+    message = re.sub(r'\s+', ' ', message)
+    message = message.replace('<br>', '\n')
+    message = re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message)
+    return message
+
+  def perform(self, notification, event_handler, notification_data):
+    config_data = json.loads(notification.config_json)
+
+    token = config_data.get('token', '')
+    subdomain = config_data.get('subdomain', '')
+
+    if not token or not subdomain:
+      return False
+
+    owner = model.get_user(notification.repository.namespace)
+    if not owner:
+      # Something went wrong.
+      return False
+
+    url = 'https://%s.slack.com/services/hooks/incoming-webhook?token=%s' % (subdomain, token)
+
+    level = event_handler.get_level(notification_data['event_data'], notification_data)
+    color = {
+      'info': '#ffffff',
+      'warning': 'warning',
+      'error': 'danger',
+      'primary': 'good'
+    }.get(level, '#ffffff')
+
+    summary = event_handler.get_summary(notification_data['event_data'], notification_data)
+    message = event_handler.get_message(notification_data['event_data'], notification_data)
+
+    headers = {'Content-type': 'application/json'}
+    payload = {
+      'text': summary,
+      'username': 'quayiobot',
+      'attachments': [
+        {
+          'fallback': summary,
+          'text': self.formatForSlack(message),
+          'color': color
+        }
+      ]
+    }
+
+    try:
+      resp = requests.post(url, data=json.dumps(payload), headers=headers)
+      if resp.status_code/100 != 2:
+        logger.error('%s response for Slack to url: %s' % (resp.status_code,
+                                                           url))
+        logger.error(resp.content)
+        return False
+
+    except requests.exceptions.RequestException as ex:
+      logger.exception('Slack method was unable to be sent: %s' % ex.message)
+      return False
+
+    return True
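All three new integrations share one skeleton: `validate` rejects incomplete configuration, and `perform` loads `config_json`, POSTs a JSON payload, and treats any non-2xx status or request exception as a failed delivery. Condensed for reference (an illustrative class, not part of the commit):

```python
class ExampleMethod(NotificationMethod):
  @classmethod
  def method_name(cls):
    return 'example'

  def validate(self, repository, config_data):
    # Fail fast when required configuration is missing.
    if not config_data.get('url', ''):
      raise CannotValidateNotificationMethodException('Missing webhook URL')

  def perform(self, notification, event_handler, notification_data):
    config_data = json.loads(notification.config_json)
    payload = {
      'summary': event_handler.get_summary(notification_data['event_data'], notification_data),
    }
    try:
      resp = requests.post(config_data['url'], data=json.dumps(payload),
                           headers={'Content-type': 'application/json'})
      if resp.status_code / 100 != 2:  # anything outside 2xx is a failure
        logger.error('%s response for example to url: %s', resp.status_code, config_data['url'])
        return False
    except requests.exceptions.RequestException:
      logger.exception('Example method was unable to be sent')
      return False
    return True
```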
diff --git a/endpoints/registry.py b/endpoints/registry.py
index 72633939e..94719905a 100644
--- a/endpoints/registry.py
+++ b/endpoints/registry.py
@@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers):

     extra_headers = {}

-    # Add the Accept-Ranges header if the storage engine supports resumeable
+    # Add the Accept-Ranges header if the storage engine supports resumable
     # downloads.
-    if store.get_supports_resumeable_downloads(repo_image.storage.locations):
-      profile.debug('Storage supports resumeable downloads')
+    if store.get_supports_resumable_downloads(repo_image.storage.locations):
+      profile.debug('Storage supports resumable downloads')
     extra_headers['Accept-Ranges'] = 'bytes'

     resp = make_response('')
diff --git a/endpoints/trigger.py b/endpoints/trigger.py
index ab7aa9065..ae0b4b2b7 100644
--- a/endpoints/trigger.py
+++ b/endpoints/trigger.py
@@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger):
     with tarfile.open(fileobj=tarball) as archive:
       tarball_subdir = archive.getnames()[0]

+    # Seek to position 0 to make boto multipart happy
+    tarball.seek(0)
+
     dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)

     logger.debug('Successfully prepared job')
diff --git a/initdb.py b/initdb.py
index f04de833e..72f529491 100644
--- a/initdb.py
+++ b/initdb.py
@@ -179,6 +179,8 @@ def initialize_database():
   TeamRole.create(name='member')
   Visibility.create(name='public')
   Visibility.create(name='private')
+
+  LoginService.create(name='google')
   LoginService.create(name='github')
   LoginService.create(name='quayrobot')
   LoginService.create(name='ldap')
@@ -257,6 +259,10 @@ def initialize_database():
   ExternalNotificationMethod.create(name='email')
   ExternalNotificationMethod.create(name='webhook')

+  ExternalNotificationMethod.create(name='flowdock')
+  ExternalNotificationMethod.create(name='hipchat')
+  ExternalNotificationMethod.create(name='slack')
+
   NotificationKind.create(name='repo_push')
   NotificationKind.create(name='build_queued')
   NotificationKind.create(name='build_start')
diff --git a/static/css/quay.css b/static/css/quay.css
index d349d0eb1..78053c1e8 100644
--- a/static/css/quay.css
+++ b/static/css/quay.css
@@ -21,8 +21,7 @@
 #quay-logo {
-  width: 80px;
-  margin-right: 30px;
+  width: 100px;
 }

 #padding-container {
@@ -2568,7 +2567,7 @@ p.editable:hover i {
   margin-top: 10px;
 }

-.repo-build .build-log-error-element {
+.repo-build .build-log-error-element .error-message-container {
   position: relative;
   display: inline-block;
   margin: 10px;
@@ -2578,7 +2577,7 @@ p.editable:hover i {
   margin-left: 22px;
 }

-.repo-build .build-log-error-element i.fa {
+.repo-build .build-log-error-element .error-message-container i.fa {
   color: red;
   position: absolute;
   top: 13px;
@@ -4598,6 +4597,27 @@ i.quay-icon {
   height: 16px;
 }

+i.flowdock-icon {
+  background-image: url(/static/img/flowdock.ico);
+  background-size: 16px;
+  width: 16px;
+  height: 16px;
+}
+
+i.hipchat-icon {
+  background-image: url(/static/img/hipchat.png);
+  background-size: 16px;
+  width: 16px;
+  height: 16px;
+}
+
+i.slack-icon {
+  background-image: url(/static/img/slack.ico);
+  background-size: 16px;
+  width: 16px;
+  height: 16px;
+}
+
 .external-notification-view-element {
   margin: 10px;
   padding: 6px;
diff --git a/static/directives/build-log-error.html b/static/directives/build-log-error.html
index 095f8edd0..13b399bb9 100644
--- a/static/directives/build-log-error.html
+++ b/static/directives/build-log-error.html
@@ -1,4 +1,23 @@
-
-
-
-
+
+ + + + + caused by attempting to pull private repository {{ getLocalPullInfo().repo }} + with inaccessible credentials + without credentials + + + + +
+
+ Note: The credentials {{ getLocalPullInfo().login.username }} for registry {{ getLocalPullInfo().login.registry }} cannot + access repository {{ getLocalPullInfo().repo }}. +
+
+ Note: No robot account is specified for this build. Without such credentials, this pull will always fail. Please set up a new + build trigger with a robot account that has access to {{ getLocalPullInfo().repo }} or make that repository public. 
+
+
diff --git a/static/directives/create-external-notification-dialog.html b/static/directives/create-external-notification-dialog.html index d384f3f59..bf0c5da03 100644 --- a/static/directives/create-external-notification-dialog.html +++ b/static/directives/create-external-notification-dialog.html @@ -73,7 +73,7 @@
- {{ field.title }}: + {{ field.title }}:
@@ -86,7 +86,11 @@ current-entity="currentConfig[field.name]" ng-model="currentConfig[field.name]" allowed-entities="['user', 'team', 'org']" - ng-switch-when="entity"> + ng-switch-when="entity">
+ +
+ See: {{ getHelpUrl(field, currentConfig) }} +
diff --git a/static/directives/dropdown-select.html b/static/directives/dropdown-select.html index c1157e3d0..69404e161 100644 --- a/static/directives/dropdown-select.html +++ b/static/directives/dropdown-select.html @@ -2,7 +2,7 @@
+ ng-readonly="!allowCustomInput">
diff --git a/static/directives/signin-form.html b/static/directives/signin-form.html index f56b8f8db..de67c18f6 100644 --- a/static/directives/signin-form.html +++ b/static/directives/signin-form.html @@ -4,18 +4,23 @@ placeholder="Username or E-mail Address" ng-model="user.username" autofocus> - - - - - Sign In with GitHub - - +
+ Too many attempts have been made to log in. Please try again in {{ tryAgainSoon }} seconds. 
+ + + + + + + + + +
Invalid username or password.
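The `tryAgainSoon` countdown above implies server-side throttling of repeated failed sign-ins; the backend check itself is not shown in this hunk, so the following is only a guess at its shape. Field names follow the new User columns added elsewhere in this change, while `max_attempts` and the exponential backoff formula are assumptions:

```python
import datetime

# Hypothetical cooldown calculation; the real implementation may differ.
def seconds_until_retry_allowed(user, max_attempts=3, base_seconds=3):
  if user.invalid_login_attempts < max_attempts:
    return 0
  # Back off exponentially with each consecutive failure past the threshold.
  cooldown = base_seconds * (2 ** (user.invalid_login_attempts - max_attempts))
  elapsed = (datetime.datetime.utcnow() - user.last_invalid_login).total_seconds()
  return max(0, int(cooldown - elapsed))
```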
diff --git a/static/directives/signup-form.html b/static/directives/signup-form.html index 4947a966e..ba4efe287 100644 --- a/static/directives/signup-form.html +++ b/static/directives/signup-form.html @@ -18,10 +18,8 @@ OR - - Sign In with GitHub - + +
diff --git a/static/directives/trigger-setup-github.html b/static/directives/trigger-setup-github.html index 9b0e194ab..48ac359f9 100644 --- a/static/directives/trigger-setup-github.html +++ b/static/directives/trigger-setup-github.html @@ -29,7 +29,8 @@
Dockerfile Location:
- See All Repositories + See All Repositories
diff --git a/static/partials/landing-normal.html b/static/partials/landing-normal.html index 8a0badad1..6b9b6e42e 100644 --- a/static/partials/landing-normal.html +++ b/static/partials/landing-normal.html @@ -34,7 +34,7 @@ {{repository.namespace}}/{{repository.name}}
- See All Repositories + See All Repositories diff --git a/static/partials/repo-build.html b/static/partials/repo-build.html index 3afe87508..225f58701 100644 --- a/static/partials/repo-build.html +++ b/static/partials/repo-build.html @@ -77,7 +77,7 @@
- +
diff --git a/static/partials/user-admin.html b/static/partials/user-admin.html index 1b2ad7fd1..c4d3b94a0 100644 --- a/static/partials/user-admin.html +++ b/static/partials/user-admin.html @@ -33,7 +33,7 @@
  • Account E-mail
  • Robot Accounts
  • Change Password
  • -
  • GitHub Login
  • +
  • External Logins
  • Authorized Applications
  • Usage Logs @@ -138,13 +138,14 @@
    -
    -
    -
    Change Password
    +
    +
    +
    + Password changed successfully
    @@ -162,25 +163,52 @@
    - -
    + +
    -
    + + +
    GitHub Login:
    -
    + -
    - Connect with GitHub +
    + + Account attached to GitHub Account +
    +
    +
    + + +
    +
    +
    Google Login:
    +
    +
    + + {{ googleLogin }} +
    +
    + + Account attached to Google Account +
    +
    + +
    +
    +
    +
    +
    diff --git a/static/partials/view-repo.html b/static/partials/view-repo.html index 4f588ccf2..e5f2cecc6 100644 --- a/static/partials/view-repo.html +++ b/static/partials/view-repo.html @@ -391,7 +391,10 @@ ?
    - -