Merge master into branch

Commit e028d4ae0a

103 changed files with 2319 additions and 1187 deletions
@@ -41,6 +41,15 @@ PLANS = [
     'bus_features': False,
     'deprecated': True,
   },
+  {
+    'title': 'Yacht',
+    'price': 5000,
+    'privateRepos': 20,
+    'stripeId': 'bus-small',
+    'audience': 'For small businesses',
+    'bus_features': True,
+    'deprecated': True,
+  },
 
   # Active plans
   {

@@ -74,7 +83,7 @@ PLANS = [
     'title': 'Yacht',
     'price': 5000,
     'privateRepos': 20,
-    'stripeId': 'bus-small',
+    'stripeId': 'bus-coreos-trial',
     'audience': 'For small businesses',
     'bus_features': True,
     'deprecated': False,
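The two hunks above retire the existing 'Yacht' entry (it moves into the deprecated section, keeping its original 'bus-small' Stripe ID) and point the active 'Yacht' plan at the new 'bus-coreos-trial' Stripe ID. A minimal sketch of how a flag-driven plan list like this is typically consumed; ACTIVE_PLANS and get_plan are hypothetical helpers for illustration, not part of this diff:

    # Illustrative only; the real PLANS entries carry more keys than shown here.
    PLANS = [
      {'title': 'Yacht', 'price': 5000, 'stripeId': 'bus-small', 'deprecated': True},
      {'title': 'Yacht', 'price': 5000, 'stripeId': 'bus-coreos-trial', 'deprecated': False},
    ]

    # Only non-deprecated plans are offered to new subscribers.
    ACTIVE_PLANS = [plan for plan in PLANS if not plan['deprecated']]

    def get_plan(stripe_id):
      # Deprecated plans stay in PLANS so existing subscriptions still resolve.
      for plan in PLANS:
        if plan['stripeId'] == stripe_id:
          return plan
      return None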
@@ -17,6 +17,8 @@ SCHEME_DRIVERS = {
   'mysql': MySQLDatabase,
+  'mysql+pymysql': MySQLDatabase,
   'sqlite': SqliteDatabase,
   'postgresql': PostgresqlDatabase,
+  'postgresql+psycopg2': PostgresqlDatabase,
 }
 
 db = Proxy()

@@ -32,7 +34,7 @@ def _db_from_url(url, db_kwargs):
   if parsed_url.username:
     db_kwargs['user'] = parsed_url.username
   if parsed_url.password:
-    db_kwargs['passwd'] = parsed_url.password
+    db_kwargs['password'] = parsed_url.password
 
   return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
 
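These two hunks register driver-qualified URL schemes ('mysql+pymysql', 'postgresql+psycopg2') and rename the password keyword from the MySQLdb-style 'passwd' to 'password', which drivers such as PyMySQL and psycopg2 accept. A self-contained sketch of the same dispatch pattern, assuming SQLAlchemy's make_url for parsing and peewee database classes as targets (it mirrors the code above, not its exact module):

    from peewee import MySQLDatabase, PostgresqlDatabase, SqliteDatabase
    from sqlalchemy.engine.url import make_url

    SCHEME_DRIVERS = {
      'mysql': MySQLDatabase,
      'mysql+pymysql': MySQLDatabase,
      'sqlite': SqliteDatabase,
      'postgresql': PostgresqlDatabase,
      'postgresql+psycopg2': PostgresqlDatabase,
    }

    def db_from_url(url, db_kwargs=None):
      # Parse scheme, credentials, host and database name out of the URL.
      db_kwargs = dict(db_kwargs or {})
      parsed_url = make_url(url)

      if parsed_url.host:
        db_kwargs['host'] = parsed_url.host
      if parsed_url.port:
        db_kwargs['port'] = parsed_url.port
      if parsed_url.username:
        db_kwargs['user'] = parsed_url.username
      if parsed_url.password:
        db_kwargs['password'] = parsed_url.password

      # Dispatch on the full driver-qualified scheme, e.g. 'mysql+pymysql'.
      return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)

    # Example: db_from_url('mysql+pymysql://quay:secret@localhost/quay')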
@@ -74,6 +76,8 @@ class User(BaseModel):
   organization = BooleanField(default=False, index=True)
   robot = BooleanField(default=False, index=True)
   invoice_email = BooleanField(default=False)
+  invalid_login_attempts = IntegerField(default=0)
+  last_invalid_login = DateTimeField(default=datetime.utcnow)
 
 
 class TeamRole(BaseModel):
@@ -0,0 +1,37 @@
+"""add log kind for regenerating robot tokens
+
+Revision ID: 43e943c0639f
+Revises: 82297d834ad
+Create Date: 2014-08-25 17:14:42.784518
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '43e943c0639f'
+down_revision = '82297d834ad'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
+from data.database import all_models
+
+
+def upgrade():
+  schema = gen_sqlalchemy_metadata(all_models)
+
+  op.bulk_insert(schema.tables['logentrykind'],
+                 [
+                   {'id': 41, 'name':'regenerate_robot_token'},
+                 ])
+
+
+def downgrade():
+  schema = gen_sqlalchemy_metadata(all_models)
+
+  logentrykind = schema.tables['logentrykind']
+  op.execute(
+    (logentrykind.delete()
+     .where(logentrykind.c.name == op.inline_literal('regenerate_robot_token')))
+
+  )
@@ -20,12 +20,12 @@ def get_id(query):
 
 def upgrade():
   conn = op.get_bind()
-  event_id = get_id('Select id From externalnotificationevent Where name="repo_push" Limit 1')
-  method_id = get_id('Select id From externalnotificationmethod Where name="webhook" Limit 1')
+  event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
+  method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
   conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id))
 
 def downgrade():
   conn = op.get_bind()
-  event_id = get_id('Select id From externalnotificationevent Where name="repo_push" Limit 1')
-  method_id = get_id('Select id From externalnotificationmethod Where name="webhook" Limit 1')
+  event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
+  method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
   conn.execute('Insert Into webhook (public_id, repository_id, parameters) Select uuid, repository_id, config_json FROM repositorynotification Where event_id=%s And method_id=%s' % (event_id, method_id))
 
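The change above only swaps MySQL-style double quotes for standard single-quoted SQL string literals. The same statements could avoid manual quoting and % interpolation entirely by using bound parameters; a hedged sketch of that alternative (not what the migration does) using SQLAlchemy's text() construct, with op passed in for clarity:

    from sqlalchemy import text

    def upgrade_with_bound_params(op):
      # Same intent as upgrade() above, but with driver-side parameter binding
      # instead of string interpolation.
      conn = op.get_bind()
      event_id = conn.execute(
          text("SELECT id FROM externalnotificationevent WHERE name = :name LIMIT 1"),
          {'name': 'repo_push'}).fetchone()[0]
      method_id = conn.execute(
          text("SELECT id FROM externalnotificationmethod WHERE name = :name LIMIT 1"),
          {'name': 'webhook'}).fetchone()[0]

      conn.execute(
          text("INSERT INTO repositorynotification "
               "(uuid, repository_id, event_id, method_id, config_json) "
               "SELECT public_id, repository_id, :event_id, :method_id, parameters FROM webhook"),
          {'event_id': event_id, 'method_id': method_id})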
@@ -0,0 +1,28 @@
+"""Add brute force prevention metadata to the user table.
+
+Revision ID: 4fdb65816b8d
+Revises: 43e943c0639f
+Create Date: 2014-09-03 12:35:33.722435
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4fdb65816b8d'
+down_revision = '43e943c0639f'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade():
+    ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0"))
+    op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False, server_default=sa.func.now()))
+    ### end Alembic commands ###
+
+
+def downgrade():
+    ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('user', 'last_invalid_login')
+    op.drop_column('user', 'invalid_login_attempts')
+    ### end Alembic commands ###
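Assuming these revisions are driven by stock Alembic (an assumption; the project may wrap migrations in its own tooling), applying and rolling back the brute-force-metadata revision programmatically would look roughly like:

    from alembic import command
    from alembic.config import Config

    alembic_cfg = Config('alembic.ini')             # config path is an assumption
    command.upgrade(alembic_cfg, '4fdb65816b8d')    # adds the two user columns
    command.downgrade(alembic_cfg, '43e943c0639f')  # drops them again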
@@ -203,7 +203,7 @@ def upgrade():
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('user_id', sa.Integer(), nullable=False),
     sa.Column('service_id', sa.Integer(), nullable=False),
-    sa.Column('service_ident', sa.String(length=255, collation='utf8_general_ci'), nullable=False),
+    sa.Column('service_ident', sa.String(length=255), nullable=False),
     sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ),
     sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
     sa.PrimaryKeyConstraint('id')

@@ -375,7 +375,7 @@ def upgrade():
     sa.Column('command', sa.Text(), nullable=True),
     sa.Column('repository_id', sa.Integer(), nullable=False),
     sa.Column('image_size', sa.BigInteger(), nullable=True),
-    sa.Column('ancestors', sa.String(length=60535, collation='latin1_swedish_ci'), nullable=True),
+    sa.Column('ancestors', sa.String(length=60535), nullable=True),
     sa.Column('storage_id', sa.Integer(), nullable=True),
     sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
     sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
data/migrations/versions/82297d834ad_add_us_west_location.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+"""add US West location
+
+Revision ID: 82297d834ad
+Revises: 47670cbeced
+Create Date: 2014-08-15 13:35:23.834079
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '82297d834ad'
+down_revision = '47670cbeced'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
+from data.database import all_models
+
+
+def upgrade():
+  schema = gen_sqlalchemy_metadata(all_models)
+
+  op.bulk_insert(schema.tables['imagestoragelocation'],
+                 [
+                   {'id':8, 'name':'s3_us_west_1'},
+                 ])
+
+
+def downgrade():
+  schema = gen_sqlalchemy_metadata(all_models)
+
+  op.execute(
+    (imagestoragelocation.delete()
+     .where(imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
+
+  )
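Note that downgrade() in this new migration references imagestoragelocation without defining it, unlike the logentrykind migration earlier, which binds the table from the generated schema before using it. A corrected downgrade would presumably read (using the same imports as the file above):

    from alembic import op
    from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
    from data.database import all_models

    def downgrade():
      schema = gen_sqlalchemy_metadata(all_models)

      # Bind the table before referencing it, as the logentrykind migration does.
      imagestoragelocation = schema.tables['imagestoragelocation']
      op.execute(
        imagestoragelocation.delete()
        .where(imagestoragelocation.c.name == op.inline_literal('s3_us_west_1'))
      )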
@@ -35,6 +35,7 @@ def upgrade():
                    {'id':4, 'name':'s3_ap_southeast_2'},
                    {'id':5, 'name':'s3_ap_northeast_1'},
                    {'id':6, 'name':'s3_sa_east_1'},
+                   {'id':7, 'name':'local'},
                  ])
 
   op.create_table('imagestorageplacement',
@@ -0,0 +1,35 @@
+"""Remove the old webhooks table.
+
+Revision ID: f42b0ea7a4d
+Revises: 4fdb65816b8d
+Create Date: 2014-09-03 13:43:23.391464
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'f42b0ea7a4d'
+down_revision = '4fdb65816b8d'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade():
+    ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('webhook')
+    ### end Alembic commands ###
+
+
+def downgrade():
+    ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('webhook',
+        sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
+        sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False),
+        sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
+        sa.Column('parameters', mysql.LONGTEXT(), nullable=False),
+        sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'),
+        sa.PrimaryKeyConstraint('id'),
+        mysql_default_charset=u'latin1',
+        mysql_engine=u'InnoDB'
+    )
+    ### end Alembic commands ###
@@ -1,12 +1,17 @@
 import bcrypt
 import logging
 import datetime
 import dateutil.parser
 import json
 
+from datetime import datetime, timedelta
+
 from data.database import *
 from util.validation import *
 from util.names import format_robot_username
+from util.backoff import exponential_backoff
 
+
+EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
 
 logger = logging.getLogger(__name__)
@@ -68,10 +73,15 @@ class TooManyUsersException(DataModelException):
   pass
 
 
-def is_create_user_allowed():
-  return get_active_user_count() < config.app_config['LICENSE_USER_LIMIT']
+class TooManyLoginAttemptsException(Exception):
+  def __init__(self, message, retry_after):
+    super(TooManyLoginAttemptsException, self).__init__(message)
+    self.retry_after = retry_after
+
+def is_create_user_allowed():
+  return True
 
 
 def create_user(username, password, email):
   """ Creates a regular user, if allowed. """
   if not validate_password(password):
@@ -181,6 +191,19 @@ def create_robot(robot_shortname, parent):
   except Exception as ex:
     raise DataModelException(ex.message)
 
+def get_robot(robot_shortname, parent):
+  robot_username = format_robot_username(parent.username, robot_shortname)
+  robot = lookup_robot(robot_username)
+
+  if not robot:
+    msg = ('Could not find robot with username: %s' %
+           robot_username)
+    raise InvalidRobotException(msg)
+
+  service = LoginService.get(name='quayrobot')
+  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
+
+  return robot, login.service_ident
 
 def lookup_robot(robot_username):
   joined = User.select().join(FederatedLogin).join(LoginService)
@@ -191,7 +214,6 @@ def lookup_robot(robot_username):
 
   return found[0]
 
-
 def verify_robot(robot_username, password):
   joined = User.select().join(FederatedLogin).join(LoginService)
   found = list(joined.where(FederatedLogin.service_ident == password,
@@ -204,6 +226,25 @@ def verify_robot(robot_username, password):
 
   return found[0]
 
+def regenerate_robot_token(robot_shortname, parent):
+  robot_username = format_robot_username(parent.username, robot_shortname)
+
+  robot = lookup_robot(robot_username)
+  if not robot:
+    raise InvalidRobotException('Could not find robot with username: %s' %
+                                robot_username)
+
+  password = random_string_generator(length=64)()
+  robot.email = password
+
+  service = LoginService.get(name='quayrobot')
+  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
+  login.service_ident = password
+
+  login.save()
+  robot.save()
+
+  return robot, password
 
 def delete_robot(robot_username):
   try:
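regenerate_robot_token() stores the new token as the robot's FederatedLogin.service_ident (and mirrors it into robot.email), deriving it from random_string_generator(length=64)(), i.e. a factory that returns a zero-argument generator function. A plausible self-contained sketch of what such a helper looks like; this is an assumption, the real helper lives elsewhere in the codebase and may differ:

    import string
    from random import SystemRandom

    def random_string_generator(length=16):
      def random_string():
        # SystemRandom draws from the OS entropy source rather than the
        # default Mersenne Twister PRNG.
        random = SystemRandom()
        return ''.join(random.choice(string.ascii_uppercase + string.digits)
                       for _ in range(length))
      return random_string

    # Usage mirrors the diff above: call the factory, then call the generator.
    token = random_string_generator(length=64)()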
@@ -524,11 +565,30 @@ def verify_user(username_or_email, password):
   except User.DoesNotExist:
     return None
 
+  now = datetime.utcnow()
+
+  if fetched.invalid_login_attempts > 0:
+    can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
+                                       fetched.last_invalid_login)
+
+    if can_retry_at > now:
+      retry_after = can_retry_at - now
+      raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())
+
   if (fetched.password_hash and
       bcrypt.hashpw(password, fetched.password_hash) ==
       fetched.password_hash):
+
+    if fetched.invalid_login_attempts > 0:
+      fetched.invalid_login_attempts = 0
+      fetched.save()
+
     return fetched
 
+  fetched.invalid_login_attempts += 1
+  fetched.last_invalid_login = now
+  fetched.save()
+
   # We weren't able to authorize the user
   return None
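verify_user() now throttles repeated failures: it bumps invalid_login_attempts and last_invalid_login on each bad password, resets the counter on success, and before checking the hash asks exponential_backoff(attempts, EXPONENTIAL_BACKOFF_SCALE, last_invalid_login) for the earliest datetime at which another attempt is allowed. A self-contained sketch of such a helper, assuming a simple power-of-two schedule scaled by the timedelta (the real util.backoff implementation may differ):

    from datetime import datetime, timedelta

    EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)

    def exponential_backoff(attempts, scale_factor, base):
      # After N failed attempts, the next try is allowed scale * 2**N after the
      # last failure: 2s, 4s, 8s, ... for a 1-second scale.
      backoff = 2 ** attempts
      return backoff * scale_factor + base

    # Example: after 3 failures at 12:00:00, retries open up at 12:00:08.
    print(exponential_backoff(3, EXPONENTIAL_BACKOFF_SCALE, datetime(2014, 9, 3, 12, 0, 0)))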
@@ -828,6 +888,34 @@ def get_all_repo_users(namespace_name, repository_name):
                          Repository.name == repository_name)
 
 
+def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
+  select = User.select().distinct()
+  with_team_member = select.join(TeamMember)
+  with_team = with_team_member.join(Team)
+  with_perm = with_team.join(RepositoryPermission)
+  with_repo = with_perm.join(Repository)
+  return with_repo.where(Repository.namespace == namespace_name,
+                         Repository.name == repository_name)
+
+
+def get_all_repo_users_transitive(namespace_name, repository_name):
+  # Load the users found via teams and directly via permissions.
+  via_teams = get_all_repo_users_transitive_via_teams(namespace_name, repository_name)
+  directly = [perm.user for perm in get_all_repo_users(namespace_name, repository_name)]
+
+  # Filter duplicates.
+  user_set = set()
+
+  def check_add(u):
+    if u.username in user_set:
+      return False
+
+    user_set.add(u.username)
+    return True
+
+  return [user for user in list(directly) + list(via_teams) if check_add(user)]
+
+
 def get_repository_for_resource(resource_key):
   try:
     return (Repository
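get_all_repo_users_transitive() merges two independent queries, so the same account can appear once via a direct permission and again via a team; check_add() keeps only the first occurrence per username while preserving order (direct grants first). The same semantics in a compact, standalone form, for illustration only:

    from collections import namedtuple

    User = namedtuple('User', ['username'])  # stand-in for the peewee model

    def merge_unique_by_username(directly, via_teams):
      # setdefault keeps the first User seen for each username, like check_add().
      merged = {}
      for user in list(directly) + list(via_teams):
        merged.setdefault(user.username, user)
      return list(merged.values())

    print(merge_unique_by_username([User('alice')], [User('alice'), User('bob')]))
    # -> [User(username='alice'), User(username='bob')]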
@@ -982,7 +1070,8 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
                  .join(Repository)
                  .join(Visibility)
                  .switch(Repository)
-                 .join(RepositoryPermission, JOIN_LEFT_OUTER))
+                 .join(RepositoryPermission, JOIN_LEFT_OUTER)
+                 .where(ImageStorage.uploading == False))
 
     query = (_filter_to_repos_for_user(query, username)
              .where(Image.docker_image_id == docker_image_id))
@@ -1662,19 +1751,20 @@ def create_notification(kind_name, target, metadata={}):
 
 def create_unique_notification(kind_name, target, metadata={}):
   with config.app_config['DB_TRANSACTION_FACTORY'](db):
-    if list_notifications(target, kind_name).count() == 0:
+    if list_notifications(target, kind_name, limit=1).count() == 0:
       create_notification(kind_name, target, metadata)
 
 
 def lookup_notification(user, uuid):
-  results = list(list_notifications(user, id_filter=uuid, include_dismissed=True))
+  results = list(list_notifications(user, id_filter=uuid, include_dismissed=True, limit=1))
   if not results:
     return None
 
   return results[0]
 
 
-def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False):
+def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
+                       page=None, limit=None):
   Org = User.alias()
   AdminTeam = Team.alias()
   AdminTeamMember = TeamMember.alias()
@@ -1712,6 +1802,11 @@ def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
                .switch(Notification)
                .where(Notification.uuid == id_filter))
 
+  if page:
+    query = query.paginate(page, limit)
+  elif limit:
+    query = query.limit(limit)
+
   return query
 
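With the new page and limit parameters, callers can cap or page the notification query instead of materializing every row; create_unique_notification and lookup_notification above now pass limit=1 because they only need to know whether at least one row exists. A usage sketch (not standalone: list_notifications and a User instance come from the model module above; peewee's paginate() is 1-based):

    # First and second pages of ten notifications for the user.
    page_one = list(list_notifications(user, page=1, limit=10))
    page_two = list(list_notifications(user, page=2, limit=10))

    # Without a page, limit alone just caps the result set.
    latest = list(list_notifications(user, limit=5))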