2013-09-20 15:55:44 +00:00
|
|
|
import bcrypt
|
2013-09-20 22:38:17 +00:00
|
|
|
import logging
|
2013-09-26 19:58:11 +00:00
|
|
|
import dateutil.parser
|
2013-11-15 21:45:02 +00:00
|
|
|
import json
|
2015-02-12 19:11:56 +00:00
|
|
|
import time
|
2013-09-20 15:55:44 +00:00
|
|
|
|
2014-10-30 17:26:02 +00:00
|
|
|
from datetime import datetime, timedelta, date
|
2015-02-18 21:37:38 +00:00
|
|
|
from uuid import uuid4
|
2014-09-02 19:27:05 +00:00
|
|
|
|
2014-09-22 21:27:02 +00:00
|
|
|
from data.database import (User, Repository, Image, AccessToken, Role, RepositoryPermission,
|
|
|
|
Visibility, RepositoryTag, EmailConfirmation, FederatedLogin,
|
|
|
|
LoginService, RepositoryBuild, Team, TeamMember, TeamRole,
|
|
|
|
LogEntryKind, LogEntry, PermissionPrototype, ImageStorage,
|
|
|
|
BuildTriggerService, RepositoryBuildTrigger, NotificationKind,
|
|
|
|
Notification, ImageStorageLocation, ImageStoragePlacement,
|
|
|
|
ExternalNotificationEvent, ExternalNotificationMethod,
|
|
|
|
RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
|
2014-10-07 21:33:20 +00:00
|
|
|
DerivedImageStorage, ImageStorageTransformation, random_string_generator,
|
2015-02-13 20:54:01 +00:00
|
|
|
db, BUILD_PHASE, QuayUserField, ImageStorageSignature, QueueItem,
|
2015-02-17 17:35:16 +00:00
|
|
|
ImageStorageSignatureKind, validate_database_url, db_for_update,
|
2015-04-13 17:31:07 +00:00
|
|
|
AccessTokenKind, Star, get_epoch_timestamp, RepositoryActionCount)
|
2015-05-11 18:38:10 +00:00
|
|
|
from peewee import JOIN_LEFT_OUTER, fn, SQL, IntegrityError
|
2014-09-22 21:27:02 +00:00
|
|
|
from util.validation import (validate_username, validate_email, validate_password,
|
|
|
|
INVALID_PASSWORD_MESSAGE)
|
2014-11-09 22:50:57 +00:00
|
|
|
from util.names import format_robot_username, parse_robot_username
|
2014-09-02 19:27:05 +00:00
|
|
|
from util.backoff import exponential_backoff
|
|
|
|
|
|
|
|
|
|
|
|
# Base time unit used to scale exponential backoff delays — presumably for
# failed login attempts (see TooManyLoginAttemptsException); confirm at call sites.
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)

# Age after which a build is presumed dead — used elsewhere in this module;
# TODO confirm exact consumer.
PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)

# Alias of User used for joining a Repository to its namespace owner without
# colliding with other joins on User.
Namespace = User.alias()

logger = logging.getLogger(__name__)
|
2014-05-13 16:17:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Config(object):
  """ Mutable holder for runtime configuration shared by the data model.

  Both attributes start as None and are injected at application startup.
  """

  def __init__(self):
    self.app_config = None  # application configuration dictionary
    self.store = None       # storage backend


config = Config()
|
|
|
|
|
2013-09-20 15:55:44 +00:00
|
|
|
|
2013-09-27 19:53:39 +00:00
|
|
|
class DataModelException(Exception):
  """ Base class for all errors raised by the data model layer. """
  pass
|
|
|
|
|
|
|
|
|
2013-10-10 16:55:03 +00:00
|
|
|
class InvalidEmailAddressException(DataModelException):
  """ Raised for invalid, unknown or already-used email addresses. """
  pass
|
|
|
|
|
|
|
|
|
|
|
|
class InvalidUsernameException(DataModelException):
  """ Raised when a username is invalid, already taken, or cannot be found. """
  pass
|
|
|
|
|
2013-09-27 22:38:41 +00:00
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
class InvalidOrganizationException(DataModelException):
  """ Raised for invalid organization names or when a user is not an org. """
  pass
|
|
|
|
|
|
|
|
|
2013-11-20 21:13:03 +00:00
|
|
|
class InvalidRobotException(DataModelException):
  """ Raised for invalid robot names, missing robots, or failed robot
  credential verification. """
  pass
|
|
|
|
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
class InvalidTeamException(DataModelException):
  """ Raised for invalid team names or teams that do not exist in an org. """
  pass
|
|
|
|
|
2015-05-11 21:13:42 +00:00
|
|
|
|
2014-08-15 21:47:43 +00:00
|
|
|
class InvalidTeamMemberException(DataModelException):
  """ Raised when an entity (e.g. a robot from another org) cannot be added
  to a team. """
  pass
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
|
2013-10-10 16:55:03 +00:00
|
|
|
class InvalidPasswordException(DataModelException):
  """ Raised when a supplied password fails validation. """
  pass
|
|
|
|
|
|
|
|
|
2013-10-16 18:24:10 +00:00
|
|
|
class InvalidTokenException(DataModelException):
  """ Raised for invalid access tokens — used elsewhere in this module. """
  pass
|
|
|
|
|
|
|
|
|
2013-10-24 20:37:03 +00:00
|
|
|
class InvalidRepositoryBuildException(DataModelException):
  """ Raised when a repository build lookup fails — used elsewhere in this
  module. """
  pass
|
|
|
|
|
|
|
|
|
2014-07-16 20:30:47 +00:00
|
|
|
class InvalidNotificationException(DataModelException):
  """ Raised when a notification lookup fails — used elsewhere in this
  module. """
  pass
|
|
|
|
|
|
|
|
|
2014-02-18 20:50:15 +00:00
|
|
|
class InvalidBuildTriggerException(DataModelException):
  """ Raised when a build trigger lookup fails — used elsewhere in this
  module. """
  pass
|
|
|
|
|
|
|
|
|
2014-09-23 20:06:38 +00:00
|
|
|
class InvalidImageException(DataModelException):
  """ Raised when an image lookup fails — used elsewhere in this module. """
  pass
|
|
|
|
|
|
|
|
|
2014-05-28 17:51:52 +00:00
|
|
|
class TooManyUsersException(DataModelException):
  """ Raised when user creation is disallowed (is_create_user_allowed()
  returned False). """
  pass
|
|
|
|
|
|
|
|
|
2014-08-18 21:24:00 +00:00
|
|
|
class UserAlreadyInTeam(DataModelException):
  """ Raised when adding a user to a team they already belong to. """
  pass
|
|
|
|
|
|
|
|
|
2014-09-02 19:27:05 +00:00
|
|
|
class TooManyLoginAttemptsException(Exception):
  """ Raised when login is temporarily blocked due to repeated failures.

  retry_after is the number of time units the caller should wait before the
  next attempt — presumably seconds (see EXPONENTIAL_BACKOFF_SCALE); confirm
  at the raise site.
  """
  def __init__(self, message, retry_after):
    super(TooManyLoginAttemptsException, self).__init__(message)
    self.retry_after = retry_after
|
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2015-02-11 16:54:30 +00:00
|
|
|
def _get_repository(namespace_name, repository_name, for_update=False):
  """ Returns the repository with the given namespace and name, joined with
      its namespace user. Raises Repository.DoesNotExist when not found.
      When for_update is True, the row is selected for update.
  """
  repo_query = (Repository
                .select(Repository, Namespace)
                .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                .where(Namespace.username == namespace_name,
                       Repository.name == repository_name))

  return (db_for_update(repo_query) if for_update else repo_query).get()
|
2014-09-24 22:01:35 +00:00
|
|
|
|
|
|
|
|
2014-09-18 21:36:26 +00:00
|
|
|
def hash_password(password, salt=None):
  """ Returns the bcrypt hash of the given password, generating a fresh salt
      when none is supplied. """
  if not salt:
    salt = bcrypt.gensalt()
  return bcrypt.hashpw(password.encode('utf-8'), salt)
|
2014-05-28 17:51:52 +00:00
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-05-28 17:51:52 +00:00
|
|
|
def is_create_user_allowed():
  # NOTE(review): always True here; presumably a hook point for enforcing a
  # user cap — callers raise TooManyUsersException when this returns False.
  return True
|
2014-05-28 17:51:52 +00:00
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-09-22 23:11:48 +00:00
|
|
|
def create_user(username, password, email, auto_verify=False):
  """ Creates a regular user, if allowed.

  The password is validated and bcrypt-hashed before being stored; when
  auto_verify is True the account is marked verified immediately.
  """
  if not validate_password(password):
    raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)

  if not is_create_user_allowed():
    raise TooManyUsersException()

  new_user = _create_user(username, email)
  new_user.password_hash = hash_password(password)
  new_user.verified = auto_verify
  new_user.save()
  return new_user
|
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-05-28 17:51:52 +00:00
|
|
|
def _create_user(username, email):
  """ Creates and returns a bare User row after validating the email address
      and username and checking both for uniqueness.

  Shared by regular-user and organization creation. Raises
  InvalidEmailAddressException, InvalidUsernameException, or wraps any
  database failure in DataModelException.
  """
  if not validate_email(email):
    raise InvalidEmailAddressException('Invalid email address: %s' % email)

  (username_valid, username_issue) = validate_username(username)
  if not username_valid:
    raise InvalidUsernameException('Invalid username %s: %s' % (username, username_issue))

  try:
    existing = User.get((User.username == username) | (User.email == email))
    logger.info('Existing user with same username or email.')

    # A user already exists with either the same username or email.
    if existing.username == username:
      raise InvalidUsernameException('Username has already been taken: %s' %
                                     username)
    raise InvalidEmailAddressException('Email has already been used: %s' %
                                       email)
  except User.DoesNotExist:
    # This is actually the happy path.
    logger.debug('Email and username are unique!')

  try:
    return User.create(username=username, email=email)
  except Exception as ex:
    # str(ex) instead of the deprecated (PEP 352) BaseException.message.
    raise DataModelException(str(ex))
|
2013-09-27 23:29:01 +00:00
|
|
|
|
|
|
|
|
2014-05-13 16:17:26 +00:00
|
|
|
def is_username_unique(test_username):
  """ Returns True when no user row (of any kind) holds the given username. """
  try:
    User.get((User.username == test_username))
  except User.DoesNotExist:
    return True
  return False
|
|
|
|
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
def create_organization(name, email, creating_user):
  """ Creates a new organization with the given name and email, making
      creating_user the first member of its 'owners' admin team. """
  try:
    new_org = _create_user(name, email)
    new_org.organization = True
    new_org.save()

    # The creating user becomes the sole initial member of the admin team.
    owners_team = create_team('owners', new_org, 'admin')
    add_user_to_team(creating_user, owners_team)

    return new_org
  except InvalidUsernameException:
    msg = ('Invalid organization name: %s Organization names must consist ' +
           'solely of lower case letters, numbers, and underscores. ' +
           '[a-z0-9_]') % name
    raise InvalidOrganizationException(msg)
|
2013-11-01 23:34:17 +00:00
|
|
|
|
|
|
|
|
2013-11-20 21:13:03 +00:00
|
|
|
def create_robot(robot_shortname, parent):
  """ Creates a robot account under the given parent (user or org).

  Returns a (robot_user, password) tuple; the password doubles as the
  robot's federated login token. Raises InvalidRobotException when the
  short name is invalid or the robot already exists.
  """
  (username_valid, username_issue) = validate_username(robot_shortname)
  if not username_valid:
    raise InvalidRobotException('The name for the robot \'%s\' is invalid: %s' %
                                (robot_shortname, username_issue))

  username = format_robot_username(parent.username, robot_shortname)

  try:
    User.get(User.username == username)

    msg = 'Existing robot with name: %s' % username
    logger.info(msg)
    raise InvalidRobotException(msg)
  except User.DoesNotExist:
    pass

  try:
    created = User.create(username=username, robot=True)

    service = LoginService.get(name='quayrobot')
    # The robot's email field serves as its password/login token.
    password = created.email
    FederatedLogin.create(user=created, service=service,
                          service_ident=password)

    return created, password
  except Exception as ex:
    # str(ex) instead of the deprecated (PEP 352) BaseException.message.
    raise DataModelException(str(ex))
|
|
|
|
|
2014-08-25 21:19:23 +00:00
|
|
|
def get_robot(robot_shortname, parent):
  """ Returns (robot, token) for the robot under the given parent, raising
      InvalidRobotException when it cannot be found. """
  robot_username = format_robot_username(parent.username, robot_shortname)
  robot = lookup_robot(robot_username)
  if not robot:
    msg = ('Could not find robot with username: %s' %
           robot_username)
    raise InvalidRobotException(msg)

  # The token lives on the robot's federated login record.
  service = LoginService.get(name='quayrobot')
  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
  return robot, login.service_ident
|
2013-11-20 21:13:03 +00:00
|
|
|
|
2014-03-27 22:33:13 +00:00
|
|
|
def lookup_robot(robot_username):
  """ Returns the robot user with the given full username, or None when no
      such robot exists. """
  joined = User.select().join(FederatedLogin).join(LoginService)
  found = list(joined.where(LoginService.name == 'quayrobot',
                            User.username == robot_username))
  # The previous `len(found) < 1` test was redundant with `not found`.
  if not found or not found[0].robot:
    return None

  return found[0]
|
|
|
|
|
2013-11-20 21:13:03 +00:00
|
|
|
def verify_robot(robot_username, password):
  """ Returns the robot user iff robot_username/password match a robot
      federated login and the robot's owner account is enabled.

  Raises InvalidRobotException on a bad name, wrong credentials, missing
  owner, or disabled owner.
  """
  # result[0] is presumably the owner's username portion of the robot name —
  # confirm against parse_robot_username.
  result = parse_robot_username(robot_username)
  if result is None:
    raise InvalidRobotException('%s is an invalid robot name' % robot_username)

  # Find the matching robot.
  query = (User.select()
           .join(FederatedLogin)
           .join(LoginService)
           .where(FederatedLogin.service_ident == password,
                  LoginService.name == 'quayrobot',
                  User.username == robot_username))

  try:
    robot = query.get()
  except User.DoesNotExist:
    msg = ('Could not find robot with username: %s and supplied password.' %
           robot_username)
    raise InvalidRobotException(msg)

  # Find the owner user and ensure it is not disabled.
  try:
    owner = User.get(User.username == result[0])
  except User.DoesNotExist:
    raise InvalidRobotException('Robot %s owner does not exist' % robot_username)

  if not owner.enabled:
    raise InvalidRobotException('This user has been disabled. Please contact your administrator.')

  return robot
|
2013-11-20 21:13:03 +00:00
|
|
|
|
2014-08-25 21:19:23 +00:00
|
|
|
def regenerate_robot_token(robot_shortname, parent):
  """ Replaces the robot's token with a freshly generated one and returns
      (robot, new_token). Raises InvalidRobotException when missing. """
  robot_username = format_robot_username(parent.username, robot_shortname)

  robot = lookup_robot(robot_username)
  if not robot:
    raise InvalidRobotException('Could not find robot with username: %s' %
                                robot_username)

  # The token is mirrored into both the robot's email field and its
  # federated login identity.
  new_token = random_string_generator(length=64)()
  robot.email = new_token

  service = LoginService.get(name='quayrobot')
  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
  login.service_ident = new_token

  login.save()
  robot.save()

  return robot, new_token
|
2013-11-20 21:13:03 +00:00
|
|
|
|
|
|
|
def delete_robot(robot_username):
  """ Deletes the robot with the given username along with all dependent
      rows. Raises InvalidRobotException when no such robot exists. """
  try:
    found = User.get(username=robot_username, robot=True)
  except User.DoesNotExist:
    raise InvalidRobotException('Could not find robot with username: %s' %
                                robot_username)

  found.delete_instance(recursive=True, delete_nullable=True)
|
|
|
|
|
|
|
|
|
2014-11-18 15:24:48 +00:00
|
|
|
def _list_entity_robots(entity_name):
  """ Return the list of robots for the specified entity. This MUST return a query, not a
      materialized list so that callers can use db_for_update.
  """
  # Robots are named '<entity>+<shortname>', so a prefix LIKE finds them all.
  robot_pattern = entity_name + '+%'
  return (User
          .select()
          .join(FederatedLogin)
          .where(User.robot == True, User.username ** robot_pattern))
|
|
|
|
|
|
|
|
|
2015-04-01 17:56:30 +00:00
|
|
|
class TupleSelector(object):
  """ Helper class for selecting tuples from a peewee query and easily accessing
      them as if they were objects.
  """

  class _TupleWrapper(object):
    """ Read-only view over a single result tuple, addressable by field. """
    def __init__(self, data, fields):
      self._data = data
      self._fields = fields

    def get(self, field):
      # Translate the field into its reference key, then pull that column.
      key = TupleSelector.tuple_reference_key(field)
      return self._data[self._fields.index(key)]

  @classmethod
  def tuple_reference_key(cls, field):
    """ Returns a string key for referencing a field in a TupleSelector. """
    node_type = field._node_type
    if node_type == 'func':
      arg_keys = [cls.tuple_reference_key(arg) for arg in field.arguments]
      return field.name + ','.join(arg_keys)

    if node_type == 'field':
      return field.name + ':' + field.model_class.__name__

    raise Exception('Unknown field type %s in TupleSelector' % node_type)

  def __init__(self, query, fields):
    self._query = query.select(*fields).tuples()
    self._fields = [TupleSelector.tuple_reference_key(field) for field in fields]

  def __iter__(self):
    return self._build_iterator()

  def _build_iterator(self):
    for tuple_data in self._query:
      yield TupleSelector._TupleWrapper(tuple_data, self._fields)
|
2015-04-01 17:56:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
2015-05-08 20:43:07 +00:00
|
|
|
def list_entity_robot_permission_teams(entity_name, include_permissions=False):
  """ Returns a TupleSelector of (username, token) rows for the entity's
      robots, optionally widened with repository and team names when
      include_permissions is True. """
  query = _list_entity_robots(entity_name)
  fields = [User.username, FederatedLogin.service_ident]

  if include_permissions:
    query = (query.join(RepositoryPermission, JOIN_LEFT_OUTER,
                        on=(RepositoryPermission.user == FederatedLogin.user))
             .join(Repository, JOIN_LEFT_OUTER)
             .switch(User)
             .join(TeamMember, JOIN_LEFT_OUTER)
             .join(Team, JOIN_LEFT_OUTER))
    fields.extend([Repository.name, Team.name])

  return TupleSelector(query, fields)
|
|
|
|
|
2013-11-20 21:13:03 +00:00
|
|
|
|
2015-03-31 22:50:43 +00:00
|
|
|
def list_robot_permissions(robot_name):
  """ Returns the repository permissions granted directly to the named
      robot, with user and repository rows preloaded. """
  query = (RepositoryPermission.select(RepositoryPermission, User, Repository)
           .join(Repository)
           .join(Visibility)
           .switch(RepositoryPermission)
           .join(Role)
           .switch(RepositoryPermission)
           .join(User))
  return query.where(User.username == robot_name, User.robot == True)
|
2013-11-20 21:13:03 +00:00
|
|
|
|
2013-11-07 21:33:56 +00:00
|
|
|
def convert_user_to_organization(user, admin_user):
  """ Converts an existing user account into an organization administered by
      admin_user, and returns it. """
  # Flip the account to an organization and disable password login.
  user.organization = True
  user.password_hash = None
  user.save()

  # Clear any federated auth pointing to this user.
  FederatedLogin.delete().where(FederatedLogin.user == user).execute()

  # The admin user becomes the first member of the new 'owners' team.
  owners_team = create_team('owners', user, 'admin')
  add_user_to_team(admin_user, owners_team)

  return user
|
|
|
|
|
2013-11-20 21:13:03 +00:00
|
|
|
|
2013-11-04 21:57:20 +00:00
|
|
|
def create_team(name, org, team_role_name, description=''):
  """ Creates and returns a team under org with the given role name and
      optional description. Team names follow the same rules as usernames. """
  (name_valid, name_issue) = validate_username(name)
  if not name_valid:
    raise InvalidTeamException('Invalid team name %s: %s' % (name, name_issue))

  if not org.organization:
    raise InvalidOrganizationException('User with name %s is not an org.' %
                                       org.username)

  role = TeamRole.get(TeamRole.name == team_role_name)
  return Team.create(name=name, organization=org, role=role,
                     description=description)
|
2013-11-01 23:34:17 +00:00
|
|
|
|
|
|
|
|
2013-11-27 07:29:31 +00:00
|
|
|
def __get_user_admin_teams(org_name, teamname, username):
  """ Returns a query for the admin-role teams in org_name to which username
      belongs. (The teamname parameter is accepted but unused.) """
  Org = User.alias()
  query = (Team.select()
           .join(TeamMember)
           .join(User)
           .switch(Team)
           .join(Org, on=(Org.id == Team.organization))
           .switch(Team)
           .join(TeamRole))
  return query.where(User.username == username,
                     Org.username == org_name,
                     TeamRole.name == 'admin')
|
2013-11-05 23:37:28 +00:00
|
|
|
|
|
|
|
|
|
|
|
def remove_team(org_name, team_name, removed_by_username):
  """ Deletes team_name from org_name. Raises InvalidTeamException when the
      team does not exist and DataModelException when deleting it would strip
      the remover of their last admin team. """
  matching = list(Team.select(Team, TeamRole)
                  .join(User)
                  .switch(Team)
                  .join(TeamRole)
                  .where(User.organization == True,
                         User.username == org_name,
                         Team.name == team_name))
  if not matching:
    raise InvalidTeamException('Team \'%s\' is not a team in org \'%s\'' %
                               (team_name, org_name))

  target_team = matching[0]
  if target_team.role.name == 'admin':
    admin_teams = list(__get_user_admin_teams(org_name, team_name,
                                              removed_by_username))
    if len(admin_teams) <= 1:
      # The team we are trying to remove is the only admin team for this user.
      msg = ('Deleting team \'%s\' would remove all admin from user \'%s\'' %
             (team_name, removed_by_username))
      raise DataModelException(msg)

  target_team.delete_instance(recursive=True, delete_nullable=True)
|
|
|
|
|
2013-11-05 03:58:21 +00:00
|
|
|
|
2014-09-22 23:11:48 +00:00
|
|
|
def add_or_invite_to_team(inviter, team, user=None, email=None, requires_invite=True):
  """ Adds the user directly to the team when no invite is needed, otherwise
      creates a TeamMemberInvite for the user or email address.

  Returns None when the user was added directly, or the created invite.
  Raises InvalidTeamMemberException for a robot from outside the org.
  """
  # If the user is a member of the organization, then we simply add the
  # user directly to the team. Otherwise, an invite is created for the user/email.
  # We return None if the user was directly added and the invite object if the user was invited.
  if user and requires_invite:
    orgname = team.organization.username

    # If the user is part of the organization (or a robot), then no invite is required.
    if user.robot:
      requires_invite = False
      # Robot usernames are '<org>+<shortname>'; a robot from another org
      # may never be added.
      if not user.username.startswith(orgname + '+'):
        raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
                                         'as it is not a member of the organization')
    else:
      # Check whether the user already belongs to some team in the org; any
      # match means no invite is required.
      Org = User.alias()
      found = User.select(User.username)
      found = found.where(User.username == user.username).join(TeamMember).join(Team)
      found = found.join(Org, on=(Org.username == orgname)).limit(1)
      requires_invite = not any(found)

  # If we have a valid user and no invite is required, simply add the user to the team.
  if user and not requires_invite:
    add_user_to_team(user, team)
    return None

  # Email-based invites only apply when no user object was supplied.
  email_address = email if not user else None
  return TeamMemberInvite.create(user=user, email=email_address, team=team, inviter=inviter)
|
2014-08-15 21:47:43 +00:00
|
|
|
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
def add_user_to_team(user, team):
  """ Adds the user to the team, returning the new TeamMember row.

  Raises UserAlreadyInTeam when the membership already exists.
  """
  try:
    return TeamMember.create(user=user, team=team)
  except IntegrityError:
    # Previously any Exception was mapped to UserAlreadyInTeam, masking
    # unrelated database errors; only a constraint violation means a
    # duplicate membership.
    raise UserAlreadyInTeam('User \'%s\' is already a member of team \'%s\'' %
                            (user.username, team.name))
|
2013-11-01 23:34:17 +00:00
|
|
|
|
|
|
|
|
2013-11-05 23:37:28 +00:00
|
|
|
def remove_user_from_team(org_name, team_name, username, removed_by_username):
  """ Removes username from team_name in org_name.

  Raises DataModelException when the membership does not exist, or when the
  user attempts to remove themselves from their only admin team.
  """
  Org = User.alias()
  joined = TeamMember.select().join(User).switch(TeamMember).join(Team)
  with_role = joined.join(TeamRole)
  with_org = with_role.switch(Team).join(Org,
                                         on=(Org.id == Team.organization))
  found = list(with_org.where(User.username == username,
                              Org.username == org_name,
                              Team.name == team_name))

  if not found:
    raise DataModelException('User %s does not belong to team %s' %
                             (username, team_name))

  if username == removed_by_username:
    # Self-removal: ensure the user retains at least one admin team.
    admin_team_query = __get_user_admin_teams(org_name, team_name, username)
    admin_team_names = {team.name for team in admin_team_query}
    if team_name in admin_team_names and len(admin_team_names) <= 1:
      msg = 'User cannot remove themselves from their only admin team.'
      raise DataModelException(msg)

  user_in_team = found[0]
  user_in_team.delete_instance()
|
2013-11-04 20:47:27 +00:00
|
|
|
|
|
|
|
|
2013-11-05 03:58:21 +00:00
|
|
|
def get_team_org_role(team):
  """ Returns the TeamRole row assigned to the given team. """
  role_id = team.role.id
  return TeamRole.get(TeamRole.id == role_id)
|
|
|
|
|
2013-11-05 23:37:28 +00:00
|
|
|
|
|
|
|
def set_team_org_permission(team, team_role_name, set_by_username):
  """ Changes team's role to team_role_name and returns the saved team.

  Raises DataModelException when demoting an admin team would leave
  set_by_username without any admin team on the organization.
  """
  if team.role.name == 'admin' and team_role_name != 'admin':
    # We need to make sure we're not removing the users only admin role
    user_admin_teams = __get_user_admin_teams(team.organization.username,
                                              team.name, set_by_username)
    admin_team_set = {admin_team.name for admin_team in user_admin_teams}
    if team.name in admin_team_set and len(admin_team_set) <= 1:
      msg = (('Cannot remove admin from team \'%s\' because calling user ' +
              'would no longer have admin on org \'%s\'') %
             (team.name, team.organization.username))
      raise DataModelException(msg)

  new_role = TeamRole.get(TeamRole.name == team_role_name)
  team.role = new_role
  team.save()
  return team
|
2013-11-01 23:34:17 +00:00
|
|
|
|
|
|
|
|
2014-08-11 22:25:01 +00:00
|
|
|
def create_federated_user(username, email, service_name, service_id,
                          set_password_notification, metadata=None):
  """ Creates a pre-verified user backed by an external login service.

  metadata, when given, is a dict stored as JSON on the federated login row.
  Optionally creates a 'password_required' notification for the new user.
  """
  # Default changed from a shared mutable {} to None (classic mutable-default
  # pitfall); behavior is unchanged since the dict is only serialized.
  if metadata is None:
    metadata = {}

  if not is_create_user_allowed():
    raise TooManyUsersException()

  new_user = _create_user(username, email)
  new_user.verified = True
  new_user.save()

  service = LoginService.get(LoginService.name == service_name)
  FederatedLogin.create(user=new_user, service=service,
                        service_ident=service_id,
                        metadata_json=json.dumps(metadata))

  if set_password_notification:
    create_notification('password_required', new_user)

  return new_user
|
2013-10-10 03:00:34 +00:00
|
|
|
|
|
|
|
|
2014-08-11 22:25:01 +00:00
|
|
|
def attach_federated_login(user, service_name, service_id, metadata=None):
  """ Links an external login identity to an existing user and returns the
      user. metadata, when given, is stored as JSON on the login row. """
  # Avoid the mutable default argument `metadata={}`; semantics unchanged.
  if metadata is None:
    metadata = {}

  service = LoginService.get(LoginService.name == service_name)
  FederatedLogin.create(user=user, service=service, service_ident=service_id,
                        metadata_json=json.dumps(metadata))
  return user
|
|
|
|
|
|
|
|
|
2013-10-10 03:00:34 +00:00
|
|
|
def verify_federated_login(service_name, service_id):
  """ Returns the user bound to (service_name, service_id), or None when no
      such federated login exists. """
  query = (FederatedLogin
           .select(FederatedLogin, User)
           .join(LoginService)
           .switch(FederatedLogin).join(User)
           .where(FederatedLogin.service_ident == service_id,
                  LoginService.name == service_name))
  try:
    return query.get().user
  except FederatedLogin.DoesNotExist:
    return None
|
2013-10-10 03:00:34 +00:00
|
|
|
|
|
|
|
|
2014-01-14 20:23:44 +00:00
|
|
|
def list_federated_logins(user):
  """ Returns the user's federated logins (service ident, service name and
      metadata), excluding the internal robot service. """
  query = (FederatedLogin
           .select(FederatedLogin.service_ident,
                   LoginService.name, FederatedLogin.metadata_json)
           .join(LoginService))
  return query.where(LoginService.name != 'quayrobot',
                     FederatedLogin.user == user)
|
|
|
|
|
|
|
|
|
2015-05-20 20:37:09 +00:00
|
|
|
def lookup_federated_login(user, service_name):
  """ Returns the user's federated login for service_name, or None. """
  matching = list_federated_logins(user).where(LoginService.name == service_name)
  try:
    return matching.get()
  except FederatedLogin.DoesNotExist:
    return None
|
|
|
|
|
2014-01-17 22:04:05 +00:00
|
|
|
def create_confirm_email_code(user, new_email=None):
  """ Creates an email-confirmation code for the user. When new_email is
      given it is validated and recorded as the pending address change. """
  if new_email and not validate_email(new_email):
    raise InvalidEmailAddressException('Invalid email address: %s' %
                                       new_email)

  return EmailConfirmation.create(user=user, email_confirm=True,
                                  new_email=new_email)
|
2013-09-20 15:55:44 +00:00
|
|
|
|
|
|
|
|
2013-09-27 23:55:04 +00:00
|
|
|
def confirm_user_email(code):
  """ Consumes an email confirmation code: marks the user verified, applies
      any pending email change, and deletes the code.

  Returns (user, new_email, old_email); both emails are None when no
  address change was requested.
  """
  try:
    code = EmailConfirmation.get(EmailConfirmation.code == code,
                                 EmailConfirmation.email_confirm == True)
  except EmailConfirmation.DoesNotExist:
    raise DataModelException('Invalid email confirmation code.')

  user = code.user
  user.verified = True

  old_email = None
  new_email = code.new_email
  # NOTE(review): old_email is always None at this point, so the comparison
  # below only guards against a missing new_email — confirm whether it was
  # intended to compare against user.email instead.
  if new_email and new_email != old_email:
    if find_user_by_email(new_email):
      raise DataModelException('E-mail address already used.')

    old_email = user.email
    user.email = new_email

  user.save()

  code.delete_instance()

  return user, new_email, old_email
|
2013-09-20 15:55:44 +00:00
|
|
|
|
|
|
|
|
2013-10-14 21:50:07 +00:00
|
|
|
def create_reset_password_email_code(email):
  """ Creates and returns a password-reset confirmation code for the user
      owning the given email address.

  Raises InvalidEmailAddressException when the address is unknown or
  belongs to an organization (organizations have no passwords).
  """
  try:
    user = User.get(User.email == email)
  except User.DoesNotExist:
    raise InvalidEmailAddressException('Email address was not found.')

  if user.organization:
    raise InvalidEmailAddressException('Organizations can not have passwords.')

  return EmailConfirmation.create(user=user, pw_reset=True)
|
|
|
|
|
|
|
|
|
|
|
|
def validate_reset_code(code):
  """ Redeems a password-reset code, returning its user or None when the
      code is unknown. A matched code is deleted (single use). """
  try:
    confirmation = EmailConfirmation.get(EmailConfirmation.code == code,
                                         EmailConfirmation.pw_reset == True)
  except EmailConfirmation.DoesNotExist:
    return None

  owner = confirmation.user
  confirmation.delete_instance()
  return owner
|
|
|
|
|
|
|
|
|
2014-01-17 22:04:05 +00:00
|
|
|
def find_user_by_email(email):
  """ Returns the user with the given email address, or None. """
  try:
    found = User.get(User.email == email)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2015-05-26 21:47:33 +00:00
|
|
|
def get_nonrobot_user(username):
  """ Returns the human user (neither org nor robot) with the given
      username, or None. """
  try:
    found = User.get(User.username == username, User.organization == False,
                     User.robot == False)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2013-09-23 16:37:40 +00:00
|
|
|
def get_user(username):
  """ Returns the non-organization user with the given username, or None. """
  try:
    found = User.get(User.username == username, User.organization == False)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2014-10-22 23:01:56 +00:00
|
|
|
def get_namespace_user(username):
  """ Returns the user row of any kind with the given username, or None. """
  try:
    found = User.get(User.username == username)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2014-09-18 17:20:32 +00:00
|
|
|
def get_user_or_org(username):
  """ Returns the non-robot user or organization with the given username,
      or None. """
  try:
    found = User.get(User.username == username, User.robot == False)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2014-09-22 21:27:02 +00:00
|
|
|
def get_user_by_id(user_db_id):
  """ Returns the non-organization user with the given database id, or
      None. """
  try:
    found = User.get(User.id == user_db_id, User.organization == False)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2014-10-01 18:23:15 +00:00
|
|
|
def get_namespace_by_user_id(namespace_user_db_id):
  """ Returns the username of the non-robot user with the given id, raising
      InvalidUsernameException when missing. """
  try:
    namespace_user = User.get(User.id == namespace_user_db_id, User.robot == False)
  except User.DoesNotExist:
    raise InvalidUsernameException('User with id does not exist: %s' % namespace_user_db_id)
  return namespace_user.username
|
|
|
|
|
|
|
|
|
2014-11-11 22:22:37 +00:00
|
|
|
def get_user_by_uuid(user_uuid):
  """ Returns the non-organization user with the given uuid, or None. """
  try:
    found = User.get(User.uuid == user_uuid, User.organization == False)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
|
|
|
|
2013-11-15 19:42:31 +00:00
|
|
|
def get_user_or_org_by_customer_id(customer_id):
  """ Returns the user or org with the given Stripe customer id, or None. """
  try:
    found = User.get(User.stripe_id == customer_id)
  except User.DoesNotExist:
    return None
  return found
|
|
|
|
|
2015-04-09 16:57:20 +00:00
|
|
|
def get_matching_user_namespaces(namespace_prefix, username, limit=10):
  """ Returns up to `limit` distinct namespace users that own repositories
      visible to `username` and whose namespace name starts with the prefix.
  """
  query = (Repository
           .select()
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .switch(Repository)
           .join(Visibility)
           .switch(Repository)
           .join(RepositoryPermission, JOIN_LEFT_OUTER)
           .where(Namespace.username ** (namespace_prefix + '%'))
           .group_by(Repository.namespace_user, Repository))

  namespaces = {}
  for repo in _filter_to_repos_for_user(query, username):
    namespace_name = repo.namespace_user.username
    # Idiomatic `not in` (was `not x in y`); the dict size replaces the
    # original hand-maintained counter, which only advanced on new entries.
    if namespace_name not in namespaces:
      namespaces[namespace_name] = repo.namespace_user
      if len(namespaces) >= limit:
        break

  return namespaces.values()
|
2015-04-06 23:17:18 +00:00
|
|
|
|
2015-04-07 22:33:43 +00:00
|
|
|
def get_matching_user_teams(team_prefix, user, limit=10):
  """ Returns up to `limit` distinct teams of which the user is a member and
      whose name starts with the given prefix.
  """
  matching = (Team.select()
              .join(User)
              .switch(Team)
              .join(TeamMember)
              .where(TeamMember.user == user, Team.name ** (team_prefix + '%'))
              .distinct(Team.id)
              .limit(limit))
  return matching
|
|
|
|
|
2015-04-09 16:57:20 +00:00
|
|
|
|
|
|
|
def get_matching_robots(name_prefix, username, limit=10):
  """ Returns up to `limit` robot accounts whose short name matches the prefix and
      which the named user may see: robots in the user's own namespace, plus robots
      in any organization where the user belongs to an 'admin'-role team.
  """
  # Organizations in which the user is a member of a team with the 'admin' role.
  admined_orgs = (get_user_organizations(username)
                  .switch(Team)
                  .join(TeamRole)
                  .where(TeamRole.name == 'admin'))

  # Seeded with False; `False | <peewee expression>` presumably resolves via the
  # expression's reflected OR operator so the chain below becomes a SQL OR.
  # NOTE(review): verify peewee handles the bool seed — do not restructure casually.
  prefix_checks = False

  # Robot usernames are formatted '<namespace>+<shortname>'.
  for org in admined_orgs:
    prefix_checks = prefix_checks | (User.username ** (org.username + '+' + name_prefix + '%'))

  # Always include robots under the user's own namespace.
  prefix_checks = prefix_checks | (User.username ** (username + '+' + name_prefix + '%'))

  return User.select().where(prefix_checks).limit(limit)
|
|
|
|
|
|
|
|
|
2015-04-07 22:33:43 +00:00
|
|
|
def get_matching_admined_teams(team_prefix, user, limit=10):
  """ Returns up to `limit` distinct teams whose name matches the prefix, within
      organizations where the user is a member of an 'admin'-role team.
  """
  admined_orgs = (get_user_organizations(user.username)
                  .switch(Team)
                  .join(TeamRole)
                  .where(TeamRole.name == 'admin'))

  matching = (Team.select()
              .join(User)
              .switch(Team)
              .join(TeamMember)
              .where(Team.name ** (team_prefix + '%'), Team.organization << (admined_orgs))
              .distinct(Team.id)
              .limit(limit))
  return matching
|
|
|
|
|
|
|
|
|
2014-11-11 22:22:37 +00:00
|
|
|
def get_matching_teams(team_prefix, organization):
  """ Returns up to 10 teams of the organization whose name starts with the prefix. """
  matching = Team.select().where(Team.name ** (team_prefix + '%'),
                                 Team.organization == organization)
  return matching.limit(10)
|
2013-11-02 01:48:10 +00:00
|
|
|
|
|
|
|
|
2013-11-20 23:23:59 +00:00
|
|
|
def get_matching_users(username_prefix, robot_namespace=None,
                       organization=None):
  """ Returns a generator of lightweight results (username, email, robot flag and,
      when `organization` is given, org-membership flag) for up to 10 users whose
      username matches the prefix. When `robot_namespace` is given, robots under
      that namespace matching the prefix are included as well.
  """
  # Non-robot, non-org users whose username starts with the prefix.
  direct_user_query = (User.username ** (username_prefix + '%') &
                       (User.organization == False) & (User.robot == False))

  if robot_namespace:
    # Also match robots named '<robot_namespace>+<prefix>...'.
    robot_prefix = format_robot_username(robot_namespace, username_prefix)
    direct_user_query = (direct_user_query |
                         (User.username ** (robot_prefix + '%') &
                          (User.robot == True)))

  query = (User
           .select(User.username, User.email, User.robot)
           .group_by(User.username, User.email, User.robot)
           .where(direct_user_query))

  if organization:
    # Re-select with an extra aggregate over Team.id: non-NULL iff the user is a
    # member of some team in the given organization (left-outer join).
    query = (query
             .select(User.username, User.email, User.robot, fn.Sum(Team.id))
             .join(TeamMember, JOIN_LEFT_OUTER)
             .join(Team, JOIN_LEFT_OUTER, on=((Team.id == TeamMember.team) &
                                              (Team.organization == organization))))


  # Row adapter over the positional tuple produced by query.tuples().
  class MatchingUserResult(object):
    def __init__(self, *args):
      self.username = args[0]
      self.email = args[1]
      self.robot = args[2]

      if organization:
        # Fourth column is the Team.id aggregate; non-NULL means org member.
        self.is_org_member = (args[3] != None)
      else:
        self.is_org_member = None


  return (MatchingUserResult(*args) for args in query.tuples().limit(10))
|
2013-09-28 00:03:07 +00:00
|
|
|
|
2013-09-27 23:21:54 +00:00
|
|
|
|
2014-01-16 21:14:38 +00:00
|
|
|
def verify_user(username_or_email, password):
  """ Verifies the given credentials against a user looked up by username or email.

      Returns the User on success and None on failure; raises
      TooManyLoginAttemptsException while the exponential-backoff window from
      previous failed attempts is still open. Failed attempts increment the
      user's counter; a success resets it.
  """
  # Make sure we didn't get any unicode for the username.
  # (str() on a non-ASCII unicode raises UnicodeEncodeError, a ValueError
  # subclass, so such input is rejected here.)
  try:
    str(username_or_email)
  except ValueError:
    return None

  try:
    fetched = User.get((User.username == username_or_email) |
                       (User.email == username_or_email))
  except User.DoesNotExist:
    return None

  now = datetime.utcnow()

  # Enforce exponential backoff after repeated failures before checking the
  # password at all.
  if fetched.invalid_login_attempts > 0:
    can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
                                       fetched.last_invalid_login)

    if can_retry_at > now:
      retry_after = can_retry_at - now
      raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())

  # bcrypt: hashing the candidate with the stored hash as salt reproduces the
  # stored hash iff the password matches.
  if (fetched.password_hash and
      hash_password(password, fetched.password_hash) == fetched.password_hash):
    # Successful login clears any failure count.
    if fetched.invalid_login_attempts > 0:
      fetched.invalid_login_attempts = 0
      fetched.save()

    return fetched

  # Record the failure for future backoff calculations.
  fetched.invalid_login_attempts += 1
  fetched.last_invalid_login = now
  fetched.save()

  # We weren't able to authorize the user
  return None
|
|
|
|
|
2013-10-31 22:17:26 +00:00
|
|
|
|
|
|
|
def get_user_organizations(username):
  """ Returns the organizations in which the named user is a member of any team. """
  UserAlias = User.alias()
  member_orgs = (User.select()
                 .distinct()
                 .join(Team)
                 .join(TeamMember)
                 .join(UserAlias, on=(UserAlias.id == TeamMember.user)))
  return member_orgs.where(User.organization == True,
                           UserAlias.username == username)
|
2013-10-31 22:17:26 +00:00
|
|
|
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
def get_organization(name):
  """ Returns the organization with the given name.

      Raises InvalidOrganizationException if it does not exist.
  """
  try:
    return User.get(username=name, organization=True)
  except User.DoesNotExist:
    raise InvalidOrganizationException('Organization does not exist: %s' % name)
|
2013-11-04 19:56:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_organization_team(orgname, teamname):
  """ Returns the team with the given name under the given organization.

      Raises InvalidTeamException if no such team exists.
  """
  joined = Team.select().join(User)
  query = joined.where(Team.name == teamname, User.organization == True,
                       User.username == orgname).limit(1)
  result = list(query)
  if not result:
    # Bug fix: the original passed orgname/teamname as extra constructor
    # arguments, so the '%s/%s' placeholders were never interpolated.
    raise InvalidTeamException('Team does not exist: %s/%s' % (orgname, teamname))

  return result[0]
|
|
|
|
|
|
|
|
|
2013-12-07 00:25:27 +00:00
|
|
|
def get_organization_members_with_teams(organization, membername = None):
  """ Returns TeamMember rows (annotated with their Team and User) for the
      organization, optionally restricted to a single member username.
  """
  members = (TeamMember.select()
             .annotate(Team)
             .annotate(User)
             .where(Team.organization == organization))
  if membername:
    members = members.where(User.username == membername)
  return members
|
|
|
|
|
2013-11-04 19:56:54 +00:00
|
|
|
def get_organization_team_members(teamid):
  """ Returns the users who are members of the team with the given id. """
  return (User.select()
          .join(TeamMember)
          .join(Team)
          .where(Team.id == teamid))
|
2013-10-31 22:17:26 +00:00
|
|
|
|
2014-08-15 21:47:43 +00:00
|
|
|
def get_organization_team_member_invites(teamid):
  """ Returns the outstanding member invitations for the team with the given id. """
  return (TeamMemberInvite.select()
          .join(Team)
          .join(User)
          .where(Team.id == teamid))
|
2013-11-04 21:39:29 +00:00
|
|
|
|
2013-11-04 23:52:38 +00:00
|
|
|
def get_organization_member_set(orgname):
  """ Returns the set of usernames of all members of the named organization. """
  Org = User.alias()
  org_users = (User.select(User.username)
               .join(TeamMember)
               .join(Team)
               .join(Org, on=(Org.id == Team.organization))
               .where(Org.username == orgname)
               .distinct())
  return set(member.username for member in org_users)
|
2013-11-04 23:52:38 +00:00
|
|
|
|
2013-11-04 21:39:29 +00:00
|
|
|
|
|
|
|
def get_teams_within_org(organization):
  """ Returns all teams belonging to the given organization. """
  teams = Team.select()
  return teams.where(Team.organization == organization)
|
2013-11-04 21:39:29 +00:00
|
|
|
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
def get_user_teams_within_org(username, organization):
  """ Returns the organization's teams of which the named user is a member. """
  return (Team.select()
          .join(TeamMember)
          .join(User)
          .where(Team.organization == organization,
                 User.username == username))
|
2013-10-31 22:17:26 +00:00
|
|
|
|
2013-09-20 15:55:44 +00:00
|
|
|
|
2015-06-09 21:58:57 +00:00
|
|
|
def get_visible_repositories(username=None, include_public=True, page=None,
                             limit=None, namespace=None, namespace_only=False,
                             include_actions=False, include_latest_tag=False):
  """ Returns a TupleSelector over the repositories visible to the given user
      (optionally including public repos), with optional pagination, namespace
      restriction, recent action counts and latest-tag timestamps.
  """
  # Base tuple fields; extra aggregates are appended conditionally below and
  # must stay aligned with the group_by clauses added for them.
  fields = [Repository.name, Repository.id, Repository.description, Visibility.name,
            Namespace.username]

  if include_actions:
    fields.append(fn.Max(RepositoryActionCount.count))

  if include_latest_tag:
    fields.append(fn.Max(RepositoryTag.lifetime_start_ts))

  query = _visible_repository_query(username=username, include_public=include_public, page=page,
                                    limit=limit, namespace=namespace,
                                    select_models=fields)

  if limit:
    query = query.limit(limit)

  if namespace and namespace_only:
    query = query.where(Namespace.username == namespace)

  if include_actions:
    # Filter the join to recent entries only.
    last_week = datetime.now() - timedelta(weeks=1)
    join_query = ((RepositoryActionCount.repository == Repository.id) &
                  (RepositoryActionCount.date >= last_week))

    # Left-outer so repositories with no recent actions are still returned.
    query = (query.switch(Repository)
             .join(RepositoryActionCount, JOIN_LEFT_OUTER, on=join_query)
             .group_by(RepositoryActionCount.repository, Repository.name, Repository.id,
                       Repository.description, Visibility.name, Namespace.username))

  if include_latest_tag:
    # Left-outer so untagged repositories are still returned.
    query = (query.switch(Repository)
             .join(RepositoryTag, JOIN_LEFT_OUTER)
             .group_by(RepositoryTag.repository, Repository.name, Repository.id,
                       Repository.description, Visibility.name, Namespace.username))

  return TupleSelector(query, fields)
|
2013-10-02 17:29:18 +00:00
|
|
|
|
2014-02-16 22:38:47 +00:00
|
|
|
|
|
|
|
def _visible_repository_query(username=None, include_public=True, limit=None,
                              page=None, namespace=None, select_models=None):
  """ Returns the base query over repositories visible to the given username,
      optionally including public repositories and paginated.

      select_models, when provided, lists the fields/models to select explicitly.
      Fix: the default was a mutable `[]` (shared across calls); it is now None
      and normalized locally.
  """
  select_models = select_models or []
  query = (Repository
           .select(*select_models)  # MySQL/RDS complains if there are selected models for counts.
           .distinct()
           .join(Visibility)
           .switch(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .switch(Repository)
           .join(RepositoryPermission, JOIN_LEFT_OUTER))

  query = _filter_to_repos_for_user(query, username, namespace, include_public)

  if page:
    query = query.paginate(page, limit)
  elif limit:
    query = query.limit(limit)

  return query
|
|
|
|
|
|
|
|
|
2014-12-29 19:11:46 +00:00
|
|
|
def _filter_to_repos_for_user(query, username=None, namespace=None, include_public=True):
  """ Restricts the given repository query to repositories the named user may see:
      direct permission, permission via team membership, or membership in an
      'admin'-role team of the owning organization; plus (optionally) all public
      repositories. The incoming query must already have RepositoryPermission
      joined (left-outer), as done by _visible_repository_query.
  """
  # No user and no public repos: return a query that matches nothing.
  if not include_public and not username:
    return Repository.select().where(Repository.id == '-1')

  where_clause = None
  if username:
    UserThroughTeam = User.alias()
    Org = User.alias()
    AdminTeam = Team.alias()
    AdminTeamMember = TeamMember.alias()
    AdminUser = User.alias()

    # All joins are left-outer so each of the three grant paths is optional.
    query = (query
             .switch(RepositoryPermission)
             .join(User, JOIN_LEFT_OUTER)
             .switch(RepositoryPermission)
             .join(Team, JOIN_LEFT_OUTER)
             .join(TeamMember, JOIN_LEFT_OUTER)
             .join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id == TeamMember.user))
             .switch(Repository)
             .join(Org, JOIN_LEFT_OUTER, on=(Repository.namespace_user == Org.id))
             .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
             .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
             .switch(AdminTeam)
             .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
             .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id)))

    # Direct grant | grant via team | org-admin of the owning namespace.
    where_clause = ((User.username == username) | (UserThroughTeam.username == username) |
                    ((AdminUser.username == username) & (TeamRole.name == 'admin')))

  # NOTE(review): reached with where_clause=None when username is None; this
  # appears to rely on include_public being True in that case — confirm callers.
  if namespace:
    where_clause = where_clause & (Namespace.username == namespace)

  # TODO(jschorr, jake): Figure out why the old join on Visibility was so darn slow and
  # remove this hack.
  if include_public:
    new_clause = (Repository.visibility == _get_public_repo_visibility())
    if where_clause:
      where_clause = where_clause | new_clause
    else:
      where_clause = new_clause

  return query.where(where_clause)
|
2013-09-28 04:05:32 +00:00
|
|
|
|
2013-10-08 15:29:42 +00:00
|
|
|
|
2014-11-12 19:51:24 +00:00
|
|
|
# Module-level cache for the 'public' Visibility row.
_public_repo_visibility_cache = None

def _get_public_repo_visibility():
  """ Returns the 'public' Visibility row, cached at module level after the
      first lookup.
  """
  global _public_repo_visibility_cache

  if _public_repo_visibility_cache is None:
    _public_repo_visibility_cache = Visibility.get(name='public')

  return _public_repo_visibility_cache
|
|
|
|
|
|
|
|
|
2015-04-10 19:27:37 +00:00
|
|
|
def get_sorted_matching_repositories(prefix, only_public, checker, limit=10):
  """ Returns repositories matching the given prefix string and passing the given checker
      function.

      Four passes run in precedence order (repo name before namespace; recently
      active repos before inactive ones), accumulating into `results` until
      `limit` is reached. `existing_ids` excludes already-found repos from later
      passes.
  """
  last_week = datetime.now() - timedelta(weeks=1)
  results = []
  existing_ids = []

  def get_search_results(search_clause, with_count=False):
    # Closure over results/existing_ids: each call appends and records ids.
    if len(results) >= limit:
      return

    select_items = [Repository, Namespace]
    if with_count:
      select_items.append(fn.Sum(RepositoryActionCount.count).alias('count'))

    query = (Repository.select(*select_items)
             .join(Namespace, JOIN_LEFT_OUTER, on=(Namespace.id == Repository.namespace_user))
             .switch(Repository)
             .where(search_clause)
             .group_by(Repository, Namespace))

    if only_public:
      query = query.where(Repository.visibility == _get_public_repo_visibility())

    # Skip repositories already found by an earlier (higher-precedence) pass.
    if existing_ids:
      query = query.where(~(Repository.id << existing_ids))

    if with_count:
      # Inner join on action counts: only repos active in the last week,
      # ordered most-active first.
      query = (query.switch(Repository)
               .join(RepositoryActionCount)
               .where(RepositoryActionCount.date >= last_week)
               .order_by(fn.Sum(RepositoryActionCount.count).desc()))

    for result in query:
      if len(results) >= limit:
        return results

      # Note: We compare IDs here, instead of objects, because calling .visibility on the
      # Repository will kick off a new SQL query to retrieve that visibility enum value. We don't
      # join the visibility table in SQL, as well, because it is ungodly slow in MySQL :-/
      result.is_public = result.visibility_id == _get_public_repo_visibility().id
      result.count = result.count if with_count else 0

      if not checker(result):
        continue

      results.append(result)
      existing_ids.append(result.id)

  # For performance reasons, we conduct the repo name and repo namespace searches on their
  # own. This also affords us the ability to give higher precedence to repository names matching
  # over namespaces, which is semantically correct.
  get_search_results(Repository.name ** (prefix + '%'), with_count=True)
  get_search_results(Repository.name ** (prefix + '%'), with_count=False)

  get_search_results(Namespace.username ** (prefix + '%'), with_count=True)
  get_search_results(Namespace.username ** (prefix + '%'), with_count=False)

  return results
|
|
|
|
|
|
|
|
|
|
|
|
def get_matching_repositories(repo_term, username=None, limit=10, include_public=True):
  """ Returns up to `limit` repositories matching the search term, restricted to
      those visible to the given username (plus public ones when include_public).
  """
  visible = _visible_repository_query(username, include_public=include_public)

  if repo_term.find('/') > 0:
    # Handle the case where the user has already entered a namespace path:
    # split and require BOTH parts to match (AND).
    pieces = repo_term.split('/', 1)
    namespace_term = '/'.join(pieces[:-1])
    name_term = pieces[-1]
    search_clauses = (Repository.name ** ('%' + name_term + '%') &
                      Namespace.username ** ('%' + namespace_term + '%'))
  else:
    # Otherwise match the term against EITHER the repo name or the namespace.
    search_clauses = (Repository.name ** ('%' + repo_term + '%') |
                      Namespace.username ** ('%' + repo_term + '%'))

  return visible.where(search_clauses).limit(limit)
|
2013-09-27 23:21:54 +00:00
|
|
|
|
|
|
|
|
2013-09-20 15:55:44 +00:00
|
|
|
def change_password(user, new_password):
  """ Validates and sets a new password for the user, clearing the failed-login
      counter and rotating the user's UUID.

      Raises InvalidPasswordException when the password fails validation.
  """
  if not validate_password(new_password):
    raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)

  user.password_hash = hash_password(new_password)
  user.invalid_login_attempts = 0
  user.uuid = str(uuid4())
  user.save()

  # Remove any password required notifications for the user.
  delete_notifications_by_kind(user, 'password_required')
|
|
|
|
|
2013-09-20 15:55:44 +00:00
|
|
|
|
2015-01-30 21:32:13 +00:00
|
|
|
def change_username(user_id, new_username):
  """ Renames the user with the given id to new_username, renaming all of the
      user's robot accounts in the same transaction, and returns the updated user.

      Raises InvalidUsernameException if the new username fails validation.
  """
  (username_valid, username_issue) = validate_username(new_username)
  if not username_valid:
    raise InvalidUsernameException('Invalid username %s: %s' % (new_username, username_issue))

  # All renames happen atomically so robots never point at a stale namespace.
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Reload the user for update
    user = db_for_update(User.select().where(User.id == user_id)).get()

    # Rename the robots
    # (robot usernames embed the owning namespace as '<namespace>+<shortname>').
    for robot in db_for_update(_list_entity_robots(user.username)):
      _, robot_shortname = parse_robot_username(robot.username)
      new_robot_name = format_robot_username(new_username, robot_shortname)
      robot.username = new_robot_name
      robot.save()

    # Rename the user
    user.username = new_username
    user.save()
    return user
|
2014-10-01 18:23:15 +00:00
|
|
|
|
|
|
|
|
2013-11-15 19:42:31 +00:00
|
|
|
def change_invoice_email(user, invoice_email):
  """ Sets and persists whether invoices should be e-mailed to the user. """
  user.invoice_email = invoice_email
  user.save()
|
|
|
|
|
|
|
|
|
2015-02-12 19:37:11 +00:00
|
|
|
def change_user_tag_expiration(user, tag_expiration_s):
  """ Sets and persists the grace period (in seconds) before the user's removed
      tags expire.
  """
  user.removed_tag_expiration_s = tag_expiration_s
  user.save()
|
|
|
|
|
|
|
|
|
2014-09-22 23:11:48 +00:00
|
|
|
def update_email(user, new_email, auto_verify=False):
  """ Changes the user's e-mail address, marking it verified iff auto_verify.

      Raises DataModelException when the address is already in use.
  """
  user.email = new_email
  user.verified = auto_verify
  try:
    user.save()
  except IntegrityError:
    raise DataModelException('E-mail address already used')
|
2013-09-20 15:55:44 +00:00
|
|
|
|
|
|
|
|
2013-09-27 17:24:07 +00:00
|
|
|
def get_all_user_permissions(user):
  """ Returns all repository permissions held by the user, whether granted
      directly or via membership in a team, as the union of two queries.
  """
  UserThroughTeam = User.alias()

  # Shared core: permission with role, repository and namespace pre-selected so
  # consuming results does not trigger follow-up queries.
  base_query = (RepositoryPermission
                .select(RepositoryPermission, Role, Repository, Namespace)
                .join(Role)
                .switch(RepositoryPermission)
                .join(Repository)
                .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                .switch(RepositoryPermission))

  # Permissions granted directly to the user.
  direct = (base_query
            .clone()
            .join(User)
            .where(User.id == user))

  # Permissions granted to a team the user belongs to.
  team = (base_query
          .clone()
          .join(Team)
          .join(TeamMember)
          .join(UserThroughTeam, on=(UserThroughTeam.id == TeamMember.user))
          .where(UserThroughTeam.id == user))

  # peewee compound select (UNION) of the two grant paths.
  return direct | team
|
2013-11-04 20:42:08 +00:00
|
|
|
|
|
|
|
|
2014-01-21 19:18:20 +00:00
|
|
|
def delete_prototype_permission(org, uid):
  """ Deletes the permission prototype with the given uuid under the org and
      returns the deleted row, or None if no such prototype exists.
  """
  proto = get_prototype_permission(org, uid)
  if proto is None:
    return None

  proto.delete_instance()
  return proto
|
|
|
|
|
|
|
|
|
|
|
|
def get_prototype_permission(org, uid):
  """ Returns the permission prototype with the given uuid under the org, or None. """
  query = PermissionPrototype.select().where(PermissionPrototype.org == org,
                                             PermissionPrototype.uuid == uid)
  try:
    return query.get()
  except PermissionPrototype.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
|
|
|
def get_prototype_permissions(org):
  """ Returns all permission prototypes defined for the organization, with the
      activating user, delegate user, delegate team and role joined in.
      All joins are left-outer because each of these references may be absent.
  """
  # Two User aliases are needed since both the activating and delegate sides
  # reference the same table.
  ActivatingUser = User.alias()
  DelegateUser = User.alias()
  query = (PermissionPrototype
           .select()
           .where(PermissionPrototype.org == org)
           .join(ActivatingUser, JOIN_LEFT_OUTER,
                 on=(ActivatingUser.id == PermissionPrototype.activating_user))
           .join(DelegateUser, JOIN_LEFT_OUTER,
                 on=(DelegateUser.id == PermissionPrototype.delegate_user))
           .join(Team, JOIN_LEFT_OUTER,
                 on=(Team.id == PermissionPrototype.delegate_team))
           .join(Role, JOIN_LEFT_OUTER, on=(Role.id == PermissionPrototype.role)))
  return query
|
2014-01-21 19:18:20 +00:00
|
|
|
|
|
|
|
|
|
|
|
def update_prototype_permission(org, uid, role_name):
  """ Updates the role of the permission prototype with the given uuid under the
      org and returns it, or None if the prototype does not exist.
  """
  proto = get_prototype_permission(org, uid)
  if proto is None:
    return None

  proto.role = Role.get(Role.name == role_name)
  proto.save()
  return proto
|
|
|
|
|
|
|
|
|
2014-01-21 22:32:49 +00:00
|
|
|
def add_prototype_permission(org, role_name, activating_user,
                             delegate_user=None, delegate_team=None):
  """ Creates and returns a permission prototype for the organization, granting
      the named role to the delegate user or team.
  """
  role = Role.get(Role.name == role_name)
  return PermissionPrototype.create(org=org, role=role,
                                    activating_user=activating_user,
                                    delegate_user=delegate_user,
                                    delegate_team=delegate_team)
|
|
|
|
|
|
|
|
|
2013-11-04 20:42:08 +00:00
|
|
|
def get_org_wide_permissions(user):
  """ Returns the teams (with their organization and role pre-selected) of which
      the given user is a member, across all organizations.
  """
  Org = User.alias()
  query = (Team.select(Team, Org, TeamRole)
           .join(TeamRole)
           .switch(Team)
           .join(Org, on=(Team.organization == Org.id))
           .switch(Team)
           .join(TeamMember)
           .join(User))
  return query.where(User.id == user, Org.organization == True)
|
2013-09-27 17:24:07 +00:00
|
|
|
|
|
|
|
|
2013-11-02 01:48:10 +00:00
|
|
|
def get_all_repo_teams(namespace_name, repository_name):
  """ Returns the team permissions (team name and role name) granted on the
      given repository.
  """
  perms = RepositoryPermission.select(Team.name, Role.name, RepositoryPermission)
  perms = perms.join(Team)
  perms = perms.switch(RepositoryPermission).join(Role)
  perms = perms.switch(RepositoryPermission).join(Repository)
  perms = perms.join(Namespace, on=(Repository.namespace_user == Namespace.id))
  return perms.where(Namespace.username == namespace_name,
                     Repository.name == repository_name)
|
2013-11-02 01:48:10 +00:00
|
|
|
|
2013-11-04 23:52:38 +00:00
|
|
|
|
2013-09-27 17:24:07 +00:00
|
|
|
def get_all_repo_users(namespace_name, repository_name):
  """ Returns the user permissions (username, email, robot flag and role name)
      granted directly on the given repository.
  """
  perms = RepositoryPermission.select(User.username, User.email, User.robot, Role.name,
                                      RepositoryPermission)
  perms = perms.join(User)
  perms = perms.switch(RepositoryPermission).join(Role)
  perms = perms.switch(RepositoryPermission).join(Repository)
  perms = perms.join(Namespace, on=(Repository.namespace_user == Namespace.id))
  return perms.where(Namespace.username == namespace_name,
                     Repository.name == repository_name)
|
2013-09-20 22:38:17 +00:00
|
|
|
|
|
|
|
|
2014-08-18 23:19:01 +00:00
|
|
|
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
  """ Returns the distinct users granted access to the repository via membership
      in a team that holds a permission on it.
  """
  users = User.select().distinct()
  users = users.join(TeamMember).join(Team).join(RepositoryPermission)
  users = users.join(Repository)
  users = users.join(Namespace, on=(Repository.namespace_user == Namespace.id))
  return users.where(Namespace.username == namespace_name,
                     Repository.name == repository_name)
|
2014-08-18 23:19:01 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_all_repo_users_transitive(namespace_name, repository_name):
  """ Returns every user with access to the repository, whether granted directly
      or via a team, with duplicates (by username) removed. Direct grants come
      first, preserving the original ordering.
  """
  # Load the users found via teams and directly via permissions.
  via_teams = get_all_repo_users_transitive_via_teams(namespace_name, repository_name)
  directly = [perm.user for perm in get_all_repo_users(namespace_name, repository_name)]

  seen_usernames = set()
  unique_users = []
  for user in list(directly) + list(via_teams):
    if user.username not in seen_usernames:
      seen_usernames.add(user.username)
      unique_users.append(user)

  return unique_users
|
|
|
|
|
|
|
|
|
2014-02-14 23:37:06 +00:00
|
|
|
def get_repository_for_resource(resource_key):
  """ Returns the repository (with namespace pre-selected) owning the build with
      the given resource key, or None if no such build exists.
  """
  query = (Repository
           .select(Repository, Namespace)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .switch(Repository)
           .join(RepositoryBuild)
           .where(RepositoryBuild.resource_key == resource_key))
  try:
    return query.get()
  except Repository.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
2014-07-18 02:51:58 +00:00
|
|
|
def lookup_repository(repo_id):
  """ Returns the repository with the given database id, or None if not found. """
  query = Repository.select().where(Repository.id == repo_id)
  try:
    return query.get()
  except Repository.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
2013-09-28 04:05:32 +00:00
|
|
|
def get_repository(namespace_name, repository_name):
  """ Returns the repository with the given namespace and name, or None. """
  found = None
  try:
    found = _get_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    pass
  return found
|
|
|
|
|
|
|
|
|
2014-12-11 16:03:40 +00:00
|
|
|
def get_image(repo, dockerfile_id):
  """ Returns the image in the repository with the given docker image id, or None. """
  query = Image.select().where(Image.docker_image_id == dockerfile_id,
                               Image.repository == repo)
  try:
    return query.get()
  except Image.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
|
|
|
def find_child_image(repo, parent_image, command):
  """ Returns the most recently created child image of parent_image in the repo
      whose storage was built with the given command, or None if none exists.
  """
  try:
    return (Image.select()
            .join(ImageStorage)
            .switch(Image)
            # Bug fix: `%` (peewee LIKE) binds tighter than `+`, so the original
            # `Image.ancestors % '%/' + parent_image.id + '/%'` applied LIKE to
            # the bare '%/' string and then concatenated the id onto the
            # resulting expression. Parenthesize so the full ancestry pattern
            # '%/<parent id>/%' is what is matched.
            .where(Image.ancestors % ('%/' + str(parent_image.id) + '/%'),
                   ImageStorage.command == command)
            .order_by(ImageStorage.created.desc())
            .get())
  except Image.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
2014-11-10 18:44:36 +00:00
|
|
|
def get_repo_image(namespace_name, repository_name, docker_image_id):
  """ Returns the image with the given docker image id in the given repository
      (without extended storage data), or None if not found.
  """
  def restrict_to_image_id(query):
    return query.where(Image.docker_image_id == docker_image_id).limit(1)

  try:
    return _get_repository_images(namespace_name, repository_name, restrict_to_image_id).get()
  except Image.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
2014-11-10 18:44:36 +00:00
|
|
|
def get_repo_image_extended(namespace_name, repository_name, docker_image_id):
  """ Returns the image with the given docker image id in the given repository,
      including the extended data loaded by _get_repository_images_base, or None.
  """
  def restrict_to_image_id(query):
    return query.where(Image.docker_image_id == docker_image_id).limit(1)

  images = _get_repository_images_base(namespace_name, repository_name, restrict_to_image_id)
  return images[0] if images else None
|
2013-10-19 02:28:46 +00:00
|
|
|
|
2015-03-18 18:47:53 +00:00
|
|
|
def is_repository_public(repository):
  """ Returns whether the given repository's visibility is 'public'. """
  public = _get_public_repo_visibility()
  return repository.visibility == public
|
|
|
|
|
2013-09-28 04:05:32 +00:00
|
|
|
def repository_is_public(namespace_name, repository_name):
  """ Returns whether a repository with the given namespace and name exists and
      has 'public' visibility.
  """
  query = (Repository
           .select()
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .switch(Repository)
           .join(Visibility)
           .where(Namespace.username == namespace_name, Repository.name == repository_name,
                  Visibility.name == 'public'))
  try:
    query.get()
    return True
  except Repository.DoesNotExist:
    return False
|
2013-09-23 16:37:40 +00:00
|
|
|
|
|
|
|
|
2013-09-28 21:11:10 +00:00
|
|
|
def set_repository_visibility(repo, visibility):
  """ Sets the repository's visibility to the named Visibility row.

  Unknown visibility names are silently ignored (best-effort contract).

  Fix: peewee's Model.get raises DoesNotExist instead of returning None, so
  the original `if not visibility_obj: return` guard was unreachable and an
  unknown name crashed. Catch the exception to restore the intended behavior.
  """
  try:
    visibility_obj = Visibility.get(name=visibility)
  except Visibility.DoesNotExist:
    return

  repo.visibility = visibility_obj
  repo.save()
2013-09-28 21:11:10 +00:00
|
|
|
|
2014-01-21 19:20:42 +00:00
|
|
|
def __apply_default_permissions(repo, proto_query, name_property,
                                create_permission_func):
  """ Applies permission prototypes from proto_query to the given repository.

  Prototypes are deduplicated per delegate (keyed by name_property on the
  delegate team/user): a user-specific prototype (activating_user set) takes
  precedence over a generic one (activating_user None). For each surviving
  (delegate, role) pair, create_permission_func(delegate, repo, role) is
  invoked to materialize the permission.
  """
  final_protos = {}
  for proto in proto_query:
    applies_to = proto.delegate_team or proto.delegate_user
    name = getattr(applies_to, name_property)
    # We will skip the proto if it is pre-empted by a more important proto
    if name in final_protos and proto.activating_user is None:
      continue

    # By this point, it is either a user specific proto, or there is no
    # proto yet, so we can safely assume it applies
    final_protos[name] = (applies_to, proto.role)

  for delegate, role in final_protos.values():
    create_permission_func(delegate, repo, role)
2014-01-21 00:05:26 +00:00
|
|
|
def create_repository(namespace, name, creating_user, visibility='private'):
  """ Creates a new repository under the given namespace.

  The creating user (when a real user, not an organization) is granted admin.
  When the repository is created inside an organization (namespace differs
  from the creating user), the org's permission prototypes are applied to
  grant default team and user permissions.

  Returns the new Repository row.
  """
  private = Visibility.get(name=visibility)
  namespace_user = User.get(username=namespace)
  repo = Repository.create(name=name, visibility=private, namespace_user=namespace_user)
  admin = Role.get(name='admin')

  if creating_user and not creating_user.organization:
    RepositoryPermission.create(user=creating_user, repository=repo,
                                role=admin)

    if creating_user.username != namespace:
      # Permission prototypes only work for orgs
      org = get_organization(namespace)
      # Match prototypes activated by this specific user OR generic ones
      # (activating_user IS NULL).
      user_clause = ((PermissionPrototype.activating_user == creating_user) |
                     (PermissionPrototype.activating_user >> None))

      # Prototypes that delegate to a team (delegate_user IS NULL).
      team_protos = (PermissionPrototype
                     .select()
                     .where(PermissionPrototype.org == org, user_clause,
                            PermissionPrototype.delegate_user >> None))

      def create_team_permission(team, repo, role):
        RepositoryPermission.create(team=team, repository=repo, role=role)

      __apply_default_permissions(repo, team_protos, 'name',
                                  create_team_permission)

      # Prototypes that delegate to a user (delegate_team IS NULL).
      user_protos = (PermissionPrototype
                     .select()
                     .where(PermissionPrototype.org == org, user_clause,
                            PermissionPrototype.delegate_team >> None))

      def create_user_permission(user, repo, role):
        # The creating user always gets admin anyway
        if user.username == creating_user.username:
          return

        RepositoryPermission.create(user=user, repository=repo, role=role)

      __apply_default_permissions(repo, user_protos, 'username',
                                  create_user_permission)

  return repo
|
|
|
|
|
2014-06-17 20:03:43 +00:00
|
|
|
def __translate_ancestry(old_ancestry, translations, repository, username, preferred_location):
  """ Translates an ancestry string ('/1/2/3/' of Image DB ids) from another
  repository into the equivalent ancestry in the target repository.

  Each ancestor is looked up (or created/linked) in the target repository via
  find_create_or_link_image; the translations dict caches old id -> new id
  mappings across calls and is mutated in place.
  """
  if old_ancestry == '/':
    # Root image: nothing to translate.
    return '/'

  def translate_id(old_id, docker_image_id):
    logger.debug('Translating id: %s', old_id)
    if old_id not in translations:
      image_in_repo = find_create_or_link_image(docker_image_id, repository, username,
                                                translations, preferred_location)
      translations[old_id] = image_in_repo.id
    return translations[old_id]

  # Select all the ancestor Docker IDs in a single query.
  old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]]
  query = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
  old_images = {i.id: i.docker_image_id for i in query}

  # Translate the old images into new ones.
  new_ids = [str(translate_id(old_id, old_images[old_id])) for old_id in old_ids]
  return '/%s/' % '/'.join(new_ids)
|
|
|
|
2014-09-18 21:26:40 +00:00
|
|
|
def _create_storage(location_name):
  """ Creates a new ImageStorage placed at the named location.

  The transient `locations` attribute is populated so callers can see where
  the storage lives without issuing another query.
  """
  placement_location = ImageStorageLocation.get(name=location_name)
  new_storage = ImageStorage.create()
  ImageStoragePlacement.create(location=placement_location, storage=new_storage)
  new_storage.locations = {location_name}
  return new_storage
|
|
|
2014-11-06 19:48:16 +00:00
|
|
|
def _find_or_link_image(existing_image, repository, username, translations, preferred_location):
  """ Creates a new Image in the given repository that shares the storage of
  existing_image (from another repository), translating its ancestry as well.

  Returns the existing/new Image, or None if existing_image vanished.
  """
  # TODO(jake): This call is currently recursively done under a single transaction. Can we make
  # it instead be done under a set of transactions?
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Check for an existing image, under the transaction, to make sure it doesn't already exist.
    repo_image = get_repo_image(repository.namespace_user.username, repository.name,
                                existing_image.docker_image_id)
    if repo_image:
      return repo_image

    # Make sure the existing base image still exists.
    try:
      to_copy = Image.select().join(ImageStorage).where(Image.id == existing_image.id).get()

      msg = 'Linking image to existing storage with docker id: %s and uuid: %s'
      logger.debug(msg, existing_image.docker_image_id, to_copy.storage.uuid)

      # Ancestors from the source repo must be re-created/linked in this repo.
      new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repository,
                                                username, preferred_location)

      storage = to_copy.storage
      storage.locations = {placement.location.name
                           for placement in storage.imagestorageplacement_set}

      new_image = Image.create(docker_image_id=existing_image.docker_image_id,
                               repository=repository, storage=storage,
                               ancestors=new_image_ancestry)

      logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
      translations[existing_image.id] = new_image.id
      return new_image
    except Image.DoesNotExist:
      # The source image disappeared between queries; caller falls back to
      # creating fresh storage.
      return None
|
|
2014-11-06 19:48:16 +00:00
|
|
|
def find_create_or_link_image(docker_image_id, repository, username, translations,
                              preferred_location):
  """ Returns the Image with the given Docker id in the repository, linking or
  creating it if necessary.

  Resolution order: (1) already present in the repository; (2) link to the
  storage of an equivalent image in another repository visible to the user;
  (3) create brand-new storage at preferred_location.
  """
  # First check for the image existing in the repository. If found, we simply return it.
  repo_image = get_repo_image(repository.namespace_user.username, repository.name,
                              docker_image_id)
  if repo_image:
    return repo_image

  # We next check to see if there is an existing storage the new image can link to.
  existing_image_query = (Image
                          .select(Image, ImageStorage)
                          .distinct()
                          .join(ImageStorage)
                          .switch(Image)
                          .join(Repository)
                          .join(RepositoryPermission, JOIN_LEFT_OUTER)
                          .switch(Repository)
                          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                          .where(ImageStorage.uploading == False))

  # Restrict to repositories the user can see, with the matching Docker id.
  existing_image_query = (_filter_to_repos_for_user(existing_image_query, username)
                          .where(Image.docker_image_id == docker_image_id))

  # If there is an existing image, we try to translate its ancestry and copy its storage.
  new_image = None
  try:
    logger.debug('Looking up existing image for ID: %s', docker_image_id)
    existing_image = existing_image_query.get()

    logger.debug('Existing image %s found for ID: %s', existing_image.id, docker_image_id)
    new_image = _find_or_link_image(existing_image, repository, username, translations,
                                    preferred_location)
    if new_image:
      return new_image
  except Image.DoesNotExist:
    logger.debug('No existing image found for ID: %s', docker_image_id)
    pass

  # Otherwise, create a new storage directly.
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Final check for an existing image, under the transaction.
    repo_image = get_repo_image(repository.namespace_user.username, repository.name,
                                docker_image_id)
    if repo_image:
      return repo_image

    logger.debug('Creating new storage for docker id: %s', docker_image_id)
    storage = _create_storage(preferred_location)

    # A brand-new image has no ancestors yet ('/' is the empty ancestry).
    return Image.create(docker_image_id=docker_image_id,
                        repository=repository, storage=storage,
                        ancestors='/')
|
|
|
|
|
|
|
2015-02-04 20:29:24 +00:00
|
|
|
def find_or_create_storage_signature(storage, signature_kind):
  """ Returns the signature row for (storage, kind), creating it if absent. """
  existing = lookup_storage_signature(storage, signature_kind)
  if existing is not None:
    return existing

  kind_row = ImageStorageSignatureKind.get(name=signature_kind)
  return ImageStorageSignature.create(storage=storage, kind=kind_row)
|
|
|
|
|
|
|
|
def lookup_storage_signature(storage, signature_kind):
  """ Returns the ImageStorageSignature for (storage, kind), or None. """
  kind_row = ImageStorageSignatureKind.get(name=signature_kind)
  query = (ImageStorageSignature
           .select()
           .where(ImageStorageSignature.storage == storage,
                  ImageStorageSignature.kind == kind_row))
  try:
    return query.get()
  except ImageStorageSignature.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
def find_derived_storage(source, transformation_name):
  """ Returns the derived ImageStorage produced from `source` by the named
  transformation, with its locations set populated, or None if not found.
  """
  try:
    found = (ImageStorage
             .select(ImageStorage, DerivedImageStorage)
             .join(DerivedImageStorage, on=(ImageStorage.id == DerivedImageStorage.derivative))
             .join(ImageStorageTransformation)
             .where(DerivedImageStorage.source == source,
                    ImageStorageTransformation.name == transformation_name)
             .get())

    # Populate the transient locations set so callers can read placements
    # without another query.
    found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
    return found
  except ImageStorage.DoesNotExist:
    return None
|
|
|
|
|
def find_or_create_derived_storage(source, transformation_name, preferred_location):
  """ Returns the derived storage for (source, transformation), creating one
  at preferred_location if it does not yet exist.
  """
  found = find_derived_storage(source, transformation_name)
  if found is not None:
    return found

  logger.debug('Creating storage dervied from source: %s', source.uuid)
  transformation = ImageStorageTransformation.get(name=transformation_name)
  derived = _create_storage(preferred_location)
  DerivedImageStorage.create(source=source, derivative=derived, transformation=transformation)
  return derived
|
|
|
|
|
2014-10-08 17:43:12 +00:00
|
|
|
def delete_derived_storage_by_uuid(storage_uuid):
  """ Deletes the storage with the given uuid, but only when it is a derived
  storage; unknown uuids and non-derived storages are left untouched.
  """
  try:
    storage = get_storage_by_uuid(storage_uuid)
  except InvalidImageException:
    return

  try:
    DerivedImageStorage.get(derivative=storage)
  except DerivedImageStorage.DoesNotExist:
    # Not a derived storage; refuse to delete.
    return

  storage.delete_instance(recursive=True)
|
|
|
|
2014-09-23 20:06:38 +00:00
|
|
|
def get_storage_by_uuid(storage_uuid):
  """ Returns the ImageStorage with the given uuid, with its locations set
  populated from its placements.

  Raises InvalidImageException if no storage has that uuid.

  Fix: the exception was constructed with printf-style args
  ('...%s', storage_uuid) which exception constructors never interpolate;
  format the message explicitly so the uuid actually appears in it.
  """
  placements = list(ImageStoragePlacement
                    .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
                    .join(ImageStorageLocation)
                    .switch(ImageStoragePlacement)
                    .join(ImageStorage)
                    .where(ImageStorage.uuid == storage_uuid))

  if not placements:
    raise InvalidImageException('No storage found with uuid: %s' % storage_uuid)

  found = placements[0].storage
  found.locations = {placement.location.name for placement in placements}

  return found
|
|
|
|
|
2014-10-02 14:46:20 +00:00
|
|
|
def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
  """ Records the compressed and uncompressed sizes on the image's storage and
  computes the aggregate (image + all ancestors) size.

  Raises DataModelException if the image is not found. Returns the Image.
  """
  try:
    image = (Image
             .select(Image, ImageStorage)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .switch(Image)
             .join(ImageStorage, JOIN_LEFT_OUTER)
             .where(Repository.name == repository_name, Namespace.username == namespace_name,
                    Image.docker_image_id == docker_image_id)
             .get())

  except Image.DoesNotExist:
    raise DataModelException('No image with specified id and repository')

  image.storage.image_size = image_size
  image.storage.uncompressed_size = uncompressed_size

  # Ancestry is stored as '/1/2/3/': strip the empty leading/trailing parts.
  ancestors = image.ancestors.split('/')[1:-1]
  if ancestors:
    try:
      # TODO(jschorr): Switch to this faster route once we have full ancestor aggregate_size
      # parent_image = Image.get(Image.id == ancestors[-1])
      # total_size = image_size + parent_image.storage.aggregate_size
      total_size = (ImageStorage.select(fn.Sum(ImageStorage.image_size))
                    .join(Image)
                    .where(Image.id << ancestors)
                    .scalar()) + image_size

      image.storage.aggregate_size = total_size
    except Image.DoesNotExist:
      pass
  else:
    # No ancestors: aggregate size is just this image's own size.
    image.storage.aggregate_size = image_size

  image.storage.save()

  return image
|
|
|
|
|
|
2014-09-23 19:49:28 +00:00
|
|
|
def set_image_metadata(docker_image_id, namespace_name, repository_name, created_date_str, comment,
                       command, parent=None):
  """ Sets metadata (created date, comment, command, ancestry) on the image,
  under a transaction with the row locked for update.

  Raises DataModelException if the image is not found. Returns the Image.

  Fix: the bare `except:` around date parsing also swallowed
  KeyboardInterrupt/SystemExit; narrowed to `except Exception:` while keeping
  the intentional catch-all over dateutil's varied parse errors.
  """
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    query = (Image
             .select(Image, ImageStorage)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .switch(Image)
             .join(ImageStorage)
             .where(Repository.name == repository_name, Namespace.username == namespace_name,
                    Image.docker_image_id == docker_image_id))

    try:
      fetched = db_for_update(query).get()
    except Image.DoesNotExist:
      raise DataModelException('No image with specified id and repository')

    # We cleanup any old checksum in case it's a retry after a fail
    fetched.storage.checksum = None
    fetched.storage.created = datetime.now()

    if created_date_str is not None:
      try:
        # Normalize to a naive datetime for storage.
        fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
      except Exception:
        # parse raises different exceptions, so we cannot use a specific kind of handler here.
        pass

    fetched.storage.comment = comment
    fetched.storage.command = command

    if parent:
      # Extend the parent's ancestry with the parent's own id.
      fetched.ancestors = '%s%s/' % (parent.ancestors, parent.id)

    fetched.save()
    fetched.storage.save()
    return fetched
2013-09-20 15:55:44 +00:00
|
|
|
|
2014-11-10 18:44:36 +00:00
|
|
|
def _get_repository_images(namespace_name, repository_name, query_modifier):
  """ Builds the base Image query for the named repository and returns the
  result of applying query_modifier to it.
  """
  base_query = (Image
                .select()
                .join(Repository)
                .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                .where(Repository.name == repository_name, Namespace.username == namespace_name))
  return query_modifier(base_query)
|
|
|
|
2014-06-28 00:04:26 +00:00
|
|
|
def _get_repository_images_base(namespace_name, repository_name, query_modifier):
  """ Returns the repository's images with storages and location sets loaded.

  Queries placements (one row per image/location pair), then folds them into
  one Image object per image id, accumulating the set of location names on
  image.storage.locations. query_modifier can further restrict the query.
  """
  query = (ImageStoragePlacement
           .select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
           .join(ImageStorageLocation)
           .switch(ImageStoragePlacement)
           .join(ImageStorage, JOIN_LEFT_OUTER)
           .join(Image)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Repository.name == repository_name, Namespace.username == namespace_name))

  query = query_modifier(query)

  location_list = list(query)

  images = {}
  for location in location_list:
    # Make sure we're always retrieving the same image object.
    image = location.storage.image

    # Set the storage to the one we got from the location, to prevent another query
    image.storage = location.storage

    if not image.id in images:
      images[image.id] = image
      image.storage.locations = set()
    else:
      # Reuse the canonical object so locations accumulate in one place.
      image = images[image.id]

    # Add the location to the image's locations set.
    image.storage.locations.add(location.location.name)

  return images.values()
2013-09-20 15:55:44 +00:00
|
|
|
|
2013-11-06 22:09:22 +00:00
|
|
|
|
2014-11-06 19:48:16 +00:00
|
|
|
def lookup_repository_images(namespace_name, repository_name, docker_image_ids):
  """ Returns a query of Images in the named repository whose Docker image id
  is among docker_image_ids.
  """
  joined = (Image
            .select()
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id)))
  return joined.where(Repository.name == repository_name,
                      Namespace.username == namespace_name,
                      Image.docker_image_id << docker_image_ids)
|
|
|
2014-11-06 22:50:48 +00:00
|
|
|
def get_matching_repository_images(namespace_name, repository_name, docker_image_ids):
  """ Returns the repository's images (with storages/locations loaded) whose
  Docker image id is among docker_image_ids.
  """
  return _get_repository_images_base(
      namespace_name, repository_name,
      lambda q: q.where(Image.docker_image_id << docker_image_ids))
|
|
2015-04-24 20:22:19 +00:00
|
|
|
|
2015-04-24 20:37:37 +00:00
|
|
|
def get_repository_images_without_placements(repository, with_ancestor=None):
  """ Returns a query of the repository's images joined with their storage,
  without loading placements.

  If with_ancestor is given, results are limited to that image and its
  descendants (matched by ancestry-string prefix).
  """
  query = (Image
           .select(Image, ImageStorage)
           .join(ImageStorage)
           .where(Image.repository == repository))

  if with_ancestor:
    # Descendants have an ancestry string starting with the ancestor's own
    # ancestry plus its id; `**` is peewee's case-insensitive LIKE.
    ancestors_string = '%s%s/' % (with_ancestor.ancestors, with_ancestor.id)
    query = query.where((Image.ancestors ** (ancestors_string + '%')) |
                        (Image.id == with_ancestor.id))

  return query
|
|
|
|
|
|
2014-06-28 00:04:26 +00:00
|
|
|
def get_repository_images(namespace_name, repository_name):
  """ Returns every image in the named repository, with storages and location
  sets loaded.
  """
  def unmodified(query):
    return query

  return _get_repository_images_base(namespace_name, repository_name, unmodified)
|
|
|
|
|
|
2015-03-21 03:21:20 +00:00
|
|
|
def _tag_alive(query, now_ts=None):
  """ Restricts a RepositoryTag query to tags that are still alive: either
  never ended (lifetime_end_ts NULL) or ending after now_ts.
  """
  if now_ts is None:
    now_ts = get_epoch_timestamp()
  alive_clause = ((RepositoryTag.lifetime_end_ts >> None) |
                  (RepositoryTag.lifetime_end_ts > now_ts))
  return query.where(alive_clause)
|
|
|
|
|
|
|
|
2015-06-30 08:38:58 +00:00
|
|
|
def list_repository_tag_history(repository, page=1, size=100, specific_tag=None):
  """ Returns a paginated query over the repository's tag history (including
  expired tags, excluding hidden ones), newest lifetime first.

  If specific_tag is given, only history entries for that tag name are
  returned.
  """
  query = (RepositoryTag
           .select(RepositoryTag, Image)
           .join(Image)
           .where(RepositoryTag.repository == repository)
           .where(RepositoryTag.hidden == False)
           .order_by(RepositoryTag.lifetime_start_ts.desc())
           .paginate(page, size))

  if specific_tag:
    query = query.where(RepositoryTag.name == specific_tag)

  return query
|
|
|
|
|
|
2015-03-18 18:47:53 +00:00
|
|
|
def list_repository_tags(namespace_name, repository_name, include_hidden=False,
                         include_storage=False):
  """ Returns a query of the live tags in the named repository, joined with
  their images (and optionally image storages). Hidden tags are excluded
  unless include_hidden is True.
  """
  if include_storage:
    selections = (RepositoryTag, Image, ImageStorage)
  else:
    selections = (RepositoryTag, Image)

  query = _tag_alive(RepositoryTag
                     .select(*selections)
                     .join(Repository)
                     .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                     .switch(RepositoryTag)
                     .join(Image)
                     .where(Repository.name == repository_name,
                            Namespace.username == namespace_name))

  if not include_hidden:
    query = query.where(RepositoryTag.hidden == False)

  if include_storage:
    query = query.switch(Image).join(ImageStorage)

  return query
|
|
|
|
|
|
|
|
|
|
def _garbage_collect_tags(namespace_name, repository_name):
  """ Deletes tags in the repository whose lifetime ended longer ago than the
  namespace's configured removed-tag expiration window.
  """
  # We do this without using a join to prevent holding read locks on the repository table
  repo = _get_repository(namespace_name, repository_name)
  expired_time = get_epoch_timestamp() - repo.namespace_user.removed_tag_expiration_s

  # Only tags that actually ended (lifetime_end_ts NOT NULL) and ended before
  # the expiration cutoff are eligible.
  tags_to_delete = list(RepositoryTag
                        .select(RepositoryTag.id)
                        .where(RepositoryTag.repository == repo,
                               ~(RepositoryTag.lifetime_end_ts >> None),
                               (RepositoryTag.lifetime_end_ts <= expired_time))
                        .order_by(RepositoryTag.id))
  if len(tags_to_delete) > 0:
    (RepositoryTag
     .delete()
     .where(RepositoryTag.id << tags_to_delete)
     .execute())
2013-09-25 20:46:28 +00:00
|
|
|
|
|
|
|
|
2014-02-06 19:13:35 +00:00
|
|
|
def garbage_collect_repository(namespace_name, repository_name):
  """ Removes expired tags, then deletes any images in the repository that are
  no longer reachable from a tag, and finally garbage-collects the storage
  records of the removed images.
  """
  storage_id_whitelist = {}

  _garbage_collect_tags(namespace_name, repository_name)

  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # TODO (jake): We could probably select this and all the images in a single query using
    # a different kind of join.

    # Get a list of all images used by tags in the repository
    tag_query = (RepositoryTag
                 .select(RepositoryTag, Image, ImageStorage)
                 .join(Image)
                 .join(ImageStorage, JOIN_LEFT_OUTER)
                 .switch(RepositoryTag)
                 .join(Repository)
                 .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                 .where(Repository.name == repository_name, Namespace.username == namespace_name))

    referenced_anscestors = set()
    for tag in tag_query:
      # The anscestor list is in the format '/1/2/3/', extract just the ids
      anscestor_id_strings = tag.image.ancestors.split('/')[1:-1]
      ancestor_list = [int(img_id_str) for img_id_str in anscestor_id_strings]
      referenced_anscestors = referenced_anscestors.union(set(ancestor_list))
      referenced_anscestors.add(tag.image.id)

    all_repo_images = _get_repository_images(namespace_name, repository_name, lambda q: q)
    all_images = {int(img.id): img for img in all_repo_images}
    # Images not referenced by any tag (directly or as an ancestor) are dead.
    to_remove = set(all_images.keys()).difference(referenced_anscestors)

    if len(to_remove) > 0:
      logger.info('Cleaning up unreferenced images: %s', to_remove)
      # Remember the storages of the removed images; their records are
      # collected outside this transaction.
      storage_id_whitelist = {all_images[to_remove_id].storage.id for to_remove_id in to_remove}
      Image.delete().where(Image.id << list(to_remove)).execute()

  # Storage GC is deliberately done outside the image-deletion transaction.
  if len(to_remove) > 0:
    logger.info('Garbage collecting storage for images: %s', to_remove)
    _garbage_collect_storage(storage_id_whitelist)
|
|
2014-09-18 21:26:40 +00:00
|
|
|
|
2015-02-11 16:54:30 +00:00
|
|
|
def _garbage_collect_storage(storage_id_whitelist):
  """ Deletes ImageStorage records (and their placements, derived storages and
  blobs) from the whitelist of candidate ids, keeping any storage still
  referenced by an Image or used as the derivative of another storage.
  """
  if len(storage_id_whitelist) == 0:
    return

  def placements_query_to_paths_set(placements_query):
    # (location name, blob path) pairs for the blobs backing these placements.
    return {(placement.location.name, config.store.image_path(placement.storage.uuid))
            for placement in placements_query}

  def orphaned_storage_query(select_base_query, candidates, group_by):
    # A candidate storage is orphaned when no Image references it and it is
    # not the derivative of any DerivedImageStorage.
    return (select_base_query
            .switch(ImageStorage)
            .join(Image, JOIN_LEFT_OUTER)
            .switch(ImageStorage)
            .join(DerivedImageStorage, JOIN_LEFT_OUTER,
                  on=(ImageStorage.id == DerivedImageStorage.derivative))
            .where(ImageStorage.id << list(candidates))
            .group_by(*group_by)
            .having((fn.Count(Image.id) == 0) & (fn.Count(DerivedImageStorage.id) == 0)))

  # Note: We remove the derived image storage in its own transaction as a way to reduce the
  # time that the transaction holds on the database indicies. This could result in a derived
  # image storage being deleted for an image storage which is later reused during this time,
  # but since these are caches anyway, it isn't terrible and worth the tradeoff (for now).
  logger.debug('Garbage collecting derived storage from candidates: %s', storage_id_whitelist)
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Find out which derived storages will be removed, and add them to the whitelist
    # The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
    orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
                                                           storage_id_whitelist,
                                                           (ImageStorage.id,)))

    if len(orphaned_from_candidates) > 0:
      derived_to_remove = (ImageStorage
                           .select(ImageStorage.id)
                           .join(DerivedImageStorage,
                                 on=(ImageStorage.id == DerivedImageStorage.derivative))
                           .where(DerivedImageStorage.source << orphaned_from_candidates))
      storage_id_whitelist.update({derived.id for derived in derived_to_remove})

      # Remove the dervived image storages with sources of orphaned storages
      (DerivedImageStorage
       .delete()
       .where(DerivedImageStorage.source << orphaned_from_candidates)
       .execute())

  # Note: Both of these deletes must occur in the same transaction (unfortunately) because a
  # storage without any placement is invalid, and a placement cannot exist without a storage.
  # TODO(jake): We might want to allow for null storages on placements, which would allow us to
  # delete the storages, then delete the placements in a non-transaction.
  logger.debug('Garbage collecting storages from candidates: %s', storage_id_whitelist)
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Track all of the data that should be removed from blob storage
    placements_to_remove = list(orphaned_storage_query(ImageStoragePlacement
                                                       .select(ImageStoragePlacement,
                                                               ImageStorage,
                                                               ImageStorageLocation)
                                                       .join(ImageStorageLocation)
                                                       .switch(ImageStoragePlacement)
                                                       .join(ImageStorage),
                                                       storage_id_whitelist,
                                                       (ImageStorage, ImageStoragePlacement,
                                                        ImageStorageLocation)))

    paths_to_remove = placements_query_to_paths_set(placements_to_remove)

    # Remove the placements for orphaned storages
    if len(placements_to_remove) > 0:
      placement_ids_to_remove = [placement.id for placement in placements_to_remove]
      placements_removed = (ImageStoragePlacement
                            .delete()
                            .where(ImageStoragePlacement.id << placement_ids_to_remove)
                            .execute())
      logger.debug('Removed %s image storage placements', placements_removed)

    # Remove all orphaned storages
    # The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
    orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
                                                    storage_id_whitelist,
                                                    (ImageStorage.id,)).alias('osq'))
    if len(orphaned_storages) > 0:
      storages_removed = (ImageStorage
                          .delete()
                          .where(ImageStorage.id << orphaned_storages)
                          .execute())
      logger.debug('Removed %s image storage records', storages_removed)

  # We are going to make the conscious decision to not delete image storage blobs inside
  # transactions.
  # This may end up producing garbage in s3, trading off for higher availability in the database.
  for location_name, image_path in paths_to_remove:
    logger.debug('Removing %s from %s', image_path, location_name)
    config.store.remove({location_name}, image_path)
|
|
|
|
|
|
2013-09-25 20:46:28 +00:00
|
|
|
def get_tag_image(namespace_name, repository_name, tag_name):
  """ Return the Image currently pointed to by the named live tag.

  Raises DataModelException when no live tag with that name exists.
  """
  def limit_to_tag(query):
    # Restrict the base image query to images referenced by a live tag
    # carrying the requested name.
    return _tag_alive(query
                      .switch(Image)
                      .join(RepositoryTag)
                      .where(RepositoryTag.name == tag_name))

  images = _get_repository_images_base(namespace_name, repository_name, limit_to_tag)
  if images:
    return images[0]

  raise DataModelException('Unable to find image for tag.')
|
2013-09-26 21:58:41 +00:00
|
|
|
|
2013-10-01 18:14:39 +00:00
|
|
|
def get_image_by_id(namespace_name, repository_name, docker_image_id):
  """ Fetch the image with the given docker image id in the repo.

  Raises DataModelException when the image cannot be found.
  """
  found = get_repo_image_extended(namespace_name, repository_name, docker_image_id)
  if not found:
    raise DataModelException('Unable to find image \'%s\' for repo \'%s/%s\'' %
                             (docker_image_id, namespace_name, repository_name))
  return found
|
2013-10-01 18:14:39 +00:00
|
|
|
|
2013-09-30 19:30:00 +00:00
|
|
|
|
2014-06-28 00:04:26 +00:00
|
|
|
def get_parent_images(namespace_name, repository_name, image_obj):
  """ Returns a list of parent Image objects in chronological order.

  The ancestors field is a '/'-separated string of database ids; the returned
  list follows that same ordering.
  """
  ancestor_ids = image_obj.ancestors.strip('/').split('/')

  # An image with no parents has an empty ancestors string.
  if ancestor_ids == ['']:
    return []

  def filter_to_parents(query):
    return query.where(Image.id << ancestor_ids)

  fetched = _get_repository_images_base(namespace_name, repository_name, filter_to_parents)

  # The database may return rows in any order; re-order to match the
  # ancestry string.
  by_id = {unicode(img.id): img for img in fetched}
  return [by_id[parent_id] for parent_id in ancestor_ids]
|
|
|
|
|
|
|
|
|
2013-09-25 20:46:28 +00:00
|
|
|
def create_or_update_tag(namespace_name, repository_name, tag_name,
                         tag_docker_image_id, reversion=False):
  """ Point tag_name at the image with the given docker image id, ending the
      lifetime of any existing live tag with the same name.

      Raises DataModelException when the repository or image does not exist.
      Returns the newly created RepositoryTag row.
  """
  try:
    repo = _get_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    raise DataModelException('Invalid repository %s/%s' % (namespace_name, repository_name))

  now_ts = get_epoch_timestamp()

  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    try:
      # End the lifetime of any currently-live tag with this name, under a
      # row lock (db_for_update) so concurrent writers serialize here.
      tag = db_for_update(_tag_alive(RepositoryTag
                                     .select()
                                     .where(RepositoryTag.repository == repo,
                                            RepositoryTag.name == tag_name), now_ts)).get()
      tag.lifetime_end_ts = now_ts
      tag.save()
    except RepositoryTag.DoesNotExist:
      # No live tag with this name exists; nothing to expire.
      pass

    try:
      image = Image.get(Image.docker_image_id == tag_docker_image_id, Image.repository == repo)
    except Image.DoesNotExist:
      raise DataModelException('Invalid image with id: %s' % tag_docker_image_id)

    # The new tag starts its lifetime at the same instant the old one ended.
    return RepositoryTag.create(repository=repo, image=image, name=tag_name,
                                lifetime_start_ts=now_ts, reversion=reversion)
|
2013-09-25 20:46:28 +00:00
|
|
|
|
|
|
|
def delete_tag(namespace_name, repository_name, tag_name):
  """ Soft-delete a live tag by setting its lifetime_end_ts to now.

      Raises DataModelException when no live tag with that name exists.
  """
  now_ts = get_epoch_timestamp()
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    try:
      query = _tag_alive(RepositoryTag
                         .select(RepositoryTag, Repository)
                         .join(Repository)
                         .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                         .where(Repository.name == repository_name,
                                Namespace.username == namespace_name,
                                RepositoryTag.name == tag_name), now_ts)
      # Lock the row so a concurrent create_or_update_tag cannot race us.
      found = db_for_update(query).get()
    except RepositoryTag.DoesNotExist:
      msg = ('Invalid repository tag \'%s\' on repository \'%s/%s\'' %
             (tag_name, namespace_name, repository_name))
      raise DataModelException(msg)

    # The tag row is retained (for history); only its lifetime is ended.
    found.lifetime_end_ts = now_ts
    found.save()
|
2013-11-07 22:09:15 +00:00
|
|
|
|
2013-09-25 20:46:28 +00:00
|
|
|
|
2015-02-18 21:37:38 +00:00
|
|
|
def create_temporary_hidden_tag(repo, image, expiration_s):
  """ Create a tag with a defined timeline, that will not appear in the UI or CLI. Returns the name
      of the temporary tag. """
  start_ts = get_epoch_timestamp()
  end_ts = start_ts + expiration_s
  # A random uuid keeps the hidden tag from colliding with real tag names.
  tag_name = str(uuid4())
  RepositoryTag.create(repository=repo, image=image, name=tag_name, lifetime_start_ts=start_ts,
                       lifetime_end_ts=end_ts, hidden=True)
  return tag_name
|
2013-09-25 20:46:28 +00:00
|
|
|
|
|
|
|
|
2015-02-11 16:54:30 +00:00
|
|
|
def purge_all_repository_tags(namespace_name, repository_name):
  """ Immediately purge all repository tags without respecting the lifeline procedure """
  try:
    found_repo = _get_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    raise DataModelException('Invalid repository \'%s/%s\'' %
                             (namespace_name, repository_name))

  # Hard delete: rows are removed outright rather than having their
  # lifetime ended.
  RepositoryTag.delete().where(RepositoryTag.repository == found_repo.id).execute()
|
2013-09-25 20:46:28 +00:00
|
|
|
|
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
def __entity_permission_repo_query(entity_id, entity_table, entity_id_property, namespace_name,
                                   repository_name):
  """ This method works for both users and teams.

      Builds the query of RepositoryPermission rows binding the given entity
      (User or Team, selected via entity_table/entity_id_property) to the
      named repository. The join order matters: permission -> entity,
      then back to permission -> role, then permission -> repo -> namespace.
  """
  return (RepositoryPermission
          .select(entity_table, Repository, Namespace, Role, RepositoryPermission)
          .join(entity_table)
          .switch(RepositoryPermission)
          .join(Role)
          .switch(RepositoryPermission)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Repository.name == repository_name, Namespace.username == namespace_name,
                 entity_id_property == entity_id))
|
2013-09-27 19:53:39 +00:00
|
|
|
|
2013-09-27 18:56:14 +00:00
|
|
|
|
2013-09-27 19:53:39 +00:00
|
|
|
def get_user_reponame_permission(username, namespace_name, repository_name):
  """ Return the RepositoryPermission the user holds on the repo.

  Raises DataModelException when the user has no permission there.
  """
  permissions = list(__entity_permission_repo_query(username, User, User.username,
                                                    namespace_name,
                                                    repository_name))
  if permissions:
    return permissions[0]

  raise DataModelException('User does not have permission for repo.')
|
|
|
|
|
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
def get_team_reponame_permission(team_name, namespace_name, repository_name):
  """ Return the RepositoryPermission the team holds on the repo.

  Raises DataModelException when the team has no permission there.
  """
  permissions = list(__entity_permission_repo_query(team_name, Team, Team.name,
                                                    namespace_name,
                                                    repository_name))
  if permissions:
    return permissions[0]

  raise DataModelException('Team does not have permission for repo.')
|
|
|
|
|
|
|
|
|
|
|
|
def delete_user_permission(username, namespace_name, repository_name):
  """ Remove the user's permission on the repo.

  The namespace owner's admin permission can never be removed.
  Raises DataModelException on either violation.
  """
  if username == namespace_name:
    raise DataModelException('Namespace owner must always be admin.')

  permissions = list(__entity_permission_repo_query(username, User, User.username,
                                                    namespace_name,
                                                    repository_name))
  if not permissions:
    raise DataModelException('User does not have permission for repo.')

  permissions[0].delete_instance()
|
|
|
|
|
|
|
|
|
|
|
|
def delete_team_permission(team_name, namespace_name, repository_name):
  """ Remove the team's permission on the repo.

  Raises DataModelException when the team holds no permission there.
  """
  permissions = list(__entity_permission_repo_query(team_name, Team, Team.name,
                                                    namespace_name,
                                                    repository_name))
  if not permissions:
    raise DataModelException('Team does not have permission for repo.')

  permissions[0].delete_instance()
|
|
|
|
|
|
|
|
|
2013-11-07 17:54:21 +00:00
|
|
|
def __set_entity_repo_permission(entity, permission_entity_property,
                                 namespace_name, repository_name, role_name):
  """ Create or update the permission binding an entity (user or team) to the
      repo with the named role; works for both users and teams. Returns the
      RepositoryPermission row. """
  repo = _get_repository(namespace_name, repository_name)
  new_role = Role.get(Role.name == role_name)

  # 'user' or 'team' selects which FK column on RepositoryPermission is used.
  entity_attr = getattr(RepositoryPermission, permission_entity_property)

  try:
    # Prefer updating an existing permission row in place.
    existing = RepositoryPermission.get(entity_attr == entity,
                                        RepositoryPermission.repository == repo)
    existing.role = new_role
    existing.save()
    return existing
  except RepositoryPermission.DoesNotExist:
    # No permission yet: create one keyed on the proper entity column.
    return RepositoryPermission.create(repository=repo, role=new_role,
                                       **{permission_entity_property: entity})
|
|
|
|
|
2013-09-28 00:03:07 +00:00
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
def set_user_repo_permission(username, namespace_name, repository_name,
                             role_name):
  """ Grant or change a user's role on the repo.

  The namespace owner must always remain admin; unknown usernames raise
  InvalidUsernameException.
  """
  if username == namespace_name:
    raise DataModelException('Namespace owner must always be admin.')

  try:
    target_user = User.get(User.username == username)
  except User.DoesNotExist:
    raise InvalidUsernameException('Invalid username: %s' % username)

  return __set_entity_repo_permission(target_user, 'user', namespace_name,
                                      repository_name, role_name)
|
2013-09-27 19:53:39 +00:00
|
|
|
|
2013-11-01 23:34:17 +00:00
|
|
|
|
|
|
|
def set_team_repo_permission(team_name, namespace_name, repository_name,
                             role_name):
  """ Grant or change a team's role on the repo.

  Raises DataModelException when the team does not exist in the organization.
  """
  matching = list(Team.select().join(User).where(Team.name == team_name,
                                                 User.username == namespace_name))
  if not matching:
    raise DataModelException('No team \'%s\' in organization \'%s\'.' %
                             (team_name, namespace_name))

  return __set_entity_repo_permission(matching[0], 'team', namespace_name,
                                      repository_name, role_name)
|
2013-10-01 16:13:25 +00:00
|
|
|
|
2013-10-02 05:40:11 +00:00
|
|
|
|
2013-10-01 16:13:25 +00:00
|
|
|
def purge_repository(namespace_name, repository_name):
  """ Fully delete a repository: its tags, its images/storage (via garbage
      collection), and finally the repository row itself with everything
      hanging off it. """
  # Delete all tags to allow gc to reclaim storage
  purge_all_repository_tags(namespace_name, repository_name)

  # Gc to remove the images and storage
  garbage_collect_repository(namespace_name, repository_name)

  # Delete the rest of the repository metadata
  fetched = _get_repository(namespace_name, repository_name)
  fetched.delete_instance(recursive=True, delete_nullable=True)
|
2013-10-02 05:40:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_private_repo_count(username):
  """ Count the private repositories under the given namespace. """
  query = (Repository
           .select()
           .join(Visibility)
           .switch(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Namespace.username == username, Visibility.name == 'private'))
  return query.count()
|
2013-10-16 18:24:10 +00:00
|
|
|
|
|
|
|
|
2015-02-17 17:35:16 +00:00
|
|
|
def create_access_token(repository, role, kind=None, friendly_name=None):
  """ Mint a temporary access token on the repository with the named role.

  kind, when given, must name an existing AccessTokenKind.
  """
  role_ref = Role.get(Role.name == role)
  kind_ref = None
  if kind is not None:
    kind_ref = AccessTokenKind.get(AccessTokenKind.name == kind)

  return AccessToken.create(repository=repository, temporary=True,
                            role=role_ref, kind=kind_ref, friendly_name=friendly_name)
|
|
|
|
|
|
|
|
|
2014-02-21 22:09:56 +00:00
|
|
|
def create_delegate_token(namespace_name, repository_name, friendly_name,
                          role='read'):
  """ Create a persistent (non-temporary) delegate token on the repo. """
  token_role = Role.get(name=role)
  repo = _get_repository(namespace_name, repository_name)
  return AccessToken.create(repository=repo, role=token_role,
                            friendly_name=friendly_name, temporary=False)
|
|
|
|
|
|
|
|
|
|
|
|
def get_repository_delegate_tokens(namespace_name, repository_name):
  """ Return the query of non-temporary (delegate) tokens on the repo.

      The left outer join against RepositoryBuildTrigger combined with the
      `uuid >> None` (IS NULL) filter excludes tokens that belong to a
      build trigger.
  """
  return (AccessToken
          .select(AccessToken, Role)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .switch(AccessToken)
          .join(Role)
          .switch(AccessToken)
          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
          .where(Repository.name == repository_name, Namespace.username == namespace_name,
                 AccessToken.temporary == False, RepositoryBuildTrigger.uuid >> None))
|
2013-10-16 18:24:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_repo_delegate_token(namespace_name, repository_name, code):
  """ Fetch a delegate token on the repo by its code.

  Raises InvalidTokenException when no such token exists.
  """
  tokens = get_repository_delegate_tokens(namespace_name, repository_name)
  try:
    return tokens.where(AccessToken.code == code).get()
  except AccessToken.DoesNotExist:
    raise InvalidTokenException('Unable to find token with code: %s' % code)
|
|
|
|
|
|
|
|
|
|
|
|
def set_repo_delegate_token_role(namespace_name, repository_name, code, role):
  """ Change a delegate token's role; only 'read' and 'write' are permitted.

  Returns the updated token.
  """
  token = get_repo_delegate_token(namespace_name, repository_name, code)

  if role not in ('read', 'write'):
    raise DataModelException('Invalid role for delegate token: %s' % role)

  token.role = Role.get(Role.name == role)
  token.save()
  return token
|
|
|
|
|
|
|
|
|
|
|
|
def delete_delegate_token(namespace_name, repository_name, code):
  """ Delete a delegate token (and dependent rows) by code.

  Returns the deleted token row.
  """
  found = get_repo_delegate_token(namespace_name, repository_name, code)
  found.delete_instance(recursive=True)
  return found
|
2013-10-16 18:24:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
def load_token_data(code):
  """ Load the permissions for any token by code.

      Returns the AccessToken with its Repository, Namespace and Role rows
      pre-selected; raises InvalidTokenException for an unknown code. """
  try:
    return (AccessToken
            .select(AccessToken, Repository, Namespace, Role)
            .join(Role)
            .switch(AccessToken)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .where(AccessToken.code == code)
            .get())

  except AccessToken.DoesNotExist:
    raise InvalidTokenException('Invalid delegate token code: %s' % code)
|
2013-10-24 20:37:03 +00:00
|
|
|
|
|
|
|
|
2014-10-01 18:23:15 +00:00
|
|
|
def _get_build_base_query():
  """ Base query for repository builds, newest first, with the repository,
      namespace, pull robot user, and (optional) trigger/service rows
      pre-joined. The trigger joins are left-outer since manual builds have
      no trigger. """
  return (RepositoryBuild
          .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
                  Namespace, User)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .switch(RepositoryBuild)
          .join(User, JOIN_LEFT_OUTER)
          .switch(RepositoryBuild)
          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
          .join(BuildTriggerService, JOIN_LEFT_OUTER)
          .order_by(RepositoryBuild.started.desc()))
|
|
|
|
|
|
|
|
|
|
|
|
def get_repository_build(build_uuid):
  """ Fetch a build by uuid.

  Raises InvalidRepositoryBuildException when no build matches.
  """
  try:
    return _get_build_base_query().where(RepositoryBuild.uuid == build_uuid).get()
  except RepositoryBuild.DoesNotExist:
    raise InvalidRepositoryBuildException('Unable to locate a build by id: %s' % build_uuid)
|
|
|
|
|
|
|
|
|
2014-03-05 20:50:32 +00:00
|
|
|
def list_repository_builds(namespace_name, repository_name, limit,
                           include_inactive=True, since=None):
  """ Return a query over the repo's builds, newest first, capped at limit.

  since, when given, restricts to builds started at or after that time;
  include_inactive=False drops builds in a terminal phase.
  """
  query = (_get_build_base_query()
           .where(Repository.name == repository_name, Namespace.username == namespace_name)
           .limit(limit))

  if since is not None:
    query = query.where(RepositoryBuild.started >= since)

  if not include_inactive:
    # 'error' and 'complete' are the terminal phases.
    query = query.where(RepositoryBuild.phase != 'error',
                        RepositoryBuild.phase != 'complete')

  return query
|
2013-10-25 19:13:11 +00:00
|
|
|
|
|
|
|
|
2014-02-28 21:23:36 +00:00
|
|
|
def get_recent_repository_build(namespace_name, repository_name):
  """ Return the most recently started build for the repo, or None. """
  try:
    return list_repository_builds(namespace_name, repository_name, 1).get()
  except RepositoryBuild.DoesNotExist:
    return None
|
2014-02-28 21:23:36 +00:00
|
|
|
|
|
|
|
|
2014-02-25 23:22:02 +00:00
|
|
|
def create_repository_build(repo, access_token, job_config_obj, dockerfile_id,
                            display_name, trigger=None, pull_robot_name=None):
  """ Insert a new build row for the repo.

  pull_robot_name, when given, is resolved to a robot user stored on the build.
  """
  pull_robot = lookup_robot(pull_robot_name) if pull_robot_name else None

  return RepositoryBuild.create(repository=repo, access_token=access_token,
                                job_config=json.dumps(job_config_obj),
                                display_name=display_name, trigger=trigger,
                                resource_key=dockerfile_id,
                                pull_robot=pull_robot)
|
2014-04-02 01:49:06 +00:00
|
|
|
|
2013-11-15 21:45:02 +00:00
|
|
|
|
2014-04-02 01:49:06 +00:00
|
|
|
def get_pull_robot_name(trigger):
  """ Return the username of the trigger's pull robot, or None when unset. """
  robot = trigger.pull_robot
  return robot.username if robot else None
|
|
|
|
|
|
|
|
|
|
|
|
def get_pull_credentials(robotname):
  """ Build registry pull credentials for the named robot account.

  Returns None when the robot does not exist or has no federated login.
  """
  robot = lookup_robot(robotname)
  if not robot:
    return None

  try:
    login_info = FederatedLogin.get(user=robot)
  except FederatedLogin.DoesNotExist:
    return None

  # The robot's service ident doubles as its registry password.
  registry_url = '%s://%s/v1/' % (config.app_config['PREFERRED_URL_SCHEME'],
                                  config.app_config['SERVER_HOSTNAME'])
  return {
    'username': robot.username,
    'password': login_info.service_ident,
    'registry': registry_url,
  }
|
2013-11-15 21:45:02 +00:00
|
|
|
|
2014-04-02 01:49:06 +00:00
|
|
|
|
2014-07-16 20:30:47 +00:00
|
|
|
def create_repo_notification(repo, event_name, method_name, config):
  """ Create a notification hook on the repo for the named event/method pair.

  NOTE: the `config` parameter shadows the module-level config object inside
  this function.
  """
  event_ref = ExternalNotificationEvent.get(ExternalNotificationEvent.name == event_name)
  method_ref = ExternalNotificationMethod.get(ExternalNotificationMethod.name == method_name)

  return RepositoryNotification.create(repository=repo, event=event_ref, method=method_ref,
                                       config_json=json.dumps(config))
|
2014-07-16 20:30:47 +00:00
|
|
|
|
|
|
|
|
2014-10-01 18:23:15 +00:00
|
|
|
def get_repo_notification(uuid):
  """ Load a repository notification by uuid.

  Raises InvalidNotificationException when no row matches.
  """
  query = (RepositoryNotification
           .select(RepositoryNotification, Repository, Namespace)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(RepositoryNotification.uuid == uuid))
  try:
    return query.get()
  except RepositoryNotification.DoesNotExist:
    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
|
|
|
|
|
|
|
|
|
|
|
|
def delete_repo_notification(namespace_name, repository_name, uuid):
  """ Delete the notification with the given uuid, first verifying that it
      belongs to the named repository.

      Raises InvalidNotificationException when the uuid does not exist under
      that repository. Returns the deleted row.
  """
  found = get_repo_notification(uuid)
  if (found.repository.namespace_user.username != namespace_name or
      found.repository.name != repository_name):
    # Fixed typo in the message ('notifiation' -> 'notification'), matching
    # the wording used by get_repo_notification.
    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
  found.delete_instance()
  return found
|
|
|
|
|
|
|
|
|
2014-07-18 02:51:58 +00:00
|
|
|
def list_repo_notifications(namespace_name, repository_name, event_name=None):
  """ Return the query of notifications configured on the repo, optionally
      restricted to a single event name. """
  query = (RepositoryNotification
           .select(RepositoryNotification, Repository, Namespace)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Namespace.username == namespace_name, Repository.name == repository_name))

  if event_name:
    # Join back through the notification to its event for the name filter.
    query = (query
             .switch(RepositoryNotification)
             .join(ExternalNotificationEvent)
             .where(ExternalNotificationEvent.name == event_name))

  return query
|
2014-07-16 20:30:47 +00:00
|
|
|
|
2013-12-07 00:25:27 +00:00
|
|
|
|
2014-04-10 04:26:55 +00:00
|
|
|
def list_logs(start_time, end_time, performer=None, repository=None, namespace=None):
  """ Return log entries in [start_time, end_time), newest first, optionally
      filtered by performer, repository, or account namespace. """
  # Aliased so the performing user can be joined alongside the account user.
  Performer = User.alias()
  joined = (LogEntry.select(LogEntry, LogEntryKind, User, Performer)
            .join(User)
            .switch(LogEntry)
            .join(Performer, JOIN_LEFT_OUTER,
                  on=(LogEntry.performer == Performer.id).alias('performer'))
            .switch(LogEntry)
            .join(LogEntryKind)
            .switch(LogEntry))

  if repository:
    joined = joined.where(LogEntry.repository == repository)

  if performer:
    joined = joined.where(LogEntry.performer == performer)

  if namespace:
    # Filters on the account (target) user, not the performer.
    joined = joined.where(User.username == namespace)

  # end_time is exclusive; start_time inclusive.
  return list(joined.where(
      LogEntry.datetime >= start_time,
      LogEntry.datetime < end_time).order_by(LogEntry.datetime.desc()))
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
def log_action(kind_name, user_or_organization_name, performer=None,
               repository=None, access_token=None, ip=None, metadata=None,
               timestamp=None):
  """ Record a LogEntry of the named kind under a user or organization account.

      Args:
        kind_name: name of an existing LogEntryKind.
        user_or_organization_name: username of the account the entry is logged under.
        performer, repository, ip: optional context stored on the entry.
        access_token: accepted for backward compatibility; not stored by this
          function (it was unused in the original as well).
        metadata: optional dict serialized to JSON on the entry.
        timestamp: entry datetime; defaults to now.
  """
  # Default to None rather than a shared mutable {} (mutable-default pitfall);
  # callers passing an explicit dict are unaffected.
  if metadata is None:
    metadata = {}

  if not timestamp:
    timestamp = datetime.today()

  kind = LogEntryKind.get(LogEntryKind.name == kind_name)
  account = User.get(User.username == user_or_organization_name)
  LogEntry.create(kind=kind, account=account, performer=performer,
                  repository=repository, ip=ip, metadata_json=json.dumps(metadata),
                  datetime=timestamp)
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
2015-04-24 19:13:08 +00:00
|
|
|
def update_build_trigger(trigger, config, auth_token=None):
  """ Persist a trigger's configuration and, when provided, its auth token. """
  serialized = json.dumps(config if config else {})
  trigger.config = serialized
  # Only overwrite the stored token when one was explicitly supplied.
  if auth_token is not None:
    trigger.auth_token = auth_token
  trigger.save()
|
|
|
|
|
|
|
|
|
|
|
|
def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None, config=None):
  """ Create a RepositoryBuildTrigger for the named service on the repo.

  config defaults to an empty dict and is stored JSON-serialized.
  """
  service = BuildTriggerService.get(name=service_name)
  return RepositoryBuildTrigger.create(repository=repo, service=service,
                                       auth_token=auth_token,
                                       connected_user=user,
                                       pull_robot=pull_robot,
                                       config=json.dumps(config or {}))
|
|
|
|
|
|
|
|
|
2014-11-09 22:50:57 +00:00
|
|
|
def get_build_trigger(trigger_uuid):
  """ Load a build trigger by uuid, with its service, repository, namespace,
      and connected user pre-joined.

      Raises InvalidBuildTriggerException for an unknown uuid. """
  try:
    return (RepositoryBuildTrigger
            .select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)
            .join(BuildTriggerService)
            .switch(RepositoryBuildTrigger)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .switch(RepositoryBuildTrigger)
            .join(User)
            .where(RepositoryBuildTrigger.uuid == trigger_uuid)
            .get())
  except RepositoryBuildTrigger.DoesNotExist:
    msg = 'No build trigger with uuid: %s' % trigger_uuid
    raise InvalidBuildTriggerException(msg)
|
|
|
|
|
|
|
|
|
|
|
|
def list_build_triggers(namespace_name, repository_name):
  """ Return the query of build triggers configured on the repo, with the
      service and repository rows pre-joined. """
  return (RepositoryBuildTrigger
          .select(RepositoryBuildTrigger, BuildTriggerService, Repository)
          .join(BuildTriggerService)
          .switch(RepositoryBuildTrigger)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
2014-02-19 21:08:33 +00:00
|
|
|
def list_trigger_builds(namespace_name, repository_name, trigger_uuid,
                        limit):
  """ Return the repo's builds restricted to those started by the trigger. """
  builds = list_repository_builds(namespace_name, repository_name, limit)
  return builds.where(RepositoryBuildTrigger.uuid == trigger_uuid)
|
2014-03-12 04:49:03 +00:00
|
|
|
|
|
|
|
|
2014-05-29 15:24:10 +00:00
|
|
|
def create_notification(kind_name, target, metadata=None):
  """ Create a Notification of the named kind for the target user/org.

      metadata: optional dict stored as JSON on the notification. Defaults to
      None rather than a shared mutable {} (mutable-default pitfall); an
      explicit dict argument behaves exactly as before.
  """
  if metadata is None:
    metadata = {}

  kind_ref = NotificationKind.get(name=kind_name)
  notification = Notification.create(kind=kind_ref, target=target,
                                     metadata_json=json.dumps(metadata))
  return notification
|
|
|
|
|
|
|
|
|
2014-05-29 15:24:10 +00:00
|
|
|
def create_unique_notification(kind_name, target, metadata=None):
  """ Create a notification of the given kind only when the target has none
      of that kind already; runs inside a transaction to narrow the race
      window between the existence check and the insert.

      metadata defaults to None instead of a shared mutable {} (mutable-default
      pitfall); behavior is unchanged for explicit arguments.
  """
  if metadata is None:
    metadata = {}

  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    if list_notifications(target, kind_name, limit=1).count() == 0:
      create_notification(kind_name, target, metadata)
|
|
|
|
|
|
|
|
|
2014-07-28 22:23:46 +00:00
|
|
|
def lookup_notification(user, uuid):
  """ Return the notification with the given uuid visible to user, or None. """
  matches = list(list_notifications(user, id_filter=uuid, include_dismissed=True, limit=1))
  return matches[0] if matches else None
|
|
|
|
|
|
|
|
|
2014-08-26 19:19:39 +00:00
|
|
|
def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
                       page=None, limit=None):
  """ Return the query of notifications visible to the user, newest first.

      A notification is visible when it targets the user directly, or when it
      targets an organization in which the user is a member of a team with the
      'admin' role (hence the chain of aliased outer joins below).

      kind_name/id_filter narrow by kind or uuid; page/limit paginate. """
  # Aliases: Org is the targeted organization; AdminTeam/AdminTeamMember/
  # AdminUser walk org -> team -> membership -> user to find admins.
  Org = User.alias()
  AdminTeam = Team.alias()
  AdminTeamMember = TeamMember.alias()
  AdminUser = User.alias()

  query = (Notification.select()
           .join(User)

           .switch(Notification)
           .join(Org, JOIN_LEFT_OUTER, on=(Org.id == Notification.target))
           .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id ==
                                                 AdminTeam.organization))
           .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
           .switch(AdminTeam)
           .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id ==
                                                       AdminTeamMember.team))
           .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user ==
                                                 AdminUser.id))
           # Direct target, or admin-team member of the targeted org.
           .where((Notification.target == user) |
                  ((AdminUser.id == user) & (TeamRole.name == 'admin')))
           .order_by(Notification.created)
           .desc())

  if not include_dismissed:
    query = query.switch(Notification).where(Notification.dismissed == False)

  if kind_name:
    query = (query
             .switch(Notification)
             .join(NotificationKind)
             .where(NotificationKind.name == kind_name))

  if id_filter:
    query = (query
             .switch(Notification)
             .where(Notification.uuid == id_filter))

  # paginate() takes precedence; bare limit() applies otherwise.
  if page:
    query = query.paginate(page, limit)
  elif limit:
    query = query.limit(limit)

  return query
|
|
|
|
|
2014-03-12 04:49:03 +00:00
|
|
|
|
2014-05-29 15:24:10 +00:00
|
|
|
def delete_all_notifications_by_kind(kind_name):
  """ Delete every notification of the named kind, across all targets. """
  kind_ref = NotificationKind.get(name=kind_name)
  Notification.delete().where(Notification.kind == kind_ref).execute()
|
2014-03-12 04:49:03 +00:00
|
|
|
|
|
|
|
|
2014-05-29 15:24:10 +00:00
|
|
|
def delete_notifications_by_kind(target, kind_name):
  """ Delete all of the target's notifications of the named kind. """
  kind_ref = NotificationKind.get(name=kind_name)
  (Notification
   .delete()
   .where(Notification.target == target, Notification.kind == kind_ref)
   .execute())
|
2014-04-10 04:26:55 +00:00
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-08-18 21:24:00 +00:00
|
|
|
def delete_matching_notifications(target, kind_name, **kwargs):
  """ Delete the target's notifications of the named kind whose JSON metadata
      contains every key/value pair supplied in kwargs.

      Notifications whose metadata cannot be parsed are skipped rather than
      aborting the sweep.
  """
  kind_ref = NotificationKind.get(name=kind_name)

  # Load all notifications for the user with the given kind.
  notifications = Notification.select().where(
    Notification.target == target,
    Notification.kind == kind_ref)

  # For each, match the metadata to the specified values.
  for notification in notifications:
    # Narrowed from a bare `except:` to the errors json.loads can raise,
    # so interrupts and unrelated bugs are no longer swallowed.
    try:
      metadata = json.loads(notification.metadata_json)
    except (TypeError, ValueError):
      continue

    # Delete only when every requested key is present with the exact value.
    if all(key in metadata and metadata[key] == value
           for key, value in kwargs.iteritems()):
      notification.delete_instance()
|
|
|
|
|
2014-04-10 04:26:55 +00:00
|
|
|
|
2015-05-11 22:03:25 +00:00
|
|
|
def get_organizations():
  """ Return all organization accounts, excluding robot accounts. """
  return User.select().where((User.organization == True) & (User.robot == False))
|
|
|
|
|
|
|
|
|
2014-04-10 04:26:55 +00:00
|
|
|
def get_active_users():
  """ Return all real user accounts (neither organizations nor robots). """
  return User.select().where((User.organization == False) & (User.robot == False))
|
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-04-10 04:26:55 +00:00
|
|
|
def get_active_user_count():
  """ Count real user accounts (neither organizations nor robots). """
  return get_active_users().count()
|
|
|
|
|
2014-09-15 16:01:02 +00:00
|
|
|
|
|
|
|
def detach_external_login(user, service_name):
  """ Removes the user's federated login for the named service, if any.

      Silently does nothing when the service name is unknown.
  """
  try:
    service = LoginService.get(name=service_name)
  except LoginService.DoesNotExist:
    return

  (FederatedLogin
   .delete()
   .where(FederatedLogin.user == user, FederatedLogin.service == service)
   .execute())
|
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-04-10 04:26:55 +00:00
|
|
|
def delete_user(user):
  """ Deletes the user along with all rows that reference them. """
  # TODO: also delete any repository data associated
  user.delete_instance(recursive=True, delete_nullable=True)
|
2014-05-13 22:53:42 +00:00
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2015-01-20 19:46:22 +00:00
|
|
|
def check_health(app_config):
  """ Returns True if the database is reachable and populated, False otherwise. """
  # Attempt to connect to the database first. If the DB is not responding,
  # using the validate_database_url will timeout quickly, as opposed to
  # making a normal connect which will just hang (thus breaking the health
  # check).
  try:
    validate_database_url(app_config['DB_URI'], connect_timeout=3)
  except Exception:
    logger.exception('Could not connect to the database')
    return False

  # We will connect to the db, check that it contains some log entry kinds
  try:
    return bool(list(LogEntryKind.select().limit(1)))
  except Exception:
    # Narrowed from a bare except and logged so the failure cause is not
    # silently discarded by the health check.
    logger.exception('Could not read log entry kinds from the database')
    return False
|
2014-07-28 18:58:12 +00:00
|
|
|
|
2014-09-24 22:01:35 +00:00
|
|
|
|
2014-07-28 18:58:12 +00:00
|
|
|
def get_email_authorized_for_repo(namespace, repository, email):
  """ Returns the authorized-email record for the given repository and email
      address, or None when no such record exists.
  """
  query = (RepositoryAuthorizedEmail
           .select(RepositoryAuthorizedEmail, Repository, Namespace)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Namespace.username == namespace, Repository.name == repository,
                  RepositoryAuthorizedEmail.email == email))
  try:
    return query.get()
  except RepositoryAuthorizedEmail.DoesNotExist:
    return None
|
|
|
|
|
|
|
|
|
|
|
|
def create_email_authorization_for_repo(namespace_name, repository_name, email):
  """ Creates a new, unconfirmed authorized-email row for the repository.

      Raises DataModelException when the repository does not exist.
  """
  try:
    repo = _get_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    raise DataModelException('Invalid repository %s/%s' %
                             (namespace_name, repository_name))

  return RepositoryAuthorizedEmail.create(repository=repo, email=email, confirmed=False)
|
|
|
|
|
|
|
|
|
|
|
|
def confirm_email_authorization_for_repo(code):
  """ Marks the authorized-email row matching the confirmation code as
      confirmed and returns it.

      Raises DataModelException when the code is unknown.
  """
  query = (RepositoryAuthorizedEmail
           .select(RepositoryAuthorizedEmail, Repository, Namespace)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(RepositoryAuthorizedEmail.code == code))
  try:
    found = query.get()
  except RepositoryAuthorizedEmail.DoesNotExist:
    raise DataModelException('Invalid confirmation code.')

  found.confirmed = True
  found.save()
  return found
|
2014-08-18 21:24:00 +00:00
|
|
|
|
|
|
|
|
2014-09-08 21:20:01 +00:00
|
|
|
def delete_team_email_invite(team, email):
  """ Deletes the pending invite addressed to the email on the given team.

      Raises TeamMemberInvite.DoesNotExist when no such invite exists.
  """
  invite = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
  invite.delete_instance()
|
|
|
|
|
|
|
|
def delete_team_user_invite(team, user):
  """ Deletes the pending invite addressed to the user on the given team.

      Returns True when an invite was deleted, False when none existed.
  """
  try:
    invite = TeamMemberInvite.get(TeamMemberInvite.user == user, TeamMemberInvite.team == team)
  except TeamMemberInvite.DoesNotExist:
    return False

  invite.delete_instance()
  return True
|
|
|
|
|
2014-08-18 21:24:00 +00:00
|
|
|
def lookup_team_invites(user):
  """ Returns all pending team invites addressed to the given user. """
  query = TeamMemberInvite.select()
  return query.where(TeamMemberInvite.user == user)
|
|
|
|
|
2014-09-04 22:42:23 +00:00
|
|
|
def lookup_team_invite(code, user=None):
  """ Looks up the team invite with the given invite code.

      When a user is supplied, the invite must be addressed to that user.
      Raises DataModelException for unknown codes or mismatched users.
  """
  # Lookup the invite code.
  try:
    found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code)
  except TeamMemberInvite.DoesNotExist:
    raise DataModelException('Invalid confirmation code.')

  # Deliberately report the same error for a user mismatch so the code's
  # existence is not leaked to the wrong account.
  if user and found.user != user:
    raise DataModelException('Invalid confirmation code.')

  return found
|
2014-08-18 21:24:00 +00:00
|
|
|
|
2014-09-04 22:42:23 +00:00
|
|
|
def delete_team_invite(code, user=None):
  """ Deletes the team invite with the given code (optionally validated
      against the user) and returns the (team, inviter) pair it referenced.
  """
  found = lookup_team_invite(code, user)

  # Capture the referenced rows before the invite itself is removed.
  result = (found.team, found.inviter)
  found.delete_instance()
  return result
|
|
|
|
|
2014-09-04 22:42:23 +00:00
|
|
|
|
2014-08-18 21:24:00 +00:00
|
|
|
def confirm_team_invite(code, user):
  """ Confirms the team invite with the given code on behalf of the user,
      adding them to the team.

      Returns a (team, inviter) tuple and deletes the consumed invite.
      Raises DataModelException when the invite is addressed to another user.
  """
  found = lookup_team_invite(code)

  # If the invite is for a specific user, we have to confirm that here.
  if found.user is not None and found.user != user:
    message = """This invite is intended for user "%s".
Please login to that account and try again.""" % found.user.username
    raise DataModelException(message)

  # Add the user to the team.
  try:
    add_user_to_team(user, found.team)
  except UserAlreadyInTeam:
    # Ignore: confirming an invite for a team you already belong to is a no-op.
    pass

  # Delete the invite and return the team.
  team = found.team
  inviter = found.inviter
  found.delete_instance()
  return (team, inviter)
|
2014-09-15 21:52:17 +00:00
|
|
|
|
2015-02-23 18:38:01 +00:00
|
|
|
def cancel_repository_build(build, work_queue):
  """ Attempts to cancel the given repository build.

      Only builds still in the WAITING phase with an associated queue item can
      be cancelled; the build row is deleted entirely on success. Returns True
      when the build was cancelled, False otherwise.
  """
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Reload the build for update.
    try:
      build = db_for_update(RepositoryBuild.select().where(RepositoryBuild.id == build.id)).get()
    except RepositoryBuild.DoesNotExist:
      # The build row disappeared (e.g. cancelled or deleted concurrently).
      return False

    # A build that has started (or has no queue item) can no longer be
    # cancelled via the queue.
    if build.phase != BUILD_PHASE.WAITING or not build.queue_id:
      return False

    # Try to cancel the queue item.
    if not work_queue.cancel(build.queue_id):
      return False

    # Delete the build row.
    build.delete_instance()
    return True
|
2014-10-28 20:33:13 +00:00
|
|
|
|
2015-05-08 02:49:11 +00:00
|
|
|
def _get_repository_events(repository, time_delta, time_delta_earlier, clause):
  """ Returns a pair representing the count of the number of events for the given
      repository in each of the specified time deltas. The date ranges are calculated by
      taking the current time today and subtracting the time delta given. Since
      we want to grab *two* ranges, we restrict the second range to be greater
      than the first (i.e. referring to an earlier time), so we can conduct the
      lookup in a single query. The clause is used to further filter the kind of
      events being found.

      Raises ValueError when time_delta_earlier does not refer to an earlier
      time than time_delta.
  """
  # Snapshot today's date once so both range boundaries are computed from the
  # same day, even if this call straddles midnight.
  today = date.today()
  since = today - time_delta
  since_earlier = today - time_delta_earlier

  if since_earlier >= since:
    raise ValueError('time_delta_earlier must be greater than time_delta')

  # This uses a CASE WHEN inner clause to further filter the count. The
  # interpolated value is a strftime-produced date (never user input), so
  # inlining it into the SQL fragment is safe.
  formatted = since.strftime('%Y-%m-%d')
  case_query = 'CASE WHEN datetime >= \'%s\' THEN 1 ELSE 0 END' % formatted

  result = (LogEntry.select(fn.Sum(SQL(case_query)), fn.Count(SQL('*')))
            .where(LogEntry.repository == repository)
            .where(clause)
            .where(LogEntry.datetime >= since_earlier)
            .tuples()
            .get())

  # SUM() yields NULL (None) when no rows fall in the recent range; coerce
  # both counts to plain ints.
  return (int(result[0]) if result[0] else 0, int(result[1]) if result[1] else 0)
|
2015-05-08 02:49:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_repository_pushes(repository, time_delta, time_delta_earlier):
  """ Returns the (recent, earlier) counts of push events for the repository. """
  push_kind = LogEntryKind.get(name='push_repo')
  return _get_repository_events(repository, time_delta, time_delta_earlier,
                                LogEntry.kind == push_kind)
|
2015-03-11 00:22:46 +00:00
|
|
|
|
2015-05-08 02:49:11 +00:00
|
|
|
|
|
|
|
def get_repository_pulls(repository, time_delta, time_delta_earlier):
  """ Returns the (recent, earlier) counts of pull events for the repository. """
  pull_kind = LogEntryKind.get(name='pull_repo')
  verb_kind = LogEntryKind.get(name='repo_verb')
  clause = (LogEntry.kind == pull_kind) | (LogEntry.kind == verb_kind)
  return _get_repository_events(repository, time_delta, time_delta_earlier, clause)
|
2015-03-11 00:22:46 +00:00
|
|
|
|
|
|
|
|
2014-10-28 20:33:13 +00:00
|
|
|
def get_repository_usage():
  """ Returns the number of distinct (ip, repository) pull pairs logged over
      the last four weeks.
  """
  month_ago = date.today() - timedelta(weeks=4)
  pull_kind = LogEntryKind.get(name='pull_repo')
  verb_kind = LogEntryKind.get(name='repo_verb')
  query = (LogEntry
           .select(LogEntry.ip, LogEntry.repository)
           .where((LogEntry.kind == pull_kind) | (LogEntry.kind == verb_kind))
           .where(~(LogEntry.repository >> None))
           .where(LogEntry.datetime >= month_ago)
           .group_by(LogEntry.ip, LogEntry.repository))
  return query.count()
|
|
|
|
|
2014-09-08 20:43:17 +00:00
|
|
|
def archivable_buildlogs_query():
  """ Returns the builds whose logs are eligible for archiving: those that
      completed, errored, or are presumed dead, and are not yet archived.
  """
  dead_cutoff = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
  finished = ((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
              (RepositoryBuild.phase == BUILD_PHASE.ERROR) |
              (RepositoryBuild.started < dead_cutoff))
  return RepositoryBuild.select().where(finished, RepositoryBuild.logs_archived == False)
|
2014-11-19 19:50:56 +00:00
|
|
|
|
|
|
|
|
|
|
|
def star_repository(user, repository):
  """ Stars a repository. """
  # Star.create already persists the row; the previous explicit save() issued
  # a redundant second UPDATE.
  Star.create(user=user.id, repository=repository.id)
|
|
|
|
|
|
|
|
|
|
|
|
def unstar_repository(user, repository):
  """ Unstars a repository.

      Raises DataModelException when the user had not starred the repository.
  """
  deleted = (Star
             .delete()
             .where(Star.repository == repository.id, Star.user == user.id)
             .execute())

  # delete().execute() returns a deleted-row count and never raises
  # Star.DoesNotExist, so the previous except clause was dead code and
  # missing stars were silently ignored. Check the count instead.
  if not deleted:
    raise DataModelException('Star not found.')
|
|
|
|
|
|
|
|
|
|
|
|
def get_user_starred_repositories(user, limit=None, page=None):
  """ Retrieves all of the repositories a user has starred, optionally
      paginated (page + limit) or capped (limit only).
  """
  query = (Repository
           .select(Repository, User, Visibility)
           .join(Star)
           .switch(Repository)
           .join(User)
           .switch(Repository)
           .join(Visibility)
           .where(Star.user == user))

  if page and limit:
    return query.paginate(page, limit)
  if limit:
    return query.limit(limit)
  return query
|
|
|
|
|
|
|
|
|
|
|
|
def repository_is_starred(user, repository):
  """ Determines whether a user has starred a repository or not. """
  query = Star.select().where(Star.repository == repository.id, Star.user == user.id)
  try:
    query.get()
  except Star.DoesNotExist:
    return False
  return True
|
2015-04-16 21:18:00 +00:00
|
|
|
|
|
|
|
|
2015-04-19 19:43:16 +00:00
|
|
|
def revert_tag(repository, tag_name, docker_image_id):
  """ Reverts a tag to a specific image ID.

      Raises DataModelException unless the image ID previously existed under
      the tag in this repository.
  """
  # Verify that the image ID already existed under this repository under the
  # tag.
  history = (RepositoryTag
             .select()
             .join(Image)
             .where(RepositoryTag.repository == repository)
             .where(RepositoryTag.name == tag_name)
             .where(Image.docker_image_id == docker_image_id))
  try:
    history.get()
  except RepositoryTag.DoesNotExist:
    raise DataModelException('Cannot revert to unknown or invalid image')

  return create_or_update_tag(repository.namespace_user.username, repository.name,
                              tag_name, docker_image_id, reversion=True)
|