Accidental refactor, split out legacy.py into separate submodules and update all call sites.
parent 2109d24483
commit 3efaa255e8
92 changed files with 4458 additions and 4269 deletions
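Most of those call-site updates follow one mechanical pattern: helpers that previously hung directly off data.model (re-exported from legacy.py) are now reached through a named submodule. A minimal before/after sketch, using get_user_by_uuid from the hunks below; the surrounding lines are illustrative only:

# Before this commit: helpers were re-exported at package level via legacy.py.
from data import model
user = model.get_user_by_uuid(user_uuid)

# After this commit: the same helper lives on the data.model.user submodule.
user = model.user.get_user_by_uuid(user_uuid)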
4 app.py
@@ -2,7 +2,7 @@ import logging
 import os
 import json
 
-from flask import Flask, Config, request, Request, _request_ctx_stack
+from flask import Flask, request, Request, _request_ctx_stack
 from flask.ext.principal import Principal
 from flask.ext.login import LoginManager, UserMixin
 from flask.ext.mail import Mail
@@ -161,7 +161,7 @@ class LoginWrappedDBUser(UserMixin):
   def db_user(self):
     if not self._db_user:
-      self._db_user = model.get_user_by_uuid(self._uuid)
+      self._db_user = model.user.get_user_by_uuid(self._uuid)
     return self._db_user
 
   def is_authenticated(self):
25 auth/auth.py
@@ -12,11 +12,10 @@ from base64 import b64decode
 import scopes
 
 from data import model
-from data.model import oauth
 from app import app, authentication
 from permissions import QuayDeferredPermissionUser
 from auth_context import (set_authenticated_user, set_validated_token, set_grant_user_context,
-                          set_authenticated_user_deferred, set_validated_oauth_token)
+                          set_validated_oauth_token)
 from util.http import abort
 
@@ -48,7 +47,7 @@ def _load_user_from_cookie():
 
 
 def _validate_and_apply_oauth_token(token):
-  validated = oauth.validate_access_token(token)
+  validated = model.oauth.validate_access_token(token)
   if not validated:
     logger.warning('OAuth access token could not be validated: %s', token)
     authenticate_header = {
@@ -96,40 +95,40 @@ def _process_basic_auth(auth):
   elif credentials[0] == '$token':
     # Use as token auth
     try:
-      token = model.load_token_data(credentials[1])
-      logger.debug('Successfully validated token: %s' % credentials[1])
+      token = model.token.load_token_data(credentials[1])
+      logger.debug('Successfully validated token: %s', credentials[1])
       set_validated_token(token)
 
       identity_changed.send(app, identity=Identity(token.code, 'token'))
       return
 
     except model.DataModelException:
-      logger.debug('Invalid token: %s' % credentials[1])
+      logger.debug('Invalid token: %s', credentials[1])
 
   elif credentials[0] == '$oauthtoken':
     oauth_token = credentials[1]
     _validate_and_apply_oauth_token(oauth_token)
 
   elif '+' in credentials[0]:
-    logger.debug('Trying robot auth with credentials %s' % str(credentials))
+    logger.debug('Trying robot auth with credentials %s', str(credentials))
     # Use as robot auth
    try:
-      robot = model.verify_robot(credentials[0], credentials[1])
-      logger.debug('Successfully validated robot: %s' % credentials[0])
+      robot = model.user.verify_robot(credentials[0], credentials[1])
+      logger.debug('Successfully validated robot: %s', credentials[0])
       set_authenticated_user(robot)
 
       deferred_robot = QuayDeferredPermissionUser.for_user(robot)
       identity_changed.send(app, identity=deferred_robot)
       return
     except model.InvalidRobotException:
-      logger.debug('Invalid robot or password for robot: %s' % credentials[0])
+      logger.debug('Invalid robot or password for robot: %s', credentials[0])
 
   else:
     (authenticated, error_message) = authentication.verify_user(credentials[0], credentials[1],
                                                                 basic_auth=True)
 
     if authenticated:
-      logger.debug('Successfully validated user: %s' % authenticated.username)
+      logger.debug('Successfully validated user: %s', authenticated.username)
       set_authenticated_user(authenticated)
 
       new_identity = QuayDeferredPermissionUser.for_user(authenticated)
@@ -203,7 +202,7 @@ def process_auth(func):
     auth = request.headers.get('authorization', '')
 
     if auth:
-      logger.debug('Validating auth header: %s' % auth)
+      logger.debug('Validating auth header: %s', auth)
       _process_signed_grant(auth)
       _process_basic_auth(auth)
     else:
@@ -227,7 +226,7 @@ def extract_namespace_repo_from_session(func):
   @wraps(func)
   def wrapper(*args, **kwargs):
     if 'namespace' not in session or 'repository' not in session:
-      logger.error('Unable to load namespace or repository from session: %s' % session)
+      logger.error('Unable to load namespace or repository from session: %s', session)
       abort(400, message='Missing namespace in request')
 
     return func(session['namespace'], session['repository'], *args, **kwargs)
@@ -16,7 +16,7 @@ def get_authenticated_user():
     return None
 
   logger.debug('Loading deferred authenticated user.')
-  loaded = model.get_user_by_uuid(user_uuid)
+  loaded = model.user.get_user_by_uuid(user_uuid)
  if not loaded.enabled:
    return None
 
@@ -105,7 +105,7 @@ class QuayDeferredPermissionUser(Identity):
   def can(self, permission):
     if not self._permissions_loaded:
       logger.debug('Loading user permissions after deferring for: %s', self.id)
-      user_object = self._user_object or model.get_user_by_uuid(self.id)
+      user_object = self._user_object or model.user.get_user_by_uuid(self.id)
       if user_object is None:
         return super(QuayDeferredPermissionUser, self).can(permission)
 
@@ -130,14 +130,14 @@ class QuayDeferredPermissionUser(Identity):
       self.provides.add(user_repos)
 
       # Add repository permissions
-      for perm in model.get_all_user_permissions(user_object):
+      for perm in model.permission.get_all_user_permissions(user_object):
         repo_grant = _RepositoryNeed(perm.repository.namespace_user.username, perm.repository.name,
                                      self._repo_role_for_scopes(perm.role.name))
         logger.debug('User added permission: {0}'.format(repo_grant))
         self.provides.add(repo_grant)
 
       # Add namespace permissions derived
-      for team in model.get_org_wide_permissions(user_object):
+      for team in model.permission.get_org_wide_permissions(user_object):
         team_org_grant = _OrganizationNeed(team.organization.username,
                                            self._team_role_for_scopes(team.role.name))
         logger.debug('Organization team added permission: {0}'.format(team_org_grant))
@@ -261,7 +261,7 @@ def on_identity_loaded(sender, identity):
 
   elif identity.auth_type == 'token':
     logger.debug('Loading permissions for token: %s', identity.id)
-    token_data = model.load_token_data(identity.id)
+    token_data = model.token.load_token_data(identity.id)
 
     repo_grant = _RepositoryNeed(token_data.repository.namespace_user.username,
                                  token_data.repository.name,
@@ -53,7 +53,7 @@ class BuildJob(object):
   @lru_cache(maxsize=1)
   def _load_repo_build(self):
     try:
-      return model.get_repository_build(self.job_details['build_uuid'])
+      return model.build.get_repository_build(self.job_details['build_uuid'])
     except model.InvalidRepositoryBuildException:
       raise BuildJobLoadException(
           'Could not load repository build with ID %s' % self.job_details['build_uuid'])
@@ -99,15 +99,15 @@ class BuildJob(object):
     repo_namespace = repo_build.repository.namespace_user.username
     repo_name = repo_build.repository.name
 
-    base_image = model.get_image(repo_build.repository, base_image_id)
+    base_image = model.image.get_image(repo_build.repository, base_image_id)
     if base_image is None:
       return None
 
     # Build an in-memory tree of the full heirarchy of images in the repository.
-    all_images = model.get_repository_images_without_placements(repo_build.repository,
+    all_images = model.image.get_repository_images_without_placements(repo_build.repository,
                                                                  with_ancestor=base_image)
 
-    all_tags = model.list_repository_tags(repo_namespace, repo_name)
+    all_tags = model.tag.list_repository_tags(repo_namespace, repo_name)
     tree = ImageTree(all_images, all_tags, base_filter=base_image.id)
 
     # Find a path in the tree, starting at the base image, that matches the cache comments
@@ -136,7 +136,8 @@ class BuildJob(object):
     """
     tags = self.build_config.get('docker_tags', ['latest'])
     repository = self.repo_build.repository
-    existing_tags = model.list_repository_tags(repository.namespace_user.username, repository.name)
+    existing_tags = model.tag.list_repository_tags(repository.namespace_user.username,
+                                                   repository.name)
     cached_tags = set(tags) & set([tag.name for tag in existing_tags])
     if cached_tags:
       return list(cached_tags)[0]
@@ -54,7 +54,7 @@ class StatusHandler(object):
     self._append_log_message(phase, self._build_logs.PHASE, extra_data)
 
     # Update the repository build with the new phase
-    repo_build = model.get_repository_build(self._uuid)
+    repo_build = model.build.get_repository_build(self._uuid)
     repo_build.phase = phase
     repo_build.save()
 
@@ -80,6 +80,41 @@ class UseThenDisconnect(object):
     close_db_filter(None)
 
 
+class TupleSelector(object):
+  """ Helper class for selecting tuples from a peewee query and easily accessing
+      them as if they were objects.
+  """
+  class _TupleWrapper(object):
+    def __init__(self, data, fields):
+      self._data = data
+      self._fields = fields
+
+    def get(self, field):
+      return self._data[self._fields.index(TupleSelector.tuple_reference_key(field))]
+
+  @classmethod
+  def tuple_reference_key(cls, field):
+    """ Returns a string key for referencing a field in a TupleSelector. """
+    if field._node_type == 'func':
+      return field.name + ','.join([cls.tuple_reference_key(arg) for arg in field.arguments])
+
+    if field._node_type == 'field':
+      return field.name + ':' + field.model_class.__name__
+
+    raise Exception('Unknown field type %s in TupleSelector' % field._node_type)
+
+  def __init__(self, query, fields):
+    self._query = query.select(*fields).tuples()
+    self._fields = [TupleSelector.tuple_reference_key(field) for field in fields]
+
+  def __iter__(self):
+    return self._build_iterator()
+
+  def _build_iterator(self):
+    for tuple_data in self._query:
+      yield TupleSelector._TupleWrapper(tuple_data, self._fields)
+
+
 db = Proxy()
 read_slave = Proxy()
 db_random_func = CallableProxy()
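TupleSelector is easiest to see with a usage sketch. The snippet below is illustrative and not part of the commit; Repository and Namespace are real models from data.database, but this particular query is an assumption:

# Select just two columns as raw tuples, then read values back by field
# reference instead of by positional index.
query = (Repository
         .select()
         .join(Namespace, on=(Repository.namespace_user == Namespace.id)))
selected = TupleSelector(query, [Repository.name, Namespace.username])

for row in selected:
  # get() maps the field back to its tuple position via tuple_reference_key().
  print row.get(Repository.name), row.get(Namespace.username)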
@@ -216,6 +251,10 @@ class User(BaseModel):
     else:
       super(User, self).delete_instance(recursive=recursive, delete_nullable=delete_nullable)
 
 
+Namespace = User.alias()
+
+
 class TeamRole(BaseModel):
   name = CharField(index=True)
 
@@ -313,6 +352,9 @@ class Repository(BaseModel):
     dependencies = defaultdict(set)
 
     for query, fk in ops:
+      # We only want to skip transitive deletes, which are done using subqueries in the form of
+      # DELETE FROM <table> in <subquery>. If an op is not using a subquery, we allow it to be
+      # applied directly.
       if fk.model_class not in skip_transitive_deletes or query.op != 'in':
         filtered_ops.append((query, fk))
 
@@ -1,7 +1,76 @@
 from data.database import db
 
 
+class DataModelException(Exception):
+  pass
+
+
+class BlobDoesNotExist(DataModelException):
+  pass
+
+
+class InvalidEmailAddressException(DataModelException):
+  pass
+
+
+class InvalidOrganizationException(DataModelException):
+  pass
+
+
+class InvalidPasswordException(DataModelException):
+  pass
+
+
+class InvalidRobotException(DataModelException):
+  pass
+
+
+class InvalidUsernameException(DataModelException):
+  pass
+
+
+class TooManyUsersException(DataModelException):
+  pass
+
+
+class InvalidRepositoryBuildException(DataModelException):
+  pass
+
+
+class InvalidBuildTriggerException(DataModelException):
+  pass
+
+
+class InvalidTokenException(DataModelException):
+  pass
+
+
+class InvalidNotificationException(DataModelException):
+  pass
+
+
+class InvalidImageException(DataModelException):
+  pass
+
+
+class UserAlreadyInTeam(DataModelException):
+  pass
+
+
+class InvalidTeamException(DataModelException):
+  pass
+
+
+class InvalidTeamMemberException(DataModelException):
+  pass
+
+
+class TooManyLoginAttemptsException(Exception):
+  def __init__(self, message, retry_after):
+    super(TooManyLoginAttemptsException, self).__init__(message)
+    self.retry_after = retry_after
+
+
 class Config(object):
   def __init__(self):
     self.app_config = None
@@ -11,4 +80,12 @@ class Config(object):
 config = Config()
 
 
-from data.model.legacy import *
+def db_transaction():
+  return config.app_config['DB_TRANSACTION_FACTORY'](db)
+
+
+# There MUST NOT be any circular dependencies between these subsections. If there are fix it by
+# moving the minimal number of things to _basequery
+# TODO document the methods and modules for each one of the submodules below.
+from data.model import (blob, build, image, log, notification, oauth, organization, permission,
+                        repository, storage, tag, team, token, user)
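With the star import of legacy.py gone, data.model acts as a package facade: the shared exception types and db_transaction live at the top level, while the queries live in the submodules imported above. A hedged sketch of the calling convention this sets up; build_uuid is a placeholder, and the call and exception names are taken from the hunks in this commit:

from data import model

# Queries now live on named submodules (model.user, model.build, model.tag, ...).
try:
  build = model.build.get_repository_build(build_uuid)
except model.InvalidRepositoryBuildException:
  build = None

# Shared helpers and exception types stay on the package itself.
with model.db_transaction():
  pass  # transactional work against the models goes here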
77 data/model/_basequery.py Normal file
@@ -0,0 +1,77 @@
from peewee import JOIN_LEFT_OUTER
|
||||
from cachetools import lru_cache
|
||||
|
||||
from data.database import (Repository, User, Team, TeamMember, RepositoryPermission, TeamRole,
|
||||
Namespace, Visibility, db_for_update)
|
||||
|
||||
|
||||
def get_existing_repository(namespace_name, repository_name, for_update=False):
|
||||
query = (Repository
|
||||
.select(Repository, Namespace)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
if for_update:
|
||||
query = db_for_update(query)
|
||||
|
||||
return query.get()
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_public_repo_visibility():
|
||||
return Visibility.get(name='public')
|
||||
|
||||
|
||||
def filter_to_repos_for_user(query, username=None, namespace=None, include_public=True):
|
||||
if not include_public and not username:
|
||||
return Repository.select().where(Repository.id == '-1')
|
||||
|
||||
where_clause = None
|
||||
if username:
|
||||
UserThroughTeam = User.alias()
|
||||
Org = User.alias()
|
||||
AdminTeam = Team.alias()
|
||||
AdminTeamMember = TeamMember.alias()
|
||||
AdminUser = User.alias()
|
||||
|
||||
query = (query
|
||||
.switch(RepositoryPermission)
|
||||
.join(User, JOIN_LEFT_OUTER)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Team, JOIN_LEFT_OUTER)
|
||||
.join(TeamMember, JOIN_LEFT_OUTER)
|
||||
.join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id == TeamMember.user))
|
||||
.switch(Repository)
|
||||
.join(Org, JOIN_LEFT_OUTER, on=(Repository.namespace_user == Org.id))
|
||||
.join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
|
||||
.join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
|
||||
.switch(AdminTeam)
|
||||
.join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
|
||||
.join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id)))
|
||||
|
||||
where_clause = ((User.username == username) | (UserThroughTeam.username == username) |
|
||||
((AdminUser.username == username) & (TeamRole.name == 'admin')))
|
||||
|
||||
if namespace:
|
||||
where_clause = where_clause & (Namespace.username == namespace)
|
||||
|
||||
# TODO(jschorr, jake): Figure out why the old join on Visibility was so darn slow and
|
||||
# remove this hack.
|
||||
if include_public:
|
||||
new_clause = (Repository.visibility == get_public_repo_visibility())
|
||||
if where_clause:
|
||||
where_clause = where_clause | new_clause
|
||||
else:
|
||||
where_clause = new_clause
|
||||
|
||||
return query.where(where_clause)
|
||||
|
||||
|
||||
def get_user_organizations(username):
|
||||
UserAlias = User.alias()
|
||||
return (User
|
||||
.select()
|
||||
.distinct()
|
||||
.join(Team)
|
||||
.join(TeamMember)
|
||||
.join(UserAlias, on=(UserAlias.id == TeamMember.user))
|
||||
.where(User.organization == True, UserAlias.username == username))
|
@@ -1,22 +1,49 @@
from data.model import config, DataModelException
|
||||
from uuid import uuid4
|
||||
|
||||
from data.database import ImageStorage, Image, ImageStorageLocation, ImageStoragePlacement
|
||||
from data.model import tag, _basequery, BlobDoesNotExist, db_transaction
|
||||
from data.database import (Repository, Namespace, ImageStorage, Image, ImageStorageLocation,
|
||||
ImageStoragePlacement)
|
||||
|
||||
|
||||
class BlobDoesNotExist(DataModelException):
|
||||
pass
|
||||
|
||||
|
||||
def get_blob_by_digest(blob_digest):
|
||||
try:
|
||||
return ImageStorage.get(checksum=blob_digest)
|
||||
except ImageStorage.DoesNotExist:
|
||||
def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
|
||||
""" Find the content-addressable blob linked to the specified repository.
|
||||
"""
|
||||
placements = list(ImageStoragePlacement
|
||||
.select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
|
||||
.join(ImageStorageLocation)
|
||||
.switch(ImageStoragePlacement)
|
||||
.join(ImageStorage)
|
||||
.join(Image)
|
||||
.join(Repository)
|
||||
.join(Namespace)
|
||||
.where(Repository.name == repo_name, Namespace.username == namespace,
|
||||
ImageStorage.checksum == blob_digest))
|
||||
if not placements:
|
||||
raise BlobDoesNotExist('Blob does not exist with digest: {0}'.format(blob_digest))
|
||||
|
||||
found = placements[0].storage
|
||||
found.locations = {placement.location.name for placement in placements}
|
||||
|
||||
def store_blob_record(blob_digest, location_name):
|
||||
storage = ImageStorage.create(checksum=blob_digest)
|
||||
return found
|
||||
|
||||
def store_blob_record_and_temp_link(namespace, repo_name, blob_digest, location_name,
|
||||
link_expiration_s):
|
||||
""" Store a record of the blob and temporarily link it to the specified repository.
|
||||
"""
|
||||
random_image_name = str(uuid4())
|
||||
with db_transaction:
|
||||
repo = _basequery.get_existing_repository(namespace, repo_name)
|
||||
|
||||
try:
|
||||
storage = ImageStorage.get(checksum=blob_digest)
|
||||
location = ImageStorageLocation.get(name=location_name)
|
||||
ImageStoragePlacement.create(location=location, storage=storage)
|
||||
storage.locations = {location_name}
|
||||
return storage
|
||||
ImageStoragePlacement.get(storage=storage, location=location)
|
||||
except ImageStorage.DoesNotExist:
|
||||
storage = ImageStorage.create(checksum=blob_digest)
|
||||
except ImageStoragePlacement.DoesNotExist:
|
||||
ImageStoragePlacement.create(storage=storage, location=location)
|
||||
|
||||
# Create a temporary link into the repository, to be replaced by the v1 metadata later
|
||||
# and create a temporary tag to reference it
|
||||
image = Image.create(storage=storage, docker_image_id=random_image_name, repository=repo)
|
||||
tag.create_temporary_hidden_tag(repo, image, link_expiration_s)
|
||||
|
173 data/model/build.py Normal file
@@ -0,0 +1,173 @@
import json
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER
|
||||
from datetime import timedelta, datetime
|
||||
|
||||
from data.database import (BuildTriggerService, RepositoryBuildTrigger, Repository, Namespace, User,
|
||||
RepositoryBuild, BUILD_PHASE, db_for_update)
|
||||
from data.model import (InvalidBuildTriggerException, InvalidRepositoryBuildException,
|
||||
db_transaction, user as user_model)
|
||||
|
||||
|
||||
PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
|
||||
|
||||
|
||||
def update_build_trigger(trigger, config, auth_token=None):
|
||||
trigger.config = json.dumps(config or {})
|
||||
if auth_token is not None:
|
||||
trigger.auth_token = auth_token
|
||||
trigger.save()
|
||||
|
||||
|
||||
def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None, config=None):
|
||||
config = config or {}
|
||||
service = BuildTriggerService.get(name=service_name)
|
||||
trigger = RepositoryBuildTrigger.create(repository=repo, service=service,
|
||||
auth_token=auth_token,
|
||||
connected_user=user,
|
||||
pull_robot=pull_robot,
|
||||
config=json.dumps(config))
|
||||
return trigger
|
||||
|
||||
|
||||
def get_build_trigger(trigger_uuid):
|
||||
try:
|
||||
return (RepositoryBuildTrigger
|
||||
.select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)
|
||||
.join(BuildTriggerService)
|
||||
.switch(RepositoryBuildTrigger)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(RepositoryBuildTrigger)
|
||||
.join(User)
|
||||
.where(RepositoryBuildTrigger.uuid == trigger_uuid)
|
||||
.get())
|
||||
except RepositoryBuildTrigger.DoesNotExist:
|
||||
msg = 'No build trigger with uuid: %s' % trigger_uuid
|
||||
raise InvalidBuildTriggerException(msg)
|
||||
|
||||
|
||||
def list_build_triggers(namespace_name, repository_name):
|
||||
return (RepositoryBuildTrigger
|
||||
.select(RepositoryBuildTrigger, BuildTriggerService, Repository)
|
||||
.join(BuildTriggerService)
|
||||
.switch(RepositoryBuildTrigger)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
|
||||
|
||||
def list_trigger_builds(namespace_name, repository_name, trigger_uuid,
|
||||
limit):
|
||||
return (list_repository_builds(namespace_name, repository_name, limit)
|
||||
.where(RepositoryBuildTrigger.uuid == trigger_uuid))
|
||||
|
||||
|
||||
def get_repository_for_resource(resource_key):
|
||||
try:
|
||||
return (Repository
|
||||
.select(Repository, Namespace)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(Repository)
|
||||
.join(RepositoryBuild)
|
||||
.where(RepositoryBuild.resource_key == resource_key)
|
||||
.get())
|
||||
except Repository.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def _get_build_base_query():
|
||||
return (RepositoryBuild
|
||||
.select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
|
||||
Namespace, User)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(RepositoryBuild)
|
||||
.join(User, JOIN_LEFT_OUTER)
|
||||
.switch(RepositoryBuild)
|
||||
.join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
|
||||
.join(BuildTriggerService, JOIN_LEFT_OUTER)
|
||||
.order_by(RepositoryBuild.started.desc()))
|
||||
|
||||
|
||||
def get_repository_build(build_uuid):
|
||||
try:
|
||||
return _get_build_base_query().where(RepositoryBuild.uuid == build_uuid).get()
|
||||
|
||||
except RepositoryBuild.DoesNotExist:
|
||||
msg = 'Unable to locate a build by id: %s' % build_uuid
|
||||
raise InvalidRepositoryBuildException(msg)
|
||||
|
||||
|
||||
def list_repository_builds(namespace_name, repository_name, limit,
|
||||
include_inactive=True, since=None):
|
||||
query = (_get_build_base_query()
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name)
|
||||
.limit(limit))
|
||||
|
||||
if since is not None:
|
||||
query = query.where(RepositoryBuild.started >= since)
|
||||
|
||||
if not include_inactive:
|
||||
query = query.where(RepositoryBuild.phase != BUILD_PHASE.ERROR,
|
||||
RepositoryBuild.phase != BUILD_PHASE.COMPLETE)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def get_recent_repository_build(namespace_name, repository_name):
|
||||
query = list_repository_builds(namespace_name, repository_name, 1)
|
||||
try:
|
||||
return query.get()
|
||||
except RepositoryBuild.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def create_repository_build(repo, access_token, job_config_obj, dockerfile_id,
|
||||
display_name, trigger=None, pull_robot_name=None):
|
||||
pull_robot = None
|
||||
if pull_robot_name:
|
||||
pull_robot = user_model.lookup_robot(pull_robot_name)
|
||||
|
||||
return RepositoryBuild.create(repository=repo, access_token=access_token,
|
||||
job_config=json.dumps(job_config_obj),
|
||||
display_name=display_name, trigger=trigger,
|
||||
resource_key=dockerfile_id,
|
||||
pull_robot=pull_robot)
|
||||
|
||||
|
||||
def get_pull_robot_name(trigger):
|
||||
if not trigger.pull_robot:
|
||||
return None
|
||||
|
||||
return trigger.pull_robot.username
|
||||
|
||||
|
||||
def cancel_repository_build(build, work_queue):
|
||||
with db_transaction():
|
||||
# Reload the build for update.
|
||||
try:
|
||||
build = db_for_update(RepositoryBuild.select().where(RepositoryBuild.id == build.id)).get()
|
||||
except RepositoryBuild.DoesNotExist:
|
||||
return False
|
||||
|
||||
if build.phase != BUILD_PHASE.WAITING or not build.queue_id:
|
||||
return False
|
||||
|
||||
# Try to cancel the queue item.
|
||||
if not work_queue.cancel(build.queue_id):
|
||||
return False
|
||||
|
||||
# Delete the build row.
|
||||
build.delete_instance()
|
||||
return True
|
||||
|
||||
|
||||
def archivable_buildlogs_query():
|
||||
presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
|
||||
return (RepositoryBuild
|
||||
.select()
|
||||
.where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
|
||||
(RepositoryBuild.phase == BUILD_PHASE.ERROR) |
|
||||
(RepositoryBuild.started < presumed_dead_date),
|
||||
RepositoryBuild.logs_archived == False))
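The build submodule keeps its queries composable: list_repository_builds returns a peewee query that callers narrow further, as get_recent_repository_build and list_trigger_builds do above. An illustrative use, with the namespace and repository names as placeholders:

from data import model

# Ten most recent builds for one repository that are neither errored nor complete.
for build in model.build.list_repository_builds('someorg', 'somerepo', 10, include_inactive=False):
  print build.uuid, build.phase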
25 data/model/health.py Normal file
@@ -0,0 +1,25 @@
import logging
|
||||
|
||||
from data.database import TeamRole
|
||||
from util.config.validator import validate_database_url
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_health(app_config):
|
||||
# Attempt to connect to the database first. If the DB is not responding,
|
||||
# using the validate_database_url will timeout quickly, as opposed to
|
||||
# making a normal connect which will just hang (thus breaking the health
|
||||
# check).
|
||||
try:
|
||||
validate_database_url(app_config['DB_URI'], {}, connect_timeout=3)
|
||||
except Exception:
|
||||
logger.exception('Could not connect to the database')
|
||||
return False
|
||||
|
||||
# We will connect to the db, check that it contains some team role kinds
|
||||
try:
|
||||
return bool(list(TeamRole.select().limit(1)))
|
||||
except:
|
||||
return False
341 data/model/image.py Normal file
@@ -0,0 +1,341 @@
import logging
|
||||
import dateutil.parser
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER, fn
|
||||
from datetime import datetime
|
||||
|
||||
from data.model import DataModelException, db_transaction, _basequery, storage
|
||||
from data.database import (Image, Repository, ImageStoragePlacement, Namespace, ImageStorage,
|
||||
ImageStorageLocation, RepositoryPermission, db_for_update)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_parent_images(namespace_name, repository_name, image_obj):
|
||||
""" Returns a list of parent Image objects in chronilogical order. """
|
||||
parents = image_obj.ancestors
|
||||
|
||||
# Ancestors are in the format /<root>/<intermediate>/.../<parent>/, with each path section
|
||||
# containing the database Id of the image row.
|
||||
parent_db_ids = parents.strip('/').split('/')
|
||||
|
||||
if parent_db_ids == ['']:
|
||||
return []
|
||||
|
||||
def filter_to_parents(query):
|
||||
return query.where(Image.id << parent_db_ids)
|
||||
|
||||
parents = get_repository_images_base(namespace_name, repository_name, filter_to_parents)
|
||||
|
||||
id_to_image = {unicode(image.id): image for image in parents}
|
||||
|
||||
return [id_to_image[parent_id] for parent_id in parent_db_ids]
|
||||
|
||||
|
||||
def get_repo_image(namespace_name, repository_name, docker_image_id):
|
||||
def limit_to_image_id(query):
|
||||
return query.where(Image.docker_image_id == docker_image_id).limit(1)
|
||||
|
||||
query = _get_repository_images(namespace_name, repository_name, limit_to_image_id)
|
||||
try:
|
||||
return query.get()
|
||||
except Image.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_repo_image_extended(namespace_name, repository_name, docker_image_id):
|
||||
def limit_to_image_id(query):
|
||||
return query.where(Image.docker_image_id == docker_image_id).limit(1)
|
||||
|
||||
images = get_repository_images_base(namespace_name, repository_name, limit_to_image_id)
|
||||
if not images:
|
||||
return None
|
||||
|
||||
return images[0]
|
||||
|
||||
|
||||
def _get_repository_images(namespace_name, repository_name, query_modifier):
|
||||
query = (Image
|
||||
.select()
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name))
|
||||
|
||||
query = query_modifier(query)
|
||||
return query
|
||||
|
||||
|
||||
def get_repository_images_base(namespace_name, repository_name, query_modifier):
|
||||
query = (ImageStoragePlacement
|
||||
.select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
|
||||
.join(ImageStorageLocation)
|
||||
.switch(ImageStoragePlacement)
|
||||
.join(ImageStorage, JOIN_LEFT_OUTER)
|
||||
.join(Image)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name))
|
||||
|
||||
query = query_modifier(query)
|
||||
|
||||
location_list = list(query)
|
||||
|
||||
images = {}
|
||||
for location in location_list:
|
||||
# Make sure we're always retrieving the same image object.
|
||||
image = location.storage.image
|
||||
|
||||
# Set the storage to the one we got from the location, to prevent another query
|
||||
image.storage = location.storage
|
||||
|
||||
if not image.id in images:
|
||||
images[image.id] = image
|
||||
image.storage.locations = set()
|
||||
else:
|
||||
image = images[image.id]
|
||||
|
||||
# Add the location to the image's locations set.
|
||||
image.storage.locations.add(location.location.name)
|
||||
|
||||
return images.values()
|
||||
|
||||
|
||||
def lookup_repository_images(namespace_name, repository_name, docker_image_ids):
|
||||
return (Image
|
||||
.select()
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name,
|
||||
Image.docker_image_id << docker_image_ids))
|
||||
|
||||
|
||||
def get_matching_repository_images(namespace_name, repository_name, docker_image_ids):
|
||||
def modify_query(query):
|
||||
return query.where(Image.docker_image_id << docker_image_ids)
|
||||
|
||||
return get_repository_images_base(namespace_name, repository_name, modify_query)
|
||||
|
||||
|
||||
def get_repository_images_without_placements(repo_obj, with_ancestor=None):
|
||||
query = (Image
|
||||
.select(Image, ImageStorage)
|
||||
.join(ImageStorage)
|
||||
.where(Image.repository == repo_obj))
|
||||
|
||||
if with_ancestor:
|
||||
ancestors_string = '%s%s/' % (with_ancestor.ancestors, with_ancestor.id)
|
||||
query = query.where((Image.ancestors ** (ancestors_string + '%')) |
|
||||
(Image.id == with_ancestor.id))
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def get_repository_images(namespace_name, repository_name):
|
||||
return get_repository_images_base(namespace_name, repository_name, lambda q: q)
|
||||
|
||||
|
||||
def get_image_by_id(namespace_name, repository_name, docker_image_id):
|
||||
image = get_repo_image_extended(namespace_name, repository_name, docker_image_id)
|
||||
if not image:
|
||||
raise DataModelException('Unable to find image \'%s\' for repo \'%s/%s\'' %
|
||||
(docker_image_id, namespace_name, repository_name))
|
||||
return image
|
||||
|
||||
|
||||
def __translate_ancestry(old_ancestry, translations, repo_obj, username, preferred_location):
|
||||
if old_ancestry == '/':
|
||||
return '/'
|
||||
|
||||
def translate_id(old_id, docker_image_id):
|
||||
logger.debug('Translating id: %s', old_id)
|
||||
if old_id not in translations:
|
||||
image_in_repo = find_create_or_link_image(docker_image_id, repo_obj, username, translations,
|
||||
preferred_location)
|
||||
translations[old_id] = image_in_repo.id
|
||||
return translations[old_id]
|
||||
|
||||
# Select all the ancestor Docker IDs in a single query.
|
||||
old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]]
|
||||
query = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
|
||||
old_images = {i.id: i.docker_image_id for i in query}
|
||||
|
||||
# Translate the old images into new ones.
|
||||
new_ids = [str(translate_id(old_id, old_images[old_id])) for old_id in old_ids]
|
||||
return '/%s/' % '/'.join(new_ids)
|
||||
|
||||
|
||||
def _find_or_link_image(existing_image, repo_obj, username, translations, preferred_location):
|
||||
# TODO(jake): This call is currently recursively done under a single transaction. Can we make
|
||||
# it instead be done under a set of transactions?
|
||||
with db_transaction():
|
||||
# Check for an existing image, under the transaction, to make sure it doesn't already exist.
|
||||
repo_image = get_repo_image(repo_obj.namespace_user.username, repo_obj.name,
|
||||
existing_image.docker_image_id)
|
||||
if repo_image:
|
||||
return repo_image
|
||||
|
||||
# Make sure the existing base image still exists.
|
||||
try:
|
||||
to_copy = Image.select().join(ImageStorage).where(Image.id == existing_image.id).get()
|
||||
|
||||
msg = 'Linking image to existing storage with docker id: %s and uuid: %s'
|
||||
logger.debug(msg, existing_image.docker_image_id, to_copy.storage.uuid)
|
||||
|
||||
new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repo_obj,
|
||||
username, preferred_location)
|
||||
|
||||
copied_storage = to_copy.storage
|
||||
copied_storage.locations = {placement.location.name
|
||||
for placement in copied_storage.imagestorageplacement_set}
|
||||
|
||||
new_image = Image.create(docker_image_id=existing_image.docker_image_id,
|
||||
repository=repo_obj, storage=copied_storage,
|
||||
ancestors=new_image_ancestry)
|
||||
|
||||
logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
|
||||
translations[existing_image.id] = new_image.id
|
||||
return new_image
|
||||
except Image.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def find_create_or_link_image(docker_image_id, repo_obj, username, translations,
|
||||
preferred_location):
|
||||
|
||||
# First check for the image existing in the repository. If found, we simply return it.
|
||||
repo_image = get_repo_image(repo_obj.namespace_user.username, repo_obj.name,
|
||||
docker_image_id)
|
||||
if repo_image:
|
||||
return repo_image
|
||||
|
||||
# We next check to see if there is an existing storage the new image can link to.
|
||||
existing_image_query = (Image
|
||||
.select(Image, ImageStorage)
|
||||
.distinct()
|
||||
.join(ImageStorage)
|
||||
.switch(Image)
|
||||
.join(Repository)
|
||||
.join(RepositoryPermission, JOIN_LEFT_OUTER)
|
||||
.switch(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(ImageStorage.uploading == False,
|
||||
Image.docker_image_id == docker_image_id))
|
||||
|
||||
existing_image_query = _basequery.filter_to_repos_for_user(existing_image_query, username)
|
||||
|
||||
# If there is an existing image, we try to translate its ancestry and copy its storage.
|
||||
new_image = None
|
||||
try:
|
||||
logger.debug('Looking up existing image for ID: %s', docker_image_id)
|
||||
existing_image = existing_image_query.get()
|
||||
|
||||
logger.debug('Existing image %s found for ID: %s', existing_image.id, docker_image_id)
|
||||
new_image = _find_or_link_image(existing_image, repo_obj, username, translations,
|
||||
preferred_location)
|
||||
if new_image:
|
||||
return new_image
|
||||
except Image.DoesNotExist:
|
||||
logger.debug('No existing image found for ID: %s', docker_image_id)
|
||||
|
||||
# Otherwise, create a new storage directly.
|
||||
with db_transaction():
|
||||
# Final check for an existing image, under the transaction.
|
||||
repo_image = get_repo_image(repo_obj.namespace_user.username, repo_obj.name,
|
||||
docker_image_id)
|
||||
if repo_image:
|
||||
return repo_image
|
||||
|
||||
logger.debug('Creating new storage for docker id: %s', docker_image_id)
|
||||
new_storage = storage.create_storage(preferred_location)
|
||||
|
||||
return Image.create(docker_image_id=docker_image_id,
|
||||
repository=repo_obj, storage=new_storage,
|
||||
ancestors='/')
|
||||
|
||||
|
||||
def set_image_metadata(docker_image_id, namespace_name, repository_name, created_date_str, comment,
|
||||
command, parent=None):
|
||||
with db_transaction():
|
||||
query = (Image
|
||||
.select(Image, ImageStorage)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(Image)
|
||||
.join(ImageStorage)
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name,
|
||||
Image.docker_image_id == docker_image_id))
|
||||
|
||||
try:
|
||||
fetched = db_for_update(query).get()
|
||||
except Image.DoesNotExist:
|
||||
raise DataModelException('No image with specified id and repository')
|
||||
|
||||
# We cleanup any old checksum in case it's a retry after a fail
|
||||
fetched.storage.checksum = None
|
||||
fetched.storage.created = datetime.now()
|
||||
|
||||
if created_date_str is not None:
|
||||
try:
|
||||
fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
|
||||
except:
|
||||
# parse raises different exceptions, so we cannot use a specific kind of handler here.
|
||||
pass
|
||||
|
||||
fetched.storage.comment = comment
|
||||
fetched.storage.command = command
|
||||
|
||||
if parent:
|
||||
fetched.ancestors = '%s%s/' % (parent.ancestors, parent.id)
|
||||
|
||||
fetched.save()
|
||||
fetched.storage.save()
|
||||
return fetched
|
||||
|
||||
|
||||
def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
|
||||
try:
|
||||
image = (Image
|
||||
.select(Image, ImageStorage)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(Image)
|
||||
.join(ImageStorage, JOIN_LEFT_OUTER)
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name,
|
||||
Image.docker_image_id == docker_image_id)
|
||||
.get())
|
||||
|
||||
except Image.DoesNotExist:
|
||||
raise DataModelException('No image with specified id and repository')
|
||||
|
||||
image.storage.image_size = image_size
|
||||
image.storage.uncompressed_size = uncompressed_size
|
||||
|
||||
ancestors = image.ancestors.split('/')[1:-1]
|
||||
if ancestors:
|
||||
try:
|
||||
# TODO(jschorr): Switch to this faster route once we have full ancestor aggregate_size
|
||||
# parent_image = Image.get(Image.id == ancestors[-1])
|
||||
# total_size = image_size + parent_image.storage.aggregate_size
|
||||
total_size = (ImageStorage
|
||||
.select(fn.Sum(ImageStorage.image_size))
|
||||
.join(Image)
|
||||
.where(Image.id << ancestors)
|
||||
.scalar()) + image_size
|
||||
|
||||
image.storage.aggregate_size = total_size
|
||||
except Image.DoesNotExist:
|
||||
pass
|
||||
else:
|
||||
image.storage.aggregate_size = image_size
|
||||
|
||||
image.storage.save()
|
||||
|
||||
return image
|
||||
|
||||
|
||||
def get_image(repo, dockerfile_id):
|
||||
try:
|
||||
return Image.get(Image.docker_image_id == dockerfile_id, Image.repository == repo)
|
||||
except Image.DoesNotExist:
|
||||
return None
3031 data/model/legacy.py
File diff suppressed because it is too large
99 data/model/log.py Normal file
@@ -0,0 +1,99 @@
import json
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER, SQL, fn
|
||||
from datetime import datetime, timedelta, date
|
||||
|
||||
from data.database import LogEntry, LogEntryKind, User
|
||||
|
||||
|
||||
def list_logs(start_time, end_time, performer=None, repository=None, namespace=None):
|
||||
Performer = User.alias()
|
||||
joined = (LogEntry
|
||||
.select(LogEntry, LogEntryKind, User, Performer)
|
||||
.join(User)
|
||||
.switch(LogEntry)
|
||||
.join(Performer, JOIN_LEFT_OUTER,
|
||||
on=(LogEntry.performer == Performer.id).alias('performer'))
|
||||
.switch(LogEntry)
|
||||
.join(LogEntryKind)
|
||||
.switch(LogEntry))
|
||||
|
||||
if repository:
|
||||
joined = joined.where(LogEntry.repository == repository)
|
||||
|
||||
if performer:
|
||||
joined = joined.where(LogEntry.performer == performer)
|
||||
|
||||
if namespace:
|
||||
joined = joined.where(User.username == namespace)
|
||||
|
||||
return list(joined.where(LogEntry.datetime >= start_time,
|
||||
LogEntry.datetime < end_time).order_by(LogEntry.datetime.desc()))
|
||||
|
||||
|
||||
def log_action(kind_name, user_or_organization_name, performer=None, repository=None,
|
||||
ip=None, metadata={}, timestamp=None):
|
||||
if not timestamp:
|
||||
timestamp = datetime.today()
|
||||
|
||||
kind = LogEntryKind.get(LogEntryKind.name == kind_name)
|
||||
account = User.get(User.username == user_or_organization_name)
|
||||
LogEntry.create(kind=kind, account=account, performer=performer,
|
||||
repository=repository, ip=ip, metadata_json=json.dumps(metadata),
|
||||
datetime=timestamp)
|
||||
|
||||
|
||||
def _get_repository_events(repository, time_delta, time_delta_earlier, clause):
|
||||
""" Returns a pair representing the count of the number of events for the given
|
||||
repository in each of the specified time deltas. The date ranges are calculated by
|
||||
taking the current time today and subtracting the time delta given. Since
|
||||
we want to grab *two* ranges, we restrict the second range to be greater
|
||||
than the first (i.e. referring to an earlier time), so we can conduct the
|
||||
lookup in a single query. The clause is used to further filter the kind of
|
||||
events being found.
|
||||
"""
|
||||
since = date.today() - time_delta
|
||||
since_earlier = date.today() - time_delta_earlier
|
||||
|
||||
if since_earlier >= since:
|
||||
raise ValueError('time_delta_earlier must be greater than time_delta')
|
||||
|
||||
# This uses a CASE WHEN inner clause to further filter the count.
|
||||
formatted = since.strftime('%Y-%m-%d')
|
||||
case_query = 'CASE WHEN datetime >= \'%s\' THEN 1 ELSE 0 END' % formatted
|
||||
|
||||
result = (LogEntry
|
||||
.select(fn.Sum(SQL(case_query)), fn.Count(SQL('*')))
|
||||
.where(LogEntry.repository == repository)
|
||||
.where(clause)
|
||||
.where(LogEntry.datetime >= since_earlier)
|
||||
.tuples()
|
||||
.get())
|
||||
|
||||
return (int(result[0]) if result[0] else 0, int(result[1]) if result[1] else 0)
|
||||
|
||||
|
||||
def get_repository_pushes(repository, time_delta, time_delta_earlier):
|
||||
push_repo = LogEntryKind.get(name='push_repo')
|
||||
clauses = (LogEntry.kind == push_repo)
|
||||
return _get_repository_events(repository, time_delta, time_delta_earlier, clauses)
|
||||
|
||||
|
||||
def get_repository_pulls(repository, time_delta, time_delta_earlier):
|
||||
repo_pull = LogEntryKind.get(name='pull_repo')
|
||||
repo_verb = LogEntryKind.get(name='repo_verb')
|
||||
clauses = ((LogEntry.kind == repo_pull) | (LogEntry.kind == repo_verb))
|
||||
return _get_repository_events(repository, time_delta, time_delta_earlier, clauses)
|
||||
|
||||
|
||||
def get_repository_usage():
|
||||
one_month_ago = date.today() - timedelta(weeks=4)
|
||||
repo_pull = LogEntryKind.get(name='pull_repo')
|
||||
repo_verb = LogEntryKind.get(name='repo_verb')
|
||||
return (LogEntry
|
||||
.select(LogEntry.ip, LogEntry.repository)
|
||||
.where((LogEntry.kind == repo_pull) | (LogEntry.kind == repo_verb))
|
||||
.where(~(LogEntry.repository >> None))
|
||||
.where(LogEntry.datetime >= one_month_ago)
|
||||
.group_by(LogEntry.ip, LogEntry.repository)
|
||||
.count())
|
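The two-delta convention documented in _get_repository_events above is easier to read with concrete values: the first element of the returned pair counts events newer than time_delta, the second counts events newer than time_delta_earlier, and time_delta_earlier must reach further back. An illustrative call; the repository object is a placeholder:

from datetime import timedelta
from data import model

# Pushes in the last week versus pushes in the last four weeks.
week_count, month_count = model.log.get_repository_pushes(repo, timedelta(days=7), timedelta(weeks=4))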
158 data/model/notification.py Normal file
@@ -0,0 +1,158 @@
import json
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER
|
||||
|
||||
from data.model import InvalidNotificationException, db_transaction
|
||||
from data.database import (Notification, NotificationKind, User, Team, TeamMember, TeamRole,
|
||||
RepositoryNotification, ExternalNotificationEvent, Repository,
|
||||
ExternalNotificationMethod, Namespace)
|
||||
|
||||
|
||||
def create_notification(kind_name, target, metadata={}):
|
||||
kind_ref = NotificationKind.get(name=kind_name)
|
||||
notification = Notification.create(kind=kind_ref, target=target,
|
||||
metadata_json=json.dumps(metadata))
|
||||
return notification
|
||||
|
||||
|
||||
def create_unique_notification(kind_name, target, metadata={}):
|
||||
with db_transaction():
|
||||
if list_notifications(target, kind_name, limit=1).count() == 0:
|
||||
create_notification(kind_name, target, metadata)
|
||||
|
||||
|
||||
def lookup_notification(user, uuid):
|
||||
results = list(list_notifications(user, id_filter=uuid, include_dismissed=True, limit=1))
|
||||
if not results:
|
||||
return None
|
||||
|
||||
return results[0]
|
||||
|
||||
|
||||
def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
|
||||
page=None, limit=None):
|
||||
Org = User.alias()
|
||||
AdminTeam = Team.alias()
|
||||
AdminTeamMember = TeamMember.alias()
|
||||
AdminUser = User.alias()
|
||||
|
||||
query = (Notification.select()
|
||||
.join(User)
|
||||
.switch(Notification)
|
||||
.join(Org, JOIN_LEFT_OUTER, on=(Org.id == Notification.target))
|
||||
.join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
|
||||
.join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
|
||||
.switch(AdminTeam)
|
||||
.join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
|
||||
.join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id))
|
||||
.where((Notification.target == user) |
|
||||
((AdminUser.id == user) & (TeamRole.name == 'admin')))
|
||||
.order_by(Notification.created)
|
||||
.desc())
|
||||
|
||||
if not include_dismissed:
|
||||
query = query.switch(Notification).where(Notification.dismissed == False)
|
||||
|
||||
if kind_name:
|
||||
query = (query
|
||||
.switch(Notification)
|
||||
.join(NotificationKind)
|
||||
.where(NotificationKind.name == kind_name))
|
||||
|
||||
if id_filter:
|
||||
query = (query
|
||||
.switch(Notification)
|
||||
.where(Notification.uuid == id_filter))
|
||||
|
||||
if page:
|
||||
query = query.paginate(page, limit)
|
||||
elif limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def delete_all_notifications_by_kind(kind_name):
|
||||
kind_ref = NotificationKind.get(name=kind_name)
|
||||
(Notification
|
||||
.delete()
|
||||
.where(Notification.kind == kind_ref)
|
||||
.execute())
|
||||
|
||||
|
||||
def delete_notifications_by_kind(target, kind_name):
|
||||
kind_ref = NotificationKind.get(name=kind_name)
|
||||
Notification.delete().where(Notification.target == target,
|
||||
Notification.kind == kind_ref).execute()
|
||||
|
||||
|
||||
def delete_matching_notifications(target, kind_name, **kwargs):
|
||||
kind_ref = NotificationKind.get(name=kind_name)
|
||||
|
||||
# Load all notifications for the user with the given kind.
|
||||
notifications = Notification.select().where(
|
||||
Notification.target == target,
|
||||
Notification.kind == kind_ref)
|
||||
|
||||
# For each, match the metadata to the specified values.
|
||||
for notification in notifications:
|
||||
matches = True
|
||||
try:
|
||||
metadata = json.loads(notification.metadata_json)
|
||||
except:
|
||||
continue
|
||||
|
||||
for (key, value) in kwargs.iteritems():
|
||||
if not key in metadata or metadata[key] != value:
|
||||
matches = False
|
||||
break
|
||||
|
||||
if not matches:
|
||||
continue
|
||||
|
||||
notification.delete_instance()
|
||||
|
||||
|
||||
def create_repo_notification(repo, event_name, method_name, config):
|
||||
event = ExternalNotificationEvent.get(ExternalNotificationEvent.name == event_name)
|
||||
method = ExternalNotificationMethod.get(ExternalNotificationMethod.name == method_name)
|
||||
|
||||
return RepositoryNotification.create(repository=repo, event=event, method=method,
|
||||
config_json=json.dumps(config))
|
||||
|
||||
|
||||
def get_repo_notification(uuid):
|
||||
try:
|
||||
return (RepositoryNotification
|
||||
.select(RepositoryNotification, Repository, Namespace)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(RepositoryNotification.uuid == uuid)
|
||||
.get())
|
||||
except RepositoryNotification.DoesNotExist:
|
||||
raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
|
||||
|
||||
|
||||
def delete_repo_notification(namespace_name, repository_name, uuid):
|
||||
found = get_repo_notification(uuid)
|
||||
if (found.repository.namespace_user.username != namespace_name or
|
||||
found.repository.name != repository_name):
|
||||
raise InvalidNotificationException('No repository notifiation found with id: %s' % uuid)
|
||||
found.delete_instance()
|
||||
return found
|
||||
|
||||
|
||||
def list_repo_notifications(namespace_name, repository_name, event_name=None):
|
||||
query = (RepositoryNotification
|
||||
.select(RepositoryNotification, Repository, Namespace)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
|
||||
if event_name:
|
||||
query = (query
|
||||
.switch(RepositoryNotification)
|
||||
.join(ExternalNotificationEvent)
|
||||
.where(ExternalNotificationEvent.name == event_name))
|
||||
|
||||
return query
|
@@ -7,13 +7,14 @@ from oauth2lib.provider import AuthorizationProvider
from oauth2lib import utils
|
||||
|
||||
from data.database import (OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, User,
|
||||
random_string_generator)
|
||||
from data.model.legacy import get_user
|
||||
AccessToken, random_string_generator)
|
||||
from data.model import user
|
||||
from auth import scopes
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DatabaseAuthorizationProvider(AuthorizationProvider):
|
||||
def get_authorized_user(self):
|
||||
raise NotImplementedError('Subclasses must fill in the ability to get the authorized_user.')
|
||||
|
@ -49,7 +50,8 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
|
|||
|
||||
try:
|
||||
oauth_app = OAuthApplication.get(client_id=client_id)
|
||||
if oauth_app.redirect_uri and redirect_uri and redirect_uri.startswith(oauth_app.redirect_uri):
|
||||
if (oauth_app.redirect_uri and redirect_uri and
|
||||
redirect_uri.startswith(oauth_app.redirect_uri)):
|
||||
return True
|
||||
return False
|
||||
except OAuthApplication.DoesNotExist:
|
||||
|
@ -114,13 +116,13 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
|
|||
|
||||
def persist_token_information(self, client_id, scope, access_token, token_type, expires_in,
|
||||
refresh_token, data):
|
||||
user = get_user(json.loads(data)['username'])
|
||||
if not user:
|
||||
found = user.get_user(json.loads(data)['username'])
|
||||
if not found:
|
||||
raise RuntimeError('Username must be in the data field')
|
||||
|
||||
oauth_app = OAuthApplication.get(client_id=client_id)
|
||||
expires_at = datetime.utcnow() + timedelta(seconds=expires_in)
|
||||
OAuthAccessToken.create(application=oauth_app, authorized_user=user, scope=scope,
|
||||
OAuthAccessToken.create(application=oauth_app, authorized_user=found, scope=scope,
|
||||
access_token=access_token, token_type=token_type,
|
||||
expires_at=expires_at, refresh_token=refresh_token, data=data)
|
||||
|
||||
|
@ -157,7 +159,6 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
|
|||
|
||||
|
||||
def get_token_response(self, response_type, client_id, redirect_uri, **params):
|
||||
|
||||
# Ensure proper response_type
|
||||
if response_type != 'token':
|
||||
err = 'unsupported_response_type'
|
||||
|
@ -249,21 +250,21 @@ def delete_application(org, client_id):
|
|||
return application
|
||||
|
||||
|
||||
def lookup_access_token_for_user(user, token_uuid):
|
||||
def lookup_access_token_for_user(user_obj, token_uuid):
|
||||
try:
|
||||
return OAuthAccessToken.get(OAuthAccessToken.authorized_user == user,
|
||||
return OAuthAccessToken.get(OAuthAccessToken.authorized_user == user_obj,
|
||||
OAuthAccessToken.uuid == token_uuid)
|
||||
except OAuthAccessToken.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def list_access_tokens_for_user(user):
|
||||
def list_access_tokens_for_user(user_obj):
|
||||
query = (OAuthAccessToken
|
||||
.select()
|
||||
.join(OAuthApplication)
|
||||
.switch(OAuthAccessToken)
|
||||
.join(User)
|
||||
.where(OAuthAccessToken.authorized_user == user))
|
||||
.where(OAuthAccessToken.authorized_user == user_obj))
|
||||
|
||||
return query
|
||||
|
||||
|
@ -277,9 +278,9 @@ def list_applications_for_org(org):
|
|||
return query
|
||||
|
||||
|
||||
def create_access_token_for_testing(user, client_id, scope):
|
||||
def create_access_token_for_testing(user_obj, client_id, scope):
|
||||
expires_at = datetime.utcnow() + timedelta(seconds=10000)
|
||||
application = get_application_for_client_id(client_id)
|
||||
OAuthAccessToken.create(application=application, authorized_user=user, scope=scope,
|
||||
OAuthAccessToken.create(application=application, authorized_user=user_obj, scope=scope,
|
||||
token_type='token', access_token='test',
|
||||
expires_at=expires_at, refresh_token='', data='')
|
||||
|
126 data/model/organization.py Normal file
@@ -0,0 +1,126 @@
|
||||
from data.database import (User, FederatedLogin, TeamMember, Team, TeamRole, RepositoryPermission,
|
||||
Repository, Namespace)
|
||||
from data.model import (user, team, DataModelException, InvalidOrganizationException,
|
||||
InvalidUsernameException, db_transaction, _basequery)
|
||||
|
||||
|
||||
def create_organization(name, email, creating_user):
|
||||
try:
|
||||
# Create the org
|
||||
new_org = user.create_user_noverify(name, email)
|
||||
new_org.organization = True
|
||||
new_org.save()
|
||||
|
||||
# Create a team for the owners
|
||||
owners_team = team.create_team('owners', new_org, 'admin')
|
||||
|
||||
# Add the user who created the org to the owners team
|
||||
team.add_user_to_team(creating_user, owners_team)
|
||||
|
||||
return new_org
|
||||
except InvalidUsernameException:
|
||||
msg = ('Invalid organization name: %s Organization names must consist ' +
|
||||
'solely of lower case letters, numbers, and underscores. ' +
|
||||
'[a-z0-9_]') % name
|
||||
raise InvalidOrganizationException(msg)
|
||||
|
||||
|
||||
def get_organization(name):
|
||||
try:
|
||||
return User.get(username=name, organization=True)
|
||||
except User.DoesNotExist:
|
||||
raise InvalidOrganizationException('Organization does not exist: %s' %
|
||||
name)
|
||||
|
||||
|
||||
def convert_user_to_organization(user_obj, admin_user):
|
||||
# Change the user to an organization.
|
||||
user_obj.organization = True
|
||||
|
||||
# disable this account for login.
|
||||
user_obj.password_hash = None
|
||||
user_obj.save()
|
||||
|
||||
# Clear any federated auth pointing to this user
|
||||
FederatedLogin.delete().where(FederatedLogin.user == user_obj).execute()
|
||||
|
||||
# Create a team for the owners
|
||||
owners_team = team.create_team('owners', user_obj, 'admin')
|
||||
|
||||
# Add the user who will admin the org to the owners team
|
||||
team.add_user_to_team(admin_user, owners_team)
|
||||
|
||||
return user_obj
|
||||
|
||||
|
||||
def get_user_organizations(username):
|
||||
return _basequery.get_user_organizations(username)
|
||||
|
||||
def get_organization_team_members(teamid):
|
||||
joined = User.select().join(TeamMember).join(Team)
|
||||
query = joined.where(Team.id == teamid)
|
||||
return query
|
||||
|
||||
|
||||
def __get_org_admin_users(org):
|
||||
return (User
|
||||
.select()
|
||||
.join(TeamMember)
|
||||
.join(Team)
|
||||
.join(TeamRole)
|
||||
.where(Team.organization == org, TeamRole.name == 'admin', User.robot == False)
|
||||
.distinct())
|
||||
|
||||
|
||||
def remove_organization_member(org, user_obj):
|
||||
org_admins = [u.username for u in __get_org_admin_users(org)]
|
||||
if len(org_admins) == 1 and user_obj.username in org_admins:
|
||||
raise DataModelException('Cannot remove user as they are the only organization admin')
|
||||
|
||||
with db_transaction():
|
||||
# Find and remove the user from any repositories under the org.
|
||||
permissions = (RepositoryPermission
|
||||
.select(RepositoryPermission.id)
|
||||
.join(Repository)
|
||||
.where(Repository.namespace_user == org,
|
||||
RepositoryPermission.user == user_obj))
|
||||
|
||||
RepositoryPermission.delete().where(RepositoryPermission.id << permissions).execute()
|
||||
|
||||
# Find and remove the user from any teams under the org.
|
||||
members = (TeamMember
|
||||
.select(TeamMember.id)
|
||||
.join(Team)
|
||||
.where(Team.organization == org, TeamMember.user == user_obj))
|
||||
|
||||
TeamMember.delete().where(TeamMember.id << members).execute()
|
||||
|
||||
|
||||
def get_organization_member_set(orgname):
|
||||
Org = User.alias()
|
||||
org_users = (User
|
||||
.select(User.username)
|
||||
.join(TeamMember)
|
||||
.join(Team)
|
||||
.join(Org, on=(Org.id == Team.organization))
|
||||
.where(Org.username == orgname)
|
||||
.distinct())
|
||||
return {user.username for user in org_users}
|
||||
|
||||
|
||||
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
|
||||
return (User
|
||||
.select()
|
||||
.distinct()
|
||||
.join(TeamMember)
|
||||
.join(Team)
|
||||
.join(RepositoryPermission)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
|
||||
|
||||
def get_organizations():
|
||||
return User.select().where(User.organization == True, User.robot == False)
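A minimal usage sketch for the new organization submodule (not part of this commit): the organization name, email, and creating user below are placeholders, and a configured database is assumed.

from data.model import organization

def example_create_org(creating_user):
  # creating_user is an existing data.database.User row (placeholder).
  new_org = organization.create_organization('buynlarge', 'owner@example.com', creating_user)
  # The creating user is added to the owners team, so the member set includes them.
  members = organization.get_organization_member_set(new_org.username)
  print('Members: %s' % sorted(members))
  return new_org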
|
||||
|
283  data/model/permission.py  Normal file
|
@ -0,0 +1,283 @@
|
|||
from peewee import JOIN_LEFT_OUTER
|
||||
|
||||
from data.database import (RepositoryPermission, User, Repository, Visibility, Role, TeamMember,
|
||||
PermissionPrototype, Team, TeamRole, Namespace)
|
||||
from data.model import DataModelException, _basequery
|
||||
|
||||
|
||||
def list_robot_permissions(robot_name):
|
||||
return (RepositoryPermission
|
||||
.select(RepositoryPermission, User, Repository)
|
||||
.join(Repository)
|
||||
.join(Visibility)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Role)
|
||||
.switch(RepositoryPermission)
|
||||
.join(User)
|
||||
.where(User.username == robot_name, User.robot == True))
|
||||
|
||||
|
||||
def list_organization_member_permissions(organization):
|
||||
query = (RepositoryPermission
|
||||
.select(RepositoryPermission, Repository, User)
|
||||
.join(Repository)
|
||||
.switch(RepositoryPermission)
|
||||
.join(User)
|
||||
.where(Repository.namespace_user == organization)
|
||||
.where(User.robot == False))
|
||||
return query
|
||||
|
||||
|
||||
def get_all_user_permissions(user):
|
||||
return _get_user_repo_permissions(user)
|
||||
|
||||
|
||||
def get_user_repo_permissions(user, repo):
|
||||
return _get_user_repo_permissions(user, limit_to_repository_obj=repo)
|
||||
|
||||
|
||||
def _get_user_repo_permissions(user, limit_to_repository_obj=None):
|
||||
UserThroughTeam = User.alias()
|
||||
|
||||
base_query = (RepositoryPermission
|
||||
.select(RepositoryPermission, Role, Repository, Namespace)
|
||||
.join(Role)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(RepositoryPermission))
|
||||
|
||||
if limit_to_repository_obj is not None:
|
||||
base_query = base_query.where(RepositoryPermission.repository == limit_to_repository_obj)
|
||||
|
||||
direct = (base_query
|
||||
.clone()
|
||||
.join(User)
|
||||
.where(User.id == user))
|
||||
|
||||
team = (base_query
|
||||
.clone()
|
||||
.join(Team)
|
||||
.join(TeamMember)
|
||||
.join(UserThroughTeam, on=(UserThroughTeam.id == TeamMember.user))
|
||||
.where(UserThroughTeam.id == user))
|
||||
|
||||
return direct | team
|
||||
|
||||
|
||||
def delete_prototype_permission(org, uid):
|
||||
found = get_prototype_permission(org, uid)
|
||||
if not found:
|
||||
return None
|
||||
|
||||
found.delete_instance()
|
||||
return found
|
||||
|
||||
|
||||
def get_prototype_permission(org, uid):
|
||||
try:
|
||||
return PermissionPrototype.get(PermissionPrototype.org == org,
|
||||
PermissionPrototype.uuid == uid)
|
||||
except PermissionPrototype.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_prototype_permissions(org):
|
||||
ActivatingUser = User.alias()
|
||||
DelegateUser = User.alias()
|
||||
query = (PermissionPrototype
|
||||
.select()
|
||||
.where(PermissionPrototype.org == org)
|
||||
.join(ActivatingUser, JOIN_LEFT_OUTER,
|
||||
on=(ActivatingUser.id == PermissionPrototype.activating_user))
|
||||
.join(DelegateUser, JOIN_LEFT_OUTER,
|
||||
on=(DelegateUser.id == PermissionPrototype.delegate_user))
|
||||
.join(Team, JOIN_LEFT_OUTER,
|
||||
on=(Team.id == PermissionPrototype.delegate_team))
|
||||
.join(Role, JOIN_LEFT_OUTER, on=(Role.id == PermissionPrototype.role)))
|
||||
return query
|
||||
|
||||
|
||||
def update_prototype_permission(org, uid, role_name):
|
||||
found = get_prototype_permission(org, uid)
|
||||
if not found:
|
||||
return None
|
||||
|
||||
new_role = Role.get(Role.name == role_name)
|
||||
found.role = new_role
|
||||
found.save()
|
||||
return found
|
||||
|
||||
|
||||
def add_prototype_permission(org, role_name, activating_user,
|
||||
delegate_user=None, delegate_team=None):
|
||||
new_role = Role.get(Role.name == role_name)
|
||||
return PermissionPrototype.create(org=org, role=new_role, activating_user=activating_user,
|
||||
delegate_user=delegate_user, delegate_team=delegate_team)
|
||||
|
||||
|
||||
def get_org_wide_permissions(user):
|
||||
Org = User.alias()
|
||||
team_with_role = Team.select(Team, Org, TeamRole).join(TeamRole)
|
||||
with_org = team_with_role.switch(Team).join(Org, on=(Team.organization ==
|
||||
Org.id))
|
||||
with_user = with_org.switch(Team).join(TeamMember).join(User)
|
||||
return with_user.where(User.id == user, Org.organization == True)
|
||||
|
||||
|
||||
def get_all_repo_teams(namespace_name, repository_name):
|
||||
return (RepositoryPermission
|
||||
.select(Team.name, Role.name, RepositoryPermission)
|
||||
.join(Team)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Role)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
|
||||
|
||||
def apply_default_permissions(repo_obj, creating_user_obj):
|
||||
org = repo_obj.namespace_user
|
||||
user_clause = ((PermissionPrototype.activating_user == creating_user_obj) |
|
||||
(PermissionPrototype.activating_user >> None))
|
||||
|
||||
team_protos = (PermissionPrototype
|
||||
.select()
|
||||
.where(PermissionPrototype.org == org, user_clause,
|
||||
PermissionPrototype.delegate_user >> None))
|
||||
|
||||
def create_team_permission(team, repo, role):
|
||||
RepositoryPermission.create(team=team, repository=repo, role=role)
|
||||
|
||||
__apply_permission_list(repo_obj, team_protos, 'name', create_team_permission)
|
||||
|
||||
user_protos = (PermissionPrototype
|
||||
.select()
|
||||
.where(PermissionPrototype.org == org, user_clause,
|
||||
PermissionPrototype.delegate_team >> None))
|
||||
|
||||
def create_user_permission(user, repo, role):
|
||||
# The creating user always gets admin anyway
|
||||
if user.username == creating_user_obj.username:
|
||||
return
|
||||
|
||||
RepositoryPermission.create(user=user, repository=repo, role=role)
|
||||
|
||||
__apply_permission_list(repo_obj, user_protos, 'username', create_user_permission)
|
||||
|
||||
|
||||
def __apply_permission_list(repo, proto_query, name_property, create_permission_func):
|
||||
final_protos = {}
|
||||
for proto in proto_query:
|
||||
applies_to = proto.delegate_team or proto.delegate_user
|
||||
name = getattr(applies_to, name_property)
|
||||
# We will skip the proto if it is pre-empted by a more important proto
|
||||
if name in final_protos and proto.activating_user is None:
|
||||
continue
|
||||
|
||||
# By this point, it is either a user specific proto, or there is no
|
||||
# proto yet, so we can safely assume it applies
|
||||
final_protos[name] = (applies_to, proto.role)
|
||||
|
||||
for delegate, role in final_protos.values():
|
||||
create_permission_func(delegate, repo, role)
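The precedence rule applied above can be shown with a small standalone sketch (not part of this commit; the delegate and user names are placeholders): a prototype tied to a specific activating user wins over an org-wide default (activating_user is None) that targets the same delegate.

final_protos = {}
# (delegate name, role, activating user); None marks an org-wide default prototype.
protos = [('builders', 'write', None), ('builders', 'admin', 'some_creator')]
for name, role, activating_user in protos:
  if name in final_protos and activating_user is None:
    continue  # an org-wide default never pre-empts an existing, more specific entry
  final_protos[name] = role
assert final_protos == {'builders': 'admin'}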
|
||||
|
||||
|
||||
def __entity_permission_repo_query(entity_id, entity_table, entity_id_property, namespace_name,
|
||||
repository_name):
|
||||
""" This method works for both users and teams. """
|
||||
|
||||
return (RepositoryPermission
|
||||
.select(entity_table, Repository, Namespace, Role, RepositoryPermission)
|
||||
.join(entity_table)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Role)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name,
|
||||
entity_id_property == entity_id))
|
||||
|
||||
|
||||
def get_user_reponame_permission(username, namespace_name, repository_name):
|
||||
fetched = list(__entity_permission_repo_query(username, User, User.username, namespace_name,
|
||||
repository_name))
|
||||
if not fetched:
|
||||
raise DataModelException('User does not have permission for repo.')
|
||||
|
||||
return fetched[0]
|
||||
|
||||
|
||||
def get_team_reponame_permission(team_name, namespace_name, repository_name):
|
||||
fetched = list(__entity_permission_repo_query(team_name, Team, Team.name, namespace_name,
|
||||
repository_name))
|
||||
if not fetched:
|
||||
raise DataModelException('Team does not have permission for repo.')
|
||||
|
||||
return fetched[0]
|
||||
|
||||
|
||||
def delete_user_permission(username, namespace_name, repository_name):
|
||||
if username == namespace_name:
|
||||
raise DataModelException('Namespace owner must always be admin.')
|
||||
|
||||
fetched = list(__entity_permission_repo_query(username, User, User.username, namespace_name,
|
||||
repository_name))
|
||||
if not fetched:
|
||||
raise DataModelException('User does not have permission for repo.')
|
||||
|
||||
fetched[0].delete_instance()
|
||||
|
||||
|
||||
def delete_team_permission(team_name, namespace_name, repository_name):
|
||||
fetched = list(__entity_permission_repo_query(team_name, Team, Team.name, namespace_name,
|
||||
repository_name))
|
||||
if not fetched:
|
||||
raise DataModelException('Team does not have permission for repo.')
|
||||
|
||||
fetched[0].delete_instance()
|
||||
|
||||
|
||||
def __set_entity_repo_permission(entity, permission_entity_property,
|
||||
namespace_name, repository_name, role_name):
|
||||
repo = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
new_role = Role.get(Role.name == role_name)
|
||||
|
||||
# Fetch any existing permission for this entity on the repo
|
||||
try:
|
||||
entity_attr = getattr(RepositoryPermission, permission_entity_property)
|
||||
perm = RepositoryPermission.get(entity_attr == entity, RepositoryPermission.repository == repo)
|
||||
perm.role = new_role
|
||||
perm.save()
|
||||
return perm
|
||||
except RepositoryPermission.DoesNotExist:
|
||||
set_entity_kwargs = {permission_entity_property: entity}
|
||||
new_perm = RepositoryPermission.create(repository=repo, role=new_role, **set_entity_kwargs)
|
||||
return new_perm
|
||||
|
||||
|
||||
def set_user_repo_permission(username, namespace_name, repository_name, role_name):
|
||||
if username == namespace_name:
|
||||
raise DataModelException('Namespace owner must always be admin.')
|
||||
|
||||
try:
|
||||
user = User.get(User.username == username)
|
||||
except User.DoesNotExist:
|
||||
raise DataModelException('Invalid username: %s' % username)
|
||||
return __set_entity_repo_permission(user, 'user', namespace_name, repository_name, role_name)
|
||||
|
||||
|
||||
def set_team_repo_permission(team_name, namespace_name, repository_name, role_name):
|
||||
try:
|
||||
team = (Team
|
||||
.select()
|
||||
.join(User)
|
||||
.where(Team.name == team_name, User.username == namespace_name)
|
||||
.get())
|
||||
except Team.DoesNotExist:
|
||||
raise DataModelException('No team %s in organization %s' % (team_name, namespace_name))
|
||||
|
||||
return __set_entity_repo_permission(team, 'team', namespace_name, repository_name, role_name)
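A minimal sketch of granting and revoking a repository permission through the new permission submodule (not part of this commit; the usernames and repository names are placeholders, and a configured database is assumed).

from data.model import permission

def example_grant_and_revoke():
  permission.set_user_repo_permission('someuser', 'someorg', 'somerepo', 'write')
  perm = permission.get_user_reponame_permission('someuser', 'someorg', 'somerepo')
  print('Granted role: %s' % perm.role.name)
  permission.delete_user_permission('someuser', 'someorg', 'somerepo')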
|
||||
|
||||
|
377  data/model/repository.py  Normal file
|
@ -0,0 +1,377 @@
|
|||
import logging
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER, fn
|
||||
from datetime import timedelta, datetime
|
||||
|
||||
from data.model import (DataModelException, tag, db_transaction, storage, image, permission,
|
||||
_basequery)
|
||||
from data.database import (Repository, Namespace, RepositoryTag, Star, Image, ImageStorage, User,
|
||||
Visibility, RepositoryPermission, TupleSelector, RepositoryActionCount,
|
||||
Role, RepositoryAuthorizedEmail, db_for_update)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_repository(namespace, name, creating_user, visibility='private'):
|
||||
private = Visibility.get(name=visibility)
|
||||
namespace_user = User.get(username=namespace)
|
||||
repo = Repository.create(name=name, visibility=private, namespace_user=namespace_user)
|
||||
admin = Role.get(name='admin')
|
||||
|
||||
if creating_user and not creating_user.organization:
|
||||
RepositoryPermission.create(user=creating_user, repository=repo, role=admin)
|
||||
|
||||
if creating_user.username != namespace:
|
||||
# Permission prototypes only work for orgs
|
||||
permission.apply_default_permissions(repo, creating_user)
|
||||
return repo
|
||||
|
||||
|
||||
def get_repository(namespace_name, repository_name):
|
||||
try:
|
||||
return _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
except Repository.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def _purge_all_repository_tags(namespace_name, repository_name):
|
||||
""" Immediately purge all repository tags without respecting the lifeline procedure """
|
||||
try:
|
||||
repo = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
except Repository.DoesNotExist:
|
||||
raise DataModelException('Invalid repository \'%s/%s\'' %
|
||||
(namespace_name, repository_name))
|
||||
RepositoryTag.delete().where(RepositoryTag.repository == repo.id).execute()
|
||||
|
||||
|
||||
def purge_repository(namespace_name, repository_name):
|
||||
# Delete all tags to allow gc to reclaim storage
|
||||
_purge_all_repository_tags(namespace_name, repository_name)
|
||||
|
||||
# Gc to remove the images and storage
|
||||
garbage_collect_repository(namespace_name, repository_name)
|
||||
|
||||
# Delete the rest of the repository metadata
|
||||
fetched = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
fetched.delete_instance(recursive=True, delete_nullable=False)
|
||||
|
||||
|
||||
def garbage_collect_repository(namespace_name, repository_name):
|
||||
storage_id_whitelist = {}
|
||||
|
||||
tag.garbage_collect_tags(namespace_name, repository_name)
|
||||
|
||||
with db_transaction():
|
||||
# TODO (jake): We could probably select this and all the images in a single query using
|
||||
# a different kind of join.
|
||||
|
||||
# Get a list of all images used by tags in the repository
|
||||
tag_query = (RepositoryTag
|
||||
.select(RepositoryTag, Image, ImageStorage)
|
||||
.join(Image)
|
||||
.join(ImageStorage, JOIN_LEFT_OUTER)
|
||||
.switch(RepositoryTag)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name))
|
||||
|
||||
referenced_ancestors = set()
|
||||
for one_tag in tag_query:
|
||||
# The ancestor list is in the format '/1/2/3/', extract just the ids
|
||||
ancestor_id_strings = one_tag.image.ancestors.split('/')[1:-1]
|
||||
ancestor_list = [int(img_id_str) for img_id_str in ancestor_id_strings]
|
||||
referenced_ancestors = referenced_ancestors.union(set(ancestor_list))
|
||||
referenced_ancestors.add(one_tag.image.id)
|
||||
|
||||
all_repo_images = image.get_repository_images(namespace_name, repository_name)
|
||||
all_images = {int(img.id): img for img in all_repo_images}
|
||||
to_remove = set(all_images.keys()).difference(referenced_ancestors)
|
||||
|
||||
if len(to_remove) > 0:
|
||||
logger.info('Cleaning up unreferenced images: %s', to_remove)
|
||||
storage_id_whitelist = {all_images[to_remove_id].storage.id for to_remove_id in to_remove}
|
||||
Image.delete().where(Image.id << list(to_remove)).execute()
|
||||
|
||||
if len(to_remove) > 0:
|
||||
logger.info('Garbage collecting storage for images: %s', to_remove)
|
||||
storage.garbage_collect_storage(storage_id_whitelist)
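A standalone sketch of the ancestor-string handling used in the loop above (not part of this commit): the ancestors column stores ids as '/1/2/3/', so dropping the empty ends of the split and casting yields the referenced image ids.

ancestors = '/1/2/3/'
ancestor_ids = [int(img_id) for img_id in ancestors.split('/')[1:-1]]
assert ancestor_ids == [1, 2, 3]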
|
||||
|
||||
|
||||
def star_repository(user, repository):
|
||||
""" Stars a repository. """
|
||||
star = Star.create(user=user.id, repository=repository.id)
|
||||
star.save()
|
||||
|
||||
|
||||
def unstar_repository(user, repository):
|
||||
""" Unstars a repository. """
|
||||
try:
|
||||
(Star
|
||||
.delete()
|
||||
.where(Star.repository == repository.id, Star.user == user.id)
|
||||
.execute())
|
||||
except Star.DoesNotExist:
|
||||
raise DataModelException('Star not found.')
|
||||
|
||||
|
||||
def get_user_starred_repositories(user, limit=None, page=None):
|
||||
""" Retrieves all of the repositories a user has starred. """
|
||||
query = (Repository
|
||||
.select(Repository, User, Visibility)
|
||||
.join(Star)
|
||||
.switch(Repository)
|
||||
.join(User)
|
||||
.switch(Repository)
|
||||
.join(Visibility)
|
||||
.where(Star.user == user))
|
||||
|
||||
if page and limit:
|
||||
query = query.paginate(page, limit)
|
||||
elif limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def repository_is_starred(user, repository):
|
||||
""" Determines whether a user has starred a repository or not. """
|
||||
try:
|
||||
(Star
|
||||
.select()
|
||||
.where(Star.repository == repository.id, Star.user == user.id)
|
||||
.get())
|
||||
return True
|
||||
except Star.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def get_when_last_modified(repository_ids):
|
||||
tuples = (RepositoryTag
|
||||
.select(RepositoryTag.repository, fn.Max(RepositoryTag.lifetime_start_ts))
|
||||
.where(RepositoryTag.repository << repository_ids)
|
||||
.group_by(RepositoryTag.repository)
|
||||
.tuples())
|
||||
|
||||
last_modified_map = {}
|
||||
for record in tuples:
|
||||
last_modified_map[record[0]] = record[1]
|
||||
|
||||
return last_modified_map
|
||||
|
||||
|
||||
def get_action_counts(repository_ids):
|
||||
# Filter the join to recent entries only.
|
||||
last_week = datetime.now() - timedelta(weeks=1)
|
||||
tuples = (RepositoryActionCount
|
||||
.select(RepositoryActionCount.repository, fn.Sum(RepositoryActionCount.count))
|
||||
.where(RepositoryActionCount.repository << repository_ids)
|
||||
.where(RepositoryActionCount.date >= last_week)
|
||||
.group_by(RepositoryActionCount.repository)
|
||||
.tuples())
|
||||
|
||||
action_count_map = {}
|
||||
for record in tuples:
|
||||
action_count_map[record[0]] = record[1]
|
||||
|
||||
return action_count_map
|
||||
|
||||
|
||||
def get_visible_repositories(username=None, include_public=True, page=None,
|
||||
limit=None, namespace=None, namespace_only=False):
|
||||
fields = [Repository.name, Repository.id, Repository.description, Visibility.name,
|
||||
Namespace.username]
|
||||
|
||||
query = _visible_repository_query(username=username, include_public=include_public, page=page,
|
||||
limit=limit, namespace=namespace,
|
||||
select_models=fields)
|
||||
|
||||
if limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
if namespace and namespace_only:
|
||||
query = query.where(Namespace.username == namespace)
|
||||
|
||||
return TupleSelector(query, fields)
|
||||
|
||||
|
||||
def _visible_repository_query(username=None, include_public=True, limit=None,
|
||||
page=None, namespace=None, select_models=[]):
|
||||
query = (Repository
|
||||
.select(*select_models) # MySQL/RDS complains if there are selected models for counts.
|
||||
.distinct()
|
||||
.join(Visibility)
|
||||
.switch(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(Repository)
|
||||
.join(RepositoryPermission, JOIN_LEFT_OUTER))
|
||||
|
||||
query = _basequery.filter_to_repos_for_user(query, username, namespace, include_public)
|
||||
if page:
|
||||
query = query.paginate(page, limit)
|
||||
elif limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def get_sorted_matching_repositories(prefix, only_public, checker, limit=10):
|
||||
""" Returns repositories matching the given prefix string and passing the given checker
|
||||
function.
|
||||
"""
|
||||
last_week = datetime.now() - timedelta(weeks=1)
|
||||
results = []
|
||||
existing_ids = []
|
||||
|
||||
def get_search_results(search_clause, with_count=False):
|
||||
if len(results) >= limit:
|
||||
return
|
||||
|
||||
select_items = [Repository, Namespace]
|
||||
if with_count:
|
||||
select_items.append(fn.Sum(RepositoryActionCount.count).alias('count'))
|
||||
|
||||
query = (Repository
|
||||
.select(*select_items)
|
||||
.join(Namespace, JOIN_LEFT_OUTER, on=(Namespace.id == Repository.namespace_user))
|
||||
.switch(Repository)
|
||||
.where(search_clause)
|
||||
.group_by(Repository, Namespace))
|
||||
|
||||
if only_public:
|
||||
query = query.where(Repository.visibility == _basequery.get_public_repo_visibility())
|
||||
|
||||
if existing_ids:
|
||||
query = query.where(~(Repository.id << existing_ids))
|
||||
|
||||
if with_count:
|
||||
query = (query
|
||||
.switch(Repository)
|
||||
.join(RepositoryActionCount)
|
||||
.where(RepositoryActionCount.date >= last_week)
|
||||
.order_by(fn.Sum(RepositoryActionCount.count).desc()))
|
||||
|
||||
for result in query:
|
||||
if len(results) >= limit:
|
||||
return results
|
||||
|
||||
# Note: We compare IDs here, instead of objects, because calling .visibility on the
# Repository will kick off a new SQL query to retrieve that visibility enum value. We also
# don't join the visibility table in SQL because it is ungodly slow in MySQL :-/
|
||||
result.is_public = result.visibility_id == _basequery.get_public_repo_visibility().id
|
||||
result.count = result.count if with_count else 0
|
||||
|
||||
if not checker(result):
|
||||
continue
|
||||
|
||||
results.append(result)
|
||||
existing_ids.append(result.id)
|
||||
|
||||
# For performance reasons, we conduct the repo name and repo namespace searches on their
# own. This also lets us give higher precedence to repository name matches over
# namespace matches, which is semantically correct.
|
||||
get_search_results(Repository.name ** (prefix + '%'), with_count=True)
|
||||
get_search_results(Repository.name ** (prefix + '%'), with_count=False)
|
||||
|
||||
get_search_results(Namespace.username ** (prefix + '%'), with_count=True)
|
||||
get_search_results(Namespace.username ** (prefix + '%'), with_count=False)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def get_matching_repositories(repo_term, username=None, limit=10, include_public=True):
|
||||
namespace_term = repo_term
|
||||
name_term = repo_term
|
||||
|
||||
visible = _visible_repository_query(username, include_public=include_public)
|
||||
|
||||
search_clauses = (Repository.name ** ('%' + name_term + '%') |
|
||||
Namespace.username ** ('%' + namespace_term + '%'))
|
||||
|
||||
# Handle the case where the user has already entered a namespace path.
|
||||
if repo_term.find('/') > 0:
|
||||
parts = repo_term.split('/', 1)
|
||||
namespace_term = '/'.join(parts[:-1])
|
||||
name_term = parts[-1]
|
||||
|
||||
search_clauses = (Repository.name ** ('%' + name_term + '%') &
|
||||
Namespace.username ** ('%' + namespace_term + '%'))
|
||||
|
||||
return visible.where(search_clauses).limit(limit)
|
||||
|
||||
|
||||
def lookup_repository(repo_id):
|
||||
try:
|
||||
return Repository.get(Repository.id == repo_id)
|
||||
except Repository.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def is_repository_public(repository):
|
||||
return repository.visibility == _basequery.get_public_repo_visibility()
|
||||
|
||||
|
||||
def repository_is_public(namespace_name, repository_name):
|
||||
try:
|
||||
(Repository
|
||||
.select()
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(Repository)
|
||||
.join(Visibility)
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name,
|
||||
Visibility.name == 'public')
|
||||
.get())
|
||||
return True
|
||||
except Repository.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def set_repository_visibility(repo, visibility):
|
||||
visibility_obj = Visibility.get(name=visibility)
|
||||
if not visibility_obj:
|
||||
return
|
||||
|
||||
repo.visibility = visibility_obj
|
||||
repo.save()
|
||||
|
||||
|
||||
def get_email_authorized_for_repo(namespace, repository, email):
|
||||
try:
|
||||
return (RepositoryAuthorizedEmail
|
||||
.select(RepositoryAuthorizedEmail, Repository, Namespace)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace, Repository.name == repository,
|
||||
RepositoryAuthorizedEmail.email == email)
|
||||
.get())
|
||||
except RepositoryAuthorizedEmail.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def create_email_authorization_for_repo(namespace_name, repository_name, email):
|
||||
try:
|
||||
repo = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
except Repository.DoesNotExist:
|
||||
raise DataModelException('Invalid repository %s/%s' %
|
||||
(namespace_name, repository_name))
|
||||
|
||||
return RepositoryAuthorizedEmail.create(repository=repo, email=email, confirmed=False)
|
||||
|
||||
|
||||
def confirm_email_authorization_for_repo(code):
|
||||
try:
|
||||
found = (RepositoryAuthorizedEmail
|
||||
.select(RepositoryAuthorizedEmail, Repository, Namespace)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(RepositoryAuthorizedEmail.code == code)
|
||||
.get())
|
||||
except RepositoryAuthorizedEmail.DoesNotExist:
|
||||
raise DataModelException('Invalid confirmation code.')
|
||||
|
||||
found.confirmed = True
|
||||
found.save()
|
||||
|
||||
return found
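A minimal usage sketch for the repository submodule (not part of this commit): the namespace, repository name, and creating user are placeholders, and a configured database and storage engine are assumed since purging triggers garbage collection.

from data.model import repository

def example_repo_lifecycle(creating_user):
  repo = repository.create_repository('someorg', 'scratch', creating_user, visibility='private')
  repository.set_repository_visibility(repo, 'public')
  print('Public: %s' % repository.repository_is_public('someorg', 'scratch'))
  repository.purge_repository('someorg', 'scratch')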
|
||||
|
||||
|
195  data/model/storage.py  Normal file
|
@ -0,0 +1,195 @@
|
|||
import logging
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER, fn
|
||||
|
||||
from data.model import config, db_transaction, InvalidImageException
|
||||
from data.database import (ImageStorage, Image, DerivedImageStorage, ImageStoragePlacement,
|
||||
ImageStorageLocation, ImageStorageTransformation, ImageStorageSignature,
|
||||
ImageStorageSignatureKind)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def find_or_create_derived_storage(source, transformation_name, preferred_location):
|
||||
existing = find_derived_storage(source, transformation_name)
|
||||
if existing is not None:
|
||||
return existing
|
||||
|
||||
logger.debug('Creating storage derived from source: %s', source.uuid)
|
||||
trans = ImageStorageTransformation.get(name=transformation_name)
|
||||
new_storage = create_storage(preferred_location)
|
||||
DerivedImageStorage.create(source=source, derivative=new_storage, transformation=trans)
|
||||
return new_storage
|
||||
|
||||
|
||||
def garbage_collect_storage(storage_id_whitelist):
|
||||
if len(storage_id_whitelist) == 0:
|
||||
return
|
||||
|
||||
def placements_query_to_paths_set(placements_query):
|
||||
return {(placement.location.name, config.store.image_path(placement.storage.uuid))
|
||||
for placement in placements_query}
|
||||
|
||||
def orphaned_storage_query(select_base_query, candidates, group_by):
|
||||
return (select_base_query
|
||||
.switch(ImageStorage)
|
||||
.join(Image, JOIN_LEFT_OUTER)
|
||||
.switch(ImageStorage)
|
||||
.join(DerivedImageStorage, JOIN_LEFT_OUTER,
|
||||
on=(ImageStorage.id == DerivedImageStorage.derivative))
|
||||
.where(ImageStorage.id << list(candidates))
|
||||
.group_by(*group_by)
|
||||
.having((fn.Count(Image.id) == 0) & (fn.Count(DerivedImageStorage.id) == 0)))
|
||||
|
||||
# Note: We remove the derived image storage in its own transaction as a way to reduce the
|
||||
# time that the transaction holds on the database indices. This could result in a derived
|
||||
# image storage being deleted for an image storage which is later reused during this time,
|
||||
# but since these are caches anyway, it isn't terrible and worth the tradeoff (for now).
|
||||
logger.debug('Garbage collecting derived storage from candidates: %s', storage_id_whitelist)
|
||||
with db_transaction():
|
||||
# Find out which derived storages will be removed, and add them to the whitelist
|
||||
# The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
|
||||
orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
|
||||
storage_id_whitelist,
|
||||
(ImageStorage.id,)))
|
||||
|
||||
if len(orphaned_from_candidates) > 0:
|
||||
derived_to_remove = (ImageStorage
|
||||
.select(ImageStorage.id)
|
||||
.join(DerivedImageStorage,
|
||||
on=(ImageStorage.id == DerivedImageStorage.derivative))
|
||||
.where(DerivedImageStorage.source << orphaned_from_candidates))
|
||||
storage_id_whitelist.update({derived.id for derived in derived_to_remove})
|
||||
|
||||
# Remove the derived image storages with sources of orphaned storages
|
||||
(DerivedImageStorage
|
||||
.delete()
|
||||
.where(DerivedImageStorage.source << orphaned_from_candidates)
|
||||
.execute())
|
||||
|
||||
# Note: Both of these deletes must occur in the same transaction (unfortunately) because a
|
||||
# storage without any placement is invalid, and a placement cannot exist without a storage.
|
||||
# TODO(jake): We might want to allow for null storages on placements, which would allow us to
|
||||
# delete the storages, then delete the placements in a non-transaction.
|
||||
logger.debug('Garbage collecting storages from candidates: %s', storage_id_whitelist)
|
||||
with db_transaction():
|
||||
# Track all of the data that should be removed from blob storage
|
||||
placements_to_remove = list(orphaned_storage_query(ImageStoragePlacement
|
||||
.select(ImageStoragePlacement,
|
||||
ImageStorage,
|
||||
ImageStorageLocation)
|
||||
.join(ImageStorageLocation)
|
||||
.switch(ImageStoragePlacement)
|
||||
.join(ImageStorage),
|
||||
storage_id_whitelist,
|
||||
(ImageStorage, ImageStoragePlacement,
|
||||
ImageStorageLocation)))
|
||||
|
||||
paths_to_remove = placements_query_to_paths_set(placements_to_remove)
|
||||
|
||||
# Remove the placements for orphaned storages
|
||||
if len(placements_to_remove) > 0:
|
||||
placement_ids_to_remove = [placement.id for placement in placements_to_remove]
|
||||
placements_removed = (ImageStoragePlacement
|
||||
.delete()
|
||||
.where(ImageStoragePlacement.id << placement_ids_to_remove)
|
||||
.execute())
|
||||
logger.debug('Removed %s image storage placements', placements_removed)
|
||||
|
||||
# Remove all orphaned storages
|
||||
# The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
|
||||
orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
|
||||
storage_id_whitelist,
|
||||
(ImageStorage.id,)).alias('osq'))
|
||||
if len(orphaned_storages) > 0:
|
||||
storages_removed = (ImageStorage
|
||||
.delete()
|
||||
.where(ImageStorage.id << orphaned_storages)
|
||||
.execute())
|
||||
logger.debug('Removed %s image storage records', storages_removed)
|
||||
|
||||
# We are going to make the conscious decision to not delete image storage blobs inside
|
||||
# transactions.
|
||||
# This may end up producing garbage in s3, trading off for higher availability in the database.
|
||||
for location_name, image_path in paths_to_remove:
|
||||
logger.debug('Removing %s from %s', image_path, location_name)
|
||||
config.store.remove({location_name}, image_path)
|
||||
|
||||
|
||||
def create_storage(location_name):
|
||||
storage = ImageStorage.create()
|
||||
location = ImageStorageLocation.get(name=location_name)
|
||||
ImageStoragePlacement.create(location=location, storage=storage)
|
||||
storage.locations = {location_name}
|
||||
return storage
|
||||
|
||||
|
||||
def find_or_create_storage_signature(storage, signature_kind):
|
||||
found = lookup_storage_signature(storage, signature_kind)
|
||||
if found is None:
|
||||
kind = ImageStorageSignatureKind.get(name=signature_kind)
|
||||
found = ImageStorageSignature.create(storage=storage, kind=kind)
|
||||
|
||||
return found
|
||||
|
||||
|
||||
def lookup_storage_signature(storage, signature_kind):
|
||||
kind = ImageStorageSignatureKind.get(name=signature_kind)
|
||||
try:
|
||||
return (ImageStorageSignature
|
||||
.select()
|
||||
.where(ImageStorageSignature.storage == storage,
|
||||
ImageStorageSignature.kind == kind)
|
||||
.get())
|
||||
except ImageStorageSignature.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def find_derived_storage(source, transformation_name):
|
||||
try:
|
||||
found = (ImageStorage
|
||||
.select(ImageStorage, DerivedImageStorage)
|
||||
.join(DerivedImageStorage, on=(ImageStorage.id == DerivedImageStorage.derivative))
|
||||
.join(ImageStorageTransformation)
|
||||
.where(DerivedImageStorage.source == source,
|
||||
ImageStorageTransformation.name == transformation_name)
|
||||
.get())
|
||||
|
||||
found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
|
||||
return found
|
||||
except ImageStorage.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def delete_derived_storage_by_uuid(storage_uuid):
|
||||
try:
|
||||
image_storage = get_storage_by_uuid(storage_uuid)
|
||||
except InvalidImageException:
|
||||
return
|
||||
|
||||
try:
|
||||
DerivedImageStorage.get(derivative=image_storage)
|
||||
except DerivedImageStorage.DoesNotExist:
|
||||
return
|
||||
|
||||
image_storage.delete_instance(recursive=True)
|
||||
|
||||
|
||||
def get_storage_by_uuid(storage_uuid):
|
||||
placements = list(ImageStoragePlacement
|
||||
.select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
|
||||
.join(ImageStorageLocation)
|
||||
.switch(ImageStoragePlacement)
|
||||
.join(ImageStorage)
|
||||
.where(ImageStorage.uuid == storage_uuid))
|
||||
|
||||
if not placements:
|
||||
raise InvalidImageException('No storage found with uuid: %s' % storage_uuid)
|
||||
|
||||
found = placements[0].storage
|
||||
found.locations = {placement.location.name for placement in placements}
|
||||
|
||||
return found
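A minimal sketch of the derived-storage helpers above (not part of this commit): 'squash' and 'local_us' stand in for a transformation name and a storage location that must already exist in the database.

from data.model import storage

def example_derived_storage(source_storage):
  # source_storage is an existing ImageStorage row (placeholder).
  derived = storage.find_or_create_derived_storage(source_storage, 'squash', 'local_us')
  found = storage.find_derived_storage(source_storage, 'squash')
  assert found is not None and found.id == derived.id
  storage.delete_derived_storage_by_uuid(derived.uuid)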
|
||||
|
||||
|
164  data/model/tag.py  Normal file
|
@ -0,0 +1,164 @@
|
|||
from uuid import uuid4
|
||||
|
||||
from data.model import image, db_transaction, DataModelException, _basequery
|
||||
from data.database import (RepositoryTag, Repository, Image, ImageStorage, Namespace,
|
||||
get_epoch_timestamp, db_for_update)
|
||||
|
||||
|
||||
def _tag_alive(query, now_ts=None):
|
||||
if now_ts is None:
|
||||
now_ts = get_epoch_timestamp()
|
||||
return query.where((RepositoryTag.lifetime_end_ts >> None) |
|
||||
(RepositoryTag.lifetime_end_ts > now_ts))
|
||||
|
||||
|
||||
def list_repository_tags(namespace_name, repository_name, include_hidden=False,
|
||||
include_storage=False):
|
||||
to_select = (RepositoryTag, Image)
|
||||
if include_storage:
|
||||
to_select = (RepositoryTag, Image, ImageStorage)
|
||||
|
||||
query = _tag_alive(RepositoryTag
|
||||
.select(*to_select)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(RepositoryTag)
|
||||
.join(Image)
|
||||
.where(Repository.name == repository_name,
|
||||
Namespace.username == namespace_name))
|
||||
|
||||
if not include_hidden:
|
||||
query = query.where(RepositoryTag.hidden == False)
|
||||
|
||||
if include_storage:
|
||||
query = query.switch(Image).join(ImageStorage)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def create_or_update_tag(namespace_name, repository_name, tag_name,
|
||||
tag_docker_image_id, reversion=False):
|
||||
try:
|
||||
repo = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
except Repository.DoesNotExist:
|
||||
raise DataModelException('Invalid repository %s/%s' % (namespace_name, repository_name))
|
||||
|
||||
now_ts = get_epoch_timestamp()
|
||||
|
||||
with db_transaction():
|
||||
try:
|
||||
tag = db_for_update(_tag_alive(RepositoryTag
|
||||
.select()
|
||||
.where(RepositoryTag.repository == repo,
|
||||
RepositoryTag.name == tag_name), now_ts)).get()
|
||||
tag.lifetime_end_ts = now_ts
|
||||
tag.save()
|
||||
except RepositoryTag.DoesNotExist:
|
||||
pass
|
||||
|
||||
try:
|
||||
image_obj = Image.get(Image.docker_image_id == tag_docker_image_id, Image.repository == repo)
|
||||
except Image.DoesNotExist:
|
||||
raise DataModelException('Invalid image with id: %s' % tag_docker_image_id)
|
||||
|
||||
return RepositoryTag.create(repository=repo, image=image_obj, name=tag_name,
|
||||
lifetime_start_ts=now_ts, reversion=reversion)
|
||||
|
||||
|
||||
def create_temporary_hidden_tag(repo, image_obj, expiration_s):
|
||||
""" Create a tag with a defined timeline, that will not appear in the UI or CLI. Returns the name
|
||||
of the temporary tag. """
|
||||
now_ts = get_epoch_timestamp()
|
||||
expire_ts = now_ts + expiration_s
|
||||
tag_name = str(uuid4())
|
||||
RepositoryTag.create(repository=repo, image=image_obj, name=tag_name, lifetime_start_ts=now_ts,
|
||||
lifetime_end_ts=expire_ts, hidden=True)
|
||||
return tag_name
|
||||
|
||||
|
||||
def delete_tag(namespace_name, repository_name, tag_name):
|
||||
now_ts = get_epoch_timestamp()
|
||||
with db_transaction():
|
||||
try:
|
||||
query = _tag_alive(RepositoryTag
|
||||
.select(RepositoryTag, Repository)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.name == repository_name,
|
||||
Namespace.username == namespace_name,
|
||||
RepositoryTag.name == tag_name), now_ts)
|
||||
found = db_for_update(query).get()
|
||||
except RepositoryTag.DoesNotExist:
|
||||
msg = ('Invalid repository tag \'%s\' on repository \'%s/%s\'' %
|
||||
(tag_name, namespace_name, repository_name))
|
||||
raise DataModelException(msg)
|
||||
|
||||
found.lifetime_end_ts = now_ts
|
||||
found.save()
|
||||
|
||||
|
||||
def garbage_collect_tags(namespace_name, repository_name):
|
||||
# We do this without using a join to prevent holding read locks on the repository table
|
||||
repo = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
expired_time = get_epoch_timestamp() - repo.namespace_user.removed_tag_expiration_s
|
||||
|
||||
tags_to_delete = list(RepositoryTag
|
||||
.select(RepositoryTag.id)
|
||||
.where(RepositoryTag.repository == repo,
|
||||
~(RepositoryTag.lifetime_end_ts >> None),
|
||||
(RepositoryTag.lifetime_end_ts <= expired_time))
|
||||
.order_by(RepositoryTag.id))
|
||||
if len(tags_to_delete) > 0:
|
||||
(RepositoryTag
|
||||
.delete()
|
||||
.where(RepositoryTag.id << tags_to_delete)
|
||||
.execute())
|
||||
|
||||
|
||||
def get_tag_image(namespace_name, repository_name, tag_name):
|
||||
def limit_to_tag(query):
|
||||
return _tag_alive(query
|
||||
.switch(Image)
|
||||
.join(RepositoryTag)
|
||||
.where(RepositoryTag.name == tag_name))
|
||||
|
||||
images = image.get_repository_images_base(namespace_name, repository_name, limit_to_tag)
|
||||
if not images:
|
||||
raise DataModelException('Unable to find image for tag.')
|
||||
else:
|
||||
return images[0]
|
||||
|
||||
|
||||
def list_repository_tag_history(repo_obj, page=1, size=100, specific_tag=None):
|
||||
query = (RepositoryTag
|
||||
.select(RepositoryTag, Image)
|
||||
.join(Image)
|
||||
.where(RepositoryTag.repository == repo_obj)
|
||||
.where(RepositoryTag.hidden == False)
|
||||
.order_by(RepositoryTag.lifetime_start_ts.desc())
|
||||
.paginate(page, size))
|
||||
|
||||
if specific_tag:
|
||||
query = query.where(RepositoryTag.name == specific_tag)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def revert_tag(repo_obj, tag_name, docker_image_id):
|
||||
""" Reverts a tag to a specific image ID. """
|
||||
# Verify that the image ID already existed under this repository under the
|
||||
# tag.
|
||||
try:
|
||||
(RepositoryTag
|
||||
.select()
|
||||
.join(Image)
|
||||
.where(RepositoryTag.repository == repo_obj)
|
||||
.where(RepositoryTag.name == tag_name)
|
||||
.where(Image.docker_image_id == docker_image_id)
|
||||
.get())
|
||||
except RepositoryTag.DoesNotExist:
|
||||
raise DataModelException('Cannot revert to unknown or invalid image')
|
||||
|
||||
return create_or_update_tag(repo_obj.namespace_user.username, repo_obj.name, tag_name,
|
||||
docker_image_id, reversion=True)
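A minimal sketch of the tag lifecycle exposed by this submodule (not part of this commit; the namespace, repository, tag name, and image id are placeholders, and a configured database is assumed).

from data.model import tag, repository

def example_tag_lifecycle(docker_image_id):
  tag.create_or_update_tag('someorg', 'somerepo', 'latest', docker_image_id)
  tag.delete_tag('someorg', 'somerepo', 'latest')  # soft delete: sets lifetime_end_ts
  repo = repository.get_repository('someorg', 'somerepo')
  tag.revert_tag(repo, 'latest', docker_image_id)  # recreates the tag as a reversion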
|
||||
|
276  data/model/team.py  Normal file
|
@ -0,0 +1,276 @@
|
|||
from data.database import Team, TeamMember, TeamRole, User, TeamMemberInvite, Repository
|
||||
from data.model import (DataModelException, InvalidTeamException, UserAlreadyInTeam,
|
||||
InvalidTeamMemberException, user, _basequery)
|
||||
from util.validation import validate_username
|
||||
|
||||
|
||||
def create_team(name, org_obj, team_role_name, description=''):
|
||||
(username_valid, username_issue) = validate_username(name)
|
||||
if not username_valid:
|
||||
raise InvalidTeamException('Invalid team name %s: %s' % (name, username_issue))
|
||||
|
||||
if not org_obj.organization:
|
||||
raise InvalidTeamException('Specified organization %s was not an organization' %
|
||||
org_obj.username)
|
||||
|
||||
team_role = TeamRole.get(TeamRole.name == team_role_name)
|
||||
return Team.create(name=name, organization=org_obj, role=team_role,
|
||||
description=description)
|
||||
|
||||
|
||||
def add_user_to_team(user_obj, team):
|
||||
try:
|
||||
return TeamMember.create(user=user_obj, team=team)
|
||||
except Exception:
|
||||
raise UserAlreadyInTeam('User %s is already a member of team %s' %
|
||||
(user_obj.username, team.name))
|
||||
|
||||
|
||||
def remove_user_from_team(org_name, team_name, username, removed_by_username):
|
||||
Org = User.alias()
|
||||
joined = TeamMember.select().join(User).switch(TeamMember).join(Team)
|
||||
with_role = joined.join(TeamRole)
|
||||
with_org = with_role.switch(Team).join(Org,
|
||||
on=(Org.id == Team.organization))
|
||||
found = list(with_org.where(User.username == username,
|
||||
Org.username == org_name,
|
||||
Team.name == team_name))
|
||||
|
||||
if not found:
|
||||
raise DataModelException('User %s does not belong to team %s' %
|
||||
(username, team_name))
|
||||
|
||||
if username == removed_by_username:
|
||||
admin_team_query = __get_user_admin_teams(org_name, username)
|
||||
admin_team_names = {team.name for team in admin_team_query}
|
||||
if team_name in admin_team_names and len(admin_team_names) <= 1:
|
||||
msg = 'User cannot remove themselves from their only admin team.'
|
||||
raise DataModelException(msg)
|
||||
|
||||
user_in_team = found[0]
|
||||
user_in_team.delete_instance()
|
||||
|
||||
|
||||
def get_team_org_role(team):
|
||||
return TeamRole.get(TeamRole.id == team.role.id)
|
||||
|
||||
|
||||
def set_team_org_permission(team, team_role_name, set_by_username):
|
||||
if team.role.name == 'admin' and team_role_name != 'admin':
|
||||
# We need to make sure we're not removing the user's only admin role
|
||||
user_admin_teams = __get_user_admin_teams(team.organization.username, set_by_username)
|
||||
admin_team_set = {admin_team.name for admin_team in user_admin_teams}
|
||||
if team.name in admin_team_set and len(admin_team_set) <= 1:
|
||||
msg = (('Cannot remove admin from team \'%s\' because calling user ' +
|
||||
'would no longer have admin on org \'%s\'') %
|
||||
(team.name, team.organization.username))
|
||||
raise DataModelException(msg)
|
||||
|
||||
new_role = TeamRole.get(TeamRole.name == team_role_name)
|
||||
team.role = new_role
|
||||
team.save()
|
||||
return team
|
||||
|
||||
|
||||
def __get_user_admin_teams(org_name, username):
|
||||
Org = User.alias()
|
||||
user_teams = Team.select().join(TeamMember).join(User)
|
||||
with_org = user_teams.switch(Team).join(Org,
|
||||
on=(Org.id == Team.organization))
|
||||
with_role = with_org.switch(Team).join(TeamRole)
|
||||
admin_teams = with_role.where(User.username == username,
|
||||
Org.username == org_name,
|
||||
TeamRole.name == 'admin')
|
||||
return admin_teams
|
||||
|
||||
|
||||
def remove_team(org_name, team_name, removed_by_username):
|
||||
joined = Team.select(Team, TeamRole).join(User).switch(Team).join(TeamRole)
|
||||
|
||||
found = list(joined.where(User.organization == True,
|
||||
User.username == org_name,
|
||||
Team.name == team_name))
|
||||
if not found:
|
||||
raise InvalidTeamException('Team \'%s\' is not a team in org \'%s\'' %
|
||||
(team_name, org_name))
|
||||
|
||||
team = found[0]
|
||||
if team.role.name == 'admin':
|
||||
admin_teams = list(__get_user_admin_teams(org_name, removed_by_username))
|
||||
|
||||
if len(admin_teams) <= 1:
|
||||
# The team we are trying to remove is the only admin team for this user
|
||||
msg = ('Deleting team \'%s\' would remove all admin from user \'%s\'' %
|
||||
(team_name, removed_by_username))
|
||||
raise DataModelException(msg)
|
||||
|
||||
team.delete_instance(recursive=True, delete_nullable=True)
|
||||
|
||||
|
||||
def add_or_invite_to_team(inviter, team, user_obj=None, email=None, requires_invite=True):
|
||||
# If the user is a member of the organization, then we simply add the
|
||||
# user directly to the team. Otherwise, an invite is created for the user/email.
|
||||
# We return None if the user was directly added and the invite object if the user was invited.
|
||||
if user_obj and requires_invite:
|
||||
orgname = team.organization.username
|
||||
|
||||
# If the user is part of the organization (or a robot), then no invite is required.
|
||||
if user_obj.robot:
|
||||
requires_invite = False
|
||||
if not user_obj.username.startswith(orgname + '+'):
|
||||
raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
|
||||
'as it is not a member of the organization')
|
||||
else:
|
||||
Org = User.alias()
|
||||
found = User.select(User.username)
|
||||
found = found.where(User.username == user_obj.username).join(TeamMember).join(Team)
|
||||
found = found.join(Org, on=(Org.username == orgname)).limit(1)
|
||||
requires_invite = not any(found)
|
||||
|
||||
# If we have a valid user and no invite is required, simply add the user to the team.
|
||||
if user_obj and not requires_invite:
|
||||
add_user_to_team(user_obj, team)
|
||||
return None
|
||||
|
||||
email_address = email if not user_obj else None
|
||||
return TeamMemberInvite.create(user=user_obj, email=email_address, team=team, inviter=inviter)
|
||||
|
||||
|
||||
def get_matching_user_teams(team_prefix, user_obj, limit=10):
|
||||
query = (Team
|
||||
.select()
|
||||
.join(User)
|
||||
.switch(Team)
|
||||
.join(TeamMember)
|
||||
.where(TeamMember.user == user_obj, Team.name ** (team_prefix + '%'))
|
||||
.distinct(Team.id)
|
||||
.limit(limit))
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def get_organization_team(orgname, teamname):
|
||||
joined = Team.select().join(User)
|
||||
query = joined.where(Team.name == teamname, User.organization == True,
|
||||
User.username == orgname).limit(1)
|
||||
result = list(query)
|
||||
if not result:
|
||||
raise InvalidTeamException('Team does not exist: %s/%s' % (orgname, teamname))
|
||||
|
||||
return result[0]
|
||||
|
||||
|
||||
def get_matching_admined_teams(team_prefix, user_obj, limit=10):
|
||||
admined_orgs = (_basequery.get_user_organizations(user_obj.username)
|
||||
.switch(Team)
|
||||
.join(TeamRole)
|
||||
.where(TeamRole.name == 'admin'))
|
||||
|
||||
query = (Team
|
||||
.select()
|
||||
.join(User)
|
||||
.switch(Team)
|
||||
.join(TeamMember)
|
||||
.where(Team.name ** (team_prefix + '%'), Team.organization << (admined_orgs))
|
||||
.distinct(Team.id)
|
||||
.limit(limit))
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def get_matching_teams(team_prefix, organization):
|
||||
query = Team.select().where(Team.name ** (team_prefix + '%'),
|
||||
Team.organization == organization)
|
||||
return query.limit(10)
|
||||
|
||||
|
||||
def get_teams_within_org(organization):
|
||||
return Team.select().where(Team.organization == organization)
|
||||
|
||||
|
||||
def get_user_teams_within_org(username, organization):
|
||||
joined = Team.select().join(TeamMember).join(User)
|
||||
return joined.where(Team.organization == organization,
|
||||
User.username == username)
|
||||
|
||||
|
||||
def list_organization_members_by_teams(organization):
|
||||
query = (TeamMember
|
||||
.select(Team, User)
|
||||
.annotate(Team)
|
||||
.annotate(User)
|
||||
.where(Team.organization == organization))
|
||||
return query
|
||||
|
||||
|
||||
def get_organization_team_member_invites(teamid):
|
||||
joined = TeamMemberInvite.select().join(Team).join(User)
|
||||
query = joined.where(Team.id == teamid)
|
||||
return query
|
||||
|
||||
|
||||
def delete_team_email_invite(team, email):
|
||||
found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
|
||||
found.delete_instance()
|
||||
|
||||
|
||||
def delete_team_user_invite(team, user_obj):
|
||||
try:
|
||||
found = TeamMemberInvite.get(TeamMemberInvite.user == user_obj, TeamMemberInvite.team == team)
|
||||
except TeamMemberInvite.DoesNotExist:
|
||||
return False
|
||||
|
||||
found.delete_instance()
|
||||
return True
|
||||
|
||||
|
||||
def lookup_team_invites(user_obj):
|
||||
return TeamMemberInvite.select().where(TeamMemberInvite.user == user_obj)
|
||||
|
||||
|
||||
def lookup_team_invite(code, user_obj=None):
|
||||
# Lookup the invite code.
|
||||
try:
|
||||
found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code)
|
||||
except TeamMemberInvite.DoesNotExist:
|
||||
raise DataModelException('Invalid confirmation code.')
|
||||
|
||||
if user_obj and found.user != user_obj:
|
||||
raise DataModelException('Invalid confirmation code.')
|
||||
|
||||
return found
|
||||
|
||||
|
||||
def delete_team_invite(code, user_obj=None):
|
||||
found = lookup_team_invite(code, user_obj)
|
||||
|
||||
team = found.team
|
||||
inviter = found.inviter
|
||||
|
||||
found.delete_instance()
|
||||
|
||||
return (team, inviter)
|
||||
|
||||
|
||||
def confirm_team_invite(code, user_obj):
|
||||
found = lookup_team_invite(code)
|
||||
|
||||
# If the invite is for a specific user, we have to confirm that here.
|
||||
if found.user is not None and found.user != user_obj:
|
||||
message = """This invite is intended for user "%s".
|
||||
Please login to that account and try again.""" % found.user.username
|
||||
raise DataModelException(message)
|
||||
|
||||
# Add the user to the team.
|
||||
try:
|
||||
add_user_to_team(user_obj, found.team)
|
||||
except UserAlreadyInTeam:
|
||||
# Ignore.
|
||||
pass
|
||||
|
||||
# Delete the invite and return the team.
|
||||
team = found.team
|
||||
inviter = found.inviter
|
||||
found.delete_instance()
|
||||
return (team, inviter)
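A minimal sketch of the invite flow above (not part of this commit; the organization, team, inviter, and email address are placeholders).

from data.model import team

def example_invite_flow(inviter_user):
  owners = team.get_organization_team('someorg', 'owners')
  invite = team.add_or_invite_to_team(inviter_user, owners, email='newcomer@example.com')
  if invite is not None:
    print('Invite created with code: %s' % invite.invite_token)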
|
87  data/model/token.py  Normal file
|
@ -0,0 +1,87 @@
|
|||
import logging
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER
|
||||
|
||||
from data.database import (AccessToken, AccessTokenKind, Repository, Namespace, Role,
|
||||
RepositoryBuildTrigger, LogEntryKind)
|
||||
from data.model import DataModelException, _basequery, InvalidTokenException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_access_token(repo, role, kind=None, friendly_name=None):
|
||||
role = Role.get(Role.name == role)
|
||||
kind_ref = None
|
||||
if kind is not None:
|
||||
kind_ref = AccessTokenKind.get(AccessTokenKind.name == kind)
|
||||
|
||||
new_token = AccessToken.create(repository=repo, temporary=True, role=role, kind=kind_ref,
|
||||
friendly_name=friendly_name)
|
||||
return new_token
|
||||
|
||||
|
||||
def create_delegate_token(namespace_name, repository_name, friendly_name,
|
||||
role='read'):
|
||||
read_only = Role.get(name=role)
|
||||
repo = _basequery.get_existing_repository(namespace_name, repository_name)
|
||||
new_token = AccessToken.create(repository=repo, role=read_only,
|
||||
friendly_name=friendly_name, temporary=False)
|
||||
return new_token
|
||||
|
||||
|
||||
def get_repository_delegate_tokens(namespace_name, repository_name):
|
||||
return (AccessToken
|
||||
.select(AccessToken, Role)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(AccessToken)
|
||||
.join(Role)
|
||||
.switch(AccessToken)
|
||||
.join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name,
|
||||
AccessToken.temporary == False, RepositoryBuildTrigger.uuid >> None))
|
||||
|
||||
|
||||
def get_repo_delegate_token(namespace_name, repository_name, code):
|
||||
repo_query = get_repository_delegate_tokens(namespace_name, repository_name)
|
||||
|
||||
try:
|
||||
return repo_query.where(AccessToken.code == code).get()
|
||||
except AccessToken.DoesNotExist:
|
||||
raise InvalidTokenException('Unable to find token with code: %s' % code)
|
||||
|
||||
|
||||
def set_repo_delegate_token_role(namespace_name, repository_name, code, role):
|
||||
token = get_repo_delegate_token(namespace_name, repository_name, code)
|
||||
|
||||
if role != 'read' and role != 'write':
|
||||
raise DataModelException('Invalid role for delegate token: %s' % role)
|
||||
|
||||
new_role = Role.get(Role.name == role)
|
||||
token.role = new_role
|
||||
token.save()
|
||||
|
||||
return token
|
||||
|
||||
|
||||
def delete_delegate_token(namespace_name, repository_name, code):
|
||||
token = get_repo_delegate_token(namespace_name, repository_name, code)
|
||||
token.delete_instance(recursive=True)
|
||||
return token
|
||||
|
||||
|
||||
def load_token_data(code):
|
||||
""" Load the permissions for any token by code. """
|
||||
try:
|
||||
return (AccessToken
|
||||
.select(AccessToken, Repository, Namespace, Role)
|
||||
.join(Role)
|
||||
.switch(AccessToken)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(AccessToken.code == code)
|
||||
.get())
|
||||
|
||||
except AccessToken.DoesNotExist:
|
||||
raise InvalidTokenException('Invalid delegate token code: %s' % code)
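A minimal sketch of delegate-token handling (not part of this commit; the namespace, repository, and friendly name are placeholders, and a configured database is assumed).

from data.model import token

def example_delegate_token():
  created = token.create_delegate_token('someorg', 'somerepo', 'CI pull token')
  loaded = token.load_token_data(created.code)
  print('Role: %s on %s' % (loaded.role.name, loaded.repository.name))
  token.set_repo_delegate_token_role('someorg', 'somerepo', created.code, 'write')
  token.delete_delegate_token('someorg', 'somerepo', created.code)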
|
657  data/model/user.py  Normal file
|
@ -0,0 +1,657 @@
|
|||
import bcrypt
|
||||
import logging
|
||||
import json
|
||||
|
||||
from peewee import JOIN_LEFT_OUTER, IntegrityError, fn
|
||||
from uuid import uuid4
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from data.database import (User, LoginService, FederatedLogin, RepositoryPermission, TeamMember,
|
||||
Team, Repository, TupleSelector, TeamRole, Namespace, Visibility,
|
||||
EmailConfirmation, Role, db_for_update, random_string_generator)
|
||||
from data.model import (DataModelException, InvalidPasswordException, InvalidRobotException,
|
||||
InvalidUsernameException, InvalidEmailAddressException,
|
||||
TooManyUsersException, TooManyLoginAttemptsException, db_transaction,
|
||||
notification, config, repository, _basequery)
|
||||
from util.names import format_robot_username, parse_robot_username
|
||||
from util.validation import (validate_username, validate_email, validate_password,
|
||||
INVALID_PASSWORD_MESSAGE)
|
||||
from util.backoff import exponential_backoff
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
|
||||
|
||||
|
||||
def hash_password(password, salt=None):
|
||||
salt = salt or bcrypt.gensalt()
|
||||
return bcrypt.hashpw(password.encode('utf-8'), salt)
|
||||
|
||||
|
||||
def is_create_user_allowed():
|
||||
return True
|
||||
|
||||
|
||||
def create_user(username, password, email, auto_verify=False):
|
||||
""" Creates a regular user, if allowed. """
|
||||
if not validate_password(password):
|
||||
raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)
|
||||
|
||||
if not is_create_user_allowed():
|
||||
raise TooManyUsersException()
|
||||
|
||||
created = create_user_noverify(username, email)
|
||||
created.password_hash = hash_password(password)
|
||||
created.verified = auto_verify
|
||||
created.save()
|
||||
|
||||
return created
|
||||
|
||||
|
||||
def create_user_noverify(username, email):
|
||||
if not validate_email(email):
|
||||
raise InvalidEmailAddressException('Invalid email address: %s' % email)
|
||||
|
||||
(username_valid, username_issue) = validate_username(username)
|
||||
if not username_valid:
|
||||
raise InvalidUsernameException('Invalid username %s: %s' % (username, username_issue))
|
||||
|
||||
try:
|
||||
existing = User.get((User.username == username) | (User.email == email))
|
||||
|
||||
logger.info('Existing user with same username or email.')
|
||||
|
||||
# A user already exists with either the same username or email
|
||||
if existing.username == username:
|
||||
raise InvalidUsernameException('Username has already been taken: %s' %
|
||||
username)
|
||||
raise InvalidEmailAddressException('Email has already been used: %s' %
|
||||
email)
|
||||
|
||||
except User.DoesNotExist:
|
||||
# This is actually the happy path
|
||||
logger.debug('Email and username are unique!')
|
||||
|
||||
try:
|
||||
return User.create(username=username, email=email)
|
||||
except Exception as ex:
|
||||
raise DataModelException(ex.message)
|
||||
|
||||
|
||||
def is_username_unique(test_username):
|
||||
try:
|
||||
User.get((User.username == test_username))
|
||||
return False
|
||||
except User.DoesNotExist:
|
||||
return True
|
||||
|
||||
|
||||
def change_password(user, new_password):
|
||||
if not validate_password(new_password):
|
||||
raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)
|
||||
|
||||
pw_hash = hash_password(new_password)
|
||||
user.invalid_login_attempts = 0
|
||||
user.password_hash = pw_hash
|
||||
user.uuid = str(uuid4())
|
||||
user.save()
|
||||
|
||||
# Remove any password required notifications for the user.
|
||||
notification.delete_notifications_by_kind(user, 'password_required')
|
||||
|
||||
|
||||
def change_username(user_id, new_username):
|
||||
(username_valid, username_issue) = validate_username(new_username)
|
||||
if not username_valid:
|
||||
raise InvalidUsernameException('Invalid username %s: %s' % (new_username, username_issue))
|
||||
|
||||
with db_transaction():
|
||||
# Reload the user for update
|
||||
user = db_for_update(User.select().where(User.id == user_id)).get()
|
||||
|
||||
# Rename the robots
|
||||
for robot in db_for_update(_list_entity_robots(user.username)):
|
||||
_, robot_shortname = parse_robot_username(robot.username)
|
||||
new_robot_name = format_robot_username(new_username, robot_shortname)
|
||||
robot.username = new_robot_name
|
||||
robot.save()
|
||||
|
||||
# Rename the user
|
||||
user.username = new_username
|
||||
user.save()
|
||||
return user
|
||||
|
||||
|
||||
def change_invoice_email(user, invoice_email):
|
||||
user.invoice_email = invoice_email
|
||||
user.save()
|
||||
|
||||
|
||||
def change_user_tag_expiration(user, tag_expiration_s):
|
||||
user.removed_tag_expiration_s = tag_expiration_s
|
||||
user.save()
|
||||
|
||||
|
||||
def update_email(user, new_email, auto_verify=False):
|
||||
try:
|
||||
user.email = new_email
|
||||
user.verified = auto_verify
|
||||
user.save()
|
||||
except IntegrityError:
|
||||
raise DataModelException('E-mail address already used')
|
||||
|
||||
|
||||
def create_robot(robot_shortname, parent):
|
||||
(username_valid, username_issue) = validate_username(robot_shortname)
|
||||
if not username_valid:
|
||||
raise InvalidRobotException('The name for the robot \'%s\' is invalid: %s' %
|
||||
(robot_shortname, username_issue))
|
||||
|
||||
username = format_robot_username(parent.username, robot_shortname)
|
||||
|
||||
try:
|
||||
User.get(User.username == username)
|
||||
|
||||
msg = 'Existing robot with name: %s' % username
|
||||
logger.info(msg)
|
||||
raise InvalidRobotException(msg)
|
||||
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
|
||||
try:
|
||||
created = User.create(username=username, robot=True)
|
||||
|
||||
service = LoginService.get(name='quayrobot')
|
||||
password = created.email
|
||||
FederatedLogin.create(user=created, service=service,
|
||||
service_ident=password)
|
||||
|
||||
return created, password
|
||||
except Exception as ex:
|
||||
raise DataModelException(ex.message)
|
||||
|
||||
|
||||
def get_robot(robot_shortname, parent):
|
||||
robot_username = format_robot_username(parent.username, robot_shortname)
|
||||
robot = lookup_robot(robot_username)
|
||||
return robot, robot.email
|
||||
|
||||
|
||||
def lookup_robot(robot_username):
|
||||
try:
|
||||
return (User
|
||||
.select()
|
||||
.join(FederatedLogin)
|
||||
.join(LoginService)
|
||||
.where(LoginService.name == 'quayrobot', User.username == robot_username,
|
||||
User.robot == True)
|
||||
.get())
|
||||
except User.DoesNotExist:
|
||||
raise InvalidRobotException('Could not find robot with username: %s' % robot_username)
|
||||
|
||||
|
||||
def get_matching_robots(name_prefix, username, limit=10):
|
||||
admined_orgs = (_basequery.get_user_organizations(username)
|
||||
.switch(Team)
|
||||
.join(TeamRole)
|
||||
.where(TeamRole.name == 'admin'))
|
||||
|
||||
prefix_checks = False
|
||||
|
||||
for org in admined_orgs:
|
||||
prefix_checks = prefix_checks | (User.username ** (org.username + '+' + name_prefix + '%'))
|
||||
|
||||
prefix_checks = prefix_checks | (User.username ** (username + '+' + name_prefix + '%'))
|
||||
|
||||
return User.select().where(prefix_checks).limit(limit)
|
||||
|
||||
|
||||
def verify_robot(robot_username, password):
|
||||
result = parse_robot_username(robot_username)
|
||||
if result is None:
|
||||
raise InvalidRobotException('%s is an invalid robot name' % robot_username)
|
||||
|
||||
# Find the matching robot.
|
||||
query = (User
|
||||
.select()
|
||||
.join(FederatedLogin)
|
||||
.join(LoginService)
|
||||
.where(FederatedLogin.service_ident == password, LoginService.name == 'quayrobot',
|
||||
User.username == robot_username))
|
||||
|
||||
try:
|
||||
robot = query.get()
|
||||
except User.DoesNotExist:
|
||||
msg = ('Could not find robot with username: %s and supplied password.' %
|
||||
robot_username)
|
||||
raise InvalidRobotException(msg)
|
||||
|
||||
# Find the owner user and ensure it is not disabled.
|
||||
try:
|
||||
owner = User.get(User.username == result[0])
|
||||
except User.DoesNotExist:
|
||||
raise InvalidRobotException('Robot %s owner does not exist' % robot_username)
|
||||
|
||||
if not owner.enabled:
|
||||
raise InvalidRobotException('This user has been disabled. Please contact your administrator.')
|
||||
|
||||
return robot
|
||||
|
||||
def regenerate_robot_token(robot_shortname, parent):
|
||||
robot_username = format_robot_username(parent.username, robot_shortname)
|
||||
|
||||
robot = lookup_robot(robot_username)
|
||||
password = random_string_generator(length=64)()
|
||||
robot.email = password
|
||||
|
||||
service = LoginService.get(name='quayrobot')
|
||||
login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
|
||||
login.service_ident = password
|
||||
|
||||
login.save()
|
||||
robot.save()
|
||||
|
||||
return robot, password
|
||||
|
||||
def delete_robot(robot_username):
|
||||
try:
|
||||
robot = User.get(username=robot_username, robot=True)
|
||||
robot.delete_instance(recursive=True, delete_nullable=True)
|
||||
|
||||
except User.DoesNotExist:
|
||||
raise InvalidRobotException('Could not find robot with username: %s' %
|
||||
robot_username)
|
||||
|
||||
|
||||
def _list_entity_robots(entity_name):
|
||||
""" Return the list of robots for the specified entity. This MUST return a query, not a
|
||||
materialized list so that callers can use db_for_update.
|
||||
"""
|
||||
return (User
|
||||
.select()
|
||||
.join(FederatedLogin)
|
||||
.where(User.robot == True, User.username ** (entity_name + '+%')))
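In peewee, the ** operator used above performs a case-insensitive LIKE, so the query matches every username beginning with the entity name plus '+'. A minimal usage sketch, assuming a hypothetical "acme" namespace:

# Illustrative only; "acme" is a made-up namespace.
robots_query = _list_entity_robots('acme')   # still a lazy peewee query, as the docstring requires
for robot in robots_query:
    print(robot.username)                    # e.g. acme+builder, acme+deploy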
|
||||
|
||||
|
||||
def list_entity_robot_permission_teams(entity_name, include_permissions=False):
|
||||
query = (_list_entity_robots(entity_name))
|
||||
|
||||
fields = [User.username, FederatedLogin.service_ident]
|
||||
if include_permissions:
|
||||
query = (query
|
||||
.join(RepositoryPermission, JOIN_LEFT_OUTER,
|
||||
on=(RepositoryPermission.user == FederatedLogin.user))
|
||||
.join(Repository, JOIN_LEFT_OUTER)
|
||||
.switch(User)
|
||||
.join(TeamMember, JOIN_LEFT_OUTER)
|
||||
.join(Team, JOIN_LEFT_OUTER))
|
||||
|
||||
fields.append(Repository.name)
|
||||
fields.append(Team.name)
|
||||
|
||||
return TupleSelector(query, fields)
|
||||
|
||||
|
||||
def create_federated_user(username, email, service_name, service_id,
|
||||
set_password_notification, metadata={}):
|
||||
if not is_create_user_allowed():
|
||||
raise TooManyUsersException()
|
||||
|
||||
new_user = create_user_noverify(username, email)
|
||||
new_user.verified = True
|
||||
new_user.save()
|
||||
|
||||
service = LoginService.get(LoginService.name == service_name)
|
||||
FederatedLogin.create(user=new_user, service=service,
|
||||
service_ident=service_id,
|
||||
metadata_json=json.dumps(metadata))
|
||||
|
||||
if set_password_notification:
|
||||
notification.create_notification('password_required', new_user)
|
||||
|
||||
return new_user
|
||||
|
||||
|
||||
def attach_federated_login(user, service_name, service_id, metadata={}):
|
||||
service = LoginService.get(LoginService.name == service_name)
|
||||
FederatedLogin.create(user=user, service=service, service_ident=service_id,
|
||||
metadata_json=json.dumps(metadata))
|
||||
return user
|
||||
|
||||
|
||||
def verify_federated_login(service_name, service_id):
|
||||
try:
|
||||
found = (FederatedLogin
|
||||
.select(FederatedLogin, User)
|
||||
.join(LoginService)
|
||||
.switch(FederatedLogin).join(User)
|
||||
.where(FederatedLogin.service_ident == service_id, LoginService.name == service_name)
|
||||
.get())
|
||||
return found.user
|
||||
except FederatedLogin.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def list_federated_logins(user):
|
||||
selected = FederatedLogin.select(FederatedLogin.service_ident,
|
||||
LoginService.name, FederatedLogin.metadata_json)
|
||||
joined = selected.join(LoginService)
|
||||
return joined.where(LoginService.name != 'quayrobot',
|
||||
FederatedLogin.user == user)
|
||||
|
||||
|
||||
def lookup_federated_login(user, service_name):
|
||||
try:
|
||||
return list_federated_logins(user).where(LoginService.name == service_name).get()
|
||||
except FederatedLogin.DoesNotExist:
|
||||
return None
|
||||
|
||||
def create_confirm_email_code(user, new_email=None):
|
||||
if new_email:
|
||||
if not validate_email(new_email):
|
||||
raise InvalidEmailAddressException('Invalid email address: %s' %
|
||||
new_email)
|
||||
|
||||
code = EmailConfirmation.create(user=user, email_confirm=True,
|
||||
new_email=new_email)
|
||||
return code
|
||||
|
||||
|
||||
def confirm_user_email(code):
|
||||
try:
|
||||
code = EmailConfirmation.get(EmailConfirmation.code == code,
|
||||
EmailConfirmation.email_confirm == True)
|
||||
except EmailConfirmation.DoesNotExist:
|
||||
raise DataModelException('Invalid email confirmation code.')
|
||||
|
||||
user = code.user
|
||||
user.verified = True
|
||||
|
||||
old_email = None
|
||||
new_email = code.new_email
|
||||
if new_email and new_email != old_email:
|
||||
if find_user_by_email(new_email):
|
||||
raise DataModelException('E-mail address already used.')
|
||||
|
||||
old_email = user.email
|
||||
user.email = new_email
|
||||
|
||||
user.save()
|
||||
|
||||
code.delete_instance()
|
||||
|
||||
return user, new_email, old_email
|
||||
|
||||
|
||||
def create_reset_password_email_code(email):
|
||||
try:
|
||||
user = User.get(User.email == email)
|
||||
except User.DoesNotExist:
|
||||
raise InvalidEmailAddressException('Email address was not found.')
|
||||
|
||||
if user.organization:
|
||||
raise InvalidEmailAddressException('Organizations can not have passwords.')
|
||||
|
||||
code = EmailConfirmation.create(user=user, pw_reset=True)
|
||||
return code
|
||||
|
||||
|
||||
def validate_reset_code(code):
|
||||
try:
|
||||
code = EmailConfirmation.get(EmailConfirmation.code == code,
|
||||
EmailConfirmation.pw_reset == True)
|
||||
except EmailConfirmation.DoesNotExist:
|
||||
return None
|
||||
|
||||
user = code.user
|
||||
code.delete_instance()
|
||||
|
||||
return user
|
||||
|
||||
|
||||
def find_user_by_email(email):
|
||||
try:
|
||||
return User.get(User.email == email)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_nonrobot_user(username):
|
||||
try:
|
||||
return User.get(User.username == username, User.organization == False, User.robot == False)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_user(username):
|
||||
try:
|
||||
return User.get(User.username == username, User.organization == False)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_namespace_user(username):
|
||||
try:
|
||||
return User.get(User.username == username)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_user_or_org(username):
|
||||
try:
|
||||
return User.get(User.username == username, User.robot == False)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_user_by_id(user_db_id):
|
||||
try:
|
||||
return User.get(User.id == user_db_id, User.organization == False)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_namespace_by_user_id(namespace_user_db_id):
|
||||
try:
|
||||
return User.get(User.id == namespace_user_db_id, User.robot == False).username
|
||||
except User.DoesNotExist:
|
||||
raise InvalidUsernameException('User with id does not exist: %s' % namespace_user_db_id)
|
||||
|
||||
|
||||
def get_user_by_uuid(user_uuid):
|
||||
try:
|
||||
return User.get(User.uuid == user_uuid, User.organization == False)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_user_or_org_by_customer_id(customer_id):
|
||||
try:
|
||||
return User.get(User.stripe_id == customer_id)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_matching_user_namespaces(namespace_prefix, username, limit=10):
|
||||
base_query = (Namespace
|
||||
.select()
|
||||
.distinct()
|
||||
.limit(limit)
|
||||
.join(Repository, on=(Repository.namespace_user == Namespace.id))
|
||||
.join(RepositoryPermission, JOIN_LEFT_OUTER)
|
||||
.where(Namespace.username ** (namespace_prefix + '%')))
|
||||
|
||||
return _basequery.filter_to_repos_for_user(base_query, username)
|
||||
|
||||
def get_matching_users(username_prefix, robot_namespace=None,
|
||||
organization=None):
|
||||
direct_user_query = (User.username ** (username_prefix + '%') &
|
||||
(User.organization == False) & (User.robot == False))
|
||||
|
||||
if robot_namespace:
|
||||
robot_prefix = format_robot_username(robot_namespace, username_prefix)
|
||||
direct_user_query = (direct_user_query |
|
||||
(User.username ** (robot_prefix + '%') &
|
||||
(User.robot == True)))
|
||||
|
||||
query = (User
|
||||
.select(User.username, User.email, User.robot)
|
||||
.group_by(User.username, User.email, User.robot)
|
||||
.where(direct_user_query))
|
||||
|
||||
if organization:
|
||||
query = (query
|
||||
.select(User.username, User.email, User.robot, fn.Sum(Team.id))
|
||||
.join(TeamMember, JOIN_LEFT_OUTER)
|
||||
.join(Team, JOIN_LEFT_OUTER, on=((Team.id == TeamMember.team) &
|
||||
(Team.organization == organization))))
|
||||
|
||||
class MatchingUserResult(object):
|
||||
def __init__(self, *args):
|
||||
self.username = args[0]
|
||||
self.email = args[1]
|
||||
self.robot = args[2]
|
||||
|
||||
if organization:
|
||||
self.is_org_member = (args[3] != None)
|
||||
else:
|
||||
self.is_org_member = None
|
||||
|
||||
return (MatchingUserResult(*args) for args in query.tuples().limit(10))
|
||||
|
||||
|
||||
def verify_user(username_or_email, password):
|
||||
# Make sure we didn't get any unicode for the username.
|
||||
try:
|
||||
str(username_or_email)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
try:
|
||||
fetched = User.get((User.username == username_or_email) |
|
||||
(User.email == username_or_email))
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
now = datetime.utcnow()
|
||||
|
||||
if fetched.invalid_login_attempts > 0:
|
||||
can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
|
||||
fetched.last_invalid_login)
|
||||
|
||||
if can_retry_at > now:
|
||||
retry_after = can_retry_at - now
|
||||
raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())
|
||||
|
||||
if (fetched.password_hash and
|
||||
hash_password(password, fetched.password_hash) == fetched.password_hash):
|
||||
if fetched.invalid_login_attempts > 0:
|
||||
fetched.invalid_login_attempts = 0
|
||||
fetched.save()
|
||||
|
||||
return fetched
|
||||
|
||||
fetched.invalid_login_attempts += 1
|
||||
fetched.last_invalid_login = now
|
||||
fetched.save()
|
||||
|
||||
# We weren't able to authorize the user
|
||||
return None
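The password check in verify_user leans on a bcrypt property: hashing a candidate password with the stored hash as the salt reproduces the stored hash only when the password matches. A minimal sketch of that comparison (values are illustrative):

import bcrypt

stored = bcrypt.hashpw('s3cret'.encode('utf-8'), bcrypt.gensalt())  # what hash_password() persists
candidate = 'guess'
# Same comparison as in verify_user: re-hash the candidate using the stored hash as salt.
matches = bcrypt.hashpw(candidate.encode('utf-8'), stored) == stored  # False for a wrong password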
|
||||
|
||||
|
||||
def get_all_repo_users(namespace_name, repository_name):
|
||||
return (RepositoryPermission
|
||||
.select(User.username, User.email, User.robot, Role.name, RepositoryPermission)
|
||||
.join(User)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Role)
|
||||
.switch(RepositoryPermission)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
|
||||
|
||||
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
|
||||
return (User
|
||||
.select()
|
||||
.distinct()
|
||||
.join(TeamMember)
|
||||
.join(Team)
|
||||
.join(RepositoryPermission)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == namespace_name, Repository.name == repository_name))
|
||||
|
||||
|
||||
def get_all_repo_users_transitive(namespace_name, repository_name):
|
||||
# Load the users found via teams and directly via permissions.
|
||||
via_teams = get_all_repo_users_transitive_via_teams(namespace_name, repository_name)
|
||||
directly = [perm.user for perm in get_all_repo_users(namespace_name, repository_name)]
|
||||
|
||||
# Filter duplicates.
|
||||
user_set = set()
|
||||
|
||||
def check_add(u):
|
||||
if u.username in user_set:
|
||||
return False
|
||||
|
||||
user_set.add(u.username)
|
||||
return True
|
||||
|
||||
return [user for user in list(directly) + list(via_teams) if check_add(user)]
|
||||
|
||||
|
||||
def get_private_repo_count(username):
|
||||
return (Repository
|
||||
.select()
|
||||
.join(Visibility)
|
||||
.switch(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Namespace.username == username, Visibility.name == 'private')
|
||||
.count())
|
||||
|
||||
|
||||
def get_active_users():
|
||||
return User.select().where(User.organization == False, User.robot == False)
|
||||
|
||||
|
||||
def get_active_user_count():
|
||||
return get_active_users().count()
|
||||
|
||||
|
||||
def detach_external_login(user, service_name):
|
||||
try:
|
||||
service = LoginService.get(name=service_name)
|
||||
except LoginService.DoesNotExist:
|
||||
return
|
||||
|
||||
FederatedLogin.delete().where(FederatedLogin.user == user,
|
||||
FederatedLogin.service == service).execute()
|
||||
|
||||
|
||||
def delete_user(user):
|
||||
user.delete_instance(recursive=True, delete_nullable=True)
|
||||
|
||||
# TODO: also delete any repository data associated
|
||||
|
||||
|
||||
def get_pull_credentials(robotname):
|
||||
try:
|
||||
robot = lookup_robot(robotname)
|
||||
except InvalidRobotException:
|
||||
return None
|
||||
|
||||
try:
|
||||
login_info = FederatedLogin.get(user=robot)
|
||||
except FederatedLogin.DoesNotExist:
|
||||
return None
|
||||
|
||||
return {
|
||||
'username': robot.username,
|
||||
'password': login_info.service_ident,
|
||||
'registry': '%s://%s/v1/' % (config.app_config['PREFERRED_URL_SCHEME'],
|
||||
config.app_config['SERVER_HOSTNAME']),
|
||||
}
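With the user helpers now living in data/model/user.py, call sites elsewhere in this commit move from the flat model namespace to the submodule. A minimal before/after sketch (the username is illustrative):

# Before the split:
user = model.get_user('someuser')
# After the split:
user = model.user.get_user('someuser')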
|
|
@ -3,18 +3,21 @@ import logging
|
|||
import json
|
||||
import itertools
|
||||
import uuid
|
||||
import struct
|
||||
import os
|
||||
import urllib
|
||||
import jwt
|
||||
|
||||
from util.aes import AESCipher
|
||||
from util.validation import generate_valid_usernames
|
||||
from data import model
|
||||
from collections import namedtuple
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import features
|
||||
|
||||
from data import model
|
||||
from util.aes import AESCipher
|
||||
from util.validation import generate_valid_usernames
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
if os.environ.get('LDAP_DEBUG') == '1':
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
|
@ -25,7 +28,7 @@ if os.environ.get('LDAP_DEBUG') == '1':
|
|||
|
||||
|
||||
def _get_federated_user(username, email, federated_service, create_new_user):
|
||||
db_user = model.verify_federated_login(federated_service, username)
|
||||
db_user = model.user.verify_federated_login(federated_service, username)
|
||||
if not db_user:
|
||||
if not create_new_user:
|
||||
return (None, 'Invalid user')
|
||||
|
@ -33,14 +36,14 @@ def _get_federated_user(username, email, federated_service, create_new_user):
|
|||
# We must create the user in our db
|
||||
valid_username = None
|
||||
for valid_username in generate_valid_usernames(username):
|
||||
if model.is_username_unique(valid_username):
|
||||
if model.user.is_username_unique(valid_username):
|
||||
break
|
||||
|
||||
if not valid_username:
|
||||
logger.error('Unable to pick a username for user: %s', username)
|
||||
return (None, 'Unable to pick a username. Please report this to your administrator.')
|
||||
|
||||
db_user = model.create_federated_user(valid_username, email, federated_service, username,
|
||||
db_user = model.user.create_federated_user(valid_username, email, federated_service, username,
|
||||
set_password_notification=False)
|
||||
else:
|
||||
# Update the db attributes from ldap
|
||||
|
@ -109,11 +112,11 @@ class JWTAuthUsers(object):
|
|||
return _get_federated_user(payload['sub'], payload['email'], 'jwtauthn', create_new_user)
|
||||
|
||||
def confirm_existing_user(self, username, password):
|
||||
db_user = model.get_user(username)
|
||||
db_user = model.user.get_user(username)
|
||||
if not db_user:
|
||||
return (None, 'Invalid user')
|
||||
|
||||
federated_login = model.lookup_federated_login(db_user, 'jwtauthn')
|
||||
federated_login = model.user.lookup_federated_login(db_user, 'jwtauthn')
|
||||
if not federated_login:
|
||||
return (None, 'Invalid user')
|
||||
|
||||
|
@ -123,7 +126,7 @@ class JWTAuthUsers(object):
|
|||
class DatabaseUsers(object):
|
||||
def verify_user(self, username_or_email, password):
|
||||
""" Simply delegate to the model implementation. """
|
||||
result = model.verify_user(username_or_email, password)
|
||||
result = model.user.verify_user(username_or_email, password)
|
||||
if not result:
|
||||
return (None, 'Invalid Username or Password')
|
||||
|
||||
|
@ -239,11 +242,11 @@ class LDAPUsers(object):
|
|||
""" Verify the username and password by looking up the *LDAP* username and confirming the
|
||||
password.
|
||||
"""
|
||||
db_user = model.get_user(username)
|
||||
db_user = model.user.get_user(username)
|
||||
if not db_user:
|
||||
return (None, 'Invalid user')
|
||||
|
||||
federated_login = model.lookup_federated_login(db_user, 'ldap')
|
||||
federated_login = model.user.lookup_federated_login(db_user, 'ldap')
|
||||
if not federated_login:
|
||||
return (None, 'Invalid user')
|
||||
|
||||
|
@ -399,8 +402,6 @@ class UserAuthentication(object):
|
|||
def verify_user(self, username_or_email, password, basic_auth=False):
|
||||
# First try to decode the password as a signed token.
|
||||
if basic_auth:
|
||||
import features
|
||||
|
||||
decrypted = self._decrypt_user_password(password)
|
||||
if decrypted is None:
|
||||
# This is a normal password.
|
||||
|
|
|
@ -34,6 +34,8 @@ def content_path(digest):
|
|||
if parsed.is_tarsum:
|
||||
components.extend(['tarsum', parsed.tarsum_version])
|
||||
|
||||
# Generate a prefix which is always two characters, and which will be filled with leading zeros
|
||||
# if the input does not contain at least two characters. e.g. ABC -> AB, A -> 0A
|
||||
prefix = parsed.hash_bytes[0:2].zfill(2)
|
||||
components.extend([parsed.hash_alg, prefix, parsed.hash_bytes])
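The zfill call is what guarantees the two-character prefix described in the comment above. A minimal sketch mirroring the comment's own example:

print('ABC'[0:2].zfill(2))   # AB
print('A'[0:2].zfill(2))     # 0A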
|
||||
|
||||
|
|
|
@ -249,7 +249,7 @@ def require_repo_permission(permission_class, scope, allow_public=False):
|
|||
permission = permission_class(namespace, repository)
|
||||
if (permission.can() or
|
||||
(allow_public and
|
||||
model.repository_is_public(namespace, repository))):
|
||||
model.repository.repository_is_public(namespace, repository))):
|
||||
return func(self, namespace, repository, *args, **kwargs)
|
||||
raise Unauthorized()
|
||||
return wrapped
|
||||
|
@ -376,7 +376,7 @@ def log_action(kind, user_or_orgname, metadata=None, repo=None):
|
|||
metadata['oauth_token_application'] = oauth_token.application.name
|
||||
|
||||
performer = get_authenticated_user()
|
||||
model.log_action(kind, user_or_orgname, performer=performer, ip=request.remote_addr,
|
||||
model.log.log_action(kind, user_or_orgname, performer=performer, ip=request.remote_addr,
|
||||
metadata=metadata, repository=repo)
|
||||
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ from flask import request
|
|||
from app import billing
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, log_action,
|
||||
related_user_resource, internal_only, Unauthorized, NotFound,
|
||||
require_user_admin, show_if, hide_if, path_param, require_scope, abort)
|
||||
require_user_admin, show_if, path_param, require_scope, abort)
|
||||
from endpoints.api.subscribe import subscribe, subscription_view
|
||||
from auth.permissions import AdministerOrganizationPermission
|
||||
from auth.auth_context import get_authenticated_user
|
||||
|
@ -225,7 +225,7 @@ class OrganizationCard(ApiResource):
|
|||
""" Get the organization's credit card. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
return get_card(organization)
|
||||
|
||||
raise Unauthorized()
|
||||
|
@ -236,7 +236,7 @@ class OrganizationCard(ApiResource):
|
|||
""" Update the orgnaization's credit card. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
token = request.get_json()['token']
|
||||
response = set_card(organization, token)
|
||||
log_action('account_change_cc', orgname)
|
||||
|
@ -288,7 +288,7 @@ class UserPlan(ApiResource):
|
|||
""" Fetch any existing subscription for the user. """
|
||||
cus = None
|
||||
user = get_authenticated_user()
|
||||
private_repos = model.get_private_repo_count(user.username)
|
||||
private_repos = model.user.get_private_repo_count(user.username)
|
||||
|
||||
if user.stripe_id:
|
||||
try:
|
||||
|
@ -345,7 +345,7 @@ class OrganizationPlan(ApiResource):
|
|||
request_data = request.get_json()
|
||||
plan = request_data['plan']
|
||||
token = request_data['token'] if 'token' in request_data else None
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
return subscribe(organization, plan, token, True) # Business plan required
|
||||
|
||||
raise Unauthorized()
|
||||
|
@ -357,8 +357,8 @@ class OrganizationPlan(ApiResource):
|
|||
cus = None
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
private_repos = model.get_private_repo_count(orgname)
|
||||
organization = model.get_organization(orgname)
|
||||
private_repos = model.user.get_private_repo_count(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
if organization.stripe_id:
|
||||
try:
|
||||
cus = billing.Customer.retrieve(organization.stripe_id)
|
||||
|
@ -406,7 +406,7 @@ class OrganizationInvoiceList(ApiResource):
|
|||
""" List the invoices for the specified orgnaization. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
if not organization.stripe_id:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -519,7 +519,7 @@ class OrganizationInvoiceFieldList(ApiResource):
|
|||
""" List the invoice fields for the organization. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
if not organization.stripe_id:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -534,7 +534,7 @@ class OrganizationInvoiceFieldList(ApiResource):
|
|||
""" Creates a new invoice field. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
if not organization.stripe_id:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -558,7 +558,7 @@ class OrganizationInvoiceField(ApiResource):
|
|||
""" Deletes the invoice field for the current user. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
if not organization.stripe_id:
|
||||
raise NotFound()
|
||||
|
||||
|
|
|
@ -2,10 +2,9 @@
|
|||
|
||||
import logging
|
||||
import json
|
||||
import time
|
||||
import datetime
|
||||
|
||||
from flask import request, redirect
|
||||
from flask import request
|
||||
|
||||
from app import app, userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
|
||||
from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
|
||||
|
@ -14,7 +13,8 @@ from endpoints.api import (RepositoryParamResource, parse_args, query_param, nic
|
|||
path_param, InvalidRequest, require_repo_admin)
|
||||
from endpoints.building import start_build, PreparedBuild
|
||||
from endpoints.trigger import BuildTriggerHandler
|
||||
from data import model, database
|
||||
from data import database
|
||||
from data import model
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
|
||||
AdministerRepositoryPermission, AdministerOrganizationPermission)
|
||||
|
@ -192,7 +192,7 @@ class RepositoryBuildList(RepositoryParamResource):
|
|||
if since is not None:
|
||||
since = datetime.datetime.utcfromtimestamp(since)
|
||||
|
||||
builds = model.list_repository_builds(namespace, repository, limit, since=since)
|
||||
builds = model.build.list_repository_builds(namespace, repository, limit, since=since)
|
||||
return {
|
||||
'builds': [build_status_view(build) for build in builds]
|
||||
}
|
||||
|
@ -214,12 +214,13 @@ class RepositoryBuildList(RepositoryParamResource):
|
|||
if pull_robot_name:
|
||||
result = parse_robot_username(pull_robot_name)
|
||||
if result:
|
||||
pull_robot = model.lookup_robot(pull_robot_name)
|
||||
if not pull_robot:
|
||||
try:
|
||||
model.user.lookup_robot(pull_robot_name)
|
||||
except model.InvalidRobotException:
|
||||
raise NotFound()
|
||||
|
||||
# Make sure the user has administer permissions for the robot's namespace.
|
||||
(robot_namespace, shortname) = result
|
||||
(robot_namespace, _) = result
|
||||
if not AdministerOrganizationPermission(robot_namespace).can():
|
||||
raise Unauthorized()
|
||||
else:
|
||||
|
@ -228,14 +229,14 @@ class RepositoryBuildList(RepositoryParamResource):
|
|||
# Check if the dockerfile resource has already been used. If so, then it
|
||||
# can only be reused if the user has access to the repository in which the
|
||||
# dockerfile was previously built.
|
||||
associated_repository = model.get_repository_for_resource(dockerfile_id)
|
||||
associated_repository = model.build.get_repository_for_resource(dockerfile_id)
|
||||
if associated_repository:
|
||||
if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
|
||||
associated_repository.name):
|
||||
raise Unauthorized()
|
||||
|
||||
# Start the build.
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
|
||||
prepared = PreparedBuild()
|
||||
prepared.build_name = user_files.get_file_checksum(dockerfile_id)
|
||||
|
@ -267,8 +268,8 @@ class RepositoryBuildResource(RepositoryParamResource):
|
|||
def get(self, namespace, repository, build_uuid):
|
||||
""" Returns information about a build. """
|
||||
try:
|
||||
build = model.get_repository_build(build_uuid)
|
||||
except model.InvalidRepositoryBuildException:
|
||||
build = model.build.get_repository_build(build_uuid)
|
||||
except model.build.InvalidRepositoryBuildException:
|
||||
raise NotFound()
|
||||
|
||||
return build_status_view(build)
|
||||
|
@ -278,14 +279,14 @@ class RepositoryBuildResource(RepositoryParamResource):
|
|||
def delete(self, namespace, repository, build_uuid):
|
||||
""" Cancels a repository build if it has not yet been picked up by a build worker. """
|
||||
try:
|
||||
build = model.get_repository_build(build_uuid)
|
||||
except model.InvalidRepositoryBuildException:
|
||||
build = model.build.get_repository_build(build_uuid)
|
||||
except model.build.InvalidRepositoryBuildException:
|
||||
raise NotFound()
|
||||
|
||||
if build.repository.name != repository or build.repository.namespace_user.username != namespace:
|
||||
raise NotFound()
|
||||
|
||||
if model.cancel_repository_build(build, dockerfile_build_queue):
|
||||
if model.build.cancel_repository_build(build, dockerfile_build_queue):
|
||||
return 'Okay', 201
|
||||
else:
|
||||
raise InvalidRequest('Build is currently running or has finished')
|
||||
|
@ -300,7 +301,7 @@ class RepositoryBuildStatus(RepositoryParamResource):
|
|||
@nickname('getRepoBuildStatus')
|
||||
def get(self, namespace, repository, build_uuid):
|
||||
""" Return the status for the builds specified by the build uuids. """
|
||||
build = model.get_repository_build(build_uuid)
|
||||
build = model.build.get_repository_build(build_uuid)
|
||||
if (not build or build.repository.name != repository or
|
||||
build.repository.namespace_user.username != namespace):
|
||||
raise NotFound()
|
||||
|
@ -319,7 +320,7 @@ class RepositoryBuildLogs(RepositoryParamResource):
|
|||
""" Return the build logs for the build specified by the build uuid. """
|
||||
response_obj = {}
|
||||
|
||||
build = model.get_repository_build(build_uuid)
|
||||
build = model.build.get_repository_build(build_uuid)
|
||||
if (not build or build.repository.name != repository or
|
||||
build.repository.namespace_user.username != namespace):
|
||||
raise NotFound()
|
||||
|
|
|
@ -60,8 +60,8 @@ class RepositoryImageList(RepositoryParamResource):
|
|||
@nickname('listRepositoryImages')
|
||||
def get(self, namespace, repository):
|
||||
""" List the images for the specified repository. """
|
||||
all_images = model.get_repository_images(namespace, repository)
|
||||
all_tags = model.list_repository_tags(namespace, repository)
|
||||
all_images = model.image.get_repository_images(namespace, repository)
|
||||
all_tags = model.tag.list_repository_tags(namespace, repository)
|
||||
|
||||
tags_by_image_id = defaultdict(list)
|
||||
found_image_ids = set()
|
||||
|
@ -96,13 +96,13 @@ class RepositoryImage(RepositoryParamResource):
|
|||
@nickname('getImage')
|
||||
def get(self, namespace, repository, image_id):
|
||||
""" Get the information available for the specified image. """
|
||||
image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
if not image:
|
||||
raise NotFound()
|
||||
|
||||
# Lookup all the ancestor images for the image.
|
||||
image_map = {}
|
||||
for current_image in model.get_parent_images(namespace, repository, image):
|
||||
for current_image in model.image.get_parent_images(namespace, repository, image):
|
||||
image_map[str(current_image.id)] = current_image
|
||||
|
||||
return historical_image_view(image, image_map)
|
||||
|
@ -119,7 +119,7 @@ class RepositoryImageChanges(RepositoryParamResource):
|
|||
@nickname('getImageChanges')
|
||||
def get(self, namespace, repository, image_id):
|
||||
""" Get the list of changes for the specified image. """
|
||||
image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
|
||||
if not image:
|
||||
raise NotFound()
|
||||
|
|
|
@ -37,7 +37,7 @@ def log_view(log):
|
|||
def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None):
|
||||
performer = None
|
||||
if performer_name:
|
||||
performer = model.get_user(performer_name)
|
||||
performer = model.user.get_user(performer_name)
|
||||
|
||||
if start_time:
|
||||
try:
|
||||
|
@ -58,7 +58,7 @@ def get_logs(start_time, end_time, performer_name=None, repository=None, namespa
|
|||
if not end_time:
|
||||
end_time = datetime.today()
|
||||
|
||||
logs = model.list_logs(start_time, end_time, performer=performer, repository=repository,
|
||||
logs = model.log.list_logs(start_time, end_time, performer=performer, repository=repository,
|
||||
namespace=namespace)
|
||||
return {
|
||||
'start_time': format_date(start_time),
|
||||
|
@ -78,7 +78,7 @@ class RepositoryLogs(RepositoryParamResource):
|
|||
@query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
|
||||
def get(self, args, namespace, repository):
|
||||
""" List the logs for the specified repository. """
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
if not repo:
|
||||
raise NotFound()
|
||||
|
||||
|
|
|
@ -4,6 +4,8 @@ import logging
|
|||
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
|
||||
from app import billing as stripe, avatar
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
|
||||
related_user_resource, internal_only, Unauthorized, NotFound,
|
||||
|
@ -18,8 +20,6 @@ from auth import scopes
|
|||
from data import model
|
||||
from data.billing import get_plan
|
||||
|
||||
import features
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -84,22 +84,19 @@ class OrganizationList(ApiResource):
|
|||
existing = None
|
||||
|
||||
try:
|
||||
existing = model.get_organization(org_data['name'])
|
||||
existing = model.organization.get_organization(org_data['name'])
|
||||
except model.InvalidOrganizationException:
|
||||
pass
|
||||
|
||||
if not existing:
|
||||
try:
|
||||
existing = model.get_user(org_data['name'])
|
||||
except model.InvalidUserException:
|
||||
pass
|
||||
existing = model.user.get_user(org_data['name'])
|
||||
|
||||
if existing:
|
||||
msg = 'A user or organization with this name already exists'
|
||||
raise request_error(message=msg)
|
||||
|
||||
try:
|
||||
model.create_organization(org_data['name'], org_data['email'], user)
|
||||
model.organization.create_organization(org_data['name'], org_data['email'], user)
|
||||
return 'Created', 201
|
||||
except model.DataModelException as ex:
|
||||
raise request_error(exception=ex)
|
||||
|
@ -138,13 +135,13 @@ class Organization(ApiResource):
|
|||
def get(self, orgname):
|
||||
""" Get the details for the specified organization """
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
teams = None
|
||||
if OrganizationMemberPermission(orgname).can():
|
||||
teams = model.get_teams_within_org(org)
|
||||
teams = model.team.get_teams_within_org(org)
|
||||
|
||||
return org_view(org, teams)
|
||||
|
||||
|
@ -157,28 +154,28 @@ class Organization(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
org_data = request.get_json()
|
||||
if 'invoice_email' in org_data:
|
||||
logger.debug('Changing invoice_email for organization: %s', org.username)
|
||||
model.change_invoice_email(org, org_data['invoice_email'])
|
||||
model.user.change_invoice_email(org, org_data['invoice_email'])
|
||||
|
||||
if 'email' in org_data and org_data['email'] != org.email:
|
||||
new_email = org_data['email']
|
||||
if model.find_user_by_email(new_email):
|
||||
if model.user.find_user_by_email(new_email):
|
||||
raise request_error(message='E-mail address already used')
|
||||
|
||||
logger.debug('Changing email address for organization: %s', org.username)
|
||||
model.update_email(org, new_email)
|
||||
model.user.update_email(org, new_email)
|
||||
|
||||
if 'tag_expiration' in org_data:
|
||||
logger.debug('Changing organization tag expiration to: %ss', org_data['tag_expiration'])
|
||||
model.change_user_tag_expiration(org, org_data['tag_expiration'])
|
||||
model.user.change_user_tag_expiration(org, org_data['tag_expiration'])
|
||||
|
||||
teams = model.get_teams_within_org(org)
|
||||
teams = model.team.get_teams_within_org(org)
|
||||
return org_view(org, teams)
|
||||
raise Unauthorized()
|
||||
|
||||
|
@ -197,8 +194,8 @@ class OrgPrivateRepositories(ApiResource):
|
|||
""" Return whether or not this org is allowed to create new private repositories. """
|
||||
permission = CreateRepositoryPermission(orgname)
|
||||
if permission.can():
|
||||
organization = model.get_organization(orgname)
|
||||
private_repos = model.get_private_repo_count(organization.username)
|
||||
organization = model.organization.get_organization(orgname)
|
||||
private_repos = model.user.get_private_repo_count(organization.username)
|
||||
data = {
|
||||
'privateAllowed': False
|
||||
}
|
||||
|
@ -234,7 +231,7 @@ class OrganizationMemberList(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -242,7 +239,7 @@ class OrganizationMemberList(ApiResource):
|
|||
# will return an entry for *every team* a member is on, so we will have
|
||||
# duplicate keys (which is why we pre-build the dictionary).
|
||||
members_dict = {}
|
||||
members = model.list_organization_members_by_teams(org)
|
||||
members = model.team.list_organization_members_by_teams(org)
|
||||
for member in members:
|
||||
if member.user.robot:
|
||||
continue
|
||||
|
@ -264,7 +261,7 @@ class OrganizationMemberList(ApiResource):
|
|||
})
|
||||
|
||||
# Loop to add direct repository permissions.
|
||||
for permission in model.list_organization_member_permissions(org):
|
||||
for permission in model.permission.list_organization_member_permissions(org):
|
||||
username = permission.user.username
|
||||
if not username in members_dict:
|
||||
continue
|
||||
|
@ -292,17 +289,17 @@ class OrganizationMember(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
# Lookup the user.
|
||||
user = model.get_nonrobot_user(membername)
|
||||
user = model.user.get_nonrobot_user(membername)
|
||||
if not user:
|
||||
raise NotFound()
|
||||
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
# Remove the user from the organization.
|
||||
model.remove_organization_member(org, user)
|
||||
model.organization.remove_organization_member(org, user)
|
||||
return 'Deleted', 204
|
||||
|
||||
raise Unauthorized()
|
||||
|
@ -391,7 +388,7 @@ class OrganizationApplications(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -408,18 +405,16 @@ class OrganizationApplications(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
app_data = request.get_json()
|
||||
application = model.oauth.create_application(
|
||||
org, app_data['name'],
|
||||
application = model.oauth.create_application(org, app_data['name'],
|
||||
app_data.get('application_uri', ''),
|
||||
app_data.get('redirect_uri', ''),
|
||||
description=app_data.get('description', ''),
|
||||
avatar_email = app_data.get('avatar_email', None),)
|
||||
|
||||
avatar_email=app_data.get('avatar_email', None))
|
||||
|
||||
app_data.update({
|
||||
'application_name': application.name,
|
||||
|
@ -479,7 +474,7 @@ class OrganizationApplicationResource(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -499,7 +494,7 @@ class OrganizationApplicationResource(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -532,7 +527,7 @@ class OrganizationApplicationResource(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -559,7 +554,7 @@ class OrganizationApplicationResetClientSecret(ApiResource):
|
|||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
try:
|
||||
org = model.get_organization(orgname)
|
||||
org = model.organization.get_organization(orgname)
|
||||
except model.InvalidOrganizationException:
|
||||
raise NotFound()
|
||||
|
||||
|
|
|
@ -46,7 +46,7 @@ class RepositoryTeamPermissionList(RepositoryParamResource):
|
|||
@nickname('listRepoTeamPermissions')
|
||||
def get(self, namespace, repository):
|
||||
""" List all team permission. """
|
||||
repo_perms = model.get_all_repo_teams(namespace, repository)
|
||||
repo_perms = model.permission.get_all_repo_teams(namespace, repository)
|
||||
|
||||
def wrapped_role_view(repo_perm):
|
||||
return wrap_role_view_team(role_view(repo_perm), repo_perm.team)
|
||||
|
@ -68,7 +68,7 @@ class RepositoryUserPermissionList(RepositoryParamResource):
|
|||
# Lookup the organization (if any).
|
||||
org = None
|
||||
try:
|
||||
org = model.get_organization(namespace) # Will raise an error if not org
|
||||
org = model.organization.get_organization(namespace) # Will raise an error if not org
|
||||
except model.InvalidOrganizationException:
|
||||
# This repository isn't under an org
|
||||
pass
|
||||
|
@ -80,7 +80,7 @@ class RepositoryUserPermissionList(RepositoryParamResource):
|
|||
role_view_func = wrapped_role_view
|
||||
|
||||
if org:
|
||||
org_members = model.get_organization_member_set(namespace)
|
||||
org_members = model.organization.get_organization_member_set(namespace)
|
||||
current_func = role_view_func
|
||||
|
||||
def wrapped_role_org_view(repo_perm):
|
||||
|
@ -90,7 +90,7 @@ class RepositoryUserPermissionList(RepositoryParamResource):
|
|||
role_view_func = wrapped_role_org_view
|
||||
|
||||
# Load and return the permissions.
|
||||
repo_perms = model.get_all_repo_users(namespace, repository)
|
||||
repo_perms = model.user.get_all_repo_users(namespace, repository)
|
||||
return {
|
||||
'permissions': {perm.user.username: role_view_func(perm)
|
||||
for perm in repo_perms}
|
||||
|
@ -107,15 +107,15 @@ class RepositoryUserTransitivePermission(RepositoryParamResource):
|
|||
@nickname('getUserTransitivePermission')
|
||||
def get(self, namespace, repository, username):
|
||||
""" Get the fetch the permission for the specified user. """
|
||||
user = model.get_user(username)
|
||||
user = model.user.get_user(username)
|
||||
if not user:
|
||||
raise NotFound
|
||||
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
if not repo:
|
||||
raise NotFound
|
||||
|
||||
permissions = list(model.get_user_repo_permissions(user, repo))
|
||||
permissions = list(model.permission.get_user_repo_permissions(user, repo))
|
||||
return {
|
||||
'permissions': [role_view(permission) for permission in permissions]
|
||||
}
|
||||
|
@ -152,14 +152,13 @@ class RepositoryUserPermission(RepositoryParamResource):
|
|||
@nickname('getUserPermissions')
|
||||
def get(self, namespace, repository, username):
|
||||
""" Get the Fetch the permission for the specified user. """
|
||||
logger.debug('Get repo: %s/%s permissions for user %s' %
|
||||
(namespace, repository, username))
|
||||
perm = model.get_user_reponame_permission(username, namespace, repository)
|
||||
logger.debug('Get repo: %s/%s permissions for user %s', namespace, repository, username)
|
||||
perm = model.permission.get_user_reponame_permission(username, namespace, repository)
|
||||
perm_view = wrap_role_view_user(role_view(perm), perm.user)
|
||||
|
||||
try:
|
||||
model.get_organization(namespace)
|
||||
org_members = model.get_organization_member_set(namespace)
|
||||
model.organization.get_organization(namespace)
|
||||
org_members = model.organization.get_organization_member_set(namespace)
|
||||
perm_view = wrap_role_view_org(perm_view, perm.user, org_members)
|
||||
except model.InvalidOrganizationException:
|
||||
# This repository is not part of an organization
|
||||
|
@ -174,20 +173,19 @@ class RepositoryUserPermission(RepositoryParamResource):
|
|||
""" Update the perimssions for an existing repository. """
|
||||
new_permission = request.get_json()
|
||||
|
||||
logger.debug('Setting permission to: %s for user %s' %
|
||||
(new_permission['role'], username))
|
||||
logger.debug('Setting permission to: %s for user %s', new_permission['role'], username)
|
||||
|
||||
try:
|
||||
perm = model.set_user_repo_permission(username, namespace, repository,
|
||||
perm = model.permission.set_user_repo_permission(username, namespace, repository,
|
||||
new_permission['role'])
|
||||
except model.InvalidUsernameException as ex:
|
||||
except model.DataModelException as ex:
|
||||
raise request_error(exception=ex)
|
||||
|
||||
perm_view = wrap_role_view_user(role_view(perm), perm.user)
|
||||
|
||||
try:
|
||||
model.get_organization(namespace)
|
||||
org_members = model.get_organization_member_set(namespace)
|
||||
model.organization.get_organization(namespace)
|
||||
org_members = model.organization.get_organization_member_set(namespace)
perm_view = wrap_role_view_org(perm_view, perm.user, org_members)
except model.InvalidOrganizationException:
# This repository is not part of an organization

@@ -198,7 +196,7 @@ class RepositoryUserPermission(RepositoryParamResource):
log_action('change_repo_permission', namespace,
{'username': username, 'repo': repository,
'role': new_permission['role']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return perm_view, 200

@@ -207,13 +205,13 @@ class RepositoryUserPermission(RepositoryParamResource):
def delete(self, namespace, repository, username):
""" Delete the permission for the user. """
try:
model.delete_user_permission(username, namespace, repository)
model.permission.delete_user_permission(username, namespace, repository)
except model.DataModelException as ex:
raise request_error(exception=ex)

log_action('delete_repo_permission', namespace,
{'username': username, 'repo': repository},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return 'Deleted', 204

@@ -249,9 +247,8 @@ class RepositoryTeamPermission(RepositoryParamResource):
@nickname('getTeamPermissions')
def get(self, namespace, repository, teamname):
""" Fetch the permission for the specified team. """
logger.debug('Get repo: %s/%s permissions for team %s' %
(namespace, repository, teamname))
perm = model.get_team_reponame_permission(teamname, namespace, repository)
logger.debug('Get repo: %s/%s permissions for team %s', namespace, repository, teamname)
perm = model.permission.get_team_reponame_permission(teamname, namespace, repository)
return role_view(perm)

@require_repo_admin

@@ -261,16 +258,15 @@ class RepositoryTeamPermission(RepositoryParamResource):
""" Update the existing team permission. """
new_permission = request.get_json()

logger.debug('Setting permission to: %s for team %s' %
(new_permission['role'], teamname))
logger.debug('Setting permission to: %s for team %s', new_permission['role'], teamname)

perm = model.set_team_repo_permission(teamname, namespace, repository,
perm = model.permission.set_team_repo_permission(teamname, namespace, repository,
new_permission['role'])

log_action('change_repo_permission', namespace,
{'team': teamname, 'repo': repository,
'role': new_permission['role']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return wrap_role_view_team(role_view(perm), perm.team), 200

@@ -278,10 +274,10 @@ class RepositoryTeamPermission(RepositoryParamResource):
@nickname('deleteTeamPermissions')
def delete(self, namespace, repository, teamname):
""" Delete the permission for the specified team. """
model.delete_team_permission(teamname, namespace, repository)
model.permission.delete_team_permission(teamname, namespace, repository)

log_action('delete_repo_permission', namespace,
{'team': teamname, 'repo': repository},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return 'Deleted', 204
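Illustrative sketch (not part of the diff): every hunk in this file applies the same call-site change, moving helpers from the top level of data.model into per-entity submodules. A minimal before/after sketch of the pattern, using only calls that appear in the hunks above and placeholder values:

# Sketch only; assumes the submodule names shown in the added lines.
from data import model

namespace, repository, username = 'myorg', 'myrepo', 'someuser'  # placeholders

# before: legacy top-level helpers
# repo = model.get_repository(namespace, repository)
# model.delete_user_permission(username, namespace, repository)

# after: the same helpers, namespaced by entity
repo = model.repository.get_repository(namespace, repository)
model.permission.delete_user_permission(username, namespace, repository)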
@@ -3,8 +3,7 @@
from flask import request

from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
log_action, Unauthorized, NotFound, internal_only, path_param,
require_scope)
log_action, Unauthorized, NotFound, path_param, require_scope)
from auth.permissions import AdministerOrganizationPermission
from auth.auth_context import get_authenticated_user
from auth import scopes

@@ -129,12 +128,12 @@ class PermissionPrototypeList(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()

permissions = model.get_prototype_permissions(org)
org_members = model.get_organization_member_set(orgname)
permissions = model.permission.get_prototype_permissions(org)
org_members = model.organization.get_organization_member_set(orgname)
return {'prototypes': [prototype_view(p, org_members) for p in permissions]}

raise Unauthorized()

@@ -147,7 +146,7 @@ class PermissionPrototypeList(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()

@@ -165,9 +164,9 @@ class PermissionPrototypeList(ApiResource):
delegate_username = delegate_name if delegate_kind == 'user' else None
delegate_teamname = delegate_name if delegate_kind == 'team' else None

activating_user = (model.get_user(activating_username) if activating_username else None)
delegate_user = (model.get_user(delegate_username) if delegate_username else None)
delegate_team = (model.get_organization_team(orgname, delegate_teamname)
activating_user = (model.user.get_user(activating_username) if activating_username else None)
delegate_user = (model.user.get_user(delegate_username) if delegate_username else None)
delegate_team = (model.team.get_organization_team(orgname, delegate_teamname)
if delegate_teamname else None)

if activating_username and not activating_user:

@@ -178,10 +177,10 @@ class PermissionPrototypeList(ApiResource):
role_name = details['role']

prototype = model.add_prototype_permission(org, role_name, activating_user,
prototype = model.permission.add_prototype_permission(org, role_name, activating_user,
delegate_user, delegate_team)
log_prototype_action('create_prototype_permission', orgname, prototype)
org_members = model.get_organization_member_set(orgname)
org_members = model.organization.get_organization_member_set(orgname)
return prototype_view(prototype, org_members)

raise Unauthorized()

@@ -221,11 +220,11 @@ class PermissionPrototype(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()

prototype = model.delete_prototype_permission(org, prototypeid)
prototype = model.permission.delete_prototype_permission(org, prototypeid)
if not prototype:
raise NotFound()

@@ -243,23 +242,23 @@ class PermissionPrototype(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()

existing = model.get_prototype_permission(org, prototypeid)
existing = model.permission.get_prototype_permission(org, prototypeid)
if not existing:
raise NotFound()

details = request.get_json()
role_name = details['role']
prototype = model.update_prototype_permission(org, prototypeid, role_name)
prototype = model.permission.update_prototype_permission(org, prototypeid, role_name)
if not prototype:
raise NotFound()

log_prototype_action('modify_prototype_permission', orgname, prototype,
original_role=existing.role.name)
org_members = model.get_organization_member_set(orgname)
org_members = model.organization.get_organization_member_set(orgname)
return prototype_view(prototype, org_members)

raise Unauthorized()
@@ -18,6 +18,7 @@ import features

logger = logging.getLogger(__name__)


def record_view(record):
return {
'email': record.email,

@@ -38,7 +39,7 @@ class RepositoryAuthorizedEmail(RepositoryParamResource):
@nickname('checkRepoEmailAuthorized')
def get(self, namespace, repository, email):
""" Checks to see if the given e-mail address is authorized on this repository. """
record = model.get_email_authorized_for_repo(namespace, repository, email)
record = model.repository.get_email_authorized_for_repo(namespace, repository, email)
if not record:
abort(404)

@@ -51,12 +52,12 @@ class RepositoryAuthorizedEmail(RepositoryParamResource):
""" Starts the authorization process for an e-mail address on a repository. """

with tf(db):
record = model.get_email_authorized_for_repo(namespace, repository, email)
record = model.repository.get_email_authorized_for_repo(namespace, repository, email)
if record and record.confirmed:
return record_view(record)

if not record:
record = model.create_email_authorization_for_repo(namespace, repository, email)
record = model.repository.create_email_authorization_for_repo(namespace, repository, email)

send_repo_authorization_email(namespace, repository, email, record.code)
return record_view(record)
@@ -1,7 +1,6 @@
""" List, create and manage repositories. """

import logging
import json
import datetime

from datetime import timedelta

@@ -9,9 +8,8 @@ from datetime import timedelta
from flask import request

from data import model
from data.model import Namespace
from data.database import (Repository as RepositoryTable, Visibility, RepositoryTag,
RepositoryActionCount, fn)
RepositoryActionCount, Namespace, fn)

from endpoints.api import (truthy_bool, format_date, nickname, log_action, validate_json_request,
require_repo_read, require_repo_write, require_repo_admin,

@@ -20,7 +18,7 @@ from endpoints.api import (truthy_bool, format_date, nickname, log_action, valid
path_param)

from auth.permissions import (ModifyRepositoryPermission, AdministerRepositoryPermission,
CreateRepositoryPermission, ReadRepositoryPermission)
CreateRepositoryPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes

@@ -85,13 +83,13 @@ class RepositoryList(ApiResource):
repository_name = req['repository']
visibility = req['visibility']

existing = model.get_repository(namespace_name, repository_name)
existing = model.repository.get_repository(namespace_name, repository_name)
if existing:
raise request_error(message='Repository already exists')

visibility = req['visibility']

repo = model.create_repository(namespace_name, repository_name, owner, visibility)
repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility)
repo.description = req['description']
repo.save()

@@ -124,7 +122,7 @@ class RepositoryList(ApiResource):
"""Fetch the list of repositories under a variety of situations."""
username = None
if get_authenticated_user():
starred_repos = model.get_user_starred_repositories(get_authenticated_user())
starred_repos = model.repository.get_user_starred_repositories(get_authenticated_user())
star_lookup = set([repo.id for repo in starred_repos])

if args['private']:

@@ -133,7 +131,7 @@ class RepositoryList(ApiResource):
response = {}

# Find the matching repositories.
repo_query = model.get_visible_repositories(username,
repo_query = model.repository.get_visible_repositories(username,
limit=args['limit'],
page=args['page'],
include_public=args['public'],

@@ -145,10 +143,10 @@ class RepositoryList(ApiResource):
repository_ids = [repo.get(RepositoryTable.id) for repo in repo_query]

if args['last_modified']:
last_modified_map = model.get_when_last_modified(repository_ids)
last_modified_map = model.repository.get_when_last_modified(repository_ids)

if args['popularity']:
action_count_map = model.get_action_counts(repository_ids)
action_count_map = model.repository.get_action_counts(repository_ids)

def repo_view(repo_obj):
repo = {

@@ -210,25 +208,26 @@ class Repository(RepositoryParamResource):
}

if tag.lifetime_start_ts > 0:
tag_info['last_modified'] = format_date(datetime.datetime.fromtimestamp(tag.lifetime_start_ts))
last_modified = format_date(datetime.datetime.fromtimestamp(tag.lifetime_start_ts))
tag_info['last_modified'] = last_modified

return tag_info

repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
tags = model.list_repository_tags(namespace, repository, include_storage=True)
tags = model.tag.list_repository_tags(namespace, repository, include_storage=True)
tag_dict = {tag.name: tag_view(tag) for tag in tags}
can_write = ModifyRepositoryPermission(namespace, repository).can()
can_admin = AdministerRepositoryPermission(namespace, repository).can()

is_starred = (model.repository_is_starred(get_authenticated_user(), repo)
is_starred = (model.repository.repository_is_starred(get_authenticated_user(), repo)
if get_authenticated_user() else False)
is_public = model.is_repository_public(repo)
is_public = model.repository.is_repository_public(repo)

(pull_today, pull_thirty_day) = model.get_repository_pulls(repo, timedelta(days=1),
(pull_today, pull_thirty_day) = model.log.get_repository_pulls(repo, timedelta(days=1),
timedelta(days=30))

(push_today, push_thirty_day) = model.get_repository_pushes(repo, timedelta(days=1),
(push_today, push_thirty_day) = model.log.get_repository_pushes(repo, timedelta(days=1),
timedelta(days=30))

return {

@@ -261,7 +260,7 @@ class Repository(RepositoryParamResource):
@validate_json_request('RepoUpdate')
def put(self, namespace, repository):
""" Update the description in the specified repository. """
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
values = request.get_json()
repo.description = values['description']

@@ -279,7 +278,7 @@ class Repository(RepositoryParamResource):
@nickname('deleteRepository')
def delete(self, namespace, repository):
""" Delete a repository. """
model.purge_repository(namespace, repository)
model.repository.purge_repository(namespace, repository)
log_action('delete_repo', namespace,
{'repo': repository, 'namespace': namespace})
return 'Deleted', 204

@@ -315,10 +314,10 @@ class RepositoryVisibility(RepositoryParamResource):
@validate_json_request('ChangeVisibility')
def post(self, namespace, repository):
""" Change the visibility of a repository. """
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
values = request.get_json()
model.set_repository_visibility(repo, values['visibility'])
model.repository.set_repository_visibility(repo, values['visibility'])
log_action('change_repo_visibility', namespace,
{'repo': repository, 'visibility': values['visibility']},
repo=repo)
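Illustrative sketch (not part of the diff): in the repository endpoints above, the Namespace table is now imported from data.database instead of data.model, and repository, tag, and log helpers are reached through their submodules. A hedged sketch of the resulting usage, with placeholder values:

# Sketch only; module paths follow the added lines above, values are placeholders.
from datetime import timedelta
from data import model
from data.database import Repository as RepositoryTable, Namespace

namespace, repository = 'myorg', 'myrepo'  # placeholders
repo = model.repository.get_repository(namespace, repository)
tags = model.tag.list_repository_tags(namespace, repository, include_storage=True)
(pull_today, pull_thirty_day) = model.log.get_repository_pulls(repo, timedelta(days=1),
                                                               timedelta(days=30))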
@@ -2,11 +2,11 @@

import json

from flask import request, abort
from flask import request

from app import notification_queue
from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
log_action, validate_json_request, api, NotFound, request_error,
log_action, validate_json_request, NotFound, request_error,
path_param)
from endpoints.notificationevent import NotificationEvent
from endpoints.notificationmethod import (NotificationMethod,

@@ -15,17 +15,17 @@ from endpoints.notificationhelper import build_notification_data
from data import model


def notification_view(notification):
def notification_view(note):
config = {}
try:
config = json.loads(notification.config_json)
config = json.loads(note.config_json)
except:
config = {}

return {
'uuid': notification.uuid,
'event': notification.event.name,
'method': notification.method.name,
'uuid': note.uuid,
'event': note.event.name,
'method': note.method.name,
'config': config
}

@@ -66,25 +66,25 @@ class RepositoryNotificationList(RepositoryParamResource):
@validate_json_request('NotificationCreateRequest')
def post(self, namespace, repository):
""" Create a new notification for the specified repository. """
repo = model.get_repository(namespace, repository)
json = request.get_json()
repo = model.repository.get_repository(namespace, repository)
parsed = request.get_json()

method_handler = NotificationMethod.get_method(json['method'])
method_handler = NotificationMethod.get_method(parsed['method'])
if not method_handler:
raise request_error(message='Unknown method')

try:
method_handler.validate(repo, json['config'])
method_handler.validate(repo, parsed['config'])
except CannotValidateNotificationMethodException as ex:
raise request_error(message=ex.message)

notification = model.create_repo_notification(repo, json['event'], json['method'],
json['config'])
new_notification = model.notification.create_repo_notification(repo, parsed['event'],
parsed['method'], parsed['config'])

resp = notification_view(notification)
resp = notification_view(new_notification)
log_action('add_repo_notification', namespace,
{'repo': repository, 'notification_id': notification.uuid,
'event': json['event'], 'method': json['method']},
{'repo': repository, 'notification_id': new_notification.uuid,
'event': parsed['event'], 'method': parsed['method']},
repo=repo)
return resp, 201

@@ -92,7 +92,7 @@ class RepositoryNotificationList(RepositoryParamResource):
@nickname('listRepoNotifications')
def get(self, namespace, repository):
""" List the notifications for the specified repository. """
notifications = model.list_repo_notifications(namespace, repository)
notifications = model.notification.list_repo_notifications(namespace, repository)
return {
'notifications': [notification_view(n) for n in notifications]
}

@@ -108,25 +108,25 @@ class RepositoryNotification(RepositoryParamResource):
def get(self, namespace, repository, uuid):
""" Get information for the specified notification. """
try:
notification = model.get_repo_notification(uuid)
found = model.notification.get_repo_notification(uuid)
except model.InvalidNotificationException:
raise NotFound()

if (notification.repository.namespace_user.username != namespace or
notification.repository.name != repository):
if (found.repository.namespace_user.username != namespace or
found.repository.name != repository):
raise NotFound()

return notification_view(notification)
return notification_view(found)

@require_repo_admin
@nickname('deleteRepoNotification')
def delete(self, namespace, repository, uuid):
""" Deletes the specified notification. """
notification = model.delete_repo_notification(namespace, repository, uuid)
deleted = model.notification.delete_repo_notification(namespace, repository, uuid)
log_action('delete_repo_notification', namespace,
{'repo': repository, 'notification_id': uuid,
'event': notification.event.name, 'method': notification.method.name},
repo=model.get_repository(namespace, repository))
'event': deleted.event.name, 'method': deleted.method.name},
repo=model.repository.get_repository(namespace, repository))

return 'No Content', 204

@@ -141,18 +141,18 @@ class TestRepositoryNotification(RepositoryParamResource):
def post(self, namespace, repository, uuid):
""" Queues a test notification for this repository. """
try:
notification = model.get_repo_notification(uuid)
test_note = model.notification.get_repo_notification(uuid)
except model.InvalidNotificationException:
raise NotFound()

if (notification.repository.namespace_user.username != namespace or
notification.repository.name != repository):
if (test_note.repository.namespace_user.username != namespace or
test_note.repository.name != repository):
raise NotFound()

event_info = NotificationEvent.get_event(notification.event.name)
sample_data = event_info.get_sample_data(repository=notification.repository)
notification_data = build_notification_data(notification, sample_data)
notification_queue.put([notification.repository.namespace_user.username, repository,
notification.event.name], json.dumps(notification_data))
event_info = NotificationEvent.get_event(test_note.event.name)
sample_data = event_info.get_sample_data(repository=test_note.repository)
notification_data = build_notification_data(test_note, sample_data)
notification_queue.put([test_note.repository.namespace_user.username, repository,
test_note.event.name], json.dumps(notification_data))

return {}
@@ -45,7 +45,7 @@ class RepositoryTokenList(RepositoryParamResource):
@nickname('listRepoTokens')
def get(self, namespace, repository):
""" List the tokens for the specified repository. """
tokens = model.get_repository_delegate_tokens(namespace, repository)
tokens = model.token.get_repository_delegate_tokens(namespace, repository)

return {
'tokens': {token.code: token_view(token) for token in tokens}

@@ -58,12 +58,11 @@ class RepositoryTokenList(RepositoryParamResource):
""" Create a new repository token. """
token_params = request.get_json()

token = model.create_delegate_token(namespace, repository,
token_params['friendlyName'])
token = model.token.create_delegate_token(namespace, repository, token_params['friendlyName'])

log_action('add_repo_accesstoken', namespace,
{'repo': repository, 'token': token_params['friendlyName']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return token_view(token), 201

@@ -99,7 +98,7 @@ class RepositoryToken(RepositoryParamResource):
def get(self, namespace, repository, code):
""" Fetch the specified repository token information. """
try:
perm = model.get_repo_delegate_token(namespace, repository, code)
perm = model.token.get_repo_delegate_token(namespace, repository, code)
except model.InvalidTokenException:
raise NotFound()

@@ -115,13 +114,13 @@ class RepositoryToken(RepositoryParamResource):
logger.debug('Setting permission to: %s for code %s' %
(new_permission['role'], code))

token = model.set_repo_delegate_token_role(namespace, repository, code,
token = model.token.set_repo_delegate_token_role(namespace, repository, code,
new_permission['role'])

log_action('change_repo_permission', namespace,
{'repo': repository, 'token': token.friendly_name, 'code': code,
'role': new_permission['role']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return token_view(token)

@@ -129,11 +128,11 @@ class RepositoryToken(RepositoryParamResource):
@nickname('deleteToken')
def delete(self, namespace, repository, code):
""" Delete the repository token. """
token = model.delete_delegate_token(namespace, repository, code)
token = model.token.delete_delegate_token(namespace, repository, code)

log_action('delete_repo_accesstoken', namespace,
{'repo': repository, 'token': token.friendly_name,
'code': code},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return 'Deleted', 204
@@ -1,8 +1,8 @@
""" Manage user and organization robot accounts. """

from endpoints.api import (resource, nickname, ApiResource, log_action, related_user_resource,
Unauthorized, require_user_admin, internal_only, require_scope,
path_param, parse_args, truthy_bool, query_param)
Unauthorized, require_user_admin, require_scope, path_param, parse_args,
truthy_bool, query_param)
from auth.permissions import AdministerOrganizationPermission, OrganizationMemberPermission
from auth.auth_context import get_authenticated_user
from auth import scopes

@@ -30,7 +30,8 @@ def permission_view(permission):


def robots_list(prefix, include_permissions=False):
tuples = model.list_entity_robot_permission_teams(prefix, include_permissions=include_permissions)
tuples = model.user.list_entity_robot_permission_teams(prefix,
include_permissions=include_permissions)

robots = {}
robot_teams = set()

@@ -85,7 +86,8 @@ class UserRobotList(ApiResource):


@resource('/v1/user/robots/<robot_shortname>')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
class UserRobot(ApiResource):
""" Resource for managing a user's robots. """
@require_user_admin

@@ -93,7 +95,7 @@ class UserRobot(ApiResource):
def get(self, robot_shortname):
""" Returns the user's robot with the specified name. """
parent = get_authenticated_user()
robot, password = model.get_robot(robot_shortname, parent)
robot, password = model.user.get_robot(robot_shortname, parent)
return robot_view(robot.username, password)

@require_user_admin

@@ -101,7 +103,7 @@ class UserRobot(ApiResource):
def put(self, robot_shortname):
""" Create a new user robot with the specified name. """
parent = get_authenticated_user()
robot, password = model.create_robot(robot_shortname, parent)
robot, password = model.user.create_robot(robot_shortname, parent)
log_action('create_robot', parent.username, {'robot': robot_shortname})
return robot_view(robot.username, password), 201

@@ -110,7 +112,7 @@ class UserRobot(ApiResource):
def delete(self, robot_shortname):
""" Delete an existing robot. """
parent = get_authenticated_user()
model.delete_robot(format_robot_username(parent.username, robot_shortname))
model.user.delete_robot(format_robot_username(parent.username, robot_shortname))
log_action('delete_robot', parent.username, {'robot': robot_shortname})
return 'Deleted', 204

@@ -137,7 +139,8 @@ class OrgRobotList(ApiResource):

@resource('/v1/organization/<orgname>/robots/<robot_shortname>')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
@related_user_resource(UserRobot)
class OrgRobot(ApiResource):
""" Resource for managing an organization's robots. """

@@ -147,8 +150,8 @@ class OrgRobot(ApiResource):
""" Returns the organization's robot with the specified name. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.get_robot(robot_shortname, parent)
parent = model.organization.get_organization(orgname)
robot, password = model.user.get_robot(robot_shortname, parent)
return robot_view(robot.username, password)

raise Unauthorized()

@@ -159,8 +162,8 @@ class OrgRobot(ApiResource):
""" Create a new robot in the organization. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.create_robot(robot_shortname, parent)
parent = model.organization.get_organization(orgname)
robot, password = model.user.create_robot(robot_shortname, parent)
log_action('create_robot', orgname, {'robot': robot_shortname})
return robot_view(robot.username, password), 201

@@ -172,7 +175,7 @@ class OrgRobot(ApiResource):
""" Delete an existing organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
model.delete_robot(format_robot_username(orgname, robot_shortname))
model.user.delete_robot(format_robot_username(orgname, robot_shortname))
log_action('delete_robot', orgname, {'robot': robot_shortname})
return 'Deleted', 204

@@ -180,7 +183,8 @@ class OrgRobot(ApiResource):


@resource('/v1/user/robots/<robot_shortname>/permissions')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
class UserRobotPermissions(ApiResource):
""" Resource for listing the permissions a user's robot has in the system. """
@require_user_admin

@@ -188,8 +192,8 @@ class UserRobotPermissions(ApiResource):
def get(self, robot_shortname):
""" Returns the list of repository permissions for the user's robot. """
parent = get_authenticated_user()
robot, password = model.get_robot(robot_shortname, parent)
permissions = model.list_robot_permissions(robot.username)
robot, _ = model.user.get_robot(robot_shortname, parent)
permissions = model.permission.list_robot_permissions(robot.username)

return {
'permissions': [permission_view(permission) for permission in permissions]

@@ -198,7 +202,8 @@ class UserRobotPermissions(ApiResource):

@resource('/v1/organization/<orgname>/robots/<robot_shortname>/permissions')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
@related_user_resource(UserRobotPermissions)
class OrgRobotPermissions(ApiResource):
""" Resource for listing the permissions an org's robot has in the system. """

@@ -208,9 +213,9 @@ class OrgRobotPermissions(ApiResource):
""" Returns the list of repository permissions for the org's robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.get_robot(robot_shortname, parent)
permissions = model.list_robot_permissions(robot.username)
parent = model.organization.get_organization(orgname)
robot, _ = model.user.get_robot(robot_shortname, parent)
permissions = model.permission.list_robot_permissions(robot.username)

return {
'permissions': [permission_view(permission) for permission in permissions]

@@ -220,7 +225,8 @@ class OrgRobotPermissions(ApiResource):


@resource('/v1/user/robots/<robot_shortname>/regenerate')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
class RegenerateUserRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
@require_user_admin

@@ -228,14 +234,15 @@ class RegenerateUserRobot(ApiResource):
def post(self, robot_shortname):
""" Regenerates the token for a user's robot. """
parent = get_authenticated_user()
robot, password = model.regenerate_robot_token(robot_shortname, parent)
robot, password = model.user.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', parent.username, {'robot': robot_shortname})
return robot_view(robot.username, password)


@resource('/v1/organization/<orgname>/robots/<robot_shortname>/regenerate')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
@related_user_resource(RegenerateUserRobot)
class RegenerateOrgRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """

@@ -245,8 +252,8 @@ class RegenerateOrgRobot(ApiResource):
""" Regenerates the token for an organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.regenerate_robot_token(robot_shortname, parent)
parent = model.organization.get_organization(orgname)
robot, password = model.user.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', orgname, {'robot': robot_shortname})
return robot_view(robot.username, password)
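Illustrative sketch (not part of the diff): robot helpers now live under model.user, and get_robot returns a (robot, password) pair; the permission listings above discard the password via robot, _ = .... A minimal sketch of that pattern with placeholder values:

# Sketch only; call names follow the added lines above, values are placeholders.
from data import model

orgname, robot_shortname = 'myorg', 'builder'  # placeholders
parent = model.organization.get_organization(orgname)
robot, _ = model.user.get_robot(robot_shortname, parent)
permissions = model.permission.list_robot_permissions(robot.username)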
@@ -3,12 +3,12 @@
from endpoints.api import (ApiResource, parse_args, query_param, truthy_bool, nickname, resource,
require_scope, path_param)
from data import model
from auth.permissions import (OrganizationMemberPermission, ViewTeamPermission,
ReadRepositoryPermission, UserAdminPermission,
AdministerOrganizationPermission, ReadRepositoryPermission)
from auth.permissions import (OrganizationMemberPermission, ReadRepositoryPermission,
UserAdminPermission, AdministerOrganizationPermission,
ReadRepositoryPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes
from app import avatar, get_app_url
from app import avatar
from operator import itemgetter
from stringscore import liquidmetal
from util.names import parse_robot_username

@@ -35,7 +35,7 @@ class EntitySearch(ApiResource):
organization = None

try:
organization = model.get_organization(namespace_name)
organization = model.organization.get_organization(namespace_name)

# namespace name was an org
permission = OrganizationMemberPermission(namespace_name)

@@ -43,7 +43,7 @@ class EntitySearch(ApiResource):
robot_namespace = namespace_name

if args['includeTeams']:
teams = model.get_matching_teams(prefix, organization)
teams = model.team.get_matching_teams(prefix, organization)

if args['includeOrgs'] and AdministerOrganizationPermission(namespace_name) \
and namespace_name.startswith(prefix):

@@ -54,7 +54,7 @@ class EntitySearch(ApiResource):
'avatar': avatar.get_data_for_org(organization),
}]

except model.InvalidOrganizationException:
except model.organization.InvalidOrganizationException:
# namespace name was a user
user = get_authenticated_user()
if user and user.username == namespace_name:

@@ -63,7 +63,7 @@ class EntitySearch(ApiResource):
if admin_permission.can():
robot_namespace = namespace_name

users = model.get_matching_users(prefix, robot_namespace, organization)
users = model.user.get_matching_users(prefix, robot_namespace, organization)

def entity_team_view(team):
result = {

@@ -95,18 +95,6 @@ class EntitySearch(ApiResource):
}


def team_view(orgname, team):
view_permission = ViewTeamPermission(orgname, team.name)
role = model.get_team_org_role(team).name
return {
'id': team.id,
'name': team.name,
'description': team.description,
'can_view': view_permission.can(),
'role': role
}


@resource('/v1/find/repository')
class FindRepositories(ApiResource):
""" Resource for finding repositories. """

@@ -130,7 +118,7 @@ class FindRepositories(ApiResource):
if user is not None:
username = user.username

matching = model.get_matching_repositories(prefix, username)
matching = model.repository.get_matching_repositories(prefix, username)
return {
'repositories': [repo_view(repo) for repo in matching
if (repo.visibility.name == 'public' or

@@ -174,7 +162,7 @@ def search_entity_view(username, entity, get_short_name=None):

def conduct_team_search(username, query, encountered_teams, results):
""" Finds the matching teams where the user is a member. """
matching_teams = model.get_matching_user_teams(query, get_authenticated_user(), limit=5)
matching_teams = model.team.get_matching_user_teams(query, get_authenticated_user(), limit=5)
for team in matching_teams:
if team.id in encountered_teams:
continue

@@ -193,7 +181,7 @@ def conduct_team_search(username, query, encountered_teams, results):

def conduct_admined_team_search(username, query, encountered_teams, results):
""" Finds matching teams in orgs admined by the user. """
matching_teams = model.get_matching_admined_teams(query, get_authenticated_user(), limit=5)
matching_teams = model.team.get_matching_admined_teams(query, get_authenticated_user(), limit=5)
for team in matching_teams:
if team.id in encountered_teams:
continue

@@ -212,14 +200,15 @@ def conduct_admined_team_search(username, query, encountered_teams, results):

def conduct_repo_search(username, query, results):
""" Finds matching repositories. """
def can_read(repository):
if repository.is_public:
def can_read(repo):
if repo.is_public:
return True

return ReadRepositoryPermission(repository.namespace_user.username, repository.name).can()
return ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()

only_public = username is None
matching_repos = model.get_sorted_matching_repositories(query, only_public, can_read, limit=5)
matching_repos = model.repository.get_sorted_matching_repositories(query, only_public, can_read,
limit=5)

for repo in matching_repos:
repo_score = math.log(repo.count or 1, 10) or 1

@@ -242,7 +231,7 @@ def conduct_repo_search(username, query, results):

def conduct_namespace_search(username, query, results):
""" Finds matching users and organizations. """
matching_entities = model.get_matching_user_namespaces(query, username, limit=5)
matching_entities = model.user.get_matching_user_namespaces(query, username, limit=5)
for entity in matching_entities:
results.append(search_entity_view(username, entity))

@@ -252,7 +241,7 @@ def conduct_robot_search(username, query, results):
def get_short_name(name):
return parse_robot_username(name)[1]

matching_robots = model.get_matching_robots(query, username, limit=5)
matching_robots = model.user.get_matching_robots(query, username, limit=5)
for robot in matching_robots:
results.append(search_entity_view(username, robot, get_short_name))
@@ -11,6 +11,7 @@ from data.billing import PLANS

import features


logger = logging.getLogger(__name__)


@@ -55,7 +56,7 @@ def subscribe(user, plan, token, require_business_plan):
user.username)
raise request_error(message='No matching plan found')

private_repos = model.get_private_repo_count(user.username)
private_repos = model.user.get_private_repo_count(user.username)

# This is the default response
response_json = {
@@ -2,10 +2,9 @@

import logging
import os
import json
import signal

from flask import abort, Response
from flask import abort
from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if,
require_fresh_login, request, validate_json_request, verify_not_prod)

@@ -14,17 +13,17 @@ from app import app, CONFIG_PROVIDER, superusers
from data import model
from data.database import configure
from auth.permissions import SuperUserPermission
from auth.auth_context import get_authenticated_user
from data.database import User
from util.config.configutil import add_enterprise_config_defaults
from util.config.provider import CannotWriteConfigException
from util.config.validator import validate_service_for_config, CONFIG_FILENAMES
from data.runmigration import run_alembic_migration

import features


logger = logging.getLogger(__name__)


def database_is_valid():
""" Returns whether the database, as configured, is valid. """
if app.config['TESTING']:

@@ -310,7 +309,7 @@ class SuperUserCreateInitialSuperUser(ApiResource):
email = data['email']

# Create the user in the database.
superuser = model.create_user(username, password, email, auto_verify=True)
superuser = model.user.create_user(username, password, email, auto_verify=True)

# Add the user to the config.
config_object = CONFIG_PROVIDER.get_yaml()

@@ -2,33 +2,31 @@

import string
import logging
import json
import os

from random import SystemRandom
from app import app, avatar, superusers, authentication
from flask import request

from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, NotFound, require_user_admin, format_date,
InvalidToken, require_scope, format_date, hide_if, show_if, parse_args,
query_param, abort, require_fresh_login, path_param, verify_not_prod)

from endpoints.api.logs import get_logs

from data import model
from auth.permissions import SuperUserPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
from util.useremails import send_confirmation_email, send_recovery_email

import features

from app import app, avatar, superusers, authentication
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
internal_only, require_scope, show_if, parse_args,
query_param, abort, require_fresh_login, path_param, verify_not_prod)
from endpoints.api.logs import get_logs
from data import model
from auth.permissions import SuperUserPermission
from auth import scopes
from util.useremails import send_confirmation_email, send_recovery_email


logger = logging.getLogger(__name__)


def get_immediate_subdirectories(directory):
return [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))]


def get_services():
services = set(get_immediate_subdirectories(app.config['SYSTEM_SERVICES_PATH']))
services = services - set(app.config['SYSTEM_SERVICE_BLACKLIST'])

@@ -55,7 +53,7 @@ class SuperUserGetLogsForService(ApiResource):
with open(app.config['SYSTEM_LOGS_FILE'], 'r') as f:
logs = [line for line in f if line.find(service + '[') >= 0]

except Exception as ex:
except Exception:
logger.exception('Cannot read logs')
abort(400)

@@ -102,7 +100,6 @@ class SuperUserLogs(ApiResource):
def get(self, args):
""" List the usage logs for the current system. """
if SuperUserPermission().can():
performer_name = args['performer']
start_time = args['starttime']
end_time = args['endtime']

@@ -165,7 +162,7 @@ class SuperUserOrganizationList(ApiResource):
def get(self):
""" Returns a list of all organizations in the system. """
if SuperUserPermission().can():
orgs = model.get_organizations()
orgs = model.organization.get_organizations()
return {
'organizations': [org_view(org) for org in orgs]
}

@@ -204,7 +201,7 @@ class SuperUserList(ApiResource):
def get(self):
""" Returns a list of all users in the system. """
if SuperUserPermission().can():
users = model.get_active_users()
users = model.user.get_active_users()
return {
'users': [user_view(user) for user in users]
}

@@ -229,11 +226,11 @@ class SuperUserList(ApiResource):
password = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(32)])

# Create the user.
user = model.create_user(username, password, email, auto_verify=not features.MAILING)
user = model.user.create_user(username, password, email, auto_verify=not features.MAILING)

# If mailing is turned on, send the user a verification email.
if features.MAILING:
confirmation = model.create_confirm_email_code(user)
confirmation = model.user.create_confirm_email_code(user)
send_confirmation_email(user.username, user.email, confirmation.code)

return {

@@ -258,14 +255,14 @@ class SuperUserSendRecoveryEmail(ApiResource):
@require_scope(scopes.SUPERUSER)
def post(self, username):
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)

if superusers.is_superuser(username):
abort(403)

code = model.create_reset_password_email_code(user.email)
code = model.user.create_reset_password_email_code(user.email)
send_recovery_email(user.email, code.code)
return {
'email': user.email

@@ -309,7 +306,7 @@ class SuperUserManagement(ApiResource):
def get(self, username):
""" Returns information about the specified user. """
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)

@@ -324,14 +321,14 @@ class SuperUserManagement(ApiResource):
def delete(self, username):
""" Deletes the specified user. """
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)

if superusers.is_superuser(username):
abort(403)

model.delete_user(user)
model.user.delete_user(user)
return 'Deleted', 204

abort(403)

@@ -344,7 +341,7 @@ class SuperUserManagement(ApiResource):
def put(self, username):
""" Updates information about the specified user. """
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)

@@ -353,10 +350,10 @@ class SuperUserManagement(ApiResource):

user_data = request.get_json()
if 'password' in user_data:
model.change_password(user, user_data['password'])
model.user.change_password(user, user_data['password'])

if 'email' in user_data:
model.update_email(user, user_data['email'], auto_verify=True)
model.user.update_email(user, user_data['email'], auto_verify=True)

if 'enabled' in user_data:
# Disable/enable the user.

@@ -395,9 +392,9 @@ class SuperUserOrganizationManagement(ApiResource):
def delete(self, name):
""" Deletes the specified organization. """
if SuperUserPermission().can():
org = model.get_organization(name)
org = model.organization.get_organization(name)

model.delete_user(org)
model.user.delete_user(org)
return 'Deleted', 204

abort(403)

@@ -410,11 +407,11 @@ class SuperUserOrganizationManagement(ApiResource):
def put(self, name):
""" Updates information about the specified user. """
if SuperUserPermission().can():
org = model.get_organization(name)
org = model.organization.get_organization(name)
org_data = request.get_json()

if 'name' in org_data:
org = model.change_username(org.id, org_data['name'])
org = model.user.change_username(org.id, org_data['name'])

return org_view(org)

@@ -4,14 +4,11 @@ from flask import request, abort

from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
RepositoryParamResource, log_action, NotFound, validate_json_request,
path_param, format_date, parse_args, query_param)
path_param, parse_args, query_param)
from endpoints.api.image import image_view
from data import model
from auth.auth_context import get_authenticated_user

from datetime import datetime


@resource('/v1/repository/<repopath:repository>/tag/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')

@@ -25,7 +22,7 @@ class ListRepositoryTags(RepositoryParamResource):
@query_param('page', 'Page index for the results. Default 1.', type=int, default=1)
@nickname('listRepoTags')
def get(self, args, namespace, repository):
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
abort(404)

@@ -51,7 +48,7 @@ class ListRepositoryTags(RepositoryParamResource):

# Note: We ask for limit+1 here, so we can check to see if there are
# additional pages of results.
tags = model.list_repository_tag_history(repo, page=page, size=limit+1,
tags = model.tag.list_repository_tag_history(repo, page=page, size=limit+1,
specific_tag=specific_tag)

tags = list(tags)

@@ -90,27 +87,27 @@ class RepositoryTag(RepositoryParamResource):
def put(self, namespace, repository, tag):
""" Change which image a tag points to or create a new tag."""
image_id = request.get_json()['image']
image = model.get_repo_image(namespace, repository, image_id)
image = model.image.get_repo_image(namespace, repository, image_id)
if not image:
raise NotFound()

original_image_id = None
try:
original_tag_image = model.get_tag_image(namespace, repository, tag)
original_tag_image = model.tag.get_tag_image(namespace, repository, tag)
if original_tag_image:
original_image_id = original_tag_image.docker_image_id
except model.DataModelException:
# This is a new tag.
pass

model.create_or_update_tag(namespace, repository, tag, image_id)
model.garbage_collect_repository(namespace, repository)
model.tag.create_or_update_tag(namespace, repository, tag, image_id)
model.repository.garbage_collect_repository(namespace, repository)

username = get_authenticated_user().username
log_action('move_tag' if original_image_id else 'create_tag', namespace,
{'username': username, 'repo': repository, 'tag': tag,
'image': image_id, 'original_image': original_image_id},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return 'Updated', 201

@@ -118,13 +115,13 @@ class RepositoryTag(RepositoryParamResource):
@nickname('deleteFullTag')
def delete(self, namespace, repository, tag):
""" Delete the specified repository tag. """
model.delete_tag(namespace, repository, tag)
model.garbage_collect_repository(namespace, repository)
model.tag.delete_tag(namespace, repository, tag)
model.repository.garbage_collect_repository(namespace, repository)

username = get_authenticated_user().username
log_action('delete_tag', namespace,
{'username': username, 'repo': repository, 'tag': tag},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return 'Deleted', 204

@@ -139,11 +136,11 @@ class RepositoryTagImages(RepositoryParamResource):
def get(self, namespace, repository, tag):
""" List the images for the specified repository tag. """
try:
tag_image = model.get_tag_image(namespace, repository, tag)
tag_image = model.tag.get_tag_image(namespace, repository, tag)
except model.DataModelException:
raise NotFound()

parent_images = model.get_parent_images(namespace, repository, tag_image)
parent_images = model.image.get_parent_images(namespace, repository, tag_image)
image_map = {}
for image in parent_images:
image_map[str(image.id)] = image

@@ -186,21 +183,21 @@ class RevertTag(RepositoryParamResource):
def post(self, namespace, repository, tag):
""" Reverts a repository tag back to a previous image in the repository. """
try:
tag_image = model.get_tag_image(namespace, repository, tag)
tag_image = model.tag.get_tag_image(namespace, repository, tag)
except model.DataModelException:
raise NotFound()

# Revert the tag back to the previous image.
image_id = request.get_json()['image']
model.revert_tag(tag_image.repository, tag, image_id)
model.garbage_collect_repository(namespace, repository)
model.tag.revert_tag(tag_image.repository, tag, image_id)
model.repository.garbage_collect_repository(namespace, repository)

# Log the reversion.
username = get_authenticated_user().username
log_action('revert_tag', namespace,
{'username': username, 'repo': repository, 'tag': tag,
'image': image_id, 'original_image': tag_image.docker_image_id},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))

return {
'image_id': image_id,

@@ -2,6 +2,8 @@

from flask import request

import features

from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
log_action, Unauthorized, NotFound, internal_only, require_scope,
path_param, query_param, truthy_bool, parse_args, require_user_admin,

@@ -13,12 +15,11 @@ from data import model
from util.useremails import send_org_invite_email
from app import avatar

import features

def try_accept_invite(code, user):
(team, inviter) = model.confirm_team_invite(code, user)
(team, inviter) = model.team.confirm_team_invite(code, user)

model.delete_matching_notifications(user, 'org_team_invite', code=code)
model.notification.delete_matching_notifications(user, 'org_team_invite', code=code)

orgname = team.organization.username
log_action('org_team_member_invite_accepted', orgname, {

@@ -31,7 +32,7 @@ def try_accept_invite(code, user):


def handle_addinvite_team(inviter, team, user=None, email=None):
invite = model.add_or_invite_to_team(inviter, team, user, email,
invite = model.team.add_or_invite_to_team(inviter, team, user, email,
requires_invite=features.MAILING)
if not invite:
# User was added to the team directly.

@@ -39,7 +40,7 @@ def handle_addinvite_team(inviter, team, user=None, email=None):

orgname = team.organization.username
if user:
model.create_notification('org_team_invite', user, metadata = {
model.notification.create_notification('org_team_invite', user, metadata={
'code': invite.invite_token,
'inviter': inviter.username,
'org': orgname,

@@ -52,7 +53,7 @@ def handle_addinvite_team(inviter, team, user=None, email=None):

def team_view(orgname, team):
view_permission = ViewTeamPermission(orgname, team.name)
role = model.get_team_org_role(team).name
role = model.team.get_team_org_role(team).name
return {
'name': team.name,
'description': team.description,

@@ -126,15 +127,15 @@ class OrganizationTeam(ApiResource):
details = request.get_json()
is_existing = False
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
is_existing = True
except model.InvalidTeamException:
# Create the new team.
description = details['description'] if 'description' in details else ''
role = details['role'] if 'role' in details else 'member'

org = model.get_organization(orgname)
team = model.create_team(teamname, org, role, description)
org = model.organization.get_organization(orgname)
team = model.team.create_team(teamname, org, role, description)
log_action('org_create_team', orgname, {'team': teamname})

if is_existing:

@@ -146,9 +147,9 @@ class OrganizationTeam(ApiResource):
{'team': teamname, 'description': team.description})

if 'role' in details:
role = model.get_team_org_role(team).name
role = model.team.get_team_org_role(team).name
if role != details['role']:
team = model.set_team_org_permission(team, details['role'],
team = model.team.set_team_org_permission(team, details['role'],
get_authenticated_user().username)
log_action('org_set_team_role', orgname, {'team': teamname, 'role': details['role']})

@@ -162,7 +163,7 @@ class OrganizationTeam(ApiResource):
""" Delete the specified team. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
model.remove_team(orgname, teamname, get_authenticated_user().username)
model.team.remove_team(orgname, teamname, get_authenticated_user().username)
log_action('org_delete_team', orgname, {'team': teamname})
return 'Deleted', 204

@@ -176,7 +177,8 @@ class TeamMemberList(ApiResource):
""" Resource for managing the list of members for a team. """
@require_scope(scopes.ORG_ADMIN)
@parse_args
@query_param('includePending', 'Whether to include pending members', type=truthy_bool, default=False)
@query_param('includePending', 'Whether to include pending members', type=truthy_bool,
default=False)
@nickname('getOrganizationTeamMembers')
def get(self, args, orgname, teamname):
""" Retrieve the list of members for the specified team. """

@@ -186,15 +188,15 @@ class TeamMemberList(ApiResource):
if view_permission.can():
team = None
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()

members = model.get_organization_team_members(team.id)
members = model.organization.get_organization_team_members(team.id)
invites = []

if args['includePending'] and edit_permission.can():
invites = model.get_organization_team_member_invites(team.id)
invites = model.team.get_organization_team_member_invites(team.id)

data = {
'members': [member_view(m) for m in members] + [invite_view(i) for i in invites],

@@ -224,12 +226,12 @@ class TeamMember(ApiResource):

# Find the team.
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()

# Find the user.
user = model.get_user(membername)
user = model.user.get_user(membername)
if not user:
raise request_error(message='Unknown user')

@@ -263,18 +265,18 @@ class TeamMember(ApiResource):

# Find the team.
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()

# Find the member.
member = model.get_user(membername)
member = model.user.get_user(membername)
if not member:
raise NotFound()

# First attempt to delete an invite for the user to this team. If none found,
# then we try to remove the user directly.
if model.delete_team_user_invite(team, member):
if model.team.delete_team_user_invite(team, member):
log_action('org_delete_team_member_invite', orgname, {
'user': membername,
|
||||
'team': teamname,
|
||||
|
@ -282,7 +284,7 @@ class TeamMember(ApiResource):
|
|||
})
|
||||
return 'Deleted', 204
|
||||
|
||||
model.remove_user_from_team(orgname, teamname, membername, invoking_user)
|
||||
model.team.remove_user_from_team(orgname, teamname, membername, invoking_user)
|
||||
log_action('org_remove_team_member', orgname, {'member': membername, 'team': teamname})
|
||||
return 'Deleted', 204
|
||||
|
||||
|
@ -303,7 +305,7 @@ class InviteTeamMember(ApiResource):
|
|||
|
||||
# Find the team.
|
||||
try:
|
||||
team = model.get_organization_team(orgname, teamname)
|
||||
team = model.team.get_organization_team(orgname, teamname)
|
||||
except model.InvalidTeamException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -329,12 +331,12 @@ class InviteTeamMember(ApiResource):
|
|||
|
||||
# Find the team.
|
||||
try:
|
||||
team = model.get_organization_team(orgname, teamname)
|
||||
team = model.team.get_organization_team(orgname, teamname)
|
||||
except model.InvalidTeamException:
|
||||
raise NotFound()
|
||||
|
||||
# Delete the invite.
|
||||
model.delete_team_email_invite(team, email)
|
||||
model.team.delete_team_email_invite(team, email)
|
||||
log_action('org_delete_team_member_invite', orgname, {
|
||||
'email': email,
|
||||
'team': teamname,
|
||||
|
@ -369,9 +371,10 @@ class TeamMemberInvite(ApiResource):
|
|||
@require_user_admin
|
||||
def delete(self, code):
|
||||
""" Delete an existing member of a team. """
|
||||
(team, inviter) = model.delete_team_invite(code, get_authenticated_user())
|
||||
(team, inviter) = model.team.delete_team_invite(code, user_obj=get_authenticated_user())
|
||||
|
||||
model.delete_matching_notifications(get_authenticated_user(), 'org_team_invite', code=code)
|
||||
model.notification.delete_matching_notifications(get_authenticated_user(), 'org_team_invite',
|
||||
code=code)
|
||||
|
||||
orgname = team.organization.username
|
||||
log_action('org_team_member_invite_declined', orgname, {
|
||||
|
|
|
@ -12,14 +12,14 @@ from endpoints.api import (RepositoryParamResource, nickname, resource, require_
|
|||
log_action, request_error, query_param, parse_args, internal_only,
|
||||
validate_json_request, api, Unauthorized, NotFound, InvalidRequest,
|
||||
path_param)
|
||||
from endpoints.api.build import (build_status_view, trigger_view, RepositoryBuildStatus,
|
||||
get_trigger_config)
|
||||
from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus
|
||||
from endpoints.building import start_build
|
||||
from endpoints.trigger import (BuildTriggerHandler, TriggerDeactivationException,
|
||||
TriggerActivationException, EmptyRepositoryException,
|
||||
RepositoryReadException, TriggerStartException)
|
||||
from data import model
|
||||
from auth.permissions import UserAdminPermission, AdministerOrganizationPermission, ReadRepositoryPermission
|
||||
from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
|
||||
ReadRepositoryPermission)
|
||||
from util.names import parse_robot_username
|
||||
from util.dockerfileparse import parse_dockerfile
|
||||
|
||||
|
@ -41,7 +41,7 @@ class BuildTriggerList(RepositoryParamResource):
|
|||
@nickname('listBuildTriggers')
|
||||
def get(self, namespace, repository):
|
||||
""" List the triggers for the specified repository. """
|
||||
triggers = model.list_build_triggers(namespace, repository)
|
||||
triggers = model.build.list_build_triggers(namespace, repository)
|
||||
return {
|
||||
'triggers': [trigger_view(trigger, can_admin=True) for trigger in triggers]
|
||||
}
|
||||
|
@ -58,7 +58,7 @@ class BuildTrigger(RepositoryParamResource):
|
|||
def get(self, namespace, repository, trigger_uuid):
|
||||
""" Get information for the specified build trigger. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -69,7 +69,7 @@ class BuildTrigger(RepositoryParamResource):
|
|||
def delete(self, namespace, repository, trigger_uuid):
|
||||
""" Delete the specified build trigger. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -84,7 +84,7 @@ class BuildTrigger(RepositoryParamResource):
|
|||
log_action('delete_repo_trigger', namespace,
|
||||
{'repo': repository, 'trigger_id': trigger_uuid,
|
||||
'service': trigger.service.name},
|
||||
repo=model.get_repository(namespace, repository))
|
||||
repo=model.repository.get_repository(namespace, repository))
|
||||
|
||||
trigger.delete_instance(recursive=True)
|
||||
|
||||
|
@ -114,7 +114,7 @@ class BuildTriggerSubdirs(RepositoryParamResource):
|
|||
def post(self, namespace, repository, trigger_uuid):
|
||||
""" List the subdirectories available for the specified build trigger and source. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -175,7 +175,7 @@ class BuildTriggerActivate(RepositoryParamResource):
|
|||
def post(self, namespace, repository, trigger_uuid):
|
||||
""" Activate the specified build trigger. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -188,8 +188,9 @@ class BuildTriggerActivate(RepositoryParamResource):
|
|||
# Update the pull robot (if any).
|
||||
pull_robot_name = request.get_json().get('pull_robot', None)
|
||||
if pull_robot_name:
|
||||
pull_robot = model.lookup_robot(pull_robot_name)
|
||||
if not pull_robot:
|
||||
try:
|
||||
pull_robot = model.user.lookup_robot(pull_robot_name)
|
||||
except model.InvalidRobotException:
|
||||
raise NotFound()
|
||||
|
||||
# Make sure the user has administer permissions for the robot's namespace.
|
||||
|
@ -208,7 +209,7 @@ class BuildTriggerActivate(RepositoryParamResource):
|
|||
new_config_dict = request.get_json()['config']
|
||||
|
||||
write_token_name = 'Build Trigger: %s' % trigger.service.name
|
||||
write_token = model.create_delegate_token(namespace, repository, write_token_name,
|
||||
write_token = model.token.create_delegate_token(namespace, repository, write_token_name,
|
||||
'write')
|
||||
|
||||
try:
|
||||
|
@ -233,7 +234,7 @@ class BuildTriggerActivate(RepositoryParamResource):
|
|||
trigger.save()
|
||||
|
||||
# Log the trigger setup.
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
log_action('setup_repo_trigger', namespace,
|
||||
{'repo': repository, 'namespace': namespace,
|
||||
'trigger_id': trigger.uuid, 'service': trigger.service.name,
|
||||
|
@ -275,7 +276,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
|
|||
def post(self, namespace, repository, trigger_uuid):
|
||||
""" Analyze the specified build trigger configuration. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -324,7 +325,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
|
|||
}
|
||||
|
||||
(base_namespace, base_repository) = result
|
||||
found_repository = model.get_repository(base_namespace, base_repository)
|
||||
found_repository = model.repository.get_repository(base_namespace, base_repository)
|
||||
if not found_repository:
|
||||
return {
|
||||
'status': 'error',
|
||||
|
@ -361,7 +362,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
|
|||
(robot_namespace, shortname) = parse_robot_username(user.username)
|
||||
return AdministerOrganizationPermission(robot_namespace).can()
|
||||
|
||||
repo_users = list(model.get_all_repo_users_transitive(base_namespace, base_repository))
|
||||
repo_users = list(model.user.get_all_repo_users_transitive(base_namespace, base_repository))
|
||||
read_robots = [robot_view(user) for user in repo_users if is_valid_robot(user)]
|
||||
|
||||
return {
|
||||
|
@ -399,7 +400,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
|
|||
'properties': {
|
||||
'branch_name': {
|
||||
'type': 'string',
|
||||
'description': '(SCM only) If specified, the name of the branch to build.'
|
||||
'description': '(SCM only) If specified, the name of the branch to model.build.'
|
||||
},
|
||||
'commit_sha': {
|
||||
'type': 'string',
|
||||
|
@ -415,7 +416,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
|
|||
def post(self, namespace, repository, trigger_uuid):
|
||||
""" Manually start a build from the specified trigger. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -424,8 +425,8 @@ class ActivateBuildTrigger(RepositoryParamResource):
|
|||
raise InvalidRequest('Trigger is not active.')
|
||||
|
||||
try:
|
||||
repo = model.get_repository(namespace, repository)
|
||||
pull_robot_name = model.get_pull_robot_name(trigger)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
pull_robot_name = model.build.get_pull_robot_name(trigger)
|
||||
|
||||
run_parameters = request.get_json()
|
||||
prepared = handler.manual_start(run_parameters=run_parameters)
|
||||
|
@ -454,10 +455,9 @@ class TriggerBuildList(RepositoryParamResource):
|
|||
def get(self, args, namespace, repository, trigger_uuid):
|
||||
""" List the builds started by the specified trigger. """
|
||||
limit = args['limit']
|
||||
builds = list(model.list_trigger_builds(namespace, repository,
|
||||
trigger_uuid, limit))
|
||||
builds = model.build.list_trigger_builds(namespace, repository, trigger_uuid, limit)
|
||||
return {
|
||||
'builds': [build_status_view(build) for build in builds]
|
||||
'builds': [build_status_view(bld) for bld in builds]
|
||||
}
|
||||
|
||||
|
||||
|
@ -471,7 +471,7 @@ class BuildTriggerFieldValues(RepositoryParamResource):
|
|||
def post(self, namespace, repository, trigger_uuid, field_name):
|
||||
""" List the field values for a custom run field. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -502,7 +502,7 @@ class BuildTriggerSources(RepositoryParamResource):
|
|||
def get(self, namespace, repository, trigger_uuid):
|
||||
""" List the build sources for the trigger configuration thus far. """
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
raise NotFound()
|
||||
|
||||
|
|
|
@ -3,33 +3,33 @@
|
|||
import logging
|
||||
import json
|
||||
|
||||
from random import SystemRandom
|
||||
from flask import request, abort
|
||||
from flask.ext.login import logout_user
|
||||
from flask.ext.principal import identity_changed, AnonymousIdentity
|
||||
from peewee import IntegrityError
|
||||
|
||||
import features
|
||||
|
||||
from app import app, billing as stripe, authentication, avatar
|
||||
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
|
||||
log_action, internal_only, NotFound, require_user_admin, parse_args,
|
||||
query_param, InvalidToken, require_scope, format_date, hide_if, show_if,
|
||||
query_param, InvalidToken, require_scope, format_date, show_if,
|
||||
license_error, require_fresh_login, path_param, define_json_response,
|
||||
RepositoryParamResource)
|
||||
from endpoints.api.subscribe import subscribe
|
||||
from endpoints.common import common_login
|
||||
from endpoints.decorators import anon_allowed
|
||||
from endpoints.api.team import try_accept_invite
|
||||
|
||||
from data import model
|
||||
from data.billing import get_plan
|
||||
from auth.permissions import (AdministerOrganizationPermission, CreateRepositoryPermission,
|
||||
UserAdminPermission, UserReadPermission, SuperUserPermission)
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email, send_password_changed)
|
||||
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email,
|
||||
send_password_changed)
|
||||
from util.names import parse_single_urn
|
||||
|
||||
import features
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -45,7 +45,7 @@ def user_view(user):
|
|||
'preferred_namespace': not (o.stripe_id is None)
|
||||
}
|
||||
|
||||
organizations = model.get_user_organizations(user.username)
|
||||
organizations = model.organization.get_user_organizations(user.username)
|
||||
|
||||
def login_view(login):
|
||||
try:
|
||||
|
@ -59,7 +59,7 @@ def user_view(user):
|
|||
'metadata': metadata
|
||||
}
|
||||
|
||||
logins = model.list_federated_logins(user)
|
||||
logins = model.user.list_federated_logins(user)
|
||||
|
||||
user_response = {
|
||||
'anonymous': False,
|
||||
|
@ -89,14 +89,14 @@ def user_view(user):
|
|||
return user_response
|
||||
|
||||
|
||||
def notification_view(notification):
|
||||
def notification_view(note):
|
||||
return {
|
||||
'id': notification.uuid,
|
||||
'organization': notification.target.username if notification.target.organization else None,
|
||||
'kind': notification.kind.name,
|
||||
'created': format_date(notification.created),
|
||||
'metadata': json.loads(notification.metadata_json),
|
||||
'dismissed': notification.dismissed
|
||||
'id': note.uuid,
|
||||
'organization': note.target.username if note.target.organization else None,
|
||||
'kind': note.kind.name,
|
||||
'created': format_date(note.created),
|
||||
'metadata': json.loads(note.metadata_json),
|
||||
'dismissed': note.dismissed
|
||||
}
|
||||
|
||||
|
||||
|
@ -238,7 +238,7 @@ class User(ApiResource):
|
|||
log_action('account_change_password', user.username)
|
||||
|
||||
# Change the user's password.
|
||||
model.change_password(user, user_data['password'])
|
||||
model.user.change_password(user, user_data['password'])
|
||||
|
||||
# Login again to reset their session cookie.
|
||||
common_login(user)
|
||||
|
@ -248,36 +248,36 @@ class User(ApiResource):
|
|||
|
||||
if 'invoice_email' in user_data:
|
||||
logger.debug('Changing invoice_email for user: %s', user.username)
|
||||
model.change_invoice_email(user, user_data['invoice_email'])
|
||||
model.user.change_invoice_email(user, user_data['invoice_email'])
|
||||
|
||||
if 'tag_expiration' in user_data:
|
||||
logger.debug('Changing user tag expiration to: %ss', user_data['tag_expiration'])
|
||||
model.change_user_tag_expiration(user, user_data['tag_expiration'])
|
||||
model.user.change_user_tag_expiration(user, user_data['tag_expiration'])
|
||||
|
||||
if 'email' in user_data and user_data['email'] != user.email:
|
||||
new_email = user_data['email']
|
||||
if model.find_user_by_email(new_email):
|
||||
if model.user.find_user_by_email(new_email):
|
||||
# Email already used.
|
||||
raise request_error(message='E-mail address already used')
|
||||
|
||||
if features.MAILING:
|
||||
logger.debug('Sending email to change email address for user: %s',
|
||||
user.username)
|
||||
code = model.create_confirm_email_code(user, new_email=new_email)
|
||||
code = model.user.create_confirm_email_code(user, new_email=new_email)
|
||||
send_change_email(user.username, user_data['email'], code.code)
|
||||
else:
|
||||
model.update_email(user, new_email, auto_verify=not features.MAILING)
|
||||
model.user.update_email(user, new_email, auto_verify=not features.MAILING)
|
||||
|
||||
if ('username' in user_data and user_data['username'] != user.username and
|
||||
features.USER_RENAME):
|
||||
new_username = user_data['username']
|
||||
if model.get_user_or_org(new_username) is not None:
|
||||
if model.user.get_user_or_org(new_username) is not None:
|
||||
# Username already used
|
||||
raise request_error(message='Username is already in use')
|
||||
|
||||
model.change_username(user.id, new_username)
|
||||
model.user.change_username(user.id, new_username)
|
||||
|
||||
except model.InvalidPasswordException, ex:
|
||||
except model.user.InvalidPasswordException, ex:
|
||||
raise request_error(exception=ex)
|
||||
|
||||
return user_view(user)
|
||||
|
@ -291,12 +291,12 @@ class User(ApiResource):
|
|||
user_data = request.get_json()
|
||||
invite_code = user_data.get('invite_code', '')
|
||||
|
||||
existing_user = model.get_nonrobot_user(user_data['username'])
|
||||
existing_user = model.user.get_nonrobot_user(user_data['username'])
|
||||
if existing_user:
|
||||
raise request_error(message='The username already exists')
|
||||
|
||||
try:
|
||||
new_user = model.create_user(user_data['username'], user_data['password'],
|
||||
new_user = model.user.create_user(user_data['username'], user_data['password'],
|
||||
user_data['email'], auto_verify=not features.MAILING)
|
||||
|
||||
# Handle any invite codes.
|
||||
|
@ -306,12 +306,12 @@ class User(ApiResource):
|
|||
# Add the user to the team.
|
||||
try:
|
||||
try_accept_invite(invite_code, new_user)
|
||||
except model.DataModelException:
|
||||
except model.user.DataModelException:
|
||||
pass
|
||||
|
||||
|
||||
if features.MAILING:
|
||||
code = model.create_confirm_email_code(new_user)
|
||||
code = model.user.create_confirm_email_code(new_user)
|
||||
send_confirmation_email(new_user.username, new_user.email, code.code)
|
||||
return {
|
||||
'awaiting_verification': True
|
||||
|
@ -320,9 +320,9 @@ class User(ApiResource):
|
|||
common_login(new_user)
|
||||
return user_view(new_user)
|
||||
|
||||
except model.TooManyUsersException as ex:
|
||||
except model.user.TooManyUsersException as ex:
|
||||
raise license_error(exception=ex)
|
||||
except model.DataModelException as ex:
|
||||
except model.user.DataModelException as ex:
|
||||
raise request_error(exception=ex)
|
||||
|
||||
@resource('/v1/user/private')
|
||||
|
@ -336,7 +336,7 @@ class PrivateRepositories(ApiResource):
|
|||
""" Get the number of private repos this user has, and whether they are allowed to create more.
|
||||
"""
|
||||
user = get_authenticated_user()
|
||||
private_repos = model.get_private_repo_count(user.username)
|
||||
private_repos = model.user.get_private_repo_count(user.username)
|
||||
repos_allowed = 0
|
||||
|
||||
if user.stripe_id:
|
||||
|
@ -396,7 +396,7 @@ def conduct_signin(username_or_email, password):
|
|||
verified = None
|
||||
try:
|
||||
(verified, error_message) = authentication.verify_user(username_or_email, password)
|
||||
except model.TooManyUsersException as ex:
|
||||
except model.user.TooManyUsersException as ex:
|
||||
raise license_error(exception=ex)
|
||||
|
||||
if verified:
|
||||
|
@ -457,15 +457,14 @@ class ConvertToOrganization(ApiResource):
|
|||
# Ensure that the sign in credentials work.
|
||||
admin_username = convert_data['adminUser']
|
||||
admin_password = convert_data['adminPassword']
|
||||
(admin_user, error_message) = authentication.verify_user(admin_username, admin_password)
|
||||
(admin_user, _) = authentication.verify_user(admin_username, admin_password)
|
||||
if not admin_user:
|
||||
raise request_error(reason='invaliduser',
|
||||
message='The admin user credentials are not valid')
|
||||
|
||||
# Ensure that the new admin user is the not user being converted.
|
||||
if admin_user.id == user.id:
|
||||
raise request_error(reason='invaliduser',
|
||||
message='The admin user is not valid')
|
||||
raise request_error(reason='invaliduser', message='The admin user is not valid')
|
||||
|
||||
# Subscribe the organization to the new plan.
|
||||
if features.BILLING:
|
||||
|
@ -473,7 +472,7 @@ class ConvertToOrganization(ApiResource):
|
|||
subscribe(user, plan, None, True) # Require business plans
|
||||
|
||||
# Convert the user to an organization.
|
||||
model.convert_user_to_organization(user, admin_user)
|
||||
model.organization.convert_user_to_organization(user, admin_user)
|
||||
log_action('account_convert', user.username)
|
||||
|
||||
# And finally login with the admin credentials.
|
||||
|
@ -583,7 +582,7 @@ class DetachExternal(ApiResource):
|
|||
@nickname('detachExternalLogin')
|
||||
def post(self, servicename):
|
||||
""" Request that the current user be detached from the external login service. """
|
||||
model.detach_external_login(get_authenticated_user(), servicename)
|
||||
model.user.detach_external_login(get_authenticated_user(), servicename)
|
||||
return {'success': True}
|
||||
|
||||
|
||||
|
@ -614,7 +613,7 @@ class Recovery(ApiResource):
|
|||
def post(self):
|
||||
""" Request a password recovery email."""
|
||||
email = request.get_json()['email']
|
||||
code = model.create_reset_password_email_code(email)
|
||||
code = model.user.create_reset_password_email_code(email)
|
||||
send_recovery_email(email, code.code)
|
||||
return 'Created', 201
|
||||
|
||||
|
@ -631,7 +630,8 @@ class UserNotificationList(ApiResource):
|
|||
page = args['page']
|
||||
limit = args['limit']
|
||||
|
||||
notifications = list(model.list_notifications(get_authenticated_user(), page=page, limit=limit + 1))
|
||||
notifications = list(model.notification.list_notifications(get_authenticated_user(), page=page,
|
||||
limit=limit + 1))
|
||||
has_more = False
|
||||
|
||||
if len(notifications) > limit:
|
||||
|
@ -639,7 +639,7 @@ class UserNotificationList(ApiResource):
|
|||
notifications = notifications[0:limit]
|
||||
|
||||
return {
|
||||
'notifications': [notification_view(notification) for notification in notifications],
|
||||
'notifications': [notification_view(note) for note in notifications],
|
||||
'additional': has_more
|
||||
}
|
||||
|
||||
|
@ -665,24 +665,24 @@ class UserNotification(ApiResource):
|
|||
@require_user_admin
|
||||
@nickname('getUserNotification')
|
||||
def get(self, uuid):
|
||||
notification = model.lookup_notification(get_authenticated_user(), uuid)
|
||||
if not notification:
|
||||
note = model.notification.lookup_notification(get_authenticated_user(), uuid)
|
||||
if not note:
|
||||
raise NotFound()
|
||||
|
||||
return notification_view(notification)
|
||||
return notification_view(note)
|
||||
|
||||
@require_user_admin
|
||||
@nickname('updateUserNotification')
|
||||
@validate_json_request('UpdateNotification')
|
||||
def put(self, uuid):
|
||||
notification = model.lookup_notification(get_authenticated_user(), uuid)
|
||||
if not notification:
|
||||
note = model.notification.lookup_notification(get_authenticated_user(), uuid)
|
||||
if not note:
|
||||
raise NotFound()
|
||||
|
||||
notification.dismissed = request.get_json().get('dismissed', False)
|
||||
notification.save()
|
||||
note.dismissed = request.get_json().get('dismissed', False)
|
||||
note.save()
|
||||
|
||||
return notification_view(notification)
|
||||
return notification_view(note)
|
||||
|
||||
|
||||
def authorization_view(access_token):
|
||||
|
@ -733,8 +733,7 @@ class UserAuthorization(ApiResource):
|
|||
@require_user_admin
|
||||
@nickname('deleteUserAuthorization')
|
||||
def delete(self, access_token_uuid):
|
||||
access_token = model.oauth.lookup_access_token_for_user(get_authenticated_user(),
|
||||
access_token_uuid)
|
||||
access_token = model.oauth.lookup_access_token_for_user(get_authenticated_user(), access_token_uuid)
|
||||
if not access_token:
|
||||
raise NotFound()
|
||||
|
||||
|
@ -774,9 +773,8 @@ class StarredRepositoryList(ApiResource):
|
|||
""" List all starred repositories. """
|
||||
page = args['page']
|
||||
limit = args['limit']
|
||||
starred_repos = model.get_user_starred_repositories(get_authenticated_user(),
|
||||
page=page,
|
||||
limit=limit)
|
||||
starred_repos = model.repository.get_user_starred_repositories(get_authenticated_user(),
|
||||
page=page, limit=limit)
|
||||
def repo_view(repo_obj):
|
||||
return {
|
||||
'namespace': repo_obj.namespace_user.username,
|
||||
|
@ -797,11 +795,11 @@ class StarredRepositoryList(ApiResource):
|
|||
req = request.get_json()
|
||||
namespace = req['namespace']
|
||||
repository = req['repository']
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
|
||||
if repo:
|
||||
try:
|
||||
model.star_repository(user, repo)
|
||||
model.repository.star_repository(user, repo)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
|
@ -820,10 +818,10 @@ class StarredRepository(RepositoryParamResource):
|
|||
def delete(self, namespace, repository):
|
||||
""" Removes a star from a repository. """
|
||||
user = get_authenticated_user()
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
|
||||
if repo:
|
||||
model.unstar_repository(user, repo)
|
||||
model.repository.unstar_repository(user, repo)
|
||||
return 'Deleted', 204
|
||||
|
||||
|
||||
|
@ -833,7 +831,7 @@ class Users(ApiResource):
|
|||
@nickname('getUserInformation')
|
||||
def get(self, username):
|
||||
""" Get user information for the specified user. """
|
||||
user = model.get_nonrobot_user(username)
|
||||
user = model.user.get_nonrobot_user(username)
|
||||
if user is None:
|
||||
abort(404)
|
||||
|
||||
|
|
|
@@ -22,7 +22,7 @@ bitbuckettrigger = Blueprint('bitbuckettrigger', __name__)
@route_show_if(features.BITBUCKET_BUILD)
@require_session_login
def attach_bitbucket_build_trigger(trigger_uuid):
  trigger = model.get_build_trigger(trigger_uuid)
  trigger = model.build.get_build_trigger(trigger_uuid)
  if not trigger or trigger.service.name != BitbucketBuildTrigger.service_name():
    abort(404)
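Since the commit message describes this as an accidental split of legacy.py, it is worth noting how such a move is often staged: a thin forwarding module is left behind while call sites are migrated. The snippet below is purely illustrative and is not added by this commit; the forwarded names are taken from call sites that appear in this diff.

# data/model/legacy.py -- hypothetical transitional shim, not part of this commit.
# Forwards the old flat names to the new submodules so that any call site not yet
# migrated keeps working during the transition.
from data.model import build, repository, user

get_build_trigger = build.get_build_trigger
create_build_trigger = build.create_build_trigger
get_repository = repository.get_repository
verify_robot = user.verify_robot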
@@ -1,20 +1,23 @@
import logging
import json

from flask import request

from app import app, dockerfile_build_queue
from data import model
from data.database import db
from auth.auth_context import get_authenticated_user
from endpoints.notificationhelper import spawn_notification
from flask import request


logger = logging.getLogger(__name__)


def start_build(repository, prepared_build, pull_robot_name=None):
  host = app.config['SERVER_HOSTNAME']
  repo_path = '%s/%s/%s' % (host, repository.namespace_user.username, repository.name)

  token = model.create_access_token(repository, 'write', kind='build-worker',
  new_token = model.token.create_access_token(repository, 'write', kind='build-worker',
                                              friendly_name='Repository Build Token')
  logger.debug('Creating build %s with repo %s tags %s',
               prepared_build.build_name, repo_path, prepared_build.tags)

@@ -29,15 +32,17 @@ def start_build(repository, prepared_build, pull_robot_name=None):
  }

  with app.config['DB_TRANSACTION_FACTORY'](db):
    build_request = model.create_repository_build(repository, token, job_config,
    build_request = model.build.create_repository_build(repository, new_token, job_config,
                                                         prepared_build.dockerfile_id,
                                                         prepared_build.build_name,
                                                         prepared_build.trigger,
                                                         pull_robot_name=pull_robot_name)

  pull_creds = model.user.get_pull_credentials(pull_robot_name) if pull_robot_name else None

  json_data = json.dumps({
    'build_uuid': build_request.uuid,
    'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
    'pull_credentials': pull_creds
  })

  queue_id = dockerfile_build_queue.put([repository.namespace_user.username, repository.name],

@@ -62,8 +67,8 @@ def start_build(repository, prepared_build, pull_robot_name=None):
  event_log_metadata['trigger_kind'] = prepared_build.trigger.service.name
  event_log_metadata['trigger_metadata'] = prepared_build.metadata or {}

  model.log_action('build_dockerfile', repository.namespace_user.username, ip=request.remote_addr,
                   metadata=event_log_metadata, repository=repository)
  model.log.log_action('build_dockerfile', repository.namespace_user.username,
                       ip=request.remote_addr, metadata=event_log_metadata, repository=repository)

  spawn_notification(repository, 'build_queued', event_log_metadata,
                     subpage='build/%s' % build_request.uuid,
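After a sweeping call-site rewrite like the one above, a cheap smoke test that every new dotted path actually resolves can catch missed moves before the endpoints are exercised. This is a minimal sketch under the assumption that the submodules shown in this diff are importable; the file name and the expectation table are illustrative, not exhaustive.

# check_model_refactor.py -- illustrative smoke test; everything here is assumed.
import importlib

EXPECTED = {
  'data.model.repository': ['get_repository', 'repository_is_public'],
  'data.model.build': ['get_build_trigger', 'create_repository_build'],
  'data.model.user': ['get_nonrobot_user', 'verify_robot'],
  'data.model.token': ['create_access_token', 'load_token_data'],
}

missing = []
for module_name, function_names in EXPECTED.items():
  module = importlib.import_module(module_name)
  for function_name in function_names:
    # Record any expected call target that is absent or not callable.
    if not callable(getattr(module, function_name, None)):
      missing.append('%s.%s' % (module_name, function_name))

if missing:
  raise SystemExit('Unresolved call targets after the refactor: %s' % ', '.join(missing))
print('All expected call targets resolve.')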
@@ -199,11 +199,12 @@ def render_page_template(name, **kwargs):


def check_repository_usage(user_or_org, plan_found):
  private_repos = model.get_private_repo_count(user_or_org.username)
  private_repos = model.user.get_private_repo_count(user_or_org.username)
  repos_allowed = plan_found['privateRepos']

  if private_repos > repos_allowed:
    model.create_notification('over_private_usage', user_or_org, {'namespace': user_or_org.username})
    model.notification.create_notification('over_private_usage', user_or_org,
                                           {'namespace': user_or_org.username})
  else:
    model.delete_notifications_by_kind(user_or_org, 'over_private_usage')
    model.notification.delete_notifications_by_kind(user_or_org, 'over_private_usage')
@@ -26,12 +26,12 @@ def attach_github_build_trigger(namespace, repository):
  if permission.can():
    code = request.args.get('code')
    token = github_trigger.exchange_code_for_token(app.config, client, code)
    repo = model.get_repository(namespace, repository)
    repo = model.repository.get_repository(namespace, repository)
    if not repo:
      msg = 'Invalid repository: %s/%s' % (namespace, repository)
      abort(404, message=msg)

    trigger = model.create_build_trigger(repo, 'github', token, current_user.db_user())
    trigger = model.build.create_build_trigger(repo, 'github', token, current_user.db_user())
    repo_path = '%s/%s' % (namespace, repository)
    full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
                           trigger.uuid)
@@ -9,7 +9,6 @@ from auth.permissions import AdministerRepositoryPermission
from data import model
from endpoints.common import route_show_if
from util.http import abort
from util.names import parse_repository_name

import features


@@ -40,14 +39,15 @@ def attach_gitlab_build_trigger():
    msg = 'Could not exchange token. It may have expired.'
    abort(404, message=msg)

  repo = model.get_repository(namespace, repository)
  repo = model.repository.get_repository(namespace, repository)
  if not repo:
    msg = 'Invalid repository: %s/%s' % (namespace, repository)
    abort(404, message=msg)

  trigger = model.create_build_trigger(repo, 'gitlab', token, current_user.db_user())
  trigger = model.build.create_build_trigger(repo, 'gitlab', token, current_user.db_user())
  repo_path = '%s/%s' % (namespace, repository)
  full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=', trigger.uuid)
  full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
                         trigger.uuid)

  logger.debug('Redirecting to full url: %s', full_url)
  return redirect(full_url)
@@ -54,7 +54,8 @@ def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[
                       performer_data=None):
  event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage)

  notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name,
  notifications = model.notification.list_repo_notifications(repo.namespace_user.username,
                                                              repo.name,
                                                              event_name=event_name)
  for notification in list(notifications):
    notification_data = build_notification_data(notification, event_data, performer_data)
@ -38,11 +38,11 @@ class NotificationMethod(object):
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
"""
|
||||
Performs the notification method.
|
||||
|
||||
notification: The noticication record itself.
|
||||
notification_obj: The noticication record itself.
|
||||
event_handler: The NotificationEvent handler.
|
||||
notification_data: The dict of notification data placed in the queue.
|
||||
"""
|
||||
|
@ -71,14 +71,14 @@ class QuayNotificationMethod(NotificationMethod):
|
|||
target_info = config_data['target']
|
||||
|
||||
if target_info['kind'] == 'user':
|
||||
target = model.get_nonrobot_user(target_info['name'])
|
||||
target = model.user.get_nonrobot_user(target_info['name'])
|
||||
if not target:
|
||||
# Just to be safe.
|
||||
return (True, 'Unknown user %s' % target_info['name'], [])
|
||||
|
||||
return (True, None, [target])
|
||||
elif target_info['kind'] == 'org':
|
||||
target = model.get_organization(target_info['name'])
|
||||
target = model.organization.get_organization(target_info['name'])
|
||||
if not target:
|
||||
# Just to be safe.
|
||||
return (True, 'Unknown organization %s' % target_info['name'], None)
|
||||
|
@ -90,32 +90,33 @@ class QuayNotificationMethod(NotificationMethod):
|
|||
return (True, None, [target])
|
||||
elif target_info['kind'] == 'team':
|
||||
# Lookup the team.
|
||||
team = None
|
||||
org_team = None
|
||||
try:
|
||||
team = model.get_organization_team(repository.namespace_user.username, target_info['name'])
|
||||
org_team = model.team.get_organization_team(repository.namespace_user.username,
|
||||
target_info['name'])
|
||||
except model.InvalidTeamException:
|
||||
# Probably deleted.
|
||||
return (True, 'Unknown team %s' % target_info['name'], None)
|
||||
|
||||
# Lookup the team's members
|
||||
return (True, None, model.get_organization_team_members(team.id))
|
||||
return (True, None, model.organization.get_organization_team_members(org_team.id))
|
||||
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
repository = notification.repository
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
repository = notification_obj.repository
|
||||
if not repository:
|
||||
# Probably deleted.
|
||||
return
|
||||
|
||||
# Lookup the target user or team to which we'll send the notification.
|
||||
config_data = json.loads(notification.config_json)
|
||||
config_data = json.loads(notification_obj.config_json)
|
||||
status, err_message, target_users = self.find_targets(repository, config_data)
|
||||
if not status:
|
||||
raise NotificationMethodPerformException(err_message)
|
||||
|
||||
# For each of the target users, create a notification.
|
||||
for target_user in set(target_users or []):
|
||||
model.create_notification(event_handler.event_name(), target_user,
|
||||
model.notification.create_notification(event_handler.event_name(), target_user,
|
||||
metadata=notification_data['event_data'])
|
||||
|
||||
|
||||
|
@ -129,7 +130,7 @@ class EmailMethod(NotificationMethod):
|
|||
if not email:
|
||||
raise CannotValidateNotificationMethodException('Missing e-mail address')
|
||||
|
||||
record = model.get_email_authorized_for_repo(repository.namespace_user.username,
|
||||
record = model.repository.get_email_authorized_for_repo(repository.namespace_user.username,
|
||||
repository.name, email)
|
||||
if not record or not record.confirmed:
|
||||
raise CannotValidateNotificationMethodException('The specified e-mail address '
|
||||
|
@ -137,8 +138,8 @@ class EmailMethod(NotificationMethod):
|
|||
'notifications for this repository')
|
||||
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
config_data = json.loads(notification.config_json)
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
config_data = json.loads(notification_obj.config_json)
|
||||
email = config_data.get('email', '')
|
||||
if not email:
|
||||
return
|
||||
|
@ -166,8 +167,8 @@ class WebhookMethod(NotificationMethod):
|
|||
if not url:
|
||||
raise CannotValidateNotificationMethodException('Missing webhook URL')
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
config_data = json.loads(notification.config_json)
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
config_data = json.loads(notification_obj.config_json)
|
||||
url = config_data.get('url', '')
|
||||
if not url:
|
||||
return
|
||||
|
@ -201,13 +202,13 @@ class FlowdockMethod(NotificationMethod):
|
|||
if not token:
|
||||
raise CannotValidateNotificationMethodException('Missing Flowdock API Token')
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
config_data = json.loads(notification.config_json)
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
config_data = json.loads(notification_obj.config_json)
|
||||
token = config_data.get('flow_api_token', '')
|
||||
if not token:
|
||||
return
|
||||
|
||||
owner = model.get_user_or_org(notification.repository.namespace_user.username)
|
||||
owner = model.user.get_user_or_org(notification_obj.repository.namespace_user.username)
|
||||
if not owner:
|
||||
# Something went wrong.
|
||||
return
|
||||
|
@ -220,8 +221,8 @@ class FlowdockMethod(NotificationMethod):
|
|||
'subject': event_handler.get_summary(notification_data['event_data'], notification_data),
|
||||
'content': event_handler.get_message(notification_data['event_data'], notification_data),
|
||||
'from_name': owner.username,
|
||||
'project': (notification.repository.namespace_user.username + ' ' +
|
||||
notification.repository.name),
|
||||
'project': (notification_obj.repository.namespace_user.username + ' ' +
|
||||
notification_obj.repository.name),
|
||||
'tags': ['#' + event_handler.event_name()],
|
||||
'link': notification_data['event_data']['homepage']
|
||||
}
|
||||
|
@ -254,8 +255,8 @@ class HipchatMethod(NotificationMethod):
|
|||
if not config_data.get('room_id', ''):
|
||||
raise CannotValidateNotificationMethodException('Missing Hipchat Room ID')
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
config_data = json.loads(notification.config_json)
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
config_data = json.loads(notification_obj.config_json)
|
||||
|
||||
token = config_data.get('notification_token', '')
|
||||
room_id = config_data.get('room_id', '')
|
||||
|
@ -263,7 +264,7 @@ class HipchatMethod(NotificationMethod):
|
|||
if not token or not room_id:
|
||||
return
|
||||
|
||||
owner = model.get_user_or_org(notification.repository.namespace_user.username)
|
||||
owner = model.user.get_user_or_org(notification_obj.repository.namespace_user.username)
|
||||
if not owner:
|
||||
# Something went wrong.
|
||||
return
|
||||
|
@ -367,14 +368,14 @@ class SlackMethod(NotificationMethod):
|
|||
message = message.replace('<br>', '\n')
|
||||
return adjust_tags(message)
|
||||
|
||||
def perform(self, notification, event_handler, notification_data):
|
||||
config_data = json.loads(notification.config_json)
|
||||
def perform(self, notification_obj, event_handler, notification_data):
|
||||
config_data = json.loads(notification_obj.config_json)
|
||||
|
||||
url = config_data.get('url', '')
|
||||
if not url:
|
||||
return
|
||||
|
||||
owner = model.get_user_or_org(notification.repository.namespace_user.username)
|
||||
owner = model.user.get_user_or_org(notification_obj.repository.namespace_user.username)
|
||||
if not owner:
|
||||
# Something went wrong.
|
||||
return
|
||||
|
|
|
@ -41,7 +41,7 @@ def get_user(service, token):
|
|||
|
||||
def conduct_oauth_login(service, user_id, username, email, metadata={}):
|
||||
service_name = service.service_name()
|
||||
to_login = model.verify_federated_login(service_name.lower(), user_id)
|
||||
to_login = model.user.verify_federated_login(service_name.lower(), user_id)
|
||||
if not to_login:
|
||||
# See if we can create a new user.
|
||||
if not features.USER_CREATION:
|
||||
|
@ -52,13 +52,13 @@ def conduct_oauth_login(service, user_id, username, email, metadata={}):
|
|||
try:
|
||||
new_username = None
|
||||
for valid in generate_valid_usernames(username):
|
||||
if model.get_user_or_org(valid):
|
||||
if model.user.get_user_or_org(valid):
|
||||
continue
|
||||
|
||||
new_username = valid
|
||||
break
|
||||
|
||||
to_login = model.create_federated_user(new_username, email, service_name.lower(),
|
||||
to_login = model.user.create_federated_user(new_username, email, service_name.lower(),
|
||||
user_id, set_password_notification=True,
|
||||
metadata=metadata)
|
||||
|
||||
|
@ -67,7 +67,7 @@ def conduct_oauth_login(service, user_id, username, email, metadata={}):
|
|||
|
||||
state = request.args.get('state', None)
|
||||
if state:
|
||||
logger.debug('Aliasing with state: %s' % state)
|
||||
logger.debug('Aliasing with state: %s', state)
|
||||
analytics.alias(to_login.username, state)
|
||||
|
||||
except model.InvalidEmailAddressException as ieex:
|
||||
|
@ -200,7 +200,7 @@ def google_oauth_attach():
|
|||
}
|
||||
|
||||
try:
|
||||
model.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
|
||||
model.user.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
|
||||
except IntegrityError:
|
||||
err = 'Google account %s is already attached to a %s account' % (
|
||||
username, app.config['REGISTRY_TITLE_SHORT'])
|
||||
|
@ -228,7 +228,7 @@ def github_oauth_attach():
|
|||
}
|
||||
|
||||
try:
|
||||
model.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
|
||||
model.user.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
|
||||
except IntegrityError:
|
||||
err = 'Github account %s is already attached to a %s account' % (
|
||||
username, app.config['REGISTRY_TITLE_SHORT'])
|
||||
|
|
|
@@ -67,9 +67,7 @@ def track_and_log(event_name, repo, **kwargs):

  # Log the action to the database.
  logger.debug('Logging the %s to logs system', event_name)
  model.log_action(event_name, namespace,
                   performer=authenticated_user,
                   ip=request.remote_addr, metadata=metadata,
                   repository=repo)
  model.log.log_action(event_name, namespace, performer=authenticated_user, ip=request.remote_addr,
                       metadata=metadata, repository=repo)

  logger.debug('Track and log of %s complete', event_name)
@@ -227,11 +227,11 @@ class BuildTriggerHandler(object):
  def put_config_key(self, key, value):
    """ Updates a config key in the trigger, saving it to the DB. """
    self.config[key] = value
    model.update_build_trigger(self.trigger, self.config)
    model.build.update_build_trigger(self.trigger, self.config)

  def set_auth_token(self, auth_token):
    """ Sets the auth token for the trigger, saving it to the DB. """
    model.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
    model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token)

  def get_dockerfile_path(self):
    """ Returns the normalized path to the Dockerfile found in the subdirectory
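For orientation, a short usage sketch of the two persistence helpers above. BuildTriggerHandler.get_handler and the literal values are assumptions for illustration; only put_config_key, set_auth_token, and model.build.update_build_trigger appear in this excerpt.

# Illustrative usage only; factory method, UUID, and values are assumed.
from data import model
from endpoints.trigger import BuildTriggerHandler

trigger = model.build.get_build_trigger('some-trigger-uuid')  # hypothetical UUID
handler = BuildTriggerHandler.get_handler(trigger)            # assumed factory
handler.put_config_key('build_source', 'someorg/somerepo')    # persisted via model.build.update_build_trigger
handler.set_auth_token('new-oauth-token')                     # persisted along with the current config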
@@ -26,4 +26,4 @@ def ping():

from endpoints.v1 import index
from endpoints.v1 import registry
from endpoints.v1 import tags
from endpoints.v1 import tag
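A call-site sweep of this size is usually scripted. The helper below is hypothetical and not part of this commit; its mapping is a small sample inferred from the call sites above. A purely textual pass does not understand string literals, which is how a stray edit such as the docstring earlier in this diff ('the name of the branch to build.' becoming 'the name of the branch to model.build.') can happen.

# rewrite_call_sites.py -- hypothetical helper, assumptions only.
import re
import sys

# Partial mapping of flat model.* functions to their new submodules.
MAPPING = {
  'get_repository': 'repository',
  'get_build_trigger': 'build',
  'get_nonrobot_user': 'user',
  'create_notification': 'notification',
}


def rewrite(source):
  # Rewrites model.<func>( to model.<submodule>.<func>( wherever the pattern matches.
  for func, submodule in MAPPING.items():
    source = re.sub(r'\bmodel\.%s\(' % func, 'model.%s.%s(' % (submodule, func), source)
  return source


if __name__ == '__main__':
  for path in sys.argv[1:]:
    with open(path) as fh:
      original = fh.read()
    updated = rewrite(original)
    if updated != original:
      with open(path, 'w') as fh:
        fh.write(updated)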
@ -2,20 +2,17 @@ import json
|
|||
import logging
|
||||
import urlparse
|
||||
|
||||
from flask import request, make_response, jsonify, session, Blueprint
|
||||
from flask import request, make_response, jsonify, session
|
||||
from functools import wraps
|
||||
from collections import OrderedDict
|
||||
|
||||
from data import model
|
||||
from data.model import oauth
|
||||
from app import app, authentication, userevents, storage
|
||||
from auth.auth import process_auth, generate_signed_token
|
||||
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
|
||||
from util.names import parse_repository_name
|
||||
from util.useremails import send_confirmation_email
|
||||
from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
|
||||
ReadRepositoryPermission, CreateRepositoryPermission,
|
||||
AlwaysFailPermission, repository_read_grant, repository_write_grant)
|
||||
repository_read_grant, repository_write_grant)
|
||||
|
||||
from util.http import abort
|
||||
from endpoints.v1 import v1_bp
|
||||
|
@ -23,8 +20,6 @@ from endpoints.trackhelper import track_and_log
|
|||
from endpoints.notificationhelper import spawn_notification
|
||||
from endpoints.decorators import anon_protect, anon_allowed
|
||||
|
||||
import features
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -90,13 +85,13 @@ def create_user():
|
|||
|
||||
if username == '$token':
|
||||
try:
|
||||
model.load_token_data(password)
|
||||
model.token.load_token_data(password)
|
||||
return success
|
||||
except model.InvalidTokenException:
|
||||
abort(400, 'Invalid access token.', issue='invalid-access-token')
|
||||
|
||||
elif username == '$oauthtoken':
|
||||
validated = oauth.validate_access_token(password)
|
||||
validated = model.oauth.validate_access_token(password)
|
||||
if validated is not None:
|
||||
return success
|
||||
else:
|
||||
|
@ -104,7 +99,7 @@ def create_user():
|
|||
|
||||
elif '+' in username:
|
||||
try:
|
||||
model.verify_robot(username, password)
|
||||
model.user.verify_robot(username, password)
|
||||
return success
|
||||
except model.InvalidRobotException:
|
||||
abort(400, 'Invalid robot account or password.',
|
||||
|
@ -157,12 +152,11 @@ def update_user(username):
|
|||
|
||||
if 'password' in update_request:
|
||||
logger.debug('Updating user password')
|
||||
model.change_password(get_authenticated_user(),
|
||||
update_request['password'])
|
||||
model.user.change_password(get_authenticated_user(), update_request['password'])
|
||||
|
||||
if 'email' in update_request:
|
||||
logger.debug('Updating user email')
|
||||
model.update_email(get_authenticated_user(), update_request['email'])
|
||||
model.user.update_email(get_authenticated_user(), update_request['email'])
|
||||
|
||||
return jsonify({
|
||||
'username': get_authenticated_user().username,
|
||||
|
@ -178,11 +172,8 @@ def update_user(username):
|
|||
@generate_headers(scope=GrantType.WRITE_REPOSITORY, add_grant_for_status=201)
|
||||
@anon_allowed
|
||||
def create_repository(namespace, repository):
|
||||
logger.debug('Parsing image descriptions for repository %s/%s', namespace, repository)
|
||||
image_descriptions = json.loads(request.data.decode('utf8'))
|
||||
|
||||
logger.debug('Looking up repository %s/%s', namespace, repository)
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
|
||||
logger.debug('Found repository %s/%s', namespace, repository)
|
||||
if not repo and get_authenticated_user() is None:
|
||||
|
@ -201,18 +192,16 @@ def create_repository(namespace, repository):
|
|||
else:
|
||||
permission = CreateRepositoryPermission(namespace)
|
||||
if not permission.can():
|
||||
logger.info('Attempt to create a new repo %s/%s with insufficient perms', namespace, repository)
|
||||
abort(403,
|
||||
message='You do not have permission to create repositories in namespace "%(namespace)s"',
|
||||
issue='no-create-permission',
|
||||
namespace=namespace)
|
||||
logger.info('Attempt to create a new repo %s/%s with insufficient perms', namespace,
|
||||
repository)
|
||||
msg = 'You do not have permission to create repositories in namespace "%(namespace)s"'
|
||||
abort(403, message=msg, issue='no-create-permission', namespace=namespace)
|
||||
|
||||
# Attempt to create the new repository.
|
||||
logger.debug('Creating repository %s/%s with owner: %s', namespace, repository,
|
||||
get_authenticated_user().username)
|
||||
|
||||
repo = model.create_repository(namespace, repository,
|
||||
get_authenticated_user())
|
||||
repo = model.repository.create_repository(namespace, repository, get_authenticated_user())
|
||||
|
||||
if get_authenticated_user():
|
||||
user_event_data = {
|
||||
|
@ -237,13 +226,13 @@ def update_images(namespace, repository):
|
|||
|
||||
if permission.can():
|
||||
logger.debug('Looking up repository')
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
if not repo:
|
||||
# Make sure the repo actually exists.
|
||||
abort(404, message='Unknown repository', issue='unknown-repo')
|
||||
|
||||
logger.debug('GCing repository')
|
||||
model.garbage_collect_repository(namespace, repository)
|
||||
model.repository.garbage_collect_repository(namespace, repository)
|
||||
|
||||
# Generate a job for each notification that has been added to this repo
|
||||
logger.debug('Adding notifications for repository')
|
||||
|
@ -269,10 +258,10 @@ def get_repository_images(namespace, repository):
|
|||
permission = ReadRepositoryPermission(namespace, repository)
|
||||
|
||||
# TODO invalidate token?
|
||||
if permission.can() or model.repository_is_public(namespace, repository):
|
||||
if permission.can() or model.repository.repository_is_public(namespace, repository):
|
||||
# We can't rely on permissions to tell us if a repo exists anymore
|
||||
logger.debug('Looking up repository')
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
if not repo:
|
||||
abort(404, message='Unknown repository', issue='unknown-repo')
|
||||
|
||||
|
@ -320,7 +309,7 @@ def get_search():
|
|||
username = user.username
|
||||
|
||||
if query:
|
||||
matching = model.get_matching_repositories(query, username)
|
||||
matching = model.repository.get_matching_repositories(query, username)
|
||||
else:
|
||||
matching = []
|
||||
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
import logging
|
||||
import json
|
||||
|
||||
from flask import (make_response, request, session, Response, redirect,
|
||||
Blueprint, abort as flask_abort)
|
||||
from flask import make_response, request, session, Response, redirect, abort as flask_abort
|
||||
from functools import wraps
|
||||
from datetime import datetime
|
||||
from time import time
|
||||
|
@ -61,7 +60,7 @@ def require_completion(f):
|
|||
@wraps(f)
|
||||
def wrapper(namespace, repository, *args, **kwargs):
|
||||
image_id = kwargs['image_id']
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
if image_is_uploading(repo_image):
|
||||
abort(400, 'Image %(image_id)s is being uploaded, retry later',
|
||||
issue='upload-in-progress', image_id=kwargs['image_id'])
|
||||
|
@ -104,9 +103,9 @@ def head_image_layer(namespace, repository, image_id, headers):
|
|||
permission = ReadRepositoryPermission(namespace, repository)
|
||||
|
||||
logger.debug('Checking repo permissions')
|
||||
if permission.can() or model.repository_is_public(namespace, repository):
|
||||
if permission.can() or model.repository.repository_is_public(namespace, repository):
|
||||
logger.debug('Looking up repo image')
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
if not repo_image:
|
||||
logger.debug('Image not found')
|
||||
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
|
||||
|
@ -138,9 +137,9 @@ def get_image_layer(namespace, repository, image_id, headers):
|
|||
permission = ReadRepositoryPermission(namespace, repository)
|
||||
|
||||
logger.debug('Checking repo permissions')
|
||||
if permission.can() or model.repository_is_public(namespace, repository):
|
||||
if permission.can() or model.repository.repository_is_public(namespace, repository):
|
||||
logger.debug('Looking up repo image')
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
if not repo_image:
|
||||
logger.debug('Image not found')
|
||||
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
|
||||
|
@@ -183,7 +182,7 @@ def put_image_layer(namespace, repository, image_id):
abort(403)

logger.debug('Retrieving image')
-repo_image = model.get_repo_image_extended(namespace, repository, image_id)
+repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
try:
logger.debug('Retrieving image data')
uuid = repo_image.storage.uuid

@@ -236,7 +235,7 @@ def put_image_layer(namespace, repository, image_id):

try:
# Save the size of the image.
-model.set_image_size(image_id, namespace, repository, size_info.compressed_size,
+model.image.set_image_size(image_id, namespace, repository, size_info.compressed_size,
size_info.uncompressed_size)

if requires_tarsum:

@@ -244,9 +243,8 @@ def put_image_layer(namespace, repository, image_id):
csums.append(checksums.compute_tarsum(tmp, json_data))
tmp.close()

-except (IOError, checksums.TarError) as e:
-logger.debug('put_image_layer: Error when computing tarsum '
-'{0}'.format(e))
+except (IOError, checksums.TarError) as exc:
+logger.debug('put_image_layer: Error when computing tarsum %s', exc)

if repo_image.storage.checksum is None:
# We don't have a checksum stored yet, that's fine skipping the check.
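Reviewer note: the tarsum hunk above (and several similar hunks in this commit) also switches from eager string interpolation to passing arguments to the logger. A minimal, self-contained illustration of the difference; the layer id value is made up.

import logging

logger = logging.getLogger('example')
layer_id = 'deadbeef'

# Old style: the message is formatted even when DEBUG logging is disabled.
logger.debug('Error when computing tarsum {0}'.format(layer_id))

# New style: formatting is deferred until a handler actually emits the record.
logger.debug('Error when computing tarsum %s', layer_id)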
@ -268,7 +266,7 @@ def put_image_layer(namespace, repository, image_id):
|
|||
# The layer is ready for download, send a job to the work queue to
|
||||
# process it.
|
||||
logger.debug('Adding layer to diff queue')
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
|
||||
'namespace_user_id': repo.namespace_user.id,
|
||||
'repository': repository,
|
||||
|
@ -310,7 +308,7 @@ def put_image_checksum(namespace, repository, image_id):
|
|||
issue='missing-checksum-cookie', image_id=image_id)
|
||||
|
||||
logger.debug('Looking up repo image')
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
if not repo_image or not repo_image.storage:
|
||||
abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
|
||||
|
||||
|
@ -331,8 +329,8 @@ def put_image_checksum(namespace, repository, image_id):
|
|||
abort(400, err)
|
||||
|
||||
if checksum not in session.get('checksum', []):
|
||||
logger.debug('session checksums: %s' % session.get('checksum', []))
|
||||
logger.debug('client supplied checksum: %s' % checksum)
|
||||
logger.debug('session checksums: %s', session.get('checksum', []))
|
||||
logger.debug('client supplied checksum: %s', checksum)
|
||||
logger.debug('put_image_checksum: Wrong checksum')
|
||||
abort(400, 'Checksum mismatch for image: %(image_id)s',
|
||||
issue='checksum-mismatch', image_id=image_id)
|
||||
|
@ -343,7 +341,7 @@ def put_image_checksum(namespace, repository, image_id):
|
|||
# The layer is ready for download, send a job to the work queue to
|
||||
# process it.
|
||||
logger.debug('Adding layer to diff queue')
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
|
||||
'namespace_user_id': repo.namespace_user.id,
|
||||
'repository': repository,
|
||||
|
@ -362,12 +360,11 @@ def put_image_checksum(namespace, repository, image_id):
|
|||
def get_image_json(namespace, repository, image_id, headers):
|
||||
logger.debug('Checking repo permissions')
|
||||
permission = ReadRepositoryPermission(namespace, repository)
|
||||
if not permission.can() and not model.repository_is_public(namespace,
|
||||
repository):
|
||||
if not permission.can() and not model.repository.repository_is_public(namespace, repository):
|
||||
abort(403)
|
||||
|
||||
logger.debug('Looking up repo image')
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
|
||||
logger.debug('Looking up repo layer data')
|
||||
try:
|
||||
|
@ -394,12 +391,11 @@ def get_image_json(namespace, repository, image_id, headers):
|
|||
def get_image_ancestry(namespace, repository, image_id, headers):
|
||||
logger.debug('Checking repo permissions')
|
||||
permission = ReadRepositoryPermission(namespace, repository)
|
||||
if not permission.can() and not model.repository_is_public(namespace,
|
||||
repository):
|
||||
if not permission.can() and not model.repository.repository_is_public(namespace, repository):
|
||||
abort(403)
|
||||
|
||||
logger.debug('Looking up repo image')
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
|
||||
logger.debug('Looking up image data')
|
||||
try:
|
||||
|
@@ -465,22 +461,23 @@ def put_image_json(namespace, repository, image_id):

logger.debug('Looking up repo image')

-repo = model.get_repository(namespace, repository)
+repo = model.repository.get_repository(namespace, repository)
if repo is None:
abort(404, 'Repository does not exist: %(namespace)s/%(repository)s', issue='no-repo',
namespace=namespace, repository=repository)

-repo_image = model.get_repo_image_extended(namespace, repository, image_id)
+repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not repo_image:
username = (get_authenticated_user() and get_authenticated_user().username or
get_grant_user_context())
logger.debug('Image not found, creating image with initiating user context: %s', username)
-repo_image = model.find_create_or_link_image(image_id, repo, username, {},
+repo_image = model.image.find_create_or_link_image(image_id, repo, username, {},
store.preferred_locations[0])

# Create a temporary tag to prevent this image from getting garbage collected while the push
# is in progress.
-model.create_temporary_hidden_tag(repo, repo_image, app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
+model.tag.create_temporary_hidden_tag(repo, repo_image,
+app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])

uuid = repo_image.storage.uuid
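Reviewer note: the hunk above is the push-time entry point this refactor touches most, so here is the same flow written out linearly. The model.repository/model.image/model.tag calls and the PUSH_TEMP_TAG_EXPIRATION_SEC config key are the ones visible in the diff; the wrapper function itself is purely illustrative.

from app import app, storage as store
from data import model

def begin_image_push(namespace, repository, image_id, username):
  repo = model.repository.get_repository(namespace, repository)
  repo_image = model.image.find_create_or_link_image(image_id, repo, username, {},
                                                     store.preferred_locations[0])
  # A short-lived hidden tag keeps the half-pushed image out of garbage collection until
  # the client finishes uploading layers and writes real tags.
  model.tag.create_temporary_hidden_tag(repo, repo_image,
                                        app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
  return repo_image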
@ -493,7 +490,7 @@ def put_image_json(namespace, repository, image_id):
|
|||
parent_image = None
|
||||
if parent_id:
|
||||
logger.debug('Looking up parent image')
|
||||
parent_image = model.get_repo_image_extended(namespace, repository, parent_id)
|
||||
parent_image = model.image.get_repo_image_extended(namespace, repository, parent_id)
|
||||
|
||||
parent_uuid = parent_image and parent_image.storage.uuid
|
||||
parent_locations = parent_image and parent_image.storage.locations
|
||||
|
@ -523,9 +520,8 @@ def put_image_json(namespace, repository, image_id):
|
|||
command = json.dumps(command_list) if command_list else None
|
||||
|
||||
logger.debug('Setting image metadata')
|
||||
model.set_image_metadata(image_id, namespace, repository,
|
||||
data.get('created'), data.get('comment'), command,
|
||||
parent_image)
|
||||
model.image.set_image_metadata(image_id, namespace, repository, data.get('created'),
|
||||
data.get('comment'), command, parent_image)
|
||||
|
||||
logger.debug('Putting json path')
|
||||
store.put_content(repo_image.storage.locations, json_path, request.data)
|
||||
|
@ -536,7 +532,7 @@ def put_image_json(namespace, repository, image_id):
|
|||
generate_ancestry(image_id, uuid, repo_image.storage.locations, parent_id, parent_uuid,
|
||||
parent_locations)
|
||||
except IOError as ioe:
|
||||
logger.debug('Error when generating ancestry: %s' % ioe.message)
|
||||
logger.debug('Error when generating ancestry: %s', ioe.message)
|
||||
abort(404)
|
||||
|
||||
logger.debug('Done')
|
||||
|
@ -544,9 +540,9 @@ def put_image_json(namespace, repository, image_id):
|
|||
|
||||
|
||||
def process_image_changes(namespace, repository, image_id):
|
||||
logger.debug('Generating diffs for image: %s' % image_id)
|
||||
logger.debug('Generating diffs for image: %s', image_id)
|
||||
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
|
||||
if not repo_image:
|
||||
logger.warning('No image for id: %s', image_id)
|
||||
return None, None
|
||||
|
@ -557,11 +553,11 @@ def process_image_changes(namespace, repository, image_id):
|
|||
image_trie_path = store.image_file_trie_path(uuid)
|
||||
|
||||
if store.exists(repo_image.storage.locations, image_diffs_path):
|
||||
logger.debug('Diffs already exist for image: %s' % image_id)
|
||||
logger.debug('Diffs already exist for image: %s', image_id)
|
||||
return image_trie_path, repo_image.storage.locations
|
||||
|
||||
image = model.get_image_by_id(namespace, repository, image_id)
|
||||
parents = model.get_parent_images(namespace, repository, image)
|
||||
image = model.image.get_image_by_id(namespace, repository, image_id)
|
||||
parents = model.image.get_parent_images(namespace, repository, image)
|
||||
|
||||
# Compute the diffs and fs for the parent first if necessary
|
||||
parent_trie_path = None
|
||||
|
|
|
@@ -2,7 +2,7 @@
import logging
import json

-from flask import abort, request, jsonify, make_response, Blueprint, session
+from flask import abort, request, jsonify, make_response, session

from app import app
from util.names import parse_repository_name

@@ -17,32 +17,30 @@ from endpoints.v1 import v1_bp
logger = logging.getLogger(__name__)


-@v1_bp.route('/repositories/<path:repository>/tags',
-methods=['GET'])
+@v1_bp.route('/repositories/<path:repository>/tags', methods=['GET'])
@process_auth
@anon_protect
@parse_repository_name
def get_tags(namespace, repository):
permission = ReadRepositoryPermission(namespace, repository)

-if permission.can() or model.repository_is_public(namespace, repository):
-tags = model.list_repository_tags(namespace, repository)
+if permission.can() or model.repository.repository_is_public(namespace, repository):
+tags = model.tag.list_repository_tags(namespace, repository)
tag_map = {tag.name: tag.image.docker_image_id for tag in tags}
return jsonify(tag_map)

abort(403)


-@v1_bp.route('/repositories/<path:repository>/tags/<tag>',
-methods=['GET'])
+@v1_bp.route('/repositories/<path:repository>/tags/<tag>', methods=['GET'])
@process_auth
@anon_protect
@parse_repository_name
def get_tag(namespace, repository, tag):
permission = ReadRepositoryPermission(namespace, repository)

-if permission.can() or model.repository_is_public(namespace, repository):
-tag_image = model.get_tag_image(namespace, repository, tag)
+if permission.can() or model.repository.repository_is_public(namespace, repository):
+tag_image = model.tag.get_tag_image(namespace, repository, tag)
resp = make_response('"%s"' % tag_image.docker_image_id)
resp.headers['Content-Type'] = 'application/json'
return resp

@@ -50,8 +48,7 @@ def get_tag(namespace, repository, tag):
abort(403)


-@v1_bp.route('/repositories/<path:repository>/tags/<tag>',
-methods=['PUT'])
+@v1_bp.route('/repositories/<path:repository>/tags/<tag>', methods=['PUT'])
@process_auth
@anon_protect
@parse_repository_name

@@ -60,7 +57,7 @@ def put_tag(namespace, repository, tag):

if permission.can():
docker_image_id = json.loads(request.data)
-model.create_or_update_tag(namespace, repository, tag, docker_image_id)
+model.tag.create_or_update_tag(namespace, repository, tag, docker_image_id)

# Store the updated tag.
if not 'pushed_tags' in session:

@@ -73,8 +70,7 @@ def put_tag(namespace, repository, tag):
abort(403)


-@v1_bp.route('/repositories/<path:repository>/tags/<tag>',
-methods=['DELETE'])
+@v1_bp.route('/repositories/<path:repository>/tags/<tag>', methods=['DELETE'])
@process_auth
@anon_protect
@parse_repository_name

@@ -82,8 +78,8 @@ def delete_tag(namespace, repository, tag):
permission = ModifyRepositoryPermission(namespace, repository)

if permission.can():
-model.delete_tag(namespace, repository, tag)
-model.garbage_collect_repository(namespace, repository)
+model.tag.delete_tag(namespace, repository, tag)
+model.repository.garbage_collect_repository(namespace, repository)

return make_response('Deleted', 200)
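Reviewer note: the routes above declare a single <path:repository> URL parameter while the view functions receive (namespace, repository). That is the job of the parse_repository_name decorator imported from util.names; its real implementation is not part of this diff, so the sketch below only illustrates the idea, with DEFAULT_NAMESPACE as a stand-in for whatever default the application actually uses.

from functools import wraps

DEFAULT_NAMESPACE = 'library'  # placeholder, not taken from the commit

def parse_repository_name_sketch(f):
  @wraps(f)
  def wrapper(repository, *args, **kwargs):
    # 'someuser/somerepo' -> ('someuser', 'somerepo'); a bare name falls back to the default.
    namespace, _, name = repository.rpartition('/')
    return f(namespace or DEFAULT_NAMESPACE, name, *args, **kwargs)
  return wrapper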
@@ -2,9 +2,8 @@ import logging

from flask import make_response, url_for, request

-import data.model.blob
-
-from app import storage
+from app import storage, app
from data import model
from digest import digest_tools
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from auth.jwt_auth import process_jwt_auth

@@ -22,11 +21,11 @@ logger = logging.getLogger(__name__)
@anon_protect
def check_blob_existence(namespace, repo_name, digest):
try:
-found = data.model.blob.get_blob_by_digest(digest)
+found = model.blob.get_repo_blob_by_digest(namespace, repo_name, digest)

# The response body must be empty for a successful HEAD request
return make_response('')
-except data.model.blob.BlobDoesNotExist:
+except model.BlobDoesNotExist:
abort(404)


@@ -66,7 +65,8 @@ def upload_chunk(namespace, repo_name, upload_uuid):
if digest is not None:
final_blob_location = digest_tools.content_path(digest)
storage.complete_chunked_upload(upload_location, upload_uuid, final_blob_location, digest)
-data.model.blob.store_blob_record(digest, upload_location)
+model.blob.store_blob_record_and_temp_link(namespace, repo_name, digest, upload_location,
+app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])

response = make_response('', 201)
response.headers['Docker-Content-Digest'] = digest
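Reviewer note: in the chunked-upload hunk above, a blob's final location is derived from its digest via digest_tools.content_path, which is what lets complete_chunked_upload move the data into place deterministically. The exact path layout used by digest_tools is not shown in this diff; the snippet below only illustrates the content-addressed idea.

import hashlib

def illustrative_content_path(blob_bytes):
  digest = 'sha256:' + hashlib.sha256(blob_bytes).hexdigest()
  algo, _, hexpart = digest.partition(':')
  # Shard on the first two hex characters so no single directory grows unbounded.
  return digest, '/'.join([algo, hexpart[:2], hexpart])

print(illustrative_content_path(b'example layer data'))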
@@ -52,7 +52,7 @@ def generate_registry_jwt():

namespace, reponame = parse_namespace_repository(namespace_and_repo)
if 'pull' in actions and 'push' in actions:
-repo = model.get_repository(namespace, reponame)
+repo = model.repository.get_repository(namespace, reponame)
if repo:
if not ModifyRepositoryPermission(namespace, reponame):
abort(403)

@@ -60,7 +60,7 @@ def generate_registry_jwt():
if not CreateRepositoryPermission(namespace):
abort(403)
logger.debug('Creating repository: %s/%s', namespace, reponame)
-model.create_repository(namespace, reponame, user)
+model.repository.create_repository(namespace, reponame, user)
elif 'pull' in actions:
if not ReadRepositoryPermission(namespace, reponame):
abort(403)
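Reviewer note, outside the scope of the rename: elsewhere in this commit permission objects are evaluated with .can(), and a bare permission instance is normally truthy, so the checks of the form "if not ModifyRepositoryPermission(namespace, reponame):" in the hunks above may never fire. If the current behaviour is intentional it deserves a comment; otherwise the checks probably want to read as follows (sketch of the suggested form only):

if not ModifyRepositoryPermission(namespace, reponame).can():
  abort(403)

if not ReadRepositoryPermission(namespace, reponame).can():
  abort(403)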
|
@ -7,8 +7,7 @@ from flask import redirect, Blueprint, abort, send_file, make_response
|
|||
from app import app, signer
|
||||
from auth.auth import process_auth
|
||||
from auth.permissions import ReadRepositoryPermission
|
||||
from data import model
|
||||
from data import database
|
||||
from data import model, database
|
||||
from endpoints.trackhelper import track_and_log
|
||||
from endpoints.decorators import anon_protect
|
||||
from storage import Storage
|
||||
|
@ -22,6 +21,7 @@ from formats.aci import ACIImage
|
|||
verbs = Blueprint('verbs', __name__)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, image_json,
|
||||
image_id_list):
|
||||
store = Storage(app)
|
||||
|
@ -29,7 +29,8 @@ def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, imag
|
|||
# For performance reasons, we load the full image list here, cache it, then disconnect from
|
||||
# the database.
|
||||
with database.UseThenDisconnect(app.config):
|
||||
image_list = list(model.get_matching_repository_images(namespace, repository, image_id_list))
|
||||
image_list = list(model.image.get_matching_repository_images(namespace, repository,
|
||||
image_id_list))
|
||||
|
||||
image_list.sort(key=lambda image: image_id_list.index(image.docker_image_id))
|
||||
|
||||
|
@ -66,11 +67,11 @@ def _sign_sythentic_image(verb, linked_storage_uuid, queue_file):
|
|||
if not queue_file.raised_exception:
|
||||
with database.UseThenDisconnect(app.config):
|
||||
try:
|
||||
derived = model.get_storage_by_uuid(linked_storage_uuid)
|
||||
except model.InvalidImageException:
|
||||
derived = model.storage.get_storage_by_uuid(linked_storage_uuid)
|
||||
except model.storage.InvalidImageException:
|
||||
return
|
||||
|
||||
signature_entry = model.find_or_create_storage_signature(derived, signer.name)
|
||||
signature_entry = model.storage.find_or_create_storage_signature(derived, signer.name)
|
||||
signature_entry.signature = signature
|
||||
signature_entry.uploading = False
|
||||
signature_entry.save()
|
||||
|
@ -83,7 +84,7 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location
|
|||
logger.debug('Exception when building %s image %s: %s', verb, linked_storage_uuid, ex)
|
||||
|
||||
with database.UseThenDisconnect(app.config):
|
||||
model.delete_derived_storage_by_uuid(linked_storage_uuid)
|
||||
model.storage.delete_derived_storage_by_uuid(linked_storage_uuid)
|
||||
|
||||
queue_file.add_exception_handler(handle_exception)
|
||||
|
||||
|
@ -95,7 +96,7 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location
|
|||
# Setup the database (since this is a new process) and then disconnect immediately
|
||||
# once the operation completes.
|
||||
with database.UseThenDisconnect(app.config):
|
||||
done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
|
||||
done_uploading = model.storage.get_storage_by_uuid(linked_storage_uuid)
|
||||
done_uploading.uploading = False
|
||||
done_uploading.save()
|
||||
|
||||
|
@ -103,17 +104,17 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location
|
|||
def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
|
||||
permission = ReadRepositoryPermission(namespace, repository)
|
||||
|
||||
if not permission.can() and not model.repository_is_public(namespace, repository):
|
||||
if not permission.can() and not model.repository.repository_is_public(namespace, repository):
|
||||
abort(403)
|
||||
|
||||
# Lookup the requested tag.
|
||||
try:
|
||||
tag_image = model.get_tag_image(namespace, repository, tag)
|
||||
tag_image = model.tag.get_tag_image(namespace, repository, tag)
|
||||
except model.DataModelException:
|
||||
abort(404)
|
||||
|
||||
# Lookup the tag's image and storage.
|
||||
repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
|
||||
repo_image = model.image.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
|
||||
if not repo_image:
|
||||
abort(404)
|
||||
|
||||
|
@ -139,7 +140,7 @@ def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwarg
|
|||
(repo_image, tag_image, image_json) = result
|
||||
|
||||
# Lookup the derived image storage for the verb.
|
||||
derived = model.find_derived_storage(repo_image.storage, verb)
|
||||
derived = model.storage.find_derived_storage(repo_image.storage, verb)
|
||||
if derived is None or derived.uploading:
|
||||
return make_response('', 202)
|
||||
|
||||
|
@ -148,7 +149,7 @@ def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwarg
|
|||
abort(404)
|
||||
|
||||
# Lookup the signature for the verb.
|
||||
signature_entry = model.lookup_storage_signature(derived, signer.name)
|
||||
signature_entry = model.storage.lookup_storage_signature(derived, signer.name)
|
||||
if signature_entry is None:
|
||||
abort(404)
|
||||
|
||||
|
@ -166,7 +167,7 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
|
|||
track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, **kwargs)
|
||||
|
||||
# Lookup/create the derived image storage for the verb.
|
||||
derived = model.find_or_create_derived_storage(repo_image.storage, verb,
|
||||
derived = model.storage.find_or_create_derived_storage(repo_image.storage, verb,
|
||||
store.preferred_locations[0])
|
||||
|
||||
if not derived.uploading:
|
||||
|
|
|
@ -9,7 +9,6 @@ from health.healthcheck import get_healthchecker
|
|||
|
||||
from data import model
|
||||
from data.database import db
|
||||
from data.model.oauth import DatabaseAuthorizationProvider
|
||||
from app import app, billing as stripe, build_logs, avatar, signer, log_archive
|
||||
from auth.auth import require_session_login, process_oauth
|
||||
from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
|
||||
|
@ -281,24 +280,24 @@ def robots():
|
|||
@route_show_if(features.BUILD_SUPPORT)
|
||||
@require_session_login
|
||||
def buildlogs(build_uuid):
|
||||
build = model.get_repository_build(build_uuid)
|
||||
if not build:
|
||||
found_build = model.build.get_repository_build(build_uuid)
|
||||
if not found_build:
|
||||
abort(403)
|
||||
|
||||
repo = build.repository
|
||||
repo = found_build.repository
|
||||
if not ModifyRepositoryPermission(repo.namespace_user.username, repo.name).can():
|
||||
abort(403)
|
||||
|
||||
# If the logs have been archived, just return a URL of the completed archive
|
||||
if build.logs_archived:
|
||||
return redirect(log_archive.get_file_url(build.uuid))
|
||||
if found_build.logs_archived:
|
||||
return redirect(log_archive.get_file_url(found_build.uuid))
|
||||
|
||||
_, logs = build_logs.get_log_entries(build.uuid, 0)
|
||||
_, logs = build_logs.get_log_entries(found_build.uuid, 0)
|
||||
response = jsonify({
|
||||
'logs': [log for log in logs]
|
||||
})
|
||||
|
||||
response.headers["Content-Disposition"] = "attachment;filename=" + build.uuid + ".json"
|
||||
response.headers["Content-Disposition"] = "attachment;filename=" + found_build.uuid + ".json"
|
||||
return response
|
||||
|
||||
|
||||
|
@ -314,7 +313,7 @@ def receipt():
|
|||
if invoice_id:
|
||||
invoice = stripe.Invoice.retrieve(invoice_id)
|
||||
if invoice:
|
||||
user_or_org = model.get_user_or_org_by_customer_id(invoice.customer)
|
||||
user_or_org = model.user.get_user_or_org_by_customer_id(invoice.customer)
|
||||
|
||||
if user_or_org:
|
||||
if user_or_org.organization:
|
||||
|
@ -341,8 +340,8 @@ def confirm_repo_email():
|
|||
record = None
|
||||
|
||||
try:
|
||||
record = model.confirm_email_authorization_for_repo(code)
|
||||
except model.DataModelException as ex:
|
||||
record = model.repository.confirm_email_authorization_for_repo(code)
|
||||
except DataModelException as ex:
|
||||
return render_page_template('confirmerror.html', error_message=ex.message)
|
||||
|
||||
message = """
|
||||
|
@ -363,8 +362,8 @@ def confirm_email():
|
|||
new_email = None
|
||||
|
||||
try:
|
||||
user, new_email, old_email = model.confirm_user_email(code)
|
||||
except model.DataModelException as ex:
|
||||
user, new_email, old_email = model.user.confirm_user_email(code)
|
||||
except DataModelException as ex:
|
||||
return render_page_template('confirmerror.html', error_message=ex.message)
|
||||
|
||||
if new_email:
|
||||
|
@ -379,7 +378,7 @@ def confirm_email():
|
|||
@web.route('/recovery', methods=['GET'])
|
||||
def confirm_recovery():
|
||||
code = request.values['code']
|
||||
user = model.validate_reset_code(code)
|
||||
user = model.user.validate_reset_code(code)
|
||||
|
||||
if user:
|
||||
common_login(user)
|
||||
|
@ -394,22 +393,22 @@ def confirm_recovery():
|
|||
@anon_protect
|
||||
def build_status_badge(namespace, repository):
|
||||
token = request.args.get('token', None)
|
||||
is_public = model.repository_is_public(namespace, repository)
|
||||
is_public = model.repository.repository_is_public(namespace, repository)
|
||||
if not is_public:
|
||||
repo = model.get_repository(namespace, repository)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
if not repo or token != repo.badge_token:
|
||||
abort(404)
|
||||
|
||||
# Lookup the tags for the repository.
|
||||
tags = model.list_repository_tags(namespace, repository)
|
||||
tags = model.tag.list_repository_tags(namespace, repository)
|
||||
is_empty = len(list(tags)) == 0
|
||||
build = model.get_recent_repository_build(namespace, repository)
|
||||
recent_build = model.build.get_recent_repository_build(namespace, repository)
|
||||
|
||||
if not is_empty and (not build or build.phase == 'complete'):
|
||||
if not is_empty and (not recent_build or recent_build.phase == 'complete'):
|
||||
status_name = 'ready'
|
||||
elif build and build.phase == 'error':
|
||||
elif recent_build and recent_build.phase == 'error':
|
||||
status_name = 'failed'
|
||||
elif build and build.phase != 'complete':
|
||||
elif recent_build and recent_build.phase != 'complete':
|
||||
status_name = 'building'
|
||||
else:
|
||||
status_name = 'none'
|
||||
|
@ -419,7 +418,7 @@ def build_status_badge(namespace, repository):
|
|||
return response
|
||||
|
||||
|
||||
class FlaskAuthorizationProvider(DatabaseAuthorizationProvider):
|
||||
class FlaskAuthorizationProvider(model.oauth.DatabaseAuthorizationProvider):
|
||||
def get_authorized_user(self):
|
||||
return current_user.db_user()
|
||||
|
||||
|
@ -579,13 +578,12 @@ def download_logs_archive():
|
|||
def attach_bitbucket_trigger(namespace, repository_name):
|
||||
permission = AdministerRepositoryPermission(namespace, repository_name)
|
||||
if permission.can():
|
||||
repo = model.get_repository(namespace, repository_name)
|
||||
repo = model.repository.get_repository(namespace, repository_name)
|
||||
if not repo:
|
||||
msg = 'Invalid repository: %s/%s' % (namespace, repository_name)
|
||||
abort(404, message=msg)
|
||||
|
||||
trigger = model.create_build_trigger(repo, BitbucketBuildTrigger.service_name(),
|
||||
None,
|
||||
trigger = model.build.create_build_trigger(repo, BitbucketBuildTrigger.service_name(), None,
|
||||
current_user.db_user())
|
||||
|
||||
try:
|
||||
|
@ -596,7 +594,7 @@ def attach_bitbucket_trigger(namespace, repository_name):
|
|||
}
|
||||
|
||||
access_token_secret = oauth_info['access_token_secret']
|
||||
model.update_build_trigger(trigger, config, auth_token=access_token_secret)
|
||||
model.build.update_build_trigger(trigger, config, auth_token=access_token_secret)
|
||||
|
||||
return redirect(oauth_info['url'])
|
||||
except TriggerProviderException:
|
||||
|
@ -612,12 +610,12 @@ def attach_bitbucket_trigger(namespace, repository_name):
|
|||
def attach_custom_build_trigger(namespace, repository_name):
|
||||
permission = AdministerRepositoryPermission(namespace, repository_name)
|
||||
if permission.can():
|
||||
repo = model.get_repository(namespace, repository_name)
|
||||
repo = model.repository.get_repository(namespace, repository_name)
|
||||
if not repo:
|
||||
msg = 'Invalid repository: %s/%s' % (namespace, repository_name)
|
||||
abort(404, message=msg)
|
||||
|
||||
trigger = model.create_build_trigger(repo, CustomBuildTrigger.service_name(),
|
||||
trigger = model.build.create_build_trigger(repo, CustomBuildTrigger.service_name(),
|
||||
None, current_user.db_user())
|
||||
|
||||
repo_path = '%s/%s' % (namespace, repository_name)
|
||||
|
@ -634,16 +632,16 @@ def attach_custom_build_trigger(namespace, repository_name):
|
|||
@process_oauth
|
||||
@parse_repository_name_and_tag
|
||||
@anon_protect
|
||||
def redirect_to_repository(namespace, reponame, tag):
|
||||
def redirect_to_repository(namespace, reponame, tag_name):
|
||||
permission = ReadRepositoryPermission(namespace, reponame)
|
||||
is_public = model.repository_is_public(namespace, reponame)
|
||||
is_public = model.repository.repository_is_public(namespace, reponame)
|
||||
|
||||
if request.args.get('ac-discovery', 0) == 1:
|
||||
return index('')
|
||||
|
||||
if permission.can() or is_public:
|
||||
repository_name = '/'.join([namespace, reponame])
|
||||
return redirect(url_for('web.repository', path=repository_name, tag=tag))
|
||||
return redirect(url_for('web.repository', path=repository_name, tag=tag_name))
|
||||
|
||||
abort(404)
|
||||
|
||||
|
@ -653,7 +651,7 @@ def redirect_to_repository(namespace, reponame, tag):
|
|||
@process_oauth
|
||||
@anon_protect
|
||||
def redirect_to_namespace(namespace):
|
||||
user_or_org = model.get_user_or_org(namespace)
|
||||
user_or_org = model.user.get_user_or_org(namespace)
|
||||
if not user_or_org:
|
||||
abort(404)
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@ def stripe_webhook():
|
|||
logger.debug('Stripe webhook call: %s', request_data)
|
||||
|
||||
customer_id = request_data.get('data', {}).get('object', {}).get('customer', None)
|
||||
user = model.get_user_or_org_by_customer_id(customer_id) if customer_id else None
|
||||
user = model.user.get_user_or_org_by_customer_id(customer_id) if customer_id else None
|
||||
|
||||
event_type = request_data['type'] if 'type' in request_data else None
|
||||
if event_type == 'charge.succeeded':
|
||||
|
@ -73,7 +73,7 @@ def build_trigger_webhook(trigger_uuid, **kwargs):
|
|||
logger.debug('Webhook received with uuid %s', trigger_uuid)
|
||||
|
||||
try:
|
||||
trigger = model.get_build_trigger(trigger_uuid)
|
||||
trigger = model.build.get_build_trigger(trigger_uuid)
|
||||
except model.InvalidBuildTriggerException:
|
||||
# It is ok to return 404 here, since letting an attacker know that a trigger UUID is valid
|
||||
# doesn't leak anything
|
||||
|
@ -101,8 +101,8 @@ def build_trigger_webhook(trigger_uuid, **kwargs):
|
|||
# The payload was malformed
|
||||
abort(400, message=ipe.message)
|
||||
|
||||
pull_robot_name = model.get_pull_robot_name(trigger)
|
||||
repo = model.get_repository(namespace, repository)
|
||||
pull_robot_name = model.build.get_pull_robot_name(trigger)
|
||||
repo = model.repository.get_repository(namespace, repository)
|
||||
start_build(repo, prepared, pull_robot_name=pull_robot_name)
|
||||
|
||||
return make_response('Okay')
|
||||
|
|
|
@@ -1,9 +1,11 @@
import logging
-from data import model
+from data.model import health
from app import build_logs


logger = logging.getLogger(__name__)


def _check_registry_gunicorn(app):
""" Returns the status of the registry gunicorn workers. """
# Compute the URL for checking the registry endpoint. We append a port if and only if the

@@ -24,7 +26,7 @@ def _check_registry_gunicorn(app):

def _check_database(app):
""" Returns the status of the database, as accessed from this instance. """
-return model.check_health(app.config)
+return health.check_health(app.config)

def _check_redis(app):
""" Returns the status of Redis, as accessed from this instance. """
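Reviewer note: each _check_* helper above answers a single question (is this dependency reachable and healthy), and get_healthchecker, imported elsewhere in this commit, is what aggregates them. The aggregation itself is not shown in the diff, so the sketch below only illustrates the shape of that composition.

def compute_health(app, checks):
  # checks maps a service name to a callable taking the Flask app, e.g. _check_database.
  results = {name: check(app) for name, check in checks.items()}
  return all(results.values()), results

# Usage sketch:
#   healthy, detail = compute_health(app, {'database': _check_database, 'redis': _check_redis})
#   status_code = 200 if healthy else 503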
initdb.py (243 changes)

@@ -6,14 +6,18 @@ import calendar
import os

from datetime import datetime, timedelta
+from email.utils import formatdate
-from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
-savepoint_sqlite, savepoint)
+from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
+savepoint)
+from itertools import count
from uuid import UUID
+from threading import Event

-from data.database import *
-from email.utils import formatdate
+from data.database import (db, all_models, Role, TeamRole, Visibility, LoginService,
+BuildTriggerService, AccessTokenKind, LogEntryKind, ImageStorageLocation,
+ImageStorageTransformation, ImageStorageSignatureKind,
+ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind)
from data import model
-from data.model import oauth
from app import app, storage as store

from workers import repositoryactioncounter

@@ -21,6 +25,7 @@ from workers import repositoryactioncounter

logger = logging.getLogger(__name__)


SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
for i in range(1, 10)]
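Reviewer note: the import hunk above adds "from threading import Event"; further down in this file it replaces the module-level boolean db_initialized_for_testing with an Event guarding one-time test database setup (checked with is_set() and flipped with set()). A minimal illustration of that pattern, independent of this codebase:

from threading import Event

_db_initialized = Event()

def setup_once(initialize):
  # Unlike rebinding a module-level boolean, mutating the Event needs no `global` statement
  # and is safe to share between threads that race to set up the same fixture.
  if not _db_initialized.is_set():
    initialize()
    _db_initialized.set()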
@ -39,53 +44,56 @@ TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'
|
|||
|
||||
IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))
|
||||
|
||||
|
||||
def __gen_checksum(image_id):
|
||||
h = hashlib.md5(image_id)
|
||||
return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()
|
||||
csum = hashlib.md5(image_id)
|
||||
return 'tarsum+sha256:' + csum.hexdigest() + csum.hexdigest()
|
||||
|
||||
|
||||
def __gen_image_id(repo, image_num):
|
||||
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
|
||||
|
||||
h = hashlib.md5(str_to_hash)
|
||||
return h.hexdigest() + h.hexdigest()
|
||||
img_id = hashlib.md5(str_to_hash)
|
||||
return img_id.hexdigest() + img_id.hexdigest()
|
||||
|
||||
|
||||
def __gen_image_uuid(repo, image_num):
|
||||
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
|
||||
|
||||
h = hashlib.md5(str_to_hash)
|
||||
return UUID(bytes=h.digest())
|
||||
img_uuid = hashlib.md5(str_to_hash)
|
||||
return UUID(bytes=img_uuid.digest())
|
||||
|
||||
|
||||
global_image_num = count()
|
||||
|
||||
|
||||
global_image_num = [0]
|
||||
def __create_subtree(repo, structure, creator_username, parent, tag_map):
|
||||
num_nodes, subtrees, last_node_tags = structure
|
||||
|
||||
# create the nodes
|
||||
for i in range(num_nodes):
|
||||
image_num = global_image_num[0]
|
||||
global_image_num[0] += 1
|
||||
for model_num in range(num_nodes):
|
||||
image_num = next(global_image_num)
|
||||
docker_image_id = __gen_image_id(repo, image_num)
|
||||
logger.debug('new docker id: %s' % docker_image_id)
|
||||
logger.debug('new docker id: %s', docker_image_id)
|
||||
checksum = __gen_checksum(docker_image_id)
|
||||
|
||||
new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
|
||||
new_image = model.image.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
|
||||
new_image_locations = new_image.storage.locations
|
||||
new_image.storage.uuid = __gen_image_uuid(repo, image_num)
|
||||
new_image.storage.uploading = False
|
||||
new_image.storage.checksum = checksum
|
||||
new_image.storage.save()
|
||||
|
||||
creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=i)
|
||||
creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=model_num)
|
||||
command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
|
||||
command = json.dumps(command_list) if command_list else None
|
||||
new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
|
||||
str(creation_time), 'no comment', command, parent)
|
||||
new_image = model.image.set_image_metadata(docker_image_id, repo.namespace_user.username,
|
||||
repo.name, str(creation_time), 'no comment', command,
|
||||
parent)
|
||||
|
||||
compressed_size = random.randrange(1, 1024 * 1024 * 1024)
|
||||
model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
|
||||
int(compressed_size * 1.4))
|
||||
model.image.set_image_size(docker_image_id, repo.namespace_user.username, repo.name,
|
||||
compressed_size, int(compressed_size * 1.4))
|
||||
|
||||
# Populate the diff file
|
||||
diff_path = store.image_file_diffs_path(new_image.storage.uuid)
|
||||
|
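Reviewer note: the hunk above replaces the mutable single-element list counter (global_image_num = [0]) with itertools.count(), consumed via next(). Minimal before/after illustration:

from itertools import count

counter_list = [0]
def next_id_old():
  value = counter_list[0]
  counter_list[0] += 1
  return value

counter = count()
def next_id_new():
  return next(counter)

assert [next_id_old() for _ in range(3)] == [0, 1, 2]
assert [next_id_new() for _ in range(3)] == [0, 1, 2]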
@ -101,55 +109,52 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
|
|||
last_node_tags = [last_node_tags]
|
||||
|
||||
for tag_name in last_node_tags:
|
||||
tag = model.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
|
||||
new_tag = model.tag.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
|
||||
new_image.docker_image_id)
|
||||
|
||||
tag_map[tag_name] = tag
|
||||
tag_map[tag_name] = new_tag
|
||||
|
||||
for tag_name in last_node_tags:
|
||||
if tag_name[0] == '#':
|
||||
tag = tag_map[tag_name]
|
||||
tag.name = tag_name[1:]
|
||||
tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
|
||||
tag.lifetime_start_ts = tag.lifetime_end_ts - 10
|
||||
tag.save()
|
||||
found_tag = tag_map[tag_name]
|
||||
found_tag.name = tag_name[1:]
|
||||
found_tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
|
||||
found_tag.lifetime_start_ts = found_tag.lifetime_end_ts - 10
|
||||
found_tag.save()
|
||||
|
||||
for subtree in subtrees:
|
||||
__create_subtree(repo, subtree, creator_username, new_image, tag_map)
|
||||
|
||||
|
||||
def __generate_repository(user, name, description, is_public, permissions,
|
||||
structure):
|
||||
repo = model.create_repository(user.username, name, user)
|
||||
def __generate_repository(user_obj, name, description, is_public, permissions, structure):
|
||||
repo = model.repository.create_repository(user_obj.username, name, user_obj)
|
||||
|
||||
if is_public:
|
||||
model.set_repository_visibility(repo, 'public')
|
||||
model.repository.set_repository_visibility(repo, 'public')
|
||||
|
||||
if description:
|
||||
repo.description = description
|
||||
repo.save()
|
||||
|
||||
for delegate, role in permissions:
|
||||
model.set_user_repo_permission(delegate.username, user.username, name,
|
||||
role)
|
||||
model.permission.set_user_repo_permission(delegate.username, user_obj.username, name, role)
|
||||
|
||||
if isinstance(structure, list):
|
||||
for s in structure:
|
||||
__create_subtree(repo, s, user.username, None, {})
|
||||
for leaf in structure:
|
||||
__create_subtree(repo, leaf, user_obj.username, None, {})
|
||||
else:
|
||||
__create_subtree(repo, structure, user.username, None, {})
|
||||
__create_subtree(repo, structure, user_obj.username, None, {})
|
||||
|
||||
return repo
|
||||
|
||||
|
||||
db_initialized_for_testing = False
|
||||
db_initialized_for_testing = Event()
|
||||
testcases = {}
|
||||
|
||||
def finished_database_for_testing(testcase):
|
||||
""" Called when a testcase has finished using the database, indicating that
|
||||
any changes should be discarded.
|
||||
"""
|
||||
global testcases
|
||||
testcases[testcase]['savepoint'].__exit__(True, None, None)
|
||||
|
||||
def setup_database_for_testing(testcase):
|
||||
|
@ -158,12 +163,10 @@ def setup_database_for_testing(testcase):
|
|||
"""
|
||||
|
||||
# Sanity check to make sure we're not killing our prod db
|
||||
db = model.db
|
||||
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
|
||||
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
|
||||
raise RuntimeError('Attempted to wipe production database!')
|
||||
|
||||
global db_initialized_for_testing
|
||||
if not db_initialized_for_testing:
|
||||
if not db_initialized_for_testing.is_set():
|
||||
logger.debug('Setting up DB for testing.')
|
||||
|
||||
# Setup the database.
|
||||
|
@ -173,18 +176,18 @@ def setup_database_for_testing(testcase):
|
|||
|
||||
# Enable foreign key constraints.
|
||||
if not IS_TESTING_REAL_DATABASE:
|
||||
model.db.obj.execute_sql('PRAGMA foreign_keys = ON;')
|
||||
db.obj.execute_sql('PRAGMA foreign_keys = ON;')
|
||||
|
||||
db_initialized_for_testing = True
|
||||
db_initialized_for_testing.set()
|
||||
|
||||
# Create a savepoint for the testcase.
|
||||
test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)
|
||||
|
||||
global testcases
|
||||
testcases[testcase] = {}
|
||||
testcases[testcase]['savepoint'] = test_savepoint
|
||||
testcases[testcase]['savepoint'].__enter__()
|
||||
|
||||
|
||||
def initialize_database():
|
||||
create_model_tables(all_models)
|
||||
|
||||
|
@ -314,8 +317,7 @@ def wipe_database():
|
|||
logger.debug('Wiping all data from the DB.')
|
||||
|
||||
# Sanity check to make sure we're not killing our prod db
|
||||
db = model.db
|
||||
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
|
||||
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
|
||||
raise RuntimeError('Attempted to wipe production database!')
|
||||
|
||||
drop_model_tables(all_models, fail_silently=True)
|
||||
|
@ -324,52 +326,51 @@ def wipe_database():
|
|||
def populate_database():
|
||||
logger.debug('Populating the DB with test data.')
|
||||
|
||||
new_user_1 = model.create_user('devtable', 'password',
|
||||
'jschorr@devtable.com')
|
||||
new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
|
||||
new_user_1.verified = True
|
||||
new_user_1.stripe_id = TEST_STRIPE_ID
|
||||
new_user_1.save()
|
||||
|
||||
disabled_user = model.create_user('disabled', 'password',
|
||||
'jschorr+disabled@devtable.com')
|
||||
disabled_user = model.user.create_user('disabled', 'password', 'jschorr+disabled@devtable.com')
|
||||
disabled_user.verified = True
|
||||
disabled_user.enabled = False
|
||||
disabled_user.save()
|
||||
|
||||
dtrobot = model.create_robot('dtrobot', new_user_1)
|
||||
dtrobot = model.user.create_robot('dtrobot', new_user_1)
|
||||
|
||||
new_user_2 = model.create_user('public', 'password',
|
||||
'jacob.moshenko@gmail.com')
|
||||
new_user_2 = model.user.create_user('public', 'password', 'jacob.moshenko@gmail.com')
|
||||
new_user_2.verified = True
|
||||
new_user_2.save()
|
||||
|
||||
new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
|
||||
new_user_3 = model.user.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
|
||||
new_user_3.verified = True
|
||||
new_user_3.save()
|
||||
|
||||
model.create_robot('anotherrobot', new_user_3)
|
||||
model.user.create_robot('anotherrobot', new_user_3)
|
||||
|
||||
new_user_4 = model.create_user('randomuser', 'password', 'no4@thanks.com')
|
||||
new_user_4 = model.user.create_user('randomuser', 'password', 'no4@thanks.com')
|
||||
new_user_4.verified = True
|
||||
new_user_4.save()
|
||||
|
||||
new_user_5 = model.create_user('unverified', 'password', 'no5@thanks.com')
|
||||
new_user_5 = model.user.create_user('unverified', 'password', 'no5@thanks.com')
|
||||
new_user_5.save()
|
||||
|
||||
reader = model.create_user('reader', 'password', 'no1@thanks.com')
|
||||
reader = model.user.create_user('reader', 'password', 'no1@thanks.com')
|
||||
reader.verified = True
|
||||
reader.save()
|
||||
|
||||
creatoruser = model.create_user('creator', 'password', 'noc@thanks.com')
|
||||
creatoruser = model.user.create_user('creator', 'password', 'noc@thanks.com')
|
||||
creatoruser.verified = True
|
||||
creatoruser.save()
|
||||
|
||||
outside_org = model.create_user('outsideorg', 'password', 'no2@thanks.com')
|
||||
outside_org = model.user.create_user('outsideorg', 'password', 'no2@thanks.com')
|
||||
outside_org.verified = True
|
||||
outside_org.save()
|
||||
|
||||
model.create_notification('test_notification', new_user_1,
|
||||
metadata={'some':'value', 'arr':[1, 2, 3], 'obj':{'a':1, 'b':2}})
|
||||
model.notification.create_notification('test_notification', new_user_1,
|
||||
metadata={'some':'value',
|
||||
'arr':[1, 2, 3],
|
||||
'obj':{'a':1, 'b':2}})
|
||||
|
||||
from_date = datetime.utcnow()
|
||||
to_date = from_date + timedelta(hours=1)
|
||||
|
@ -378,7 +379,7 @@ def populate_database():
|
|||
'to_date': formatdate(calendar.timegm(to_date.utctimetuple())),
|
||||
'reason': 'database migration'
|
||||
}
|
||||
model.create_notification('maintenance', new_user_1, metadata=notification_metadata)
|
||||
model.notification.create_notification('maintenance', new_user_1, metadata=notification_metadata)
|
||||
|
||||
|
||||
__generate_repository(new_user_4, 'randomrepo', 'Random repo repository.', False,
|
||||
|
@ -434,10 +435,10 @@ def populate_database():
|
|||
'Empty repository which is building.',
|
||||
False, [], (0, [], None))
|
||||
|
||||
token = model.create_access_token(building, 'write', 'build-worker')
|
||||
new_token = model.token.create_access_token(building, 'write', 'build-worker')
|
||||
|
||||
trigger = model.create_build_trigger(building, 'github', '123authtoken',
|
||||
new_user_1, pull_robot=dtrobot[0])
|
||||
trigger = model.build.create_build_trigger(building, 'github', '123authtoken', new_user_1,
|
||||
pull_robot=dtrobot[0])
|
||||
trigger.config = json.dumps({
|
||||
'build_source': 'jakedt/testconnect',
|
||||
'subdir': '',
|
||||
|
@ -456,80 +457,73 @@ def populate_database():
|
|||
}
|
||||
}
|
||||
|
||||
record = model.create_email_authorization_for_repo(new_user_1.username, 'simple',
|
||||
record = model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
|
||||
'jschorr@devtable.com')
|
||||
record.confirmed = True
|
||||
record.save()
|
||||
|
||||
model.create_email_authorization_for_repo(new_user_1.username, 'simple',
|
||||
model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
|
||||
'jschorr+other@devtable.com')
|
||||
|
||||
build2 = model.create_repository_build(building, token, job_config,
|
||||
build2 = model.build.create_repository_build(building, new_token, job_config,
|
||||
'68daeebd-a5b9-457f-80a0-4363b882f8ea',
|
||||
'build-name', trigger)
|
||||
build2.uuid = 'deadpork-dead-pork-dead-porkdeadpork'
|
||||
build2.save()
|
||||
|
||||
build3 = model.create_repository_build(building, token, job_config,
|
||||
build3 = model.build.create_repository_build(building, new_token, job_config,
|
||||
'f49d07f9-93da-474d-ad5f-c852107c3892',
|
||||
'build-name', trigger)
|
||||
build3.uuid = 'deadduck-dead-duck-dead-duckdeadduck'
|
||||
build3.save()
|
||||
|
||||
build = model.create_repository_build(building, token, job_config,
|
||||
'701dcc3724fb4f2ea6c31400528343cd',
|
||||
'build-name', trigger)
|
||||
build.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
|
||||
build.save()
|
||||
build1 = model.build.create_repository_build(building, new_token, job_config,
|
||||
'701dcc3724fb4f2ea6c31400528343cd', 'build-name',
|
||||
trigger)
|
||||
build1.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
|
||||
build1.save()
|
||||
|
||||
org = model.create_organization('buynlarge', 'quay@devtable.com',
|
||||
new_user_1)
|
||||
org = model.organization.create_organization('buynlarge', 'quay@devtable.com', new_user_1)
|
||||
org.stripe_id = TEST_STRIPE_ID
|
||||
org.save()
|
||||
|
||||
model.create_robot('coolrobot', org)
|
||||
model.user.create_robot('coolrobot', org)
|
||||
|
||||
oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
|
||||
model.oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
|
||||
'http://localhost:8000/o2c.html', client_id='deadbeef')
|
||||
|
||||
oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
|
||||
model.oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
|
||||
'http://localhost:8000/o2c.html', client_id='deadpork',
|
||||
description='This is another test application')
|
||||
|
||||
model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')
|
||||
|
||||
model.create_robot('neworgrobot', org)
|
||||
model.user.create_robot('neworgrobot', org)
|
||||
|
||||
ownerbot = model.create_robot('ownerbot', org)[0]
|
||||
creatorbot = model.create_robot('creatorbot', org)[0]
|
||||
ownerbot = model.user.create_robot('ownerbot', org)[0]
|
||||
creatorbot = model.user.create_robot('creatorbot', org)[0]
|
||||
|
||||
owners = model.get_organization_team('buynlarge', 'owners')
|
||||
owners = model.team.get_organization_team('buynlarge', 'owners')
|
||||
owners.description = 'Owners have unfetterd access across the entire org.'
|
||||
owners.save()
|
||||
|
||||
org_repo = __generate_repository(org, 'orgrepo',
|
||||
'Repository owned by an org.', False,
|
||||
[(outside_org, 'read')],
|
||||
(4, [], ['latest', 'prod']))
|
||||
org_repo = __generate_repository(org, 'orgrepo', 'Repository owned by an org.', False,
|
||||
[(outside_org, 'read')], (4, [], ['latest', 'prod']))
|
||||
|
||||
org_repo2 = __generate_repository(org, 'anotherorgrepo',
|
||||
'Another repository owned by an org.', False,
|
||||
[],
|
||||
(4, [], ['latest', 'prod']))
|
||||
__generate_repository(org, 'anotherorgrepo', 'Another repository owned by an org.', False,
|
||||
[], (4, [], ['latest', 'prod']))
|
||||
|
||||
creators = model.create_team('creators', org, 'creator',
|
||||
'Creators of orgrepo.')
|
||||
creators = model.team.create_team('creators', org, 'creator', 'Creators of orgrepo.')
|
||||
|
||||
reader_team = model.create_team('readers', org, 'member',
|
||||
'Readers of orgrepo.')
|
||||
model.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username, org_repo.name,
|
||||
'read')
|
||||
reader_team = model.team.create_team('readers', org, 'member', 'Readers of orgrepo.')
|
||||
model.permission.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username,
|
||||
org_repo.name, 'read')
|
||||
|
||||
model.add_user_to_team(new_user_2, reader_team)
|
||||
model.add_user_to_team(reader, reader_team)
|
||||
model.add_user_to_team(ownerbot, owners)
|
||||
model.add_user_to_team(creatorbot, creators)
|
||||
model.add_user_to_team(creatoruser, creators)
|
||||
model.team.add_user_to_team(new_user_2, reader_team)
|
||||
model.team.add_user_to_team(reader, reader_team)
|
||||
model.team.add_user_to_team(ownerbot, owners)
|
||||
model.team.add_user_to_team(creatorbot, creators)
|
||||
model.team.add_user_to_team(creatoruser, creators)
|
||||
|
||||
__generate_repository(new_user_1, 'superwide', None, False, [],
|
||||
[(10, [], 'latest2'),
|
||||
|
@ -547,69 +541,72 @@ def populate_database():
|
|||
(2, [], 'latest15'),
|
||||
(2, [], 'latest16'),
|
||||
(2, [], 'latest17'),
|
||||
(2, [], 'latest18'),])
|
||||
(2, [], 'latest18')])
|
||||
|
||||
model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_user=new_user_2)
|
||||
model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_team=reader_team)
|
||||
model.add_prototype_permission(org, 'write', activating_user=new_user_2, delegate_user=new_user_1)
|
||||
model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
|
||||
delegate_user=new_user_2)
|
||||
model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
|
||||
delegate_team=reader_team)
|
||||
model.permission.add_prototype_permission(org, 'write', activating_user=new_user_2,
|
||||
delegate_user=new_user_1)
|
||||
|
||||
today = datetime.today()
|
||||
week_ago = today - timedelta(6)
|
||||
six_ago = today - timedelta(5)
|
||||
four_ago = today - timedelta(4)
|
||||
|
||||
model.log_action('org_create_team', org.username, performer=new_user_1,
|
||||
model.log.log_action('org_create_team', org.username, performer=new_user_1,
|
||||
timestamp=week_ago, metadata={'team': 'readers'})
|
||||
|
||||
model.log_action('org_set_team_role', org.username, performer=new_user_1,
|
||||
model.log.log_action('org_set_team_role', org.username, performer=new_user_1,
|
||||
timestamp=week_ago,
|
||||
metadata={'team': 'readers', 'role': 'read'})
|
||||
|
||||
model.log_action('create_repo', org.username, performer=new_user_1,
|
||||
model.log.log_action('create_repo', org.username, performer=new_user_1,
|
||||
repository=org_repo, timestamp=week_ago,
|
||||
metadata={'namespace': org.username, 'repo': 'orgrepo'})
|
||||
|
||||
model.log_action('change_repo_permission', org.username,
|
||||
model.log.log_action('change_repo_permission', org.username,
|
||||
performer=new_user_2, repository=org_repo,
|
||||
timestamp=six_ago,
|
||||
metadata={'username': new_user_1.username,
|
||||
'repo': 'orgrepo', 'role': 'admin'})
|
||||
|
||||
model.log_action('change_repo_permission', org.username,
|
||||
model.log.log_action('change_repo_permission', org.username,
|
||||
performer=new_user_1, repository=org_repo,
|
||||
timestamp=six_ago,
|
||||
metadata={'username': new_user_2.username,
|
||||
'repo': 'orgrepo', 'role': 'read'})
|
||||
|
||||
model.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
|
||||
model.log.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
|
||||
repository=org_repo, timestamp=four_ago,
|
||||
metadata={'repo': 'orgrepo', 'token': 'deploytoken'})
|
||||
|
||||
model.log_action('push_repo', org.username, performer=new_user_2,
|
||||
model.log.log_action('push_repo', org.username, performer=new_user_2,
|
||||
repository=org_repo, timestamp=today,
|
||||
metadata={'username': new_user_2.username,
|
||||
'repo': 'orgrepo'})
|
||||
|
||||
model.log_action('pull_repo', org.username, performer=new_user_2,
|
||||
model.log.log_action('pull_repo', org.username, performer=new_user_2,
|
||||
repository=org_repo, timestamp=today,
|
||||
metadata={'username': new_user_2.username,
|
||||
'repo': 'orgrepo'})
|
||||
|
||||
model.log_action('pull_repo', org.username, repository=org_repo,
|
||||
model.log.log_action('pull_repo', org.username, repository=org_repo,
|
||||
timestamp=today,
|
||||
metadata={'token': 'sometoken', 'token_code': 'somecode',
|
||||
'repo': 'orgrepo'})
|
||||
|
||||
model.log_action('delete_tag', org.username, performer=new_user_2,
|
||||
model.log.log_action('delete_tag', org.username, performer=new_user_2,
|
||||
repository=org_repo, timestamp=today,
|
||||
metadata={'username': new_user_2.username,
|
||||
'repo': 'orgrepo', 'tag': 'sometag'})
|
||||
|
||||
model.log_action('pull_repo', org.username, repository=org_repo,
|
||||
model.log.log_action('pull_repo', org.username, repository=org_repo,
|
||||
timestamp=today,
|
||||
metadata={'token_code': 'somecode', 'repo': 'orgrepo'})
|
||||
|
||||
model.log_action('build_dockerfile', new_user_1.username, repository=building,
|
||||
model.log.log_action('build_dockerfile', new_user_1.username, repository=building,
|
||||
timestamp=today,
|
||||
metadata={'repo': 'building', 'namespace': new_user_1.username,
|
||||
'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
|
||||
|
@ -622,7 +619,7 @@ if __name__ == '__main__':
|
|||
log_level = getattr(logging, app.config['LOGGING_LEVEL'])
|
||||
logging.basicConfig(level=log_level)
|
||||
|
||||
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
|
||||
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
|
||||
raise RuntimeError('Attempted to initialize production database!')
|
||||
|
||||
initialize_database()
|
||||
|
|
|
@@ -2,6 +2,7 @@ import os
import shutil
import hashlib
import io
+import logging

from uuid import uuid4

@@ -9,6 +10,9 @@ from storage.basestorage import BaseStorageV2
from digest import digest_tools


+logger = logging.getLogger(__name__)


class LocalStorage(BaseStorageV2):

def __init__(self, storage_path):

@@ -134,8 +138,12 @@ class LocalStorage(BaseStorageV2):
msg = 'Given: {0} Computed: {1}'.format(digest_to_verify, content_digest)
raise digest_tools.InvalidDigestException(msg)

-final_path = self._init_path(final_path, create=True)
-shutil.move(self._init_path(content_path), final_path)
+final_path_abs = self._init_path(final_path, create=True)
+if not self.exists(final_path_abs):
+logger.debug('Moving content into place at path: %s', final_path_abs)
+shutil.move(self._init_path(content_path), final_path_abs)
+else:
+logger.debug('Content already exists at path: %s', final_path_abs)
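Reviewer note: the LocalStorage hunk above makes completing an upload idempotent, since a blob stored under a content-addressed path may already exist when the same content is pushed twice. A self-contained sketch of the same pattern, independent of the storage classes in this repository:

import os
import shutil

def move_into_place(src_path, dest_path):
  # If identical content was already promoted, keep the existing copy and drop the new one.
  if os.path.exists(dest_path):
    os.remove(src_path)
    return False
  dest_dir = os.path.dirname(dest_path)
  if not os.path.isdir(dest_dir):
    os.makedirs(dest_dir)
  shutil.move(src_path, dest_path)
  return True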
Binary file not shown.
@@ -18,7 +18,7 @@ import features
import tarfile

from cStringIO import StringIO
-from util.checksums import compute_simple
+from digest.checksums import compute_simple

try:
app.register_blueprint(v1_bp, url_prefix='/v1')
@@ -82,7 +82,7 @@ class ApiTestCase(unittest.TestCase):

with client.session_transaction() as sess:
if auth_username:
-loaded = model.get_user(auth_username)
+loaded = model.user.get_user(auth_username)
sess['user_id'] = loaded.uuid
sess['login_time'] = datetime.datetime.now()
sess[CSRF_TOKEN_KEY] = CSRF_TOKEN
@@ -14,7 +14,7 @@ from endpoints.webhooks import webhooks
from endpoints.trigger import BuildTriggerHandler
from app import app
from initdb import setup_database_for_testing, finished_database_for_testing
-from data import model, database
+from data import database, model

from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, OrganizationTeam
from endpoints.api.tag import RepositoryTagImages, RepositoryTag, RevertTag, ListRepositoryTags
@ -183,8 +183,8 @@ class ApiTestCase(unittest.TestCase):
|
|||
return parsed
|
||||
|
||||
def assertInTeam(self, data, membername):
|
||||
for memberData in data['members']:
|
||||
if memberData['name'] == membername:
|
||||
for member_data in data['members']:
|
||||
if member_data['name'] == membername:
|
||||
return
|
||||
|
||||
self.fail(membername + ' not found in team: ' + py_json.dumps(data))
|
||||
|
@ -367,7 +367,7 @@ class TestConvertToOrganization(ApiTestCase):
|
|||
self.assertEqual(True, json['success'])
|
||||
|
||||
# Verify the organization exists.
|
||||
organization = model.get_organization(READ_ACCESS_USER)
|
||||
organization = model.organization.get_organization(READ_ACCESS_USER)
|
||||
assert organization is not None
|
||||
|
||||
# Verify the admin user is the org's admin.
|
||||
|
@ -389,7 +389,7 @@ class TestConvertToOrganization(ApiTestCase):
|
|||
self.assertEqual(True, json['success'])
|
||||
|
||||
# Verify the organization exists.
|
||||
organization = model.get_organization(READ_ACCESS_USER)
|
||||
organization = model.organization.get_organization(READ_ACCESS_USER)
|
||||
assert organization is not None
|
||||
|
||||
# Verify the admin user is the org's admin.
|
||||
|
@ -449,7 +449,8 @@ class TestCreateNewUser(ApiTestCase):
|
|||
email='test@example.com'),
|
||||
expected_code=400)
|
||||
|
||||
self.assertEquals('Invalid username a: Username must be between 4 and 30 characters in length', json['error_description'])
|
||||
self.assertEquals('Invalid username a: Username must be between 4 and 30 characters in length',
|
||||
json['error_description'])
|
||||
|
||||
def test_trycreateregexmismatch(self):
|
||||
json = self.postJsonResponse(User,
|
||||
|
@ -458,23 +459,22 @@ class TestCreateNewUser(ApiTestCase):
|
|||
email='test@example.com'),
|
||||
expected_code=400)
|
||||
|
||||
self.assertEquals('Invalid username auserName: Username must match expression [a-z0-9_]+', json['error_description'])
|
||||
self.assertEquals('Invalid username auserName: Username must match expression [a-z0-9_]+',
|
||||
json['error_description'])
|
||||
|
||||
def test_createuser(self):
|
||||
data = self.postJsonResponse(User,
|
||||
data=NEW_USER_DETAILS,
|
||||
expected_code=200)
|
||||
data = self.postJsonResponse(User, data=NEW_USER_DETAILS, expected_code=200)
|
||||
self.assertEquals(True, data['awaiting_verification'])
|
||||
|
||||
def test_createuser_withteaminvite(self):
|
||||
inviter = model.get_user(ADMIN_ACCESS_USER)
|
||||
team = model.get_organization_team(ORGANIZATION, 'owners')
|
||||
invite = model.add_or_invite_to_team(inviter, team, None, 'foo@example.com')
|
||||
inviter = model.user.get_user(ADMIN_ACCESS_USER)
|
||||
team = model.team.get_organization_team(ORGANIZATION, 'owners')
|
||||
invite = model.team.add_or_invite_to_team(inviter, team, None, 'foo@example.com')
|
||||
|
||||
details = {
|
||||
'invite_code': invite.invite_token
|
||||
}
|
||||
details.update(NEW_USER_DETAILS);
|
||||
details.update(NEW_USER_DETAILS)
|
||||
|
||||
data = self.postJsonResponse(User, data=details, expected_code=200)
|
||||
self.assertEquals(True, data['awaiting_verification'])
|
||||
|
@ -644,7 +644,7 @@ class TestCreateOrganization(ApiTestCase):
|
|||
self.assertEquals('"Created"', data)
|
||||
|
||||
# Ensure the org was created.
|
||||
organization = model.get_organization('neworg')
|
||||
organization = model.organization.get_organization('neworg')
|
||||
assert organization is not None
|
||||
|
||||
# Verify the admin user is the org's admin.
|
@ -657,13 +657,11 @@ class TestCreateOrganization(ApiTestCase):
class TestGetOrganization(ApiTestCase):
def test_unknownorg(self):
self.login(ADMIN_ACCESS_USER)
self.getResponse(Organization, params=dict(orgname='notvalid'),
expected_code=404)
self.getResponse(Organization, params=dict(orgname='notvalid'), expected_code=404)

def test_cannotaccess(self):
self.login(NO_ACCESS_USER)
self.getResponse(Organization, params=dict(orgname=ORGANIZATION),
expected_code=200)
self.getResponse(Organization, params=dict(orgname=ORGANIZATION), expected_code=200)

def test_getorganization(self):
self.login(READ_ACCESS_USER)

@ -796,8 +794,8 @@ class TestUpdateOrganizationPrototypes(ApiTestCase):

# Update a prototype.
json = self.putJsonResponse(PermissionPrototype,
params=dict(orgname=ORGANIZATION,
prototypeid=pid), data=dict(role='admin'))
params=dict(orgname=ORGANIZATION, prototypeid=pid),
data=dict(role='admin'))

self.assertEquals('admin', json['role'])

@ -848,17 +846,18 @@ class TestRemoveOrganizationMember(ApiTestCase):
self.login(ADMIN_ACCESS_USER)

# Add read user as a direct permission on the admin user's repo.
model.set_user_repo_permission(READ_ACCESS_USER, ADMIN_ACCESS_USER, 'simple', 'read')
model.permission.set_user_repo_permission(READ_ACCESS_USER, ADMIN_ACCESS_USER, 'simple', 'read')

# Verify the user has a permission on the admin user's repo.
admin_perms = [p.user.username for p in model.get_all_repo_users(ADMIN_ACCESS_USER, 'simple')]
admin_perms = [p.user.username
for p in model.user.get_all_repo_users(ADMIN_ACCESS_USER, 'simple')]
assert READ_ACCESS_USER in admin_perms

# Add read user as a direct permission on the org repo.
model.set_user_repo_permission(READ_ACCESS_USER, ORGANIZATION, ORG_REPO, 'read')
model.permission.set_user_repo_permission(READ_ACCESS_USER, ORGANIZATION, ORG_REPO, 'read')

# Verify the user has a permission on the org repo.
org_perms = [p.user.username for p in model.get_all_repo_users(ORGANIZATION, ORG_REPO)]
org_perms = [p.user.username for p in model.user.get_all_repo_users(ORGANIZATION, ORG_REPO)]
assert READ_ACCESS_USER in org_perms

# Remove the user from the org.

@ -867,10 +866,11 @@ class TestRemoveOrganizationMember(ApiTestCase):

# Verify that the user's permission on the org repo is gone, but it is still
# present on the other repo.
org_perms = [p.user.username for p in model.get_all_repo_users(ORGANIZATION, ORG_REPO)]
org_perms = [p.user.username for p in model.user.get_all_repo_users(ORGANIZATION, ORG_REPO)]
assert not READ_ACCESS_USER in org_perms

admin_perms = [p.user.username for p in model.get_all_repo_users(ADMIN_ACCESS_USER, 'simple')]
admin_perms = [p.user.username
for p in model.user.get_all_repo_users(ADMIN_ACCESS_USER, 'simple')]
assert READ_ACCESS_USER in admin_perms

@ -953,8 +953,7 @@ class TestDeleteOrganizationTeam(ApiTestCase):
def test_attemptdeleteowners(self):
self.login(ADMIN_ACCESS_USER)

self.deleteResponse(OrganizationTeam,
params=dict(orgname=ORGANIZATION, teamname='owners'),
self.deleteResponse(OrganizationTeam, params=dict(orgname=ORGANIZATION, teamname='owners'),
expected_code=400)

@ -1052,11 +1051,11 @@ class TestUpdateOrganizationTeamMember(ApiTestCase):

class TestAcceptTeamMemberInvite(ApiTestCase):
def assertInTeam(self, data, membername):
for memberData in data['members']:
if memberData['name'] == membername:
for member_data in data['members']:
if member_data['name'] == membername:
return

self.fail(membername + ' not found in team: ' + json.dumps(data))
self.fail(membername + ' not found in team: ' + py_json.dumps(data))

def test_accept(self):
self.login(ADMIN_ACCESS_USER)

@ -1073,12 +1072,11 @@ class TestAcceptTeamMemberInvite(ApiTestCase):
self.login(membername)

# Accept the invite.
user = model.get_user(membername)
invites = list(model.lookup_team_invites(user))
user = model.user.get_user(membername)
invites = list(model.team.lookup_team_invites(user))
self.assertEquals(1, len(invites))

self.putJsonResponse(TeamMemberInvite,
params=dict(code=invites[0].invite_token))
self.putJsonResponse(TeamMemberInvite, params=dict(code=invites[0].invite_token))

# Verify the user is now on the team.
json = self.getJsonResponse(TeamMemberList,

@ -1107,8 +1105,8 @@ class TestDeclineTeamMemberInvite(ApiTestCase):
self.assertEquals(True, response['invited'])

# Try to decline the invite.
user = model.get_user(membername)
invites = list(model.lookup_team_invites(user))
user = model.user.get_user(membername)
invites = list(model.team.lookup_team_invites(user))
self.assertEquals(1, len(invites))

self.deleteResponse(TeamMemberInvite,

@ -1131,8 +1129,8 @@ class TestDeclineTeamMemberInvite(ApiTestCase):
self.login(membername)

# Decline the invite.
user = model.get_user(membername)
invites = list(model.lookup_team_invites(user))
user = model.user.get_user(membername)
invites = list(model.team.lookup_team_invites(user))
self.assertEquals(1, len(invites))

self.deleteResponse(TeamMemberInvite,

@ -1372,8 +1370,7 @@ class TestDeleteRepository(ApiTestCase):
def test_deleterepo(self):
self.login(ADMIN_ACCESS_USER)

self.deleteResponse(Repository,
params=dict(repository=self.SIMPLE_REPO))
self.deleteResponse(Repository, params=dict(repository=self.SIMPLE_REPO))

# Verify the repo was deleted.
self.getResponse(Repository,

@ -1383,8 +1380,7 @@ class TestDeleteRepository(ApiTestCase):
def test_deleterepo2(self):
self.login(ADMIN_ACCESS_USER)

self.deleteResponse(Repository,
params=dict(repository=self.COMPLEX_REPO))
self.deleteResponse(Repository, params=dict(repository=self.COMPLEX_REPO))

# Verify the repo was deleted.
self.getResponse(Repository,
@ -1395,43 +1391,43 @@ class TestDeleteRepository(ApiTestCase):
self.login(ADMIN_ACCESS_USER)

# Make sure the repository has some images and tags.
self.assertTrue(len(list(model.get_repository_images(ADMIN_ACCESS_USER, 'complex'))) > 0)
self.assertTrue(len(list(model.list_repository_tags(ADMIN_ACCESS_USER, 'complex'))) > 0)
self.assertTrue(len(list(model.image.get_repository_images(ADMIN_ACCESS_USER, 'complex'))) > 0)
self.assertTrue(len(list(model.tag.list_repository_tags(ADMIN_ACCESS_USER, 'complex'))) > 0)

# Add some data for the repository, in addition to its already existing images and tags.
repository = model.get_repository(ADMIN_ACCESS_USER, 'complex')
repository = model.repository.get_repository(ADMIN_ACCESS_USER, 'complex')

# Create some access tokens.
access_token = model.create_access_token(repository, 'read')
model.create_access_token(repository, 'write')
access_token = model.token.create_access_token(repository, 'read')
model.token.create_access_token(repository, 'write')

delegate_token = model.create_delegate_token(ADMIN_ACCESS_USER, 'complex', 'sometoken', 'read')
model.create_delegate_token(ADMIN_ACCESS_USER, 'complex', 'sometoken', 'write')
delegate_token = model.token.create_delegate_token(ADMIN_ACCESS_USER, 'complex', 'sometoken',
'read')
model.token.create_delegate_token(ADMIN_ACCESS_USER, 'complex', 'sometoken', 'write')

# Create some repository builds.
model.create_repository_build(repository, access_token, {}, 'someid', 'foobar')
model.create_repository_build(repository, delegate_token, {}, 'someid2', 'foobar2')
model.build.create_repository_build(repository, access_token, {}, 'someid', 'foobar')
model.build.create_repository_build(repository, delegate_token, {}, 'someid2', 'foobar2')

# Create some notifications.
model.create_repo_notification(repository, 'repo_push', 'hipchat', {})
model.create_repo_notification(repository, 'build_queued', 'slack', {})
model.notification.create_repo_notification(repository, 'repo_push', 'hipchat', {})
model.notification.create_repo_notification(repository, 'build_queued', 'slack', {})

# Create some logs.
model.log_action('push_repo', ADMIN_ACCESS_USER, repository=repository)
model.log_action('push_repo', ADMIN_ACCESS_USER, repository=repository)
model.log.log_action('push_repo', ADMIN_ACCESS_USER, repository=repository)
model.log.log_action('push_repo', ADMIN_ACCESS_USER, repository=repository)

# Create some build triggers.
user = model.get_user(ADMIN_ACCESS_USER)
model.create_build_trigger(repository, 'github', 'sometoken', user)
model.create_build_trigger(repository, 'github', 'anothertoken', user)
user = model.user.get_user(ADMIN_ACCESS_USER)
model.build.create_build_trigger(repository, 'github', 'sometoken', user)
model.build.create_build_trigger(repository, 'github', 'anothertoken', user)

# Create some email authorizations.
model.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'a@b.com')
model.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'b@c.com')
model.repository.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'a@b.com')
model.repository.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'b@c.com')

# Delete the repository.
self.deleteResponse(Repository,
params=dict(repository=self.COMPLEX_REPO))
self.deleteResponse(Repository, params=dict(repository=self.COMPLEX_REPO))

# Verify the repo was deleted.
self.getResponse(Repository,
@ -1665,7 +1661,8 @@ class TestRepoBuilds(ApiTestCase):

# Check the status endpoint.
status_json = self.getJsonResponse(RepositoryBuildStatus,
params=dict(repository=ADMIN_ACCESS_USER + '/building', build_uuid=build['id']))
params=dict(repository=ADMIN_ACCESS_USER + '/building',
build_uuid=build['id']))

self.assertEquals(status_json['id'], build['id'])
self.assertEquals(status_json['resource_key'], build['resource_key'])

@ -1742,8 +1739,9 @@ class TestRepositoryEmail(ApiTestCase):
self.login(ADMIN_ACCESS_USER)

# Verify the e-mail address is not authorized.
json = self.getResponse(RepositoryAuthorizedEmail,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', email='test@example.com'),
self.getResponse(RepositoryAuthorizedEmail,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
email='test@example.com'),
expected_code=404)

def test_emailnotauthorized_butsent(self):

@ -1751,7 +1749,8 @@ class TestRepositoryEmail(ApiTestCase):

# Verify the e-mail address is not authorized.
json = self.getJsonResponse(RepositoryAuthorizedEmail,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', email='jschorr+other@devtable.com'))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
email='jschorr+other@devtable.com'))

self.assertEquals(False, json['confirmed'])
self.assertEquals(ADMIN_ACCESS_USER, json['namespace'])

@ -1763,7 +1762,8 @@ class TestRepositoryEmail(ApiTestCase):

# Verify the e-mail address is authorized.
json = self.getJsonResponse(RepositoryAuthorizedEmail,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', email='jschorr@devtable.com'))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
email='jschorr@devtable.com'))

self.assertEquals(True, json['confirmed'])
self.assertEquals(ADMIN_ACCESS_USER, json['namespace'])

@ -1775,7 +1775,8 @@ class TestRepositoryEmail(ApiTestCase):

# Send the email.
json = self.postJsonResponse(RepositoryAuthorizedEmail,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', email='jschorr+foo@devtable.com'))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
email='jschorr+foo@devtable.com'))

self.assertEquals(False, json['confirmed'])
self.assertEquals(ADMIN_ACCESS_USER, json['namespace'])

@ -1789,7 +1790,8 @@ class TestRepositoryNotifications(ApiTestCase):
# Add a notification.
json = self.postJsonResponse(RepositoryNotificationList,
params=dict(repository=ADMIN_ACCESS_USER + '/simple'),
data=dict(config={'url': 'http://example.com'}, event='repo_push', method='webhook'),
data=dict(config={'url': 'http://example.com'}, event='repo_push',
method='webhook'),
expected_code=201)

self.assertEquals('repo_push', json['event'])

@ -1886,19 +1888,21 @@ class TestRevertTag(ApiTestCase):
self.login(ADMIN_ACCESS_USER)

json = self.getJsonResponse(ListRepositoryTags,
params=dict(repository=ADMIN_ACCESS_USER + '/history', tag='latest'))
params=dict(repository=ADMIN_ACCESS_USER + '/history',
tag='latest'))

self.assertEquals(2, len(json['tags']))
self.assertFalse('end_ts' in json['tags'][0])

previous_image_id = json['tags'][1]['docker_image_id']

self.postJsonResponse(RevertTag,
params=dict(repository=ADMIN_ACCESS_USER + '/history', tag='latest'),
self.postJsonResponse(RevertTag, params=dict(repository=ADMIN_ACCESS_USER + '/history',
tag='latest'),
data=dict(image=previous_image_id))

json = self.getJsonResponse(ListRepositoryTags,
params=dict(repository=ADMIN_ACCESS_USER + '/history', tag='latest'))
params=dict(repository=ADMIN_ACCESS_USER + '/history',
tag='latest'))
self.assertEquals(3, len(json['tags']))
self.assertFalse('end_ts' in json['tags'][0])
self.assertEquals(previous_image_id, json['tags'][0]['docker_image_id'])
@ -1917,7 +1921,8 @@ class TestListAndDeleteTag(ApiTestCase):

# List the images for staging.
json = self.getJsonResponse(RepositoryTagImages,
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='staging'))
params=dict(repository=ADMIN_ACCESS_USER + '/complex',
tag='staging'))

staging_images = json['images']
assert len(prod_images) == len(staging_images) + 1

@ -1934,7 +1939,8 @@ class TestListAndDeleteTag(ApiTestCase):

# Make sure the staging images are still there.
json = self.getJsonResponse(RepositoryTagImages,
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='staging'))
params=dict(repository=ADMIN_ACCESS_USER + '/complex',
tag='staging'))

self.assertEquals(staging_images, json['images'])

@ -1946,7 +1952,8 @@ class TestListAndDeleteTag(ApiTestCase):

# Make sure the tag is present.
json = self.getJsonResponse(RepositoryTagImages,
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='sometag'))
params=dict(repository=ADMIN_ACCESS_USER + '/complex',
tag='sometag'))

sometag_images = json['images']
self.assertEquals(sometag_images, staging_images)

@ -1959,7 +1966,8 @@ class TestListAndDeleteTag(ApiTestCase):

# Make sure the tag has moved.
json = self.getJsonResponse(RepositoryTagImages,
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='sometag'))
params=dict(repository=ADMIN_ACCESS_USER + '/complex',
tag='sometag'))

sometag_new_images = json['images']
self.assertEquals(1, len(sometag_new_images))

@ -1994,8 +2002,9 @@ class TestRepoPermissions(ApiTestCase):
params=dict(repository=namespace + '/' + repo))['permissions']

def listTeamPermissions(self):
return self.getJsonResponse(RepositoryTeamPermissionList,
params=dict(repository=ORGANIZATION + '/' + ORG_REPO))['permissions']
response = self.getJsonResponse(RepositoryTeamPermissionList,
params=dict(repository=ORGANIZATION + '/' + ORG_REPO))
return response['permissions']

def test_userpermissions_underorg(self):
self.login(ADMIN_ACCESS_USER)

@ -2009,7 +2018,8 @@ class TestRepoPermissions(ApiTestCase):

# Add another user.
self.putJsonResponse(RepositoryUserPermission,
params=dict(repository=ORGANIZATION + '/' + ORG_REPO, username=ADMIN_ACCESS_USER),
params=dict(repository=ORGANIZATION + '/' + ORG_REPO,
username=ADMIN_ACCESS_USER),
data=dict(role='admin'))

# Verify the user is present.

@ -2034,7 +2044,8 @@ class TestRepoPermissions(ApiTestCase):

# Add another user.
self.putJsonResponse(RepositoryUserPermission,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', username=NO_ACCESS_USER),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
username=NO_ACCESS_USER),
data=dict(role='read'))

# Verify the user is present.

@ -2046,12 +2057,14 @@ class TestRepoPermissions(ApiTestCase):
self.assertFalse('is_org_member' in permissions[NO_ACCESS_USER])

json = self.getJsonResponse(RepositoryUserPermission,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', username=NO_ACCESS_USER))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
username=NO_ACCESS_USER))
self.assertEquals('read', json['role'])

# Change the user's permissions.
self.putJsonResponse(RepositoryUserPermission,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', username=NO_ACCESS_USER),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
username=NO_ACCESS_USER),
data=dict(role='admin'))

# Verify.

@ -2063,7 +2076,8 @@ class TestRepoPermissions(ApiTestCase):

# Delete the user's permission.
self.deleteResponse(RepositoryUserPermission,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', username=NO_ACCESS_USER))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
username=NO_ACCESS_USER))

# Verify.
permissions = self.listUserPermissions()

@ -2095,7 +2109,8 @@ class TestRepoPermissions(ApiTestCase):
self.assertEquals('write', permissions['owners']['role'])

json = self.getJsonResponse(RepositoryTeamPermission,
params=dict(repository=ORGANIZATION + '/' + ORG_REPO, teamname='owners'))
params=dict(repository=ORGANIZATION + '/' + ORG_REPO,
teamname='owners'))
self.assertEquals('write', json['role'])

# Change the team's permissions.

@ -2145,7 +2160,8 @@ class TestApiTokens(ApiTestCase):
self.assertEquals('mytoken', tokens[token_code]['friendlyName'])

json = self.getJsonResponse(RepositoryToken,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', code=token_code))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
code=token_code))
self.assertEquals(tokens[token_code], json)

# Change the token's permission.

@ -2155,7 +2171,8 @@ class TestApiTokens(ApiTestCase):

# Verify.
json = self.getJsonResponse(RepositoryToken,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', code=token_code))
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
code=token_code))
self.assertEquals('write', json['role'])

# Delete the token.
@ -2306,8 +2323,7 @@ class TestUserRobots(ApiTestCase):
token = json['token']

# Regenerate the robot.
json = self.postJsonResponse(RegenerateUserRobot,
params=dict(robot_shortname='bender'),
json = self.postJsonResponse(RegenerateUserRobot, params=dict(robot_shortname='bender'),
expected_code=200)

# Verify the token changed.

@ -2345,16 +2361,14 @@ class TestOrgRobots(ApiTestCase):
data=dict(role='read'))

# Add a permission prototype with the robot as the activating user.
self.postJsonResponse(PermissionPrototypeList,
params=dict(orgname=ORGANIZATION),
self.postJsonResponse(PermissionPrototypeList, params=dict(orgname=ORGANIZATION),
data=dict(role='read',
activating_user={'name': membername},
delegate={'kind': 'user',
'name': membername}))

# Add a permission prototype with the robot as the delegating user.
self.postJsonResponse(PermissionPrototypeList,
params=dict(orgname=ORGANIZATION),
self.postJsonResponse(PermissionPrototypeList, params=dict(orgname=ORGANIZATION),
data=dict(role='read',
delegate={'kind': 'user',
'name': membername}))

@ -2363,38 +2377,40 @@ class TestOrgRobots(ApiTestCase):
database.BuildTriggerService.create(name='fakeservice')

# Add a new fake trigger.
repo = model.get_repository(ORGANIZATION, ORG_REPO)
user = model.get_user(ADMIN_ACCESS_USER)
pull_robot = model.get_user(membername)
trigger = model.create_build_trigger(repo, 'fakeservice', 'sometoken', user, pull_robot=pull_robot)
repo = model.repository.get_repository(ORGANIZATION, ORG_REPO)
user = model.user.get_user(ADMIN_ACCESS_USER)
pull_robot = model.user.get_user(membername)
trigger = model.build.create_build_trigger(repo, 'fakeservice', 'sometoken', user,
pull_robot=pull_robot)

# Add a fake build of the fake build trigger.
token = model.create_access_token(repo, 'write', kind='build-worker',
token = model.token.create_access_token(repo, 'write', kind='build-worker',
friendly_name='Repository Build Token')

build = model.create_repository_build(repo, token, {}, 'fake-dockerfile', 'fake-name', trigger,
pull_robot_name=membername)
build = model.build.create_repository_build(repo, token, {}, 'fake-dockerfile', 'fake-name',
trigger, pull_robot_name=membername)

# Add some log entries for the robot.
model.log_action('pull_repo', ORGANIZATION, performer=pull_robot, repository=repo)
model.log.log_action('pull_repo', ORGANIZATION, performer=pull_robot, repository=repo)

# Delete the robot and verify it works.
self.deleteResponse(OrgRobot,
params=dict(orgname=ORGANIZATION, robot_shortname='bender'))

# Verify the build is still present.
self.assertIsNotNone(model.get_repository_build(build.uuid))
self.assertIsNotNone(model.build.get_repository_build(build.uuid))

# All the above records should now be deleted, along with the robot. We verify a few of the
# critical ones below.

# Check the team.
team = model.get_organization_team(ORGANIZATION, 'readers')
members = [member.username for member in model.get_organization_team_members(team.id)]
team = model.team.get_organization_team(ORGANIZATION, 'readers')
members = [member.username
for member in model.organization.get_organization_team_members(team.id)]
self.assertFalse(membername in members)

# Check the robot itself.
self.assertIsNone(model.get_user(membername))
self.assertIsNone(model.user.get_user(membername))

def test_robots(self):

@ -2478,7 +2494,8 @@ class TestLogs(ApiTestCase):

class TestApplicationInformation(ApiTestCase):
def test_get_info(self):
json = self.getJsonResponse(ApplicationInformation, params=dict(client_id=FAKE_APPLICATION_CLIENT_ID))
json = self.getJsonResponse(ApplicationInformation,
params=dict(client_id=FAKE_APPLICATION_CLIENT_ID))
assert 'name' in json
assert 'uri' in json
assert 'organization' in json
@ -2522,15 +2539,18 @@ class TestOrganizationApplicationResource(ApiTestCase):

# Retrieve the application.
json = self.getJsonResponse(OrganizationApplicationResource,
params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID))
params=dict(orgname=ORGANIZATION,
client_id=FAKE_APPLICATION_CLIENT_ID))

self.assertEquals(FAKE_APPLICATION_CLIENT_ID, json['client_id'])

# Edit the application.
edit_json = self.putJsonResponse(OrganizationApplicationResource,
params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID),
data=dict(name="Some App", description="foo", application_uri="bar",
redirect_uri="baz", avatar_email="meh"))
params=dict(orgname=ORGANIZATION,
client_id=FAKE_APPLICATION_CLIENT_ID),
data=dict(name="Some App", description="foo",
application_uri="bar", redirect_uri="baz",
avatar_email="meh"))

self.assertEquals(FAKE_APPLICATION_CLIENT_ID, edit_json['client_id'])
self.assertEquals("Some App", edit_json['name'])

@ -2541,7 +2561,8 @@ class TestOrganizationApplicationResource(ApiTestCase):

# Retrieve the application again.
json = self.getJsonResponse(OrganizationApplicationResource,
params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID))
params=dict(orgname=ORGANIZATION,
client_id=FAKE_APPLICATION_CLIENT_ID))

self.assertEquals(json, edit_json)

@ -2561,20 +2582,23 @@ class TestOrganizationApplicationResetClientSecret(ApiTestCase):

# Retrieve the application.
json = self.getJsonResponse(OrganizationApplicationResource,
params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID))
params=dict(orgname=ORGANIZATION,
client_id=FAKE_APPLICATION_CLIENT_ID))

self.assertEquals(FAKE_APPLICATION_CLIENT_ID, json['client_id'])

# Reset the client secret.
reset_json = self.postJsonResponse(OrganizationApplicationResetClientSecret,
params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID))
params=dict(orgname=ORGANIZATION,
client_id=FAKE_APPLICATION_CLIENT_ID))

self.assertEquals(FAKE_APPLICATION_CLIENT_ID, reset_json['client_id'])
self.assertNotEquals(reset_json['client_secret'], json['client_secret'])

# Verify it was changed in the DB.
json = self.getJsonResponse(OrganizationApplicationResource,
params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID))
params=dict(orgname=ORGANIZATION,
client_id=FAKE_APPLICATION_CLIENT_ID))
self.assertEquals(reset_json['client_secret'], json['client_secret'])

@ -2642,11 +2666,13 @@ class TestBuildTriggers(ApiTestCase):
self.login(ADMIN_ACCESS_USER)

# Check a repo with no known triggers.
json = self.getJsonResponse(BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
json = self.getJsonResponse(BuildTriggerList,
params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
self.assertEquals(0, len(json['triggers']))

# Check a repo with one known trigger.
json = self.getJsonResponse(BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + '/building'))
json = self.getJsonResponse(BuildTriggerList,
params=dict(repository=ADMIN_ACCESS_USER + '/building'))
self.assertEquals(1, len(json['triggers']))

trigger = json['triggers'][0]

@ -2657,13 +2683,15 @@ class TestBuildTriggers(ApiTestCase):
assert 'service' in trigger

# Verify the get trigger method.
trigger_json = self.getJsonResponse(BuildTrigger, params=dict(repository=ADMIN_ACCESS_USER + '/building',
trigger_json = self.getJsonResponse(BuildTrigger,
params=dict(repository=ADMIN_ACCESS_USER + '/building',
trigger_uuid=trigger['id']))

self.assertEquals(trigger, trigger_json)

# Check the recent builds for the trigger.
builds_json = self.getJsonResponse(TriggerBuildList, params=dict(repository=ADMIN_ACCESS_USER + '/building',
builds_json = self.getJsonResponse(TriggerBuildList,
params=dict(repository=ADMIN_ACCESS_USER + '/building',
trigger_uuid=trigger['id']))

assert 'builds' in builds_json

@ -2671,7 +2699,8 @@ class TestBuildTriggers(ApiTestCase):
def test_delete_build_trigger(self):
self.login(ADMIN_ACCESS_USER)

json = self.getJsonResponse(BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + '/building'))
json = self.getJsonResponse(BuildTriggerList,
params=dict(repository=ADMIN_ACCESS_USER + '/building'))
self.assertEquals(1, len(json['triggers']))
trigger = json['triggers'][0]

@ -2680,7 +2709,8 @@ class TestBuildTriggers(ApiTestCase):
trigger_uuid=trigger['id']))

# Verify it was deleted.
json = self.getJsonResponse(BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + '/building'))
json = self.getJsonResponse(BuildTriggerList,
params=dict(repository=ADMIN_ACCESS_USER + '/building'))
self.assertEquals(0, len(json['triggers']))
@ -2690,14 +2720,15 @@ class TestBuildTriggers(ApiTestCase):
database.BuildTriggerService.create(name='fakeservice')

# Add a new fake trigger.
repo = model.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.get_user(ADMIN_ACCESS_USER)
trigger = model.create_build_trigger(repo, 'fakeservice', 'sometoken', user)
repo = model.repository.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.user.get_user(ADMIN_ACCESS_USER)
trigger = model.build.create_build_trigger(repo, 'fakeservice', 'sometoken', user)

# Analyze the trigger's dockerfile: First, no dockerfile.
trigger_config = {}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('error', analyze_json['status'])

@ -2706,7 +2737,8 @@ class TestBuildTriggers(ApiTestCase):
# Analyze the trigger's dockerfile: Second, missing FROM in dockerfile.
trigger_config = {'dockerfile': 'MAINTAINER me'}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('warning', analyze_json['status'])

@ -2715,7 +2747,8 @@ class TestBuildTriggers(ApiTestCase):
# Analyze the trigger's dockerfile: Third, dockerfile with public repo.
trigger_config = {'dockerfile': 'FROM somerepo'}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('publicbase', analyze_json['status'])

@ -2723,34 +2756,40 @@ class TestBuildTriggers(ApiTestCase):
# Analyze the trigger's dockerfile: Fourth, dockerfile with private repo with an invalid path.
trigger_config = {'dockerfile': 'FROM localhost:5000/somepath'}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('warning', analyze_json['status'])
self.assertEquals('"localhost:5000/somepath" is not a valid Quay repository path', analyze_json['message'])
self.assertEquals('"localhost:5000/somepath" is not a valid Quay repository path',
analyze_json['message'])

# Analyze the trigger's dockerfile: Fifth, dockerfile with private repo that does not exist.
trigger_config = {'dockerfile': 'FROM localhost:5000/nothere/randomrepo'}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('error', analyze_json['status'])
self.assertEquals('Repository "localhost:5000/nothere/randomrepo" referenced by the Dockerfile was not found', analyze_json['message'])
nofound = 'Repository "localhost:5000/%s/randomrepo" referenced by the Dockerfile was not found'
self.assertEquals(nofound % 'nothere', analyze_json['message'])

# Analyze the trigger's dockerfile: Sixth, dockerfile with private repo that the user cannot see.
# Analyze the trigger's dockerfile: Sixth, dockerfile with private repo that the user cannot see
trigger_config = {'dockerfile': 'FROM localhost:5000/randomuser/randomrepo'}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('error', analyze_json['status'])
self.assertEquals('Repository "localhost:5000/randomuser/randomrepo" referenced by the Dockerfile was not found', analyze_json['message'])
self.assertEquals(nofound % 'randomuser', analyze_json['message'])

# Analyze the trigger's dockerfile: Seventh, dockerfile with private repo that the user can see.
trigger_config = {'dockerfile': 'FROM localhost:5000/devtable/complex'}
analyze_json = self.postJsonResponse(BuildTriggerAnalyze,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals('analyzed', analyze_json['status'])
@ -2766,47 +2805,53 @@ class TestBuildTriggers(ApiTestCase):
database.BuildTriggerService.create(name='fakeservice')

# Add a new fake trigger.
repo = model.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.get_user(ADMIN_ACCESS_USER)
trigger = model.create_build_trigger(repo, 'fakeservice', 'sometoken', user)
repo = model.repository.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.user.get_user(ADMIN_ACCESS_USER)
trigger = model.build.create_build_trigger(repo, 'fakeservice', 'sometoken', user)

# Verify the trigger.
json = self.getJsonResponse(BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
json = self.getJsonResponse(BuildTriggerList,
params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
self.assertEquals(1, len(json['triggers']))
self.assertEquals(trigger.uuid, json['triggers'][0]['id'])
self.assertEquals(trigger.service.name, json['triggers'][0]['service'])
self.assertEquals(False, json['triggers'][0]['is_active'])

# List the trigger's sources.
source_json = self.getJsonResponse(BuildTriggerSources, params=dict(repository=ADMIN_ACCESS_USER + '/simple',
source_json = self.getJsonResponse(BuildTriggerSources,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid))
self.assertEquals([{'first': 'source'}, {'second': 'sometoken'}], source_json['sources'])

# List the trigger's subdirs.
subdir_json = self.postJsonResponse(BuildTriggerSubdirs,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'somevalue': 'meh'})

self.assertEquals({'status': 'success', 'subdir': ['sometoken', 'foo', 'bar', 'meh']}, subdir_json)
self.assertEquals({'status': 'success', 'subdir': ['sometoken', 'foo', 'bar', 'meh']},
subdir_json)

# Activate the trigger.
trigger_config = {
'build_source': 'somesource'
}
activate_json = self.postJsonResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config})

self.assertEquals(True, activate_json['is_active'])

# Make sure the trigger has a write token.
trigger = model.get_build_trigger(trigger.uuid)
trigger = model.build.get_build_trigger(trigger.uuid)
self.assertNotEquals(None, trigger.write_token)
self.assertEquals(True, py_json.loads(trigger.config)['active'])

# Make sure we cannot activate again.
self.postResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config},
expected_code=400)

@ -2824,7 +2869,8 @@ class TestBuildTriggers(ApiTestCase):

# Start a manual build.
start_json = self.postJsonResponse(ActivateBuildTrigger,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data=dict(),
expected_code=201)

@ -2844,14 +2890,15 @@ class TestBuildTriggers(ApiTestCase):
database.BuildTriggerService.create(name='fakeservice')

# Add a new fake trigger.
repo = model.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.get_user(ADMIN_ACCESS_USER)
trigger = model.create_build_trigger(repo, 'fakeservice', 'sometoken', user)
repo = model.repository.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.user.get_user(ADMIN_ACCESS_USER)
trigger = model.build.create_build_trigger(repo, 'fakeservice', 'sometoken', user)

# Try to activate it with an invalid robot account.
trigger_config = {}
activate_json = self.postJsonResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
self.postJsonResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config': trigger_config, 'pull_robot': 'someinvalidrobot'},
expected_code=404)

@ -2861,14 +2908,15 @@ class TestBuildTriggers(ApiTestCase):
database.BuildTriggerService.create(name='fakeservice')

# Add a new fake trigger.
repo = model.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.get_user(ADMIN_ACCESS_USER)
trigger = model.create_build_trigger(repo, 'fakeservice', 'sometoken', user)
repo = model.repository.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.user.get_user(ADMIN_ACCESS_USER)
trigger = model.build.create_build_trigger(repo, 'fakeservice', 'sometoken', user)

# Try to activate it with a robot account in the wrong namespace.
trigger_config = {}
activate_json = self.postJsonResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
self.postJsonResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config':trigger_config, 'pull_robot':'freshuser+anotherrobot'},
expected_code=403)

@ -2878,15 +2926,17 @@ class TestBuildTriggers(ApiTestCase):
database.BuildTriggerService.create(name='fakeservice')

# Add a new fake trigger.
repo = model.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.get_user(ADMIN_ACCESS_USER)
trigger = model.create_build_trigger(repo, 'fakeservice', 'sometoken', user)
repo = model.repository.get_repository(ADMIN_ACCESS_USER, 'simple')
user = model.user.get_user(ADMIN_ACCESS_USER)
trigger = model.build.create_build_trigger(repo, 'fakeservice', 'sometoken', user)

# Try to activate it with a robot account.
trigger_config = {}
activate_json = self.postJsonResponse(BuildTriggerActivate,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
data={'config': trigger_config, 'pull_robot': ADMIN_ACCESS_USER + '+dtrobot'})
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data={'config':trigger_config,
'pull_robot':ADMIN_ACCESS_USER + '+dtrobot'})

# Verify that the robot was saved.
self.assertEquals(True, activate_json['is_active'])

@ -2894,7 +2944,8 @@ class TestBuildTriggers(ApiTestCase):

# Start a manual build.
start_json = self.postJsonResponse(ActivateBuildTrigger,
params=dict(repository=ADMIN_ACCESS_USER + '/simple', trigger_uuid=trigger.uuid),
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid),
data=dict(),
expected_code=201)

@ -2918,7 +2969,8 @@ class TestUserAuthorizations(ApiTestCase):
assert 'application' in authorization

# Retrieve the authorization.
get_json = self.getJsonResponse(UserAuthorization, params=dict(access_token_uuid = authorization['uuid']))
get_json = self.getJsonResponse(UserAuthorization,
params=dict(access_token_uuid=authorization['uuid']))
self.assertEquals(authorization, get_json)

# Delete the authorization.

@ -2980,7 +3032,8 @@ class TestSuperUserManagement(ApiTestCase):
self.assertEquals('jschorr+test@devtable.com', json['email'])

# Update the user.
json = self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(password='somepassword'))
json = self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'),
data=dict(password='somepassword'))
self.assertTrue('encrypted_password' in json)

def test_update_user(self):

@ -2992,7 +3045,8 @@ class TestSuperUserManagement(ApiTestCase):
self.assertEquals('jschorr+test@devtable.com', json['email'])

# Update the user.
json = self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(email='foo@bar.com'))
json = self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'),
data=dict(email='foo@bar.com'))
self.assertFalse('encrypted_password' in json)

# Verify the user was updated.
|
|
|
@ -74,17 +74,17 @@ class TestAuth(ApiTestCase):
|
|||
expected_code=403)
|
||||
|
||||
def test_basic_auth_user(self):
|
||||
user = model.get_user(ADMIN_ACCESS_USER)
|
||||
user = model.user.get_user(ADMIN_ACCESS_USER)
|
||||
self.conduct_basic_auth(ADMIN_ACCESS_USER, 'password')
|
||||
self.verify_identity(user.uuid)
|
||||
|
||||
def test_basic_auth_disabled_user(self):
|
||||
user = model.get_user(DISABLED_USER)
|
||||
user = model.user.get_user(DISABLED_USER)
|
||||
self.conduct_basic_auth(DISABLED_USER, 'password')
|
||||
self.verify_no_identity()
|
||||
|
||||
def test_basic_auth_token(self):
|
||||
token = model.create_delegate_token(ADMIN_ACCESS_USER, 'simple', 'sometoken')
|
||||
token = model.token.create_delegate_token(ADMIN_ACCESS_USER, 'simple', 'sometoken')
|
||||
self.conduct_basic_auth('$token', token.code)
|
||||
self.verify_identity(token.code)
|
||||
|
||||
|
@ -101,26 +101,26 @@ class TestAuth(ApiTestCase):
|
|||
self.verify_no_identity()
|
||||
|
||||
def test_oauth_valid_user(self):
|
||||
user = model.get_user(ADMIN_ACCESS_USER)
|
||||
user = model.user.get_user(ADMIN_ACCESS_USER)
|
||||
self.create_oauth(user)
|
||||
self.conduct_basic_auth('$oauthtoken', 'access1234')
|
||||
self.verify_identity(user.uuid)
|
||||
|
||||
def test_oauth_disabled_user(self):
|
||||
user = model.get_user(DISABLED_USER)
|
||||
user = model.user.get_user(DISABLED_USER)
|
||||
self.create_oauth(user)
|
||||
self.conduct_basic_auth('$oauthtoken', 'access1234')
|
||||
self.verify_no_identity()
|
||||
|
||||
def test_basic_auth_robot(self):
|
||||
user = model.get_user(ADMIN_ACCESS_USER)
|
||||
robot, passcode = model.get_robot('dtrobot', user)
|
||||
user = model.user.get_user(ADMIN_ACCESS_USER)
|
||||
robot, passcode = model.user.get_robot('dtrobot', user)
|
||||
self.conduct_basic_auth(robot.username, passcode)
|
||||
self.verify_identity(robot.uuid)
|
||||
|
||||
def test_basic_auth_robot_invalidcode(self):
|
||||
user = model.get_user(ADMIN_ACCESS_USER)
|
||||
robot, _ = model.get_robot('dtrobot', user)
|
||||
user = model.user.get_user(ADMIN_ACCESS_USER)
|
||||
robot, _ = model.user.get_robot('dtrobot', user)
|
||||
self.conduct_basic_auth(robot.username, 'someinvalidcode')
|
||||
self.verify_no_identity()
|
||||
|
||||
|
|
|
@ -13,8 +13,8 @@ REPO = 'somerepo'
|
|||
class TestGarbageColection(unittest.TestCase):
|
||||
@staticmethod
|
||||
def _set_tag_expiration_policy(namespace, expiration_s):
|
||||
namespace_user = model.get_user(namespace)
|
||||
model.change_user_tag_expiration(namespace_user, expiration_s)
|
||||
namespace_user = model.user.get_user(namespace)
|
||||
model.user.change_user_tag_expiration(namespace_user, expiration_s)
|
||||
|
||||
def setUp(self):
|
||||
setup_database_for_testing(self)
|
||||
|
@ -32,14 +32,14 @@ class TestGarbageColection(unittest.TestCase):
|
|||
|
||||
def createImage(self, docker_image_id, repository_obj, username):
|
||||
preferred = storage.preferred_locations[0]
|
||||
image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {},
|
||||
image = model.image.find_create_or_link_image(docker_image_id, repository_obj, username, {},
|
||||
preferred)
|
||||
image.storage.uploading = False
|
||||
image.storage.save()
|
||||
|
||||
# Create derived images as well.
|
||||
for i in range(0, 2):
|
||||
model.find_or_create_derived_storage(image.storage, 'squash', preferred)
|
||||
model.storage.find_or_create_derived_storage(image.storage, 'squash', preferred)
|
||||
|
||||
# Add some additional placements to the image.
|
||||
for location_name in ['local_eu']:
|
||||
|
@ -55,8 +55,8 @@ class TestGarbageColection(unittest.TestCase):
|
|||
return image.storage
|
||||
|
||||
def createRepository(self, namespace=ADMIN_ACCESS_USER, name=REPO, **kwargs):
|
||||
user = model.get_user(namespace)
|
||||
repo = model.create_repository(namespace, name, user)
|
||||
user = model.user.get_user(namespace)
|
||||
repo = model.repository.create_repository(namespace, name, user)
|
||||
|
||||
# Populate the repository with the tags.
|
||||
image_map = {}
|
||||
|
@ -69,35 +69,37 @@ class TestGarbageColection(unittest.TestCase):
|
|||
image_map[image_id] = self.createImage(image_id, repo, namespace)
|
||||
|
||||
# Set the ancestors for the image.
|
||||
parent = model.set_image_metadata(image_id, namespace, name, '', '', '', parent=parent)
|
||||
parent = model.image.set_image_metadata(image_id, namespace, name, '', '', '',
|
||||
parent=parent)
|
||||
|
||||
# Set the tag for the image.
|
||||
model.create_or_update_tag(namespace, name, tag_name, image_ids[-1])
|
||||
model.tag.create_or_update_tag(namespace, name, tag_name, image_ids[-1])
|
||||
|
||||
return repo
|
||||
|
||||
def gcNow(self, repository):
|
||||
model.garbage_collect_repository(repository.namespace_user.username, repository.name)
|
||||
model.repository.garbage_collect_repository(repository.namespace_user.username, repository.name)
|
||||
|
||||
def deleteTag(self, repository, tag):
|
||||
model.delete_tag(repository.namespace_user.username, repository.name, tag)
|
||||
model.garbage_collect_repository(repository.namespace_user.username, repository.name)
|
||||
model.tag.delete_tag(repository.namespace_user.username, repository.name, tag)
|
||||
model.repository.garbage_collect_repository(repository.namespace_user.username, repository.name)
|
||||
|
||||
def moveTag(self, repository, tag, docker_image_id):
|
||||
model.create_or_update_tag(repository.namespace_user.username, repository.name, tag,
|
||||
model.tag.create_or_update_tag(repository.namespace_user.username, repository.name, tag,
|
||||
docker_image_id)
|
||||
model.garbage_collect_repository(repository.namespace_user.username, repository.name)
|
||||
model.repository.garbage_collect_repository(repository.namespace_user.username, repository.name)
|
||||
|
||||
def assertNotDeleted(self, repository, *args):
|
||||
for docker_image_id in args:
|
||||
self.assertTrue(bool(model.get_image_by_id(repository.namespace_user.username,
|
||||
self.assertTrue(bool(model.image.get_image_by_id(repository.namespace_user.username,
|
||||
repository.name, docker_image_id)))
|
||||
|
||||
def assertDeleted(self, repository, *args):
|
||||
for docker_image_id in args:
|
||||
try:
|
||||
# Verify the image is missing when accessed by the repository.
|
||||
model.get_image_by_id(repository.namespace_user.username, repository.name, docker_image_id)
|
||||
model.image.get_image_by_id(repository.namespace_user.username, repository.name,
|
||||
docker_image_id)
|
||||
except model.DataModelException:
|
||||
return
|
||||
|
||||
|
|
|
@ -42,9 +42,10 @@ class TestImageSharing(unittest.TestCase):
|
|||
self.ctx.__exit__(True, None, None)
|
||||
|
||||
def createStorage(self, docker_image_id, repository=REPO, username=ADMIN_ACCESS_USER):
|
||||
repository_obj = model.get_repository(repository.split('/')[0], repository.split('/')[1])
|
||||
repository_obj = model.repository.get_repository(repository.split('/')[0],
|
||||
repository.split('/')[1])
|
||||
preferred = storage.preferred_locations[0]
|
||||
image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {},
|
||||
image = model.image.find_create_or_link_image(docker_image_id, repository_obj, username, {},
|
||||
preferred)
|
||||
image.storage.uploading = False
|
||||
image.storage.save()
|
||||
|
|
|
@@ -28,8 +28,8 @@ class TestImageTree(unittest.TestCase):
     return None

   def test_longest_path_simple_repo(self):
-    all_images = list(model.get_repository_images(NAMESPACE, SIMPLE_REPO))
-    all_tags = list(model.list_repository_tags(NAMESPACE, SIMPLE_REPO))
+    all_images = list(model.image.get_repository_images(NAMESPACE, SIMPLE_REPO))
+    all_tags = list(model.tag.list_repository_tags(NAMESPACE, SIMPLE_REPO))
     tree = ImageTree(all_images, all_tags)

     base_image = self._get_base_image(all_images)
@@ -47,8 +47,8 @@ class TestImageTree(unittest.TestCase):
     self.assertEquals('latest', tree.tag_containing_image(result[-1]))

   def test_longest_path_complex_repo(self):
-    all_images = list(model.get_repository_images(NAMESPACE, COMPLEX_REPO))
-    all_tags = list(model.list_repository_tags(NAMESPACE, COMPLEX_REPO))
+    all_images = list(model.image.get_repository_images(NAMESPACE, COMPLEX_REPO))
+    all_tags = list(model.tag.list_repository_tags(NAMESPACE, COMPLEX_REPO))
     tree = ImageTree(all_images, all_tags)

     base_image = self._get_base_image(all_images)
@@ -61,8 +61,8 @@ class TestImageTree(unittest.TestCase):
     self.assertEquals('prod', tree.tag_containing_image(result[-1]))

   def test_filtering(self):
-    all_images = list(model.get_repository_images(NAMESPACE, COMPLEX_REPO))
-    all_tags = list(model.list_repository_tags(NAMESPACE, COMPLEX_REPO))
+    all_images = list(model.image.get_repository_images(NAMESPACE, COMPLEX_REPO))
+    all_tags = list(model.tag.list_repository_tags(NAMESPACE, COMPLEX_REPO))
     tree = ImageTree(all_images, all_tags, base_filter=1245)

     base_image = self._get_base_image(all_images)
@@ -74,8 +74,8 @@ class TestImageTree(unittest.TestCase):
     self.assertEquals(0, len(result))

   def test_find_tag_parent_image(self):
-    all_images = list(model.get_repository_images(NAMESPACE, COMPLEX_REPO))
-    all_tags = list(model.list_repository_tags(NAMESPACE, COMPLEX_REPO))
+    all_images = list(model.image.get_repository_images(NAMESPACE, COMPLEX_REPO))
+    all_tags = list(model.tag.list_repository_tags(NAMESPACE, COMPLEX_REPO))
     tree = ImageTree(all_images, all_tags)

     base_image = self._get_base_image(all_images)
@@ -92,9 +92,9 @@ class TestImageTree(unittest.TestCase):


   def test_longest_path_simple_repo_direct_lookup(self):
-    repository = model.get_repository(NAMESPACE, SIMPLE_REPO)
-    all_images = list(model.get_repository_images(NAMESPACE, SIMPLE_REPO))
-    all_tags = list(model.list_repository_tags(NAMESPACE, SIMPLE_REPO))
+    repository = model.repository.get_repository(NAMESPACE, SIMPLE_REPO)
+    all_images = list(model.image.get_repository_images(NAMESPACE, SIMPLE_REPO))
+    all_tags = list(model.tag.list_repository_tags(NAMESPACE, SIMPLE_REPO))

     base_image = self._get_base_image(all_images)
     tag_image = all_tags[0].image
@@ -102,7 +102,7 @@ class TestImageTree(unittest.TestCase):
     def checker(index, image):
       return True

-    filtered_images = model.get_repository_images_without_placements(repository,
+    filtered_images = model.image.get_repository_images_without_placements(repository,
                                                                      with_ancestor=base_image)
     self.assertEquals(set([f.id for f in filtered_images]), set([a.id for a in all_images]))
@@ -15,8 +15,8 @@ UNSUPER_USERNAME = 'freshuser'
 class TestSuperUserOps(unittest.TestCase):
   def setUp(self):
     setup_database_for_testing(self)
-    self._su = model.get_user(SUPER_USERNAME)
-    self._normie = model.get_user(UNSUPER_USERNAME)
+    self._su = model.user.get_user(SUPER_USERNAME)
+    self._normie = model.user.get_user(UNSUPER_USERNAME)

   def tearDown(self):
     finished_database_for_testing(self)
@@ -69,7 +69,7 @@ class TestBuildLogs(RedisBuildLogs):

     if not is_get_status:
       from data import model
-      build_obj = model.get_repository_build(self.test_build_id)
+      build_obj = model.build.get_repository_build(self.test_build_id)
       build_obj.phase = phase
       build_obj.save()
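Call sites throughout these hunks keep importing the package with "from data import model" and then reach the new helpers as model.build, model.user, model.image, and so on, so the data/model package itself has to pull in its submodules. A sketch of what data/model/__init__.py might look like after the split; the real file is not part of the hunks shown here, the submodule names listed are only those seen at call sites in this diff, and the exception stays at the package level because the tests still catch model.DataModelException:

class DataModelException(Exception):
  pass

# Imported last so the submodules can in turn import package-level exceptions.
from . import (build, image, notification, repository,
               storage, tag, user)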
@@ -27,7 +27,8 @@ bad_count = 0
 good_count = 0

 def resolve_or_create(repo, docker_image_id, new_ancestry):
-  existing = model.get_repo_image_extended(repo.namespace_user.username, repo.name, docker_image_id)
+  existing = model.image.get_repo_image_extended(repo.namespace_user.username, repo.name,
+                                                 docker_image_id)
   if existing:
     logger.debug('Found existing image: %s, %s', existing.id, docker_image_id)
     return existing
@@ -63,7 +64,7 @@ def all_ancestors_exist(ancestors):
   cant_fix = []
   for img in query:
     try:
-      with_locations = model.get_repo_image_extended(img.repository.namespace_user.username,
+      with_locations = model.image.get_repo_image_extended(img.repository.namespace_user.username,
                                                      img.repository.name, img.docker_image_id)
       ancestry_storage = store.image_ancestry_path(img.storage.uuid)
       if store.exists(with_locations.storage.locations, ancestry_storage):
@@ -18,7 +18,7 @@ def sendInvoice(invoice_id):
     return

   customer_id = invoice['customer']
-  user = model.get_user_or_org_by_customer_id(customer_id)
+  user = model.user.get_user_or_org_by_customer_id(customer_id)
   if not user:
     print 'No user found for customer %s' % (customer_id)
     return
@@ -17,7 +17,7 @@ def get_private_allowed(customer):
 # Find customers who have more private repositories than their plans allow
 users = User.select()

-usage = [(user.username, model.get_private_repo_count(user.username),
+usage = [(user.username, model.user.get_private_repo_count(user.username),
           get_private_allowed(user)) for user in users]

 for username, used, allowed in usage:
@@ -59,9 +59,9 @@ if __name__ == "__main__":

   images = []
   if args.imageid is not None:
-    images = [model.get_image_by_id(args.namespace, args.repository, args.imageid)]
+    images = [model.image.get_image_by_id(args.namespace, args.repository, args.imageid)]
   else:
-    images = model.get_repository_images(args.namespace, args.repository)
+    images = model.image.get_repository_images(args.namespace, args.repository)

   for img in images:
     migrate_image(img, location)
@@ -7,16 +7,16 @@ def renameUser(username, new_name):
   if username == new_name:
     raise Exception('Must give a new username')

-  check = model.get_user_or_org(new_name)
+  check = model.user.get_user_or_org(new_name)
   if check is not None:
     raise Exception('New username %s already exists' % new_name)

-  existing = model.get_user_or_org(username)
+  existing = model.user.get_user_or_org(username)
   if existing is None:
     raise Exception('Username %s does not exist' % username)

   print 'Renaming user...'
-  model.change_username(existing.id, new_name)
+  model.user.change_username(existing.id, new_name)
   print 'Rename complete'
@@ -17,7 +17,7 @@ def sendInvoice(invoice_id):
     return

   customer_id = invoice['customer']
-  user = model.get_user_or_org_by_customer_id(customer_id)
+  user = model.user.get_user_or_org_by_customer_id(customer_id)
   if not user:
     print 'No user found for customer %s' % (customer_id)
     return
@@ -10,14 +10,14 @@ from flask import Flask, current_app
 from flask_mail import Mail

 def sendConfirmation(username):
-  user = model.get_nonrobot_user(username)
+  user = model.user.get_nonrobot_user(username)
   if not user:
     print 'No user found'
     return


   with app.app_context():
-    code = model.create_confirm_email_code(user)
+    code = model.user.create_confirm_email_code(user)
     send_confirmation_email(user.username, user.email, code.code)
     print 'Email sent to %s' % (user.email)
@@ -10,14 +10,14 @@ from flask import Flask, current_app
 from flask_mail import Mail

 def sendReset(username):
-  user = model.get_nonrobot_user(username)
+  user = model.user.get_nonrobot_user(username)
   if not user:
     print 'No user found'
     return


   with app.app_context():
-    code = model.create_reset_password_email_code(user.email)
+    code = model.user.create_reset_password_email_code(user.email)
     send_recovery_email(user.email, code.code)
     print 'Email sent to %s' % (user.email)
@@ -1,7 +1,7 @@
 from app import get_app_url, avatar
 from data import model
 from util.names import parse_robot_username
-from jinja2 import Template, Environment, FileSystemLoader, contextfilter
+from jinja2 import Environment, FileSystemLoader

 def icon_path(icon_name):
   return '%s/static/img/icons/%s.png' % (get_app_url(), icon_name)
@@ -14,13 +14,13 @@ def team_reference(teamname):
   return "<span>%s <b>%s</b></span>" % (avatar_html, teamname)

 def user_reference(username):
-  user = model.get_namespace_user(username)
+  user = model.user.get_namespace_user(username)
   if not user:
     return username

   if user.robot:
     parts = parse_robot_username(username)
-    user = model.get_namespace_user(parts[0])
+    user = model.user.get_namespace_user(parts[0])

     return """<span><img src="%s" alt="Robot"> <b>%s</b></span>""" % (icon_path('wrench'), username)

@@ -37,7 +37,7 @@ def user_reference(username):
 def repository_tag_reference(repository_path_and_tag):
   (repository_path, tag) = repository_path_and_tag
   (namespace, repository) = repository_path.split('/')
-  owner = model.get_namespace_user(namespace)
+  owner = model.user.get_namespace_user(namespace)
   if not owner:
     return tag

@@ -52,7 +52,7 @@ def repository_reference(pair):
   namespace = pair[0]
   repository = pair[1]

-  owner = model.get_namespace_user(namespace)
+  owner = model.user.get_namespace_user(namespace)
   if not owner:
     return "%s/%s" % (namespace, repository)

@@ -68,7 +68,7 @@ def repository_reference(pair):


 def admin_reference(username):
-  user = model.get_user_or_org(username)
+  user = model.user.get_user_or_org(username)
   if not user:
     return 'account settings'
@@ -57,7 +57,7 @@ def backfill_sizes_from_data():
       counter = counter + 1

       try:
-        with_locs = model.get_storage_by_uuid(uuid)
+        with_locs = model.storage.get_storage_by_uuid(uuid)
         if with_locs.uncompressed_size is not None:
           logger.debug('Somebody else already filled this in for us: %s', uuid)
           continue
@@ -81,7 +81,7 @@ def backfill_sizes_from_data():
         # make sure the image storage still exists and has not changed.
         logger.debug('Writing entry: %s. Size: %s', uuid, uncompressed_size)
         with app.config['DB_TRANSACTION_FACTORY'](db):
-          current_record = model.get_storage_by_uuid(uuid)
+          current_record = model.storage.get_storage_by_uuid(uuid)

           if not current_record.uploading and current_record.uncompressed_size == None:
             current_record.uncompressed_size = uncompressed_size
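The second hunk above re-reads the storage row inside the database transaction and re-checks uploading and uncompressed_size before writing, so a concurrent worker that already filled in the size is detected rather than overwritten. A condensed sketch of that guarded-update pattern, assuming a peewee-style db.transaction() context manager in place of the app's DB_TRANSACTION_FACTORY wrapper:

with db.transaction():
  # Re-fetch inside the transaction so the check and the write see the same row.
  current_record = model.storage.get_storage_by_uuid(uuid)
  if not current_record.uploading and current_record.uncompressed_size is None:
    current_record.uncompressed_size = uncompressed_size
    current_record.save()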
@@ -23,7 +23,7 @@ def archive_redis_buildlogs():
      avoid needing two-phase commit. """
   try:
     # Get a random build to archive
-    to_archive = model.archivable_buildlogs_query().order_by(db_random_func()).get()
+    to_archive = model.build.archivable_buildlogs_query().order_by(db_random_func()).get()
     logger.debug('Archiving: %s', to_archive.uuid)

     length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
@@ -15,9 +15,9 @@ logger = logging.getLogger(__name__)

 class NotificationWorker(Worker):
   def process_queue_item(self, job_details):
-    notification_uuid = job_details['notification_uuid'];
+    notification_uuid = job_details['notification_uuid']

-    notification = model.get_repo_notification(notification_uuid)
+    notification = model.notification.get_repo_notification(notification_uuid)
     if not notification:
       # Probably deleted.
       return
@@ -29,10 +29,10 @@ class NotificationWorker(Worker):
       event_handler = NotificationEvent.get_event(event_name)
       method_handler = NotificationMethod.get_method(method_name)
     except InvalidNotificationMethodException as ex:
-      logger.exception('Cannot find notification method: %s' % ex.message)
+      logger.exception('Cannot find notification method: %s', ex.message)
       raise JobException('Cannot find notification method: %s' % ex.message)
     except InvalidNotificationEventException as ex:
-      logger.exception('Cannot find notification event: %s' % ex.message)
+      logger.exception('Cannot find notification event: %s', ex.message)
       raise JobException('Cannot find notification event: %s' % ex.message)

     method_handler.perform(notification, event_handler, job_details)
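Besides the model.notification rename, the worker hunks also drop a stray semicolon and switch logger.exception from %-formatting the message up front to passing the value as a separate argument; with the argument form, the logging module interpolates the message only when the record is actually handled. A small self-contained illustration of the two forms, outside the Quay code base:

import logging

logging.basicConfig()
logger = logging.getLogger(__name__)

try:
  raise ValueError('boom')
except ValueError as ex:
  # Eager: the string is built with % before logging ever sees it.
  logger.exception('Cannot find notification method: %s' % ex)
  # Lazy: logging interpolates the arguments only when the record is emitted.
  logger.exception('Cannot find notification method: %s', ex)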