Merge pull request #246 from jakedt/python-registry-v2

Massive refactoring
This commit is contained in:
Jimmy Zelinskie 2015-07-17 16:34:32 -04:00
commit eb612d606c
112 changed files with 5424 additions and 4414 deletions

17
app.py
View file

@ -2,18 +2,16 @@ import logging
import os
import json
from flask import Flask, Config, request, Request, _request_ctx_stack
from flask import Flask, request, Request, _request_ctx_stack
from flask.ext.principal import Principal
from flask.ext.login import LoginManager, UserMixin
from flask.ext.mail import Mail
from werkzeug.routing import BaseConverter
import features
from avatars.avatars import Avatar
from storage import Storage
from avatars.avatars import Avatar
from data import model
from data import database
from data.userfiles import Userfiles
@ -45,6 +43,15 @@ CONFIG_PROVIDER = FileConfigProvider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml', '
app = Flask(__name__)
logger = logging.getLogger(__name__)
class RegexConverter(BaseConverter):
def __init__(self, url_map, *items):
super(RegexConverter, self).__init__(url_map)
logger.debug('Installing regex converter with regex: %s', items[0])
self.regex = items[0]
app.url_map.converters['regex'] = RegexConverter
# Instantiate the default configuration (for test or for normal operation).
if 'TEST' in os.environ:
CONFIG_PROVIDER = TestConfigProvider()
@ -154,7 +161,7 @@ class LoginWrappedDBUser(UserMixin):
def db_user(self):
if not self._db_user:
self._db_user = model.get_user_by_uuid(self._uuid)
self._db_user = model.user.get_user_by_uuid(self._uuid)
return self._db_user
def is_authenticated(self):

View file

@ -12,11 +12,10 @@ from base64 import b64decode
import scopes
from data import model
from data.model import oauth
from app import app, authentication
from permissions import QuayDeferredPermissionUser
from auth_context import (set_authenticated_user, set_validated_token, set_grant_user_context,
set_authenticated_user_deferred, set_validated_oauth_token)
set_validated_oauth_token)
from util.http import abort
@ -48,7 +47,7 @@ def _load_user_from_cookie():
def _validate_and_apply_oauth_token(token):
validated = oauth.validate_access_token(token)
validated = model.oauth.validate_access_token(token)
if not validated:
logger.warning('OAuth access token could not be validated: %s', token)
authenticate_header = {
@ -96,40 +95,40 @@ def _process_basic_auth(auth):
elif credentials[0] == '$token':
# Use as token auth
try:
token = model.load_token_data(credentials[1])
logger.debug('Successfully validated token: %s' % credentials[1])
token = model.token.load_token_data(credentials[1])
logger.debug('Successfully validated token: %s', credentials[1])
set_validated_token(token)
identity_changed.send(app, identity=Identity(token.code, 'token'))
return
except model.DataModelException:
logger.debug('Invalid token: %s' % credentials[1])
logger.debug('Invalid token: %s', credentials[1])
elif credentials[0] == '$oauthtoken':
oauth_token = credentials[1]
_validate_and_apply_oauth_token(oauth_token)
elif '+' in credentials[0]:
logger.debug('Trying robot auth with credentials %s' % str(credentials))
logger.debug('Trying robot auth with credentials %s', str(credentials))
# Use as robot auth
try:
robot = model.verify_robot(credentials[0], credentials[1])
logger.debug('Successfully validated robot: %s' % credentials[0])
robot = model.user.verify_robot(credentials[0], credentials[1])
logger.debug('Successfully validated robot: %s', credentials[0])
set_authenticated_user(robot)
deferred_robot = QuayDeferredPermissionUser.for_user(robot)
identity_changed.send(app, identity=deferred_robot)
return
except model.InvalidRobotException:
logger.debug('Invalid robot or password for robot: %s' % credentials[0])
logger.debug('Invalid robot or password for robot: %s', credentials[0])
else:
(authenticated, error_message) = authentication.verify_user(credentials[0], credentials[1],
basic_auth=True)
if authenticated:
logger.debug('Successfully validated user: %s' % authenticated.username)
logger.debug('Successfully validated user: %s', authenticated.username)
set_authenticated_user(authenticated)
new_identity = QuayDeferredPermissionUser.for_user(authenticated)
@ -203,7 +202,7 @@ def process_auth(func):
auth = request.headers.get('authorization', '')
if auth:
logger.debug('Validating auth header: %s' % auth)
logger.debug('Validating auth header: %s', auth)
_process_signed_grant(auth)
_process_basic_auth(auth)
else:
@ -227,7 +226,7 @@ def extract_namespace_repo_from_session(func):
@wraps(func)
def wrapper(*args, **kwargs):
if 'namespace' not in session or 'repository' not in session:
logger.error('Unable to load namespace or repository from session: %s' % session)
logger.error('Unable to load namespace or repository from session: %s', session)
abort(400, message='Missing namespace in request')
return func(session['namespace'], session['repository'], *args, **kwargs)

View file

@ -16,7 +16,7 @@ def get_authenticated_user():
return None
logger.debug('Loading deferred authenticated user.')
loaded = model.get_user_by_uuid(user_uuid)
loaded = model.user.get_user_by_uuid(user_uuid)
if not loaded.enabled:
return None

110
auth/jwt_auth.py Normal file
View file

@ -0,0 +1,110 @@
import logging
import jwt
import re
from datetime import datetime, timedelta
from functools import wraps
from flask import request
from flask.ext.principal import identity_changed, Identity
from cryptography.x509 import load_pem_x509_certificate
from cryptography.hazmat.backends import default_backend
from cachetools import lru_cache
from app import app
from auth_context import set_grant_user_context
from permissions import repository_read_grant, repository_write_grant
from util.names import parse_namespace_repository
from util.http import abort
logger = logging.getLogger(__name__)
TOKEN_REGEX = re.compile(r'Bearer (([a-zA-Z0-9+/]+\.)+[a-zA-Z0-9+-_/]+)')
class InvalidJWTException(Exception):
  """ Raised when a bearer token cannot be parsed, verified, or converted into an identity. """
  pass
def identity_from_bearer_token(bearer_token, max_signed_s, public_key):
  """ Process a bearer token and return the loaded identity, or raise InvalidJWTException if an
      identity could not be loaded. Expects tokens and grants in the format of the Docker registry
      v2 auth spec: https://docs.docker.com/registry/spec/auth/token/

      Args:
        bearer_token: raw Authorization header value, 'Bearer <jwt>'.
        max_signed_s: maximum number of seconds in the future the token's 'exp' may lie.
        public_key: public key object used to verify the RS256 signature.
      Raises:
        InvalidJWTException: if the token is malformed, fails verification, is missing
        required claims, or is signed too far into the future.
  """
  logger.debug('Validating auth header: %s', bearer_token)

  # Extract the jwt token from the header; require the regex to consume the entire header.
  match = TOKEN_REGEX.match(bearer_token)
  if match is None or match.end() != len(bearer_token):
    raise InvalidJWTException('Invalid bearer token format')

  encoded = match.group(1)
  logger.debug('encoded JWT: %s', encoded)

  # Load the JWT returned, verifying signature, audience and issuer.
  try:
    payload = jwt.decode(encoded, public_key, algorithms=['RS256'], audience='quay',
                         issuer='token-issuer')
  except jwt.InvalidTokenError:
    raise InvalidJWTException('Invalid token')

  if 'sub' not in payload:
    raise InvalidJWTException('Missing sub field in JWT')

  if 'exp' not in payload:
    raise InvalidJWTException('Missing exp field in JWT')

  # Verify that the expiration is no more than max_signed_s seconds in the future.
  # BUGFIX: 'exp' is a UTC epoch timestamp, so it must be converted with utcfromtimestamp;
  # fromtimestamp interprets it in the server's local timezone, skewing the freshness check
  # on any host not running in UTC.
  if datetime.utcfromtimestamp(payload['exp']) > datetime.utcnow() + timedelta(seconds=max_signed_s):
    raise InvalidJWTException('Token was signed for more than %s seconds' % max_signed_s)

  username = payload['sub']
  loaded_identity = Identity(username, 'signed_jwt')

  # Process the grants from the payload: only repository-type grants are honored, and
  # 'push' implies write (which subsumes read), otherwise 'pull' grants read.
  if 'access' in payload:
    for grant in payload['access']:
      if grant['type'] != 'repository':
        continue

      namespace, repo_name = parse_namespace_repository(grant['name'])
      if 'push' in grant['actions']:
        loaded_identity.provides.add(repository_write_grant(namespace, repo_name))
      elif 'pull' in grant['actions']:
        loaded_identity.provides.add(repository_read_grant(namespace, repo_name))

  return loaded_identity
@lru_cache(maxsize=1)
def load_public_key(certificate_file_path):
  """ Load the PEM x509 certificate at the given path and return its public key.

      Cached with maxsize=1 so the certificate is read and parsed only once for the
      configured path. NOTE(review): alternating between two different paths would
      thrash this cache — acceptable while only one JWT certificate is configured.
  """
  with open(certificate_file_path) as cert_file:
    cert_obj = load_pem_x509_certificate(cert_file.read(), default_backend())
  return cert_obj.public_key()
def process_jwt_auth(func):
  """ Decorator that authenticates the request via a Docker registry v2 JWT bearer token.

      When an Authorization header is present, the token is validated against the configured
      certificate and the resulting identity (with its repository grants) is installed into the
      request context before invoking the wrapped view. An invalid token aborts with 401;
      requests without an auth header fall through unauthenticated.
  """
  @wraps(func)
  def wrapper(*args, **kwargs):
    logger.debug('Called with params: %s, %s', args, kwargs)
    auth = request.headers.get('authorization', '').strip()
    if auth:
      # JWT_AUTH_MAX_FRESH_S bounds how far in the future a token may be signed (default 300s).
      max_signature_seconds = app.config.get('JWT_AUTH_MAX_FRESH_S', 300)
      certificate_file_path = app.config['JWT_AUTH_CERTIFICATE_PATH']
      public_key = load_public_key(certificate_file_path)

      try:
        extracted_identity = identity_from_bearer_token(auth, max_signature_seconds, public_key)
        identity_changed.send(app, identity=extracted_identity)
        set_grant_user_context(extracted_identity.id)
        logger.debug('Identity changed to %s', extracted_identity.id)
      except InvalidJWTException as ije:
        # NOTE(review): Exception.message is Python 2 only; use str(ije) if ported to Python 3.
        abort(401, message=ije.message)
    else:
      logger.debug('No auth header.')

    return func(*args, **kwargs)
  return wrapper

View file

@ -105,7 +105,7 @@ class QuayDeferredPermissionUser(Identity):
def can(self, permission):
if not self._permissions_loaded:
logger.debug('Loading user permissions after deferring for: %s', self.id)
user_object = self._user_object or model.get_user_by_uuid(self.id)
user_object = self._user_object or model.user.get_user_by_uuid(self.id)
if user_object is None:
return super(QuayDeferredPermissionUser, self).can(permission)
@ -130,14 +130,14 @@ class QuayDeferredPermissionUser(Identity):
self.provides.add(user_repos)
# Add repository permissions
for perm in model.get_all_user_permissions(user_object):
for perm in model.permission.get_all_user_permissions(user_object):
repo_grant = _RepositoryNeed(perm.repository.namespace_user.username, perm.repository.name,
self._repo_role_for_scopes(perm.role.name))
logger.debug('User added permission: {0}'.format(repo_grant))
self.provides.add(repo_grant)
# Add namespace permissions derived
for team in model.get_org_wide_permissions(user_object):
for team in model.permission.get_org_wide_permissions(user_object):
team_org_grant = _OrganizationNeed(team.organization.username,
self._team_role_for_scopes(team.role.name))
logger.debug('Organization team added permission: {0}'.format(team_org_grant))
@ -261,7 +261,7 @@ def on_identity_loaded(sender, identity):
elif identity.auth_type == 'token':
logger.debug('Loading permissions for token: %s', identity.id)
token_data = model.load_token_data(identity.id)
token_data = model.token.load_token_data(identity.id)
repo_grant = _RepositoryNeed(token_data.repository.namespace_user.username,
token_data.repository.name,
@ -269,8 +269,8 @@ def on_identity_loaded(sender, identity):
logger.debug('Delegate token added permission: %s', repo_grant)
identity.provides.add(repo_grant)
elif identity.auth_type == 'signed_grant':
logger.debug('Loaded signed grants identity')
elif identity.auth_type == 'signed_grant' or identity.auth_type == 'signed_jwt':
logger.debug('Loaded %s identity for: %s', identity.auth_type, identity.id)
else:
logger.error('Unknown identity auth type: %s', identity.auth_type)

View file

@ -53,7 +53,7 @@ class BuildJob(object):
@lru_cache(maxsize=1)
def _load_repo_build(self):
try:
return model.get_repository_build(self.job_details['build_uuid'])
return model.build.get_repository_build(self.job_details['build_uuid'])
except model.InvalidRepositoryBuildException:
raise BuildJobLoadException(
'Could not load repository build with ID %s' % self.job_details['build_uuid'])
@ -73,7 +73,7 @@ class BuildJob(object):
return json.loads(self.repo_build.job_config)
except ValueError:
raise BuildJobLoadException(
'Could not parse repository build job config with ID %s' % self.job_details['build_uuid']
'Could not parse repository build job config with ID %s' % self.job_details['build_uuid']
)
def determine_cached_tag(self, base_image_id=None, cache_comments=None):
@ -99,15 +99,15 @@ class BuildJob(object):
repo_namespace = repo_build.repository.namespace_user.username
repo_name = repo_build.repository.name
base_image = model.get_image(repo_build.repository, base_image_id)
base_image = model.image.get_image(repo_build.repository, base_image_id)
if base_image is None:
return None
# Build an in-memory tree of the full hierarchy of images in the repository.
all_images = model.get_repository_images_without_placements(repo_build.repository,
with_ancestor=base_image)
all_images = model.image.get_repository_images_without_placements(repo_build.repository,
with_ancestor=base_image)
all_tags = model.list_repository_tags(repo_namespace, repo_name)
all_tags = model.tag.list_repository_tags(repo_namespace, repo_name)
tree = ImageTree(all_images, all_tags, base_filter=base_image.id)
# Find a path in the tree, starting at the base image, that matches the cache comments
@ -136,7 +136,8 @@ class BuildJob(object):
"""
tags = self.build_config.get('docker_tags', ['latest'])
repository = self.repo_build.repository
existing_tags = model.list_repository_tags(repository.namespace_user.username, repository.name)
existing_tags = model.tag.list_repository_tags(repository.namespace_user.username,
repository.name)
cached_tags = set(tags) & set([tag.name for tag in existing_tags])
if cached_tags:
return list(cached_tags)[0]

View file

@ -54,7 +54,7 @@ class StatusHandler(object):
self._append_log_message(phase, self._build_logs.PHASE, extra_data)
# Update the repository build with the new phase
repo_build = model.get_repository_build(self._uuid)
repo_build = model.build.get_repository_build(self._uuid)
repo_build.phase = phase
repo_build.save()

31
conf/selfsigned/jwt.crt Normal file
View file

@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFXDCCA0agAwIBAgIBAjALBgkqhkiG9w0BAQswLTEMMAoGA1UEBhMDVVNBMRAw
DgYDVQQKEwdldGNkLWNhMQswCQYDVQQLEwJDQTAeFw0xNTA3MTYxOTQzMTdaFw0y
NTA3MTYxOTQzMTlaMEYxDDAKBgNVBAYTA1VTQTEQMA4GA1UEChMHZXRjZC1jYTEQ
MA4GA1UECxMHand0YXV0aDESMBAGA1UEAxMJMTI3LjAuMC4xMIICIjANBgkqhkiG
9w0BAQEFAAOCAg8AMIICCgKCAgEAs5RxPVfO7iPZnFIP0DPiiMMMykDEG0OV6O1x
QycVReI2ELIPiWqfDFVcn6XXI/0kpvNeLGr2dDXaQFZYz+rNVDYBjM3djvibFhwa
30URmfHI9iZM703zdMZwc07+TIteIj1Q4MYhbPB4f6oERtLO29RffN9KH2FQvtzx
CF/GFb6vcHOeCeKZEGjxbQ2vfhMJh+UiO6woBooAJULBaM9hxErszqWqu0QKcV2h
NaW6fSf6aVUbFTu9hhYfkujDBR5EmwVFcKxUF+AHDrAshR/VdTHb0SJ3OtKz0vGv
NCc844J8nhUg7SeeO6ONeAq6cDRN65eJ7nJC1Nhhq2DpOgNxu+j0Dz7F+EEtNWpE
ezGjbRjmM4Ekhvsa/SUdzubInrnyHFYcbMZZIZzbgAJfruZHVKWWXjbxyG74xix+
+KzBs9jkCHSNNWnXTx3dev4dp4QltZ048crA1lioim8/W5GzYjvkfNwx6OohC4yD
5UoblQsY5vDdJ+S8g4feTmJMoNHdS/4ar/sVojUDX3KOF3bCZ6w4Ufx09EBXeUlQ
9gzs63xAvFhGk8anFSQbRoQgoKoivHpzlANquhWvRZCDtW5P4RLaHcOLjhq6nwe6
WW+vtDgEEKzdSj1We6grDPoT1kTagJ0gvpX+jcesu5d0e8MHt+qu0WTJwvCxcI+r
8zhXX/MCAwEAAaNyMHAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMB0G
A1UdDgQWBBQqTEeoqfZjPwzZYdkktdV+3Pl6SDAfBgNVHSMEGDAWgBRXeieifbQ2
jgPj8wwrrixidWU9tzAPBgNVHREECDAGhwR/AAABMAsGCSqGSIb3DQEBCwOCAgEA
KIFrN5mDOvbBEWYZSnyrGkF4bsXCbmLtg3FJkgdn5vJ916/2WtgceJMQKu1KE90V
giaRaSm4HEb63VBrCNdvoU0ZSRO/xQI2SqbERdFa36w31W71D16qDchRsFz+dEt4
7jm1gIdl/UYCMqFye7t+6H/w4J8p1M90RlSXw8wHyFEPOjEfBOM5uSpO9xsXW04+
DpfJvsLmvhaaE+OUrPft+VTtf0Wc7vV8jfS21D9nB/CJVaoS71m3FEHD8BlTZIqB
qcU67UJc7qhUJ3HyKbpJgFQcvEQ8GL+PJnsCO7Y/zCCbYLwjV1GffvHMGQ2JAJbB
2qnUxPqVmP87X3YDMXPVubW+CtoRPz7BIYsX2/HejlYOtlT25+SrHwpXRT5lcgbt
a9dcHhUmNNpfTgZpbPrPfdzqw+ze+HcbJAECWgm8v10quGbP5NZCnySM7LIJ8p7C
dLOGGuZnUaruqA3FRYS3147bdhGF1gLwGuM+BwzzvoppMf5kZuBWq6j6Feg1I68z
n1qhlEJSMoS1qUEq/8oXYgSs2ttvMAhZ4CqKPZztp3oZLPzZgL/eKb4JEjhpgitJ
TrgLFwAytHGZIWke/lR+Ca9qo/uMebduLu6akqZ5yrxl/DuHcBV8KGq+rXJIvxxj
O9hZBNQ+WDPvQlSN2z/An17zZePLgxspjZXIkkgSg1Y=
-----END CERTIFICATE-----

View file

@ -0,0 +1,51 @@
-----BEGIN RSA PRIVATE KEY-----
MIIJKQIBAAKCAgEAs5RxPVfO7iPZnFIP0DPiiMMMykDEG0OV6O1xQycVReI2ELIP
iWqfDFVcn6XXI/0kpvNeLGr2dDXaQFZYz+rNVDYBjM3djvibFhwa30URmfHI9iZM
703zdMZwc07+TIteIj1Q4MYhbPB4f6oERtLO29RffN9KH2FQvtzxCF/GFb6vcHOe
CeKZEGjxbQ2vfhMJh+UiO6woBooAJULBaM9hxErszqWqu0QKcV2hNaW6fSf6aVUb
FTu9hhYfkujDBR5EmwVFcKxUF+AHDrAshR/VdTHb0SJ3OtKz0vGvNCc844J8nhUg
7SeeO6ONeAq6cDRN65eJ7nJC1Nhhq2DpOgNxu+j0Dz7F+EEtNWpEezGjbRjmM4Ek
hvsa/SUdzubInrnyHFYcbMZZIZzbgAJfruZHVKWWXjbxyG74xix++KzBs9jkCHSN
NWnXTx3dev4dp4QltZ048crA1lioim8/W5GzYjvkfNwx6OohC4yD5UoblQsY5vDd
J+S8g4feTmJMoNHdS/4ar/sVojUDX3KOF3bCZ6w4Ufx09EBXeUlQ9gzs63xAvFhG
k8anFSQbRoQgoKoivHpzlANquhWvRZCDtW5P4RLaHcOLjhq6nwe6WW+vtDgEEKzd
Sj1We6grDPoT1kTagJ0gvpX+jcesu5d0e8MHt+qu0WTJwvCxcI+r8zhXX/MCAwEA
AQKCAgEAhhD5ZYGLhDARgumk0pwZsF5lyw0FGxGe9lFl8GtaL10NXfOBM+b8rHmB
99IYxs5zMYyZLvH/4oxdzxBnp3m1JvxWtebvVJB3P89lpG/tDw/6JwI7B6Ebc3++
bed4ZG7brRY3rkdcpvb0DuM/5Bv3wRhQ3WnZ7Yl6fbN24viVaqB8W6iFQP4BpcWj
D/ZaoPXXdLP0lbYV/6PBLhAjUnsYkzIYjsIRr1LBtRbghqueiVdyVHbsDDMYb+VO
VyAckFKjh1QtHkwZT+W5fxa5df1pH+BEKmLfvnOVOpOiaH4ur+8319EQTtz3/bBB
qm/f9mqmDY+JsxFsoXiVmht0oxH1MsHV7jSpwxVj0nN6uV61zlgTgj/kXIASbuRO
swFM1o6+KNuFuqI4w5+Nkw5o+PbtP5UMTVTpUSQBQumUbM+xPClRP/k7LZeK0ikv
36BQ2xaLIzECKXyYgK6b1rypTnJv6hAqJcNozUHnKPcworCNK1xB+n+pycrVzPwZ
32WNXdLSquTeXNmc4vHZxVrFFjGzeWmWESYt6huFWn6xb9IdfhrzpuH5LS7rTIhj
kvZCAiN4n+cuRwjBPaxxkSg/Lh8IyFOchwI6CcWWucGFMxJZpqtCS14B27LNrrJt
bCdO/AQr9h3hvDR5vrvLnxOnNusumIZ3tpvfWeObIdOhkiFoPykCggEBAOtEnCIz
RH2M7bo9IE2C4zE6T40fJvTRrSRju78dFW7mpfe9F2nr/2/FVjbeAw6FJaEFDQLW
OSc3Amr0ACLOpSIuoHHb6uT3XzNnXl6xdsJ1swi1gZJgyge6FUYjMNFjko98qI4O
aqYBZzoDBw+K7bpUXEMwYPZcU0A6P/9+98wkJLHp3DfqqfBH7PiMtAJY6+ZQ2mfs
UFGI6ygVONwPhHQ9kWwtGvBfb+4AgUD0lu9UR3Yij07cze1aVJcVXQJopBvFnEnG
qEsm2oDwnWquG4A7ASCUpHJk+A1K4p7q6opM3Y1Lv8OYzR7dHsAEH/NN0mSn1tyE
dFBrzSAdDr9mI8UCggEBAMNnkXy2EuLlDHzrOiEn0l4ULbtHZjT0FtjCczg/K9Nn
ousAr5I09eNuQDXLheNqOe5LXrpWqc5psf5FsCj5WkkzMbJm/7GPwQN8ufThBneY
4oAO/xrOvwDZP9ihzIo/+chQQMXXA8Dysn6lIOHCGrdvEYF8nIvf95gCbaXfPR8G
Jecsxg3Nc0Pi1bGN5X5q/AwlJDUrd7JjIuTWYxEuhczPcoiEskgjGHGO96EWIjLX
cGB4xav6K8X4BJyxN6Ewek/HT4TjMqd1bIH6020JNZ0Z1rVFtr9DUXf5xkI3gbjI
7X3uNu0yjw31rEfVA6vokfFUZ9TogNsxUw2s/WTX2FcCggEBAIXphJF26vonmDzU
hCl6YcToctVZsPvSySGYXwoWDNgWEsvjZotU2A0PntYWIllGcjenv1MpClkG16p2
/gjR5G6DabHFQntXTmnc4Xs2uelPwzsmzPy7eONTCL7mUugsLATeKLbK/+tDizUa
+g7fvha749QemmJABObfAQR1iag5vmVCPqXZPSdWWUzUEbXwVT3AMcDLYqA2NduX
0Mh5UKQ1UyvmtJmzSOuIgAmv7qWFLDPS0g1KYzBBpTpl3436b8abAS2BFNPJ5r9T
tdY+CctASpD36m5uiD5QrJNWFW/o9oZxYlJ8C+0QYWtcLa94UVQXsJXOEsKfyZ8I
yxcolR0CggEACrKs4GsHdayclmo6cH5BoizwpAQwsE18wrlCnZ23xIc0ADG1awfD
PoRWt5tA5UZ3jXhK42DDQy2+NPGTx2p/auqGmgHRleMM6if19lYriHiyTdiEVu9i
vaUnPbD+BcOi5TifkzVGW1XuN8jKmBGMbOaDytcLqwzD/WqEnkQukHhBsvpcjXzm
Bp1wnZvrKJSq3+9YoCCVGQscafLi0Zn+cUwaNScuq4xgVjdBj2wqyyXIXT+/cr7r
jpcZiYqaRRTmXV/IFrppl4lyO1uEH8AVU1iKzLnYW3hQCYV/OTjYvUki13YnQ600
78q3d+dNoCfHdbLtTFa+V0HIDkOeS9sVWQKCAQBoZIeAkKePec19TL5xvqe0MSDC
dZwW/rVPfIraMuETFXlH1dsyUcZu578T7Uvuc/ZAOf7cSedKtxuEtbd0eJ8YtQJ3
LWuL+JX5TsU0qsPvhQIKpLkznhTinH8/TVi8yxJzsOd56Ta2068U+ad9oRiI14Ne
pSzqQavGp5s1anSD769xKNNHKZkYPHYJ/5Te7hhdpBwQ3kn8AiUuemJ5MNfJO+8e
LCQL/LjuwgKAis0PQbWAHs2d9HJxQLlR62j754ooTDe6FfSoH2zKgdzSTteqHXue
ga/+6pwc/LoLS1TAAv9ChJFIERClNi6Bq/OpcECiVN6eFav6r5UR+w3+mCQk
-----END RSA PRIVATE KEY-----

View file

@ -216,6 +216,11 @@ class DefaultConfig(object):
# Signed registry grant token expiration in seconds
SIGNED_GRANT_EXPIRATION_SEC = 60 * 60 * 24 # One day to complete a push/pull
# Registry v2 JWT Auth config
JWT_AUTH_MAX_FRESH_S = 60 * 5 # At most the JWT can be signed for 300s in the future
JWT_AUTH_CERTIFICATE_PATH = 'conf/selfsigned/jwt.crt'
JWT_AUTH_PRIVATE_KEY_PATH = 'conf/selfsigned/jwt.key.insecure'
# The URL endpoint to which we redirect OAuth when generating a token locally.
LOCAL_OAUTH_HANDLER = '/oauth/localapp'

View file

@ -2,12 +2,14 @@ import string
import logging
import uuid
import time
import toposort
from random import SystemRandom
from datetime import datetime
from peewee import *
from data.read_slave import ReadSlaveModel
from sqlalchemy.engine.url import make_url
from collections import defaultdict
from data.read_slave import ReadSlaveModel
from util.names import urn_generator
@ -78,6 +80,41 @@ class UseThenDisconnect(object):
close_db_filter(None)
class TupleSelector(object):
  """ Helper class for selecting tuples from a peewee query and easily accessing
      them as if they were objects.
  """
  class _TupleWrapper(object):
    # Wraps one result tuple together with the string keys computed for the selected fields.
    def __init__(self, data, fields):
      self._data = data
      self._fields = fields

    def get(self, field):
      # Look up the position of the field's reference key and return the tuple value there.
      return self._data[self._fields.index(TupleSelector.tuple_reference_key(field))]

  @classmethod
  def tuple_reference_key(cls, field):
    """ Returns a string key for referencing a field in a TupleSelector. """
    # Function nodes (e.g. aggregates) are keyed by the function name plus its argument keys.
    if field._node_type == 'func':
      return field.name + ','.join([cls.tuple_reference_key(arg) for arg in field.arguments])

    # Plain column fields are keyed as '<field>:<Model>' to disambiguate same-named columns.
    if field._node_type == 'field':
      return field.name + ':' + field.model_class.__name__

    raise Exception('Unknown field type %s in TupleSelector' % field._node_type)

  def __init__(self, query, fields):
    # Narrow the query to exactly the requested fields and fetch results as raw tuples.
    self._query = query.select(*fields).tuples()
    self._fields = [TupleSelector.tuple_reference_key(field) for field in fields]

  def __iter__(self):
    return self._build_iterator()

  def _build_iterator(self):
    # Lazily wrap each raw tuple so callers can access values by field.
    for tuple_data in self._query:
      yield TupleSelector._TupleWrapper(tuple_data, self._fields)
db = Proxy()
read_slave = Proxy()
db_random_func = CallableProxy()
@ -214,6 +251,10 @@ class User(BaseModel):
else:
super(User, self).delete_instance(recursive=recursive, delete_nullable=delete_nullable)
Namespace = User.alias()
class TeamRole(BaseModel):
name = CharField(index=True)
@ -297,23 +338,49 @@ class Repository(BaseModel):
)
def delete_instance(self, recursive=False, delete_nullable=False):
# Note: peewee generates extra nested deletion statements here that are slow and unnecessary.
# Therefore, we define our own deletion order here and use the dependency system to verify it.
ordered_dependencies = [RepositoryAuthorizedEmail, RepositoryTag, Image, LogEntry,
RepositoryBuild, RepositoryBuildTrigger, RepositoryNotification,
RepositoryPermission, AccessToken, Star, RepositoryActionCount]
if not recursive:
raise RuntimeError('Non-recursive delete on repository.')
for query, fk in self.dependencies(search_nullable=True):
# These models don't need to use transitive deletes, because the referenced objects
# are cleaned up directly
skip_transitive_deletes = {RepositoryTag, RepositoryBuild, RepositoryBuildTrigger}
# We need to sort the ops so that models get cleaned in order of their dependencies
ops = reversed(list(self.dependencies(delete_nullable)))
filtered_ops = []
dependencies = defaultdict(set)
for query, fk in ops:
# We only want to skip transitive deletes, which are done using subqueries in the form of
# DELETE FROM <table> in <subquery>. If an op is not using a subquery, we allow it to be
# applied directly.
if fk.model_class not in skip_transitive_deletes or query.op != 'in':
filtered_ops.append((query, fk))
if query.op == 'in':
dependencies[fk.model_class.__name__].add(query.rhs.model_class.__name__)
elif query.op == '=':
dependencies[fk.model_class.__name__].add(Repository.__name__)
else:
raise RuntimeError('Unknown operator in recursive repository delete query')
sorted_models = list(reversed(toposort.toposort_flatten(dependencies)))
def sorted_model_key(query_fk_tuple):
cmp_query, cmp_fk = query_fk_tuple
if cmp_query.op == 'in':
return -1
return sorted_models.index(cmp_fk.model_class.__name__)
filtered_ops.sort(key=sorted_model_key)
for query, fk in filtered_ops:
model = fk.model_class
if not model in ordered_dependencies:
raise Exception('Missing repository deletion dependency: %s', model)
for model in ordered_dependencies:
model.delete().where(model.repository == self).execute()
# Delete the repository itself.
super(Repository, self).delete_instance(recursive=False, delete_nullable=False)
if fk.null and not delete_nullable:
model.update(**{fk.name: None}).where(query).execute()
else:
model.delete().where(query).execute()
return self.delete().where(self._pk_expr()).execute()
class Star(BaseModel):
user = ForeignKeyField(User, index=True)
@ -514,6 +581,11 @@ class RepositoryTag(BaseModel):
)
class TagManifest(BaseModel):
tag = ForeignKeyField(RepositoryTag)
digest = CharField(index=True)
class BUILD_PHASE(object):
""" Build phases enum """
ERROR = 'error'
@ -674,4 +746,4 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind,
AccessTokenKind, Star, RepositoryActionCount]
AccessTokenKind, Star, RepositoryActionCount, TagManifest]

View file

@ -1 +1,91 @@
from data.model.legacy import *
from data.database import db
# Exception hierarchy for the data model layer. All model-specific errors derive from
# DataModelException so callers can catch the base class to handle any model failure.
class DataModelException(Exception):
  """ Base class for all errors raised by the data model layer. """
  pass

class BlobDoesNotExist(DataModelException):
  pass

class InvalidEmailAddressException(DataModelException):
  pass

class InvalidOrganizationException(DataModelException):
  pass

class InvalidPasswordException(DataModelException):
  pass

class InvalidRobotException(DataModelException):
  pass

class InvalidUsernameException(DataModelException):
  pass

class TooManyUsersException(DataModelException):
  pass

class InvalidRepositoryBuildException(DataModelException):
  pass

class InvalidBuildTriggerException(DataModelException):
  pass

class InvalidTokenException(DataModelException):
  pass

class InvalidNotificationException(DataModelException):
  pass

class InvalidImageException(DataModelException):
  pass

class UserAlreadyInTeam(DataModelException):
  pass

class InvalidTeamException(DataModelException):
  pass

class InvalidTeamMemberException(DataModelException):
  pass
class TooManyLoginAttemptsException(Exception):
  """ Raised when login rate limiting triggers; retry_after carries the back-off interval. """
  def __init__(self, message, retry_after):
    super(TooManyLoginAttemptsException, self).__init__(message)
    # Seconds the client should wait before retrying (presumably surfaced to the client,
    # e.g. as a Retry-After header — verify against callers).
    self.retry_after = retry_after
class Config(object):
  """ Mutable holder for data-model configuration, populated by the application at startup. """
  def __init__(self):
    # App configuration dict; None until injected (used e.g. for DB_TRANSACTION_FACTORY).
    self.app_config = None
    # Storage engine reference; None until injected by the application.
    self.store = None
def db_transaction():
  """ Return a new database transaction context manager from the configured factory.

      NOTE: this is a function — use it as `with db_transaction():`, not `with db_transaction:`.
  """
  return config.app_config['DB_TRANSACTION_FACTORY'](db)
# There MUST NOT be any circular dependencies between these subsections. If there are fix it by
# moving the minimal number of things to _basequery
# TODO document the methods and modules for each one of the submodules below.
from data.model import (blob, build, image, log, notification, oauth, organization, permission,
repository, storage, tag, team, token, user)

77
data/model/_basequery.py Normal file
View file

@ -0,0 +1,77 @@
from peewee import JOIN_LEFT_OUTER
from cachetools import lru_cache
from data.database import (Repository, User, Team, TeamMember, RepositoryPermission, TeamRole,
Namespace, Visibility, db_for_update)
def get_existing_repository(namespace_name, repository_name, for_update=False):
  """ Fetch the repository with the given namespace and name, with its Namespace row joined in.

      When for_update is True, the query is wrapped with db_for_update (row locking).
      Raises Repository.DoesNotExist when no matching repository is found.
  """
  repo_query = (Repository
                .select(Repository, Namespace)
                .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                .where(Namespace.username == namespace_name,
                       Repository.name == repository_name))

  if for_update:
    return db_for_update(repo_query).get()

  return repo_query.get()
@lru_cache(maxsize=1)
def get_public_repo_visibility():
  """ Return the 'public' Visibility row, cached for the lifetime of the process. """
  return Visibility.get(name='public')
def filter_to_repos_for_user(query, username=None, namespace=None, include_public=True):
  """ Filter the given Repository query down to repositories visible to the (optional) user.

      Visibility is granted via: a direct user permission, membership in a team holding a
      permission, or membership in an 'admin'-role team of the owning organization. When
      include_public is True, public repositories are also matched. With no username and
      include_public False, an intentionally-empty query is returned.
  """
  if not include_public and not username:
    # No user and no public repos: return a query guaranteed to match nothing.
    return Repository.select().where(Repository.id == '-1')

  where_clause = None
  if username:
    UserThroughTeam = User.alias()
    Org = User.alias()
    AdminTeam = Team.alias()
    AdminTeamMember = TeamMember.alias()
    AdminUser = User.alias()

    # Join out to direct permissions, team-based permissions, and org admin teams.
    query = (query
             .switch(RepositoryPermission)
             .join(User, JOIN_LEFT_OUTER)
             .switch(RepositoryPermission)
             .join(Team, JOIN_LEFT_OUTER)
             .join(TeamMember, JOIN_LEFT_OUTER)
             .join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id == TeamMember.user))
             .switch(Repository)
             .join(Org, JOIN_LEFT_OUTER, on=(Repository.namespace_user == Org.id))
             .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
             .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
             .switch(AdminTeam)
             .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
             .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id)))

    # Visible if: directly permitted, permitted through a team, or an org admin.
    where_clause = ((User.username == username) | (UserThroughTeam.username == username) |
                    ((AdminUser.username == username) & (TeamRole.name == 'admin')))

    if namespace:
      where_clause = where_clause & (Namespace.username == namespace)

  # TODO(jschorr, jake): Figure out why the old join on Visibility was so darn slow and
  # remove this hack.
  if include_public:
    new_clause = (Repository.visibility == get_public_repo_visibility())
    if where_clause:
      where_clause = where_clause | new_clause
    else:
      where_clause = new_clause

  return query.where(where_clause)
def get_user_organizations(username):
  """ Return the distinct organization users in which the named user is a team member. """
  UserAlias = User.alias()
  return (User
          .select()
          .distinct()
          .join(Team)
          .join(TeamMember)
          .join(UserAlias, on=(UserAlias.id == TeamMember.user))
          .where(User.organization == True, UserAlias.username == username))

49
data/model/blob.py Normal file
View file

@ -0,0 +1,49 @@
from uuid import uuid4
from data.model import tag, _basequery, BlobDoesNotExist, db_transaction
from data.database import (Repository, Namespace, ImageStorage, Image, ImageStorageLocation,
ImageStoragePlacement)
def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
  """ Find the content-addressable blob linked to the specified repository.

      Returns the ImageStorage record with an added `locations` attribute: the set of
      storage location names where the blob is placed.
      Raises BlobDoesNotExist when no placement matches the digest in this repository.
  """
  placements = list(ImageStoragePlacement
                    .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
                    .join(ImageStorageLocation)
                    .switch(ImageStoragePlacement)
                    .join(ImageStorage)
                    .join(Image)
                    .join(Repository)
                    .join(Namespace)
                    .where(Repository.name == repo_name, Namespace.username == namespace,
                           ImageStorage.checksum == blob_digest))

  if not placements:
    raise BlobDoesNotExist('Blob does not exist with digest: {0}'.format(blob_digest))

  # All placements share the same storage row; collect every location it lives in.
  found = placements[0].storage
  found.locations = {placement.location.name for placement in placements}
  return found
def store_blob_record_and_temp_link(namespace, repo_name, blob_digest, location_name,
                                    link_expiration_s):
  """ Store a record of the blob and temporarily link it to the specified repository.

      Ensures an ImageStorage row exists for the digest and that it has a placement in
      the named location, then creates a temporary hidden Image/tag in the repository
      (expiring after link_expiration_s) to keep the blob referenced until the v1
      metadata replaces it.
  """
  random_image_name = str(uuid4())

  # BUGFIX: db_transaction is a factory function and must be *called* to obtain the
  # context manager ('with db_transaction:' would fail at runtime).
  with db_transaction():
    repo = _basequery.get_existing_repository(namespace, repo_name)

    try:
      storage = ImageStorage.get(checksum=blob_digest)
      location = ImageStorageLocation.get(name=location_name)
      ImageStoragePlacement.get(storage=storage, location=location)
    except ImageStorage.DoesNotExist:
      # BUGFIX: a newly-seen blob also needs its location resolved and a placement
      # created; the original branch created the storage row but never placed it.
      storage = ImageStorage.create(checksum=blob_digest)
      location = ImageStorageLocation.get(name=location_name)
      ImageStoragePlacement.create(storage=storage, location=location)
    except ImageStoragePlacement.DoesNotExist:
      ImageStoragePlacement.create(storage=storage, location=location)

    # Create a temporary link into the repository, to be replaced by the v1 metadata later,
    # and create a temporary tag to reference it.
    image = Image.create(storage=storage, docker_image_id=random_image_name, repository=repo)
    tag.create_temporary_hidden_tag(repo, image, link_expiration_s)

173
data/model/build.py Normal file
View file

@ -0,0 +1,173 @@
import json
from peewee import JOIN_LEFT_OUTER
from datetime import timedelta, datetime
from data.database import (BuildTriggerService, RepositoryBuildTrigger, Repository, Namespace, User,
RepositoryBuild, BUILD_PHASE, db_for_update)
from data.model import (InvalidBuildTriggerException, InvalidRepositoryBuildException,
db_transaction, user as user_model)
PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
def update_build_trigger(trigger, config, auth_token=None):
  """ Persist an updated configuration (and optionally a new auth token) on a trigger.

      A falsy config is stored as an empty JSON object. The stored auth token is only
      overwritten when auth_token is explicitly provided.
  """
  serialized = json.dumps(config if config else {})
  trigger.config = serialized

  if auth_token is not None:
    trigger.auth_token = auth_token

  trigger.save()
def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None, config=None):
  """ Create and return a new build trigger row for the given repository and service. """
  service = BuildTriggerService.get(name=service_name)
  serialized_config = json.dumps(config or {})
  return RepositoryBuildTrigger.create(repository=repo, service=service,
                                       auth_token=auth_token,
                                       connected_user=user,
                                       pull_robot=pull_robot,
                                       config=serialized_config)
def get_build_trigger(trigger_uuid):
  """ Return the build trigger with the given UUID, with its service, repository, namespace
      and connected user joined in.
      Raises InvalidBuildTriggerException when no trigger matches.
  """
  try:
    return (RepositoryBuildTrigger
            .select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)
            .join(BuildTriggerService)
            .switch(RepositoryBuildTrigger)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .switch(RepositoryBuildTrigger)
            .join(User)
            .where(RepositoryBuildTrigger.uuid == trigger_uuid)
            .get())
  except RepositoryBuildTrigger.DoesNotExist:
    msg = 'No build trigger with uuid: %s' % trigger_uuid
    raise InvalidBuildTriggerException(msg)
def list_build_triggers(namespace_name, repository_name):
  """ Return all build triggers (with service and repository joined) for the repository. """
  return (RepositoryBuildTrigger
          .select(RepositoryBuildTrigger, BuildTriggerService, Repository)
          .join(BuildTriggerService)
          .switch(RepositoryBuildTrigger)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))
def list_trigger_builds(namespace_name, repository_name, trigger_uuid,
                        limit):
  """ Return up to `limit` builds of the repository that were started by the trigger. """
  builds = list_repository_builds(namespace_name, repository_name, limit)
  return builds.where(RepositoryBuildTrigger.uuid == trigger_uuid)
def get_repository_for_resource(resource_key):
  """ Find the repository (with its namespace) owning a build with the given resource
      key, or None when no such build exists.
  """
  query = (Repository
           .select(Repository, Namespace)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .switch(Repository)
           .join(RepositoryBuild)
           .where(RepositoryBuild.resource_key == resource_key))
  try:
    return query.get()
  except Repository.DoesNotExist:
    return None
def _get_build_base_query():
  """ Base query for builds: joins repository, namespace, starting user, and the
      (optional) trigger plus its service; orders newest builds first.
  """
  query = (RepositoryBuild
           .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
                   Namespace, User)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id)))
  query = (query
           .switch(RepositoryBuild)
           .join(User, JOIN_LEFT_OUTER)
           .switch(RepositoryBuild)
           .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
           .join(BuildTriggerService, JOIN_LEFT_OUTER))
  return query.order_by(RepositoryBuild.started.desc())
def get_repository_build(build_uuid):
  """ Load a single build by uuid; raises InvalidRepositoryBuildException if missing. """
  query = _get_build_base_query().where(RepositoryBuild.uuid == build_uuid)
  try:
    return query.get()
  except RepositoryBuild.DoesNotExist:
    raise InvalidRepositoryBuildException('Unable to locate a build by id: %s' % build_uuid)
def list_repository_builds(namespace_name, repository_name, limit,
                           include_inactive=True, since=None):
  """ List up to `limit` builds of a repository, newest first. `since` restricts to
      builds started at or after that time; include_inactive=False drops builds that
      already completed or errored.
  """
  query = (_get_build_base_query()
           .where(Repository.name == repository_name, Namespace.username == namespace_name)
           .limit(limit))

  if since is not None:
    query = query.where(RepositoryBuild.started >= since)

  if not include_inactive:
    query = (query
             .where(RepositoryBuild.phase != BUILD_PHASE.ERROR)
             .where(RepositoryBuild.phase != BUILD_PHASE.COMPLETE))

  return query
def get_recent_repository_build(namespace_name, repository_name):
  """ Return the most recently started build for a repository, or None. """
  try:
    return list_repository_builds(namespace_name, repository_name, 1).get()
  except RepositoryBuild.DoesNotExist:
    return None
def create_repository_build(repo, access_token, job_config_obj, dockerfile_id,
                            display_name, trigger=None, pull_robot_name=None):
  """ Create a build row for the repository; resolves the optional pull robot name to
      its robot user.
  """
  pull_robot = user_model.lookup_robot(pull_robot_name) if pull_robot_name else None
  return RepositoryBuild.create(repository=repo, access_token=access_token,
                                job_config=json.dumps(job_config_obj),
                                display_name=display_name, trigger=trigger,
                                resource_key=dockerfile_id, pull_robot=pull_robot)
def get_pull_robot_name(trigger):
  """ Return the username of the trigger's pull robot, or None when no robot is set. """
  robot = trigger.pull_robot
  return robot.username if robot else None
def cancel_repository_build(build, work_queue):
  """ Attempt to cancel a queued build. Returns True on success; False when the build
      row no longer exists, the build has progressed past the WAITING phase (or has no
      queue item), or the queue item could not be cancelled.

      The whole check-cancel-delete sequence runs in one transaction with the build row
      locked for update, so a worker cannot pick the build up mid-cancel.
  """
  with db_transaction():
    # Reload the build for update.
    try:
      build = db_for_update(RepositoryBuild.select().where(RepositoryBuild.id == build.id)).get()
    except RepositoryBuild.DoesNotExist:
      return False

    # Only a build still waiting in the queue can be cancelled.
    if build.phase != BUILD_PHASE.WAITING or not build.queue_id:
      return False

    # Try to cancel the queue item.
    if not work_queue.cancel(build.queue_id):
      return False

    # Delete the build row.
    build.delete_instance()
    return True
def archivable_buildlogs_query():
  """ Query for builds whose logs may be archived: builds that finished (complete or
      error) or started so long ago they are presumed dead, and whose logs have not
      been archived yet.
  """
  presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
  done_or_dead = ((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
                  (RepositoryBuild.phase == BUILD_PHASE.ERROR) |
                  (RepositoryBuild.started < presumed_dead_date))
  return (RepositoryBuild
          .select()
          .where(done_or_dead, RepositoryBuild.logs_archived == False))

25
data/model/health.py Normal file
View file

@ -0,0 +1,25 @@
import logging
from data.database import TeamRole
from util.config.validator import validate_database_url
logger = logging.getLogger(__name__)
def check_health(app_config):
  """ Return True when the database is reachable and contains seeded team-role data.

      Used as the liveness probe; must return quickly even when the DB is down.
  """
  # Attempt to connect to the database first. If the DB is not responding,
  # using the validate_database_url will timeout quickly, as opposed to
  # making a normal connect which will just hang (thus breaking the health
  # check).
  try:
    validate_database_url(app_config['DB_URI'], {}, connect_timeout=3)
  except Exception:
    logger.exception('Could not connect to the database')
    return False

  # We will connect to the db, check that it contains some team role kinds
  try:
    return bool(list(TeamRole.select().limit(1)))
  except Exception:
    # Previously a bare `except:` that silently swallowed everything, including
    # SystemExit/KeyboardInterrupt; catch only real errors and log them for diagnosis.
    logger.exception('Could not query the database for team roles')
    return False

341
data/model/image.py Normal file
View file

@ -0,0 +1,341 @@
import logging
import dateutil.parser
from peewee import JOIN_LEFT_OUTER, fn
from datetime import datetime
from data.model import DataModelException, db_transaction, _basequery, storage
from data.database import (Image, Repository, ImageStoragePlacement, Namespace, ImageStorage,
ImageStorageLocation, RepositoryPermission, db_for_update)
logger = logging.getLogger(__name__)
def get_parent_images(namespace_name, repository_name, image_obj):
  """ Return the Image rows for every ancestor of the given image, in chronological
      (root-first) order.
  """
  # The ancestors field is a path of database ids: /<root>/<intermediate>/.../<parent>/
  ancestor_ids = image_obj.ancestors.strip('/').split('/')
  if ancestor_ids == ['']:
    return []

  loaded = get_repository_images_base(namespace_name, repository_name,
                                      lambda query: query.where(Image.id << ancestor_ids))
  by_id = {unicode(img.id): img for img in loaded}
  return [by_id[ancestor_id] for ancestor_id in ancestor_ids]
def get_repo_image(namespace_name, repository_name, docker_image_id):
  """ Return the Image row (without storage placements) for the docker id, or None. """
  def limit_to_image_id(query):
    return query.where(Image.docker_image_id == docker_image_id).limit(1)

  try:
    return _get_repository_images(namespace_name, repository_name, limit_to_image_id).get()
  except Image.DoesNotExist:
    return None
def get_repo_image_extended(namespace_name, repository_name, docker_image_id):
  """ Return the Image (with storage and location set populated) for the docker id,
      or None when the image is not in the repository.
  """
  def limit_to_image_id(query):
    return query.where(Image.docker_image_id == docker_image_id).limit(1)

  images = get_repository_images_base(namespace_name, repository_name, limit_to_image_id)
  return images[0] if images else None
def _get_repository_images(namespace_name, repository_name, query_modifier):
  """ Select images in the named repository, refined by the given query modifier. """
  query = (Image
           .select()
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Repository.name == repository_name, Namespace.username == namespace_name))
  return query_modifier(query)
def get_repository_images_base(namespace_name, repository_name, query_modifier):
  """ Return the repository's Image rows with their storage attached and the set of
      storage location names collected on `image.storage.locations`.

      The query joins through ImageStoragePlacement, so an image appears once per
      placement row; the loop below folds those rows back into unique Image objects.
  """
  query = (ImageStoragePlacement
           .select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
           .join(ImageStorageLocation)
           .switch(ImageStoragePlacement)
           .join(ImageStorage, JOIN_LEFT_OUTER)
           .join(Image)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Repository.name == repository_name, Namespace.username == namespace_name))

  query = query_modifier(query)

  location_list = list(query)

  images = {}
  for location in location_list:
    # Make sure we're always retrieving the same image object.
    image = location.storage.image

    # Set the storage to the one we got from the location, to prevent another query
    image.storage = location.storage

    # Idiom fix: `image.id not in images` (was `not image.id in images`).
    if image.id not in images:
      images[image.id] = image
      image.storage.locations = set()
    else:
      image = images[image.id]

    # Add the location to the image's locations set.
    image.storage.locations.add(location.location.name)

  return images.values()
def lookup_repository_images(namespace_name, repository_name, docker_image_ids):
  """ Select the repository's images whose docker ids are in the given collection. """
  query = (Image
           .select()
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id)))
  return query.where(Repository.name == repository_name, Namespace.username == namespace_name,
                     Image.docker_image_id << docker_image_ids)
def get_matching_repository_images(namespace_name, repository_name, docker_image_ids):
  """ Like get_repository_images_base, restricted to the given docker image ids. """
  return get_repository_images_base(namespace_name, repository_name,
                                    lambda q: q.where(Image.docker_image_id << docker_image_ids))
def get_repository_images_without_placements(repo_obj, with_ancestor=None):
  """ Select the repo's images with their storage but no placement info. When
      `with_ancestor` is given, restrict to that image and its descendants.
  """
  query = (Image
           .select(Image, ImageStorage)
           .join(ImageStorage)
           .where(Image.repository == repo_obj))

  if with_ancestor:
    ancestors_string = '%s%s/' % (with_ancestor.ancestors, with_ancestor.id)
    subtree_clause = ((Image.ancestors ** (ancestors_string + '%')) |
                      (Image.id == with_ancestor.id))
    query = query.where(subtree_clause)

  return query
def get_repository_images(namespace_name, repository_name):
  """ Return all images (with storage and locations) in the named repository. """
  identity = lambda query: query
  return get_repository_images_base(namespace_name, repository_name, identity)
def get_image_by_id(namespace_name, repository_name, docker_image_id):
  """ Like get_repo_image_extended, but raises DataModelException when not found. """
  image = get_repo_image_extended(namespace_name, repository_name, docker_image_id)
  if image is None:
    raise DataModelException('Unable to find image \'%s\' for repo \'%s/%s\'' %
                             (docker_image_id, namespace_name, repository_name))
  return image
def __translate_ancestry(old_ancestry, translations, repo_obj, username, preferred_location):
  """ Convert an ancestry path of image ids from another repository into the equivalent
      path of ids inside repo_obj, linking or creating the ancestor images as needed.

      old_ancestry: '/<root id>/.../<parent id>/' path of database ids in the source repo.
      translations: memo dict of {old image id -> new image id}; mutated in place.
      Returns the translated '/<id>/.../' ancestry string for repo_obj.

      NOTE: translate_id recurses back through find_create_or_link_image, so this can
      walk (and copy) the entire ancestor chain.
  """
  if old_ancestry == '/':
    return '/'

  def translate_id(old_id, docker_image_id):
    logger.debug('Translating id: %s', old_id)
    if old_id not in translations:
      # Not yet seen: link (or create) the image in repo_obj and memoize its new id.
      image_in_repo = find_create_or_link_image(docker_image_id, repo_obj, username, translations,
                                                preferred_location)
      translations[old_id] = image_in_repo.id
    return translations[old_id]

  # Select all the ancestor Docker IDs in a single query.
  old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]]
  query = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
  old_images = {i.id: i.docker_image_id for i in query}

  # Translate the old images into new ones.
  new_ids = [str(translate_id(old_id, old_images[old_id])) for old_id in old_ids]
  return '/%s/' % '/'.join(new_ids)
def _find_or_link_image(existing_image, repo_obj, username, translations, preferred_location):
  """ Create an image in repo_obj that shares the storage of existing_image (an image
      with the same docker id from another repository), translating its ancestry into
      repo_obj. Returns the new (or already-present) image, or None when the existing
      image row disappeared before it could be copied.
  """
  # TODO(jake): This call is currently recursively done under a single transaction. Can we make
  # it instead be done under a set of transactions?
  with db_transaction():
    # Check for an existing image, under the transaction, to make sure it doesn't already exist.
    repo_image = get_repo_image(repo_obj.namespace_user.username, repo_obj.name,
                                existing_image.docker_image_id)
    if repo_image:
      return repo_image

    # Make sure the existing base image still exists.
    try:
      to_copy = Image.select().join(ImageStorage).where(Image.id == existing_image.id).get()

      msg = 'Linking image to existing storage with docker id: %s and uuid: %s'
      logger.debug(msg, existing_image.docker_image_id, to_copy.storage.uuid)

      # Translate the source image's ancestry into this repository; this may in turn
      # link or create ancestor images (recursing via find_create_or_link_image).
      new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repo_obj,
                                                username, preferred_location)

      # Reuse the existing storage row; collect its placement location names.
      copied_storage = to_copy.storage
      copied_storage.locations = {placement.location.name
                                  for placement in copied_storage.imagestorageplacement_set}

      new_image = Image.create(docker_image_id=existing_image.docker_image_id,
                               repository=repo_obj, storage=copied_storage,
                               ancestors=new_image_ancestry)

      logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
      translations[existing_image.id] = new_image.id
      return new_image
    except Image.DoesNotExist:
      return None
def find_create_or_link_image(docker_image_id, repo_obj, username, translations,
                              preferred_location):
  """ Ensure an image with the given docker id exists in repo_obj and return it.

      Resolution order:
        1. The image already present in the repository.
        2. An accessible, fully-uploaded image with the same docker id in another
           repository, whose storage is linked in (see _find_or_link_image).
        3. A brand-new image with fresh storage created in preferred_location.

      translations is a memo dict shared across the recursive linking calls.
  """
  # First check for the image existing in the repository. If found, we simply return it.
  repo_image = get_repo_image(repo_obj.namespace_user.username, repo_obj.name,
                              docker_image_id)
  if repo_image:
    return repo_image

  # We next check to see if there is an existing storage the new image can link to.
  existing_image_query = (Image
                          .select(Image, ImageStorage)
                          .distinct()
                          .join(ImageStorage)
                          .switch(Image)
                          .join(Repository)
                          .join(RepositoryPermission, JOIN_LEFT_OUTER)
                          .switch(Repository)
                          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                          .where(ImageStorage.uploading == False,
                                 Image.docker_image_id == docker_image_id))

  # Restrict candidates to repositories the user may read.
  existing_image_query = _basequery.filter_to_repos_for_user(existing_image_query, username)

  # If there is an existing image, we try to translate its ancestry and copy its storage.
  new_image = None
  try:
    logger.debug('Looking up existing image for ID: %s', docker_image_id)
    existing_image = existing_image_query.get()

    logger.debug('Existing image %s found for ID: %s', existing_image.id, docker_image_id)
    new_image = _find_or_link_image(existing_image, repo_obj, username, translations,
                                    preferred_location)
    if new_image:
      return new_image
  except Image.DoesNotExist:
    logger.debug('No existing image found for ID: %s', docker_image_id)

  # Otherwise, create a new storage directly.
  with db_transaction():
    # Final check for an existing image, under the transaction.
    repo_image = get_repo_image(repo_obj.namespace_user.username, repo_obj.name,
                                docker_image_id)
    if repo_image:
      return repo_image

    logger.debug('Creating new storage for docker id: %s', docker_image_id)
    new_storage = storage.create_storage(preferred_location)

    return Image.create(docker_image_id=docker_image_id,
                        repository=repo_obj, storage=new_storage,
                        ancestors='/')
def set_image_metadata(docker_image_id, namespace_name, repository_name, created_date_str, comment,
                       command, parent=None):
  """ Store Docker v1 metadata (created date, comment, command, parent ancestry) on an
      image and its storage row, under a transaction with the row locked for update.
      Returns the updated Image; raises DataModelException when the image is not found.
  """
  with db_transaction():
    query = (Image
             .select(Image, ImageStorage)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .switch(Image)
             .join(ImageStorage)
             .where(Repository.name == repository_name, Namespace.username == namespace_name,
                    Image.docker_image_id == docker_image_id))

    try:
      fetched = db_for_update(query).get()
    except Image.DoesNotExist:
      raise DataModelException('No image with specified id and repository')

    # We cleanup any old checksum in case it's a retry after a fail
    fetched.storage.checksum = None
    # Default the created time to now; overridden below when the client supplied one.
    fetched.storage.created = datetime.now()

    if created_date_str is not None:
      try:
        # Stored naive (tzinfo stripped) — assumes upstream dates are UTC; TODO confirm.
        fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
      except:
        # parse raises different exceptions, so we cannot use a specific kind of handler here.
        pass

    fetched.storage.comment = comment
    fetched.storage.command = command

    if parent:
      fetched.ancestors = '%s%s/' % (parent.ancestors, parent.id)

    fetched.save()
    fetched.storage.save()
    return fetched
def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
  """ Record the compressed and uncompressed sizes on an image's storage and recompute
      the aggregate size (this image plus all of its ancestors). Returns the updated
      Image; raises DataModelException when the image is not found.
  """
  try:
    image = (Image
             .select(Image, ImageStorage)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .switch(Image)
             .join(ImageStorage, JOIN_LEFT_OUTER)
             .where(Repository.name == repository_name, Namespace.username == namespace_name,
                    Image.docker_image_id == docker_image_id)
             .get())
  except Image.DoesNotExist:
    raise DataModelException('No image with specified id and repository')

  image.storage.image_size = image_size
  image.storage.uncompressed_size = uncompressed_size

  # Ancestors are stored as '/<id>/<id>/.../'; the slice drops the empty edge parts.
  ancestors = image.ancestors.split('/')[1:-1]
  if ancestors:
    try:
      # TODO(jschorr): Switch to this faster route once we have full ancestor aggregate_size
      # parent_image = Image.get(Image.id == ancestors[-1])
      # total_size = image_size + parent_image.storage.aggregate_size
      # NOTE(review): Sum() yields NULL (None) if no ancestor rows match, which would
      # raise TypeError here rather than Image.DoesNotExist — verify upstream invariant.
      total_size = (ImageStorage
                    .select(fn.Sum(ImageStorage.image_size))
                    .join(Image)
                    .where(Image.id << ancestors)
                    .scalar()) + image_size

      image.storage.aggregate_size = total_size
    except Image.DoesNotExist:
      pass
  else:
    image.storage.aggregate_size = image_size

  image.storage.save()
  return image
def get_image(repo, dockerfile_id):
  """ Return the image in `repo` with the given docker image id, or None. """
  try:
    return Image.get(Image.repository == repo, Image.docker_image_id == dockerfile_id)
  except Image.DoesNotExist:
    return None

File diff suppressed because it is too large Load diff

99
data/model/log.py Normal file
View file

@ -0,0 +1,99 @@
import json
from peewee import JOIN_LEFT_OUTER, SQL, fn
from datetime import datetime, timedelta, date
from data.database import LogEntry, LogEntryKind, User
def list_logs(start_time, end_time, performer=None, repository=None, namespace=None):
  """ Return log entries in [start_time, end_time), newest first, optionally filtered
      by performer, repository and/or namespace.
  """
  Performer = User.alias()
  query = (LogEntry
           .select(LogEntry, LogEntryKind, User, Performer)
           .join(User)
           .switch(LogEntry)
           .join(Performer, JOIN_LEFT_OUTER,
                 on=(LogEntry.performer == Performer.id).alias('performer'))
           .switch(LogEntry)
           .join(LogEntryKind)
           .switch(LogEntry))

  if repository:
    query = query.where(LogEntry.repository == repository)
  if performer:
    query = query.where(LogEntry.performer == performer)
  if namespace:
    query = query.where(User.username == namespace)

  query = (query
           .where(LogEntry.datetime >= start_time, LogEntry.datetime < end_time)
           .order_by(LogEntry.datetime.desc()))
  return list(query)
def log_action(kind_name, user_or_organization_name, performer=None, repository=None,
               ip=None, metadata=None, timestamp=None):
  """ Record a log entry of the given kind against a user or organization account.

      metadata: optional dict serialized to JSON. (Was a mutable default argument
      `metadata={}`, a shared-state hazard; None now means 'empty' — backward
      compatible with all existing callers.)
      timestamp: defaults to the current time.
  """
  if not timestamp:
    timestamp = datetime.today()

  kind = LogEntryKind.get(LogEntryKind.name == kind_name)
  account = User.get(User.username == user_or_organization_name)
  LogEntry.create(kind=kind, account=account, performer=performer,
                  repository=repository, ip=ip, metadata_json=json.dumps(metadata or {}),
                  datetime=timestamp)
def _get_repository_events(repository, time_delta, time_delta_earlier, clause):
  """ Returns a pair representing the count of the number of events for the given
      repository in each of the specified time deltas. The date ranges are calculated by
      taking the current time today and subtracting the time delta given. Since
      we want to grab *two* ranges, we restrict the second range to be greater
      than the first (i.e. referring to an earlier time), so we can conduct the
      lookup in a single query. The clause is used to further filter the kind of
      events being found.
  """
  since = date.today() - time_delta
  since_earlier = date.today() - time_delta_earlier

  # time_delta_earlier must be the larger delta so that since_earlier is the older date.
  if since_earlier >= since:
    raise ValueError('time_delta_earlier must be greater than time_delta')

  # This uses a CASE WHEN inner clause to further filter the count.
  # The interpolated value comes from strftime (digits and dashes only), so the raw
  # SQL string cannot be injected into.
  formatted = since.strftime('%Y-%m-%d')
  case_query = 'CASE WHEN datetime >= \'%s\' THEN 1 ELSE 0 END' % formatted

  result = (LogEntry
            .select(fn.Sum(SQL(case_query)), fn.Count(SQL('*')))
            .where(LogEntry.repository == repository)
            .where(clause)
            .where(LogEntry.datetime >= since_earlier)
            .tuples()
            .get())

  # Sum is NULL when no recent rows matched; coerce both counts to ints.
  return (int(result[0]) if result[0] else 0, int(result[1]) if result[1] else 0)
def get_repository_pushes(repository, time_delta, time_delta_earlier):
  """ Count push events on the repository over the two time windows. """
  push_kind = LogEntryKind.get(name='push_repo')
  return _get_repository_events(repository, time_delta, time_delta_earlier,
                                (LogEntry.kind == push_kind))
def get_repository_pulls(repository, time_delta, time_delta_earlier):
  """ Count pull events (plain pulls plus verb-based pulls) over the two time windows. """
  pull_kind = LogEntryKind.get(name='pull_repo')
  verb_kind = LogEntryKind.get(name='repo_verb')
  pull_clause = (LogEntry.kind == pull_kind) | (LogEntry.kind == verb_kind)
  return _get_repository_events(repository, time_delta, time_delta_earlier, pull_clause)
def get_repository_usage():
  """ Count distinct (ip, repository) pull pairs over the last four weeks. """
  one_month_ago = date.today() - timedelta(weeks=4)
  repo_pull = LogEntryKind.get(name='pull_repo')
  repo_verb = LogEntryKind.get(name='repo_verb')

  query = (LogEntry
           .select(LogEntry.ip, LogEntry.repository)
           .where((LogEntry.kind == repo_pull) | (LogEntry.kind == repo_verb))
           .where(~(LogEntry.repository >> None))
           .where(LogEntry.datetime >= one_month_ago)
           .group_by(LogEntry.ip, LogEntry.repository))
  return query.count()

158
data/model/notification.py Normal file
View file

@ -0,0 +1,158 @@
import json
from peewee import JOIN_LEFT_OUTER
from data.model import InvalidNotificationException, db_transaction
from data.database import (Notification, NotificationKind, User, Team, TeamMember, TeamRole,
RepositoryNotification, ExternalNotificationEvent, Repository,
ExternalNotificationMethod, Namespace)
def create_notification(kind_name, target, metadata=None):
  """ Create and return a notification of the given kind for the target user/org.

      metadata: optional dict stored as JSON. (Was a mutable default argument
      `metadata={}`, a shared-state hazard; None now means 'empty' and is backward
      compatible with existing callers.)
  """
  kind_ref = NotificationKind.get(name=kind_name)
  return Notification.create(kind=kind_ref, target=target,
                             metadata_json=json.dumps(metadata or {}))
def create_unique_notification(kind_name, target, metadata=None):
  """ Create a notification only when the target has no existing one of this kind.

      metadata defaults to an empty dict (was a mutable default argument, a
      shared-state hazard). The check-then-create runs inside a transaction.
  """
  with db_transaction():
    if list_notifications(target, kind_name, limit=1).count() == 0:
      # Pass a concrete dict so downstream serialization is unaffected by the None default.
      create_notification(kind_name, target, metadata or {})
def lookup_notification(user, uuid):
  """ Return the notification with the given uuid visible to the user, or None. """
  matches = list(list_notifications(user, id_filter=uuid, include_dismissed=True, limit=1))
  return matches[0] if matches else None
def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
                       page=None, limit=None):
  """ Return the notifications visible to a user: those targeted at the user directly,
      plus those targeted at any organization on whose 'admin' team the user sits.

      kind_name / id_filter narrow by kind name or notification uuid;
      include_dismissed keeps dismissed entries; page/limit paginate (page uses
      limit as the page size). Results are ordered newest-created first.
  """
  # Aliases for the org-admin path: target org -> its admin teams -> team members.
  Org = User.alias()
  AdminTeam = Team.alias()
  AdminTeamMember = TeamMember.alias()
  AdminUser = User.alias()

  query = (Notification.select()
           .join(User)
           .switch(Notification)
           .join(Org, JOIN_LEFT_OUTER, on=(Org.id == Notification.target))
           .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
           .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
           .switch(AdminTeam)
           .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
           .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id))
           .where((Notification.target == user) |
                  ((AdminUser.id == user) & (TeamRole.name == 'admin')))
           .order_by(Notification.created)
           .desc())

  if not include_dismissed:
    query = query.switch(Notification).where(Notification.dismissed == False)

  if kind_name:
    query = (query
             .switch(Notification)
             .join(NotificationKind)
             .where(NotificationKind.name == kind_name))

  if id_filter:
    query = (query
             .switch(Notification)
             .where(Notification.uuid == id_filter))

  if page:
    query = query.paginate(page, limit)
  elif limit:
    query = query.limit(limit)

  return query
def delete_all_notifications_by_kind(kind_name):
  """ Delete every notification of the given kind, for all targets. """
  kind_ref = NotificationKind.get(name=kind_name)
  Notification.delete().where(Notification.kind == kind_ref).execute()
def delete_notifications_by_kind(target, kind_name):
  """ Delete the target user/org's notifications of the given kind. """
  kind_ref = NotificationKind.get(name=kind_name)
  (Notification
   .delete()
   .where(Notification.target == target, Notification.kind == kind_ref)
   .execute())
def delete_matching_notifications(target, kind_name, **kwargs):
  """ Delete the target's notifications of the given kind whose JSON metadata contains
      every key/value pair given in kwargs.
  """
  kind_ref = NotificationKind.get(name=kind_name)

  # Load all notifications for the user with the given kind.
  notifications = Notification.select().where(
    Notification.target == target,
    Notification.kind == kind_ref)

  # For each, match the metadata to the specified values.
  for notification in notifications:
    try:
      metadata = json.loads(notification.metadata_json)
    except ValueError:
      # Was a bare `except:`; only malformed JSON should be skipped, not every error.
      continue

    if all(key in metadata and metadata[key] == value
           for (key, value) in kwargs.iteritems()):
      notification.delete_instance()
def create_repo_notification(repo, event_name, method_name, config):
  """ Create a repository notification hook: deliver `event_name` events via the
      `method_name` method, with the given config stored as JSON.
  """
  event = ExternalNotificationEvent.get(ExternalNotificationEvent.name == event_name)
  method = ExternalNotificationMethod.get(ExternalNotificationMethod.name == method_name)
  return RepositoryNotification.create(repository=repo, event=event, method=method,
                                       config_json=json.dumps(config))
def get_repo_notification(uuid):
  """ Load a repository notification by uuid (with its repository and namespace);
      raises InvalidNotificationException when no such notification exists.
  """
  query = (RepositoryNotification
           .select(RepositoryNotification, Repository, Namespace)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(RepositoryNotification.uuid == uuid))
  try:
    return query.get()
  except RepositoryNotification.DoesNotExist:
    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
def delete_repo_notification(namespace_name, repository_name, uuid):
  """ Delete (and return) the notification with the given uuid after verifying it
      belongs to the named repository; raises InvalidNotificationException otherwise.
  """
  found = get_repo_notification(uuid)
  if (found.repository.namespace_user.username != namespace_name or
      found.repository.name != repository_name):
    # Fixed typo in the user-facing message ('notifiation' -> 'notification').
    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
  found.delete_instance()
  return found
def list_repo_notifications(namespace_name, repository_name, event_name=None):
  """ List the repository's notification hooks, optionally only for one event type. """
  query = (RepositoryNotification
           .select(RepositoryNotification, Repository, Namespace)
           .join(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .where(Namespace.username == namespace_name, Repository.name == repository_name))

  if event_name:
    query = (query
             .switch(RepositoryNotification)
             .join(ExternalNotificationEvent)
             .where(ExternalNotificationEvent.name == event_name))

  return query

View file

@ -7,13 +7,14 @@ from oauth2lib.provider import AuthorizationProvider
from oauth2lib import utils
from data.database import (OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, User,
random_string_generator)
from data.model.legacy import get_user
AccessToken, random_string_generator)
from data.model import user
from auth import scopes
logger = logging.getLogger(__name__)
class DatabaseAuthorizationProvider(AuthorizationProvider):
def get_authorized_user(self):
raise NotImplementedError('Subclasses must fill in the ability to get the authorized_user.')
@ -49,7 +50,8 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
try:
oauth_app = OAuthApplication.get(client_id=client_id)
if oauth_app.redirect_uri and redirect_uri and redirect_uri.startswith(oauth_app.redirect_uri):
if (oauth_app.redirect_uri and redirect_uri and
redirect_uri.startswith(oauth_app.redirect_uri)):
return True
return False
except OAuthApplication.DoesNotExist:
@ -63,12 +65,12 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
def load_authorized_scope_string(self, client_id, username):
found = (OAuthAccessToken
.select()
.join(OAuthApplication)
.switch(OAuthAccessToken)
.join(User)
.where(OAuthApplication.client_id == client_id, User.username == username,
OAuthAccessToken.expires_at > datetime.utcnow()))
.select()
.join(OAuthApplication)
.switch(OAuthAccessToken)
.join(User)
.where(OAuthApplication.client_id == client_id, User.username == username,
OAuthAccessToken.expires_at > datetime.utcnow()))
found = list(found)
logger.debug('Found %s matching tokens.', len(found))
long_scope_string = ','.join([token.scope for token in found])
@ -84,11 +86,11 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
def from_authorization_code(self, client_id, code, scope):
try:
found = (OAuthAuthorizationCode
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id, OAuthAuthorizationCode.code == code,
OAuthAuthorizationCode.scope == scope)
.get())
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id, OAuthAuthorizationCode.code == code,
OAuthAuthorizationCode.scope == scope)
.get())
logger.debug('Returning data: %s', found.data)
return found.data
except OAuthAuthorizationCode.DoesNotExist:
@ -97,12 +99,12 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
def from_refresh_token(self, client_id, refresh_token, scope):
try:
found = (OAuthAccessToken
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id,
OAuthAccessToken.refresh_token == refresh_token,
OAuthAccessToken.scope == scope)
.get())
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id,
OAuthAccessToken.refresh_token == refresh_token,
OAuthAccessToken.scope == scope)
.get())
return found.data
except OAuthAccessToken.DoesNotExist:
return None
@ -114,31 +116,31 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
def persist_token_information(self, client_id, scope, access_token, token_type, expires_in,
refresh_token, data):
user = get_user(json.loads(data)['username'])
if not user:
found = user.get_user(json.loads(data)['username'])
if not found:
raise RuntimeError('Username must be in the data field')
oauth_app = OAuthApplication.get(client_id=client_id)
expires_at = datetime.utcnow() + timedelta(seconds=expires_in)
OAuthAccessToken.create(application=oauth_app, authorized_user=user, scope=scope,
OAuthAccessToken.create(application=oauth_app, authorized_user=found, scope=scope,
access_token=access_token, token_type=token_type,
expires_at=expires_at, refresh_token=refresh_token, data=data)
def discard_authorization_code(self, client_id, code):
found = (OAuthAuthorizationCode
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id, OAuthAuthorizationCode.code == code)
.get())
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id, OAuthAuthorizationCode.code == code)
.get())
found.delete_instance()
def discard_refresh_token(self, client_id, refresh_token):
found = (AccessToken
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id,
OAuthAccessToken.refresh_token == refresh_token)
.get())
.select()
.join(OAuthApplication)
.where(OAuthApplication.client_id == client_id,
OAuthAccessToken.refresh_token == refresh_token)
.get())
found.delete_instance()
@ -157,7 +159,6 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
def get_token_response(self, response_type, client_id, redirect_uri, **params):
# Ensure proper response_type
if response_type != 'token':
err = 'unsupported_response_type'
@ -211,10 +212,10 @@ def create_application(org, name, application_uri, redirect_uri, **kwargs):
def validate_access_token(access_token):
try:
found = (OAuthAccessToken
.select(OAuthAccessToken, User)
.join(User)
.where(OAuthAccessToken.access_token == access_token)
.get())
.select(OAuthAccessToken, User)
.join(User)
.where(OAuthAccessToken.access_token == access_token)
.get())
return found
except OAuthAccessToken.DoesNotExist:
return None
@ -235,7 +236,7 @@ def reset_client_secret(application):
def lookup_application(org, client_id):
try:
return OAuthApplication.get(organization = org, client_id=client_id)
return OAuthApplication.get(organization=org, client_id=client_id)
except OAuthApplication.DoesNotExist:
return None
@ -249,21 +250,21 @@ def delete_application(org, client_id):
return application
def lookup_access_token_for_user(user, token_uuid):
def lookup_access_token_for_user(user_obj, token_uuid):
try:
return OAuthAccessToken.get(OAuthAccessToken.authorized_user == user,
return OAuthAccessToken.get(OAuthAccessToken.authorized_user == user_obj,
OAuthAccessToken.uuid == token_uuid)
except OAuthAccessToken.DoesNotExist:
return None
def list_access_tokens_for_user(user):
def list_access_tokens_for_user(user_obj):
query = (OAuthAccessToken
.select()
.join(OAuthApplication)
.switch(OAuthAccessToken)
.join(User)
.where(OAuthAccessToken.authorized_user == user))
.where(OAuthAccessToken.authorized_user == user_obj))
return query
@ -277,9 +278,9 @@ def list_applications_for_org(org):
return query
def create_access_token_for_testing(user, client_id, scope):
def create_access_token_for_testing(user_obj, client_id, scope):
expires_at = datetime.utcnow() + timedelta(seconds=10000)
application = get_application_for_client_id(client_id)
OAuthAccessToken.create(application=application, authorized_user=user, scope=scope,
OAuthAccessToken.create(application=application, authorized_user=user_obj, scope=scope,
token_type='token', access_token='test',
expires_at=expires_at, refresh_token='', data='')

126
data/model/organization.py Normal file
View file

@ -0,0 +1,126 @@
from data.database import (User, FederatedLogin, TeamMember, Team, TeamRole, RepositoryPermission,
Repository, Namespace)
from data.model import (user, team, DataModelException, InvalidOrganizationException,
InvalidUsernameException, db_transaction, _basequery)
def create_organization(name, email, creating_user):
  """ Create a new organization account with creating_user on its owners team.
      Returns the new org; raises InvalidOrganizationException when the name is not
      a valid username.
  """
  try:
    # Create the org
    new_org = user.create_user_noverify(name, email)
    new_org.organization = True
    new_org.save()

    # Create a team for the owners
    owners_team = team.create_team('owners', new_org, 'admin')

    # Add the user who created the org to the owners team
    team.add_user_to_team(creating_user, owners_team)

    return new_org
  except InvalidUsernameException:
    # Orgs share the user namespace, so the username rules apply to org names too.
    msg = ('Invalid organization name: %s Organization names must consist ' +
           'solely of lower case letters, numbers, and underscores. ' +
           '[a-z0-9_]') % name
    raise InvalidOrganizationException(msg)
def get_organization(name):
  """ Return the organization account with the given name; raises
      InvalidOrganizationException when no such organization exists.
  """
  try:
    return User.get(username=name, organization=True)
  except User.DoesNotExist:
    raise InvalidOrganizationException('Organization does not exist: %s' % name)
def convert_user_to_organization(user_obj, admin_user):
  """ Turn an existing user account into an organization administered by admin_user.
      The converted account can no longer log in directly (password and federated
      logins are removed). Returns the mutated user row.
  """
  # Change the user to an organization.
  user_obj.organization = True

  # disable this account for login.
  user_obj.password_hash = None
  user_obj.save()

  # Clear any federated auth pointing to this user
  FederatedLogin.delete().where(FederatedLogin.user == user_obj).execute()

  # Create a team for the owners
  owners_team = team.create_team('owners', user_obj, 'admin')

  # Add the user who will admin the org to the owners team
  team.add_user_to_team(admin_user, owners_team)

  return user_obj
def get_user_organizations(username):
  """ Return the organizations the named user belongs to (delegates to _basequery). """
  return _basequery.get_user_organizations(username)
def get_organization_team_members(teamid):
  """ Return the users who are members of the team with the given id. """
  return (User
          .select()
          .join(TeamMember)
          .join(Team)
          .where(Team.id == teamid))
def __get_org_admin_users(org):
  """ Return the distinct non-robot users on the organization's admin teams. """
  query = (User
           .select()
           .join(TeamMember)
           .join(Team)
           .join(TeamRole))
  return query.where(Team.organization == org, TeamRole.name == 'admin',
                     User.robot == False).distinct()
def remove_organization_member(org, user_obj):
  """ Removes a user from an organization: strips their repository permissions and
      team memberships under the org.

  Raises DataModelException if the user is the sole remaining org admin, since
  removing them would leave the org unadministrable.
  """
  org_admins = [u.username for u in __get_org_admin_users(org)]
  if len(org_admins) == 1 and user_obj.username in org_admins:
    raise DataModelException('Cannot remove user as they are the only organization admin')

  # Both cleanups happen in one transaction so a failure cannot leave the user
  # half-removed.
  with db_transaction():
    # Find and remove the user from any repositories under the org.
    permissions = (RepositoryPermission
                   .select(RepositoryPermission.id)
                   .join(Repository)
                   .where(Repository.namespace_user == org,
                          RepositoryPermission.user == user_obj))

    RepositoryPermission.delete().where(RepositoryPermission.id << permissions).execute()

    # Find and remove the user from any teams under the org.
    members = (TeamMember
               .select(TeamMember.id)
               .join(Team)
               .where(Team.organization == org, TeamMember.user == user_obj))

    TeamMember.delete().where(TeamMember.id << members).execute()
def get_organization_member_set(orgname):
  """ Returns the set of usernames of all members of the named organization. """
  Org = User.alias()
  members_query = (User
                   .select(User.username)
                   .join(TeamMember)
                   .join(Team)
                   .join(Org, on=(Org.id == Team.organization))
                   .where(Org.username == orgname)
                   .distinct())
  # Note: renamed the comprehension variable so it no longer shadows the
  # imported `user` module.
  return {member.username for member in members_query}
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
  """ Returns the distinct users who can access the given repository via a team
      that has a permission on it (team-mediated access only, not direct grants). """
  return (User
          .select()
          .distinct()
          .join(TeamMember)
          .join(Team)
          .join(RepositoryPermission)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))
def get_organizations():
  """ Returns a query over all organization accounts, excluding robot accounts. """
  return User.select().where(User.organization == True, User.robot == False)

283
data/model/permission.py Normal file
View file

@ -0,0 +1,283 @@
from peewee import JOIN_LEFT_OUTER
from data.database import (RepositoryPermission, User, Repository, Visibility, Role, TeamMember,
PermissionPrototype, Team, TeamRole, Namespace)
from data.model import DataModelException, _basequery
def list_robot_permissions(robot_name):
  """ Returns the repository permissions held by the robot account with the given
      (full) robot username, with the repository, visibility and role prefetched. """
  return (RepositoryPermission
          .select(RepositoryPermission, User, Repository)
          .join(Repository)
          .join(Visibility)
          .switch(RepositoryPermission)
          .join(Role)
          .switch(RepositoryPermission)
          .join(User)
          .where(User.username == robot_name, User.robot == True))
def list_organization_member_permissions(organization):
  """ Returns repository permissions granted directly to non-robot users on
      repositories under the given organization. """
  return (RepositoryPermission
          .select(RepositoryPermission, Repository, User)
          .join(Repository)
          .switch(RepositoryPermission)
          .join(User)
          .where(Repository.namespace_user == organization,
                 User.robot == False))
def get_all_user_permissions(user):
  """ Returns every repository permission the user holds, directly or via teams. """
  return _get_user_repo_permissions(user)
def get_user_repo_permissions(user, repo):
  """ Returns the user's permissions (direct or team-based) limited to one repository. """
  return _get_user_repo_permissions(user, limit_to_repository_obj=repo)
def _get_user_repo_permissions(user, limit_to_repository_obj=None):
  """ Builds the union of the user's direct repository permissions and those
      granted through team membership, optionally limited to a single repository.

  The shared base query is cloned for each branch so the joins do not
  accumulate, and the two are combined with a SQL UNION (the `|` operator).
  """
  UserThroughTeam = User.alias()

  base_query = (RepositoryPermission
                .select(RepositoryPermission, Role, Repository, Namespace)
                .join(Role)
                .switch(RepositoryPermission)
                .join(Repository)
                .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                .switch(RepositoryPermission))

  if limit_to_repository_obj is not None:
    base_query = base_query.where(RepositoryPermission.repository == limit_to_repository_obj)

  # Permissions granted to the user directly.
  direct = (base_query
            .clone()
            .join(User)
            .where(User.id == user))

  # Permissions granted to a team containing the user.
  team = (base_query
          .clone()
          .join(Team)
          .join(TeamMember)
          .join(UserThroughTeam, on=(UserThroughTeam.id == TeamMember.user))
          .where(UserThroughTeam.id == user))

  return direct | team
def delete_prototype_permission(org, uid):
  """ Deletes the permission prototype with the given uuid under the org.

  Returns the deleted prototype, or None if it did not exist.
  """
  proto = get_prototype_permission(org, uid)
  if proto is not None:
    proto.delete_instance()
  return proto
def get_prototype_permission(org, uid):
  """ Returns the permission prototype with the given uuid under the org, or None. """
  try:
    return PermissionPrototype.get(PermissionPrototype.org == org,
                                   PermissionPrototype.uuid == uid)
  except PermissionPrototype.DoesNotExist:
    return None
def get_prototype_permissions(org):
  """ Returns all permission prototypes for the organization, with the activating
      user, delegate user, delegate team and role joined in.

  All joins are LEFT OUTER because each of those references is nullable on the
  prototype.
  """
  ActivatingUser = User.alias()
  DelegateUser = User.alias()
  query = (PermissionPrototype
           .select()
           .where(PermissionPrototype.org == org)
           .join(ActivatingUser, JOIN_LEFT_OUTER,
                 on=(ActivatingUser.id == PermissionPrototype.activating_user))
           .join(DelegateUser, JOIN_LEFT_OUTER,
                 on=(DelegateUser.id == PermissionPrototype.delegate_user))
           .join(Team, JOIN_LEFT_OUTER,
                 on=(Team.id == PermissionPrototype.delegate_team))
           .join(Role, JOIN_LEFT_OUTER, on=(Role.id == PermissionPrototype.role)))
  return query
def update_prototype_permission(org, uid, role_name):
  """ Changes the role of the permission prototype with the given uuid.

  Returns the updated prototype, or None if it does not exist.
  """
  proto = get_prototype_permission(org, uid)
  if proto is None:
    return None

  proto.role = Role.get(Role.name == role_name)
  proto.save()
  return proto
def add_prototype_permission(org, role_name, activating_user,
                             delegate_user=None, delegate_team=None):
  """ Creates a new permission prototype for the org, delegating the named role
      to either a user or a team. """
  role = Role.get(Role.name == role_name)
  return PermissionPrototype.create(org=org, role=role, activating_user=activating_user,
                                    delegate_user=delegate_user, delegate_team=delegate_team)
def get_org_wide_permissions(user):
  """ Returns the teams (with their org and role) through which the given user
      holds organization-wide permissions. """
  Org = User.alias()
  query = (Team
           .select(Team, Org, TeamRole)
           .join(TeamRole)
           .switch(Team)
           .join(Org, on=(Team.organization == Org.id))
           .switch(Team)
           .join(TeamMember)
           .join(User))
  return query.where(User.id == user, Org.organization == True)
def get_all_repo_teams(namespace_name, repository_name):
  """ Returns the team permissions on the given repository, selecting the team
      name and role name alongside the permission row. """
  return (RepositoryPermission
          .select(Team.name, Role.name, RepositoryPermission)
          .join(Team)
          .switch(RepositoryPermission)
          .join(Role)
          .switch(RepositoryPermission)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))
def apply_default_permissions(repo_obj, creating_user_obj):
  """ Applies an organization's permission prototypes to a newly created repository.

  A prototype applies if it has no activating user (org-wide default) or its
  activating user is the repository creator. Team-delegated and user-delegated
  prototypes are resolved separately; __apply_permission_list handles the
  precedence between user-specific and org-wide prototypes.
  """
  org = repo_obj.namespace_user
  user_clause = ((PermissionPrototype.activating_user == creating_user_obj) |
                 (PermissionPrototype.activating_user >> None))

  # Prototypes delegating to a team (delegate_user is NULL).
  team_protos = (PermissionPrototype
                 .select()
                 .where(PermissionPrototype.org == org, user_clause,
                        PermissionPrototype.delegate_user >> None))

  def create_team_permission(team, repo, role):
    RepositoryPermission.create(team=team, repository=repo, role=role)

  __apply_permission_list(repo_obj, team_protos, 'name', create_team_permission)

  # Prototypes delegating to a user (delegate_team is NULL).
  user_protos = (PermissionPrototype
                 .select()
                 .where(PermissionPrototype.org == org, user_clause,
                        PermissionPrototype.delegate_team >> None))

  def create_user_permission(user, repo, role):
    # The creating user always gets admin anyway
    if user.username == creating_user_obj.username:
      return

    RepositoryPermission.create(user=user, repository=repo, role=role)

  __apply_permission_list(repo_obj, user_protos, 'username', create_user_permission)
def __apply_permission_list(repo, proto_query, name_property, create_permission_func):
  """ Deduplicates permission prototypes by delegate name and creates one
      permission per delegate via create_permission_func.

  Prototypes with an activating user take precedence over org-wide defaults
  (those with activating_user NULL) targeting the same delegate.
  """
  final_protos = {}
  for proto in proto_query:
    applies_to = proto.delegate_team or proto.delegate_user
    name = getattr(applies_to, name_property)
    # We will skip the proto if it is pre-empted by a more important proto
    if name in final_protos and proto.activating_user is None:
      continue

    # By this point, it is either a user specific proto, or there is no
    # proto yet, so we can safely assume it applies
    final_protos[name] = (applies_to, proto.role)

  for delegate, role in final_protos.values():
    create_permission_func(delegate, repo, role)
def __entity_permission_repo_query(entity_id, entity_table, entity_id_property, namespace_name,
                                   repository_name):
  """ This method works for both users and teams.

  Builds a query for the permission the given entity (identified by
  entity_id_property == entity_id on entity_table) holds on the named
  repository, with the role and repository joined in.
  """
  return (RepositoryPermission
          .select(entity_table, Repository, Namespace, Role, RepositoryPermission)
          .join(entity_table)
          .switch(RepositoryPermission)
          .join(Role)
          .switch(RepositoryPermission)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Repository.name == repository_name, Namespace.username == namespace_name,
                 entity_id_property == entity_id))
def get_user_reponame_permission(username, namespace_name, repository_name):
  """ Returns the permission the named user holds on the given repository,
      raising DataModelException if they have none. """
  perms = list(__entity_permission_repo_query(username, User, User.username, namespace_name,
                                              repository_name))
  if not perms:
    raise DataModelException('User does not have permission for repo.')

  return perms[0]
def get_team_reponame_permission(team_name, namespace_name, repository_name):
  """ Returns the permission the named team holds on the given repository,
      raising DataModelException if it has none. """
  perms = list(__entity_permission_repo_query(team_name, Team, Team.name, namespace_name,
                                              repository_name))
  if not perms:
    raise DataModelException('Team does not have permission for repo.')

  return perms[0]
def delete_user_permission(username, namespace_name, repository_name):
  """ Deletes the named user's permission on the repository.

  The namespace owner's permission cannot be removed.
  """
  if username == namespace_name:
    raise DataModelException('Namespace owner must always be admin.')

  perms = list(__entity_permission_repo_query(username, User, User.username, namespace_name,
                                              repository_name))
  if not perms:
    raise DataModelException('User does not have permission for repo.')

  perms[0].delete_instance()
def delete_team_permission(team_name, namespace_name, repository_name):
  """ Deletes the named team's permission on the repository, raising if absent. """
  perms = list(__entity_permission_repo_query(team_name, Team, Team.name, namespace_name,
                                              repository_name))
  if not perms:
    raise DataModelException('Team does not have permission for repo.')

  perms[0].delete_instance()
def __set_entity_repo_permission(entity, permission_entity_property,
                                 namespace_name, repository_name, role_name):
  """ Creates or updates a repository permission for an entity (user or team).

  permission_entity_property names the RepositoryPermission column ('user' or
  'team') that ties the permission to the entity. Updates in place when a
  permission already exists; otherwise creates a new one.
  """
  repo = _basequery.get_existing_repository(namespace_name, repository_name)
  new_role = Role.get(Role.name == role_name)

  # Fetch any existing permission for this entity on the repo
  try:
    entity_attr = getattr(RepositoryPermission, permission_entity_property)
    perm = RepositoryPermission.get(entity_attr == entity, RepositoryPermission.repository == repo)
    perm.role = new_role
    perm.save()
    return perm
  except RepositoryPermission.DoesNotExist:
    set_entity_kwargs = {permission_entity_property: entity}
    new_perm = RepositoryPermission.create(repository=repo, role=new_role, **set_entity_kwargs)
    return new_perm
def set_user_repo_permission(username, namespace_name, repository_name, role_name):
  """ Grants or updates the named user's role on the repository.

  The namespace owner's admin role cannot be changed.
  """
  if username == namespace_name:
    raise DataModelException('Namespace owner must always be admin.')

  try:
    user_obj = User.get(User.username == username)
  except User.DoesNotExist:
    raise DataModelException('Invalid username: %s' % username)

  return __set_entity_repo_permission(user_obj, 'user', namespace_name, repository_name, role_name)
def set_team_repo_permission(team_name, namespace_name, repository_name, role_name):
  """ Grants or updates the named team's role on the repository. """
  try:
    team_obj = (Team
                .select()
                .join(User)
                .where(Team.name == team_name, User.username == namespace_name)
                .get())
  except Team.DoesNotExist:
    raise DataModelException('No team %s in organization %s' % (team_name, namespace_name))

  return __set_entity_repo_permission(team_obj, 'team', namespace_name, repository_name, role_name)

377
data/model/repository.py Normal file
View file

@ -0,0 +1,377 @@
import logging
from peewee import JOIN_LEFT_OUTER, fn
from datetime import timedelta, datetime
from data.model import (DataModelException, tag, db_transaction, storage, image, permission,
_basequery)
from data.database import (Repository, Namespace, RepositoryTag, Star, Image, ImageStorage, User,
Visibility, RepositoryPermission, TupleSelector, RepositoryActionCount,
Role, RepositoryAuthorizedEmail, db_for_update)
logger = logging.getLogger(__name__)
def create_repository(namespace, name, creating_user, visibility='private'):
  """ Creates a repository under the given namespace with the given visibility.

  Non-organization creators get an admin permission; when the repo lives under
  an organization, that org's permission prototypes are applied as well.
  """
  visibility_obj = Visibility.get(name=visibility)
  owner = User.get(username=namespace)
  repo = Repository.create(name=name, visibility=visibility_obj, namespace_user=owner)
  admin_role = Role.get(name='admin')

  if creating_user and not creating_user.organization:
    RepositoryPermission.create(user=creating_user, repository=repo, role=admin_role)

    if creating_user.username != namespace:
      # Permission prototypes only work for orgs
      permission.apply_default_permissions(repo, creating_user)

  return repo
def get_repository(namespace_name, repository_name):
  """ Returns the named repository, or None if it does not exist. """
  try:
    return _basequery.get_existing_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    return None
def _purge_all_repository_tags(namespace_name, repository_name):
  """ Immediately purge all repository tags without respecting the lifeline procedure. """
  try:
    repo = _basequery.get_existing_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    raise DataModelException('Invalid repository \'%s/%s\'' %
                             (namespace_name, repository_name))

  RepositoryTag.delete().where(RepositoryTag.repository == repo.id).execute()
def purge_repository(namespace_name, repository_name):
  """ Completely removes a repository: tags first (so GC can reclaim storage),
      then images/storage via GC, then the repository row itself. """
  _purge_all_repository_tags(namespace_name, repository_name)
  garbage_collect_repository(namespace_name, repository_name)

  repo = _basequery.get_existing_repository(namespace_name, repository_name)
  repo.delete_instance(recursive=True, delete_nullable=False)
def garbage_collect_repository(namespace_name, repository_name):
  """ Removes images in the repository that are no longer reachable from any
      live tag, then garbage collects their backing storage.

  First expires dead tags via tag GC; then, inside a transaction, computes the
  set of image ids referenced by any remaining tag (the tag's image plus its
  ancestors) and deletes every image not in that set.
  """
  storage_id_whitelist = {}

  tag.garbage_collect_tags(namespace_name, repository_name)

  with db_transaction():
    # TODO (jake): We could probably select this and all the images in a single query using
    # a different kind of join.

    # Get a list of all images used by tags in the repository
    tag_query = (RepositoryTag
                 .select(RepositoryTag, Image, ImageStorage)
                 .join(Image)
                 .join(ImageStorage, JOIN_LEFT_OUTER)
                 .switch(RepositoryTag)
                 .join(Repository)
                 .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                 .where(Repository.name == repository_name, Namespace.username == namespace_name))

    referenced_ancestors = set()
    for one_tag in tag_query:
      # The ancestor list is in the format '/1/2/3/', extract just the ids
      ancestor_id_strings = one_tag.image.ancestors.split('/')[1:-1]
      ancestor_list = [int(img_id_str) for img_id_str in ancestor_id_strings]
      referenced_ancestors = referenced_ancestors.union(set(ancestor_list))
      referenced_ancestors.add(one_tag.image.id)

    all_repo_images = image.get_repository_images(namespace_name, repository_name)
    all_images = {int(img.id): img for img in all_repo_images}
    to_remove = set(all_images.keys()).difference(referenced_ancestors)

    if len(to_remove) > 0:
      logger.info('Cleaning up unreferenced images: %s', to_remove)
      # Remember the storages of the deleted images so they can be GCed below.
      storage_id_whitelist = {all_images[to_remove_id].storage.id for to_remove_id in to_remove}
      Image.delete().where(Image.id << list(to_remove)).execute()

  # Storage GC runs outside the image transaction.
  if len(to_remove) > 0:
    logger.info('Garbage collecting storage for images: %s', to_remove)
    storage.garbage_collect_storage(storage_id_whitelist)
def star_repository(user, repository):
  """ Stars a repository on behalf of the given user. """
  # Star.create already inserts and persists the row; the previous follow-up
  # star.save() issued a redundant second write.
  Star.create(user=user.id, repository=repository.id)
def unstar_repository(user, repository):
  """ Unstars a repository.

  Raises DataModelException if the user had not starred the repository.

  Note: delete().execute() returns the number of rows removed and never raises
  Star.DoesNotExist, so the previous except clause was unreachable and a
  missing star was silently ignored; check the deleted-row count instead.
  """
  deleted = (Star
             .delete()
             .where(Star.repository == repository.id, Star.user == user.id)
             .execute())
  if not deleted:
    raise DataModelException('Star not found.')
def get_user_starred_repositories(user, limit=None, page=None):
  """ Retrieves all of the repositories a user has starred, optionally paginated. """
  starred = (Repository
             .select(Repository, User, Visibility)
             .join(Star)
             .switch(Repository)
             .join(User)
             .switch(Repository)
             .join(Visibility)
             .where(Star.user == user))

  if page and limit:
    return starred.paginate(page, limit)
  if limit:
    return starred.limit(limit)
  return starred
def repository_is_starred(user, repository):
  """ Determines whether a user has starred a repository or not. """
  star_query = (Star
                .select()
                .where(Star.repository == repository.id, Star.user == user.id))
  try:
    star_query.get()
    return True
  except Star.DoesNotExist:
    return False
def get_when_last_modified(repository_ids):
  """ Maps each repository id to the most recent tag lifetime_start_ts it contains. """
  rows = (RepositoryTag
          .select(RepositoryTag.repository, fn.Max(RepositoryTag.lifetime_start_ts))
          .where(RepositoryTag.repository << repository_ids)
          .group_by(RepositoryTag.repository)
          .tuples())

  return {repo_id: last_modified for (repo_id, last_modified) in rows}
def get_action_counts(repository_ids):
  """ Maps each repository id to its total action count over the past week. """
  # Filter the join to recent entries only.
  last_week = datetime.now() - timedelta(weeks=1)
  rows = (RepositoryActionCount
          .select(RepositoryActionCount.repository, fn.Sum(RepositoryActionCount.count))
          .where(RepositoryActionCount.repository << repository_ids)
          .where(RepositoryActionCount.date >= last_week)
          .group_by(RepositoryActionCount.repository)
          .tuples())

  return {repo_id: count for (repo_id, count) in rows}
def get_visible_repositories(username=None, include_public=True, page=None,
                             limit=None, namespace=None, namespace_only=False):
  """ Returns a TupleSelector over the repositories visible to the given user. """
  fields = [Repository.name, Repository.id, Repository.description, Visibility.name,
            Namespace.username]

  query = _visible_repository_query(username=username, include_public=include_public, page=page,
                                    limit=limit, namespace=namespace,
                                    select_models=fields)

  if limit:
    query = query.limit(limit)

  if namespace and namespace_only:
    query = query.where(Namespace.username == namespace)

  return TupleSelector(query, fields)
def _visible_repository_query(username=None, include_public=True, limit=None,
                              page=None, namespace=None, select_models=None):
  """ Builds a query of repositories visible to the given user.

  select_models lists the model fields to select; previously this defaulted to
  a shared mutable list ([]), the classic mutable-default-argument pitfall.
  None now means "select everything", which peewee treats the same as an empty
  select list.
  """
  query = (Repository
           # MySQL/RDS complains if there are selected models for counts.
           .select(*(select_models or []))
           .distinct()
           .join(Visibility)
           .switch(Repository)
           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
           .switch(Repository)
           .join(RepositoryPermission, JOIN_LEFT_OUTER))

  query = _basequery.filter_to_repos_for_user(query, username, namespace, include_public)

  if page:
    query = query.paginate(page, limit)
  elif limit:
    query = query.limit(limit)

  return query
def get_sorted_matching_repositories(prefix, only_public, checker, limit=10):
  """ Returns repositories matching the given prefix string and passing the given checker
      function.

  Runs up to four prefix searches in decreasing precedence (repo name with
  activity counts, repo name without, then namespace with and without), each
  skipping repositories already collected, until `limit` results are gathered.
  """
  last_week = datetime.now() - timedelta(weeks=1)
  results = []
  existing_ids = []

  def get_search_results(search_clause, with_count=False):
    # Closure accumulates into results/existing_ids; stop once we have enough.
    if len(results) >= limit:
      return

    select_items = [Repository, Namespace]
    if with_count:
      select_items.append(fn.Sum(RepositoryActionCount.count).alias('count'))

    query = (Repository
             .select(*select_items)
             .join(Namespace, JOIN_LEFT_OUTER, on=(Namespace.id == Repository.namespace_user))
             .switch(Repository)
             .where(search_clause)
             .group_by(Repository, Namespace))

    if only_public:
      query = query.where(Repository.visibility == _basequery.get_public_repo_visibility())

    # Don't re-fetch repositories found by an earlier, higher-precedence search.
    if existing_ids:
      query = query.where(~(Repository.id << existing_ids))

    if with_count:
      query = (query
               .switch(Repository)
               .join(RepositoryActionCount)
               .where(RepositoryActionCount.date >= last_week)
               .order_by(fn.Sum(RepositoryActionCount.count).desc()))

    for result in query:
      if len(results) >= limit:
        return results

      # Note: We compare IDs here, instead of objects, because calling .visibility on the
      # Repository will kick off a new SQL query to retrieve that visibility enum value. We don't
      # join the visibility table in SQL, as well, because it is ungodly slow in MySQL :-/
      result.is_public = result.visibility_id == _basequery.get_public_repo_visibility().id
      result.count = result.count if with_count else 0

      if not checker(result):
        continue

      results.append(result)
      existing_ids.append(result.id)

  # For performance reasons, we conduct the repo name and repo namespace searches on their
  # own. This also affords us the ability to give higher precedence to repository names matching
  # over namespaces, which is semantically correct.
  get_search_results(Repository.name ** (prefix + '%'), with_count=True)
  get_search_results(Repository.name ** (prefix + '%'), with_count=False)

  get_search_results(Namespace.username ** (prefix + '%'), with_count=True)
  get_search_results(Namespace.username ** (prefix + '%'), with_count=False)

  return results
def get_matching_repositories(repo_term, username=None, limit=10, include_public=True):
  """ Searches visible repositories whose name or namespace contains the term.

  A term containing '/' is treated as a namespace/name pair and both parts
  must match; otherwise either the name or the namespace may match.
  """
  visible = _visible_repository_query(username, include_public=include_public)

  if repo_term.find('/') > 0:
    # Handle the case where the user has already entered a namespace path.
    namespace_term, name_term = repo_term.split('/', 1)
    search_clauses = (Repository.name ** ('%' + name_term + '%') &
                      Namespace.username ** ('%' + namespace_term + '%'))
  else:
    search_clauses = (Repository.name ** ('%' + repo_term + '%') |
                      Namespace.username ** ('%' + repo_term + '%'))

  return visible.where(search_clauses).limit(limit)
def lookup_repository(repo_id):
  """ Returns the repository with the given id, or None if it does not exist. """
  try:
    return Repository.get(Repository.id == repo_id)
  except Repository.DoesNotExist:
    return None
def is_repository_public(repository):
  """ Returns whether the given repository object has public visibility. """
  return repository.visibility == _basequery.get_public_repo_visibility()
def repository_is_public(namespace_name, repository_name):
  """ Returns whether the named repository exists and is publicly visible. """
  try:
    (Repository
     .select()
     .join(Namespace, on=(Repository.namespace_user == Namespace.id))
     .switch(Repository)
     .join(Visibility)
     .where(Namespace.username == namespace_name, Repository.name == repository_name,
            Visibility.name == 'public')
     .get())
    return True
  except Repository.DoesNotExist:
    return False
def set_repository_visibility(repo, visibility):
  """ Sets the named visibility on the repository; no-op when the visibility
      name is unknown.

  Note: Visibility.get raises Visibility.DoesNotExist rather than returning a
  falsy value, so the previous `if not visibility_obj` guard was unreachable;
  catch the exception to keep the intended silent no-op behavior.
  """
  try:
    visibility_obj = Visibility.get(name=visibility)
  except Visibility.DoesNotExist:
    return

  repo.visibility = visibility_obj
  repo.save()
def get_email_authorized_for_repo(namespace, repository, email):
  """ Returns the authorization record for the email on the named repository,
      or None if the email has not been authorized. """
  try:
    return (RepositoryAuthorizedEmail
            .select(RepositoryAuthorizedEmail, Repository, Namespace)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .where(Namespace.username == namespace, Repository.name == repository,
                   RepositoryAuthorizedEmail.email == email)
            .get())
  except RepositoryAuthorizedEmail.DoesNotExist:
    return None
def create_email_authorization_for_repo(namespace_name, repository_name, email):
  """ Creates an unconfirmed email authorization record for the named repository. """
  try:
    repo = _basequery.get_existing_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    raise DataModelException('Invalid repository %s/%s' %
                             (namespace_name, repository_name))

  return RepositoryAuthorizedEmail.create(repository=repo, email=email, confirmed=False)
def confirm_email_authorization_for_repo(code):
  """ Marks the email authorization matching the given confirmation code as
      confirmed, raising DataModelException if the code is unknown. """
  try:
    found = (RepositoryAuthorizedEmail
             .select(RepositoryAuthorizedEmail, Repository, Namespace)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .where(RepositoryAuthorizedEmail.code == code)
             .get())
  except RepositoryAuthorizedEmail.DoesNotExist:
    raise DataModelException('Invalid confirmation code.')

  found.confirmed = True
  found.save()
  return found

195
data/model/storage.py Normal file
View file

@ -0,0 +1,195 @@
import logging
from peewee import JOIN_LEFT_OUTER, fn
from data.model import config, db_transaction, InvalidImageException
from data.database import (ImageStorage, Image, DerivedImageStorage, ImageStoragePlacement,
ImageStorageLocation, ImageStorageTransformation, ImageStorageSignature,
ImageStorageSignatureKind)
logger = logging.getLogger(__name__)
def find_or_create_derived_storage(source, transformation_name, preferred_location):
  """ Returns the derived storage for the source under the named transformation,
      creating it (with a placement at preferred_location) when missing. """
  existing = find_derived_storage(source, transformation_name)
  if existing is not None:
    return existing

  # Fixed typo in the log message ('dervied' -> 'derived').
  logger.debug('Creating storage derived from source: %s', source.uuid)
  trans = ImageStorageTransformation.get(name=transformation_name)
  new_storage = create_storage(preferred_location)
  DerivedImageStorage.create(source=source, derivative=new_storage, transformation=trans)
  return new_storage
def garbage_collect_storage(storage_id_whitelist):
  """ Deletes the ImageStorage rows in the whitelist that are no longer referenced
      by any Image or DerivedImageStorage, along with their placements, and then
      removes the corresponding blobs from backing storage.

  Runs in two transactions (derived-storage cleanup, then placement+storage
  cleanup); blob removal happens outside any transaction.
  """
  if len(storage_id_whitelist) == 0:
    return

  def placements_query_to_paths_set(placements_query):
    # Map each placement to a (location name, blob path) pair for removal.
    return {(placement.location.name, config.store.image_path(placement.storage.uuid))
            for placement in placements_query}

  def orphaned_storage_query(select_base_query, candidates, group_by):
    # Restrict a storage query to candidates with zero Image and zero
    # DerivedImageStorage references (i.e. truly orphaned).
    return (select_base_query
            .switch(ImageStorage)
            .join(Image, JOIN_LEFT_OUTER)
            .switch(ImageStorage)
            .join(DerivedImageStorage, JOIN_LEFT_OUTER,
                  on=(ImageStorage.id == DerivedImageStorage.derivative))
            .where(ImageStorage.id << list(candidates))
            .group_by(*group_by)
            .having((fn.Count(Image.id) == 0) & (fn.Count(DerivedImageStorage.id) == 0)))

  # Note: We remove the derived image storage in its own transaction as a way to reduce the
  # time that the transaction holds on the database indicies. This could result in a derived
  # image storage being deleted for an image storage which is later reused during this time,
  # but since these are caches anyway, it isn't terrible and worth the tradeoff (for now).
  logger.debug('Garbage collecting derived storage from candidates: %s', storage_id_whitelist)
  with db_transaction():
    # Find out which derived storages will be removed, and add them to the whitelist
    # The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
    orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
                                                           storage_id_whitelist,
                                                           (ImageStorage.id,)))

    if len(orphaned_from_candidates) > 0:
      derived_to_remove = (ImageStorage
                           .select(ImageStorage.id)
                           .join(DerivedImageStorage,
                                 on=(ImageStorage.id == DerivedImageStorage.derivative))
                           .where(DerivedImageStorage.source << orphaned_from_candidates))
      storage_id_whitelist.update({derived.id for derived in derived_to_remove})

      # Remove the dervived image storages with sources of orphaned storages
      (DerivedImageStorage
       .delete()
       .where(DerivedImageStorage.source << orphaned_from_candidates)
       .execute())

  # Note: Both of these deletes must occur in the same transaction (unfortunately) because a
  # storage without any placement is invalid, and a placement cannot exist without a storage.
  # TODO(jake): We might want to allow for null storages on placements, which would allow us to
  # delete the storages, then delete the placements in a non-transaction.
  logger.debug('Garbage collecting storages from candidates: %s', storage_id_whitelist)
  with db_transaction():
    # Track all of the data that should be removed from blob storage
    placements_to_remove = list(orphaned_storage_query(ImageStoragePlacement
                                                       .select(ImageStoragePlacement,
                                                               ImageStorage,
                                                               ImageStorageLocation)
                                                       .join(ImageStorageLocation)
                                                       .switch(ImageStoragePlacement)
                                                       .join(ImageStorage),
                                                       storage_id_whitelist,
                                                       (ImageStorage, ImageStoragePlacement,
                                                        ImageStorageLocation)))

    paths_to_remove = placements_query_to_paths_set(placements_to_remove)

    # Remove the placements for orphaned storages
    if len(placements_to_remove) > 0:
      placement_ids_to_remove = [placement.id for placement in placements_to_remove]
      placements_removed = (ImageStoragePlacement
                            .delete()
                            .where(ImageStoragePlacement.id << placement_ids_to_remove)
                            .execute())
      logger.debug('Removed %s image storage placements', placements_removed)

    # Remove all orphaned storages
    # The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
    orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
                                                    storage_id_whitelist,
                                                    (ImageStorage.id,)).alias('osq'))
    if len(orphaned_storages) > 0:
      storages_removed = (ImageStorage
                          .delete()
                          .where(ImageStorage.id << orphaned_storages)
                          .execute())
      logger.debug('Removed %s image storage records', storages_removed)

  # We are going to make the conscious decision to not delete image storage blobs inside
  # transactions.
  # This may end up producing garbage in s3, trading off for higher availability in the database.
  for location_name, image_path in paths_to_remove:
    logger.debug('Removing %s from %s', image_path, location_name)
    config.store.remove({location_name}, image_path)
def create_storage(location_name):
  """ Creates a new ImageStorage with a single placement at the named location. """
  new_storage = ImageStorage.create()
  location = ImageStorageLocation.get(name=location_name)
  ImageStoragePlacement.create(location=location, storage=new_storage)
  # Cache the placement locations on the in-memory object, as callers expect.
  new_storage.locations = {location_name}
  return new_storage
def find_or_create_storage_signature(storage, signature_kind):
  """ Returns the existing signature record for the storage and kind, creating
      an empty one if none exists. """
  signature = lookup_storage_signature(storage, signature_kind)
  if signature is None:
    kind = ImageStorageSignatureKind.get(name=signature_kind)
    signature = ImageStorageSignature.create(storage=storage, kind=kind)

  return signature
def lookup_storage_signature(storage, signature_kind):
  """ Returns the signature record of the named kind for the storage, or None. """
  kind = ImageStorageSignatureKind.get(name=signature_kind)
  try:
    return (ImageStorageSignature
            .select()
            .where(ImageStorageSignature.storage == storage,
                   ImageStorageSignature.kind == kind)
            .get())
  except ImageStorageSignature.DoesNotExist:
    return None
def find_derived_storage(source, transformation_name):
  """ Returns the storage derived from the source under the named transformation,
      with its placement locations attached, or None when no such derivative exists. """
  try:
    found = (ImageStorage
             .select(ImageStorage, DerivedImageStorage)
             .join(DerivedImageStorage, on=(ImageStorage.id == DerivedImageStorage.derivative))
             .join(ImageStorageTransformation)
             .where(DerivedImageStorage.source == source,
                    ImageStorageTransformation.name == transformation_name)
             .get())

    # Callers expect the in-memory location-name set to be populated.
    found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
    return found
  except ImageStorage.DoesNotExist:
    return None
def delete_derived_storage_by_uuid(storage_uuid):
  """ Deletes the storage with the given uuid, but only when it is a derived
      storage; unknown uuids and non-derived storages are ignored. """
  try:
    target = get_storage_by_uuid(storage_uuid)
  except InvalidImageException:
    return

  try:
    DerivedImageStorage.get(derivative=target)
  except DerivedImageStorage.DoesNotExist:
    return

  target.delete_instance(recursive=True)
def get_storage_by_uuid(storage_uuid):
  """ Returns the ImageStorage with the given uuid, with its placement location
      names attached; raises InvalidImageException when it has no placements.

  Note: the exception was previously constructed as
  InvalidImageException('...%s', storage_uuid), which passes the uuid as a
  second exception argument instead of interpolating it into the message; use
  %-formatting so the message actually contains the uuid.
  """
  placements = list(ImageStoragePlacement
                    .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
                    .join(ImageStorageLocation)
                    .switch(ImageStoragePlacement)
                    .join(ImageStorage)
                    .where(ImageStorage.uuid == storage_uuid))

  if not placements:
    raise InvalidImageException('No storage found with uuid: %s' % storage_uuid)

  found = placements[0].storage
  found.locations = {placement.location.name for placement in placements}
  return found

164
data/model/tag.py Normal file
View file

@ -0,0 +1,164 @@
from uuid import uuid4
from data.model import image, db_transaction, DataModelException, _basequery
from data.database import (RepositoryTag, Repository, Image, ImageStorage, Namespace,
get_epoch_timestamp, db_for_update)
def _tag_alive(query, now_ts=None):
  """ Restricts a RepositoryTag query to tags alive at now_ts (default: now). """
  if now_ts is None:
    now_ts = get_epoch_timestamp()

  # A tag is alive when its lifetime has no end, or the end is in the future.
  alive_clause = ((RepositoryTag.lifetime_end_ts >> None) |
                  (RepositoryTag.lifetime_end_ts > now_ts))
  return query.where(alive_clause)
def list_repository_tags(namespace_name, repository_name, include_hidden=False,
                         include_storage=False):
  """ Returns the live tags of the named repository with their images, optionally
      including hidden tags and the images' storage records. """
  if include_storage:
    to_select = (RepositoryTag, Image, ImageStorage)
  else:
    to_select = (RepositoryTag, Image)

  tags = _tag_alive(RepositoryTag
                    .select(*to_select)
                    .join(Repository)
                    .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                    .switch(RepositoryTag)
                    .join(Image)
                    .where(Repository.name == repository_name,
                           Namespace.username == namespace_name))

  if not include_hidden:
    tags = tags.where(RepositoryTag.hidden == False)

  if include_storage:
    tags = tags.switch(Image).join(ImageStorage)

  return tags
def create_or_update_tag(namespace_name, repository_name, tag_name,
                         tag_docker_image_id, reversion=False):
  """ Points a tag at the given docker image id, ending the lifetime of any
      existing alive tag with the same name (the "lifeline" procedure) and
      creating a fresh tag row starting now.

  Raises DataModelException when the repository or the image id is invalid.
  The expiry and the create share one transaction and the same timestamp, and
  the existing row is locked (db_for_update) to avoid concurrent retags racing.
  """
  try:
    repo = _basequery.get_existing_repository(namespace_name, repository_name)
  except Repository.DoesNotExist:
    raise DataModelException('Invalid repository %s/%s' % (namespace_name, repository_name))

  now_ts = get_epoch_timestamp()

  with db_transaction():
    try:
      tag = db_for_update(_tag_alive(RepositoryTag
                                     .select()
                                     .where(RepositoryTag.repository == repo,
                                            RepositoryTag.name == tag_name), now_ts)).get()
      tag.lifetime_end_ts = now_ts
      tag.save()
    except RepositoryTag.DoesNotExist:
      # No alive tag with this name; nothing to expire.
      pass

    try:
      image_obj = Image.get(Image.docker_image_id == tag_docker_image_id, Image.repository == repo)
    except Image.DoesNotExist:
      raise DataModelException('Invalid image with id: %s' % tag_docker_image_id)

    return RepositoryTag.create(repository=repo, image=image_obj, name=tag_name,
                                lifetime_start_ts=now_ts, reversion=reversion)
def create_temporary_hidden_tag(repo, image_obj, expiration_s):
  """ Create a tag with a defined timeline, that will not appear in the UI or CLI. Returns the name
  of the temporary tag. """
  now_ts = get_epoch_timestamp()
  tag_name = str(uuid4())
  RepositoryTag.create(repository=repo, image=image_obj, name=tag_name,
                       lifetime_start_ts=now_ts, lifetime_end_ts=now_ts + expiration_s,
                       hidden=True)
  return tag_name
def delete_tag(namespace_name, repository_name, tag_name):
  """ Ends the lifetime of the named alive tag (soft delete via lifetime_end_ts);
      the row itself is reclaimed later by tag garbage collection.

  Raises DataModelException when no alive tag with that name exists. The row
  is locked (db_for_update) inside a transaction to avoid racing a concurrent
  retag.
  """
  now_ts = get_epoch_timestamp()

  with db_transaction():
    try:
      query = _tag_alive(RepositoryTag
                         .select(RepositoryTag, Repository)
                         .join(Repository)
                         .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                         .where(Repository.name == repository_name,
                                Namespace.username == namespace_name,
                                RepositoryTag.name == tag_name), now_ts)
      found = db_for_update(query).get()
    except RepositoryTag.DoesNotExist:
      msg = ('Invalid repository tag \'%s\' on repository \'%s/%s\'' %
             (tag_name, namespace_name, repository_name))
      raise DataModelException(msg)

    found.lifetime_end_ts = now_ts
    found.save()
def garbage_collect_tags(namespace_name, repository_name):
  """ Hard-delete all tags in the repository whose lifetime ended longer ago than the
      namespace's configured removed-tag expiration window. """
  # We do this without using a join to prevent holding read locks on the repository table
  repo = _basequery.get_existing_repository(namespace_name, repository_name)
  expired_time = get_epoch_timestamp() - repo.namespace_user.removed_tag_expiration_s

  # Materialize the ids first (see note above about avoiding a join in the DELETE).
  tags_to_delete = list(RepositoryTag
                        .select(RepositoryTag.id)
                        .where(RepositoryTag.repository == repo,
                               ~(RepositoryTag.lifetime_end_ts >> None),
                               (RepositoryTag.lifetime_end_ts <= expired_time))
                        .order_by(RepositoryTag.id))

  # Idiomatic truthiness check instead of len(...) > 0.
  if tags_to_delete:
    (RepositoryTag
     .delete()
     .where(RepositoryTag.id << tags_to_delete)
     .execute())
def get_tag_image(namespace_name, repository_name, tag_name):
  """ Return the image to which the named live tag currently points.

      Raises DataModelException when the tag does not resolve to an image. """
  def limit_to_tag(query):
    return _tag_alive(query
                      .switch(Image)
                      .join(RepositoryTag)
                      .where(RepositoryTag.name == tag_name))

  matching = image.get_repository_images_base(namespace_name, repository_name, limit_to_tag)
  if not matching:
    raise DataModelException('Unable to find image for tag.')
  return matching[0]
def list_repository_tag_history(repo_obj, page=1, size=100, specific_tag=None):
  """ Return one page of (non-hidden) tag history rows for the repository, newest
      lifetime_start_ts first, optionally filtered to a single tag name. """
  query = (RepositoryTag
           .select(RepositoryTag, Image)
           .join(Image)
           .where(RepositoryTag.repository == repo_obj)
           .where(RepositoryTag.hidden == False)
           .order_by(RepositoryTag.lifetime_start_ts.desc())
           .paginate(page, size))

  if specific_tag:
    query = query.where(RepositoryTag.name == specific_tag)

  return query
def revert_tag(repo_obj, tag_name, docker_image_id):
  """ Reverts a tag to a specific image ID.

      The image must have been pointed to by this tag at some point in its history;
      otherwise DataModelException is raised. The new tag row is marked as a reversion. """
  # Verify that the image ID already existed under this repository under the
  # tag.
  try:
    (RepositoryTag
     .select()
     .join(Image)
     .where(RepositoryTag.repository == repo_obj)
     .where(RepositoryTag.name == tag_name)
     .where(Image.docker_image_id == docker_image_id)
     .get())
  except RepositoryTag.DoesNotExist:
    raise DataModelException('Cannot revert to unknown or invalid image')

  return create_or_update_tag(repo_obj.namespace_user.username, repo_obj.name, tag_name,
                              docker_image_id, reversion=True)

276
data/model/team.py Normal file
View file

@ -0,0 +1,276 @@
from data.database import Team, TeamMember, TeamRole, User, TeamMemberInvite, Repository
from data.model import (DataModelException, InvalidTeamException, UserAlreadyInTeam,
InvalidTeamMemberException, user, _basequery)
from util.validation import validate_username
def create_team(name, org_obj, team_role_name, description=''):
  """ Create a new team under the given organization with the named role.

      Raises InvalidTeamException when the team name fails username validation or
      org_obj is not actually an organization. """
  (username_valid, username_issue) = validate_username(name)
  if not username_valid:
    raise InvalidTeamException('Invalid team name %s: %s' % (name, username_issue))

  if not org_obj.organization:
    raise InvalidTeamException('Specified organization %s was not an organization' %
                               org_obj.username)

  team_role = TeamRole.get(TeamRole.name == team_role_name)
  return Team.create(name=name, organization=org_obj, role=team_role,
                     description=description)
def add_user_to_team(user_obj, team):
  """ Add the user as a member of the team; raises UserAlreadyInTeam on failure.

      NOTE(review): any exception from the insert (presumably a uniqueness violation
      for an existing membership, but also any other database error) is reported as
      UserAlreadyInTeam — consider narrowing to IntegrityError. """
  try:
    return TeamMember.create(user=user_obj, team=team)
  except Exception:
    raise UserAlreadyInTeam('User %s is already a member of team %s' %
                            (user_obj.username, team.name))
def remove_user_from_team(org_name, team_name, username, removed_by_username):
  """ Remove the named user from the team in the organization.

      Raises DataModelException when the user is not a member, or when the user is
      removing themselves from their only admin team in the org. """
  Org = User.alias()
  joined = TeamMember.select().join(User).switch(TeamMember).join(Team)
  with_role = joined.join(TeamRole)
  with_org = with_role.switch(Team).join(Org,
                                         on=(Org.id == Team.organization))
  found = list(with_org.where(User.username == username,
                              Org.username == org_name,
                              Team.name == team_name))

  if not found:
    raise DataModelException('User %s does not belong to team %s' %
                             (username, team_name))

  if username == removed_by_username:
    # Self-removal: ensure the user keeps admin access to the org via some other team.
    admin_team_query = __get_user_admin_teams(org_name, username)
    admin_team_names = {team.name for team in admin_team_query}
    if team_name in admin_team_names and len(admin_team_names) <= 1:
      msg = 'User cannot remove themselves from their only admin team.'
      raise DataModelException(msg)

  user_in_team = found[0]
  user_in_team.delete_instance()
def get_team_org_role(team):
  """ Return the TeamRole row for the given team. """
  role_id = team.role.id
  return TeamRole.get(TeamRole.id == role_id)
def set_team_org_permission(team, team_role_name, set_by_username):
  """ Change the org-level role of the team to the named role and return the team.

      Raises DataModelException when demoting the team from 'admin' would leave the
      calling user without admin on the organization. """
  if team.role.name == 'admin' and team_role_name != 'admin':
    # We need to make sure we're not removing the users only admin role
    user_admin_teams = __get_user_admin_teams(team.organization.username, set_by_username)
    admin_team_set = {admin_team.name for admin_team in user_admin_teams}

    if team.name in admin_team_set and len(admin_team_set) <= 1:
      msg = (('Cannot remove admin from team \'%s\' because calling user ' +
              'would no longer have admin on org \'%s\'') %
             (team.name, team.organization.username))
      raise DataModelException(msg)

  new_role = TeamRole.get(TeamRole.name == team_role_name)
  team.role = new_role
  team.save()
  return team
def __get_user_admin_teams(org_name, username):
  """ Return a query of the teams in the organization with the 'admin' role of which
      the named user is a member. """
  Org = User.alias()
  user_teams = Team.select().join(TeamMember).join(User)
  with_org = user_teams.switch(Team).join(Org,
                                          on=(Org.id == Team.organization))
  with_role = with_org.switch(Team).join(TeamRole)
  admin_teams = with_role.where(User.username == username,
                                Org.username == org_name,
                                TeamRole.name == 'admin')
  return admin_teams
def remove_team(org_name, team_name, removed_by_username):
  """ Delete the team (recursively, including memberships and invites).

      Raises InvalidTeamException when the team does not exist and DataModelException
      when deleting it would leave the removing user without any admin team in the org. """
  joined = Team.select(Team, TeamRole).join(User).switch(Team).join(TeamRole)
  found = list(joined.where(User.organization == True,
                            User.username == org_name,
                            Team.name == team_name))

  if not found:
    raise InvalidTeamException('Team \'%s\' is not a team in org \'%s\'' %
                               (team_name, org_name))

  team = found[0]
  if team.role.name == 'admin':
    admin_teams = list(__get_user_admin_teams(org_name, removed_by_username))
    if len(admin_teams) <= 1:
      # The team we are trying to remove is the only admin team for this user
      msg = ('Deleting team \'%s\' would remove all admin from user \'%s\'' %
             (team_name, removed_by_username))
      raise DataModelException(msg)

  team.delete_instance(recursive=True, delete_nullable=True)
def add_or_invite_to_team(inviter, team, user_obj=None, email=None, requires_invite=True):
  """ Add the user to the team directly when possible, otherwise create an invite.

      Returns None when the user was added directly, or the TeamMemberInvite row
      otherwise. Raises InvalidTeamMemberException for robots outside the org. """
  # If the user is a member of the organization, then we simply add the
  # user directly to the team. Otherwise, an invite is created for the user/email.
  # We return None if the user was directly added and the invite object if the user was invited.
  if user_obj and requires_invite:
    orgname = team.organization.username

    # If the user is part of the organization (or a robot), then no invite is required.
    if user_obj.robot:
      requires_invite = False
      if not user_obj.username.startswith(orgname + '+'):
        raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
                                         'as it is not a member of the organization')
    else:
      Org = User.alias()
      found = User.select(User.username)
      found = found.where(User.username == user_obj.username).join(TeamMember).join(Team)
      found = found.join(Org, on=(Org.username == orgname)).limit(1)
      requires_invite = not any(found)

  # If we have a valid user and no invite is required, simply add the user to the team.
  if user_obj and not requires_invite:
    add_user_to_team(user_obj, team)
    return None

  # Otherwise create a pending invite targeting either the user row or a raw email.
  email_address = email if not user_obj else None
  return TeamMemberInvite.create(user=user_obj, email=email_address, team=team, inviter=inviter)
def get_matching_user_teams(team_prefix, user_obj, limit=10):
  """ Return up to `limit` distinct teams of which the user is a member and whose
      name starts with the given prefix. """
  query = (Team
           .select()
           .join(User)
           .switch(Team)
           .join(TeamMember)
           .where(TeamMember.user == user_obj, Team.name ** (team_prefix + '%'))
           .distinct(Team.id)
           .limit(limit))

  return query
def get_organization_team(orgname, teamname):
  """ Return the team with the given name under the named organization.

      Raises InvalidTeamException when no such team exists. """
  joined = Team.select().join(User)
  query = joined.where(Team.name == teamname, User.organization == True,
                       User.username == orgname).limit(1)
  result = list(query)
  if not result:
    # Bug fix: the placeholders were previously passed as extra exception arguments
    # ('Team does not exist: %s/%s', orgname, teamname) and were never interpolated
    # into the message.
    raise InvalidTeamException('Team does not exist: %s/%s' % (orgname, teamname))

  return result[0]
def get_matching_admined_teams(team_prefix, user_obj, limit=10):
  """ Return up to `limit` distinct teams matching the prefix that live inside the
      organizations where the user holds an admin team membership. """
  admined_orgs = (_basequery.get_user_organizations(user_obj.username)
                  .switch(Team)
                  .join(TeamRole)
                  .where(TeamRole.name == 'admin'))

  query = (Team
           .select()
           .join(User)
           .switch(Team)
           .join(TeamMember)
           .where(Team.name ** (team_prefix + '%'), Team.organization << (admined_orgs))
           .distinct(Team.id)
           .limit(limit))

  return query
def get_matching_teams(team_prefix, organization):
  """ Return up to 10 teams in the organization whose names start with the prefix. """
  prefix_clause = Team.name ** (team_prefix + '%')
  matching = Team.select().where(prefix_clause, Team.organization == organization)
  return matching.limit(10)
def get_teams_within_org(organization):
  """ Return a query over every team belonging to the organization. """
  query = Team.select().where(Team.organization == organization)
  return query
def get_user_teams_within_org(username, organization):
  """ Return the teams in the organization of which the named user is a member. """
  query = (Team
           .select()
           .join(TeamMember)
           .join(User)
           .where(Team.organization == organization,
                  User.username == username))
  return query
def list_organization_members_by_teams(organization):
  """ Return TeamMember rows for the organization, annotated with their Team and User
      rows for direct attribute access. """
  query = (TeamMember
           .select(Team, User)
           .annotate(Team)
           .annotate(User)
           .where(Team.organization == organization))
  return query
def get_organization_team_member_invites(teamid):
  """ Return the pending member invites for the team with the given id. """
  query = (TeamMemberInvite
           .select()
           .join(Team)
           .join(User)
           .where(Team.id == teamid))
  return query
def delete_team_email_invite(team, email):
  """ Delete the pending invite for the given email address on the team.

      Raises TeamMemberInvite.DoesNotExist when no such invite exists. """
  found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
  found.delete_instance()
def delete_team_user_invite(team, user_obj):
  """ Delete the pending invite for the given user on the team.

      Returns True when an invite was found and removed, False otherwise. """
  try:
    invite = TeamMemberInvite.get(TeamMemberInvite.user == user_obj,
                                  TeamMemberInvite.team == team)
  except TeamMemberInvite.DoesNotExist:
    return False

  invite.delete_instance()
  return True
def lookup_team_invites(user_obj):
  """ Return all pending team invites addressed directly to the given user. """
  return TeamMemberInvite.select().where(TeamMemberInvite.user == user_obj)
def lookup_team_invite(code, user_obj=None):
  """ Return the team invite with the given token, optionally verifying that it is
      addressed to the given user.

      Raises DataModelException (with a deliberately generic message) when the code is
      unknown or addressed to a different user. """
  # Lookup the invite code.
  try:
    found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code)
  except TeamMemberInvite.DoesNotExist:
    raise DataModelException('Invalid confirmation code.')

  if user_obj and found.user != user_obj:
    raise DataModelException('Invalid confirmation code.')

  return found
def delete_team_invite(code, user_obj=None):
  """ Delete the invite with the given code (optionally validated against a user) and
      return the (team, inviter) pair it referenced. """
  invite = lookup_team_invite(code, user_obj)

  # Capture the references before the row disappears.
  result = (invite.team, invite.inviter)
  invite.delete_instance()
  return result
def confirm_team_invite(code, user_obj):
  """ Redeem the invite code for the given user: adds them to the team, deletes the
      invite, and returns (team, inviter).

      Raises DataModelException when the invite is addressed to a different user. """
  found = lookup_team_invite(code)

  # If the invite is for a specific user, we have to confirm that here.
  if found.user is not None and found.user != user_obj:
    message = """This invite is intended for user "%s".
Please login to that account and try again.""" % found.user.username
    raise DataModelException(message)

  # Add the user to the team.
  try:
    add_user_to_team(user_obj, found.team)
  except UserAlreadyInTeam:
    # Ignore.
    pass

  # Delete the invite and return the team.
  team = found.team
  inviter = found.inviter
  found.delete_instance()
  return (team, inviter)

87
data/model/token.py Normal file
View file

@ -0,0 +1,87 @@
import logging
from peewee import JOIN_LEFT_OUTER
from data.database import (AccessToken, AccessTokenKind, Repository, Namespace, Role,
RepositoryBuildTrigger, LogEntryKind)
from data.model import DataModelException, _basequery, InvalidTokenException
logger = logging.getLogger(__name__)
def create_access_token(repo, role, kind=None, friendly_name=None):
  """ Create and return a temporary access token on the repository with the named role
      and (optionally) a token kind and friendly name. """
  role_obj = Role.get(Role.name == role)
  kind_ref = AccessTokenKind.get(AccessTokenKind.name == kind) if kind is not None else None
  return AccessToken.create(repository=repo, temporary=True, role=role_obj, kind=kind_ref,
                            friendly_name=friendly_name)
def create_delegate_token(namespace_name, repository_name, friendly_name,
                          role='read'):
  """ Create and return a permanent (non-temporary) delegate token on the repository
      with the named role. """
  role_obj = Role.get(name=role)
  repo = _basequery.get_existing_repository(namespace_name, repository_name)
  return AccessToken.create(repository=repo, role=role_obj,
                            friendly_name=friendly_name, temporary=False)
def get_repository_delegate_tokens(namespace_name, repository_name):
  """ Return the non-temporary delegate tokens for the repository, excluding tokens
      that are attached to a build trigger (left-outer join filtered to NULL uuid). """
  return (AccessToken
          .select(AccessToken, Role)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .switch(AccessToken)
          .join(Role)
          .switch(AccessToken)
          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
          .where(Repository.name == repository_name, Namespace.username == namespace_name,
                 AccessToken.temporary == False, RepositoryBuildTrigger.uuid >> None))
def get_repo_delegate_token(namespace_name, repository_name, code):
  """ Return the delegate token with the given code on the repository.

      Raises InvalidTokenException when the code does not match any delegate token. """
  repo_query = get_repository_delegate_tokens(namespace_name, repository_name)

  try:
    return repo_query.where(AccessToken.code == code).get()
  except AccessToken.DoesNotExist:
    raise InvalidTokenException('Unable to find token with code: %s' % code)
def set_repo_delegate_token_role(namespace_name, repository_name, code, role):
  """ Change the role of an existing delegate token and return the updated token.

      Only 'read' and 'write' are valid roles; DataModelException is raised otherwise.
      Raises InvalidTokenException when the token code is unknown. """
  token = get_repo_delegate_token(namespace_name, repository_name, code)

  # Idiomatic membership test instead of the chained != comparisons.
  if role not in ('read', 'write'):
    raise DataModelException('Invalid role for delegate token: %s' % role)

  new_role = Role.get(Role.name == role)
  token.role = new_role
  token.save()

  return token
def delete_delegate_token(namespace_name, repository_name, code):
  """ Delete (recursively) the delegate token with the given code and return it. """
  found = get_repo_delegate_token(namespace_name, repository_name, code)
  found.delete_instance(recursive=True)
  return found
def load_token_data(code):
  """ Load the permissions for any token by code.

      Returns the AccessToken with its Repository, Namespace and Role preloaded;
      raises InvalidTokenException for an unknown code. """
  try:
    return (AccessToken
            .select(AccessToken, Repository, Namespace, Role)
            .join(Role)
            .switch(AccessToken)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .where(AccessToken.code == code)
            .get())
  except AccessToken.DoesNotExist:
    raise InvalidTokenException('Invalid delegate token code: %s' % code)

657
data/model/user.py Normal file
View file

@ -0,0 +1,657 @@
import bcrypt
import logging
import json
from peewee import JOIN_LEFT_OUTER, IntegrityError, fn
from uuid import uuid4
from datetime import datetime, timedelta
from data.database import (User, LoginService, FederatedLogin, RepositoryPermission, TeamMember,
Team, Repository, TupleSelector, TeamRole, Namespace, Visibility,
EmailConfirmation, Role, db_for_update, random_string_generator)
from data.model import (DataModelException, InvalidPasswordException, InvalidRobotException,
InvalidUsernameException, InvalidEmailAddressException,
TooManyUsersException, TooManyLoginAttemptsException, db_transaction,
notification, config, repository, _basequery)
from util.names import format_robot_username, parse_robot_username
from util.validation import (validate_username, validate_email, validate_password,
INVALID_PASSWORD_MESSAGE)
from util.backoff import exponential_backoff
logger = logging.getLogger(__name__)
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
def hash_password(password, salt=None):
  """ Bcrypt-hash the given password. When `salt` is supplied (e.g. an existing hash,
      which bcrypt accepts as a salt) it is reused; otherwise a fresh salt is generated. """
  if not salt:
    salt = bcrypt.gensalt()
  return bcrypt.hashpw(password.encode('utf-8'), salt)
def is_create_user_allowed():
  """ Hook for deployments to gate user creation; unconditionally True here. """
  return True
def create_user(username, password, email, auto_verify=False):
  """ Creates a regular user, if allowed.

      Raises InvalidPasswordException for a weak password and TooManyUsersException
      when user creation is disabled. Returns the saved User row. """
  if not validate_password(password):
    raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)

  if not is_create_user_allowed():
    raise TooManyUsersException()

  created = create_user_noverify(username, email)
  created.password_hash = hash_password(password)
  created.verified = auto_verify
  created.save()

  return created
def create_user_noverify(username, email):
  """ Create an unverified user row after validating the email and username and
      checking for collisions.

      Raises InvalidEmailAddressException / InvalidUsernameException on validation or
      uniqueness failures; any other insert error surfaces as DataModelException. """
  if not validate_email(email):
    raise InvalidEmailAddressException('Invalid email address: %s' % email)

  (username_valid, username_issue) = validate_username(username)
  if not username_valid:
    raise InvalidUsernameException('Invalid username %s: %s' % (username, username_issue))

  try:
    existing = User.get((User.username == username) | (User.email == email))
    logger.info('Existing user with same username or email.')

    # A user already exists with either the same username or email
    if existing.username == username:
      raise InvalidUsernameException('Username has already been taken: %s' %
                                     username)
    raise InvalidEmailAddressException('Email has already been used: %s' %
                                       email)
  except User.DoesNotExist:
    # This is actually the happy path
    logger.debug('Email and username are unique!')

  try:
    return User.create(username=username, email=email)
  except Exception as ex:
    raise DataModelException(ex.message)
def is_username_unique(test_username):
  """ Return True when no user row exists with the given username. """
  try:
    User.get((User.username == test_username))
  except User.DoesNotExist:
    return True
  return False
def change_password(user, new_password):
  """ Validate and set a new password for the user.

      Also resets the failed-login counter, rotates the user's uuid (invalidating
      sessions keyed on it), and clears any 'password_required' notifications. """
  if not validate_password(new_password):
    raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)

  pw_hash = hash_password(new_password)
  user.invalid_login_attempts = 0
  user.password_hash = pw_hash
  user.uuid = str(uuid4())
  user.save()

  # Remove any password required notifications for the user.
  notification.delete_notifications_by_kind(user, 'password_required')
def change_username(user_id, new_username):
  """ Rename the user (and all of their robot accounts) inside one transaction.

      Raises InvalidUsernameException when the new name fails validation. Returns the
      updated User row. """
  (username_valid, username_issue) = validate_username(new_username)
  if not username_valid:
    raise InvalidUsernameException('Invalid username %s: %s' % (new_username, username_issue))

  with db_transaction():
    # Reload the user for update
    user = db_for_update(User.select().where(User.id == user_id)).get()

    # Rename the robots
    for robot in db_for_update(_list_entity_robots(user.username)):
      _, robot_shortname = parse_robot_username(robot.username)
      new_robot_name = format_robot_username(new_username, robot_shortname)
      robot.username = new_robot_name
      robot.save()

    # Rename the user
    user.username = new_username
    user.save()
    return user
def change_invoice_email(user, invoice_email):
  """ Persist a new invoice e-mail preference on the given user. """
  user.invoice_email = invoice_email
  user.save()
def change_user_tag_expiration(user, tag_expiration_s):
  """ Persist a new removed-tag expiration window (in seconds) on the given user. """
  user.removed_tag_expiration_s = tag_expiration_s
  user.save()
def update_email(user, new_email, auto_verify=False):
  """ Change the user's e-mail address, optionally marking it as already verified.

      Raises DataModelException when the address is already in use (unique constraint). """
  try:
    user.email = new_email
    user.verified = auto_verify
    user.save()
  except IntegrityError:
    raise DataModelException('E-mail address already used')
def create_robot(robot_shortname, parent):
  """ Create a robot account under the parent user/org and return (robot, token).

      The robot's token is stored both as its email and as the service_ident of its
      'quayrobot' federated login. Raises InvalidRobotException on bad or duplicate
      names; other insert errors surface as DataModelException. """
  (username_valid, username_issue) = validate_username(robot_shortname)
  if not username_valid:
    raise InvalidRobotException('The name for the robot \'%s\' is invalid: %s' %
                                (robot_shortname, username_issue))

  username = format_robot_username(parent.username, robot_shortname)

  try:
    User.get(User.username == username)

    msg = 'Existing robot with name: %s' % username
    logger.info(msg)
    raise InvalidRobotException(msg)
  except User.DoesNotExist:
    pass

  try:
    created = User.create(username=username, robot=True)

    service = LoginService.get(name='quayrobot')
    # The robot's generated email doubles as its login token.
    password = created.email
    FederatedLogin.create(user=created, service=service,
                          service_ident=password)

    return created, password
  except Exception as ex:
    raise DataModelException(ex.message)
def get_robot(robot_shortname, parent):
  """ Return (robot_user, token) for the robot under the given parent namespace. """
  qualified_name = format_robot_username(parent.username, robot_shortname)
  robot = lookup_robot(qualified_name)
  # The robot's token is stored in its email column.
  return robot, robot.email
def lookup_robot(robot_username):
  """ Return the robot user with the given fully-qualified username.

      Raises InvalidRobotException when no matching 'quayrobot' login exists. """
  try:
    return (User
            .select()
            .join(FederatedLogin)
            .join(LoginService)
            .where(LoginService.name == 'quayrobot', User.username == robot_username,
                   User.robot == True)
            .get())
  except User.DoesNotExist:
    raise InvalidRobotException('Could not find robot with username: %s' % robot_username)
def get_matching_robots(name_prefix, username, limit=10):
  """ Return up to `limit` robots with the given shortname prefix belonging either to
      the user directly or to any org where the user is on an admin team. """
  admined_orgs = (_basequery.get_user_organizations(username)
                  .switch(Team)
                  .join(TeamRole)
                  .where(TeamRole.name == 'admin'))

  # Build one OR-ed LIKE clause across all admined orgs plus the user's own namespace.
  prefix_checks = False

  for org in admined_orgs:
    prefix_checks = prefix_checks | (User.username ** (org.username + '+' + name_prefix + '%'))

  prefix_checks = prefix_checks | (User.username ** (username + '+' + name_prefix + '%'))

  return User.select().where(prefix_checks).limit(limit)
def verify_robot(robot_username, password):
  """ Authenticate a robot by its token ('password').

      Raises InvalidRobotException for a bad name, bad credentials, a missing owner,
      or a disabled owner account; returns the robot User row on success. """
  result = parse_robot_username(robot_username)
  if result is None:
    raise InvalidRobotException('%s is an invalid robot name' % robot_username)

  # Find the matching robot.
  query = (User
           .select()
           .join(FederatedLogin)
           .join(LoginService)
           .where(FederatedLogin.service_ident == password, LoginService.name == 'quayrobot',
                  User.username == robot_username))

  try:
    robot = query.get()
  except User.DoesNotExist:
    msg = ('Could not find robot with username: %s and supplied password.' %
           robot_username)
    raise InvalidRobotException(msg)

  # Find the owner user and ensure it is not disabled.
  try:
    owner = User.get(User.username == result[0])
  except User.DoesNotExist:
    raise InvalidRobotException('Robot %s owner does not exist' % robot_username)

  if not owner.enabled:
    raise InvalidRobotException('This user has been disabled. Please contact your administrator.')

  return robot
def regenerate_robot_token(robot_shortname, parent):
  """ Generate a fresh token for the robot and return (robot, new_token).

      The token is written both to the robot's email column and to the service_ident
      of its 'quayrobot' federated login, keeping the two in sync. """
  robot_username = format_robot_username(parent.username, robot_shortname)

  robot = lookup_robot(robot_username)
  password = random_string_generator(length=64)()
  robot.email = password

  service = LoginService.get(name='quayrobot')
  login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
  login.service_ident = password

  login.save()
  robot.save()

  return robot, password
def delete_robot(robot_username):
  """ Delete the robot account (and its dependent rows).

      Raises InvalidRobotException when no such robot exists. """
  try:
    robot = User.get(username=robot_username, robot=True)
  except User.DoesNotExist:
    raise InvalidRobotException('Could not find robot with username: %s' %
                                robot_username)
  robot.delete_instance(recursive=True, delete_nullable=True)
def _list_entity_robots(entity_name):
  """ Return the list of robots for the specified entity (user or org). This MUST
      return a query, not a materialized list, so that callers can use db_for_update.
  """
  return (User
          .select()
          .join(FederatedLogin)
          .where(User.robot == True, User.username ** (entity_name + '+%')))
def list_entity_robot_permission_teams(entity_name, include_permissions=False):
  """ Return a TupleSelector over the entity's robots (username + token), optionally
      widened with their repository permissions and team memberships. """
  query = (_list_entity_robots(entity_name))

  fields = [User.username, FederatedLogin.service_ident]
  if include_permissions:
    query = (query
             .join(RepositoryPermission, JOIN_LEFT_OUTER,
                   on=(RepositoryPermission.user == FederatedLogin.user))
             .join(Repository, JOIN_LEFT_OUTER)
             .switch(User)
             .join(TeamMember, JOIN_LEFT_OUTER)
             .join(Team, JOIN_LEFT_OUTER))

    fields.append(Repository.name)
    fields.append(Team.name)

  return TupleSelector(query, fields)
def create_federated_user(username, email, service_name, service_id,
                          set_password_notification, metadata=None):
  """ Create a pre-verified user backed by an external login service and return it.

      `metadata` is serialized to JSON on the federated-login row; `None` means empty.
      Optionally creates a 'password_required' notification for the new user. Raises
      TooManyUsersException when user creation is disabled.

      Bug fix: the default for `metadata` was a mutable `{}` shared across calls;
      replaced with the None-sentinel idiom (behavior for all callers unchanged). """
  if not is_create_user_allowed():
    raise TooManyUsersException()

  new_user = create_user_noverify(username, email)
  new_user.verified = True
  new_user.save()

  service = LoginService.get(LoginService.name == service_name)
  FederatedLogin.create(user=new_user, service=service,
                        service_ident=service_id,
                        metadata_json=json.dumps(metadata or {}))

  if set_password_notification:
    notification.create_notification('password_required', new_user)

  return new_user
def attach_federated_login(user, service_name, service_id, metadata=None):
  """ Attach an external-service login to an existing user and return the user.

      Bug fix: the default for `metadata` was a mutable `{}` shared across calls;
      replaced with the None-sentinel idiom (behavior for all callers unchanged). """
  service = LoginService.get(LoginService.name == service_name)
  FederatedLogin.create(user=user, service=service, service_ident=service_id,
                        metadata_json=json.dumps(metadata or {}))
  return user
def verify_federated_login(service_name, service_id):
  """ Return the user attached to the given external service identity, or None when
      no such federated login exists. """
  try:
    found = (FederatedLogin
             .select(FederatedLogin, User)
             .join(LoginService)
             .switch(FederatedLogin).join(User)
             .where(FederatedLogin.service_ident == service_id, LoginService.name == service_name)
             .get())
    return found.user
  except FederatedLogin.DoesNotExist:
    return None
def list_federated_logins(user):
  """ Return the user's external (non-robot) federated logins, selecting the service
      identity, service name and metadata. """
  query = (FederatedLogin
           .select(FederatedLogin.service_ident,
                   LoginService.name, FederatedLogin.metadata_json)
           .join(LoginService)
           .where(LoginService.name != 'quayrobot',
                  FederatedLogin.user == user))
  return query
def lookup_federated_login(user, service_name):
  """ Return the user's federated login for the named service, or None when absent. """
  try:
    found = list_federated_logins(user).where(LoginService.name == service_name).get()
  except FederatedLogin.DoesNotExist:
    return None
  return found
def create_confirm_email_code(user, new_email=None):
  """ Create and return an email-confirmation code for the user, optionally recording
      a pending new address (validated here) to switch to on confirmation. """
  if new_email:
    if not validate_email(new_email):
      raise InvalidEmailAddressException('Invalid email address: %s' %
                                         new_email)

  code = EmailConfirmation.create(user=user, email_confirm=True,
                                  new_email=new_email)
  return code
def confirm_user_email(code):
  """ Redeem an email-confirmation code: marks the user verified, applies any pending
      email change, deletes the code, and returns (user, new_email, old_email).

      Raises DataModelException for an unknown code or an already-used new address. """
  try:
    code = EmailConfirmation.get(EmailConfirmation.code == code,
                                 EmailConfirmation.email_confirm == True)
  except EmailConfirmation.DoesNotExist:
    raise DataModelException('Invalid email confirmation code.')

  user = code.user
  user.verified = True

  old_email = None
  new_email = code.new_email
  # NOTE(review): old_email is always None here, so the != comparison only guards
  # against a falsy new_email — presumably intentional, but worth confirming.
  if new_email and new_email != old_email:
    if find_user_by_email(new_email):
      raise DataModelException('E-mail address already used.')

    old_email = user.email
    user.email = new_email

  user.save()

  code.delete_instance()

  return user, new_email, old_email
def create_reset_password_email_code(email):
  """ Create and return a password-reset confirmation code for the user owning the
      given e-mail address.

      Raises InvalidEmailAddressException when the address is unknown or belongs to
      an organization (organizations have no passwords). """
  try:
    user = User.get(User.email == email)
  except User.DoesNotExist:
    # (Removed a stray trailing semicolon from the original raise statement.)
    raise InvalidEmailAddressException('Email address was not found.')

  if user.organization:
    raise InvalidEmailAddressException('Organizations can not have passwords.')

  code = EmailConfirmation.create(user=user, pw_reset=True)
  return code
def validate_reset_code(code):
  """ Redeem a password-reset code: returns its user and deletes the code, or returns
      None for an unknown code. """
  try:
    code = EmailConfirmation.get(EmailConfirmation.code == code,
                                 EmailConfirmation.pw_reset == True)
  except EmailConfirmation.DoesNotExist:
    return None

  user = code.user
  # Single-use: the code is consumed on successful validation.
  code.delete_instance()

  return user
def find_user_by_email(email):
  """ Look up a user row by e-mail address; returns None when no match exists. """
  try:
    found = User.get(User.email == email)
  except User.DoesNotExist:
    return None
  return found
def get_nonrobot_user(username):
  """ Return the named user, excluding organizations and robots; None when absent. """
  try:
    found = User.get(User.username == username, User.organization == False, User.robot == False)
  except User.DoesNotExist:
    return None
  return found
def get_user(username):
  """ Return the named non-organization user (robots allowed); None when absent. """
  try:
    found = User.get(User.username == username, User.organization == False)
  except User.DoesNotExist:
    return None
  return found
def get_namespace_user(username):
  """ Return the user/org row that owns the given namespace name; None when absent. """
  try:
    found = User.get(User.username == username)
  except User.DoesNotExist:
    return None
  return found
def get_user_or_org(username):
  """ Return the named user or organization, excluding robots; None when absent. """
  try:
    found = User.get(User.username == username, User.robot == False)
  except User.DoesNotExist:
    return None
  return found
def get_user_by_id(user_db_id):
  """ Return the non-organization user with the given database id; None when absent. """
  try:
    found = User.get(User.id == user_db_id, User.organization == False)
  except User.DoesNotExist:
    return None
  return found
def get_namespace_by_user_id(namespace_user_db_id):
  """ Return the namespace (username) for the given non-robot user/org id.

      Raises InvalidUsernameException when no such row exists. """
  try:
    return User.get(User.id == namespace_user_db_id, User.robot == False).username
  except User.DoesNotExist:
    raise InvalidUsernameException('User with id does not exist: %s' % namespace_user_db_id)
def get_user_by_uuid(user_uuid):
  """ Return the non-organization user with the given uuid; None when absent. """
  try:
    found = User.get(User.uuid == user_uuid, User.organization == False)
  except User.DoesNotExist:
    return None
  return found
def get_user_or_org_by_customer_id(customer_id):
  """ Return the user/org with the given Stripe customer id; None when absent. """
  try:
    found = User.get(User.stripe_id == customer_id)
  except User.DoesNotExist:
    return None
  return found
def get_matching_user_namespaces(namespace_prefix, username, limit=10):
  """ Return up to `limit` distinct namespaces matching the prefix, filtered down to
      repositories visible to the named user. """
  base_query = (Namespace
                .select()
                .distinct()
                .limit(limit)
                .join(Repository, on=(Repository.namespace_user == Namespace.id))
                .join(RepositoryPermission, JOIN_LEFT_OUTER)
                .where(Namespace.username ** (namespace_prefix + '%')))

  return _basequery.filter_to_repos_for_user(base_query, username)
def get_matching_users(username_prefix, robot_namespace=None,
                       organization=None):
  """ Return a generator of up to 10 lightweight result objects (username, email,
      robot flag and, when `organization` is given, whether the user is a member of
      that organization) whose username matches the prefix.

      Regular users match the raw prefix; when `robot_namespace` is supplied, robots
      under that namespace match the qualified prefix as well. """
  direct_user_query = (User.username ** (username_prefix + '%') &
                       (User.organization == False) & (User.robot == False))

  if robot_namespace:
    robot_prefix = format_robot_username(robot_namespace, username_prefix)
    direct_user_query = (direct_user_query |
                         (User.username ** (robot_prefix + '%') &
                          (User.robot == True)))

  query = (User
           .select(User.username, User.email, User.robot)
           .group_by(User.username, User.email, User.robot)
           .where(direct_user_query))

  if organization:
    # The summed Team.id is non-NULL exactly when the user belongs to a team in the org.
    query = (query
             .select(User.username, User.email, User.robot, fn.Sum(Team.id))
             .join(TeamMember, JOIN_LEFT_OUTER)
             .join(Team, JOIN_LEFT_OUTER, on=((Team.id == TeamMember.team) &
                                              (Team.organization == organization))))

  class MatchingUserResult(object):
    # Thin positional wrapper over the selected tuple columns.
    def __init__(self, *args):
      self.username = args[0]
      self.email = args[1]
      self.robot = args[2]

      if organization:
        self.is_org_member = (args[3] != None)
      else:
        self.is_org_member = None

  return (MatchingUserResult(*args) for args in query.tuples().limit(10))
def verify_user(username_or_email, password):
  """ Authenticate a user by username or e-mail and password.

      Returns the User row on success and None on failure. Applies exponential
      login-attempt backoff: raises TooManyLoginAttemptsException while inside the
      backoff window, increments the failure counter on a bad password, and resets
      it on success. """
  # Make sure we didn't get any unicode for the username.
  try:
    str(username_or_email)
  except ValueError:
    return None

  try:
    fetched = User.get((User.username == username_or_email) |
                       (User.email == username_or_email))
  except User.DoesNotExist:
    return None

  now = datetime.utcnow()

  if fetched.invalid_login_attempts > 0:
    can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
                                       fetched.last_invalid_login)

    if can_retry_at > now:
      retry_after = can_retry_at - now
      raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())

  # bcrypt check: hashing with the stored hash as salt must reproduce the stored hash.
  if (fetched.password_hash and
      hash_password(password, fetched.password_hash) == fetched.password_hash):
    if fetched.invalid_login_attempts > 0:
      fetched.invalid_login_attempts = 0
      fetched.save()

    return fetched

  fetched.invalid_login_attempts += 1
  fetched.last_invalid_login = now
  fetched.save()

  # We weren't able to authorize the user
  return None
def get_all_repo_users(namespace_name, repository_name):
  """ Return RepositoryPermission rows (with user, role) for users granted direct
      permission on the repository. """
  return (RepositoryPermission
          .select(User.username, User.email, User.robot, Role.name, RepositoryPermission)
          .join(User)
          .switch(RepositoryPermission)
          .join(Role)
          .switch(RepositoryPermission)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
  """ Return distinct users who gain access to the repository via membership of a
      team that holds a permission on it. """
  return (User
          .select()
          .distinct()
          .join(TeamMember)
          .join(Team)
          .join(RepositoryPermission)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))
def get_all_repo_users_transitive(namespace_name, repository_name):
  """ Return the users with access to the repository, whether granted directly or via
      team membership, de-duplicated by username (direct grants listed first). """
  via_teams = get_all_repo_users_transitive_via_teams(namespace_name, repository_name)
  directly = [perm.user for perm in get_all_repo_users(namespace_name, repository_name)]

  seen_usernames = set()
  combined = []
  for candidate in list(directly) + list(via_teams):
    if candidate.username not in seen_usernames:
      seen_usernames.add(candidate.username)
      combined.append(candidate)

  return combined
def get_private_repo_count(username):
  """ Return the number of private repositories in the given namespace. """
  return (Repository
          .select()
          .join(Visibility)
          .switch(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == username, Visibility.name == 'private')
          .count())
def get_active_users():
  """ Return all real users (excluding organizations and robots). """
  return User.select().where(User.organization == False, User.robot == False)
def get_active_user_count():
  """ Return the count of real users (excluding organizations and robots). """
  return get_active_users().count()
def detach_external_login(user, service_name):
  """ Remove the user's federated login for the named service.

      A no-op when the service itself is unknown. """
  try:
    service = LoginService.get(name=service_name)
  except LoginService.DoesNotExist:
    return

  FederatedLogin.delete().where(FederatedLogin.user == user,
                                FederatedLogin.service == service).execute()
def delete_user(user):
  """ Delete the user row and all dependent rows (recursively, nulling where allowed). """
  user.delete_instance(recursive=True, delete_nullable=True)

  # TODO: also delete any repository data associated
def get_pull_credentials(robotname):
  """ Return a dict of registry pull credentials (username/password/registry URL) for
      the named robot, or None when the robot or its login is missing. """
  try:
    robot = lookup_robot(robotname)
  except InvalidRobotException:
    return None

  try:
    login_info = FederatedLogin.get(user=robot)
  except FederatedLogin.DoesNotExist:
    return None

  return {
    'username': robot.username,
    # The robot's token is stored as the service_ident of its federated login.
    'password': login_info.service_ident,
    'registry': '%s://%s/v1/' % (config.app_config['PREFERRED_URL_SCHEME'],
                                 config.app_config['SERVER_HOSTNAME']),
  }

View file

@ -3,18 +3,21 @@ import logging
import json
import itertools
import uuid
import struct
import os
import urllib
import jwt
from util.aes import AESCipher
from util.validation import generate_valid_usernames
from data import model
from collections import namedtuple
from datetime import datetime, timedelta
import features
from data import model
from util.aes import AESCipher
from util.validation import generate_valid_usernames
logger = logging.getLogger(__name__)
if os.environ.get('LDAP_DEBUG') == '1':
logger.setLevel(logging.DEBUG)
@ -25,7 +28,7 @@ if os.environ.get('LDAP_DEBUG') == '1':
def _get_federated_user(username, email, federated_service, create_new_user):
db_user = model.verify_federated_login(federated_service, username)
db_user = model.user.verify_federated_login(federated_service, username)
if not db_user:
if not create_new_user:
return (None, 'Invalid user')
@ -33,15 +36,15 @@ def _get_federated_user(username, email, federated_service, create_new_user):
# We must create the user in our db
valid_username = None
for valid_username in generate_valid_usernames(username):
if model.is_username_unique(valid_username):
if model.user.is_username_unique(valid_username):
break
if not valid_username:
logger.error('Unable to pick a username for user: %s', username)
return (None, 'Unable to pick a username. Please report this to your administrator.')
db_user = model.create_federated_user(valid_username, email, federated_service, username,
set_password_notification=False)
db_user = model.user.create_federated_user(valid_username, email, federated_service, username,
set_password_notification=False)
else:
# Update the db attributes from ldap
db_user.email = email
@ -50,20 +53,22 @@ def _get_federated_user(username, email, federated_service, create_new_user):
return (db_user, None)
class JWTAuthUsers(object):
class ExternalJWTAuthN(object):
""" Delegates authentication to a REST endpoint that returns JWTs. """
PUBLIC_KEY_FILENAME = 'jwt-authn.cert'
def __init__(self, verify_url, issuer, override_config_dir, http_client, public_key_path=None):
def __init__(self, verify_url, issuer, override_config_dir, http_client, max_fresh_s,
public_key_path=None):
self.verify_url = verify_url
self.issuer = issuer
self.client = http_client
self.max_fresh_s = max_fresh_s
default_key_path = os.path.join(override_config_dir, JWTAuthUsers.PUBLIC_KEY_FILENAME)
default_key_path = os.path.join(override_config_dir, ExternalJWTAuthN.PUBLIC_KEY_FILENAME)
public_key_path = public_key_path or default_key_path
if not os.path.exists(public_key_path):
error_message = ('JWT Authentication public key file "%s" not found in directory %s' %
(JWTAuthUsers.PUBLIC_KEY_FILENAME, override_config_dir))
(ExternalJWTAuthN.PUBLIC_KEY_FILENAME, override_config_dir))
raise Exception(error_message)
@ -99,21 +104,22 @@ class JWTAuthUsers(object):
if not 'exp' in payload:
raise Exception('Missing exp field in JWT')
# Verify that the expiration is no more than 300 seconds in the future.
# Verify that the expiration is no more than self.max_fresh_s seconds in the future.
expiration = datetime.utcfromtimestamp(payload['exp'])
if expiration > datetime.utcnow() + timedelta(seconds=300):
logger.debug('Payload expiration is outside of the 300 second window: %s', payload['exp'])
if expiration > datetime.utcnow() + timedelta(seconds=self.max_fresh_s):
logger.debug('Payload expiration is outside of the %s second window: %s', self.max_fresh_s,
payload['exp'])
return (None, 'Invalid username or password')
# Parse out the username and email.
return _get_federated_user(payload['sub'], payload['email'], 'jwtauthn', create_new_user)
def confirm_existing_user(self, username, password):
db_user = model.get_user(username)
db_user = model.user.get_user(username)
if not db_user:
return (None, 'Invalid user')
federated_login = model.lookup_federated_login(db_user, 'jwtauthn')
federated_login = model.user.lookup_federated_login(db_user, 'jwtauthn')
if not federated_login:
return (None, 'Invalid user')
@ -123,7 +129,7 @@ class JWTAuthUsers(object):
class DatabaseUsers(object):
def verify_user(self, username_or_email, password):
""" Simply delegate to the model implementation. """
result = model.verify_user(username_or_email, password)
result = model.user.verify_user(username_or_email, password)
if not result:
return (None, 'Invalid Username or Password')
@ -239,11 +245,11 @@ class LDAPUsers(object):
""" Verify the username and password by looking up the *LDAP* username and confirming the
password.
"""
db_user = model.get_user(username)
db_user = model.user.get_user(username)
if not db_user:
return (None, 'Invalid user')
federated_login = model.lookup_federated_login(db_user, 'ldap')
federated_login = model.user.lookup_federated_login(db_user, 'ldap')
if not federated_login:
return (None, 'Invalid user')
@ -327,7 +333,9 @@ class UserAuthentication(object):
elif authentication_type == 'JWT':
verify_url = app.config.get('JWT_VERIFY_ENDPOINT')
issuer = app.config.get('JWT_AUTH_ISSUER')
users = JWTAuthUsers(verify_url, issuer, override_config_dir, app.config['HTTPCLIENT'])
max_fresh_s = app.config.get('JWT_AUTH_MAX_FRESH_S', 300)
users = ExternalJWTAuthN(verify_url, issuer, override_config_dir, max_fresh_s,
app.config['HTTPCLIENT'])
else:
raise RuntimeError('Unknown authentication type: %s' % authentication_type)
@ -399,8 +407,6 @@ class UserAuthentication(object):
def verify_user(self, username_or_email, password, basic_auth=False):
# First try to decode the password as a signed token.
if basic_auth:
import features
decrypted = self._decrypt_user_password(password)
if decrypted is None:
# This is a normal password.

0
digest/__init__.py Normal file
View file

64
digest/digest_tools.py Normal file
View file

@ -0,0 +1,64 @@
import re
import os.path
import hashlib
from collections import namedtuple
# Parsed representation of a content digest string.
#   is_tarsum       -- True when the digest carries a 'tarsum.<version>+' prefix
#   tarsum_version  -- the tarsum version string (e.g. 'v1'), or None
#   hash_alg        -- hash algorithm name (e.g. 'sha256')
#   hash_bytes      -- lowercase hex digest payload
Digest = namedtuple('Digest', ['is_tarsum', 'tarsum_version', 'hash_alg', 'hash_bytes'])

DIGEST_PATTERN = r'(tarsum\.(v[\w]+)\+)?([\w]+):([0-9a-f]+)'
DIGEST_REGEX = re.compile(DIGEST_PATTERN)


class InvalidDigestException(RuntimeError):
  """ Raised when a digest string does not match the expected format. """
  pass


def parse_digest(digest):
  """ Returns the digest parsed out to its components.

  Raises InvalidDigestException if the string is not a well-formed digest.
  """
  match = DIGEST_REGEX.match(digest)
  # Require the pattern to consume the entire string, not just a prefix.
  if match is None or match.end() != len(digest):
    # Bug fix: the original passed the digest as a second constructor argument
    # ('%s', digest), which left the placeholder unformatted in the message.
    raise InvalidDigestException('Not a valid digest: %s' % digest)

  is_tarsum = match.group(1) is not None
  return Digest(is_tarsum, match.group(2), match.group(3), match.group(4))
def content_path(digest):
  """ Returns a relative path to the parsed digest. """
  parsed = parse_digest(digest)

  path_parts = []
  if parsed.is_tarsum:
    path_parts.append('tarsum')
    path_parts.append(parsed.tarsum_version)

  # Bucket by a fixed two-character prefix of the hash, padding short hashes
  # with leading zeros (e.g. 'abc...' -> 'ab', 'a' -> '0a').
  bucket = parsed.hash_bytes[:2].zfill(2)
  path_parts += [parsed.hash_alg, bucket, parsed.hash_bytes]
  return os.path.join(*path_parts)
def sha256_digest(content):
  """ Returns a sha256 hash of the content bytes in digest form. """
  # Hash the single buffer directly; equivalent to feeding it through the
  # chunked generator path one piece at a time.
  return 'sha256:{0}'.format(hashlib.sha256(content).hexdigest())
def sha256_digest_from_generator(content_generator):
  """ Consume an iterable of byte chunks and return a sha256 digest of the
      concatenated content, in digest form.
  """
  hasher = hashlib.sha256()
  for chunk in content_generator:
    hasher.update(chunk)
  return 'sha256:{0}'.format(hasher.hexdigest())
def digests_equal(lhs_digest_string, rhs_digest_string):
  """ Parse and compare the two digests, returns True if the digests are equal, False otherwise.
  """
  parsed_lhs = parse_digest(lhs_digest_string)
  parsed_rhs = parse_digest(rhs_digest_string)
  return parsed_lhs == parsed_rhs

View file

@ -249,7 +249,7 @@ def require_repo_permission(permission_class, scope, allow_public=False):
permission = permission_class(namespace, repository)
if (permission.can() or
(allow_public and
model.repository_is_public(namespace, repository))):
model.repository.repository_is_public(namespace, repository))):
return func(self, namespace, repository, *args, **kwargs)
raise Unauthorized()
return wrapped
@ -376,8 +376,8 @@ def log_action(kind, user_or_orgname, metadata=None, repo=None):
metadata['oauth_token_application'] = oauth_token.application.name
performer = get_authenticated_user()
model.log_action(kind, user_or_orgname, performer=performer, ip=request.remote_addr,
metadata=metadata, repository=repo)
model.log.log_action(kind, user_or_orgname, performer=performer, ip=request.remote_addr,
metadata=metadata, repository=repo)
def define_json_response(schema_name):

View file

@ -6,7 +6,7 @@ from flask import request
from app import billing
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, log_action,
related_user_resource, internal_only, Unauthorized, NotFound,
require_user_admin, show_if, hide_if, path_param, require_scope, abort)
require_user_admin, show_if, path_param, require_scope, abort)
from endpoints.api.subscribe import subscribe, subscription_view
from auth.permissions import AdministerOrganizationPermission
from auth.auth_context import get_authenticated_user
@ -225,7 +225,7 @@ class OrganizationCard(ApiResource):
""" Get the organization's credit card. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
return get_card(organization)
raise Unauthorized()
@ -236,7 +236,7 @@ class OrganizationCard(ApiResource):
""" Update the organization's credit card. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
token = request.get_json()['token']
response = set_card(organization, token)
log_action('account_change_cc', orgname)
@ -288,7 +288,7 @@ class UserPlan(ApiResource):
""" Fetch any existing subscription for the user. """
cus = None
user = get_authenticated_user()
private_repos = model.get_private_repo_count(user.username)
private_repos = model.user.get_private_repo_count(user.username)
if user.stripe_id:
try:
@ -345,7 +345,7 @@ class OrganizationPlan(ApiResource):
request_data = request.get_json()
plan = request_data['plan']
token = request_data['token'] if 'token' in request_data else None
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
return subscribe(organization, plan, token, True) # Business plan required
raise Unauthorized()
@ -357,8 +357,8 @@ class OrganizationPlan(ApiResource):
cus = None
permission = AdministerOrganizationPermission(orgname)
if permission.can():
private_repos = model.get_private_repo_count(orgname)
organization = model.get_organization(orgname)
private_repos = model.user.get_private_repo_count(orgname)
organization = model.organization.get_organization(orgname)
if organization.stripe_id:
try:
cus = billing.Customer.retrieve(organization.stripe_id)
@ -406,7 +406,7 @@ class OrganizationInvoiceList(ApiResource):
""" List the invoices for the specified organization. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
if not organization.stripe_id:
raise NotFound()
@ -519,7 +519,7 @@ class OrganizationInvoiceFieldList(ApiResource):
""" List the invoice fields for the organization. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
if not organization.stripe_id:
raise NotFound()
@ -534,7 +534,7 @@ class OrganizationInvoiceFieldList(ApiResource):
""" Creates a new invoice field. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
if not organization.stripe_id:
raise NotFound()
@ -558,7 +558,7 @@ class OrganizationInvoiceField(ApiResource):
""" Deletes the invoice field for the current user. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
organization = model.organization.get_organization(orgname)
if not organization.stripe_id:
raise NotFound()

View file

@ -2,10 +2,9 @@
import logging
import json
import time
import datetime
from flask import request, redirect
from flask import request
from app import app, userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
@ -14,7 +13,8 @@ from endpoints.api import (RepositoryParamResource, parse_args, query_param, nic
path_param, InvalidRequest, require_repo_admin)
from endpoints.building import start_build, PreparedBuild
from endpoints.trigger import BuildTriggerHandler
from data import model, database
from data import database
from data import model
from auth.auth_context import get_authenticated_user
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
AdministerRepositoryPermission, AdministerOrganizationPermission)
@ -122,7 +122,7 @@ def build_status_view(build_obj):
'status': status or {},
'subdirectory': job_config.get('build_subdir', ''),
'tags': job_config.get('docker_tags', []),
'manual_user': job_config.get('manual_user', None),
'manual_user': job_config.get('manual_user', None),
'is_writer': can_write,
'trigger': trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
'trigger_metadata': job_config.get('trigger_metadata', None) if can_read else None,
@ -192,7 +192,7 @@ class RepositoryBuildList(RepositoryParamResource):
if since is not None:
since = datetime.datetime.utcfromtimestamp(since)
builds = model.list_repository_builds(namespace, repository, limit, since=since)
builds = model.build.list_repository_builds(namespace, repository, limit, since=since)
return {
'builds': [build_status_view(build) for build in builds]
}
@ -214,12 +214,13 @@ class RepositoryBuildList(RepositoryParamResource):
if pull_robot_name:
result = parse_robot_username(pull_robot_name)
if result:
pull_robot = model.lookup_robot(pull_robot_name)
if not pull_robot:
try:
model.user.lookup_robot(pull_robot_name)
except model.InvalidRobotException:
raise NotFound()
# Make sure the user has administer permissions for the robot's namespace.
(robot_namespace, shortname) = result
(robot_namespace, _) = result
if not AdministerOrganizationPermission(robot_namespace).can():
raise Unauthorized()
else:
@ -228,14 +229,14 @@ class RepositoryBuildList(RepositoryParamResource):
# Check if the dockerfile resource has already been used. If so, then it
# can only be reused if the user has access to the repository in which the
# dockerfile was previously built.
associated_repository = model.get_repository_for_resource(dockerfile_id)
associated_repository = model.build.get_repository_for_resource(dockerfile_id)
if associated_repository:
if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
associated_repository.name):
raise Unauthorized()
# Start the build.
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
prepared = PreparedBuild()
prepared.build_name = user_files.get_file_checksum(dockerfile_id)
@ -267,8 +268,8 @@ class RepositoryBuildResource(RepositoryParamResource):
def get(self, namespace, repository, build_uuid):
""" Returns information about a build. """
try:
build = model.get_repository_build(build_uuid)
except model.InvalidRepositoryBuildException:
build = model.build.get_repository_build(build_uuid)
except model.build.InvalidRepositoryBuildException:
raise NotFound()
return build_status_view(build)
@ -278,14 +279,14 @@ class RepositoryBuildResource(RepositoryParamResource):
def delete(self, namespace, repository, build_uuid):
""" Cancels a repository build if it has not yet been picked up by a build worker. """
try:
build = model.get_repository_build(build_uuid)
except model.InvalidRepositoryBuildException:
build = model.build.get_repository_build(build_uuid)
except model.build.InvalidRepositoryBuildException:
raise NotFound()
if build.repository.name != repository or build.repository.namespace_user.username != namespace:
raise NotFound()
if model.cancel_repository_build(build, dockerfile_build_queue):
if model.build.cancel_repository_build(build, dockerfile_build_queue):
return 'Okay', 201
else:
raise InvalidRequest('Build is currently running or has finished')
@ -300,7 +301,7 @@ class RepositoryBuildStatus(RepositoryParamResource):
@nickname('getRepoBuildStatus')
def get(self, namespace, repository, build_uuid):
""" Return the status for the builds specified by the build uuids. """
build = model.get_repository_build(build_uuid)
build = model.build.get_repository_build(build_uuid)
if (not build or build.repository.name != repository or
build.repository.namespace_user.username != namespace):
raise NotFound()
@ -319,7 +320,7 @@ class RepositoryBuildLogs(RepositoryParamResource):
""" Return the build logs for the build specified by the build uuid. """
response_obj = {}
build = model.get_repository_build(build_uuid)
build = model.build.get_repository_build(build_uuid)
if (not build or build.repository.name != repository or
build.repository.namespace_user.username != namespace):
raise NotFound()

View file

@ -24,7 +24,7 @@ def image_view(image, image_map, include_locations=True, include_ancestors=True)
return image_map[aid].docker_image_id
image_data = {
image_data = {
'id': image.docker_image_id,
'created': format_date(extended_props.created),
'comment': extended_props.comment,
@ -60,8 +60,8 @@ class RepositoryImageList(RepositoryParamResource):
@nickname('listRepositoryImages')
def get(self, namespace, repository):
""" List the images for the specified repository. """
all_images = model.get_repository_images(namespace, repository)
all_tags = model.list_repository_tags(namespace, repository)
all_images = model.image.get_repository_images(namespace, repository)
all_tags = model.tag.list_repository_tags(namespace, repository)
tags_by_image_id = defaultdict(list)
found_image_ids = set()
@ -96,13 +96,13 @@ class RepositoryImage(RepositoryParamResource):
@nickname('getImage')
def get(self, namespace, repository, image_id):
""" Get the information available for the specified image. """
image = model.get_repo_image_extended(namespace, repository, image_id)
image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not image:
raise NotFound()
# Lookup all the ancestor images for the image.
image_map = {}
for current_image in model.get_parent_images(namespace, repository, image):
for current_image in model.image.get_parent_images(namespace, repository, image):
image_map[str(current_image.id)] = current_image
return historical_image_view(image, image_map)
@ -119,7 +119,7 @@ class RepositoryImageChanges(RepositoryParamResource):
@nickname('getImageChanges')
def get(self, namespace, repository, image_id):
""" Get the list of changes for the specified image. """
image = model.get_repo_image_extended(namespace, repository, image_id)
image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not image:
raise NotFound()

View file

@ -37,7 +37,7 @@ def log_view(log):
def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None):
performer = None
if performer_name:
performer = model.get_user(performer_name)
performer = model.user.get_user(performer_name)
if start_time:
try:
@ -58,8 +58,8 @@ def get_logs(start_time, end_time, performer_name=None, repository=None, namespa
if not end_time:
end_time = datetime.today()
logs = model.list_logs(start_time, end_time, performer=performer, repository=repository,
namespace=namespace)
logs = model.log.list_logs(start_time, end_time, performer=performer, repository=repository,
namespace=namespace)
return {
'start_time': format_date(start_time),
'end_time': format_date(end_time),
@ -78,7 +78,7 @@ class RepositoryLogs(RepositoryParamResource):
@query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
def get(self, args, namespace, repository):
""" List the logs for the specified repository. """
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
raise NotFound()

View file

@ -4,6 +4,8 @@ import logging
from flask import request
import features
from app import billing as stripe, avatar
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
related_user_resource, internal_only, Unauthorized, NotFound,
@ -11,15 +13,13 @@ from endpoints.api import (resource, nickname, ApiResource, validate_json_reques
require_scope)
from endpoints.api.team import team_view
from endpoints.api.user import User, PrivateRepositories
from auth.permissions import (AdministerOrganizationPermission, OrganizationMemberPermission,
from auth.permissions import (AdministerOrganizationPermission, OrganizationMemberPermission,
CreateRepositoryPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes
from data import model
from data.billing import get_plan
import features
logger = logging.getLogger(__name__)
@ -38,7 +38,7 @@ def org_view(o, teams):
}
if teams is not None:
teams = sorted(teams, key=lambda team:team.id)
teams = sorted(teams, key=lambda team: team.id)
view['teams'] = {t.name : team_view(o.username, t) for t in teams}
view['ordered_teams'] = [team.name for team in teams]
@ -84,22 +84,19 @@ class OrganizationList(ApiResource):
existing = None
try:
existing = model.get_organization(org_data['name'])
existing = model.organization.get_organization(org_data['name'])
except model.InvalidOrganizationException:
pass
if not existing:
try:
existing = model.get_user(org_data['name'])
except model.InvalidUserException:
pass
existing = model.user.get_user(org_data['name'])
if existing:
msg = 'A user or organization with this name already exists'
raise request_error(message=msg)
try:
model.create_organization(org_data['name'], org_data['email'], user)
model.organization.create_organization(org_data['name'], org_data['email'], user)
return 'Created', 201
except model.DataModelException as ex:
raise request_error(exception=ex)
@ -138,13 +135,13 @@ class Organization(ApiResource):
def get(self, orgname):
""" Get the details for the specified organization """
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
teams = None
if OrganizationMemberPermission(orgname).can():
teams = model.get_teams_within_org(org)
teams = model.team.get_teams_within_org(org)
return org_view(org, teams)
@ -157,28 +154,28 @@ class Organization(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
org_data = request.get_json()
if 'invoice_email' in org_data:
logger.debug('Changing invoice_email for organization: %s', org.username)
model.change_invoice_email(org, org_data['invoice_email'])
model.user.change_invoice_email(org, org_data['invoice_email'])
if 'email' in org_data and org_data['email'] != org.email:
new_email = org_data['email']
if model.find_user_by_email(new_email):
if model.user.find_user_by_email(new_email):
raise request_error(message='E-mail address already used')
logger.debug('Changing email address for organization: %s', org.username)
model.update_email(org, new_email)
model.user.update_email(org, new_email)
if 'tag_expiration' in org_data:
logger.debug('Changing organization tag expiration to: %ss', org_data['tag_expiration'])
model.change_user_tag_expiration(org, org_data['tag_expiration'])
model.user.change_user_tag_expiration(org, org_data['tag_expiration'])
teams = model.get_teams_within_org(org)
teams = model.team.get_teams_within_org(org)
return org_view(org, teams)
raise Unauthorized()
@ -197,8 +194,8 @@ class OrgPrivateRepositories(ApiResource):
""" Return whether or not this org is allowed to create new private repositories. """
permission = CreateRepositoryPermission(orgname)
if permission.can():
organization = model.get_organization(orgname)
private_repos = model.get_private_repo_count(organization.username)
organization = model.organization.get_organization(orgname)
private_repos = model.user.get_private_repo_count(organization.username)
data = {
'privateAllowed': False
}
@ -234,7 +231,7 @@ class OrganizationMemberList(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
@ -242,7 +239,7 @@ class OrganizationMemberList(ApiResource):
# will return an entry for *every team* a member is on, so we will have
# duplicate keys (which is why we pre-build the dictionary).
members_dict = {}
members = model.list_organization_members_by_teams(org)
members = model.team.list_organization_members_by_teams(org)
for member in members:
if member.user.robot:
continue
@ -264,7 +261,7 @@ class OrganizationMemberList(ApiResource):
})
# Loop to add direct repository permissions.
for permission in model.list_organization_member_permissions(org):
for permission in model.permission.list_organization_member_permissions(org):
username = permission.user.username
if not username in members_dict:
continue
@ -292,17 +289,17 @@ class OrganizationMember(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
# Lookup the user.
user = model.get_nonrobot_user(membername)
user = model.user.get_nonrobot_user(membername)
if not user:
raise NotFound()
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
# Remove the user from the organization.
model.remove_organization_member(org, user)
model.organization.remove_organization_member(org, user)
return 'Deleted', 204
raise Unauthorized()
@ -391,7 +388,7 @@ class OrganizationApplications(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
@ -408,18 +405,16 @@ class OrganizationApplications(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
app_data = request.get_json()
application = model.oauth.create_application(
org, app_data['name'],
app_data.get('application_uri', ''),
app_data.get('redirect_uri', ''),
description = app_data.get('description', ''),
avatar_email = app_data.get('avatar_email', None),)
application = model.oauth.create_application(org, app_data['name'],
app_data.get('application_uri', ''),
app_data.get('redirect_uri', ''),
description=app_data.get('description', ''),
avatar_email=app_data.get('avatar_email', None))
app_data.update({
'application_name': application.name,
@ -479,7 +474,7 @@ class OrganizationApplicationResource(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
@ -499,7 +494,7 @@ class OrganizationApplicationResource(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
@ -532,7 +527,7 @@ class OrganizationApplicationResource(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
@ -559,7 +554,7 @@ class OrganizationApplicationResetClientSecret(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()

View file

@ -23,7 +23,7 @@ def wrap_role_view_user(role_json, user):
role_json['name'] = user.username
role_json['is_robot'] = user.robot
if not user.robot:
role_json['avatar'] = avatar.get_data_for_user(user)
role_json['avatar'] = avatar.get_data_for_user(user)
return role_json
@ -46,7 +46,7 @@ class RepositoryTeamPermissionList(RepositoryParamResource):
@nickname('listRepoTeamPermissions')
def get(self, namespace, repository):
""" List all team permission. """
repo_perms = model.get_all_repo_teams(namespace, repository)
repo_perms = model.permission.get_all_repo_teams(namespace, repository)
def wrapped_role_view(repo_perm):
return wrap_role_view_team(role_view(repo_perm), repo_perm.team)
@ -68,7 +68,7 @@ class RepositoryUserPermissionList(RepositoryParamResource):
# Lookup the organization (if any).
org = None
try:
org = model.get_organization(namespace) # Will raise an error if not org
org = model.organization.get_organization(namespace) # Will raise an error if not org
except model.InvalidOrganizationException:
# This repository isn't under an org
pass
@ -80,7 +80,7 @@ class RepositoryUserPermissionList(RepositoryParamResource):
role_view_func = wrapped_role_view
if org:
org_members = model.get_organization_member_set(namespace)
org_members = model.organization.get_organization_member_set(namespace)
current_func = role_view_func
def wrapped_role_org_view(repo_perm):
@ -90,7 +90,7 @@ class RepositoryUserPermissionList(RepositoryParamResource):
role_view_func = wrapped_role_org_view
# Load and return the permissions.
repo_perms = model.get_all_repo_users(namespace, repository)
repo_perms = model.user.get_all_repo_users(namespace, repository)
return {
'permissions': {perm.user.username: role_view_func(perm)
for perm in repo_perms}
@ -107,15 +107,15 @@ class RepositoryUserTransitivePermission(RepositoryParamResource):
@nickname('getUserTransitivePermission')
def get(self, namespace, repository, username):
""" Get the fetch the permission for the specified user. """
user = model.get_user(username)
user = model.user.get_user(username)
if not user:
raise NotFound
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
raise NotFound
permissions = list(model.get_user_repo_permissions(user, repo))
permissions = list(model.permission.get_user_repo_permissions(user, repo))
return {
'permissions': [role_view(permission) for permission in permissions]
}
@ -152,14 +152,13 @@ class RepositoryUserPermission(RepositoryParamResource):
@nickname('getUserPermissions')
def get(self, namespace, repository, username):
""" Get the Fetch the permission for the specified user. """
logger.debug('Get repo: %s/%s permissions for user %s' %
(namespace, repository, username))
perm = model.get_user_reponame_permission(username, namespace, repository)
logger.debug('Get repo: %s/%s permissions for user %s', namespace, repository, username)
perm = model.permission.get_user_reponame_permission(username, namespace, repository)
perm_view = wrap_role_view_user(role_view(perm), perm.user)
try:
model.get_organization(namespace)
org_members = model.get_organization_member_set(namespace)
model.organization.get_organization(namespace)
org_members = model.organization.get_organization_member_set(namespace)
perm_view = wrap_role_view_org(perm_view, perm.user, org_members)
except model.InvalidOrganizationException:
# This repository is not part of an organization
@ -174,20 +173,19 @@ class RepositoryUserPermission(RepositoryParamResource):
""" Update the permissions for an existing repository. """
new_permission = request.get_json()
logger.debug('Setting permission to: %s for user %s' %
(new_permission['role'], username))
logger.debug('Setting permission to: %s for user %s', new_permission['role'], username)
try:
perm = model.set_user_repo_permission(username, namespace, repository,
new_permission['role'])
except model.InvalidUsernameException as ex:
perm = model.permission.set_user_repo_permission(username, namespace, repository,
new_permission['role'])
except model.DataModelException as ex:
raise request_error(exception=ex)
perm_view = wrap_role_view_user(role_view(perm), perm.user)
try:
model.get_organization(namespace)
org_members = model.get_organization_member_set(namespace)
model.organization.get_organization(namespace)
org_members = model.organization.get_organization_member_set(namespace)
perm_view = wrap_role_view_org(perm_view, perm.user, org_members)
except model.InvalidOrganizationException:
# This repository is not part of an organization
@ -198,7 +196,7 @@ class RepositoryUserPermission(RepositoryParamResource):
log_action('change_repo_permission', namespace,
{'username': username, 'repo': repository,
'role': new_permission['role']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return perm_view, 200
@ -207,13 +205,13 @@ class RepositoryUserPermission(RepositoryParamResource):
def delete(self, namespace, repository, username):
""" Delete the permission for the user. """
try:
model.delete_user_permission(username, namespace, repository)
model.permission.delete_user_permission(username, namespace, repository)
except model.DataModelException as ex:
raise request_error(exception=ex)
log_action('delete_repo_permission', namespace,
{'username': username, 'repo': repository},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return 'Deleted', 204
@ -249,9 +247,8 @@ class RepositoryTeamPermission(RepositoryParamResource):
@nickname('getTeamPermissions')
def get(self, namespace, repository, teamname):
""" Fetch the permission for the specified team. """
logger.debug('Get repo: %s/%s permissions for team %s' %
(namespace, repository, teamname))
perm = model.get_team_reponame_permission(teamname, namespace, repository)
logger.debug('Get repo: %s/%s permissions for team %s', namespace, repository, teamname)
perm = model.permission.get_team_reponame_permission(teamname, namespace, repository)
return role_view(perm)
@require_repo_admin
@ -261,16 +258,15 @@ class RepositoryTeamPermission(RepositoryParamResource):
""" Update the existing team permission. """
new_permission = request.get_json()
logger.debug('Setting permission to: %s for team %s' %
(new_permission['role'], teamname))
logger.debug('Setting permission to: %s for team %s', new_permission['role'], teamname)
perm = model.set_team_repo_permission(teamname, namespace, repository,
new_permission['role'])
perm = model.permission.set_team_repo_permission(teamname, namespace, repository,
new_permission['role'])
log_action('change_repo_permission', namespace,
{'team': teamname, 'repo': repository,
'role': new_permission['role']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return wrap_role_view_team(role_view(perm), perm.team), 200
@ -278,10 +274,10 @@ class RepositoryTeamPermission(RepositoryParamResource):
@nickname('deleteTeamPermissions')
def delete(self, namespace, repository, teamname):
""" Delete the permission for the specified team. """
model.delete_team_permission(teamname, namespace, repository)
model.permission.delete_team_permission(teamname, namespace, repository)
log_action('delete_repo_permission', namespace,
{'team': teamname, 'repo': repository},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return 'Deleted', 204

View file

@ -3,8 +3,7 @@
from flask import request
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
log_action, Unauthorized, NotFound, internal_only, path_param,
require_scope)
log_action, Unauthorized, NotFound, path_param, require_scope)
from auth.permissions import AdministerOrganizationPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
@ -129,12 +128,12 @@ class PermissionPrototypeList(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
permissions = model.get_prototype_permissions(org)
org_members = model.get_organization_member_set(orgname)
permissions = model.permission.get_prototype_permissions(org)
org_members = model.organization.get_organization_member_set(orgname)
return {'prototypes': [prototype_view(p, org_members) for p in permissions]}
raise Unauthorized()
@ -147,7 +146,7 @@ class PermissionPrototypeList(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
@ -165,9 +164,9 @@ class PermissionPrototypeList(ApiResource):
delegate_username = delegate_name if delegate_kind == 'user' else None
delegate_teamname = delegate_name if delegate_kind == 'team' else None
activating_user = (model.get_user(activating_username) if activating_username else None)
delegate_user = (model.get_user(delegate_username) if delegate_username else None)
delegate_team = (model.get_organization_team(orgname, delegate_teamname)
activating_user = (model.user.get_user(activating_username) if activating_username else None)
delegate_user = (model.user.get_user(delegate_username) if delegate_username else None)
delegate_team = (model.team.get_organization_team(orgname, delegate_teamname)
if delegate_teamname else None)
if activating_username and not activating_user:
@ -178,10 +177,10 @@ class PermissionPrototypeList(ApiResource):
role_name = details['role']
prototype = model.add_prototype_permission(org, role_name, activating_user,
delegate_user, delegate_team)
prototype = model.permission.add_prototype_permission(org, role_name, activating_user,
delegate_user, delegate_team)
log_prototype_action('create_prototype_permission', orgname, prototype)
org_members = model.get_organization_member_set(orgname)
org_members = model.organization.get_organization_member_set(orgname)
return prototype_view(prototype, org_members)
raise Unauthorized()
@ -221,11 +220,11 @@ class PermissionPrototype(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
prototype = model.delete_prototype_permission(org, prototypeid)
prototype = model.permission.delete_prototype_permission(org, prototypeid)
if not prototype:
raise NotFound()
@ -243,23 +242,23 @@ class PermissionPrototype(ApiResource):
permission = AdministerOrganizationPermission(orgname)
if permission.can():
try:
org = model.get_organization(orgname)
org = model.organization.get_organization(orgname)
except model.InvalidOrganizationException:
raise NotFound()
existing = model.get_prototype_permission(org, prototypeid)
existing = model.permission.get_prototype_permission(org, prototypeid)
if not existing:
raise NotFound()
details = request.get_json()
role_name = details['role']
prototype = model.update_prototype_permission(org, prototypeid, role_name)
prototype = model.permission.update_prototype_permission(org, prototypeid, role_name)
if not prototype:
raise NotFound()
log_prototype_action('modify_prototype_permission', orgname, prototype,
original_role=existing.role.name)
org_members = model.get_organization_member_set(orgname)
org_members = model.organization.get_organization_member_set(orgname)
return prototype_view(prototype, org_members)
raise Unauthorized()

View file

@ -18,6 +18,7 @@ import features
logger = logging.getLogger(__name__)
def record_view(record):
return {
'email': record.email,
@ -38,7 +39,7 @@ class RepositoryAuthorizedEmail(RepositoryParamResource):
@nickname('checkRepoEmailAuthorized')
def get(self, namespace, repository, email):
""" Checks to see if the given e-mail address is authorized on this repository. """
record = model.get_email_authorized_for_repo(namespace, repository, email)
record = model.repository.get_email_authorized_for_repo(namespace, repository, email)
if not record:
abort(404)
@ -51,12 +52,12 @@ class RepositoryAuthorizedEmail(RepositoryParamResource):
""" Starts the authorization process for an e-mail address on a repository. """
with tf(db):
record = model.get_email_authorized_for_repo(namespace, repository, email)
record = model.repository.get_email_authorized_for_repo(namespace, repository, email)
if record and record.confirmed:
return record_view(record)
if not record:
record = model.create_email_authorization_for_repo(namespace, repository, email)
record = model.repository.create_email_authorization_for_repo(namespace, repository, email)
send_repo_authorization_email(namespace, repository, email, record.code)
return record_view(record)

View file

@ -1,7 +1,6 @@
""" List, create and manage repositories. """
import logging
import json
import datetime
from datetime import timedelta
@ -9,9 +8,8 @@ from datetime import timedelta
from flask import request
from data import model
from data.model import Namespace
from data.database import (Repository as RepositoryTable, Visibility, RepositoryTag,
RepositoryActionCount, fn)
RepositoryActionCount, Namespace, fn)
from endpoints.api import (truthy_bool, format_date, nickname, log_action, validate_json_request,
require_repo_read, require_repo_write, require_repo_admin,
@ -20,7 +18,7 @@ from endpoints.api import (truthy_bool, format_date, nickname, log_action, valid
path_param)
from auth.permissions import (ModifyRepositoryPermission, AdministerRepositoryPermission,
CreateRepositoryPermission, ReadRepositoryPermission)
CreateRepositoryPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes
@ -85,13 +83,13 @@ class RepositoryList(ApiResource):
repository_name = req['repository']
visibility = req['visibility']
existing = model.get_repository(namespace_name, repository_name)
existing = model.repository.get_repository(namespace_name, repository_name)
if existing:
raise request_error(message='Repository already exists')
visibility = req['visibility']
repo = model.create_repository(namespace_name, repository_name, owner, visibility)
repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility)
repo.description = req['description']
repo.save()
@ -124,7 +122,7 @@ class RepositoryList(ApiResource):
"""Fetch the list of repositories under a variety of situations."""
username = None
if get_authenticated_user():
starred_repos = model.get_user_starred_repositories(get_authenticated_user())
starred_repos = model.repository.get_user_starred_repositories(get_authenticated_user())
star_lookup = set([repo.id for repo in starred_repos])
if args['private']:
@ -133,22 +131,22 @@ class RepositoryList(ApiResource):
response = {}
# Find the matching repositories.
repo_query = model.get_visible_repositories(username,
limit=args['limit'],
page=args['page'],
include_public=args['public'],
namespace=args['namespace'],
namespace_only=args['namespace_only'])
repo_query = model.repository.get_visible_repositories(username,
limit=args['limit'],
page=args['page'],
include_public=args['public'],
namespace=args['namespace'],
namespace_only=args['namespace_only'])
# Collect the IDs of the repositories found for subequent lookup of popularity
# and/or last modified.
repository_ids = [repo.get(RepositoryTable.id) for repo in repo_query]
if args['last_modified']:
last_modified_map = model.get_when_last_modified(repository_ids)
last_modified_map = model.repository.get_when_last_modified(repository_ids)
if args['popularity']:
action_count_map = model.get_action_counts(repository_ids)
action_count_map = model.repository.get_action_counts(repository_ids)
def repo_view(repo_obj):
repo = {
@ -210,26 +208,27 @@ class Repository(RepositoryParamResource):
}
if tag.lifetime_start_ts > 0:
tag_info['last_modified'] = format_date(datetime.datetime.fromtimestamp(tag.lifetime_start_ts))
last_modified = format_date(datetime.datetime.fromtimestamp(tag.lifetime_start_ts))
tag_info['last_modified'] = last_modified
return tag_info
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
tags = model.list_repository_tags(namespace, repository, include_storage=True)
tags = model.tag.list_repository_tags(namespace, repository, include_storage=True)
tag_dict = {tag.name: tag_view(tag) for tag in tags}
can_write = ModifyRepositoryPermission(namespace, repository).can()
can_admin = AdministerRepositoryPermission(namespace, repository).can()
is_starred = (model.repository_is_starred(get_authenticated_user(), repo)
is_starred = (model.repository.repository_is_starred(get_authenticated_user(), repo)
if get_authenticated_user() else False)
is_public = model.is_repository_public(repo)
is_public = model.repository.is_repository_public(repo)
(pull_today, pull_thirty_day) = model.get_repository_pulls(repo, timedelta(days=1),
timedelta(days=30))
(pull_today, pull_thirty_day) = model.log.get_repository_pulls(repo, timedelta(days=1),
timedelta(days=30))
(push_today, push_thirty_day) = model.get_repository_pushes(repo, timedelta(days=1),
timedelta(days=30))
(push_today, push_thirty_day) = model.log.get_repository_pushes(repo, timedelta(days=1),
timedelta(days=30))
return {
'namespace': namespace,
@ -261,7 +260,7 @@ class Repository(RepositoryParamResource):
@validate_json_request('RepoUpdate')
def put(self, namespace, repository):
""" Update the description in the specified repository. """
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
values = request.get_json()
repo.description = values['description']
@ -279,7 +278,7 @@ class Repository(RepositoryParamResource):
@nickname('deleteRepository')
def delete(self, namespace, repository):
""" Delete a repository. """
model.purge_repository(namespace, repository)
model.repository.purge_repository(namespace, repository)
log_action('delete_repo', namespace,
{'repo': repository, 'namespace': namespace})
return 'Deleted', 204
@ -315,10 +314,10 @@ class RepositoryVisibility(RepositoryParamResource):
@validate_json_request('ChangeVisibility')
def post(self, namespace, repository):
""" Change the visibility of a repository. """
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
values = request.get_json()
model.set_repository_visibility(repo, values['visibility'])
model.repository.set_repository_visibility(repo, values['visibility'])
log_action('change_repo_visibility', namespace,
{'repo': repository, 'visibility': values['visibility']},
repo=repo)

View file

@ -2,11 +2,11 @@
import json
from flask import request, abort
from flask import request
from app import notification_queue
from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
log_action, validate_json_request, api, NotFound, request_error,
log_action, validate_json_request, NotFound, request_error,
path_param)
from endpoints.notificationevent import NotificationEvent
from endpoints.notificationmethod import (NotificationMethod,
@ -15,17 +15,17 @@ from endpoints.notificationhelper import build_notification_data
from data import model
def notification_view(notification):
def notification_view(note):
config = {}
try:
config = json.loads(notification.config_json)
config = json.loads(note.config_json)
except:
config = {}
return {
'uuid': notification.uuid,
'event': notification.event.name,
'method': notification.method.name,
'uuid': note.uuid,
'event': note.event.name,
'method': note.method.name,
'config': config
}
@ -66,25 +66,25 @@ class RepositoryNotificationList(RepositoryParamResource):
@validate_json_request('NotificationCreateRequest')
def post(self, namespace, repository):
""" Create a new notification for the specified repository. """
repo = model.get_repository(namespace, repository)
json = request.get_json()
repo = model.repository.get_repository(namespace, repository)
parsed = request.get_json()
method_handler = NotificationMethod.get_method(json['method'])
method_handler = NotificationMethod.get_method(parsed['method'])
if not method_handler:
raise request_error(message='Unknown method')
try:
method_handler.validate(repo, json['config'])
method_handler.validate(repo, parsed['config'])
except CannotValidateNotificationMethodException as ex:
raise request_error(message=ex.message)
notification = model.create_repo_notification(repo, json['event'], json['method'],
json['config'])
new_notification = model.notification.create_repo_notification(repo, parsed['event'],
parsed['method'], parsed['config'])
resp = notification_view(notification)
resp = notification_view(new_notification)
log_action('add_repo_notification', namespace,
{'repo': repository, 'notification_id': notification.uuid,
'event': json['event'], 'method': json['method']},
{'repo': repository, 'notification_id': new_notification.uuid,
'event': parsed['event'], 'method': parsed['method']},
repo=repo)
return resp, 201
@ -92,7 +92,7 @@ class RepositoryNotificationList(RepositoryParamResource):
@nickname('listRepoNotifications')
def get(self, namespace, repository):
""" List the notifications for the specified repository. """
notifications = model.list_repo_notifications(namespace, repository)
notifications = model.notification.list_repo_notifications(namespace, repository)
return {
'notifications': [notification_view(n) for n in notifications]
}
@ -108,25 +108,25 @@ class RepositoryNotification(RepositoryParamResource):
def get(self, namespace, repository, uuid):
""" Get information for the specified notification. """
try:
notification = model.get_repo_notification(uuid)
found = model.notification.get_repo_notification(uuid)
except model.InvalidNotificationException:
raise NotFound()
if (notification.repository.namespace_user.username != namespace or
notification.repository.name != repository):
if (found.repository.namespace_user.username != namespace or
found.repository.name != repository):
raise NotFound()
return notification_view(notification)
return notification_view(found)
@require_repo_admin
@nickname('deleteRepoNotification')
def delete(self, namespace, repository, uuid):
""" Deletes the specified notification. """
notification = model.delete_repo_notification(namespace, repository, uuid)
deleted = model.notification.delete_repo_notification(namespace, repository, uuid)
log_action('delete_repo_notification', namespace,
{'repo': repository, 'notification_id': uuid,
'event': notification.event.name, 'method': notification.method.name},
repo=model.get_repository(namespace, repository))
'event': deleted.event.name, 'method': deleted.method.name},
repo=model.repository.get_repository(namespace, repository))
return 'No Content', 204
@ -141,18 +141,18 @@ class TestRepositoryNotification(RepositoryParamResource):
def post(self, namespace, repository, uuid):
""" Queues a test notification for this repository. """
try:
notification = model.get_repo_notification(uuid)
test_note = model.notification.get_repo_notification(uuid)
except model.InvalidNotificationException:
raise NotFound()
if (notification.repository.namespace_user.username != namespace or
notification.repository.name != repository):
if (test_note.repository.namespace_user.username != namespace or
test_note.repository.name != repository):
raise NotFound()
event_info = NotificationEvent.get_event(notification.event.name)
sample_data = event_info.get_sample_data(repository=notification.repository)
notification_data = build_notification_data(notification, sample_data)
notification_queue.put([notification.repository.namespace_user.username, repository,
notification.event.name], json.dumps(notification_data))
event_info = NotificationEvent.get_event(test_note.event.name)
sample_data = event_info.get_sample_data(repository=test_note.repository)
notification_data = build_notification_data(test_note, sample_data)
notification_queue.put([test_note.repository.namespace_user.username, repository,
test_note.event.name], json.dumps(notification_data))
return {}

View file

@ -45,7 +45,7 @@ class RepositoryTokenList(RepositoryParamResource):
@nickname('listRepoTokens')
def get(self, namespace, repository):
""" List the tokens for the specified repository. """
tokens = model.get_repository_delegate_tokens(namespace, repository)
tokens = model.token.get_repository_delegate_tokens(namespace, repository)
return {
'tokens': {token.code: token_view(token) for token in tokens}
@ -58,12 +58,11 @@ class RepositoryTokenList(RepositoryParamResource):
""" Create a new repository token. """
token_params = request.get_json()
token = model.create_delegate_token(namespace, repository,
token_params['friendlyName'])
token = model.token.create_delegate_token(namespace, repository, token_params['friendlyName'])
log_action('add_repo_accesstoken', namespace,
{'repo': repository, 'token': token_params['friendlyName']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return token_view(token), 201
@ -99,7 +98,7 @@ class RepositoryToken(RepositoryParamResource):
def get(self, namespace, repository, code):
""" Fetch the specified repository token information. """
try:
perm = model.get_repo_delegate_token(namespace, repository, code)
perm = model.token.get_repo_delegate_token(namespace, repository, code)
except model.InvalidTokenException:
raise NotFound()
@ -115,13 +114,13 @@ class RepositoryToken(RepositoryParamResource):
logger.debug('Setting permission to: %s for code %s' %
(new_permission['role'], code))
token = model.set_repo_delegate_token_role(namespace, repository, code,
new_permission['role'])
token = model.token.set_repo_delegate_token_role(namespace, repository, code,
new_permission['role'])
log_action('change_repo_permission', namespace,
{'repo': repository, 'token': token.friendly_name, 'code': code,
'role': new_permission['role']},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return token_view(token)
@ -129,11 +128,11 @@ class RepositoryToken(RepositoryParamResource):
@nickname('deleteToken')
def delete(self, namespace, repository, code):
""" Delete the repository token. """
token = model.delete_delegate_token(namespace, repository, code)
token = model.token.delete_delegate_token(namespace, repository, code)
log_action('delete_repo_accesstoken', namespace,
{'repo': repository, 'token': token.friendly_name,
'code': code},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return 'Deleted', 204

View file

@ -1,8 +1,8 @@
""" Manage user and organization robot accounts. """
from endpoints.api import (resource, nickname, ApiResource, log_action, related_user_resource,
Unauthorized, require_user_admin, internal_only, require_scope,
path_param, parse_args, truthy_bool, query_param)
Unauthorized, require_user_admin, require_scope, path_param, parse_args,
truthy_bool, query_param)
from auth.permissions import AdministerOrganizationPermission, OrganizationMemberPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
@ -30,7 +30,8 @@ def permission_view(permission):
def robots_list(prefix, include_permissions=False):
tuples = model.list_entity_robot_permission_teams(prefix, include_permissions=include_permissions)
tuples = model.user.list_entity_robot_permission_teams(prefix,
include_permissions=include_permissions)
robots = {}
robot_teams = set()
@ -85,7 +86,8 @@ class UserRobotList(ApiResource):
@resource('/v1/user/robots/<robot_shortname>')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
class UserRobot(ApiResource):
""" Resource for managing a user's robots. """
@require_user_admin
@ -93,7 +95,7 @@ class UserRobot(ApiResource):
def get(self, robot_shortname):
""" Returns the user's robot with the specified name. """
parent = get_authenticated_user()
robot, password = model.get_robot(robot_shortname, parent)
robot, password = model.user.get_robot(robot_shortname, parent)
return robot_view(robot.username, password)
@require_user_admin
@ -101,7 +103,7 @@ class UserRobot(ApiResource):
def put(self, robot_shortname):
""" Create a new user robot with the specified name. """
parent = get_authenticated_user()
robot, password = model.create_robot(robot_shortname, parent)
robot, password = model.user.create_robot(robot_shortname, parent)
log_action('create_robot', parent.username, {'robot': robot_shortname})
return robot_view(robot.username, password), 201
@ -110,7 +112,7 @@ class UserRobot(ApiResource):
def delete(self, robot_shortname):
""" Delete an existing robot. """
parent = get_authenticated_user()
model.delete_robot(format_robot_username(parent.username, robot_shortname))
model.user.delete_robot(format_robot_username(parent.username, robot_shortname))
log_action('delete_robot', parent.username, {'robot': robot_shortname})
return 'Deleted', 204
@ -137,7 +139,8 @@ class OrgRobotList(ApiResource):
@resource('/v1/organization/<orgname>/robots/<robot_shortname>')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
@related_user_resource(UserRobot)
class OrgRobot(ApiResource):
""" Resource for managing an organization's robots. """
@ -147,8 +150,8 @@ class OrgRobot(ApiResource):
""" Returns the organization's robot with the specified name. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.get_robot(robot_shortname, parent)
parent = model.organization.get_organization(orgname)
robot, password = model.user.get_robot(robot_shortname, parent)
return robot_view(robot.username, password)
raise Unauthorized()
@ -159,9 +162,9 @@ class OrgRobot(ApiResource):
""" Create a new robot in the organization. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.create_robot(robot_shortname, parent)
log_action('create_robot', orgname, {'robot': robot_shortname})
parent = model.organization.get_organization(orgname)
robot, password = model.user.create_robot(robot_shortname, parent)
log_action('create_robot', orgname, {'robot': robot_shortname})
return robot_view(robot.username, password), 201
raise Unauthorized()
@ -172,7 +175,7 @@ class OrgRobot(ApiResource):
""" Delete an existing organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
model.delete_robot(format_robot_username(orgname, robot_shortname))
model.user.delete_robot(format_robot_username(orgname, robot_shortname))
log_action('delete_robot', orgname, {'robot': robot_shortname})
return 'Deleted', 204
@ -180,7 +183,8 @@ class OrgRobot(ApiResource):
@resource('/v1/user/robots/<robot_shortname>/permissions')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
class UserRobotPermissions(ApiResource):
""" Resource for listing the permissions a user's robot has in the system. """
@require_user_admin
@ -188,8 +192,8 @@ class UserRobotPermissions(ApiResource):
def get(self, robot_shortname):
""" Returns the list of repository permissions for the user's robot. """
parent = get_authenticated_user()
robot, password = model.get_robot(robot_shortname, parent)
permissions = model.list_robot_permissions(robot.username)
robot, _ = model.user.get_robot(robot_shortname, parent)
permissions = model.permission.list_robot_permissions(robot.username)
return {
'permissions': [permission_view(permission) for permission in permissions]
@ -198,7 +202,8 @@ class UserRobotPermissions(ApiResource):
@resource('/v1/organization/<orgname>/robots/<robot_shortname>/permissions')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
@related_user_resource(UserRobotPermissions)
class OrgRobotPermissions(ApiResource):
""" Resource for listing the permissions an org's robot has in the system. """
@ -208,9 +213,9 @@ class OrgRobotPermissions(ApiResource):
""" Returns the list of repository permissions for the org's robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.get_robot(robot_shortname, parent)
permissions = model.list_robot_permissions(robot.username)
parent = model.organization.get_organization(orgname)
robot, _ = model.user.get_robot(robot_shortname, parent)
permissions = model.permission.list_robot_permissions(robot.username)
return {
'permissions': [permission_view(permission) for permission in permissions]
@ -220,7 +225,8 @@ class OrgRobotPermissions(ApiResource):
@resource('/v1/user/robots/<robot_shortname>/regenerate')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
class RegenerateUserRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
@require_user_admin
@ -228,14 +234,15 @@ class RegenerateUserRobot(ApiResource):
def post(self, robot_shortname):
""" Regenerates the token for a user's robot. """
parent = get_authenticated_user()
robot, password = model.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', parent.username, {'robot': robot_shortname})
robot, password = model.user.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', parent.username, {'robot': robot_shortname})
return robot_view(robot.username, password)
@resource('/v1/organization/<orgname>/robots/<robot_shortname>/regenerate')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
@path_param('robot_shortname',
'The short name for the robot, without any user or organization prefix')
@related_user_resource(RegenerateUserRobot)
class RegenerateOrgRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
@ -245,9 +252,9 @@ class RegenerateOrgRobot(ApiResource):
""" Regenerates the token for an organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
parent = model.get_organization(orgname)
robot, password = model.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', orgname, {'robot': robot_shortname})
parent = model.organization.get_organization(orgname)
robot, password = model.user.regenerate_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', orgname, {'robot': robot_shortname})
return robot_view(robot.username, password)
raise Unauthorized()

View file

@ -3,12 +3,12 @@
from endpoints.api import (ApiResource, parse_args, query_param, truthy_bool, nickname, resource,
require_scope, path_param)
from data import model
from auth.permissions import (OrganizationMemberPermission, ViewTeamPermission,
ReadRepositoryPermission, UserAdminPermission,
AdministerOrganizationPermission, ReadRepositoryPermission)
from auth.permissions import (OrganizationMemberPermission, ReadRepositoryPermission,
UserAdminPermission, AdministerOrganizationPermission,
ReadRepositoryPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes
from app import avatar, get_app_url
from app import avatar
from operator import itemgetter
from stringscore import liquidmetal
from util.names import parse_robot_username
@ -35,7 +35,7 @@ class EntitySearch(ApiResource):
organization = None
try:
organization = model.get_organization(namespace_name)
organization = model.organization.get_organization(namespace_name)
# namespace name was an org
permission = OrganizationMemberPermission(namespace_name)
@ -43,7 +43,7 @@ class EntitySearch(ApiResource):
robot_namespace = namespace_name
if args['includeTeams']:
teams = model.get_matching_teams(prefix, organization)
teams = model.team.get_matching_teams(prefix, organization)
if args['includeOrgs'] and AdministerOrganizationPermission(namespace_name) \
and namespace_name.startswith(prefix):
@ -54,7 +54,7 @@ class EntitySearch(ApiResource):
'avatar': avatar.get_data_for_org(organization),
}]
except model.InvalidOrganizationException:
except model.organization.InvalidOrganizationException:
# namespace name was a user
user = get_authenticated_user()
if user and user.username == namespace_name:
@ -63,7 +63,7 @@ class EntitySearch(ApiResource):
if admin_permission.can():
robot_namespace = namespace_name
users = model.get_matching_users(prefix, robot_namespace, organization)
users = model.user.get_matching_users(prefix, robot_namespace, organization)
def entity_team_view(team):
result = {
@ -95,18 +95,6 @@ class EntitySearch(ApiResource):
}
def team_view(orgname, team):
view_permission = ViewTeamPermission(orgname, team.name)
role = model.get_team_org_role(team).name
return {
'id': team.id,
'name': team.name,
'description': team.description,
'can_view': view_permission.can(),
'role': role
}
@resource('/v1/find/repository')
class FindRepositories(ApiResource):
""" Resource for finding repositories. """
@ -130,7 +118,7 @@ class FindRepositories(ApiResource):
if user is not None:
username = user.username
matching = model.get_matching_repositories(prefix, username)
matching = model.repository.get_matching_repositories(prefix, username)
return {
'repositories': [repo_view(repo) for repo in matching
if (repo.visibility.name == 'public' or
@ -174,7 +162,7 @@ def search_entity_view(username, entity, get_short_name=None):
def conduct_team_search(username, query, encountered_teams, results):
""" Finds the matching teams where the user is a member. """
matching_teams = model.get_matching_user_teams(query, get_authenticated_user(), limit=5)
matching_teams = model.team.get_matching_user_teams(query, get_authenticated_user(), limit=5)
for team in matching_teams:
if team.id in encountered_teams:
continue
@ -193,7 +181,7 @@ def conduct_team_search(username, query, encountered_teams, results):
def conduct_admined_team_search(username, query, encountered_teams, results):
""" Finds matching teams in orgs admined by the user. """
matching_teams = model.get_matching_admined_teams(query, get_authenticated_user(), limit=5)
matching_teams = model.team.get_matching_admined_teams(query, get_authenticated_user(), limit=5)
for team in matching_teams:
if team.id in encountered_teams:
continue
@ -212,14 +200,15 @@ def conduct_admined_team_search(username, query, encountered_teams, results):
def conduct_repo_search(username, query, results):
""" Finds matching repositories. """
def can_read(repository):
if repository.is_public:
def can_read(repo):
if repo.is_public:
return True
return ReadRepositoryPermission(repository.namespace_user.username, repository.name).can()
return ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
only_public = username is None
matching_repos = model.get_sorted_matching_repositories(query, only_public, can_read, limit=5)
matching_repos = model.repository.get_sorted_matching_repositories(query, only_public, can_read,
limit=5)
for repo in matching_repos:
repo_score = math.log(repo.count or 1, 10) or 1
@ -242,7 +231,7 @@ def conduct_repo_search(username, query, results):
def conduct_namespace_search(username, query, results):
""" Finds matching users and organizations. """
matching_entities = model.get_matching_user_namespaces(query, username, limit=5)
matching_entities = model.user.get_matching_user_namespaces(query, username, limit=5)
for entity in matching_entities:
results.append(search_entity_view(username, entity))
@ -252,7 +241,7 @@ def conduct_robot_search(username, query, results):
def get_short_name(name):
return parse_robot_username(name)[1]
matching_robots = model.get_matching_robots(query, username, limit=5)
matching_robots = model.user.get_matching_robots(query, username, limit=5)
for robot in matching_robots:
results.append(search_entity_view(username, robot, get_short_name))

View file

@ -11,6 +11,7 @@ from data.billing import PLANS
import features
logger = logging.getLogger(__name__)
@ -50,12 +51,12 @@ def subscribe(user, plan, token, require_business_plan):
raise NotFound()
if (require_business_plan and not plan_found['bus_features'] and not
plan_found['price'] == 0):
plan_found['price'] == 0):
logger.warning('Business attempting to subscribe to personal plan: %s',
user.username)
raise request_error(message='No matching plan found')
private_repos = model.get_private_repo_count(user.username)
private_repos = model.user.get_private_repo_count(user.username)
# This is the default response
response_json = {

View file

@ -2,10 +2,9 @@
import logging
import os
import json
import signal
from flask import abort, Response
from flask import abort
from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if,
require_fresh_login, request, validate_json_request, verify_not_prod)
@ -14,17 +13,17 @@ from app import app, CONFIG_PROVIDER, superusers
from data import model
from data.database import configure
from auth.permissions import SuperUserPermission
from auth.auth_context import get_authenticated_user
from data.database import User
from util.config.configutil import add_enterprise_config_defaults
from util.config.provider import CannotWriteConfigException
from util.config.validator import validate_service_for_config, CONFIG_FILENAMES
from data.runmigration import run_alembic_migration
import features
logger = logging.getLogger(__name__)
def database_is_valid():
""" Returns whether the database, as configured, is valid. """
if app.config['TESTING']:
@ -310,7 +309,7 @@ class SuperUserCreateInitialSuperUser(ApiResource):
email = data['email']
# Create the user in the database.
superuser = model.create_user(username, password, email, auto_verify=True)
superuser = model.user.create_user(username, password, email, auto_verify=True)
# Add the user to the config.
config_object = CONFIG_PROVIDER.get_yaml()

View file

@ -2,33 +2,31 @@
import string
import logging
import json
import os
from random import SystemRandom
from app import app, avatar, superusers, authentication
from flask import request
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, NotFound, require_user_admin, format_date,
InvalidToken, require_scope, format_date, hide_if, show_if, parse_args,
query_param, abort, require_fresh_login, path_param, verify_not_prod)
from endpoints.api.logs import get_logs
from data import model
from auth.permissions import SuperUserPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
from util.useremails import send_confirmation_email, send_recovery_email
import features
from app import app, avatar, superusers, authentication
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
internal_only, require_scope, show_if, parse_args,
query_param, abort, require_fresh_login, path_param, verify_not_prod)
from endpoints.api.logs import get_logs
from data import model
from auth.permissions import SuperUserPermission
from auth import scopes
from util.useremails import send_confirmation_email, send_recovery_email
logger = logging.getLogger(__name__)
def get_immediate_subdirectories(directory):
return [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))]
def get_services():
services = set(get_immediate_subdirectories(app.config['SYSTEM_SERVICES_PATH']))
services = services - set(app.config['SYSTEM_SERVICE_BLACKLIST'])
@ -55,7 +53,7 @@ class SuperUserGetLogsForService(ApiResource):
with open(app.config['SYSTEM_LOGS_FILE'], 'r') as f:
logs = [line for line in f if line.find(service + '[') >= 0]
except Exception as ex:
except Exception:
logger.exception('Cannot read logs')
abort(400)
@ -102,7 +100,6 @@ class SuperUserLogs(ApiResource):
def get(self, args):
""" List the usage logs for the current system. """
if SuperUserPermission().can():
performer_name = args['performer']
start_time = args['starttime']
end_time = args['endtime']
@ -144,7 +141,7 @@ class ChangeLog(ApiResource):
def get(self):
""" Returns the change log for this installation. """
if SuperUserPermission().can():
with open ('CHANGELOG.md', 'r') as f:
with open('CHANGELOG.md', 'r') as f:
return {
'log': f.read()
}
@ -165,7 +162,7 @@ class SuperUserOrganizationList(ApiResource):
def get(self):
""" Returns a list of all organizations in the system. """
if SuperUserPermission().can():
orgs = model.get_organizations()
orgs = model.organization.get_organizations()
return {
'organizations': [org_view(org) for org in orgs]
}
@ -204,7 +201,7 @@ class SuperUserList(ApiResource):
def get(self):
""" Returns a list of all users in the system. """
if SuperUserPermission().can():
users = model.get_active_users()
users = model.user.get_active_users()
return {
'users': [user_view(user) for user in users]
}
@ -226,14 +223,14 @@ class SuperUserList(ApiResource):
# Generate a temporary password for the user.
random = SystemRandom()
password = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(32)])
password = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(32)])
# Create the user.
user = model.create_user(username, password, email, auto_verify=not features.MAILING)
user = model.user.create_user(username, password, email, auto_verify=not features.MAILING)
# If mailing is turned on, send the user a verification email.
if features.MAILING:
confirmation = model.create_confirm_email_code(user)
confirmation = model.user.create_confirm_email_code(user)
send_confirmation_email(user.username, user.email, confirmation.code)
return {
@ -258,14 +255,14 @@ class SuperUserSendRecoveryEmail(ApiResource):
@require_scope(scopes.SUPERUSER)
def post(self, username):
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)
if superusers.is_superuser(username):
abort(403)
abort(403)
code = model.create_reset_password_email_code(user.email)
code = model.user.create_reset_password_email_code(user.email)
send_recovery_email(user.email, code.code)
return {
'email': user.email
@ -309,7 +306,7 @@ class SuperUserManagement(ApiResource):
def get(self, username):
""" Returns information about the specified user. """
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)
@ -324,14 +321,14 @@ class SuperUserManagement(ApiResource):
def delete(self, username):
""" Deletes the specified user. """
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)
if superusers.is_superuser(username):
abort(403)
abort(403)
model.delete_user(user)
model.user.delete_user(user)
return 'Deleted', 204
abort(403)
@ -344,26 +341,26 @@ class SuperUserManagement(ApiResource):
def put(self, username):
""" Updates information about the specified user. """
if SuperUserPermission().can():
user = model.get_nonrobot_user(username)
if not user:
abort(404)
user = model.user.get_nonrobot_user(username)
if not user:
abort(404)
if superusers.is_superuser(username):
abort(403)
if superusers.is_superuser(username):
abort(403)
user_data = request.get_json()
if 'password' in user_data:
model.change_password(user, user_data['password'])
user_data = request.get_json()
if 'password' in user_data:
model.user.change_password(user, user_data['password'])
if 'email' in user_data:
model.update_email(user, user_data['email'], auto_verify=True)
if 'email' in user_data:
model.user.update_email(user, user_data['email'], auto_verify=True)
if 'enabled' in user_data:
# Disable/enable the user.
user.enabled = bool(user_data['enabled'])
user.save()
if 'enabled' in user_data:
# Disable/enable the user.
user.enabled = bool(user_data['enabled'])
user.save()
return user_view(user, password=user_data.get('password'))
return user_view(user, password=user_data.get('password'))
abort(403)
@ -395,9 +392,9 @@ class SuperUserOrganizationManagement(ApiResource):
def delete(self, name):
""" Deletes the specified organization. """
if SuperUserPermission().can():
org = model.get_organization(name)
org = model.organization.get_organization(name)
model.delete_user(org)
model.user.delete_user(org)
return 'Deleted', 204
abort(403)
@ -410,12 +407,12 @@ class SuperUserOrganizationManagement(ApiResource):
def put(self, name):
""" Updates information about the specified user. """
if SuperUserPermission().can():
org = model.get_organization(name)
org_data = request.get_json()
org = model.organization.get_organization(name)
org_data = request.get_json()
if 'name' in org_data:
org = model.change_username(org.id, org_data['name'])
if 'name' in org_data:
org = model.user.change_username(org.id, org_data['name'])
return org_view(org)
return org_view(org)
abort(403)

View file

@ -4,14 +4,11 @@ from flask import request, abort
from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
RepositoryParamResource, log_action, NotFound, validate_json_request,
path_param, format_date, parse_args, query_param)
path_param, parse_args, query_param)
from endpoints.api.image import image_view
from data import model
from auth.auth_context import get_authenticated_user
from datetime import datetime
@resource('/v1/repository/<repopath:repository>/tag/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@ -25,7 +22,7 @@ class ListRepositoryTags(RepositoryParamResource):
@query_param('page', 'Page index for the results. Default 1.', type=int, default=1)
@nickname('listRepoTags')
def get(self, args, namespace, repository):
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
abort(404)
@ -51,8 +48,8 @@ class ListRepositoryTags(RepositoryParamResource):
# Note: We ask for limit+1 here, so we can check to see if there are
# additional pages of results.
tags = model.list_repository_tag_history(repo, page=page, size=limit+1,
specific_tag=specific_tag)
tags = model.tag.list_repository_tag_history(repo, page=page, size=limit+1,
specific_tag=specific_tag)
tags = list(tags)
return {
@ -90,27 +87,27 @@ class RepositoryTag(RepositoryParamResource):
def put(self, namespace, repository, tag):
""" Change which image a tag points to or create a new tag."""
image_id = request.get_json()['image']
image = model.get_repo_image(namespace, repository, image_id)
image = model.image.get_repo_image(namespace, repository, image_id)
if not image:
raise NotFound()
original_image_id = None
try:
original_tag_image = model.get_tag_image(namespace, repository, tag)
original_tag_image = model.tag.get_tag_image(namespace, repository, tag)
if original_tag_image:
original_image_id = original_tag_image.docker_image_id
except model.DataModelException:
# This is a new tag.
pass
model.create_or_update_tag(namespace, repository, tag, image_id)
model.garbage_collect_repository(namespace, repository)
model.tag.create_or_update_tag(namespace, repository, tag, image_id)
model.repository.garbage_collect_repository(namespace, repository)
username = get_authenticated_user().username
log_action('move_tag' if original_image_id else 'create_tag', namespace,
{'username': username, 'repo': repository, 'tag': tag,
'image': image_id, 'original_image': original_image_id},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return 'Updated', 201
@ -118,13 +115,13 @@ class RepositoryTag(RepositoryParamResource):
@nickname('deleteFullTag')
def delete(self, namespace, repository, tag):
""" Delete the specified repository tag. """
model.delete_tag(namespace, repository, tag)
model.garbage_collect_repository(namespace, repository)
model.tag.delete_tag(namespace, repository, tag)
model.repository.garbage_collect_repository(namespace, repository)
username = get_authenticated_user().username
log_action('delete_tag', namespace,
{'username': username, 'repo': repository, 'tag': tag},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return 'Deleted', 204
@ -139,11 +136,11 @@ class RepositoryTagImages(RepositoryParamResource):
def get(self, namespace, repository, tag):
""" List the images for the specified repository tag. """
try:
tag_image = model.get_tag_image(namespace, repository, tag)
tag_image = model.tag.get_tag_image(namespace, repository, tag)
except model.DataModelException:
raise NotFound()
parent_images = model.get_parent_images(namespace, repository, tag_image)
parent_images = model.image.get_parent_images(namespace, repository, tag_image)
image_map = {}
for image in parent_images:
image_map[str(image.id)] = image
@ -186,21 +183,21 @@ class RevertTag(RepositoryParamResource):
def post(self, namespace, repository, tag):
""" Reverts a repository tag back to a previous image in the repository. """
try:
tag_image = model.get_tag_image(namespace, repository, tag)
tag_image = model.tag.get_tag_image(namespace, repository, tag)
except model.DataModelException:
raise NotFound()
# Revert the tag back to the previous image.
image_id = request.get_json()['image']
model.revert_tag(tag_image.repository, tag, image_id)
model.garbage_collect_repository(namespace, repository)
model.tag.revert_tag(tag_image.repository, tag, image_id)
model.repository.garbage_collect_repository(namespace, repository)
# Log the reversion.
username = get_authenticated_user().username
log_action('revert_tag', namespace,
{'username': username, 'repo': repository, 'tag': tag,
'image': image_id, 'original_image': tag_image.docker_image_id},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
return {
'image_id': image_id,

View file

@ -2,6 +2,8 @@
from flask import request
import features
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
log_action, Unauthorized, NotFound, internal_only, require_scope,
path_param, query_param, truthy_bool, parse_args, require_user_admin,
@ -13,12 +15,11 @@ from data import model
from util.useremails import send_org_invite_email
from app import avatar
import features
def try_accept_invite(code, user):
(team, inviter) = model.confirm_team_invite(code, user)
(team, inviter) = model.team.confirm_team_invite(code, user)
model.delete_matching_notifications(user, 'org_team_invite', code=code)
model.notification.delete_matching_notifications(user, 'org_team_invite', code=code)
orgname = team.organization.username
log_action('org_team_member_invite_accepted', orgname, {
@ -31,15 +32,15 @@ def try_accept_invite(code, user):
def handle_addinvite_team(inviter, team, user=None, email=None):
invite = model.add_or_invite_to_team(inviter, team, user, email,
requires_invite = features.MAILING)
invite = model.team.add_or_invite_to_team(inviter, team, user, email,
requires_invite=features.MAILING)
if not invite:
# User was added to the team directly.
return
orgname = team.organization.username
if user:
model.create_notification('org_team_invite', user, metadata = {
model.notification.create_notification('org_team_invite', user, metadata={
'code': invite.invite_token,
'inviter': inviter.username,
'org': orgname,
@ -52,7 +53,7 @@ def handle_addinvite_team(inviter, team, user=None, email=None):
def team_view(orgname, team):
view_permission = ViewTeamPermission(orgname, team.name)
role = model.get_team_org_role(team).name
role = model.team.get_team_org_role(team).name
return {
'name': team.name,
'description': team.description,
@ -126,15 +127,15 @@ class OrganizationTeam(ApiResource):
details = request.get_json()
is_existing = False
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
is_existing = True
except model.InvalidTeamException:
# Create the new team.
description = details['description'] if 'description' in details else ''
role = details['role'] if 'role' in details else 'member'
org = model.get_organization(orgname)
team = model.create_team(teamname, org, role, description)
org = model.organization.get_organization(orgname)
team = model.team.create_team(teamname, org, role, description)
log_action('org_create_team', orgname, {'team': teamname})
if is_existing:
@ -146,10 +147,10 @@ class OrganizationTeam(ApiResource):
{'team': teamname, 'description': team.description})
if 'role' in details:
role = model.get_team_org_role(team).name
role = model.team.get_team_org_role(team).name
if role != details['role']:
team = model.set_team_org_permission(team, details['role'],
get_authenticated_user().username)
team = model.team.set_team_org_permission(team, details['role'],
get_authenticated_user().username)
log_action('org_set_team_role', orgname, {'team': teamname, 'role': details['role']})
return team_view(orgname, team), 200
@ -162,7 +163,7 @@ class OrganizationTeam(ApiResource):
""" Delete the specified team. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
model.remove_team(orgname, teamname, get_authenticated_user().username)
model.team.remove_team(orgname, teamname, get_authenticated_user().username)
log_action('org_delete_team', orgname, {'team': teamname})
return 'Deleted', 204
@ -176,7 +177,8 @@ class TeamMemberList(ApiResource):
""" Resource for managing the list of members for a team. """
@require_scope(scopes.ORG_ADMIN)
@parse_args
@query_param('includePending', 'Whether to include pending members', type=truthy_bool, default=False)
@query_param('includePending', 'Whether to include pending members', type=truthy_bool,
default=False)
@nickname('getOrganizationTeamMembers')
def get(self, args, orgname, teamname):
""" Retrieve the list of members for the specified team. """
@ -186,15 +188,15 @@ class TeamMemberList(ApiResource):
if view_permission.can():
team = None
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()
members = model.get_organization_team_members(team.id)
members = model.organization.get_organization_team_members(team.id)
invites = []
if args['includePending'] and edit_permission.can():
invites = model.get_organization_team_member_invites(team.id)
invites = model.team.get_organization_team_member_invites(team.id)
data = {
'members': [member_view(m) for m in members] + [invite_view(i) for i in invites],
@ -224,12 +226,12 @@ class TeamMember(ApiResource):
# Find the team.
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()
# Find the user.
user = model.get_user(membername)
user = model.user.get_user(membername)
if not user:
raise request_error(message='Unknown user')
@ -263,18 +265,18 @@ class TeamMember(ApiResource):
# Find the team.
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()
# Find the member.
member = model.get_user(membername)
member = model.user.get_user(membername)
if not member:
raise NotFound()
# First attempt to delete an invite for the user to this team. If none found,
# then we try to remove the user directly.
if model.delete_team_user_invite(team, member):
if model.team.delete_team_user_invite(team, member):
log_action('org_delete_team_member_invite', orgname, {
'user': membername,
'team': teamname,
@ -282,7 +284,7 @@ class TeamMember(ApiResource):
})
return 'Deleted', 204
model.remove_user_from_team(orgname, teamname, membername, invoking_user)
model.team.remove_user_from_team(orgname, teamname, membername, invoking_user)
log_action('org_remove_team_member', orgname, {'member': membername, 'team': teamname})
return 'Deleted', 204
@ -303,7 +305,7 @@ class InviteTeamMember(ApiResource):
# Find the team.
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()
@ -329,12 +331,12 @@ class InviteTeamMember(ApiResource):
# Find the team.
try:
team = model.get_organization_team(orgname, teamname)
team = model.team.get_organization_team(orgname, teamname)
except model.InvalidTeamException:
raise NotFound()
# Delete the invite.
model.delete_team_email_invite(team, email)
model.team.delete_team_email_invite(team, email)
log_action('org_delete_team_member_invite', orgname, {
'email': email,
'team': teamname,
@ -369,15 +371,16 @@ class TeamMemberInvite(ApiResource):
@require_user_admin
def delete(self, code):
""" Delete an existing member of a team. """
(team, inviter) = model.delete_team_invite(code, get_authenticated_user())
(team, inviter) = model.team.delete_team_invite(code, user_obj=get_authenticated_user())
model.delete_matching_notifications(get_authenticated_user(), 'org_team_invite', code=code)
model.notification.delete_matching_notifications(get_authenticated_user(), 'org_team_invite',
code=code)
orgname = team.organization.username
log_action('org_team_member_invite_declined', orgname, {
'member': get_authenticated_user().username,
'team': team.name,
'inviter': inviter.username
'member': get_authenticated_user().username,
'team': team.name,
'inviter': inviter.username
})
return 'Deleted', 204

View file

@ -12,14 +12,14 @@ from endpoints.api import (RepositoryParamResource, nickname, resource, require_
log_action, request_error, query_param, parse_args, internal_only,
validate_json_request, api, Unauthorized, NotFound, InvalidRequest,
path_param)
from endpoints.api.build import (build_status_view, trigger_view, RepositoryBuildStatus,
get_trigger_config)
from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus
from endpoints.building import start_build
from endpoints.trigger import (BuildTriggerHandler, TriggerDeactivationException,
TriggerActivationException, EmptyRepositoryException,
RepositoryReadException, TriggerStartException)
from data import model
from auth.permissions import UserAdminPermission, AdministerOrganizationPermission, ReadRepositoryPermission
from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
ReadRepositoryPermission)
from util.names import parse_robot_username
from util.dockerfileparse import parse_dockerfile
@ -41,7 +41,7 @@ class BuildTriggerList(RepositoryParamResource):
@nickname('listBuildTriggers')
def get(self, namespace, repository):
""" List the triggers for the specified repository. """
triggers = model.list_build_triggers(namespace, repository)
triggers = model.build.list_build_triggers(namespace, repository)
return {
'triggers': [trigger_view(trigger, can_admin=True) for trigger in triggers]
}
@ -58,7 +58,7 @@ class BuildTrigger(RepositoryParamResource):
def get(self, namespace, repository, trigger_uuid):
""" Get information for the specified build trigger. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -69,7 +69,7 @@ class BuildTrigger(RepositoryParamResource):
def delete(self, namespace, repository, trigger_uuid):
""" Delete the specified build trigger. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -84,7 +84,7 @@ class BuildTrigger(RepositoryParamResource):
log_action('delete_repo_trigger', namespace,
{'repo': repository, 'trigger_id': trigger_uuid,
'service': trigger.service.name},
repo=model.get_repository(namespace, repository))
repo=model.repository.get_repository(namespace, repository))
trigger.delete_instance(recursive=True)
@ -114,7 +114,7 @@ class BuildTriggerSubdirs(RepositoryParamResource):
def post(self, namespace, repository, trigger_uuid):
""" List the subdirectories available for the specified build trigger and source. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -175,7 +175,7 @@ class BuildTriggerActivate(RepositoryParamResource):
def post(self, namespace, repository, trigger_uuid):
""" Activate the specified build trigger. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -188,8 +188,9 @@ class BuildTriggerActivate(RepositoryParamResource):
# Update the pull robot (if any).
pull_robot_name = request.get_json().get('pull_robot', None)
if pull_robot_name:
pull_robot = model.lookup_robot(pull_robot_name)
if not pull_robot:
try:
pull_robot = model.user.lookup_robot(pull_robot_name)
except model.InvalidRobotException:
raise NotFound()
# Make sure the user has administer permissions for the robot's namespace.
@ -208,8 +209,8 @@ class BuildTriggerActivate(RepositoryParamResource):
new_config_dict = request.get_json()['config']
write_token_name = 'Build Trigger: %s' % trigger.service.name
write_token = model.create_delegate_token(namespace, repository, write_token_name,
'write')
write_token = model.token.create_delegate_token(namespace, repository, write_token_name,
'write')
try:
path = url_for('webhooks.build_trigger_webhook', trigger_uuid=trigger.uuid)
@ -233,7 +234,7 @@ class BuildTriggerActivate(RepositoryParamResource):
trigger.save()
# Log the trigger setup.
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
log_action('setup_repo_trigger', namespace,
{'repo': repository, 'namespace': namespace,
'trigger_id': trigger.uuid, 'service': trigger.service.name,
@ -275,7 +276,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
def post(self, namespace, repository, trigger_uuid):
""" Analyze the specified build trigger configuration. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -324,7 +325,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
}
(base_namespace, base_repository) = result
found_repository = model.get_repository(base_namespace, base_repository)
found_repository = model.repository.get_repository(base_namespace, base_repository)
if not found_repository:
return {
'status': 'error',
@ -361,7 +362,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
(robot_namespace, shortname) = parse_robot_username(user.username)
return AdministerOrganizationPermission(robot_namespace).can()
repo_users = list(model.get_all_repo_users_transitive(base_namespace, base_repository))
repo_users = list(model.user.get_all_repo_users_transitive(base_namespace, base_repository))
read_robots = [robot_view(user) for user in repo_users if is_valid_robot(user)]
return {
@ -399,7 +400,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
'properties': {
'branch_name': {
'type': 'string',
'description': '(SCM only) If specified, the name of the branch to build.'
'description': '(SCM only) If specified, the name of the branch to model.build.'
},
'commit_sha': {
'type': 'string',
@ -415,7 +416,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
def post(self, namespace, repository, trigger_uuid):
""" Manually start a build from the specified trigger. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -424,8 +425,8 @@ class ActivateBuildTrigger(RepositoryParamResource):
raise InvalidRequest('Trigger is not active.')
try:
repo = model.get_repository(namespace, repository)
pull_robot_name = model.get_pull_robot_name(trigger)
repo = model.repository.get_repository(namespace, repository)
pull_robot_name = model.build.get_pull_robot_name(trigger)
run_parameters = request.get_json()
prepared = handler.manual_start(run_parameters=run_parameters)
@ -454,10 +455,9 @@ class TriggerBuildList(RepositoryParamResource):
def get(self, args, namespace, repository, trigger_uuid):
""" List the builds started by the specified trigger. """
limit = args['limit']
builds = list(model.list_trigger_builds(namespace, repository,
trigger_uuid, limit))
builds = model.build.list_trigger_builds(namespace, repository, trigger_uuid, limit)
return {
'builds': [build_status_view(build) for build in builds]
'builds': [build_status_view(bld) for bld in builds]
}
@ -471,7 +471,7 @@ class BuildTriggerFieldValues(RepositoryParamResource):
def post(self, namespace, repository, trigger_uuid, field_name):
""" List the field values for a custom run field. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
@ -502,7 +502,7 @@ class BuildTriggerSources(RepositoryParamResource):
def get(self, namespace, repository, trigger_uuid):
""" List the build sources for the trigger configuration thus far. """
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()

View file

@ -3,33 +3,33 @@
import logging
import json
from random import SystemRandom
from flask import request, abort
from flask.ext.login import logout_user
from flask.ext.principal import identity_changed, AnonymousIdentity
from peewee import IntegrityError
import features
from app import app, billing as stripe, authentication, avatar
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, NotFound, require_user_admin, parse_args,
query_param, InvalidToken, require_scope, format_date, hide_if, show_if,
query_param, InvalidToken, require_scope, format_date, show_if,
license_error, require_fresh_login, path_param, define_json_response,
RepositoryParamResource)
from endpoints.api.subscribe import subscribe
from endpoints.common import common_login
from endpoints.decorators import anon_allowed
from endpoints.api.team import try_accept_invite
from data import model
from data.billing import get_plan
from auth.permissions import (AdministerOrganizationPermission, CreateRepositoryPermission,
UserAdminPermission, UserReadPermission, SuperUserPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email, send_password_changed)
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email,
send_password_changed)
from util.names import parse_single_urn
import features
logger = logging.getLogger(__name__)
@ -45,7 +45,7 @@ def user_view(user):
'preferred_namespace': not (o.stripe_id is None)
}
organizations = model.get_user_organizations(user.username)
organizations = model.organization.get_user_organizations(user.username)
def login_view(login):
try:
@ -59,7 +59,7 @@ def user_view(user):
'metadata': metadata
}
logins = model.list_federated_logins(user)
logins = model.user.list_federated_logins(user)
user_response = {
'anonymous': False,
@ -89,14 +89,14 @@ def user_view(user):
return user_response
def notification_view(notification):
def notification_view(note):
return {
'id': notification.uuid,
'organization': notification.target.username if notification.target.organization else None,
'kind': notification.kind.name,
'created': format_date(notification.created),
'metadata': json.loads(notification.metadata_json),
'dismissed': notification.dismissed
'id': note.uuid,
'organization': note.target.username if note.target.organization else None,
'kind': note.kind.name,
'created': format_date(note.created),
'metadata': json.loads(note.metadata_json),
'dismissed': note.dismissed
}
@ -238,7 +238,7 @@ class User(ApiResource):
log_action('account_change_password', user.username)
# Change the user's password.
model.change_password(user, user_data['password'])
model.user.change_password(user, user_data['password'])
# Login again to reset their session cookie.
common_login(user)
@ -248,36 +248,36 @@ class User(ApiResource):
if 'invoice_email' in user_data:
logger.debug('Changing invoice_email for user: %s', user.username)
model.change_invoice_email(user, user_data['invoice_email'])
model.user.change_invoice_email(user, user_data['invoice_email'])
if 'tag_expiration' in user_data:
logger.debug('Changing user tag expiration to: %ss', user_data['tag_expiration'])
model.change_user_tag_expiration(user, user_data['tag_expiration'])
model.user.change_user_tag_expiration(user, user_data['tag_expiration'])
if 'email' in user_data and user_data['email'] != user.email:
new_email = user_data['email']
if model.find_user_by_email(new_email):
if model.user.find_user_by_email(new_email):
# Email already used.
raise request_error(message='E-mail address already used')
if features.MAILING:
logger.debug('Sending email to change email address for user: %s',
user.username)
code = model.create_confirm_email_code(user, new_email=new_email)
code = model.user.create_confirm_email_code(user, new_email=new_email)
send_change_email(user.username, user_data['email'], code.code)
else:
model.update_email(user, new_email, auto_verify=not features.MAILING)
model.user.update_email(user, new_email, auto_verify=not features.MAILING)
if ('username' in user_data and user_data['username'] != user.username and
features.USER_RENAME):
new_username = user_data['username']
if model.get_user_or_org(new_username) is not None:
if model.user.get_user_or_org(new_username) is not None:
# Username already used
raise request_error(message='Username is already in use')
model.change_username(user.id, new_username)
model.user.change_username(user.id, new_username)
except model.InvalidPasswordException, ex:
except model.user.InvalidPasswordException, ex:
raise request_error(exception=ex)
return user_view(user)
@ -291,12 +291,12 @@ class User(ApiResource):
user_data = request.get_json()
invite_code = user_data.get('invite_code', '')
existing_user = model.get_nonrobot_user(user_data['username'])
existing_user = model.user.get_nonrobot_user(user_data['username'])
if existing_user:
raise request_error(message='The username already exists')
try:
new_user = model.create_user(user_data['username'], user_data['password'],
new_user = model.user.create_user(user_data['username'], user_data['password'],
user_data['email'], auto_verify=not features.MAILING)
# Handle any invite codes.
@ -306,12 +306,12 @@ class User(ApiResource):
# Add the user to the team.
try:
try_accept_invite(invite_code, new_user)
except model.DataModelException:
except model.user.DataModelException:
pass
if features.MAILING:
code = model.create_confirm_email_code(new_user)
code = model.user.create_confirm_email_code(new_user)
send_confirmation_email(new_user.username, new_user.email, code.code)
return {
'awaiting_verification': True
@ -320,9 +320,9 @@ class User(ApiResource):
common_login(new_user)
return user_view(new_user)
except model.TooManyUsersException as ex:
except model.user.TooManyUsersException as ex:
raise license_error(exception=ex)
except model.DataModelException as ex:
except model.user.DataModelException as ex:
raise request_error(exception=ex)
@resource('/v1/user/private')
@ -336,7 +336,7 @@ class PrivateRepositories(ApiResource):
""" Get the number of private repos this user has, and whether they are allowed to create more.
"""
user = get_authenticated_user()
private_repos = model.get_private_repo_count(user.username)
private_repos = model.user.get_private_repo_count(user.username)
repos_allowed = 0
if user.stripe_id:
@ -396,7 +396,7 @@ def conduct_signin(username_or_email, password):
verified = None
try:
(verified, error_message) = authentication.verify_user(username_or_email, password)
except model.TooManyUsersException as ex:
except model.user.TooManyUsersException as ex:
raise license_error(exception=ex)
if verified:
@ -457,15 +457,14 @@ class ConvertToOrganization(ApiResource):
# Ensure that the sign in credentials work.
admin_username = convert_data['adminUser']
admin_password = convert_data['adminPassword']
(admin_user, error_message) = authentication.verify_user(admin_username, admin_password)
(admin_user, _) = authentication.verify_user(admin_username, admin_password)
if not admin_user:
raise request_error(reason='invaliduser',
message='The admin user credentials are not valid')
message='The admin user credentials are not valid')
# Ensure that the new admin user is the not user being converted.
if admin_user.id == user.id:
raise request_error(reason='invaliduser',
message='The admin user is not valid')
raise request_error(reason='invaliduser', message='The admin user is not valid')
# Subscribe the organization to the new plan.
if features.BILLING:
@ -473,7 +472,7 @@ class ConvertToOrganization(ApiResource):
subscribe(user, plan, None, True) # Require business plans
# Convert the user to an organization.
model.convert_user_to_organization(user, admin_user)
model.organization.convert_user_to_organization(user, admin_user)
log_action('account_convert', user.username)
# And finally login with the admin credentials.
@ -583,7 +582,7 @@ class DetachExternal(ApiResource):
@nickname('detachExternalLogin')
def post(self, servicename):
""" Request that the current user be detached from the external login service. """
model.detach_external_login(get_authenticated_user(), servicename)
model.user.detach_external_login(get_authenticated_user(), servicename)
return {'success': True}
@ -614,7 +613,7 @@ class Recovery(ApiResource):
def post(self):
""" Request a password recovery email."""
email = request.get_json()['email']
code = model.create_reset_password_email_code(email)
code = model.user.create_reset_password_email_code(email)
send_recovery_email(email, code.code)
return 'Created', 201
@ -631,7 +630,8 @@ class UserNotificationList(ApiResource):
page = args['page']
limit = args['limit']
notifications = list(model.list_notifications(get_authenticated_user(), page=page, limit=limit + 1))
notifications = list(model.notification.list_notifications(get_authenticated_user(), page=page,
limit=limit + 1))
has_more = False
if len(notifications) > limit:
@ -639,7 +639,7 @@ class UserNotificationList(ApiResource):
notifications = notifications[0:limit]
return {
'notifications': [notification_view(notification) for notification in notifications],
'notifications': [notification_view(note) for note in notifications],
'additional': has_more
}
@ -665,24 +665,24 @@ class UserNotification(ApiResource):
@require_user_admin
@nickname('getUserNotification')
def get(self, uuid):
notification = model.lookup_notification(get_authenticated_user(), uuid)
if not notification:
note = model.notification.lookup_notification(get_authenticated_user(), uuid)
if not note:
raise NotFound()
return notification_view(notification)
return notification_view(note)
@require_user_admin
@nickname('updateUserNotification')
@validate_json_request('UpdateNotification')
def put(self, uuid):
notification = model.lookup_notification(get_authenticated_user(), uuid)
if not notification:
note = model.notification.lookup_notification(get_authenticated_user(), uuid)
if not note:
raise NotFound()
notification.dismissed = request.get_json().get('dismissed', False)
notification.save()
note.dismissed = request.get_json().get('dismissed', False)
note.save()
return notification_view(notification)
return notification_view(note)
def authorization_view(access_token):
@ -733,8 +733,7 @@ class UserAuthorization(ApiResource):
@require_user_admin
@nickname('deleteUserAuthorization')
def delete(self, access_token_uuid):
access_token = model.oauth.lookup_access_token_for_user(get_authenticated_user(),
access_token_uuid)
access_token = model.oauth.lookup_access_token_for_user(get_authenticated_user(), access_token_uuid)
if not access_token:
raise NotFound()
@ -774,9 +773,8 @@ class StarredRepositoryList(ApiResource):
""" List all starred repositories. """
page = args['page']
limit = args['limit']
starred_repos = model.get_user_starred_repositories(get_authenticated_user(),
page=page,
limit=limit)
starred_repos = model.repository.get_user_starred_repositories(get_authenticated_user(),
page=page, limit=limit)
def repo_view(repo_obj):
return {
'namespace': repo_obj.namespace_user.username,
@ -797,11 +795,11 @@ class StarredRepositoryList(ApiResource):
req = request.get_json()
namespace = req['namespace']
repository = req['repository']
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
try:
model.star_repository(user, repo)
model.repository.star_repository(user, repo)
except IntegrityError:
pass
@ -820,10 +818,10 @@ class StarredRepository(RepositoryParamResource):
def delete(self, namespace, repository):
""" Removes a star from a repository. """
user = get_authenticated_user()
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo:
model.unstar_repository(user, repo)
model.repository.unstar_repository(user, repo)
return 'Deleted', 204
@ -833,7 +831,7 @@ class Users(ApiResource):
@nickname('getUserInformation')
def get(self, username):
""" Get user information for the specified user. """
user = model.get_nonrobot_user(username)
user = model.user.get_nonrobot_user(username)
if user is None:
abort(404)

View file

@ -22,7 +22,7 @@ bitbuckettrigger = Blueprint('bitbuckettrigger', __name__)
@route_show_if(features.BITBUCKET_BUILD)
@require_session_login
def attach_bitbucket_build_trigger(trigger_uuid):
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
if not trigger or trigger.service.name != BitbucketBuildTrigger.service_name():
abort(404)

View file

@ -1,21 +1,24 @@
import logging
import json
from flask import request
from app import app, dockerfile_build_queue
from data import model
from data.database import db
from auth.auth_context import get_authenticated_user
from endpoints.notificationhelper import spawn_notification
from flask import request
logger = logging.getLogger(__name__)
def start_build(repository, prepared_build, pull_robot_name=None):
host = app.config['SERVER_HOSTNAME']
repo_path = '%s/%s/%s' % (host, repository.namespace_user.username, repository.name)
token = model.create_access_token(repository, 'write', kind='build-worker',
friendly_name='Repository Build Token')
new_token = model.token.create_access_token(repository, 'write', kind='build-worker',
friendly_name='Repository Build Token')
logger.debug('Creating build %s with repo %s tags %s',
prepared_build.build_name, repo_path, prepared_build.tags)
@ -29,15 +32,17 @@ def start_build(repository, prepared_build, pull_robot_name=None):
}
with app.config['DB_TRANSACTION_FACTORY'](db):
build_request = model.create_repository_build(repository, token, job_config,
prepared_build.dockerfile_id,
prepared_build.build_name,
prepared_build.trigger,
pull_robot_name=pull_robot_name)
build_request = model.build.create_repository_build(repository, new_token, job_config,
prepared_build.dockerfile_id,
prepared_build.build_name,
prepared_build.trigger,
pull_robot_name=pull_robot_name)
pull_creds = model.user.get_pull_credentials(pull_robot_name) if pull_robot_name else None
json_data = json.dumps({
'build_uuid': build_request.uuid,
'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
'pull_credentials': pull_creds
})
queue_id = dockerfile_build_queue.put([repository.namespace_user.username, repository.name],
@ -62,8 +67,8 @@ def start_build(repository, prepared_build, pull_robot_name=None):
event_log_metadata['trigger_kind'] = prepared_build.trigger.service.name
event_log_metadata['trigger_metadata'] = prepared_build.metadata or {}
model.log_action('build_dockerfile', repository.namespace_user.username, ip=request.remote_addr,
metadata=event_log_metadata, repository=repository)
model.log.log_action('build_dockerfile', repository.namespace_user.username,
ip=request.remote_addr, metadata=event_log_metadata, repository=repository)
spawn_notification(repository, 'build_queued', event_log_metadata,
subpage='build/%s' % build_request.uuid,

View file

@ -199,11 +199,12 @@ def render_page_template(name, **kwargs):
def check_repository_usage(user_or_org, plan_found):
private_repos = model.get_private_repo_count(user_or_org.username)
private_repos = model.user.get_private_repo_count(user_or_org.username)
repos_allowed = plan_found['privateRepos']
if private_repos > repos_allowed:
model.create_notification('over_private_usage', user_or_org, {'namespace': user_or_org.username})
model.notification.create_notification('over_private_usage', user_or_org,
{'namespace': user_or_org.username})
else:
model.delete_notifications_by_kind(user_or_org, 'over_private_usage')
model.notification.delete_notifications_by_kind(user_or_org, 'over_private_usage')

View file

@ -26,12 +26,12 @@ def attach_github_build_trigger(namespace, repository):
if permission.can():
code = request.args.get('code')
token = github_trigger.exchange_code_for_token(app.config, client, code)
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository)
abort(404, message=msg)
trigger = model.create_build_trigger(repo, 'github', token, current_user.db_user())
trigger = model.build.create_build_trigger(repo, 'github', token, current_user.db_user())
repo_path = '%s/%s' % (namespace, repository)
full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
trigger.uuid)

View file

@ -9,7 +9,6 @@ from auth.permissions import AdministerRepositoryPermission
from data import model
from endpoints.common import route_show_if
from util.http import abort
from util.names import parse_repository_name
import features
@ -40,14 +39,15 @@ def attach_gitlab_build_trigger():
msg = 'Could not exchange token. It may have expired.'
abort(404, message=msg)
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository)
abort(404, message=msg)
trigger = model.create_build_trigger(repo, 'gitlab', token, current_user.db_user())
trigger = model.build.create_build_trigger(repo, 'gitlab', token, current_user.db_user())
repo_path = '%s/%s' % (namespace, repository)
full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=', trigger.uuid)
full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
trigger.uuid)
logger.debug('Redirecting to full url: %s', full_url)
return redirect(full_url)

View file

@ -54,8 +54,9 @@ def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[
performer_data=None):
event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage)
notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name,
event_name=event_name)
notifications = model.notification.list_repo_notifications(repo.namespace_user.username,
repo.name,
event_name=event_name)
for notification in list(notifications):
notification_data = build_notification_data(notification, event_data, performer_data)
path = [repo.namespace_user.username, repo.name, event_name] + pathargs

View file

@ -38,11 +38,11 @@ class NotificationMethod(object):
"""
raise NotImplementedError
def perform(self, notification, event_handler, notification_data):
def perform(self, notification_obj, event_handler, notification_data):
"""
Performs the notification method.
notification: The noticication record itself.
notification_obj: The noticication record itself.
event_handler: The NotificationEvent handler.
notification_data: The dict of notification data placed in the queue.
"""
@ -71,14 +71,14 @@ class QuayNotificationMethod(NotificationMethod):
target_info = config_data['target']
if target_info['kind'] == 'user':
target = model.get_nonrobot_user(target_info['name'])
target = model.user.get_nonrobot_user(target_info['name'])
if not target:
# Just to be safe.
return (True, 'Unknown user %s' % target_info['name'], [])
return (True, None, [target])
elif target_info['kind'] == 'org':
target = model.get_organization(target_info['name'])
target = model.organization.get_organization(target_info['name'])
if not target:
# Just to be safe.
return (True, 'Unknown organization %s' % target_info['name'], None)
@ -90,33 +90,34 @@ class QuayNotificationMethod(NotificationMethod):
return (True, None, [target])
elif target_info['kind'] == 'team':
# Lookup the team.
team = None
org_team = None
try:
team = model.get_organization_team(repository.namespace_user.username, target_info['name'])
org_team = model.team.get_organization_team(repository.namespace_user.username,
target_info['name'])
except model.InvalidTeamException:
# Probably deleted.
return (True, 'Unknown team %s' % target_info['name'], None)
# Lookup the team's members
return (True, None, model.get_organization_team_members(team.id))
return (True, None, model.organization.get_organization_team_members(org_team.id))
def perform(self, notification, event_handler, notification_data):
repository = notification.repository
def perform(self, notification_obj, event_handler, notification_data):
repository = notification_obj.repository
if not repository:
# Probably deleted.
return
# Lookup the target user or team to which we'll send the notification.
config_data = json.loads(notification.config_json)
config_data = json.loads(notification_obj.config_json)
status, err_message, target_users = self.find_targets(repository, config_data)
if not status:
raise NotificationMethodPerformException(err_message)
# For each of the target users, create a notification.
for target_user in set(target_users or []):
model.create_notification(event_handler.event_name(), target_user,
metadata=notification_data['event_data'])
model.notification.create_notification(event_handler.event_name(), target_user,
metadata=notification_data['event_data'])
class EmailMethod(NotificationMethod):
@ -129,16 +130,16 @@ class EmailMethod(NotificationMethod):
if not email:
raise CannotValidateNotificationMethodException('Missing e-mail address')
record = model.get_email_authorized_for_repo(repository.namespace_user.username,
repository.name, email)
record = model.repository.get_email_authorized_for_repo(repository.namespace_user.username,
repository.name, email)
if not record or not record.confirmed:
raise CannotValidateNotificationMethodException('The specified e-mail address '
'is not authorized to receive '
'notifications for this repository')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
def perform(self, notification_obj, event_handler, notification_data):
config_data = json.loads(notification_obj.config_json)
email = config_data.get('email', '')
if not email:
return
@ -166,8 +167,8 @@ class WebhookMethod(NotificationMethod):
if not url:
raise CannotValidateNotificationMethodException('Missing webhook URL')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
def perform(self, notification_obj, event_handler, notification_data):
config_data = json.loads(notification_obj.config_json)
url = config_data.get('url', '')
if not url:
return
@ -201,13 +202,13 @@ class FlowdockMethod(NotificationMethod):
if not token:
raise CannotValidateNotificationMethodException('Missing Flowdock API Token')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
def perform(self, notification_obj, event_handler, notification_data):
config_data = json.loads(notification_obj.config_json)
token = config_data.get('flow_api_token', '')
if not token:
return
owner = model.get_user_or_org(notification.repository.namespace_user.username)
owner = model.user.get_user_or_org(notification_obj.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@ -220,8 +221,8 @@ class FlowdockMethod(NotificationMethod):
'subject': event_handler.get_summary(notification_data['event_data'], notification_data),
'content': event_handler.get_message(notification_data['event_data'], notification_data),
'from_name': owner.username,
'project': (notification.repository.namespace_user.username + ' ' +
notification.repository.name),
'project': (notification_obj.repository.namespace_user.username + ' ' +
notification_obj.repository.name),
'tags': ['#' + event_handler.event_name()],
'link': notification_data['event_data']['homepage']
}
@ -254,8 +255,8 @@ class HipchatMethod(NotificationMethod):
if not config_data.get('room_id', ''):
raise CannotValidateNotificationMethodException('Missing Hipchat Room ID')
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
def perform(self, notification_obj, event_handler, notification_data):
config_data = json.loads(notification_obj.config_json)
token = config_data.get('notification_token', '')
room_id = config_data.get('room_id', '')
@ -263,7 +264,7 @@ class HipchatMethod(NotificationMethod):
if not token or not room_id:
return
owner = model.get_user_or_org(notification.repository.namespace_user.username)
owner = model.user.get_user_or_org(notification_obj.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@ -367,14 +368,14 @@ class SlackMethod(NotificationMethod):
message = message.replace('<br>', '\n')
return adjust_tags(message)
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
def perform(self, notification_obj, event_handler, notification_data):
config_data = json.loads(notification_obj.config_json)
url = config_data.get('url', '')
if not url:
return
owner = model.get_user_or_org(notification.repository.namespace_user.username)
owner = model.user.get_user_or_org(notification_obj.repository.namespace_user.username)
if not owner:
# Something went wrong.
return

View file

@ -41,7 +41,7 @@ def get_user(service, token):
def conduct_oauth_login(service, user_id, username, email, metadata={}):
service_name = service.service_name()
to_login = model.verify_federated_login(service_name.lower(), user_id)
to_login = model.user.verify_federated_login(service_name.lower(), user_id)
if not to_login:
# See if we can create a new user.
if not features.USER_CREATION:
@ -52,22 +52,22 @@ def conduct_oauth_login(service, user_id, username, email, metadata={}):
try:
new_username = None
for valid in generate_valid_usernames(username):
if model.get_user_or_org(valid):
if model.user.get_user_or_org(valid):
continue
new_username = valid
break
to_login = model.create_federated_user(new_username, email, service_name.lower(),
user_id, set_password_notification=True,
metadata=metadata)
to_login = model.user.create_federated_user(new_username, email, service_name.lower(),
user_id, set_password_notification=True,
metadata=metadata)
# Success, tell analytics
analytics.track(to_login.username, 'register', {'service': service_name.lower()})
state = request.args.get('state', None)
if state:
logger.debug('Aliasing with state: %s' % state)
logger.debug('Aliasing with state: %s', state)
analytics.alias(to_login.username, state)
except model.InvalidEmailAddressException as ieex:
@ -200,7 +200,7 @@ def google_oauth_attach():
}
try:
model.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
model.user.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
except IntegrityError:
err = 'Google account %s is already attached to a %s account' % (
username, app.config['REGISTRY_TITLE_SHORT'])
@ -228,7 +228,7 @@ def github_oauth_attach():
}
try:
model.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
model.user.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
except IntegrityError:
err = 'Github account %s is already attached to a %s account' % (
username, app.config['REGISTRY_TITLE_SHORT'])

View file

@ -67,9 +67,7 @@ def track_and_log(event_name, repo, **kwargs):
# Log the action to the database.
logger.debug('Logging the %s to logs system', event_name)
model.log_action(event_name, namespace,
performer=authenticated_user,
ip=request.remote_addr, metadata=metadata,
repository=repo)
model.log.log_action(event_name, namespace, performer=authenticated_user, ip=request.remote_addr,
metadata=metadata, repository=repo)
logger.debug('Track and log of %s complete', event_name)

View file

@ -227,11 +227,11 @@ class BuildTriggerHandler(object):
def put_config_key(self, key, value):
""" Updates a config key in the trigger, saving it to the DB. """
self.config[key] = value
model.update_build_trigger(self.trigger, self.config)
model.build.update_build_trigger(self.trigger, self.config)
def set_auth_token(self, auth_token):
""" Sets the auth token for the trigger, saving it to the DB. """
model.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
def get_dockerfile_path(self):
""" Returns the normalized path to the Dockerfile found in the subdirectory

29
endpoints/v1/__init__.py Normal file
View file

@ -0,0 +1,29 @@
from flask import Blueprint, make_response
from endpoints.decorators import anon_protect, anon_allowed
v1_bp = Blueprint('v1', __name__)
# Note: This is *not* part of the Docker index spec. This is here for our own health check,
# since we have nginx handle the _ping below.
@v1_bp.route('/_internal_ping')
@anon_allowed
def internal_ping():
return make_response('true', 200)
@v1_bp.route('/_ping')
@anon_allowed
def ping():
# NOTE: any changes made here must also be reflected in the nginx config
response = make_response('true', 200)
response.headers['X-Docker-Registry-Version'] = '0.6.0'
response.headers['X-Docker-Registry-Standalone'] = '0'
return response
from endpoints.v1 import index
from endpoints.v1 import registry
from endpoints.v1 import tag

View file

@ -2,32 +2,27 @@ import json
import logging
import urlparse
from flask import request, make_response, jsonify, session, Blueprint
from flask import request, make_response, jsonify, session
from functools import wraps
from collections import OrderedDict
from data import model
from data.model import oauth
from app import app, authentication, userevents, storage
from auth.auth import process_auth, generate_signed_token
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
from util.names import parse_repository_name
from util.useremails import send_confirmation_email
from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
ReadRepositoryPermission, CreateRepositoryPermission,
AlwaysFailPermission, repository_read_grant, repository_write_grant)
repository_read_grant, repository_write_grant)
from util.http import abort
from endpoints.v1 import v1_bp
from endpoints.trackhelper import track_and_log
from endpoints.notificationhelper import spawn_notification
from endpoints.decorators import anon_protect, anon_allowed
import features
logger = logging.getLogger(__name__)
index = Blueprint('index', __name__)
class GrantType(object):
READ_REPOSITORY = 'read'
@ -73,8 +68,8 @@ def generate_headers(scope=GrantType.READ_REPOSITORY, add_grant_for_status=None)
return decorator_method
@index.route('/users', methods=['POST'])
@index.route('/users/', methods=['POST'])
@v1_bp.route('/users', methods=['POST'])
@v1_bp.route('/users/', methods=['POST'])
@anon_allowed
def create_user():
user_data = request.get_json()
@ -90,13 +85,13 @@ def create_user():
if username == '$token':
try:
model.load_token_data(password)
model.token.load_token_data(password)
return success
except model.InvalidTokenException:
abort(400, 'Invalid access token.', issue='invalid-access-token')
elif username == '$oauthtoken':
validated = oauth.validate_access_token(password)
validated = model.oauth.validate_access_token(password)
if validated is not None:
return success
else:
@ -104,7 +99,7 @@ def create_user():
elif '+' in username:
try:
model.verify_robot(username, password)
model.user.verify_robot(username, password)
return success
except model.InvalidRobotException:
abort(400, 'Invalid robot account or password.',
@ -123,8 +118,8 @@ def create_user():
abort(400, error_message, issue='login-failure')
@index.route('/users', methods=['GET'])
@index.route('/users/', methods=['GET'])
@v1_bp.route('/users', methods=['GET'])
@v1_bp.route('/users/', methods=['GET'])
@process_auth
@anon_allowed
def get_user():
@ -146,7 +141,7 @@ def get_user():
abort(404)
@index.route('/users/<username>/', methods=['PUT'])
@v1_bp.route('/users/<username>/', methods=['PUT'])
@process_auth
@anon_allowed
def update_user(username):
@ -157,12 +152,11 @@ def update_user(username):
if 'password' in update_request:
logger.debug('Updating user password')
model.change_password(get_authenticated_user(),
update_request['password'])
model.user.change_password(get_authenticated_user(), update_request['password'])
if 'email' in update_request:
logger.debug('Updating user email')
model.update_email(get_authenticated_user(), update_request['email'])
model.user.update_email(get_authenticated_user(), update_request['email'])
return jsonify({
'username': get_authenticated_user().username,
@ -172,17 +166,14 @@ def update_user(username):
abort(403)
@index.route('/repositories/<path:repository>', methods=['PUT'])
@v1_bp.route('/repositories/<path:repository>', methods=['PUT'])
@process_auth
@parse_repository_name
@generate_headers(scope=GrantType.WRITE_REPOSITORY, add_grant_for_status=201)
@anon_allowed
def create_repository(namespace, repository):
logger.debug('Parsing image descriptions for repository %s/%s', namespace, repository)
image_descriptions = json.loads(request.data.decode('utf8'))
logger.debug('Looking up repository %s/%s', namespace, repository)
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
logger.debug('Found repository %s/%s', namespace, repository)
if not repo and get_authenticated_user() is None:
@ -201,18 +192,16 @@ def create_repository(namespace, repository):
else:
permission = CreateRepositoryPermission(namespace)
if not permission.can():
logger.info('Attempt to create a new repo %s/%s with insufficient perms', namespace, repository)
abort(403,
message='You do not have permission to create repositories in namespace "%(namespace)s"',
issue='no-create-permission',
namespace=namespace)
logger.info('Attempt to create a new repo %s/%s with insufficient perms', namespace,
repository)
msg = 'You do not have permission to create repositories in namespace "%(namespace)s"'
abort(403, message=msg, issue='no-create-permission', namespace=namespace)
# Attempt to create the new repository.
logger.debug('Creating repository %s/%s with owner: %s', namespace, repository,
get_authenticated_user().username)
repo = model.create_repository(namespace, repository,
get_authenticated_user())
repo = model.repository.create_repository(namespace, repository, get_authenticated_user())
if get_authenticated_user():
user_event_data = {
@ -227,7 +216,7 @@ def create_repository(namespace, repository):
return make_response('Created', 201)
@index.route('/repositories/<path:repository>/images', methods=['PUT'])
@v1_bp.route('/repositories/<path:repository>/images', methods=['PUT'])
@process_auth
@parse_repository_name
@generate_headers(scope=GrantType.WRITE_REPOSITORY)
@ -237,13 +226,13 @@ def update_images(namespace, repository):
if permission.can():
logger.debug('Looking up repository')
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
# Make sure the repo actually exists.
abort(404, message='Unknown repository', issue='unknown-repo')
logger.debug('GCing repository')
model.garbage_collect_repository(namespace, repository)
model.repository.garbage_collect_repository(namespace, repository)
# Generate a job for each notification that has been added to this repo
logger.debug('Adding notifications for repository')
@ -260,7 +249,7 @@ def update_images(namespace, repository):
abort(403)
@index.route('/repositories/<path:repository>/images', methods=['GET'])
@v1_bp.route('/repositories/<path:repository>/images', methods=['GET'])
@process_auth
@parse_repository_name
@generate_headers(scope=GrantType.READ_REPOSITORY)
@ -269,10 +258,10 @@ def get_repository_images(namespace, repository):
permission = ReadRepositoryPermission(namespace, repository)
# TODO invalidate token?
if permission.can() or model.repository_is_public(namespace, repository):
if permission.can() or model.repository.repository_is_public(namespace, repository):
# We can't rely on permissions to tell us if a repo exists anymore
logger.debug('Looking up repository')
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo:
abort(404, message='Unknown repository', issue='unknown-repo')
@ -286,7 +275,7 @@ def get_repository_images(namespace, repository):
abort(403)
@index.route('/repositories/<path:repository>/images', methods=['DELETE'])
@v1_bp.route('/repositories/<path:repository>/images', methods=['DELETE'])
@process_auth
@parse_repository_name
@generate_headers(scope=GrantType.WRITE_REPOSITORY)
@ -295,14 +284,14 @@ def delete_repository_images(namespace, repository):
abort(501, 'Not Implemented', issue='not-implemented')
@index.route('/repositories/<path:repository>/auth', methods=['PUT'])
@v1_bp.route('/repositories/<path:repository>/auth', methods=['PUT'])
@parse_repository_name
@anon_allowed
def put_repository_auth(namespace, repository):
abort(501, 'Not Implemented', issue='not-implemented')
@index.route('/search', methods=['GET'])
@v1_bp.route('/search', methods=['GET'])
@process_auth
@anon_protect
def get_search():
@ -320,7 +309,7 @@ def get_search():
username = user.username
if query:
matching = model.get_matching_repositories(query, username)
matching = model.repository.get_matching_repositories(query, username)
else:
matching = []
@ -337,20 +326,3 @@ def get_search():
resp = make_response(json.dumps(data), 200)
resp.mimetype = 'application/json'
return resp
# Note: This is *not* part of the Docker index spec. This is here for our own health check,
# since we have nginx handle the _ping below.
@index.route('/_internal_ping')
@anon_allowed
def internal_ping():
  """ Internal health check; nginx answers the public _ping route itself. """
  return make_response('true', 200)
@index.route('/_ping')
@index.route('/_ping')
@anon_allowed
def ping():
# NOTE: any changes made here must also be reflected in the nginx config
response = make_response('true', 200)
response.headers['X-Docker-Registry-Version'] = '0.6.0'
response.headers['X-Docker-Registry-Standalone'] = '0'
return response

View file

@ -1,8 +1,7 @@
import logging
import json
from flask import (make_response, request, session, Response, redirect,
Blueprint, abort as flask_abort)
from flask import make_response, request, session, Response, redirect, abort as flask_abort
from functools import wraps
from datetime import datetime
from time import time
@ -10,19 +9,20 @@ from time import time
from app import storage as store, image_diff_queue, app
from auth.auth import process_auth, extract_namespace_repo_from_session
from auth.auth_context import get_authenticated_user, get_grant_user_context
from util import checksums, changes
from digest import checksums
from util import changes
from util.http import abort, exact_abort
from auth.permissions import (ReadRepositoryPermission,
ModifyRepositoryPermission)
from data import model, database
from util import gzipstream
from endpoints.v1 import v1_bp
from endpoints.decorators import anon_protect
registry = Blueprint('registry', __name__)
logger = logging.getLogger(__name__)
class SocketReader(object):
def __init__(self, fp):
self._fp = fp
@ -60,7 +60,7 @@ def require_completion(f):
@wraps(f)
def wrapper(namespace, repository, *args, **kwargs):
image_id = kwargs['image_id']
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if image_is_uploading(repo_image):
abort(400, 'Image %(image_id)s is being uploaded, retry later',
issue='upload-in-progress', image_id=kwargs['image_id'])
@ -93,7 +93,7 @@ def set_cache_headers(f):
return wrapper
@registry.route('/images/<image_id>/layer', methods=['HEAD'])
@v1_bp.route('/images/<image_id>/layer', methods=['HEAD'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@ -103,9 +103,9 @@ def head_image_layer(namespace, repository, image_id, headers):
permission = ReadRepositoryPermission(namespace, repository)
logger.debug('Checking repo permissions')
if permission.can() or model.repository_is_public(namespace, repository):
if permission.can() or model.repository.repository_is_public(namespace, repository):
logger.debug('Looking up repo image')
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not repo_image:
logger.debug('Image not found')
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
@ -127,7 +127,7 @@ def head_image_layer(namespace, repository, image_id, headers):
abort(403)
@registry.route('/images/<image_id>/layer', methods=['GET'])
@v1_bp.route('/images/<image_id>/layer', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@ -137,9 +137,9 @@ def get_image_layer(namespace, repository, image_id, headers):
permission = ReadRepositoryPermission(namespace, repository)
logger.debug('Checking repo permissions')
if permission.can() or model.repository_is_public(namespace, repository):
if permission.can() or model.repository.repository_is_public(namespace, repository):
logger.debug('Looking up repo image')
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not repo_image:
logger.debug('Image not found')
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
@ -171,7 +171,7 @@ def get_image_layer(namespace, repository, image_id, headers):
abort(403)
@registry.route('/images/<image_id>/layer', methods=['PUT'])
@v1_bp.route('/images/<image_id>/layer', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
@anon_protect
@ -182,7 +182,7 @@ def put_image_layer(namespace, repository, image_id):
abort(403)
logger.debug('Retrieving image')
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
try:
logger.debug('Retrieving image data')
uuid = repo_image.storage.uuid
@ -235,17 +235,16 @@ def put_image_layer(namespace, repository, image_id):
try:
# Save the size of the image.
model.set_image_size(image_id, namespace, repository, size_info.compressed_size,
size_info.uncompressed_size)
model.image.set_image_size(image_id, namespace, repository, size_info.compressed_size,
size_info.uncompressed_size)
if requires_tarsum:
tmp.seek(0)
csums.append(checksums.compute_tarsum(tmp, json_data))
tmp.close()
except (IOError, checksums.TarError) as e:
logger.debug('put_image_layer: Error when computing tarsum '
'{0}'.format(e))
except (IOError, checksums.TarError) as exc:
logger.debug('put_image_layer: Error when computing tarsum %s', exc)
if repo_image.storage.checksum is None:
# We don't have a checksum stored yet, that's fine skipping the check.
@ -267,7 +266,7 @@ def put_image_layer(namespace, repository, image_id):
# The layer is ready for download, send a job to the work queue to
# process it.
logger.debug('Adding layer to diff queue')
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
'namespace_user_id': repo.namespace_user.id,
'repository': repository,
@ -277,7 +276,7 @@ def put_image_layer(namespace, repository, image_id):
return make_response('true', 200)
@registry.route('/images/<image_id>/checksum', methods=['PUT'])
@v1_bp.route('/images/<image_id>/checksum', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
@anon_protect
@ -309,7 +308,7 @@ def put_image_checksum(namespace, repository, image_id):
issue='missing-checksum-cookie', image_id=image_id)
logger.debug('Looking up repo image')
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not repo_image or not repo_image.storage:
abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
@ -330,8 +329,8 @@ def put_image_checksum(namespace, repository, image_id):
abort(400, err)
if checksum not in session.get('checksum', []):
logger.debug('session checksums: %s' % session.get('checksum', []))
logger.debug('client supplied checksum: %s' % checksum)
logger.debug('session checksums: %s', session.get('checksum', []))
logger.debug('client supplied checksum: %s', checksum)
logger.debug('put_image_checksum: Wrong checksum')
abort(400, 'Checksum mismatch for image: %(image_id)s',
issue='checksum-mismatch', image_id=image_id)
@ -342,7 +341,7 @@ def put_image_checksum(namespace, repository, image_id):
# The layer is ready for download, send a job to the work queue to
# process it.
logger.debug('Adding layer to diff queue')
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
'namespace_user_id': repo.namespace_user.id,
'repository': repository,
@ -352,7 +351,7 @@ def put_image_checksum(namespace, repository, image_id):
return make_response('true', 200)
@registry.route('/images/<image_id>/json', methods=['GET'])
@v1_bp.route('/images/<image_id>/json', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@ -361,12 +360,11 @@ def put_image_checksum(namespace, repository, image_id):
def get_image_json(namespace, repository, image_id, headers):
logger.debug('Checking repo permissions')
permission = ReadRepositoryPermission(namespace, repository)
if not permission.can() and not model.repository_is_public(namespace,
repository):
if not permission.can() and not model.repository.repository_is_public(namespace, repository):
abort(403)
logger.debug('Looking up repo image')
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
logger.debug('Looking up repo layer data')
try:
@ -384,7 +382,7 @@ def get_image_json(namespace, repository, image_id, headers):
return response
@registry.route('/images/<image_id>/ancestry', methods=['GET'])
@v1_bp.route('/images/<image_id>/ancestry', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@ -393,12 +391,11 @@ def get_image_json(namespace, repository, image_id, headers):
def get_image_ancestry(namespace, repository, image_id, headers):
logger.debug('Checking repo permissions')
permission = ReadRepositoryPermission(namespace, repository)
if not permission.can() and not model.repository_is_public(namespace,
repository):
if not permission.can() and not model.repository.repository_is_public(namespace, repository):
abort(403)
logger.debug('Looking up repo image')
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
logger.debug('Looking up image data')
try:
@ -438,7 +435,7 @@ def store_checksum(image_storage, checksum):
image_storage.save()
@registry.route('/images/<image_id>/json', methods=['PUT'])
@v1_bp.route('/images/<image_id>/json', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
@anon_protect
@ -464,22 +461,23 @@ def put_image_json(namespace, repository, image_id):
logger.debug('Looking up repo image')
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if repo is None:
abort(404, 'Repository does not exist: %(namespace)s/%(repository)s', issue='no-repo',
namespace=namespace, repository=repository)
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not repo_image:
username = (get_authenticated_user() and get_authenticated_user().username or
get_grant_user_context())
logger.debug('Image not found, creating image with initiating user context: %s', username)
repo_image = model.find_create_or_link_image(image_id, repo, username, {},
store.preferred_locations[0])
repo_image = model.image.find_create_or_link_image(image_id, repo, username, {},
store.preferred_locations[0])
# Create a temporary tag to prevent this image from getting garbage collected while the push
# is in progress.
model.create_temporary_hidden_tag(repo, repo_image, app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
model.tag.create_temporary_hidden_tag(repo, repo_image,
app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
uuid = repo_image.storage.uuid
@ -492,7 +490,7 @@ def put_image_json(namespace, repository, image_id):
parent_image = None
if parent_id:
logger.debug('Looking up parent image')
parent_image = model.get_repo_image_extended(namespace, repository, parent_id)
parent_image = model.image.get_repo_image_extended(namespace, repository, parent_id)
parent_uuid = parent_image and parent_image.storage.uuid
parent_locations = parent_image and parent_image.storage.locations
@ -522,9 +520,8 @@ def put_image_json(namespace, repository, image_id):
command = json.dumps(command_list) if command_list else None
logger.debug('Setting image metadata')
model.set_image_metadata(image_id, namespace, repository,
data.get('created'), data.get('comment'), command,
parent_image)
model.image.set_image_metadata(image_id, namespace, repository, data.get('created'),
data.get('comment'), command, parent_image)
logger.debug('Putting json path')
store.put_content(repo_image.storage.locations, json_path, request.data)
@ -535,7 +532,7 @@ def put_image_json(namespace, repository, image_id):
generate_ancestry(image_id, uuid, repo_image.storage.locations, parent_id, parent_uuid,
parent_locations)
except IOError as ioe:
logger.debug('Error when generating ancestry: %s' % ioe.message)
logger.debug('Error when generating ancestry: %s', ioe.message)
abort(404)
logger.debug('Done')
@ -543,9 +540,9 @@ def put_image_json(namespace, repository, image_id):
def process_image_changes(namespace, repository, image_id):
logger.debug('Generating diffs for image: %s' % image_id)
logger.debug('Generating diffs for image: %s', image_id)
repo_image = model.get_repo_image_extended(namespace, repository, image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
if not repo_image:
logger.warning('No image for id: %s', image_id)
return None, None
@ -556,11 +553,11 @@ def process_image_changes(namespace, repository, image_id):
image_trie_path = store.image_file_trie_path(uuid)
if store.exists(repo_image.storage.locations, image_diffs_path):
logger.debug('Diffs already exist for image: %s' % image_id)
logger.debug('Diffs already exist for image: %s', image_id)
return image_trie_path, repo_image.storage.locations
image = model.get_image_by_id(namespace, repository, image_id)
parents = model.get_parent_images(namespace, repository, image)
image = model.image.get_image_by_id(namespace, repository, image_id)
parents = model.image.get_parent_images(namespace, repository, image)
# Compute the diffs and fs for the parent first if necessary
parent_trie_path = None

View file

@ -2,7 +2,7 @@
import logging
import json
from flask import abort, request, jsonify, make_response, Blueprint, session
from flask import abort, request, jsonify, make_response, session
from app import app
from util.names import parse_repository_name
@ -11,39 +11,36 @@ from auth.permissions import (ReadRepositoryPermission,
ModifyRepositoryPermission)
from data import model
from endpoints.decorators import anon_protect
from endpoints.v1 import v1_bp
logger = logging.getLogger(__name__)
tags = Blueprint('tags', __name__)
@tags.route('/repositories/<path:repository>/tags',
methods=['GET'])
@v1_bp.route('/repositories/<path:repository>/tags', methods=['GET'])
@process_auth
@anon_protect
@parse_repository_name
def get_tags(namespace, repository):
permission = ReadRepositoryPermission(namespace, repository)
if permission.can() or model.repository_is_public(namespace, repository):
tags = model.list_repository_tags(namespace, repository)
if permission.can() or model.repository.repository_is_public(namespace, repository):
tags = model.tag.list_repository_tags(namespace, repository)
tag_map = {tag.name: tag.image.docker_image_id for tag in tags}
return jsonify(tag_map)
abort(403)
@tags.route('/repositories/<path:repository>/tags/<tag>',
methods=['GET'])
@v1_bp.route('/repositories/<path:repository>/tags/<tag>', methods=['GET'])
@process_auth
@anon_protect
@parse_repository_name
def get_tag(namespace, repository, tag):
permission = ReadRepositoryPermission(namespace, repository)
if permission.can() or model.repository_is_public(namespace, repository):
tag_image = model.get_tag_image(namespace, repository, tag)
if permission.can() or model.repository.repository_is_public(namespace, repository):
tag_image = model.tag.get_tag_image(namespace, repository, tag)
resp = make_response('"%s"' % tag_image.docker_image_id)
resp.headers['Content-Type'] = 'application/json'
return resp
@ -51,8 +48,7 @@ def get_tag(namespace, repository, tag):
abort(403)
@tags.route('/repositories/<path:repository>/tags/<tag>',
methods=['PUT'])
@v1_bp.route('/repositories/<path:repository>/tags/<tag>', methods=['PUT'])
@process_auth
@anon_protect
@parse_repository_name
@ -61,7 +57,7 @@ def put_tag(namespace, repository, tag):
if permission.can():
docker_image_id = json.loads(request.data)
model.create_or_update_tag(namespace, repository, tag, docker_image_id)
model.tag.create_or_update_tag(namespace, repository, tag, docker_image_id)
# Store the updated tag.
if not 'pushed_tags' in session:
@ -74,8 +70,7 @@ def put_tag(namespace, repository, tag):
abort(403)
@tags.route('/repositories/<path:repository>/tags/<tag>',
methods=['DELETE'])
@v1_bp.route('/repositories/<path:repository>/tags/<tag>', methods=['DELETE'])
@process_auth
@anon_protect
@parse_repository_name
@ -83,8 +78,8 @@ def delete_tag(namespace, repository, tag):
permission = ModifyRepositoryPermission(namespace, repository)
if permission.can():
model.delete_tag(namespace, repository, tag)
model.garbage_collect_repository(namespace, repository)
model.tag.delete_tag(namespace, repository, tag)
model.repository.garbage_collect_repository(namespace, repository)
return make_response('Deleted', 200)

69
endpoints/v2/__init__.py Normal file
View file

@ -0,0 +1,69 @@
# XXX This code is not yet ready to be run in production, and should remain disabled until such
# XXX time as this notice is removed.
import logging
from flask import Blueprint, make_response, url_for, request
from functools import wraps
from urlparse import urlparse
from endpoints.decorators import anon_protect, anon_allowed
from auth.jwt_auth import process_jwt_auth
from auth.auth_context import get_grant_user_context
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
AdministerRepositoryPermission)
from data import model
from util.http import abort
logger = logging.getLogger(__name__)
v2_bp = Blueprint('v2', __name__)
def _require_repo_permission(permission_class, allow_public=False):
  """ Decorator factory enforcing a repository permission on a v2 view.

      The wrapped view must accept (namespace, repo_name, ...). When `allow_public`
      is True, public repositories are accessible even without the permission.
      Aborts with 401 when access is denied.
  """
  def wrapper(func):
    @wraps(func)
    def wrapped(namespace, repo_name, *args, **kwargs):
      logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace,
                   repo_name)
      permission = permission_class(namespace, repo_name)
      if permission.can():
        return func(namespace, repo_name, *args, **kwargs)
      # Public repos are only consulted when the caller lacks the permission.
      if allow_public and model.repository.repository_is_public(namespace, repo_name):
        return func(namespace, repo_name, *args, **kwargs)
      raise abort(401)
    return wrapped
  return wrapper
# Repo-scoped access decorators; reads additionally allow public repositories.
require_repo_read = _require_repo_permission(ReadRepositoryPermission, True)
require_repo_write = _require_repo_permission(ModifyRepositoryPermission)
require_repo_admin = _require_repo_permission(AdministerRepositoryPermission)
def get_input_stream(flask_request):
  """ Return the request body stream to read an upload from.

      For chunked transfer encoding there is no Content-Length, so the raw WSGI
      input is read directly; otherwise Werkzeug's wrapped stream is used.
  """
  is_chunked = flask_request.headers.get('transfer-encoding') == 'chunked'
  return flask_request.environ['wsgi.input'] if is_chunked else flask_request.stream
@v2_bp.route('/')
@process_jwt_auth
@anon_allowed
def v2_support_enabled():
  """ Base v2 endpoint: 200 when a JWT grant is present, else 401 with a Bearer challenge. """
  has_grant = get_grant_user_context() is not None
  response = make_response('true', 200 if has_grant else 401)
  if not has_grant:
    # Tell the client where to obtain a token so it can retry the request.
    realm_hostname = urlparse(request.url).netloc
    realm_auth_path = url_for('v2.generate_registry_jwt')
    challenge = 'Bearer realm="{0}{1}",service="quay"'.format(realm_hostname, realm_auth_path)
    response.headers['WWW-Authenticate'] = challenge
  response.headers['Docker-Distribution-API-Version'] = 'registry/2.0'
  return response
from endpoints.v2 import v2auth
from endpoints.v2 import manifest
from endpoints.v2 import blob

83
endpoints/v2/blob.py Normal file
View file

@ -0,0 +1,83 @@
# XXX This code is not yet ready to be run in production, and should remain disabled until such
# XXX time as this notice is removed.
import logging
from flask import make_response, url_for, request
from app import storage, app
from data import model
from digest import digest_tools
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from auth.jwt_auth import process_jwt_auth
from endpoints.decorators import anon_protect
from util.http import abort
logger = logging.getLogger(__name__)
# Blob routes are keyed by content digest; the regex URL converter validates the
# digest format (algorithm:hex) directly in the route.
BASE_BLOB_ROUTE = '/<namespace>/<repo_name>/blobs/<regex("{0}"):digest>'
BLOB_DIGEST_ROUTE = BASE_BLOB_ROUTE.format(digest_tools.DIGEST_PATTERN)
@v2_bp.route(BLOB_DIGEST_ROUTE, methods=['HEAD'])
@process_jwt_auth
@require_repo_read
@anon_protect
def check_blob_existence(namespace, repo_name, digest):
  """ HEAD: report whether a blob with the given digest exists in the repo.

      Returns an empty 200 when the blob exists, 404 otherwise.
  """
  try:
    # The lookup raises BlobDoesNotExist when absent; the returned record itself
    # is not needed here (previously bound to an unused local).
    model.blob.get_repo_blob_by_digest(namespace, repo_name, digest)
    # The response body must be empty for a successful HEAD request
    return make_response('')
  except model.BlobDoesNotExist:
    abort(404)
@v2_bp.route(BLOB_DIGEST_ROUTE, methods=['GET'])
@process_jwt_auth
@require_repo_read
@anon_protect
def download_blob(namespace, repo_name, digest):
  """ GET: stream the blob content for the given digest.

      Currently a stub that returns an empty 200 response.
  """
  # TODO Implement this
  return make_response('')
@v2_bp.route('/<namespace>/<repo_name>/blobs/uploads/', methods=['POST'])
@process_jwt_auth
@require_repo_write
@anon_protect
def start_blob_upload(namespace, repo_name):
  """ Begin a chunked blob upload and point the client at the chunk endpoint. """
  upload_uuid = storage.initiate_chunked_upload(storage.preferred_locations[0])
  chunk_url = url_for('v2.upload_chunk', namespace=namespace, repo_name=repo_name,
                      upload_uuid=upload_uuid)
  response = make_response('', 202)
  response.headers['Location'] = chunk_url
  response.headers['Range'] = 'bytes=0-0'
  response.headers['Docker-Upload-UUID'] = upload_uuid
  return response
@v2_bp.route('/<namespace>/<repo_name>/blobs/uploads/<upload_uuid>', methods=['PUT'])
@process_jwt_auth
@require_repo_write
@anon_protect
def upload_chunk(namespace, repo_name, upload_uuid):
  """ PUT: stream upload data into an in-progress chunked upload.

      When the client supplies a ?digest= query arg, the upload is finalized:
      the blob is moved to its content-addressed path, recorded in the model
      with a temporary link, and a 201 is returned. Otherwise a 202 signals
      that more chunks are expected.
  """
  digest = request.args.get('digest', None)
  upload_location = storage.preferred_locations[0]
  # Offset 0 and length -1: this handler writes the whole request body as one
  # chunk; the return value is not currently used.
  bytes_written = storage.stream_upload_chunk(upload_location, upload_uuid, 0, -1,
                                              get_input_stream(request))
  if digest is not None:
    final_blob_location = digest_tools.content_path(digest)
    # Order matters: the storage must be finalized before the model row is
    # created, so the record never points at an incomplete blob.
    storage.complete_chunked_upload(upload_location, upload_uuid, final_blob_location, digest)
    model.blob.store_blob_record_and_temp_link(namespace, repo_name, digest, upload_location,
                                               app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
    response = make_response('', 201)
    response.headers['Docker-Content-Digest'] = digest
    response.headers['Location'] = url_for('v2.download_blob', namespace=namespace,
                                           repo_name=repo_name, digest=digest)
    return response
  return make_response('', 202)

110
endpoints/v2/manifest.py Normal file
View file

@ -0,0 +1,110 @@
# XXX This code is not yet ready to be run in production, and should remain disabled until such
# XXX time as this notice is removed.
import logging
import re
import jwt.utils
import yaml
from flask import make_response, request
from app import storage
from auth.jwt_auth import process_jwt_auth
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from digest import digest_tools
logger = logging.getLogger(__name__)
# Docker tag grammar: a word character followed by up to 127 word chars, dots, or dashes.
VALID_TAG_PATTERN = r'[\w][\w.-]{0,127}'
VALID_TAG_REGEX = re.compile(VALID_TAG_PATTERN)
class SignedManifest(object):
  """ Wrapper around a signed (JWS) Docker v2 schema-1 manifest blob.

      Parses the raw manifest bytes, exposing its signatures, repository
      namespace/name, tag, and the exact signed payload bytes.
  """

  SIGNATURES_KEY = 'signatures'
  PROTECTED_KEY = 'protected'
  FORMAT_LENGTH_KEY = 'formatLength'
  FORMAT_TAIL_KEY = 'formatTail'
  REPO_NAME_KEY = 'name'
  REPO_TAG_KEY = 'tag'

  def __init__(self, manifest_bytes):
    self._bytes = manifest_bytes
    # NOTE(review): parsed with yaml.safe_load rather than json — YAML is a
    # superset of JSON, but presumably JSON input is expected; confirm.
    parsed = yaml.safe_load(manifest_bytes)
    self._signatures = parsed[self.SIGNATURES_KEY]
    # Assumes the manifest 'name' is exactly 'namespace/repo' with a single
    # slash; anything else raises ValueError here — TODO confirm with callers.
    self._namespace, self._repo_name = parsed[self.REPO_NAME_KEY].split('/')
    self._tag = parsed[self.REPO_TAG_KEY]
    self._validate()

  def _validate(self):
    # Currently a no-op placeholder; no signature verification is performed.
    pass

  @property
  def signatures(self):
    # Raw list of JWS signature dicts from the manifest.
    return self._signatures

  @property
  def namespace(self):
    return self._namespace

  @property
  def repo_name(self):
    return self._repo_name

  @property
  def tag(self):
    return self._tag

  @property
  def payload(self):
    """ Reconstruct the exact byte sequence that was signed.

        Per the JWS "detached payload" scheme: the first formatLength bytes of
        the manifest, concatenated with the base64url-decoded formatTail from
        the first signature's protected header.
    """
    protected = self._signatures[0][self.PROTECTED_KEY]
    parsed_protected = yaml.safe_load(jwt.utils.base64url_decode(protected))
    logger.debug('parsed_protected: %s', parsed_protected)
    signed_content_head = self._bytes[:parsed_protected[self.FORMAT_LENGTH_KEY]]
    logger.debug('signed content head: %s', signed_content_head)
    signed_content_tail = jwt.utils.base64url_decode(parsed_protected[self.FORMAT_TAIL_KEY])
    logger.debug('signed content tail: %s', signed_content_tail)
    return signed_content_head + signed_content_tail
@v2_bp.route('/<namespace>/<repo_name>/manifests/<regex("' + VALID_TAG_PATTERN + '"):tag_name>',
             methods=['GET'])
@process_jwt_auth
@require_repo_read
@anon_protect
def fetch_manifest_by_tagname(namespace, repo_name, tag_name):
  """ GET: fetch the manifest for a tag. Currently a stub echoing the tag name. """
  logger.debug('Fetching tag manifest with name: %s', tag_name)
  body = 'Manifest {0}'.format(tag_name)
  return make_response(body)
@v2_bp.route('/<namespace>/<repo_name>/manifests/<regex("' + VALID_TAG_PATTERN + '"):tag_name>',
             methods=['PUT'])
@process_jwt_auth
@require_repo_write
@anon_protect
def write_manifest_by_tagname(namespace, repo_name, tag_name):
  """ PUT: accept a signed manifest for a tag.

      Parses and digests the manifest but does not yet persist it.
  """
  manifest = SignedManifest(request.data)
  manifest_digest = digest_tools.sha256_digest(manifest.payload)

  response = make_response('OK', 202)
  response.headers['Docker-Content-Digest'] = manifest_digest
  # NOTE(review): placeholder Location header — should point at the stored
  # manifest URL before this endpoint is enabled in production.
  response.headers['Location'] = 'https://fun.com'
  return response
# @v2_bp.route('/<namespace>/<repo_name>/manifests/<regex("' + digest_tools.DIGEST_PATTERN + '"):tag_digest>',
# methods=['PUT'])
# @process_jwt_auth
# @require_repo_write
# @anon_protect
# def write_manifest(namespace, repo_name, tag_digest):
# logger.debug('Writing tag manifest with name: %s', tag_digest)
# manifest_path = digest_tools.content_path(tag_digest)
# storage.stream_write('local_us', manifest_path, get_input_stream(request))
# return make_response('Manifest {0}'.format(tag_digest))

112
endpoints/v2/v2auth.py Normal file
View file

@ -0,0 +1,112 @@
# XXX This code is not yet ready to be run in production, and should remain disabled until such
# XXX time as this notice is removed.
import logging
import re
import time
import jwt
from flask import request, jsonify, abort
from cachetools import lru_cache
from app import app
from data import model
from auth.auth import process_auth
from auth.auth_context import get_authenticated_user
from auth.permissions import (ModifyRepositoryPermission, ReadRepositoryPermission,
CreateRepositoryPermission)
from endpoints.v2 import v2_bp
from util.cache import no_cache
from util.names import parse_namespace_repository
logger = logging.getLogger(__name__)
# Matches Docker registry v2 token-auth scope strings of the form
#   repository:<namespace>/<repo>:<action>[,<action>...]
# where each action is 'push', 'pull', or '*'.
SCOPE_REGEX = re.compile(
    r'repository:([\.a-zA-Z0-9_\-]+/[\.a-zA-Z0-9_\-]+):(((push|pull|\*),)*(push|pull|\*))'
)
@lru_cache(maxsize=1)
def load_certificate_bytes(certificate_file_path):
  """ Returns the base64-encoded DER payload of the PEM certificate at the given
      path as a single unbroken string (armor lines and all newlines removed),
      which is the format the JWT 'x5c' header requires per RFC 7515.

      Fix: the previous implementation joined readlines()[1:-1], which keeps the
      trailing '\\n' on every line, leaving embedded newlines inside the payload
      (only the final one was rstripped) and breaking on trailing blank lines.
  """
  with open(certificate_file_path) as cert_file:
    return ''.join(line.strip() for line in cert_file
                   if line.strip() and not line.startswith('-----'))
@lru_cache(maxsize=1)
def load_private_key(private_key_file_path):
  """ Reads (and memoizes) the raw JWT signing key from the given path. """
  with open(private_key_file_path) as private_key_file:
    key_data = private_key_file.read()
  return key_data
@v2_bp.route('/auth')
@process_auth
@no_cache
def generate_registry_jwt():
  """ This endpoint will generate a JWT conforming to the Docker registry v2 auth spec:
      https://docs.docker.com/registry/spec/auth/token/

      Aborts with 400 on a malformed scope parameter and 403 when the
      authenticated user lacks the requested repository permission.
  """
  audience_param = request.args.get('service')
  logger.debug('Request audience: %s', audience_param)

  scope_param = request.args.get('scope')
  logger.debug('Scope request: %s', scope_param)

  user = get_authenticated_user()

  access = []
  if scope_param is not None:
    match = SCOPE_REGEX.match(scope_param)
    # NOTE: must not touch `match` before the None check — the old debug lines
    # called match.end() here and raised AttributeError on malformed scopes.
    if match is None or match.end() != len(scope_param):
      logger.warning('Unable to decode repository and actions: %s', scope_param)
      abort(400)

    logger.debug('Match: %s', match.groups())

    namespace_and_repo = match.group(1)
    actions = match.group(2).split(',')

    namespace, reponame = parse_namespace_repository(namespace_and_repo)
    if 'pull' in actions and 'push' in actions:
      repo = model.repository.get_repository(namespace, reponame)
      if repo:
        # Fix: permission objects are always truthy; .can() performs the actual
        # check (matching the permission.can() usage elsewhere in the codebase).
        if not ModifyRepositoryPermission(namespace, reponame).can():
          abort(403)
      else:
        if not CreateRepositoryPermission(namespace).can():
          abort(403)
        logger.debug('Creating repository: %s/%s', namespace, reponame)
        model.repository.create_repository(namespace, reponame, user)
    elif 'pull' in actions:
      if not ReadRepositoryPermission(namespace, reponame).can():
        abort(403)

    access.append({
      'type': 'repository',
      'name': namespace_and_repo,
      'actions': actions,
    })

  # Compute the timestamp once so 'nbf' and 'exp' are consistent; the token is
  # valid for a fixed 60-second window.
  issued_at = int(time.time())
  token_data = {
    'iss': 'token-issuer',
    'aud': audience_param,
    'nbf': issued_at,
    'exp': issued_at + 60,
    'sub': user.username,
    'access': access,
  }

  certificate = load_certificate_bytes(app.config['JWT_AUTH_CERTIFICATE_PATH'])
  token_headers = {
    'x5c': [certificate],
  }

  private_key = load_private_key(app.config['JWT_AUTH_PRIVATE_KEY_PATH'])
  return jsonify({'token': jwt.encode(token_data, private_key, 'RS256', headers=token_headers)})

View file

@ -7,8 +7,7 @@ from flask import redirect, Blueprint, abort, send_file, make_response
from app import app, signer
from auth.auth import process_auth
from auth.permissions import ReadRepositoryPermission
from data import model
from data import database
from data import model, database
from endpoints.trackhelper import track_and_log
from endpoints.decorators import anon_protect
from storage import Storage
@ -22,6 +21,7 @@ from formats.aci import ACIImage
verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)
def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, image_json,
image_id_list):
store = Storage(app)
@ -29,7 +29,8 @@ def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, imag
# For performance reasons, we load the full image list here, cache it, then disconnect from
# the database.
with database.UseThenDisconnect(app.config):
image_list = list(model.get_matching_repository_images(namespace, repository, image_id_list))
image_list = list(model.image.get_matching_repository_images(namespace, repository,
image_id_list))
image_list.sort(key=lambda image: image_id_list.index(image.docker_image_id))
@ -48,7 +49,7 @@ def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, imag
yield current_image_stream
stream = formatter.build_stream(namespace, repository, tag, synthetic_image_id, image_json,
get_next_image, get_next_layer)
get_next_image, get_next_layer)
return stream.read
@ -66,11 +67,11 @@ def _sign_sythentic_image(verb, linked_storage_uuid, queue_file):
if not queue_file.raised_exception:
with database.UseThenDisconnect(app.config):
try:
derived = model.get_storage_by_uuid(linked_storage_uuid)
except model.InvalidImageException:
derived = model.storage.get_storage_by_uuid(linked_storage_uuid)
except model.storage.InvalidImageException:
return
signature_entry = model.find_or_create_storage_signature(derived, signer.name)
signature_entry = model.storage.find_or_create_storage_signature(derived, signer.name)
signature_entry.signature = signature
signature_entry.uploading = False
signature_entry.save()
@ -83,7 +84,7 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location
logger.debug('Exception when building %s image %s: %s', verb, linked_storage_uuid, ex)
with database.UseThenDisconnect(app.config):
model.delete_derived_storage_by_uuid(linked_storage_uuid)
model.storage.delete_derived_storage_by_uuid(linked_storage_uuid)
queue_file.add_exception_handler(handle_exception)
@ -95,7 +96,7 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location
# Setup the database (since this is a new process) and then disconnect immediately
# once the operation completes.
with database.UseThenDisconnect(app.config):
done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
done_uploading = model.storage.get_storage_by_uuid(linked_storage_uuid)
done_uploading.uploading = False
done_uploading.save()
@ -103,17 +104,17 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location
def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
permission = ReadRepositoryPermission(namespace, repository)
if not permission.can() and not model.repository_is_public(namespace, repository):
if not permission.can() and not model.repository.repository_is_public(namespace, repository):
abort(403)
# Lookup the requested tag.
try:
tag_image = model.get_tag_image(namespace, repository, tag)
tag_image = model.tag.get_tag_image(namespace, repository, tag)
except model.DataModelException:
abort(404)
# Lookup the tag's image and storage.
repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
repo_image = model.image.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
if not repo_image:
abort(404)
@ -139,7 +140,7 @@ def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwarg
(repo_image, tag_image, image_json) = result
# Lookup the derived image storage for the verb.
derived = model.find_derived_storage(repo_image.storage, verb)
derived = model.storage.find_derived_storage(repo_image.storage, verb)
if derived is None or derived.uploading:
return make_response('', 202)
@ -148,7 +149,7 @@ def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwarg
abort(404)
# Lookup the signature for the verb.
signature_entry = model.lookup_storage_signature(derived, signer.name)
signature_entry = model.storage.lookup_storage_signature(derived, signer.name)
if signature_entry is None:
abort(404)
@ -166,8 +167,8 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, **kwargs)
# Lookup/create the derived image storage for the verb.
derived = model.find_or_create_derived_storage(repo_image.storage, verb,
store.preferred_locations[0])
derived = model.storage.find_or_create_derived_storage(repo_image.storage, verb,
store.preferred_locations[0])
if not derived.uploading:
logger.debug('Derived %s image %s exists in storage', verb, derived.uuid)
@ -206,8 +207,8 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
# and send the results to the client and storage.
args = (formatter, namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
queue_process = QueueProcess(_open_stream,
8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
args, finished=_cleanup)
8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
args, finished=_cleanup)
client_queue_file = QueueFile(queue_process.create_queue(), 'client')
storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

View file

@ -9,7 +9,6 @@ from health.healthcheck import get_healthchecker
from data import model
from data.database import db
from data.model.oauth import DatabaseAuthorizationProvider
from app import app, billing as stripe, build_logs, avatar, signer, log_archive
from auth.auth import require_session_login, process_oauth
from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
@ -281,24 +280,24 @@ def robots():
@route_show_if(features.BUILD_SUPPORT)
@require_session_login
def buildlogs(build_uuid):
build = model.get_repository_build(build_uuid)
if not build:
found_build = model.build.get_repository_build(build_uuid)
if not found_build:
abort(403)
repo = build.repository
repo = found_build.repository
if not ModifyRepositoryPermission(repo.namespace_user.username, repo.name).can():
abort(403)
# If the logs have been archived, just return a URL of the completed archive
if build.logs_archived:
return redirect(log_archive.get_file_url(build.uuid))
if found_build.logs_archived:
return redirect(log_archive.get_file_url(found_build.uuid))
_, logs = build_logs.get_log_entries(build.uuid, 0)
_, logs = build_logs.get_log_entries(found_build.uuid, 0)
response = jsonify({
'logs': [log for log in logs]
})
response.headers["Content-Disposition"] = "attachment;filename=" + build.uuid + ".json"
response.headers["Content-Disposition"] = "attachment;filename=" + found_build.uuid + ".json"
return response
@ -314,7 +313,7 @@ def receipt():
if invoice_id:
invoice = stripe.Invoice.retrieve(invoice_id)
if invoice:
user_or_org = model.get_user_or_org_by_customer_id(invoice.customer)
user_or_org = model.user.get_user_or_org_by_customer_id(invoice.customer)
if user_or_org:
if user_or_org.organization:
@ -341,8 +340,8 @@ def confirm_repo_email():
record = None
try:
record = model.confirm_email_authorization_for_repo(code)
except model.DataModelException as ex:
record = model.repository.confirm_email_authorization_for_repo(code)
except DataModelException as ex:
return render_page_template('confirmerror.html', error_message=ex.message)
message = """
@ -363,8 +362,8 @@ def confirm_email():
new_email = None
try:
user, new_email, old_email = model.confirm_user_email(code)
except model.DataModelException as ex:
user, new_email, old_email = model.user.confirm_user_email(code)
except DataModelException as ex:
return render_page_template('confirmerror.html', error_message=ex.message)
if new_email:
@ -379,7 +378,7 @@ def confirm_email():
@web.route('/recovery', methods=['GET'])
def confirm_recovery():
code = request.values['code']
user = model.validate_reset_code(code)
user = model.user.validate_reset_code(code)
if user:
common_login(user)
@ -394,22 +393,22 @@ def confirm_recovery():
@anon_protect
def build_status_badge(namespace, repository):
token = request.args.get('token', None)
is_public = model.repository_is_public(namespace, repository)
is_public = model.repository.repository_is_public(namespace, repository)
if not is_public:
repo = model.get_repository(namespace, repository)
repo = model.repository.get_repository(namespace, repository)
if not repo or token != repo.badge_token:
abort(404)
# Lookup the tags for the repository.
tags = model.list_repository_tags(namespace, repository)
tags = model.tag.list_repository_tags(namespace, repository)
is_empty = len(list(tags)) == 0
build = model.get_recent_repository_build(namespace, repository)
recent_build = model.build.get_recent_repository_build(namespace, repository)
if not is_empty and (not build or build.phase == 'complete'):
if not is_empty and (not recent_build or recent_build.phase == 'complete'):
status_name = 'ready'
elif build and build.phase == 'error':
elif recent_build and recent_build.phase == 'error':
status_name = 'failed'
elif build and build.phase != 'complete':
elif recent_build and recent_build.phase != 'complete':
status_name = 'building'
else:
status_name = 'none'
@ -419,7 +418,7 @@ def build_status_badge(namespace, repository):
return response
class FlaskAuthorizationProvider(DatabaseAuthorizationProvider):
class FlaskAuthorizationProvider(model.oauth.DatabaseAuthorizationProvider):
def get_authorized_user(self):
return current_user.db_user()
@ -579,14 +578,13 @@ def download_logs_archive():
def attach_bitbucket_trigger(namespace, repository_name):
permission = AdministerRepositoryPermission(namespace, repository_name)
if permission.can():
repo = model.get_repository(namespace, repository_name)
repo = model.repository.get_repository(namespace, repository_name)
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository_name)
abort(404, message=msg)
trigger = model.create_build_trigger(repo, BitbucketBuildTrigger.service_name(),
None,
current_user.db_user())
trigger = model.build.create_build_trigger(repo, BitbucketBuildTrigger.service_name(), None,
current_user.db_user())
try:
oauth_info = BuildTriggerHandler.get_handler(trigger).get_oauth_url()
@ -596,7 +594,7 @@ def attach_bitbucket_trigger(namespace, repository_name):
}
access_token_secret = oauth_info['access_token_secret']
model.update_build_trigger(trigger, config, auth_token=access_token_secret)
model.build.update_build_trigger(trigger, config, auth_token=access_token_secret)
return redirect(oauth_info['url'])
except TriggerProviderException:
@ -612,13 +610,13 @@ def attach_bitbucket_trigger(namespace, repository_name):
def attach_custom_build_trigger(namespace, repository_name):
permission = AdministerRepositoryPermission(namespace, repository_name)
if permission.can():
repo = model.get_repository(namespace, repository_name)
repo = model.repository.get_repository(namespace, repository_name)
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository_name)
abort(404, message=msg)
trigger = model.create_build_trigger(repo, CustomBuildTrigger.service_name(),
None, current_user.db_user())
trigger = model.build.create_build_trigger(repo, CustomBuildTrigger.service_name(),
None, current_user.db_user())
repo_path = '%s/%s' % (namespace, repository_name)
full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
@ -634,16 +632,16 @@ def attach_custom_build_trigger(namespace, repository_name):
@process_oauth
@parse_repository_name_and_tag
@anon_protect
def redirect_to_repository(namespace, reponame, tag):
def redirect_to_repository(namespace, reponame, tag_name):
permission = ReadRepositoryPermission(namespace, reponame)
is_public = model.repository_is_public(namespace, reponame)
is_public = model.repository.repository_is_public(namespace, reponame)
if request.args.get('ac-discovery', 0) == 1:
return index('')
if permission.can() or is_public:
repository_name = '/'.join([namespace, reponame])
return redirect(url_for('web.repository', path=repository_name, tag=tag))
return redirect(url_for('web.repository', path=repository_name, tag=tag_name))
abort(404)
@ -653,7 +651,7 @@ def redirect_to_repository(namespace, reponame, tag):
@process_oauth
@anon_protect
def redirect_to_namespace(namespace):
user_or_org = model.get_user_or_org(namespace)
user_or_org = model.user.get_user_or_org(namespace)
if not user_or_org:
abort(404)

View file

@ -25,7 +25,7 @@ def stripe_webhook():
logger.debug('Stripe webhook call: %s', request_data)
customer_id = request_data.get('data', {}).get('object', {}).get('customer', None)
user = model.get_user_or_org_by_customer_id(customer_id) if customer_id else None
user = model.user.get_user_or_org_by_customer_id(customer_id) if customer_id else None
event_type = request_data['type'] if 'type' in request_data else None
if event_type == 'charge.succeeded':
@ -73,7 +73,7 @@ def build_trigger_webhook(trigger_uuid, **kwargs):
logger.debug('Webhook received with uuid %s', trigger_uuid)
try:
trigger = model.get_build_trigger(trigger_uuid)
trigger = model.build.get_build_trigger(trigger_uuid)
except model.InvalidBuildTriggerException:
# It is ok to return 404 here, since letting an attacker know that a trigger UUID is valid
# doesn't leak anything
@ -101,8 +101,8 @@ def build_trigger_webhook(trigger_uuid, **kwargs):
# The payload was malformed
abort(400, message=ipe.message)
pull_robot_name = model.get_pull_robot_name(trigger)
repo = model.get_repository(namespace, repository)
pull_robot_name = model.build.get_pull_robot_name(trigger)
repo = model.repository.get_repository(namespace, repository)
start_build(repo, prepared, pull_robot_name=pull_robot_name)
return make_response('Okay')

View file

@ -1,9 +1,11 @@
import logging
from data import model
from data.model import health
from app import build_logs
logger = logging.getLogger(__name__)
def _check_registry_gunicorn(app):
""" Returns the status of the registry gunicorn workers. """
# Compute the URL for checking the registry endpoint. We append a port if and only if the
@ -24,7 +26,7 @@ def _check_registry_gunicorn(app):
def _check_database(app):
""" Returns the status of the database, as accessed from this instance. """
return model.check_health(app.config)
return health.check_health(app.config)
def _check_redis(app):
""" Returns the status of Redis, as accessed from this instance. """

357
initdb.py
View file

@ -6,14 +6,18 @@ import calendar
import os
from datetime import datetime, timedelta
from email.utils import formatdate
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
savepoint_sqlite, savepoint)
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
savepoint)
from itertools import count
from uuid import UUID
from threading import Event
from data.database import *
from email.utils import formatdate
from data.database import (db, all_models, Role, TeamRole, Visibility, LoginService,
BuildTriggerService, AccessTokenKind, LogEntryKind, ImageStorageLocation,
ImageStorageTransformation, ImageStorageSignatureKind,
ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind)
from data import model
from data.model import oauth
from app import app, storage as store
from workers import repositoryactioncounter
@ -21,6 +25,7 @@ from workers import repositoryactioncounter
logger = logging.getLogger(__name__)
SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
for i in range(1, 10)]
@ -39,53 +44,56 @@ TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'
IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))
def __gen_checksum(image_id):
h = hashlib.md5(image_id)
return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()
csum = hashlib.md5(image_id)
return 'tarsum+sha256:' + csum.hexdigest() + csum.hexdigest()
def __gen_image_id(repo, image_num):
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
h = hashlib.md5(str_to_hash)
return h.hexdigest() + h.hexdigest()
img_id = hashlib.md5(str_to_hash)
return img_id.hexdigest() + img_id.hexdigest()
def __gen_image_uuid(repo, image_num):
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
h = hashlib.md5(str_to_hash)
return UUID(bytes=h.digest())
img_uuid = hashlib.md5(str_to_hash)
return UUID(bytes=img_uuid.digest())
global_image_num = count()
global_image_num = [0]
def __create_subtree(repo, structure, creator_username, parent, tag_map):
num_nodes, subtrees, last_node_tags = structure
# create the nodes
for i in range(num_nodes):
image_num = global_image_num[0]
global_image_num[0] += 1
for model_num in range(num_nodes):
image_num = next(global_image_num)
docker_image_id = __gen_image_id(repo, image_num)
logger.debug('new docker id: %s' % docker_image_id)
logger.debug('new docker id: %s', docker_image_id)
checksum = __gen_checksum(docker_image_id)
new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
new_image = model.image.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
new_image_locations = new_image.storage.locations
new_image.storage.uuid = __gen_image_uuid(repo, image_num)
new_image.storage.uploading = False
new_image.storage.checksum = checksum
new_image.storage.save()
creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=i)
creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=model_num)
command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
command = json.dumps(command_list) if command_list else None
new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
str(creation_time), 'no comment', command, parent)
new_image = model.image.set_image_metadata(docker_image_id, repo.namespace_user.username,
repo.name, str(creation_time), 'no comment', command,
parent)
compressed_size = random.randrange(1, 1024 * 1024 * 1024)
model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
int(compressed_size * 1.4))
model.image.set_image_size(docker_image_id, repo.namespace_user.username, repo.name,
compressed_size, int(compressed_size * 1.4))
# Populate the diff file
diff_path = store.image_file_diffs_path(new_image.storage.uuid)
@ -101,55 +109,52 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
last_node_tags = [last_node_tags]
for tag_name in last_node_tags:
tag = model.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
new_image.docker_image_id)
new_tag = model.tag.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
new_image.docker_image_id)
tag_map[tag_name] = tag
tag_map[tag_name] = new_tag
for tag_name in last_node_tags:
if tag_name[0] == '#':
tag = tag_map[tag_name]
tag.name = tag_name[1:]
tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
tag.lifetime_start_ts = tag.lifetime_end_ts - 10
tag.save()
found_tag = tag_map[tag_name]
found_tag.name = tag_name[1:]
found_tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
found_tag.lifetime_start_ts = found_tag.lifetime_end_ts - 10
found_tag.save()
for subtree in subtrees:
__create_subtree(repo, subtree, creator_username, new_image, tag_map)
def __generate_repository(user, name, description, is_public, permissions,
structure):
repo = model.create_repository(user.username, name, user)
def __generate_repository(user_obj, name, description, is_public, permissions, structure):
repo = model.repository.create_repository(user_obj.username, name, user_obj)
if is_public:
model.set_repository_visibility(repo, 'public')
model.repository.set_repository_visibility(repo, 'public')
if description:
repo.description = description
repo.save()
for delegate, role in permissions:
model.set_user_repo_permission(delegate.username, user.username, name,
role)
model.permission.set_user_repo_permission(delegate.username, user_obj.username, name, role)
if isinstance(structure, list):
for s in structure:
__create_subtree(repo, s, user.username, None, {})
for leaf in structure:
__create_subtree(repo, leaf, user_obj.username, None, {})
else:
__create_subtree(repo, structure, user.username, None, {})
__create_subtree(repo, structure, user_obj.username, None, {})
return repo
db_initialized_for_testing = False
db_initialized_for_testing = Event()
testcases = {}
def finished_database_for_testing(testcase):
""" Called when a testcase has finished using the database, indicating that
any changes should be discarded.
"""
global testcases
testcases[testcase]['savepoint'].__exit__(True, None, None)
def setup_database_for_testing(testcase):
@ -158,12 +163,10 @@ def setup_database_for_testing(testcase):
"""
# Sanity check to make sure we're not killing our prod db
db = model.db
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
raise RuntimeError('Attempted to wipe production database!')
global db_initialized_for_testing
if not db_initialized_for_testing:
if not db_initialized_for_testing.is_set():
logger.debug('Setting up DB for testing.')
# Setup the database.
@ -173,18 +176,18 @@ def setup_database_for_testing(testcase):
# Enable foreign key constraints.
if not IS_TESTING_REAL_DATABASE:
model.db.obj.execute_sql('PRAGMA foreign_keys = ON;')
db.obj.execute_sql('PRAGMA foreign_keys = ON;')
db_initialized_for_testing = True
db_initialized_for_testing.set()
# Create a savepoint for the testcase.
test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)
global testcases
testcases[testcase] = {}
testcases[testcase]['savepoint'] = test_savepoint
testcases[testcase]['savepoint'].__enter__()
def initialize_database():
create_model_tables(all_models)
@ -314,8 +317,7 @@ def wipe_database():
logger.debug('Wiping all data from the DB.')
# Sanity check to make sure we're not killing our prod db
db = model.db
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
raise RuntimeError('Attempted to wipe production database!')
drop_model_tables(all_models, fail_silently=True)
@ -324,52 +326,51 @@ def wipe_database():
def populate_database():
logger.debug('Populating the DB with test data.')
new_user_1 = model.create_user('devtable', 'password',
'jschorr@devtable.com')
new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
new_user_1.verified = True
new_user_1.stripe_id = TEST_STRIPE_ID
new_user_1.save()
disabled_user = model.create_user('disabled', 'password',
'jschorr+disabled@devtable.com')
disabled_user = model.user.create_user('disabled', 'password', 'jschorr+disabled@devtable.com')
disabled_user.verified = True
disabled_user.enabled = False
disabled_user.save()
dtrobot = model.create_robot('dtrobot', new_user_1)
dtrobot = model.user.create_robot('dtrobot', new_user_1)
new_user_2 = model.create_user('public', 'password',
'jacob.moshenko@gmail.com')
new_user_2 = model.user.create_user('public', 'password', 'jacob.moshenko@gmail.com')
new_user_2.verified = True
new_user_2.save()
new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
new_user_3 = model.user.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
new_user_3.verified = True
new_user_3.save()
model.create_robot('anotherrobot', new_user_3)
model.user.create_robot('anotherrobot', new_user_3)
new_user_4 = model.create_user('randomuser', 'password', 'no4@thanks.com')
new_user_4 = model.user.create_user('randomuser', 'password', 'no4@thanks.com')
new_user_4.verified = True
new_user_4.save()
new_user_5 = model.create_user('unverified', 'password', 'no5@thanks.com')
new_user_5 = model.user.create_user('unverified', 'password', 'no5@thanks.com')
new_user_5.save()
reader = model.create_user('reader', 'password', 'no1@thanks.com')
reader = model.user.create_user('reader', 'password', 'no1@thanks.com')
reader.verified = True
reader.save()
creatoruser = model.create_user('creator', 'password', 'noc@thanks.com')
creatoruser = model.user.create_user('creator', 'password', 'noc@thanks.com')
creatoruser.verified = True
creatoruser.save()
outside_org = model.create_user('outsideorg', 'password', 'no2@thanks.com')
outside_org = model.user.create_user('outsideorg', 'password', 'no2@thanks.com')
outside_org.verified = True
outside_org.save()
model.create_notification('test_notification', new_user_1,
metadata={'some':'value', 'arr':[1, 2, 3], 'obj':{'a':1, 'b':2}})
model.notification.create_notification('test_notification', new_user_1,
metadata={'some':'value',
'arr':[1, 2, 3],
'obj':{'a':1, 'b':2}})
from_date = datetime.utcnow()
to_date = from_date + timedelta(hours=1)
@ -378,7 +379,7 @@ def populate_database():
'to_date': formatdate(calendar.timegm(to_date.utctimetuple())),
'reason': 'database migration'
}
model.create_notification('maintenance', new_user_1, metadata=notification_metadata)
model.notification.create_notification('maintenance', new_user_1, metadata=notification_metadata)
__generate_repository(new_user_4, 'randomrepo', 'Random repo repository.', False,
@ -434,10 +435,10 @@ def populate_database():
'Empty repository which is building.',
False, [], (0, [], None))
token = model.create_access_token(building, 'write', 'build-worker')
new_token = model.token.create_access_token(building, 'write', 'build-worker')
trigger = model.create_build_trigger(building, 'github', '123authtoken',
new_user_1, pull_robot=dtrobot[0])
trigger = model.build.create_build_trigger(building, 'github', '123authtoken', new_user_1,
pull_robot=dtrobot[0])
trigger.config = json.dumps({
'build_source': 'jakedt/testconnect',
'subdir': '',
@ -456,164 +457,160 @@ def populate_database():
}
}
record = model.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr@devtable.com')
record = model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr@devtable.com')
record.confirmed = True
record.save()
model.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr+other@devtable.com')
model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr+other@devtable.com')
build2 = model.create_repository_build(building, token, job_config,
'68daeebd-a5b9-457f-80a0-4363b882f8ea',
'build-name', trigger)
build2 = model.build.create_repository_build(building, new_token, job_config,
'68daeebd-a5b9-457f-80a0-4363b882f8ea',
'build-name', trigger)
build2.uuid = 'deadpork-dead-pork-dead-porkdeadpork'
build2.save()
build3 = model.create_repository_build(building, token, job_config,
'f49d07f9-93da-474d-ad5f-c852107c3892',
'build-name', trigger)
build3 = model.build.create_repository_build(building, new_token, job_config,
'f49d07f9-93da-474d-ad5f-c852107c3892',
'build-name', trigger)
build3.uuid = 'deadduck-dead-duck-dead-duckdeadduck'
build3.save()
build = model.create_repository_build(building, token, job_config,
'701dcc3724fb4f2ea6c31400528343cd',
'build-name', trigger)
build.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
build.save()
build1 = model.build.create_repository_build(building, new_token, job_config,
'701dcc3724fb4f2ea6c31400528343cd', 'build-name',
trigger)
build1.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
build1.save()
org = model.create_organization('buynlarge', 'quay@devtable.com',
new_user_1)
org = model.organization.create_organization('buynlarge', 'quay@devtable.com', new_user_1)
org.stripe_id = TEST_STRIPE_ID
org.save()
model.create_robot('coolrobot', org)
model.user.create_robot('coolrobot', org)
oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
'http://localhost:8000/o2c.html', client_id='deadbeef')
model.oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
'http://localhost:8000/o2c.html', client_id='deadbeef')
oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
'http://localhost:8000/o2c.html', client_id='deadpork',
description='This is another test application')
model.oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
'http://localhost:8000/o2c.html', client_id='deadpork',
description='This is another test application')
model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')
model.create_robot('neworgrobot', org)
model.user.create_robot('neworgrobot', org)
ownerbot = model.create_robot('ownerbot', org)[0]
creatorbot = model.create_robot('creatorbot', org)[0]
ownerbot = model.user.create_robot('ownerbot', org)[0]
creatorbot = model.user.create_robot('creatorbot', org)[0]
owners = model.get_organization_team('buynlarge', 'owners')
owners = model.team.get_organization_team('buynlarge', 'owners')
owners.description = 'Owners have unfetterd access across the entire org.'
owners.save()
org_repo = __generate_repository(org, 'orgrepo',
'Repository owned by an org.', False,
[(outside_org, 'read')],
(4, [], ['latest', 'prod']))
org_repo = __generate_repository(org, 'orgrepo', 'Repository owned by an org.', False,
[(outside_org, 'read')], (4, [], ['latest', 'prod']))
org_repo2 = __generate_repository(org, 'anotherorgrepo',
'Another repository owned by an org.', False,
[],
(4, [], ['latest', 'prod']))
__generate_repository(org, 'anotherorgrepo', 'Another repository owned by an org.', False,
[], (4, [], ['latest', 'prod']))
creators = model.create_team('creators', org, 'creator',
'Creators of orgrepo.')
creators = model.team.create_team('creators', org, 'creator', 'Creators of orgrepo.')
reader_team = model.create_team('readers', org, 'member',
'Readers of orgrepo.')
model.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username, org_repo.name,
'read')
reader_team = model.team.create_team('readers', org, 'member', 'Readers of orgrepo.')
model.permission.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username,
org_repo.name, 'read')
model.add_user_to_team(new_user_2, reader_team)
model.add_user_to_team(reader, reader_team)
model.add_user_to_team(ownerbot, owners)
model.add_user_to_team(creatorbot, creators)
model.add_user_to_team(creatoruser, creators)
model.team.add_user_to_team(new_user_2, reader_team)
model.team.add_user_to_team(reader, reader_team)
model.team.add_user_to_team(ownerbot, owners)
model.team.add_user_to_team(creatorbot, creators)
model.team.add_user_to_team(creatoruser, creators)
__generate_repository(new_user_1, 'superwide', None, False, [],
[(10, [], 'latest2'),
(2, [], 'latest3'),
(2, [(1, [], 'latest11'), (2, [], 'latest12')],
'latest4'),
(2, [], 'latest5'),
(2, [], 'latest6'),
(2, [], 'latest7'),
(2, [], 'latest8'),
(2, [], 'latest9'),
(2, [], 'latest10'),
(2, [], 'latest13'),
(2, [], 'latest14'),
(2, [], 'latest15'),
(2, [], 'latest16'),
(2, [], 'latest17'),
(2, [], 'latest18'),])
(2, [], 'latest3'),
(2, [(1, [], 'latest11'), (2, [], 'latest12')],
'latest4'),
(2, [], 'latest5'),
(2, [], 'latest6'),
(2, [], 'latest7'),
(2, [], 'latest8'),
(2, [], 'latest9'),
(2, [], 'latest10'),
(2, [], 'latest13'),
(2, [], 'latest14'),
(2, [], 'latest15'),
(2, [], 'latest16'),
(2, [], 'latest17'),
(2, [], 'latest18')])
model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_user=new_user_2)
model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_team=reader_team)
model.add_prototype_permission(org, 'write', activating_user=new_user_2, delegate_user=new_user_1)
model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
delegate_user=new_user_2)
model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
delegate_team=reader_team)
model.permission.add_prototype_permission(org, 'write', activating_user=new_user_2,
delegate_user=new_user_1)
today = datetime.today()
week_ago = today - timedelta(6)
six_ago = today - timedelta(5)
four_ago = today - timedelta(4)
model.log_action('org_create_team', org.username, performer=new_user_1,
timestamp=week_ago, metadata={'team': 'readers'})
model.log.log_action('org_create_team', org.username, performer=new_user_1,
timestamp=week_ago, metadata={'team': 'readers'})
model.log_action('org_set_team_role', org.username, performer=new_user_1,
timestamp=week_ago,
metadata={'team': 'readers', 'role': 'read'})
model.log.log_action('org_set_team_role', org.username, performer=new_user_1,
timestamp=week_ago,
metadata={'team': 'readers', 'role': 'read'})
model.log_action('create_repo', org.username, performer=new_user_1,
repository=org_repo, timestamp=week_ago,
metadata={'namespace': org.username, 'repo': 'orgrepo'})
model.log.log_action('create_repo', org.username, performer=new_user_1,
repository=org_repo, timestamp=week_ago,
metadata={'namespace': org.username, 'repo': 'orgrepo'})
model.log_action('change_repo_permission', org.username,
performer=new_user_2, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_1.username,
'repo': 'orgrepo', 'role': 'admin'})
model.log.log_action('change_repo_permission', org.username,
performer=new_user_2, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_1.username,
'repo': 'orgrepo', 'role': 'admin'})
model.log_action('change_repo_permission', org.username,
performer=new_user_1, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'role': 'read'})
model.log.log_action('change_repo_permission', org.username,
performer=new_user_1, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'role': 'read'})
model.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
repository=org_repo, timestamp=four_ago,
metadata={'repo': 'orgrepo', 'token': 'deploytoken'})
model.log.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
repository=org_repo, timestamp=four_ago,
metadata={'repo': 'orgrepo', 'token': 'deploytoken'})
model.log_action('push_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log.log_action('push_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log_action('pull_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log.log_action('pull_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token': 'sometoken', 'token_code': 'somecode',
'repo': 'orgrepo'})
model.log.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token': 'sometoken', 'token_code': 'somecode',
'repo': 'orgrepo'})
model.log_action('delete_tag', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'tag': 'sometag'})
model.log.log_action('delete_tag', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'tag': 'sometag'})
model.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token_code': 'somecode', 'repo': 'orgrepo'})
model.log.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token_code': 'somecode', 'repo': 'orgrepo'})
model.log_action('build_dockerfile', new_user_1.username, repository=building,
timestamp=today,
metadata={'repo': 'building', 'namespace': new_user_1.username,
'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
'service': trigger.service.name})
model.log.log_action('build_dockerfile', new_user_1.username, repository=building,
timestamp=today,
metadata={'repo': 'building', 'namespace': new_user_1.username,
'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
'service': trigger.service.name})
while repositoryactioncounter.count_repository_actions():
pass
@ -622,7 +619,7 @@ if __name__ == '__main__':
log_level = getattr(logging, app.config['LOGGING_LEVEL'])
logging.basicConfig(level=log_level)
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
raise RuntimeError('Attempted to initialize production database!')
initialize_database()

View file

@ -6,10 +6,8 @@ from app import app as application
# Note: We need to import this module to make sure the decorators are registered.
import endpoints.decorated
from endpoints.index import index
from endpoints.tags import tags
from endpoints.registry import registry
from endpoints.v1 import v1_bp
# from endpoints.v2 import v2_bp
application.register_blueprint(index, url_prefix='/v1')
application.register_blueprint(tags, url_prefix='/v1')
application.register_blueprint(registry, url_prefix='/v1')
application.register_blueprint(v1_bp, url_prefix='/v1')
# application.register_blueprint(v2_bp, url_prefix='/v2')

View file

@ -51,3 +51,4 @@ python-swiftclient
python-keystoneclient
Flask-Testing
pyjwt
toposort

View file

@ -79,6 +79,7 @@ SQLAlchemy==1.0.6
stevedore==1.5.0
stringscore==0.1.0
stripe==1.22.3
toposort==1.4
trollius==1.0.4
tzlocal==1.2
urllib3==1.10.4

View file

@ -93,3 +93,28 @@ class BaseStorage(StoragePaths):
def get_checksum(self, path):
raise NotImplementedError
class DigestInvalidException(RuntimeError):
pass
class BaseStorageV2(BaseStorage):
def initiate_chunked_upload(self):
""" Start a new chunked upload, and return a handle with which the upload can be referenced.
"""
raise NotImplementedError
def stream_upload_chunk(self, uuid, offset, length, in_fp):
""" Upload the specified amount of data from the given file pointer to the chunked destination
specified, starting at the given offset. Returns the number of bytes written.
"""
raise NotImplementedError
def complete_chunked_upload(self, uuid, final_path, digest_to_verify):
""" Complete the chunked upload and store the final results in the path indicated.
"""
raise NotImplementedError

View file

@ -3,7 +3,7 @@ import logging
from functools import wraps
from storage.basestorage import StoragePaths, BaseStorage
from storage.basestorage import StoragePaths, BaseStorage, BaseStorageV2
logger = logging.getLogger(__name__)
@ -42,3 +42,6 @@ class DistributedStorage(StoragePaths):
remove = _location_aware(BaseStorage.remove)
get_checksum = _location_aware(BaseStorage.get_checksum)
get_supports_resumable_downloads = _location_aware(BaseStorage.get_supports_resumable_downloads)
initiate_chunked_upload = _location_aware(BaseStorageV2.initiate_chunked_upload)
stream_upload_chunk = _location_aware(BaseStorageV2.stream_upload_chunk)
complete_chunked_upload = _location_aware(BaseStorageV2.complete_chunked_upload)

View file

@ -2,11 +2,18 @@ import os
import shutil
import hashlib
import io
import logging
from storage.basestorage import BaseStorage
from uuid import uuid4
from storage.basestorage import BaseStorageV2
from digest import digest_tools
class LocalStorage(BaseStorage):
logger = logging.getLogger(__name__)
class LocalStorage(BaseStorageV2):
def __init__(self, storage_path):
self._root_path = storage_path
@ -46,15 +53,26 @@ class LocalStorage(BaseStorage):
def stream_write(self, path, fp, content_type=None, content_encoding=None):
# Size is mandatory
path = self._init_path(path, create=True)
with open(path, mode='wb') as f:
while True:
try:
buf = fp.read(self.buffer_size)
if not buf:
break
f.write(buf)
except IOError:
with open(path, mode='wb') as out_fp:
self._stream_write_to_fp(fp, out_fp)
def _stream_write_to_fp(self, in_fp, out_fp, num_bytes=-1):
""" Copy the specified number of bytes from the input file stream to the output stream. If
num_bytes < 0 copy until the stream ends.
"""
bytes_copied = 0
bytes_remaining = num_bytes
while bytes_remaining > 0 or num_bytes < 0:
try:
buf = in_fp.read(self.buffer_size)
if not buf:
break
out_fp.write(buf)
bytes_copied += len(buf)
except IOError:
break
return bytes_copied
def list_directory(self, path=None):
path = self._init_path(path)
@ -92,3 +110,40 @@ class LocalStorage(BaseStorage):
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
def _rel_upload_path(self, uuid):
return 'uploads/{0}'.format(uuid)
def initiate_chunked_upload(self):
new_uuid = str(uuid4())
# Just create an empty file at the path
with open(self._init_path(self._rel_upload_path(new_uuid), create=True), 'w'):
pass
return new_uuid
def stream_upload_chunk(self, uuid, offset, length, in_fp):
with open(self._init_path(self._rel_upload_path(uuid)), 'r+b') as upload_storage:
upload_storage.seek(offset)
return self._stream_write_to_fp(in_fp, upload_storage, length)
def complete_chunked_upload(self, uuid, final_path, digest_to_verify):
content_path = self._rel_upload_path(uuid)
content_digest = digest_tools.sha256_digest_from_generator(self.stream_read(content_path))
if not digest_tools.digests_equal(content_digest, digest_to_verify):
msg = 'Given: {0} Computed: {1}'.format(digest_to_verify, content_digest)
raise digest_tools.InvalidDigestException(msg)
final_path_abs = self._init_path(final_path, create=True)
if not self.exists(final_path_abs):
logger.debug('Moving content into place at path: %s', final_path_abs)
shutil.move(self._init_path(content_path), final_path_abs)
else:
logger.debug('Content already exists at path: %s', final_path_abs)

Binary file not shown.

View file

@ -6,9 +6,7 @@ from flask.blueprints import Blueprint
from flask.ext.testing import LiveServerTestCase
from app import app
from endpoints.registry import registry
from endpoints.index import index
from endpoints.tags import tags
from endpoints.v1 import v1_bp
from endpoints.api import api_bp
from initdb import wipe_database, initialize_database, populate_database
from endpoints.csrf import generate_csrf_token
@ -20,12 +18,10 @@ import features
import tarfile
from cStringIO import StringIO
from util.checksums import compute_simple
from digest.checksums import compute_simple
try:
app.register_blueprint(index, url_prefix='/v1')
app.register_blueprint(tags, url_prefix='/v1')
app.register_blueprint(registry, url_prefix='/v1')
app.register_blueprint(v1_bp, url_prefix='/v1')
app.register_blueprint(api_bp, url_prefix='/api')
except ValueError:
# Blueprint was already registered

View file

@ -112,123 +112,123 @@ class IndexTestSpec(object):
def build_index_specs():
return [
IndexTestSpec(url_for('registry.get_image_layer', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.get_image_layer', image_id=FAKE_IMAGE_ID),
PUBLIC_REPO, 404, 404, 404, 404),
IndexTestSpec(url_for('registry.get_image_layer', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.get_image_layer', image_id=FAKE_IMAGE_ID),
PRIVATE_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('registry.get_image_layer', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.get_image_layer', image_id=FAKE_IMAGE_ID),
ORG_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('registry.put_image_layer', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.put_image_layer', image_id=FAKE_IMAGE_ID),
PUBLIC_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_layer', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.put_image_layer', image_id=FAKE_IMAGE_ID),
PRIVATE_REPO, 403, 403, 403, 404).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_layer', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.put_image_layer', image_id=FAKE_IMAGE_ID),
ORG_REPO, 403, 403, 403, 404).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_checksum',
IndexTestSpec(url_for('v1.put_image_checksum',
image_id=FAKE_IMAGE_ID),
PUBLIC_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_checksum',
IndexTestSpec(url_for('v1.put_image_checksum',
image_id=FAKE_IMAGE_ID),
PRIVATE_REPO, 403, 403, 403, 400).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_checksum',
IndexTestSpec(url_for('v1.put_image_checksum',
image_id=FAKE_IMAGE_ID),
ORG_REPO, 403, 403, 403, 400).set_method('PUT'),
IndexTestSpec(url_for('registry.get_image_json', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.get_image_json', image_id=FAKE_IMAGE_ID),
PUBLIC_REPO, 404, 404, 404, 404),
IndexTestSpec(url_for('registry.get_image_json', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.get_image_json', image_id=FAKE_IMAGE_ID),
PRIVATE_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('registry.get_image_json', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.get_image_json', image_id=FAKE_IMAGE_ID),
ORG_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('registry.get_image_ancestry',
IndexTestSpec(url_for('v1.get_image_ancestry',
image_id=FAKE_IMAGE_ID),
PUBLIC_REPO, 404, 404, 404, 404),
IndexTestSpec(url_for('registry.get_image_ancestry',
IndexTestSpec(url_for('v1.get_image_ancestry',
image_id=FAKE_IMAGE_ID),
PRIVATE_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('registry.get_image_ancestry',
IndexTestSpec(url_for('v1.get_image_ancestry',
image_id=FAKE_IMAGE_ID),
ORG_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('registry.put_image_json', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.put_image_json', image_id=FAKE_IMAGE_ID),
PUBLIC_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_json', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.put_image_json', image_id=FAKE_IMAGE_ID),
PRIVATE_REPO, 403, 403, 403, 400).set_method('PUT'),
IndexTestSpec(url_for('registry.put_image_json', image_id=FAKE_IMAGE_ID),
IndexTestSpec(url_for('v1.put_image_json', image_id=FAKE_IMAGE_ID),
ORG_REPO, 403, 403, 403, 400).set_method('PUT'),
IndexTestSpec(url_for('index.create_user'), NO_REPO, 400, 400, 400,
IndexTestSpec(url_for('v1.create_user'), NO_REPO, 400, 400, 400,
400).set_method('POST').set_data_from_obj(NEW_USER_DETAILS),
IndexTestSpec(url_for('index.get_user'), NO_REPO, 404, 200, 200, 200),
IndexTestSpec(url_for('v1.get_user'), NO_REPO, 404, 200, 200, 200),
IndexTestSpec(url_for('index.update_user', username=FAKE_USERNAME),
IndexTestSpec(url_for('v1.update_user', username=FAKE_USERNAME),
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('index.create_repository', repository=PUBLIC_REPO),
IndexTestSpec(url_for('v1.create_repository', repository=PUBLIC_REPO),
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('index.create_repository', repository=PRIVATE_REPO),
IndexTestSpec(url_for('v1.create_repository', repository=PRIVATE_REPO),
NO_REPO, 403, 403, 403, 201).set_method('PUT'),
IndexTestSpec(url_for('index.create_repository', repository=ORG_REPO),
IndexTestSpec(url_for('v1.create_repository', repository=ORG_REPO),
NO_REPO, 403, 403, 403, 201).set_method('PUT'),
IndexTestSpec(url_for('index.update_images', repository=PUBLIC_REPO),
IndexTestSpec(url_for('v1.update_images', repository=PUBLIC_REPO),
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('index.update_images', repository=PRIVATE_REPO),
IndexTestSpec(url_for('v1.update_images', repository=PRIVATE_REPO),
NO_REPO, 403, 403, 403, 204).set_method('PUT'),
IndexTestSpec(url_for('index.update_images', repository=ORG_REPO), NO_REPO,
IndexTestSpec(url_for('v1.update_images', repository=ORG_REPO), NO_REPO,
403, 403, 403, 204).set_method('PUT'),
IndexTestSpec(url_for('index.get_repository_images',
IndexTestSpec(url_for('v1.get_repository_images',
repository=PUBLIC_REPO),
NO_REPO, 200, 200, 200, 200),
IndexTestSpec(url_for('index.get_repository_images',
IndexTestSpec(url_for('v1.get_repository_images',
repository=PRIVATE_REPO)),
IndexTestSpec(url_for('index.get_repository_images', repository=ORG_REPO)),
IndexTestSpec(url_for('v1.get_repository_images', repository=ORG_REPO)),
IndexTestSpec(url_for('index.delete_repository_images',
IndexTestSpec(url_for('v1.delete_repository_images',
repository=PUBLIC_REPO),
NO_REPO, 501, 501, 501, 501).set_method('DELETE'),
IndexTestSpec(url_for('index.put_repository_auth', repository=PUBLIC_REPO),
IndexTestSpec(url_for('v1.put_repository_auth', repository=PUBLIC_REPO),
NO_REPO, 501, 501, 501, 501).set_method('PUT'),
IndexTestSpec(url_for('index.get_search'), NO_REPO, 200, 200, 200, 200),
IndexTestSpec(url_for('v1.get_search'), NO_REPO, 200, 200, 200, 200),
IndexTestSpec(url_for('index.ping'), NO_REPO, 200, 200, 200, 200),
IndexTestSpec(url_for('v1.ping'), NO_REPO, 200, 200, 200, 200),
IndexTestSpec(url_for('tags.get_tags', repository=PUBLIC_REPO), NO_REPO,
IndexTestSpec(url_for('v1.get_tags', repository=PUBLIC_REPO), NO_REPO,
200, 200, 200, 200),
IndexTestSpec(url_for('tags.get_tags', repository=PRIVATE_REPO)),
IndexTestSpec(url_for('tags.get_tags', repository=ORG_REPO)),
IndexTestSpec(url_for('v1.get_tags', repository=PRIVATE_REPO)),
IndexTestSpec(url_for('v1.get_tags', repository=ORG_REPO)),
IndexTestSpec(url_for('tags.get_tag', repository=PUBLIC_REPO,
IndexTestSpec(url_for('v1.get_tag', repository=PUBLIC_REPO,
tag=FAKE_TAG_NAME), NO_REPO, 400, 400, 400, 400),
IndexTestSpec(url_for('tags.get_tag', repository=PRIVATE_REPO,
IndexTestSpec(url_for('v1.get_tag', repository=PRIVATE_REPO,
tag=FAKE_TAG_NAME), NO_REPO, 403, 403, 400, 400),
IndexTestSpec(url_for('tags.get_tag', repository=ORG_REPO,
IndexTestSpec(url_for('v1.get_tag', repository=ORG_REPO,
tag=FAKE_TAG_NAME), NO_REPO, 403, 403, 400, 400),
IndexTestSpec(url_for('tags.put_tag', repository=PUBLIC_REPO,
IndexTestSpec(url_for('v1.put_tag', repository=PUBLIC_REPO,
tag=FAKE_TAG_NAME),
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
IndexTestSpec(url_for('tags.put_tag', repository=PRIVATE_REPO,
IndexTestSpec(url_for('v1.put_tag', repository=PRIVATE_REPO,
tag=FAKE_TAG_NAME),
NO_REPO, 403, 403, 403, 400).set_method('PUT'),
IndexTestSpec(url_for('tags.put_tag', repository=ORG_REPO,
IndexTestSpec(url_for('v1.put_tag', repository=ORG_REPO,
tag=FAKE_TAG_NAME),
NO_REPO, 403, 403, 403, 400).set_method('PUT'),
IndexTestSpec(url_for('tags.delete_tag', repository=PUBLIC_REPO,
IndexTestSpec(url_for('v1.delete_tag', repository=PUBLIC_REPO,
tag=FAKE_TAG_NAME),
NO_REPO, 403, 403, 403, 403).set_method('DELETE'),
IndexTestSpec(url_for('tags.delete_tag', repository=PRIVATE_REPO,
IndexTestSpec(url_for('v1.delete_tag', repository=PRIVATE_REPO,
tag=FAKE_TAG_NAME),
NO_REPO, 403, 403, 403, 400).set_method('DELETE'),
IndexTestSpec(url_for('tags.delete_tag', repository=ORG_REPO,
IndexTestSpec(url_for('v1.delete_tag', repository=ORG_REPO,
tag=FAKE_TAG_NAME),
NO_REPO, 403, 403, 403, 400).set_method('DELETE'),
]

View file

@ -1,8 +1,6 @@
import unittest
from endpoints.tags import tags
from endpoints.registry import registry
from endpoints.index import index
from endpoints.v1 import v1_bp
from endpoints.verbs import verbs
@ -23,9 +21,7 @@ class TestAnonymousAccessChecked(unittest.TestCase):
deferred_function(Checker(self))
def test_anonymous_access_checked(self):
self.verifyBlueprint(tags)
self.verifyBlueprint(registry)
self.verifyBlueprint(index)
self.verifyBlueprint(v1_bp)
self.verifyBlueprint(verbs)
if __name__ == '__main__':

View file

@ -82,7 +82,7 @@ class ApiTestCase(unittest.TestCase):
with client.session_transaction() as sess:
if auth_username:
loaded = model.get_user(auth_username)
loaded = model.user.get_user(auth_username)
sess['user_id'] = loaded.uuid
sess['login_time'] = datetime.datetime.now()
sess[CSRF_TOKEN_KEY] = CSRF_TOKEN

File diff suppressed because it is too large Load diff

View file

@ -74,17 +74,17 @@ class TestAuth(ApiTestCase):
expected_code=403)
def test_basic_auth_user(self):
user = model.get_user(ADMIN_ACCESS_USER)
user = model.user.get_user(ADMIN_ACCESS_USER)
self.conduct_basic_auth(ADMIN_ACCESS_USER, 'password')
self.verify_identity(user.uuid)
def test_basic_auth_disabled_user(self):
user = model.get_user(DISABLED_USER)
user = model.user.get_user(DISABLED_USER)
self.conduct_basic_auth(DISABLED_USER, 'password')
self.verify_no_identity()
def test_basic_auth_token(self):
token = model.create_delegate_token(ADMIN_ACCESS_USER, 'simple', 'sometoken')
token = model.token.create_delegate_token(ADMIN_ACCESS_USER, 'simple', 'sometoken')
self.conduct_basic_auth('$token', token.code)
self.verify_identity(token.code)
@ -101,26 +101,26 @@ class TestAuth(ApiTestCase):
self.verify_no_identity()
def test_oauth_valid_user(self):
user = model.get_user(ADMIN_ACCESS_USER)
user = model.user.get_user(ADMIN_ACCESS_USER)
self.create_oauth(user)
self.conduct_basic_auth('$oauthtoken', 'access1234')
self.verify_identity(user.uuid)
def test_oauth_disabled_user(self):
user = model.get_user(DISABLED_USER)
user = model.user.get_user(DISABLED_USER)
self.create_oauth(user)
self.conduct_basic_auth('$oauthtoken', 'access1234')
self.verify_no_identity()
def test_basic_auth_robot(self):
user = model.get_user(ADMIN_ACCESS_USER)
robot, passcode = model.get_robot('dtrobot', user)
user = model.user.get_user(ADMIN_ACCESS_USER)
robot, passcode = model.user.get_robot('dtrobot', user)
self.conduct_basic_auth(robot.username, passcode)
self.verify_identity(robot.uuid)
def test_basic_auth_robot_invalidcode(self):
user = model.get_user(ADMIN_ACCESS_USER)
robot, _ = model.get_robot('dtrobot', user)
user = model.user.get_user(ADMIN_ACCESS_USER)
robot, _ = model.user.get_robot('dtrobot', user)
self.conduct_basic_auth(robot.username, 'someinvalidcode')
self.verify_no_identity()

48
test/test_digest_tools.py Normal file
View file

@ -0,0 +1,48 @@
import unittest
from digest.digest_tools import parse_digest, content_path, InvalidDigestException
class TestParseDigest(unittest.TestCase):
def test_parse_good(self):
examples = [
('tarsum.v123123+sha1:123deadbeef', (True, 'v123123', 'sha1', '123deadbeef')),
('tarsum.v1+sha256:123123', (True, 'v1', 'sha256', '123123')),
('tarsum.v0+md5:abc', (True, 'v0', 'md5', 'abc')),
('sha1:123deadbeef', (False, None, 'sha1', '123deadbeef')),
('sha256:123123', (False, None, 'sha256', '123123')),
('md5:abc', (False, None, 'md5', 'abc')),
]
for digest, output in examples:
self.assertEquals(parse_digest(digest), output)
def test_parse_fail(self):
examples = [
'tarsum.v++sha1:123deadbeef',
'.v1+sha256:123123',
'tarsum.v+md5:abc',
'sha1:123deadbeefzxczxv',
'sha256123123',
'tarsum.v1+',
'tarsum.v1123+sha1:',
]
for bad_digest in examples:
with self.assertRaises(InvalidDigestException):
parse_digest(bad_digest)
class TestDigestPath(unittest.TestCase):
def test_paths(self):
examples = [
('tarsum.v123123+sha1:123deadbeef', 'tarsum/v123123/sha1/12/123deadbeef'),
('tarsum.v1+sha256:123123', 'tarsum/v1/sha256/12/123123'),
('tarsum.v0+md5:abc', 'tarsum/v0/md5/ab/abc'),
('sha1:123deadbeef', 'sha1/12/123deadbeef'),
('sha256:123123', 'sha256/12/123123'),
('md5:abc', 'md5/ab/abc'),
('md5:1', 'md5/01/1'),
]
for digest, path in examples:
self.assertEquals(content_path(digest), path)

View file

@ -4,14 +4,10 @@ from app import app
from util.names import parse_namespace_repository
from initdb import setup_database_for_testing, finished_database_for_testing
from specs import build_index_specs
from endpoints.registry import registry
from endpoints.index import index
from endpoints.tags import tags
from endpoints.v1 import v1_bp
app.register_blueprint(index, url_prefix='/v1')
app.register_blueprint(tags, url_prefix='/v1')
app.register_blueprint(registry, url_prefix='/v1')
app.register_blueprint(v1_bp, url_prefix='/v1')
NO_ACCESS_USER = 'freshuser'

View file

@ -4,10 +4,10 @@ import jwt
import base64
from app import app
from flask import Flask, abort, jsonify, request, make_response
from flask import Flask, jsonify, request, make_response
from flask.ext.testing import LiveServerTestCase
from initdb import setup_database_for_testing, finished_database_for_testing
from data.users import JWTAuthUsers
from data.users import ExternalJWTAuthN
from tempfile import NamedTemporaryFile
from Crypto.PublicKey import RSA
from datetime import datetime, timedelta
@ -31,8 +31,8 @@ class JWTAuthTestCase(LiveServerTestCase):
def create_app(self):
users = [
{ 'name': 'cooluser', 'email': 'user@domain.com', 'password': 'password' },
{ 'name': 'some.neat.user', 'email': 'neat@domain.com', 'password': 'foobar'}
{'name': 'cooluser', 'email': 'user@domain.com', 'password': 'password'},
{'name': 'some.neat.user', 'email': 'neat@domain.com', 'password': 'foobar'}
]
jwt_app = Flask('testjwt')
@ -82,10 +82,8 @@ class JWTAuthTestCase(LiveServerTestCase):
self.session = requests.Session()
self.jwt_auth = JWTAuthUsers(
self.get_server_url() + '/user/verify',
'authy', '', app.config['HTTPCLIENT'],
JWTAuthTestCase.public_key.name)
self.jwt_auth = ExternalJWTAuthN(self.get_server_url() + '/user/verify', 'authy', '',
app.config['HTTPCLIENT'], 300, JWTAuthTestCase.public_key.name)
def tearDown(self):
finished_database_for_testing(self)

View file

@ -13,8 +13,8 @@ REPO = 'somerepo'
class TestGarbageColection(unittest.TestCase):
@staticmethod
def _set_tag_expiration_policy(namespace, expiration_s):
namespace_user = model.get_user(namespace)
model.change_user_tag_expiration(namespace_user, expiration_s)
namespace_user = model.user.get_user(namespace)
model.user.change_user_tag_expiration(namespace_user, expiration_s)
def setUp(self):
setup_database_for_testing(self)
@ -32,14 +32,14 @@ class TestGarbageColection(unittest.TestCase):
def createImage(self, docker_image_id, repository_obj, username):
preferred = storage.preferred_locations[0]
image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {},
preferred)
image = model.image.find_create_or_link_image(docker_image_id, repository_obj, username, {},
preferred)
image.storage.uploading = False
image.storage.save()
# Create derived images as well.
for i in range(0, 2):
model.find_or_create_derived_storage(image.storage, 'squash', preferred)
model.storage.find_or_create_derived_storage(image.storage, 'squash', preferred)
# Add some additional placements to the image.
for location_name in ['local_eu']:
@ -55,8 +55,8 @@ class TestGarbageColection(unittest.TestCase):
return image.storage
def createRepository(self, namespace=ADMIN_ACCESS_USER, name=REPO, **kwargs):
user = model.get_user(namespace)
repo = model.create_repository(namespace, name, user)
user = model.user.get_user(namespace)
repo = model.repository.create_repository(namespace, name, user)
# Populate the repository with the tags.
image_map = {}
@ -69,35 +69,37 @@ class TestGarbageColection(unittest.TestCase):
image_map[image_id] = self.createImage(image_id, repo, namespace)
# Set the ancestors for the image.
parent = model.set_image_metadata(image_id, namespace, name, '', '', '', parent=parent)
parent = model.image.set_image_metadata(image_id, namespace, name, '', '', '',
parent=parent)
# Set the tag for the image.
model.create_or_update_tag(namespace, name, tag_name, image_ids[-1])
model.tag.create_or_update_tag(namespace, name, tag_name, image_ids[-1])
return repo
def gcNow(self, repository):
model.garbage_collect_repository(repository.namespace_user.username, repository.name)
model.repository.garbage_collect_repository(repository.namespace_user.username, repository.name)
def deleteTag(self, repository, tag):
model.delete_tag(repository.namespace_user.username, repository.name, tag)
model.garbage_collect_repository(repository.namespace_user.username, repository.name)
model.tag.delete_tag(repository.namespace_user.username, repository.name, tag)
model.repository.garbage_collect_repository(repository.namespace_user.username, repository.name)
def moveTag(self, repository, tag, docker_image_id):
model.create_or_update_tag(repository.namespace_user.username, repository.name, tag,
model.tag.create_or_update_tag(repository.namespace_user.username, repository.name, tag,
docker_image_id)
model.garbage_collect_repository(repository.namespace_user.username, repository.name)
model.repository.garbage_collect_repository(repository.namespace_user.username, repository.name)
def assertNotDeleted(self, repository, *args):
for docker_image_id in args:
self.assertTrue(bool(model.get_image_by_id(repository.namespace_user.username,
repository.name, docker_image_id)))
self.assertTrue(bool(model.image.get_image_by_id(repository.namespace_user.username,
repository.name, docker_image_id)))
def assertDeleted(self, repository, *args):
for docker_image_id in args:
try:
# Verify the image is missing when accessed by the repository.
model.get_image_by_id(repository.namespace_user.username, repository.name, docker_image_id)
model.image.get_image_by_id(repository.namespace_user.username, repository.name,
docker_image_id)
except model.DataModelException:
return

View file

@ -42,10 +42,11 @@ class TestImageSharing(unittest.TestCase):
self.ctx.__exit__(True, None, None)
def createStorage(self, docker_image_id, repository=REPO, username=ADMIN_ACCESS_USER):
repository_obj = model.get_repository(repository.split('/')[0], repository.split('/')[1])
repository_obj = model.repository.get_repository(repository.split('/')[0],
repository.split('/')[1])
preferred = storage.preferred_locations[0]
image = model.find_create_or_link_image(docker_image_id, repository_obj, username, {},
preferred)
image = model.image.find_create_or_link_image(docker_image_id, repository_obj, username, {},
preferred)
image.storage.uploading = False
image.storage.save()
return image.storage

View file

@ -28,8 +28,8 @@ class TestImageTree(unittest.TestCase):
return None
def test_longest_path_simple_repo(self):
all_images = list(model.get_repository_images(NAMESPACE, SIMPLE_REPO))
all_tags = list(model.list_repository_tags(NAMESPACE, SIMPLE_REPO))
all_images = list(model.image.get_repository_images(NAMESPACE, SIMPLE_REPO))
all_tags = list(model.tag.list_repository_tags(NAMESPACE, SIMPLE_REPO))
tree = ImageTree(all_images, all_tags)
base_image = self._get_base_image(all_images)
@ -47,8 +47,8 @@ class TestImageTree(unittest.TestCase):
self.assertEquals('latest', tree.tag_containing_image(result[-1]))
def test_longest_path_complex_repo(self):
all_images = list(model.get_repository_images(NAMESPACE, COMPLEX_REPO))
all_tags = list(model.list_repository_tags(NAMESPACE, COMPLEX_REPO))
all_images = list(model.image.get_repository_images(NAMESPACE, COMPLEX_REPO))
all_tags = list(model.tag.list_repository_tags(NAMESPACE, COMPLEX_REPO))
tree = ImageTree(all_images, all_tags)
base_image = self._get_base_image(all_images)
@ -61,8 +61,8 @@ class TestImageTree(unittest.TestCase):
self.assertEquals('prod', tree.tag_containing_image(result[-1]))
def test_filtering(self):
all_images = list(model.get_repository_images(NAMESPACE, COMPLEX_REPO))
all_tags = list(model.list_repository_tags(NAMESPACE, COMPLEX_REPO))
all_images = list(model.image.get_repository_images(NAMESPACE, COMPLEX_REPO))
all_tags = list(model.tag.list_repository_tags(NAMESPACE, COMPLEX_REPO))
tree = ImageTree(all_images, all_tags, base_filter=1245)
base_image = self._get_base_image(all_images)
@ -74,8 +74,8 @@ class TestImageTree(unittest.TestCase):
self.assertEquals(0, len(result))
def test_find_tag_parent_image(self):
all_images = list(model.get_repository_images(NAMESPACE, COMPLEX_REPO))
all_tags = list(model.list_repository_tags(NAMESPACE, COMPLEX_REPO))
all_images = list(model.image.get_repository_images(NAMESPACE, COMPLEX_REPO))
all_tags = list(model.tag.list_repository_tags(NAMESPACE, COMPLEX_REPO))
tree = ImageTree(all_images, all_tags)
base_image = self._get_base_image(all_images)
@ -92,9 +92,9 @@ class TestImageTree(unittest.TestCase):
def test_longest_path_simple_repo_direct_lookup(self):
repository = model.get_repository(NAMESPACE, SIMPLE_REPO)
all_images = list(model.get_repository_images(NAMESPACE, SIMPLE_REPO))
all_tags = list(model.list_repository_tags(NAMESPACE, SIMPLE_REPO))
repository = model.repository.get_repository(NAMESPACE, SIMPLE_REPO)
all_images = list(model.image.get_repository_images(NAMESPACE, SIMPLE_REPO))
all_tags = list(model.tag.list_repository_tags(NAMESPACE, SIMPLE_REPO))
base_image = self._get_base_image(all_images)
tag_image = all_tags[0].image
@ -102,7 +102,7 @@ class TestImageTree(unittest.TestCase):
def checker(index, image):
return True
filtered_images = model.get_repository_images_without_placements(repository,
filtered_images = model.image.get_repository_images_without_placements(repository,
with_ancestor=base_image)
self.assertEquals(set([f.id for f in filtered_images]), set([a.id for a in all_images]))

View file

@ -15,8 +15,8 @@ UNSUPER_USERNAME = 'freshuser'
class TestSuperUserOps(unittest.TestCase):
def setUp(self):
setup_database_for_testing(self)
self._su = model.get_user(SUPER_USERNAME)
self._normie = model.get_user(UNSUPER_USERNAME)
self._su = model.user.get_user(SUPER_USERNAME)
self._normie = model.user.get_user(UNSUPER_USERNAME)
def tearDown(self):
finished_database_for_testing(self)

View file

@ -69,7 +69,7 @@ class TestBuildLogs(RedisBuildLogs):
if not is_get_status:
from data import model
build_obj = model.get_repository_build(self.test_build_id)
build_obj = model.build.get_repository_build(self.test_build_id)
build_obj.phase = phase
build_obj.save()

View file

@ -27,7 +27,8 @@ bad_count = 0
good_count = 0
def resolve_or_create(repo, docker_image_id, new_ancestry):
existing = model.get_repo_image_extended(repo.namespace_user.username, repo.name, docker_image_id)
existing = model.image.get_repo_image_extended(repo.namespace_user.username, repo.name,
docker_image_id)
if existing:
logger.debug('Found existing image: %s, %s', existing.id, docker_image_id)
return existing
@ -63,8 +64,8 @@ def all_ancestors_exist(ancestors):
cant_fix = []
for img in query:
try:
with_locations = model.get_repo_image_extended(img.repository.namespace_user.username,
img.repository.name, img.docker_image_id)
with_locations = model.image.get_repo_image_extended(img.repository.namespace_user.username,
img.repository.name, img.docker_image_id)
ancestry_storage = store.image_ancestry_path(img.storage.uuid)
if store.exists(with_locations.storage.locations, ancestry_storage):
full_ancestry = json.loads(store.get_content(with_locations.storage.locations,

View file

@ -18,7 +18,7 @@ def sendInvoice(invoice_id):
return
customer_id = invoice['customer']
user = model.get_user_or_org_by_customer_id(customer_id)
user = model.user.get_user_or_org_by_customer_id(customer_id)
if not user:
print 'No user found for customer %s' % (customer_id)
return

Some files were not shown because too many files have changed in this diff Show more