Merge branch 'master' into nolurk
commit c0e995c1d4

43 changed files with 1091 additions and 127 deletions

@@ -53,6 +53,7 @@ RUN mkdir /usr/local/nginx/logs/
 
 # Run the tests
 RUN TEST=true venv/bin/python -m unittest discover -f
+RUN TEST=true venv/bin/python -m test.registry_tests -f
 
 VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp", "/conf/etcd"]
ROADMAP.md (new file, +38 lines)
@@ -0,0 +1,38 @@
# Quay.io Roadmap

**work in progress**

### Short Term
- Framework for microservice decomposition
- Improve documentation
  - Ability to answer 80% of tickets with a link to the docs
  - Eliminate old UI screenshots/references
- Auth provider as a service
  - Registry v2 compatible

### Medium Term
- Registry v2 support
  - Forward and backward compatible with registry v1
- Support ACI push spec
  - Translate between ACI and docker images transparently
- Integrate docs with the search bar
  - Full text search?
- Running on top of Tectonic
- BitTorrent distribution support
- Fully launch our API
  - Versioned and backward compatible
  - Adequate documentation

### Long Term
- Become the Tectonic app store
  - Pods/apps as top level concept
  - Builds as top level concept
- Multiple Quay.io repos from a single git push
- Multi-step builds
  - build artifact
  - bundle artifact
  - test bundle
- Immediately consistent multi-region data availability
  - Cockroach?
- 2 factor auth
  - How to integrate with Docker CLI?
@@ -55,6 +55,7 @@ class BuildComponent(BaseComponent):
  def onConnect(self):
    self.join(self.builder_realm)

  @trollius.coroutine
  def onJoin(self, details):
    logger.debug('Registering methods and listeners for component %s', self.builder_realm)
    yield trollius.From(self.register(self._on_ready, u'io.quay.buildworker.ready'))

@@ -277,6 +278,9 @@ class BuildComponent(BaseComponent):
      # Send the notification that the build has completed successfully.
      self._current_job.send_notification('build_success', image_id=kwargs.get('image_id'))
    except ApplicationError as aex:
      build_id = self._current_job.repo_build.uuid
      logger.exception('Got remote exception for build: %s', build_id)

      worker_error = WorkerError(aex.error, aex.kwargs.get('base_error'))

      # Write the error to the log.

@@ -310,6 +314,7 @@ class BuildComponent(BaseComponent):

  @trollius.coroutine
  def _on_ready(self, token, version):
    logger.debug('On ready called (token "%s")', token)
    self._worker_version = version

    if not version in SUPPORTED_WORKER_VERSIONS:

@@ -343,6 +348,10 @@ class BuildComponent(BaseComponent):

  def _on_heartbeat(self):
    """ Updates the last known heartbeat. """
    if not self._current_job or self._component_status == ComponentStatus.TIMED_OUT:
      return

    logger.debug('Got heartbeat for build %s', self._current_job.repo_build.uuid)
    self._last_heartbeat = datetime.datetime.utcnow()

  @trollius.coroutine

@@ -374,9 +383,15 @@ class BuildComponent(BaseComponent):
    logger.debug('Checking heartbeat on realm %s', self.builder_realm)
    if (self._last_heartbeat and
        self._last_heartbeat < datetime.datetime.utcnow() - HEARTBEAT_DELTA):
      logger.debug('Heartbeat on realm %s has expired: %s', self.builder_realm,
                   self._last_heartbeat)

      yield trollius.From(self._timeout())
      raise trollius.Return()

    logger.debug('Heartbeat on realm %s is valid: %s.', self.builder_realm,
                 self._last_heartbeat)

    yield trollius.From(trollius.sleep(HEARTBEAT_TIMEOUT))

  @trollius.coroutine
@@ -47,6 +47,7 @@ coreos:
    {{ dockersystemd('builder-logs',
                     'quay.io/kelseyhightower/journal-2-logentries',
                     extra_args='--env-file /root/overrides.list -v /run/journald.sock:/run/journald.sock',
                     flattened=True,
                     after_units=['quay-builder.service']
       ) | indent(4) }}
{%- endif %}
@@ -7,18 +7,26 @@ http {
  include hosted-http-base.conf;
  include rate-limiting.conf;

  ssl_certificate ./stack/ssl.cert;
  ssl_certificate_key ./stack/ssl.key;
  ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
  ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
  ssl_session_cache shared:SSL:10m;
  ssl_session_timeout 5m;
  ssl_stapling on;
  ssl_stapling_verify on;
  ssl_prefer_server_ciphers on;

  server {
    include server-base.conf;

    listen 443 default;

    ssl on;
    ssl_certificate ./stack/ssl.cert;
    ssl_certificate_key ./stack/ssl.key;
    ssl_session_timeout 5m;
    ssl_protocols SSLv3 TLSv1;
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    # This header must be set only for HTTPS
    add_header Strict-Transport-Security "max-age=63072000; preload";
  }

  server {

@@ -28,11 +36,8 @@ http {
    listen 8443 default proxy_protocol;

    ssl on;
    ssl_certificate ./stack/ssl.cert;
    ssl_certificate_key ./stack/ssl.key;
    ssl_session_timeout 5m;
    ssl_protocols SSLv3 TLSv1;
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    # This header must be set only for HTTPS
    add_header Strict-Transport-Security "max-age=63072000; preload";
  }
}
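Reviewer note (not part of the commit): the new Strict-Transport-Security header is easy to smoke-test once a host serves this config. A minimal sketch using `requests` (already in the repo's requirements); the URL is a placeholder assumption:

```python
# Placeholder smoke test for the HSTS header added above (not part of the
# commit; point the URL at any host serving this nginx config).
import requests

resp = requests.get('https://quay.io/', allow_redirects=False)
hsts = resp.headers.get('Strict-Transport-Security')
assert hsts and 'max-age=63072000' in hsts, 'HSTS header missing or wrong'
print(hsts)
```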
@@ -8,6 +8,11 @@ if ($args ~ "_escaped_fragment_") {
  rewrite ^ /snapshot$uri;
}

# Disable the ability to be embedded into iframes
add_header X-Frame-Options DENY;

# Proxy Headers
proxy_set_header X-Forwarded-For $proper_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
@@ -566,6 +566,12 @@ def list_federated_logins(user):
                                 FederatedLogin.user == user)
 
 
+def lookup_federated_login(user, service_name):
+  try:
+    return list_federated_logins(user).where(LoginService.name == service_name).get()
+  except FederatedLogin.DoesNotExist:
+    return None
+
 
 def create_confirm_email_code(user, new_email=None):
   if new_email:
     if not validate_email(new_email):

@@ -636,6 +642,13 @@ def find_user_by_email(email):
   return None
 
 
+def get_nonrobot_user(username):
+  try:
+    return User.get(User.username == username, User.organization == False, User.robot == False)
+  except User.DoesNotExist:
+    return None
+
+
 def get_user(username):
   try:
     return User.get(User.username == username, User.organization == False)
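Reviewer note (not part of the commit): `get_nonrobot_user` centralizes the "no organizations, no robots" check that several endpoints below previously inlined. A stand-in sketch of its semantics without the peewee models (all sample data is made up):

```python
# Illustrative only: mirrors the semantics of get_nonrobot_user above,
# using a namedtuple stand-in instead of the real peewee User model.
from collections import namedtuple

User = namedtuple('User', ['username', 'organization', 'robot'])
_USERS = [
    User('alice', organization=False, robot=False),
    User('buildbot', organization=False, robot=True),
    User('acme', organization=True, robot=False),
]

def get_nonrobot_user(username):
    for user in _USERS:
        if user.username == username and not user.organization and not user.robot:
            return user
    return None  # mirrors the User.DoesNotExist -> None behavior

assert get_nonrobot_user('alice') is not None
assert get_nonrobot_user('buildbot') is None   # robots are filtered out
assert get_nonrobot_user('acme') is None       # organizations too
```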
@@ -9,6 +9,7 @@ import os
 from util.aes import AESCipher
 from util.validation import generate_valid_usernames
 from data import model
+from collections import namedtuple
 
 logger = logging.getLogger(__name__)
 if os.environ.get('LDAP_DEBUG') == '1':

@@ -28,6 +29,8 @@ class DatabaseUsers(object):
 
     return (result, None)
 
+  def confirm_existing_user(self, username, password):
+    return self.verify_user(username, password)
 
   def user_exists(self, username):
     return model.get_user(username) is not None

@@ -43,6 +46,7 @@ class LDAPConnection(object):
   def __enter__(self):
     trace_level = 2 if os.environ.get('LDAP_DEBUG') == '1' else 0
     self._conn = ldap.initialize(self._ldap_uri, trace_level=trace_level)
+    self._conn.set_option(ldap.OPT_REFERRALS, 1)
     self._conn.simple_bind_s(self._user_dn, self._user_pw)
 
     return self._conn

@@ -52,6 +56,8 @@ class LDAPConnection(object):
 
 
 class LDAPUsers(object):
+  _LDAPResult = namedtuple('LDAPResult', ['dn', 'attrs'])
+
   def __init__(self, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr):
     self._ldap_conn = LDAPConnection(ldap_uri, admin_dn, admin_passwd)
     self._ldap_uri = ldap_uri

@@ -60,6 +66,25 @@ class LDAPUsers(object):
     self._uid_attr = uid_attr
     self._email_attr = email_attr
 
+  def _get_ldap_referral_dn(self, referral_exception):
+    logger.debug('Got referral: %s', referral_exception.args[0])
+    if not referral_exception.args[0] or not referral_exception.args[0].get('info'):
+      logger.debug('LDAP referral missing info block')
+      return None
+
+    referral_info = referral_exception.args[0]['info']
+    if not referral_info.startswith('Referral:\n'):
+      logger.debug('LDAP referral missing Referral header')
+      return None
+
+    referral_uri = referral_info[len('Referral:\n'):]
+    if not referral_uri.startswith('ldap:///'):
+      logger.debug('LDAP referral URI does not start with ldap:///')
+      return None
+
+    referral_dn = referral_uri[len('ldap:///'):]
+    return referral_dn
+
   def _ldap_user_search(self, username_or_email):
     with self._ldap_conn as conn:
       logger.debug('Incoming username or email param: %s', username_or_email.__repr__())
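Reviewer note (not part of the commit): before continuing into the search logic, the referral parsing just added can be exercised on its own. The assumed input shape is what python-ldap's `ldap.REFERRAL` carries: `{'info': 'Referral:\nldap:///<dn>'}` in `args[0]`:

```python
# Standalone sketch of _get_ldap_referral_dn above (sample payloads made up).
def get_ldap_referral_dn(referral_args):
    if not referral_args or not referral_args.get('info'):
        return None
    info = referral_args['info']
    if not info.startswith('Referral:\n'):
        return None
    uri = info[len('Referral:\n'):]
    if not uri.startswith('ldap:///'):
        return None
    return uri[len('ldap:///'):]

assert (get_ldap_referral_dn({'info': 'Referral:\nldap:///cn=users,dc=quay,dc=io'})
        == 'cn=users,dc=quay,dc=io')
assert get_ldap_referral_dn({'info': 'something else'}) is None
assert get_ldap_referral_dn({}) is None
```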
@@ -70,22 +95,56 @@ class LDAPUsers(object):
       logger.debug('Conducting user search: %s under %s', query, user_search_dn)
       try:
         pairs = conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query.encode('utf-8'))
+      except ldap.REFERRAL as re:
+        referral_dn = self._get_ldap_referral_dn(re)
+        if not referral_dn:
+          return None
+
+        try:
+          subquery = u'(%s=%s)' % (self._uid_attr, username_or_email)
+          pairs = conn.search_s(referral_dn, ldap.SCOPE_BASE, subquery)
+        except ldap.LDAPError:
+          logger.exception('LDAP referral search exception')
+          return None
       except ldap.LDAPError:
         logger.exception('LDAP search exception')
         return None
 
       logger.debug('Found matching pairs: %s', pairs)
       if len(pairs) < 1:
         return None
 
-      for pair in pairs:
-        if pair[0] is not None:
-          logger.debug('Found user: %s', pair)
-          return pair
-
-      return None
+      results = [LDAPUsers._LDAPResult(*pair) for pair in pairs]
+
+      # Filter out pairs without DNs. Some LDAP impls will return such
+      # pairs.
+      with_dns = [result for result in results if result.dn]
+      if len(with_dns) < 1:
+        return None
+
+      # If we have found a single pair, then return it.
+      if len(with_dns) == 1:
+        return with_dns[0]
+
+      # Otherwise, there are multiple pairs with DNs, so find the one with the mail
+      # attribute (if any).
+      with_mail = [result for result in results if result.attrs.get(self._email_attr)]
+      return with_mail[0] if with_mail else with_dns[0]
 
-  def verify_user(self, username_or_email, password):
+  def confirm_existing_user(self, username, password):
+    """ Verify the username and password by looking up the *LDAP* username and confirming the
+        password.
+    """
+    db_user = model.get_user(username)
+    if not db_user:
+      return (None, 'Invalid user')
+
+    federated_login = model.lookup_federated_login(db_user, 'ldap')
+    if not federated_login:
+      return (None, 'Invalid user')
+
+    return self.verify_user(federated_login.service_ident, password, create_new_user=False)
+
+  def verify_user(self, username_or_email, password, create_new_user=True):
     """ Verify the credentials with LDAP and if they are valid, create or update the user
         in our database. """
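Reviewer note (not part of the commit): the new pair-selection rule can be exercised standalone; the sample DNs and attributes below are made up:

```python
# Minimal sketch of the result-selection rule above, mirroring the diff:
# prefer the entry carrying the mail attribute when several DNs match.
from collections import namedtuple

LDAPResult = namedtuple('LDAPResult', ['dn', 'attrs'])

def pick_result(pairs, email_attr='mail'):
    results = [LDAPResult(*pair) for pair in pairs]
    with_dns = [r for r in results if r.dn]       # drop DN-less pairs
    if not with_dns:
        return None
    if len(with_dns) == 1:
        return with_dns[0]
    with_mail = [r for r in results if r.attrs.get(email_attr)]
    return with_mail[0] if with_mail else with_dns[0]

pairs = [
    (None, {}),  # some LDAP implementations return such pairs
    ('uid=jake,ou=employees,dc=quay,dc=io', {}),
    ('uid=jake,ou=otherusers,dc=quay,dc=io', {'mail': ['jake@example.com']}),
]
assert pick_result(pairs).dn == 'uid=jake,ou=otherusers,dc=quay,dc=io'
```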
@@ -94,17 +153,29 @@ class LDAPUsers(object):
      return (None, 'Anonymous binding not allowed')

    found_user = self._ldap_user_search(username_or_email)
    if found_user is None:
      return (None, 'Username not found')

    found_dn, found_response = found_user
    logger.debug('Found user for LDAP username %s; validating password', username_or_email)
    logger.debug('DN %s found: %s', found_dn, found_response)

    # First validate the password by binding as the user
    logger.debug('Found user %s; validating password', username_or_email)
    try:
      with LDAPConnection(self._ldap_uri, found_dn, password.encode('utf-8')):
        pass
+    except ldap.REFERRAL as re:
+      referral_dn = self._get_ldap_referral_dn(re)
+      if not referral_dn:
+        return (None, 'Invalid username')
+
+      try:
+        with LDAPConnection(self._ldap_uri, referral_dn, password.encode('utf-8')):
+          pass
+      except ldap.INVALID_CREDENTIALS:
+        logger.exception('Invalid LDAP credentials')
+        return (None, 'Invalid password')
+
    except ldap.INVALID_CREDENTIALS:
      logger.exception('Invalid LDAP credentials')
      return (None, 'Invalid password')

@@ -121,6 +192,9 @@ class LDAPUsers(object):
    db_user = model.verify_federated_login('ldap', username)

    if not db_user:
      if not create_new_user:
        return (None, 'Invalid user')

      # We must create the user in our db
      valid_username = None
      for valid_username in generate_valid_usernames(username):

@@ -232,6 +306,13 @@ class UserAuthentication(object):

    return data.get('password', encrypted)

  def confirm_existing_user(self, username, password):
    """ Verifies that the given password matches to the given DB username. Unlike verify_user, this
        call first translates the DB user via the FederatedLogin table (where applicable).
    """
    return self.state.confirm_existing_user(username, password)


  def verify_user(self, username_or_email, password, basic_auth=False):
    # First try to decode the password as a signed token.
    if basic_auth:
@@ -238,8 +238,8 @@ class SuperUserSendRecoveryEmail(ApiResource):
   @nickname('sendInstallUserRecoveryEmail')
   def post(self, username):
     if SuperUserPermission().can():
-      user = model.get_user(username)
-      if not user or user.organization or user.robot:
+      user = model.get_nonrobot_user(username)
+      if not user:
         abort(404)
 
       if superusers.is_superuser(username):

@@ -288,8 +288,8 @@ class SuperUserManagement(ApiResource):
   def get(self, username):
     """ Returns information about the specified user. """
     if SuperUserPermission().can():
-      user = model.get_user(username)
-      if not user or user.organization or user.robot:
+      user = model.get_nonrobot_user(username)
+      if not user:
         abort(404)
 
       return user_view(user)

@@ -302,8 +302,8 @@ class SuperUserManagement(ApiResource):
   def delete(self, username):
     """ Deletes the specified user. """
     if SuperUserPermission().can():
-      user = model.get_user(username)
-      if not user or user.organization or user.robot:
+      user = model.get_nonrobot_user(username)
+      if not user:
         abort(404)
 
       if superusers.is_superuser(username):

@@ -321,8 +321,8 @@ class SuperUserManagement(ApiResource):
   def put(self, username):
     """ Updates information about the specified user. """
     if SuperUserPermission().can():
-      user = model.get_user(username)
-      if not user or user.organization or user.robot:
+      user = model.get_nonrobot_user(username)
+      if not user:
         abort(404)
 
       if superusers.is_superuser(username):
@@ -283,7 +283,7 @@ class User(ApiResource):
     user_data = request.get_json()
     invite_code = user_data.get('invite_code', '')
 
-    existing_user = model.get_user(user_data['username'])
+    existing_user = model.get_nonrobot_user(user_data['username'])
     if existing_user:
       raise request_error(message='The username already exists')

@@ -372,8 +372,7 @@ class ClientKey(ApiResource):
     """ Return's the user's private client key. """
     username = get_authenticated_user().username
     password = request.get_json()['password']
 
-    (result, error_message) = authentication.verify_user(username, password)
+    (result, error_message) = authentication.confirm_existing_user(username, password)
     if not result:
       raise request_error(message=error_message)

@@ -541,7 +540,17 @@ class VerifyUser(ApiResource):
     """ Verifies the signed in the user with the specified credentials. """
     signin_data = request.get_json()
     password = signin_data['password']
-    return conduct_signin(get_authenticated_user().username, password)
+
+    username = get_authenticated_user().username
+    (result, error_message) = authentication.confirm_existing_user(username, password)
+    if not result:
+      return {
+        'message': error_message,
+        'invalidCredentials': True,
+      }, 403
+
+    common_login(result)
+    return {'success': True}
 
 
 @resource('/v1/signout')

@@ -815,8 +824,8 @@ class Users(ApiResource):
   @nickname('getUserInformation')
   def get(self, username):
     """ Get user information for the specified user. """
-    user = model.get_user(username)
-    if user is None or user.organization or user.robot:
+    user = model.get_nonrobot_user(username)
+    if user is None:
       abort(404)
 
     return user_view(user)
@@ -71,7 +71,7 @@ class QuayNotificationMethod(NotificationMethod):
     target_info = config_data['target']
 
     if target_info['kind'] == 'user':
-      target = model.get_user(target_info['name'])
+      target = model.get_nonrobot_user(target_info['name'])
       if not target:
         # Just to be safe.
         return (True, 'Unknown user %s' % target_info['name'], [])
@@ -244,7 +244,11 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
   def _get_authorized_client(self):
     base_client = self._get_client()
     auth_token = self.auth_token or 'invalid:invalid'
-    (access_token, access_token_secret) = auth_token.split(':')
+    token_parts = auth_token.split(':')
+    if len(token_parts) != 2:
+      token_parts = ['invalid', 'invalid']
+
+    (access_token, access_token_secret) = token_parts
     return base_client.get_authorized_client(access_token, access_token_secret)
 
   def _get_repository_client(self):
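Reviewer note (not part of the commit): the defensive split above avoids a ValueError on malformed tokens; a standalone sketch (sample tokens made up):

```python
# Illustrative only: mirrors the defensive token split in _get_authorized_client.
def split_auth_token(auth_token):
    token_parts = (auth_token or 'invalid:invalid').split(':')
    if len(token_parts) != 2:
        # Malformed tokens (no colon, or extra colons) fall back to a
        # sentinel pair instead of raising ValueError on tuple unpacking.
        token_parts = ['invalid', 'invalid']
    access_token, access_token_secret = token_parts
    return access_token, access_token_secret

assert split_auth_token('abc:def') == ('abc', 'def')
assert split_auth_token('justonepart') == ('invalid', 'invalid')
assert split_auth_token(None) == ('invalid', 'invalid')
```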
@@ -253,6 +257,13 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
     bitbucket_client = self._get_authorized_client()
     return bitbucket_client.for_namespace(namespace).repositories().get(name)
 
+  def _get_default_branch(self, repository, default_value='master'):
+    (result, data, _) = repository.get_main_branch()
+    if result:
+      return data['name']
+
+    return default_value
+
   def get_oauth_url(self):
     bitbucket_client = self._get_client()
     (result, data, err_msg) = bitbucket_client.get_authorization_url()

@@ -372,6 +383,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
     # Find the first matching branch.
     repo_branches = self.list_field_values('branch_name') or []
     branches = find_matching_branches(config, repo_branches)
+    if not branches:
+      branches = [self._get_default_branch(repository)]
 
     (result, data, err_msg) = repository.get_path_contents('', revision=branches[0])
     if not result:
       raise RepositoryReadException(err_msg)

@@ -432,10 +446,7 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
 
     # Lookup the default branch associated with the repository. We use this when building
     # the tags.
-    default_branch = ''
-    (result, data, _) = repository.get_main_branch()
-    if result:
-      default_branch = data['name']
+    default_branch = self._get_default_branch(repository)
 
     # Lookup the commit sha.
     (result, data, _) = repository.changesets().get(commit_sha)

@@ -488,30 +499,36 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
     # Parse the JSON payload.
     payload_json = request.form.get('payload')
     if not payload_json:
+      logger.debug('Skipping BitBucket request due to missing payload')
       raise SkipRequestException()
 
     try:
       payload = json.loads(payload_json)
     except ValueError:
+      logger.debug('Skipping BitBucket request due to invalid payload')
       raise SkipRequestException()
 
     logger.debug('BitBucket trigger payload %s', payload)
 
     # Make sure we have a commit in the payload.
     if not payload.get('commits'):
+      logger.debug('Skipping BitBucket request due to missing commits block')
       raise SkipRequestException()
 
     # Check if this build should be skipped by commit message.
     commit = payload['commits'][0]
     commit_message = commit['message']
     if should_skip_commit(commit_message):
+      logger.debug('Skipping BitBucket request due to commit message request')
       raise SkipRequestException()
 
     # Check to see if this build should be skipped by ref.
     if not commit.get('branch') and not commit.get('tag'):
+      logger.debug('Skipping BitBucket request due to missing branch and tag')
       raise SkipRequestException()
 
     ref = 'refs/heads/' + commit['branch'] if commit.get('branch') else 'refs/tags/' + commit['tag']
     logger.debug('Checking BitBucket request: %s', ref)
     raise_if_skipped(self.config, ref)
 
     commit_sha = commit['node']

@@ -523,10 +540,7 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
     repository = self._get_repository_client()
 
     # Find the branch to build.
-    branch_name = run_parameters.get('branch_name')
-    (result, data, _) = repository.get_main_branch()
-    if result:
-      branch_name = branch_name or data['name']
+    branch_name = run_parameters.get('branch_name') or self._get_default_branch(repository)
 
     # Lookup the commit SHA for the branch.
     (result, data, _) = repository.get_branches()

@@ -1048,7 +1062,7 @@ class CustomBuildTrigger(BuildTriggerHandler):
     }
 
     prepared = PreparedBuild(self.trigger)
-    prepared.tags = [commit_sha]
+    prepared.tags = [commit_sha[:7]]
     prepared.name_from_sha(commit_sha)
     prepared.subdirectory = config['subdir']
     prepared.metadata = metadata
@@ -9,10 +9,11 @@ from health.healthcheck import get_healthchecker
 
 from data import model
 from data.model.oauth import DatabaseAuthorizationProvider
-from app import app, billing as stripe, build_logs, avatar, signer
+from app import app, billing as stripe, build_logs, avatar, signer, log_archive
 from auth.auth import require_session_login, process_oauth
 from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
-                              SuperUserPermission, AdministerRepositoryPermission)
+                              SuperUserPermission, AdministerRepositoryPermission,
+                              ModifyRepositoryPermission)
 
 from util.invoice import renderInvoiceToPdf
 from util.seo import render_snapshot

@@ -250,6 +251,31 @@ def robots():
   return send_from_directory('static', 'robots.txt')
 
 
+@web.route('/buildlogs/<build_uuid>', methods=['GET'])
+@route_show_if(features.BUILD_SUPPORT)
+@require_session_login
+def buildlogs(build_uuid):
+  build = model.get_repository_build(build_uuid)
+  if not build:
+    abort(403)
+
+  repo = build.repository
+  if not ModifyRepositoryPermission(repo.namespace_user.username, repo.name).can():
+    abort(403)
+
+  # If the logs have been archived, just return a URL of the completed archive
+  if build.logs_archived:
+    return redirect(log_archive.get_file_url(build.uuid))
+
+  _, logs = build_logs.get_log_entries(build.uuid, 0)
+  response = jsonify({
+    'logs': [log for log in logs]
+  })
+
+  response.headers["Content-Disposition"] = "attachment;filename=" + build.uuid + ".json"
+  return response
+
+
 @web.route('/receipt', methods=['GET'])
 @route_show_if(features.BILLING)
 @require_session_login
@@ -77,10 +77,11 @@ class LocalHealthCheck(HealthCheck):
 
 
 class ProductionHealthCheck(HealthCheck):
-  def __init__(self, app, access_key, secret_key):
+  def __init__(self, app, access_key, secret_key, db_instance='quay'):
     super(ProductionHealthCheck, self).__init__(app)
     self.access_key = access_key
     self.secret_key = secret_key
+    self.db_instance = db_instance
 
   @classmethod
   def check_name(cls):

@@ -115,7 +116,10 @@ class ProductionHealthCheck(HealthCheck):
                           aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key)
       response = region.describe_db_instances()['DescribeDBInstancesResponse']
       result = response['DescribeDBInstancesResult']
-      instances = result['DBInstances']
+      instances = [i for i in result['DBInstances'] if i['DBInstanceIdentifier'] == self.db_instance]
+      if not instances:
+        return 'error'
 
       status = instances[0]['DBInstanceStatus']
       return status
     except:
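Reviewer note (not part of the commit): the new instance filter is straightforward to exercise against a canned `describe_db_instances`-style payload (the sample data below is made up):

```python
# Illustrative only: filtering an RDS describe_db_instances-style response
# down to the configured instance, as the health check above now does.
SAMPLE_RESULT = {
    'DBInstances': [
        {'DBInstanceIdentifier': 'staging', 'DBInstanceStatus': 'available'},
        {'DBInstanceIdentifier': 'quay', 'DBInstanceStatus': 'backing-up'},
    ],
}

def rds_status(result, db_instance='quay'):
    instances = [i for i in result['DBInstances']
                 if i['DBInstanceIdentifier'] == db_instance]
    if not instances:
        return 'error'
    return instances[0]['DBInstanceStatus']

assert rds_status(SAMPLE_RESULT) == 'backing-up'
assert rds_status(SAMPLE_RESULT, 'missing') == 'error'
```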
@@ -21,7 +21,6 @@ paramiko
 xhtml2pdf
 redis
 hiredis
 docker-py
 flask-restful==0.2.12
 jsonschema
 git+https://github.com/NateFerrero/oauth2lib.git

@@ -38,12 +37,12 @@ psycopg2
 pyyaml
 git+https://github.com/DevTable/aniso8601-fake.git
 git+https://github.com/DevTable/anunidecode.git
 git+https://github.com/DevTable/avatar-generator.git
 git+https://github.com/DevTable/pygithub.git
 git+https://github.com/DevTable/container-cloud-config.git
 git+https://github.com/DevTable/python-etcd.git
 git+https://github.com/coreos/py-bitbucket.git
 git+https://github.com/coreos/pyapi-gitlab.git
 git+https://github.com/coreos/mockldap.git
 gipc
 pyOpenSSL
 pygpgme

@@ -51,4 +50,6 @@ cachetools
 mock
 psutil
 stringscore
-mockldap
+python-swiftclient
+python-keystoneclient
+Flask-Testing
@@ -1,76 +1,99 @@
-APScheduler==3.0.1
+APScheduler==3.0.3
 Babel==1.3
 Flask==0.10.1
 Flask-Login==0.2.11
 Flask-Mail==0.9.1
 Flask-Principal==0.4.0
 Flask-RESTful==0.2.12
+Flask-Testing==0.4.2
 Jinja2==2.7.3
-LogentriesLogger==0.2.1
-Mako==1.0.0
+Logentries==0.7
+Mako==1.0.1
 MarkupSafe==0.23
-Pillow==2.7.0
-PyMySQL==0.6.3
+Pillow==2.8.1
+PyMySQL==0.6.6
 PyPDF2==1.24
 PyYAML==3.11
-SQLAlchemy==0.9.8
-WebOb==1.4
-Werkzeug==0.9.6
-aiowsgi==0.3
-alembic==0.7.4
+SQLAlchemy==1.0.3
+WebOb==1.4.1
+Werkzeug==0.10.4
+aiowsgi==0.5
+alembic==0.7.5.post2
 argparse==1.3.0
 autobahn==0.9.3-3
 backports.ssl-match-hostname==3.4.0.2
 beautifulsoup4==4.3.2
 blinker==1.3
-boto==2.35.1
+boto==2.38.0
 cachetools==1.0.0
 docker-py==0.7.1
-ecdsa==0.11
+certifi==2015.04.28
+cffi==0.9.2
+cryptography==0.8.2
+ecdsa==0.13
 enum34==1.0.4
 funcparserlib==0.3.6
 futures==2.2.0
 gevent==1.0.1
 gipc==0.5.0
 greenlet==0.4.5
 gunicorn==18.0
-hiredis==0.1.5
-html5lib==0.999
+hiredis==0.2.0
+html5lib==0.99999
 iso8601==0.1.10
 itsdangerous==0.24
 jsonschema==2.4.0
-marisa-trie==0.7
-mixpanel-py==3.2.1
+marisa-trie==0.7.2
+mixpanel-py==4.0.2
 mock==1.0.1
 mockldap==0.2.4
 msgpack-python==0.4.6
 netaddr==0.7.14
 netifaces==0.10.4
 oauthlib==0.7.2
 oslo.config==1.11.0
 oslo.i18n==1.6.0
 oslo.serialization==1.5.0
 oslo.utils==1.5.0
 paramiko==1.15.2
-peewee==2.4.7
+pbr==0.11.0
+peewee==2.6.0
 prettytable==0.7.2
 psutil==2.2.1
-psycopg2==2.5.4
+psycopg2==2.6
 py-bcrypt==0.4
+pyOpenSSL==0.15.1
 pyasn1==0.1.7
+pycparser==2.12
 pycrypto==2.6.1
 pygpgme==0.3
-python-dateutil==2.4.0
+python-dateutil==2.4.2
+python-keystoneclient==1.4.0
 python-ldap==2.4.19
 python-magic==0.4.6
-pytz==2014.10
-pyOpenSSL==0.14
-raven==5.1.1
+python-swiftclient==2.4.0
+pytz==2015.2
+raven==5.3.0
 redis==2.10.3
 reportlab==2.7
-requests==2.5.1
+requests==2.6.2
 requests-oauthlib==0.4.2
+simplejson==3.7.1
 six==1.9.0
+stevedore==1.4.0
 stringscore==0.1.0
-stripe==1.20.1
+stripe==1.22.2
 trollius==1.0.4
-tzlocal==1.1.2
-urllib3==1.10.2
+tzlocal==1.1.3
+urllib3==1.10.3
 waitress==0.8.9
-websocket-client==0.23.0
+websocket-client==0.30.0
 wsgiref==0.1.2
 xhtml2pdf==0.0.6
 git+https://github.com/DevTable/aniso8601-fake.git
 git+https://github.com/DevTable/anunidecode.git
 git+https://github.com/DevTable/avatar-generator.git
 git+https://github.com/DevTable/pygithub.git
 git+https://github.com/DevTable/container-cloud-config.git
 git+https://github.com/DevTable/python-etcd.git
 git+https://github.com/NateFerrero/oauth2lib.git
 git+https://github.com/coreos/py-bitbucket.git
 git+https://github.com/coreos/pyapi-gitlab.git
+git+https://github.com/coreos/mockldap.git
@@ -388,6 +388,29 @@ a:focus {
   width: 400px;
 }
 
+.config-map-field-element table {
+  margin-bottom: 10px;
+}
+
+.config-map-field-element .form-control-container {
+  border-top: 1px solid #eee;
+  padding-top: 10px;
+}
+
+.config-map-field-element .form-control-container select, .config-map-field-element .form-control-container input {
+  margin-bottom: 10px;
+}
+
+.config-map-field-element .empty {
+  color: #ccc;
+  margin-bottom: 10px;
+  display: block;
+}
+
+.config-map-field-element .item-title {
+  font-weight: bold;
+}
+
 .config-contact-field {
   margin-bottom: 4px;
 }
@@ -157,6 +157,24 @@
   transition: all 0.15s ease-in-out;
 }
 
+.build-logs-view .download-button i.fa {
+  margin-right: 10px;
+}
+
+.build-logs-view .download-button {
+  position: absolute;
+  top: 6px;
+  right: 124px;
+  z-index: 2;
+  transition: all 0.15s ease-in-out;
+}
+
+.build-logs-view .download-button:not(:hover) {
+  background: transparent;
+  border: 1px solid transparent;
+  color: #ddd;
+}
+
 .build-logs-view .copy-button:not(.zeroclipboard-is-hover) {
   background: transparent;
   border: 1px solid transparent;
@@ -3,6 +3,12 @@
   <i class="fa fa-clipboard"></i>Copy Logs
 </button>
 
+<a id="downloadButton" class="btn btn-primary download-button"
+   ng-href="/buildlogs/{{ currentBuild.id }}"
+   target="_blank">
+  <i class="fa fa-download"></i>Download Logs
+</a>
+
 <span class="cor-loader" ng-if="!logEntries"></span>
 
 <span class="no-logs" ng-if="!logEntries.length && currentBuild.phase == 'waiting'">
static/directives/config/config-map-field.html (new file, +20 lines)
@@ -0,0 +1,20 @@
<div class="config-map-field-element">
  <table class="table" ng-show="hasValues(binding)">
    <tr class="item" ng-repeat="(key, value) in binding">
      <td class="item-title">{{ key }}</td>
      <td class="item-value">{{ value }}</td>
      <td class="item-delete">
        <a href="javascript:void(0)" ng-click="removeKey(key)">Remove</a>
      </td>
    </tr>
  </table>
  <span class="empty" ng-if="!hasValues(binding)">No entries defined</span>
  <form class="form-control-container" ng-submit="addEntry()">
    Add Key-Value:
    <select ng-model="newKey">
      <option ng-repeat="key in keys" value="{{ key }}">{{ key }}</option>
    </select>
    <input type="text" class="form-control" placeholder="Value" ng-model="newValue">
    <button class="btn btn-default" style="display: inline-block">Add Entry</button>
  </form>
</div>
@@ -112,6 +112,11 @@
   A valid SSL certificate and private key files are required to use this option.
 </div>
 
+<div class="co-alert co-alert-info" ng-if="config.PREFERRED_URL_SCHEME == 'https'">
+  Enabling SSL also enables <a href="https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security">HTTP Strict Transport Security</a>.<br/>
+  This prevents downgrade attacks and cookie theft, but browsers will reject all future insecure connections on this hostname.
+</div>
+
 <table class="config-table" ng-if="config.PREFERRED_URL_SCHEME == 'https'">
   <tr>
     <td class="non-input">Certificate:</td>

@@ -198,6 +203,7 @@
   <option value="S3Storage">Amazon S3</option>
   <option value="GoogleCloudStorage">Google Cloud Storage</option>
   <option value="RadosGWStorage">Ceph Object Gateway (RADOS)</option>
+  <option value="SwiftStorage">OpenStack Storage (Swift)</option>
 </select>
 </td>
 </tr>

@@ -206,10 +212,15 @@
 <tr ng-repeat="field in STORAGE_CONFIG_FIELDS[config.DISTRIBUTED_STORAGE_CONFIG.local[0]]">
   <td>{{ field.title }}:</td>
   <td>
+    <span class="config-map-field"
+          binding="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name]"
+          ng-if="field.kind == 'map'"
+          keys="field.keys"></span>
     <span class="config-string-field"
           binding="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name]"
           placeholder="{{ field.placeholder }}"
-          ng-if="field.kind == 'text'"></span>
+          ng-if="field.kind == 'text'"
+          is-optional="field.optional"></span>
     <div class="co-checkbox" ng-if="field.kind == 'bool'">
       <input id="dsc-{{ field.name }}" type="checkbox"
              ng-model="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name]">

@@ -849,4 +860,4 @@
 </div><!-- /.modal -->
 
 </div>
 </div>
 </div>
@@ -2,7 +2,7 @@
 <form name="fieldform" novalidate>
   <input type="text" class="form-control" placeholder="{{ placeholder || '' }}"
          ng-model="binding" ng-trim="false" ng-minlength="1"
-         ng-pattern="getRegexp(pattern)" required>
+         ng-pattern="getRegexp(pattern)" ng-required="!isOptional">
   <div class="alert alert-danger" ng-show="errorMessage">
     {{ errorMessage }}
   </div>
@@ -63,6 +63,12 @@
 
 <!-- Build Status Badge -->
 <div class="panel-body panel-section hidden-xs">
+
+  <!-- Token Info Banner -->
+  <div class="co-alert co-alert-info" ng-if="!repository.is_public">
+    Note: This badge contains a token so the badge can be seen by external users. The token does not grant any other access and is safe to share!
+  </div>
 
   <!-- Status Image -->
   <a ng-href="/repository/{{ repository.namespace }}/{{ repository.name }}">
     <img ng-src="/repository/{{ repository.namespace }}/{{ repository.name }}/status?token={{ repository.status_token }}"
@@ -15,10 +15,10 @@
 
 <div class="empty" ng-if="!notifications.length">
   <div class="empty-primary-msg">No notifications have been setup for this repository.</div>
-  <div class="empty-secondary-msg hidden-xs" ng-if="repository.can_write">
+  <div class="empty-secondary-msg hidden-sm hidden-xs" ng-if="repository.can_write">
     Click the "Create Notification" button above to add a new notification for a repository event.
   </div>
-  <div class="empty-secondary-msg visible-xs" ng-if="repository.can_write">
+  <div class="empty-secondary-msg visible-sm visible-xs" ng-if="repository.can_write">
     <a href="javascript:void(0)" ng-click="askCreateNotification()">Click here</a> to add a new notification for a repository event.
   </div>
 </div>
@@ -78,6 +78,19 @@ angular.module("core-config-setup", ['angularFileUpload'])
       {'name': 'secret_key', 'title': 'Secret Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
       {'name': 'bucket_name', 'title': 'Bucket Name', 'placeholder': 'my-cool-bucket', 'kind': 'text'},
       {'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'}
     ],
+
+    'SwiftStorage': [
+      {'name': 'auth_url', 'title': 'Swift Auth URL', 'placeholder': '', 'kind': 'text'},
+      {'name': 'swift_container', 'title': 'Swift Container Name', 'placeholder': 'mycontainer', 'kind': 'text'},
+      {'name': 'storage_path', 'title': 'Storage Path', 'placeholder': '/path/inside/container', 'kind': 'text'},
+
+      {'name': 'swift_user', 'title': 'Username', 'placeholder': 'accesskeyhere', 'kind': 'text'},
+      {'name': 'swift_password', 'title': 'Password/Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
+
+      {'name': 'ca_cert_path', 'title': 'CA Cert Filename', 'placeholder': 'conf/stack/swift.cert', 'kind': 'text', 'optional': true},
+      {'name': 'os_options', 'title': 'OS Options', 'kind': 'map',
+       'keys': ['tenant_id', 'auth_token', 'service_type', 'endpoint_type', 'tenant_name', 'object_storage_url', 'region_name']}
+    ]
   };

@@ -760,6 +773,42 @@ angular.module("core-config-setup", ['angularFileUpload'])
     return directiveDefinitionObject;
   })
 
+  .directive('configMapField', function () {
+    var directiveDefinitionObject = {
+      priority: 0,
+      templateUrl: '/static/directives/config/config-map-field.html',
+      replace: false,
+      transclude: false,
+      restrict: 'C',
+      scope: {
+        'binding': '=binding',
+        'keys': '=keys'
+      },
+      controller: function($scope, $element) {
+        $scope.newKey = null;
+        $scope.newValue = null;
+
+        $scope.hasValues = function(binding) {
+          return binding && Object.keys(binding).length;
+        };
+
+        $scope.removeKey = function(key) {
+          delete $scope.binding[key];
+        };
+
+        $scope.addEntry = function() {
+          if (!$scope.newKey || !$scope.newValue) { return; }
+
+          $scope.binding = $scope.binding || {};
+          $scope.binding[$scope.newKey] = $scope.newValue;
+          $scope.newKey = null;
+          $scope.newValue = null;
+        }
+      }
+    };
+    return directiveDefinitionObject;
+  })
+
   .directive('configStringField', function () {
     var directiveDefinitionObject = {
       priority: 0,

@@ -772,7 +821,8 @@ angular.module("core-config-setup", ['angularFileUpload'])
         'placeholder': '@placeholder',
         'pattern': '@pattern',
         'defaultValue': '@defaultValue',
-        'validator': '&validator'
+        'validator': '&validator',
+        'isOptional': '=isOptional'
       },
       controller: function($scope, $element) {
         $scope.getRegexp = function(pattern) {
@@ -260,7 +260,7 @@ angular.module('quay').directive('repoPanelBuilds', function () {
     };
 
     $scope.handleBuildStarted = function(build) {
-      if (!$scope.allBuilds) {
+      if ($scope.allBuilds) {
         $scope.allBuilds.push(build);
       }
       updateBuilds();
@@ -123,6 +123,7 @@ angular.module('quay').directive('triggerSetupGithost', function () {
 
       ApiService.listBuildTriggerSubdirs($scope.trigger['config'], params).then(function(resp) {
         if (resp['status'] == 'error') {
+          $scope.locations = [];
           callback(resp['message'] || 'Could not load Dockerfile locations');
           return;
         }
|
|||
$scope.signinStarted = function() {
|
||||
if (Features.BILLING) {
|
||||
PlanService.getMinimumPlan(1, true, function(plan) {
|
||||
if (!plan) { return; }
|
||||
PlanService.notePlan(plan.stripeId);
|
||||
});
|
||||
}
|
||||
|
|
|
@@ -94,7 +94,7 @@
       }, ApiService.errorDisplay('Could not generate token'));
     };
 
-    UIService.showPasswordDialog('Enter your password to generated an encrypted version:', generateToken);
+    UIService.showPasswordDialog('Enter your password to generate an encrypted version:', generateToken);
   };
 
   $scope.changeEmail = function() {
static/js/services/angular-poll-channel.js (vendored, 9 lines changed)
@@ -1,7 +1,8 @@
 /**
  * Specialized class for conducting an HTTP poll, while properly preventing multiple calls.
  */
-angular.module('quay').factory('AngularPollChannel', ['ApiService', '$timeout', function(ApiService, $timeout) {
+angular.module('quay').factory('AngularPollChannel', ['ApiService', '$timeout', 'DocumentVisibilityService',
+  function(ApiService, $timeout, DocumentVisibilityService) {
   var _PollChannel = function(scope, requester, opt_sleeptime) {
     this.scope_ = scope;
     this.requester_ = requester;

@@ -50,6 +51,12 @@ angular.module('quay').factory('AngularPollChannel', ['ApiService', '$timeout',
   _PollChannel.prototype.call_ = function() {
     if (this.working) { return; }
 
+    // If the document is currently hidden, skip the call.
+    if (DocumentVisibilityService.isHidden()) {
+      this.setupTimer_();
+      return;
+    }
+
     var that = this;
     this.working = true;
     this.scope_.$apply(function() {
static/js/services/document-visibility-service.js (new file, +60 lines)
@@ -0,0 +1,60 @@
/**
 * Helper service which fires off events when the document's visibility changes, as well as allowing
 * other Angular code to query the state of the document's visibility directly.
 */
angular.module('quay').constant('CORE_EVENT', {
  DOC_VISIBILITY_CHANGE: 'core.event.doc_visibility_change'
});

angular.module('quay').factory('DocumentVisibilityService', ['$rootScope', '$document', 'CORE_EVENT',
function($rootScope, $document, CORE_EVENT) {
  var document = $document[0],
      features,
      detectedFeature;

  function broadcastChangeEvent() {
    $rootScope.$broadcast(CORE_EVENT.DOC_VISIBILITY_CHANGE,
                          document[detectedFeature.propertyName]);
  }

  features = {
    standard: {
      eventName: 'visibilitychange',
      propertyName: 'hidden'
    },
    moz: {
      eventName: 'mozvisibilitychange',
      propertyName: 'mozHidden'
    },
    ms: {
      eventName: 'msvisibilitychange',
      propertyName: 'msHidden'
    },
    webkit: {
      eventName: 'webkitvisibilitychange',
      propertyName: 'webkitHidden'
    }
  };

  Object.keys(features).some(function(feature) {
    if (document[features[feature].propertyName] !== undefined) {
      detectedFeature = features[feature];
      return true;
    }
  });

  if (detectedFeature) {
    $document.on(detectedFeature.eventName, broadcastChangeEvent);
  }

  return {
    /**
     * Is the window currently hidden or not.
     */
    isHidden: function() {
      if (detectedFeature) {
        return document[detectedFeature.propertyName];
      }
    }
  };
}]);
@@ -196,6 +196,10 @@ function($rootScope, $interval, UserService, ApiService, StringBuilderService, P
   };
 
   notificationService.getClasses = function(notifications) {
+    if (!notifications.length) {
+      return '';
+    }
+
     var classes = [];
     for (var i = 0; i < notifications.length; ++i) {
       var notification = notifications[i];
@@ -122,7 +122,7 @@ angular.module('quay').factory('TriggerService', ['UtilService', '$sanitize', 'K
       'title': 'Commit',
       'type': 'regex',
       'name': 'commit_sha',
-      'regex': '^([A-Fa-f0-9]{7})$',
+      'regex': '^([A-Fa-f0-9]{7,})$',
       'placeholder': '1c002dd'
     }
   ],
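Reviewer note (not part of the commit): the relaxed pattern (`{7}` to `{7,}`) now accepts full-length SHAs as well as seven-character prefixes; a quick check:

```python
# Quick sanity check of the relaxed commit-SHA pattern above.
import re

OLD = re.compile(r'^([A-Fa-f0-9]{7})$')
NEW = re.compile(r'^([A-Fa-f0-9]{7,})$')

short_sha = '1c002dd'
full_sha = '1c002dd4b3a1e33e2c8f1b6a7d9e0f1a2b3c4d5e'  # made-up 40-char SHA

assert OLD.match(short_sha) and NEW.match(short_sha)
assert not OLD.match(full_sha) and NEW.match(full_sha)
assert not NEW.match('12345')      # still rejects too-short values
assert not NEW.match('xyzxyzx')    # and non-hex input
```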

@@ -2,6 +2,7 @@ from storage.local import LocalStorage
 from storage.cloud import S3Storage, GoogleCloudStorage, RadosGWStorage
 from storage.fakestorage import FakeStorage
 from storage.distributedstorage import DistributedStorage
+from storage.swift import SwiftStorage
 
 
 STORAGE_DRIVER_CLASSES = {

@@ -9,6 +10,7 @@ STORAGE_DRIVER_CLASSES = {
   'S3Storage': S3Storage,
   'GoogleCloudStorage': GoogleCloudStorage,
   'RadosGWStorage': RadosGWStorage,
+  'SwiftStorage': SwiftStorage,
 }
 
 def get_storage_driver(storage_params):
@@ -1,27 +1,31 @@
 from storage.basestorage import BaseStorage
 
+_FAKE_STORAGE_MAP = {}
+
 class FakeStorage(BaseStorage):
   def _init_path(self, path=None, create=False):
     return path
 
   def get_content(self, path):
-    raise IOError('Fake files are fake!')
+    if not path in _FAKE_STORAGE_MAP:
+      raise IOError('Fake file %s not found' % path)
+
+    return _FAKE_STORAGE_MAP.get(path)
 
   def put_content(self, path, content):
-    return path
+    _FAKE_STORAGE_MAP[path] = content
 
   def stream_read(self, path):
-    yield ''
+    yield _FAKE_STORAGE_MAP[path]
 
   def stream_write(self, path, fp, content_type=None, content_encoding=None):
-    pass
+    _FAKE_STORAGE_MAP[path] = fp.read()
 
   def remove(self, path):
-    pass
+    _FAKE_STORAGE_MAP.pop(path, None)
 
   def exists(self, path):
-    return False
+    return path in _FAKE_STORAGE_MAP
 
   def get_checksum(self, path):
-    return 'abcdefg'
+    return path
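Reviewer note (not part of the commit): with the map-backed fake in place, round-trips through FakeStorage actually work, which is what the registry tests below rely on. A standalone stand-in exercising the same behavior:

```python
# Illustrative exercise of the new map-backed FakeStorage behavior
# (standalone stand-in mirroring the methods above).
_FAKE_STORAGE_MAP = {}

def put_content(path, content):
    _FAKE_STORAGE_MAP[path] = content

def get_content(path):
    if not path in _FAKE_STORAGE_MAP:
        raise IOError('Fake file %s not found' % path)
    return _FAKE_STORAGE_MAP.get(path)

def exists(path):
    return path in _FAKE_STORAGE_MAP

put_content('repo/blob', 'hello')
assert exists('repo/blob')
assert get_content('repo/blob') == 'hello'

_FAKE_STORAGE_MAP.pop('repo/blob', None)   # remove()
assert not exists('repo/blob')
```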
storage/swift.py (new file, +188 lines)
@@ -0,0 +1,188 @@
""" Swift storage driver. Based on: github.com/bacongobbler/docker-registry-driver-swift/ """
from swiftclient.client import Connection, ClientException
from storage.basestorage import BaseStorage

from random import SystemRandom
import string
import logging

logger = logging.getLogger(__name__)

class SwiftStorage(BaseStorage):
  def __init__(self, swift_container, storage_path, auth_url, swift_user,
               swift_password, auth_version=None, os_options=None, ca_cert_path=None):
    self._swift_container = swift_container
    self._storage_path = storage_path

    self._auth_url = auth_url
    self._ca_cert_path = ca_cert_path

    self._swift_user = swift_user
    self._swift_password = swift_password

    self._auth_version = auth_version or 2
    self._os_options = os_options or {}

    self._initialized = False
    self._swift_connection = None

  def _initialize(self):
    if self._initialized:
      return

    self._initialized = True
    self._swift_connection = self._get_connection()

  def _get_connection(self):
    return Connection(
      authurl=self._auth_url,
      cacert=self._ca_cert_path,

      user=self._swift_user,
      key=self._swift_password,

      auth_version=self._auth_version,
      os_options=self._os_options)

  def _get_relative_path(self, path):
    if path.startswith(self._storage_path):
      path = path[len(self._storage_path)]

    if path.endswith('/'):
      path = path[:-1]

    return path

  def _normalize_path(self, path=None):
    path = self._storage_path + (path or '')

    # Openstack does not like paths starting with '/' and we always normalize
    # to remove trailing '/'
    if path.startswith('/'):
      path = path[1:]

    if path.endswith('/'):
      path = path[:-1]

    return path

  def _get_container(self, path):
    self._initialize()
    path = self._normalize_path(path)

    if path and not path.endswith('/'):
      path += '/'

    try:
      _, container = self._swift_connection.get_container(
        container=self._swift_container,
        prefix=path, delimiter='/')
      return container
    except:
      logger.exception('Could not get container: %s', path)
      raise IOError('Unknown path: %s' % path)

  def _get_object(self, path, chunk_size=None):
    self._initialize()
    path = self._normalize_path(path)
    try:
      _, obj = self._swift_connection.get_object(self._swift_container, path,
                                                 resp_chunk_size=chunk_size)
      return obj
    except Exception:
      logger.exception('Could not get object: %s', path)
      raise IOError('Path %s not found' % path)

  def _put_object(self, path, content, chunk=None, content_type=None, content_encoding=None):
    self._initialize()
    path = self._normalize_path(path)
    headers = {}

    if content_encoding is not None:
      headers['Content-Encoding'] = content_encoding

    try:
      self._swift_connection.put_object(self._swift_container, path, content,
                                        chunk_size=chunk, content_type=content_type,
                                        headers=headers)
    except ClientException:
      # We re-raise client exception here so that validation of config during setup can see
      # the client exception messages.
      raise
    except Exception:
      logger.exception('Could not put object: %s', path)
      raise IOError("Could not put content: %s" % path)

  def _head_object(self, path):
    self._initialize()
    path = self._normalize_path(path)
    try:
      return self._swift_connection.head_object(self._swift_container, path)
    except Exception:
      logger.exception('Could not head object: %s', path)
      return None

  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
    if requires_cors:
      return None

    # TODO(jschorr): This method is not strictly necessary but would result in faster operations
    # when using this storage engine. However, the implementation (as seen in the link below)
    # is not clean, so we punt on this for now.
    # http://docs.openstack.org/juno/config-reference/content/object-storage-tempurl.html
    return None

  def get_content(self, path):
    return self._get_object(path)

  def put_content(self, path, content):
    self._put_object(path, content)

  def stream_read(self, path):
    for data in self._get_object(path, self.buffer_size):
      yield data

  def stream_read_file(self, path):
    raise NotImplementedError

  def stream_write(self, path, fp, content_type=None, content_encoding=None):
    self._put_object(path, fp, self.buffer_size, content_type=content_type,
                     content_encoding=content_encoding)

  def list_directory(self, path=None):
    container = self._get_container(path)
    if not container:
      raise OSError('Unknown path: %s' % path)

    for entry in container:
      param = None
      if 'name' in entry:
        param = 'name'
      elif 'subdir' in entry:
        param = 'subdir'
      else:
        continue

      yield self._get_relative_path(entry[param])

  def exists(self, path):
    return bool(self._head_object(path))

  def remove(self, path):
    self._initialize()
    path = self._normalize_path(path)
    try:
      self._swift_connection.delete_object(self._swift_container, path)
    except Exception:
      raise IOError('Cannot delete path: %s' % path)

  def _random_checksum(self, count):
    chars = string.ascii_uppercase + string.digits
    return ''.join(SystemRandom().choice(chars) for _ in range(count))

  def get_checksum(self, path):
    headers = self._head_object(path)
    if not headers:
      raise IOError('Cannot lookup path: %s' % path)

    return headers.get('etag', '')[1:-1][:7] or self._random_checksum(7)
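Reviewer note (not part of the commit): the path handling is the subtle part of this driver — Swift dislikes leading slashes, and trailing slashes are always stripped so keys stay canonical. A standalone sketch of `_normalize_path`:

```python
# Standalone sketch of SwiftStorage._normalize_path above, for illustration.
def normalize_path(storage_path, path=None):
    path = storage_path + (path or '')
    # Openstack does not like paths starting with '/', and a trailing '/'
    # is stripped so object keys stay canonical.
    if path.startswith('/'):
        path = path[1:]
    if path.endswith('/'):
        path = path[:-1]
    return path

assert normalize_path('/registry/', 'sha256/ab/') == 'registry/sha256/ab'
assert normalize_path('/registry/') == 'registry'
```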
test/registry_tests.py (new file, +247 lines)
@@ -0,0 +1,247 @@
|
|||
import unittest
import requests

from flask.blueprints import Blueprint
from flask.ext.testing import LiveServerTestCase

from app import app
from endpoints.registry import registry
from endpoints.index import index
from endpoints.tags import tags
from endpoints.api import api_bp
from initdb import wipe_database, initialize_database, populate_database
from endpoints.csrf import generate_csrf_token

import endpoints.decorated
import json
import tarfile

from cStringIO import StringIO
from util.checksums import compute_simple

try:
  app.register_blueprint(index, url_prefix='/v1')
  app.register_blueprint(tags, url_prefix='/v1')
  app.register_blueprint(registry, url_prefix='/v1')
  app.register_blueprint(api_bp, url_prefix='/api')
except ValueError:
  # Blueprint was already registered.
  pass


# Add a test blueprint for generating CSRF tokens.
testbp = Blueprint('testbp', __name__)

@testbp.route('/csrf', methods=['GET'])
def generate_csrf():
  return generate_csrf_token()

app.register_blueprint(testbp, url_prefix='/__test')


class RegistryTestCase(LiveServerTestCase):
  maxDiff = None

  def create_app(self):
    app.config['TESTING'] = True
    return app

  def setUp(self):
    # Note: We cannot use the normal savepoint-based DB setup here because we are accessing
    # different app instances remotely via a live webserver, which is multiprocess. Therefore, we
    # completely clear the database between tests.
    wipe_database()
    initialize_database()
    populate_database()

    self.clearSession()

  def clearSession(self):
    self.session = requests.Session()
    self.signature = None
    self.docker_token = 'true'

    # Load the CSRF token.
    self.csrf_token = ''
    self.csrf_token = self.conduct('GET', '/__test/csrf').text

  def conduct(self, method, url, headers=None, data=None, auth=None, expected_code=200):
    headers = headers or {}
    headers['X-Docker-Token'] = self.docker_token

    if self.signature and not auth:
      headers['Authorization'] = 'token ' + self.signature

    response = self.session.request(method, self.get_server_url() + url, headers=headers,
                                    data=data, auth=auth,
                                    params=dict(_csrf_token=self.csrf_token))
    if response.status_code != expected_code:
      print response.text

    if 'www-authenticate' in response.headers:
      self.signature = response.headers['www-authenticate']

    if 'X-Docker-Token' in response.headers:
      self.docker_token = response.headers['X-Docker-Token']

    self.assertEquals(response.status_code, expected_code)
    return response

  def ping(self):
    self.conduct('GET', '/v1/_ping')

  def do_login(self, username, password='password'):
    self.ping()
    result = self.conduct('POST', '/v1/users/',
                          data=json.dumps(dict(username=username, password=password,
                                               email='bar@example.com')),
                          headers={"Content-Type": "application/json"},
                          expected_code=400)

    self.assertEquals(result.text, '"Username or email already exists"')
    self.conduct('GET', '/v1/users/', auth=(username, password))

  def do_push(self, namespace, repository, username, password, images):
    auth = (username, password)

    # Ping!
    self.ping()

    # PUT /v1/repositories/{namespace}/{repository}/
    data = [{"id": image['id']} for image in images]
    self.conduct('PUT', '/v1/repositories/%s/%s' % (namespace, repository),
                 data=json.dumps(data), auth=auth, expected_code=201)

    for image in images:
      # PUT /v1/images/{imageID}/json
      self.conduct('PUT', '/v1/images/%s/json' % image['id'], data=json.dumps(image))

      # PUT /v1/images/{imageID}/layer
      tar_file_info = tarfile.TarInfo(name='image_name')
      tar_file_info.type = tarfile.REGTYPE
      tar_file_info.size = len(image['id'])

      layer_data = StringIO()

      tar_file = tarfile.open(fileobj=layer_data, mode='w|gz')
      tar_file.addfile(tar_file_info, StringIO(image['id']))
      tar_file.close()

      layer_bytes = layer_data.getvalue()
      layer_data.close()

      self.conduct('PUT', '/v1/images/%s/layer' % image['id'], data=StringIO(layer_bytes))

      # PUT /v1/images/{imageID}/checksum
      checksum = compute_simple(StringIO(layer_bytes), json.dumps(image))
      self.conduct('PUT', '/v1/images/%s/checksum' % image['id'],
                   headers={'X-Docker-Checksum-Payload': checksum})

    # PUT /v1/repositories/{namespace}/{repository}/tags/latest
    self.conduct('PUT', '/v1/repositories/%s/%s/tags/latest' % (namespace, repository),
                 data='"' + images[0]['id'] + '"')

    # PUT /v1/repositories/{namespace}/{repository}/images
    self.conduct('PUT', '/v1/repositories/%s/%s/images' % (namespace, repository),
                 expected_code=204)

  def do_pull(self, namespace, repository, username=None, password='password', expected_code=200):
    auth = None
    if username:
      auth = (username, password)

    # Ping!
    self.ping()

    prefix = '/v1/repositories/%s/%s/' % (namespace, repository)

    # GET /v1/repositories/{namespace}/{repository}/images
    self.conduct('GET', prefix + 'images', auth=auth, expected_code=expected_code)
    if expected_code != 200:
      return

    # GET /v1/repositories/{namespace}/{repository}/tags
    result = json.loads(self.conduct('GET', prefix + 'tags').text)

    for image_id in result.values():
      # GET /v1/images/{imageID}/{ancestry, json, layer}
      image_prefix = '/v1/images/%s/' % image_id
      self.conduct('GET', image_prefix + 'ancestry')
      self.conduct('GET', image_prefix + 'json')
      self.conduct('GET', image_prefix + 'layer')

  def conduct_api_login(self, username, password):
    self.conduct('POST', '/api/v1/signin',
                 data=json.dumps(dict(username=username, password=password)),
                 headers={'Content-Type': 'application/json'})

  def change_repo_visibility(self, namespace, repository, visibility):
    self.conduct('POST', '/api/v1/repository/%s/%s/changevisibility' % (namespace, repository),
                 data=json.dumps(dict(visibility=visibility)),
                 headers={'Content-Type': 'application/json'})


class RegistryTests(RegistryTestCase):
  def test_pull_publicrepo_anonymous(self):
    # Add a new repository under the public user, so we have a real repository to pull.
    images = [{
      'id': 'onlyimagehere'
    }]
    self.do_push('public', 'newrepo', 'public', 'password', images)
    self.clearSession()

    # First try to pull the (currently private) repo anonymously, which should fail since it is
    # private.
    self.do_pull('public', 'newrepo', expected_code=403)

    # Make the repository public.
    self.conduct_api_login('public', 'password')
    self.change_repo_visibility('public', 'newrepo', 'public')
    self.clearSession()

    # Pull the repository anonymously, which should succeed because the repository is public.
    self.do_pull('public', 'newrepo')

  def test_pull_publicrepo_devtable(self):
    # Add a new repository under the public user, so we have a real repository to pull.
    images = [{
      'id': 'onlyimagehere'
    }]
    self.do_push('public', 'newrepo', 'public', 'password', images)
    self.clearSession()

    # First try to pull the (currently private) repo as devtable, which should fail as it belongs
    # to public.
    self.do_pull('public', 'newrepo', 'devtable', 'password', expected_code=403)

    # Make the repository public.
    self.conduct_api_login('public', 'password')
    self.change_repo_visibility('public', 'newrepo', 'public')
    self.clearSession()

    # Pull the repository as devtable, which should succeed because the repository is public.
    self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_pull_private_repo(self):
    # Add a new repository under the devtable user, so we have a real repository to pull.
    images = [{
      'id': 'onlyimagehere'
    }]
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images)
    self.clearSession()

    # First try to pull the (currently private) repo as public, which should fail as it belongs
    # to devtable.
    self.do_pull('devtable', 'newrepo', 'public', 'password', expected_code=403)

    # Pull the repository as devtable, which should succeed because the repository is owned by
    # devtable.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password')


if __name__ == '__main__':
  unittest.main()
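The do_push helper above walks the docker registry v1 push protocol in order: declare the repository with its image IDs, then for each image upload the JSON metadata, the gzipped layer tarball, and the checksum, and finally tag the image and mark the push complete. A condensed sketch of that same sequence as a standalone client follows; REGISTRY, AUTH, and push_single_image are illustrative names, and the X-Docker-Token/www-authenticate handshake that conduct() threads through every request is omitted for brevity:

import json
import requests

REGISTRY = 'http://localhost:5000'  # placeholder registry URL
AUTH = ('public', 'password')       # placeholder credentials

def push_single_image(namespace, repo, image_json, layer_bytes, checksum):
  # 1. Declare the repository and its image IDs.
  requests.put('%s/v1/repositories/%s/%s' % (REGISTRY, namespace, repo),
               data=json.dumps([{'id': image_json['id']}]), auth=AUTH)

  # 2. Upload the image metadata, layer data, and checksum.
  image_prefix = '%s/v1/images/%s/' % (REGISTRY, image_json['id'])
  requests.put(image_prefix + 'json', data=json.dumps(image_json))
  requests.put(image_prefix + 'layer', data=layer_bytes)
  requests.put(image_prefix + 'checksum',
               headers={'X-Docker-Checksum-Payload': checksum})

  # 3. Tag the image as latest and mark the push complete.
  requests.put('%s/v1/repositories/%s/%s/tags/latest' % (REGISTRY, namespace, repo),
               data=json.dumps(image_json['id']))
  requests.put('%s/v1/repositories/%s/%s/images' % (REGISTRY, namespace, repo))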
@@ -38,45 +38,95 @@ class TestLDAP(unittest.TestCase):
        'ou': 'employees',
        'uid': ['nomail'],
        'userPassword': ['somepass']
      }
      },
      'uid=cool.user,ou=employees,dc=quay,dc=io': {
        'dc': ['quay', 'io'],
        'ou': 'employees',
        'uid': ['cool.user', 'referred'],
        'userPassword': ['somepass'],
        'mail': ['foo@bar.com']
      },
      'uid=referred,ou=employees,dc=quay,dc=io': {
        'uid': ['referred'],
        '_referral': 'ldap:///uid=cool.user,ou=employees,dc=quay,dc=io'
      },
      'uid=invalidreferred,ou=employees,dc=quay,dc=io': {
        'uid': ['invalidreferred'],
        '_referral': 'ldap:///uid=someinvaliduser,ou=employees,dc=quay,dc=io'
      },
      'uid=multientry,ou=subgroup1,ou=employees,dc=quay,dc=io': {
        'uid': ['multientry'],
        'mail': ['foo@bar.com'],
        'userPassword': ['somepass'],
      },
      'uid=multientry,ou=subgroup2,ou=employees,dc=quay,dc=io': {
        'uid': ['multientry'],
        'another': ['key']
      },
    })

    self.mockldap.start()

    base_dn = ['dc=quay', 'dc=io']
    admin_dn = 'uid=testy,ou=employees,dc=quay,dc=io'
    admin_passwd = 'password'
    user_rdn = ['ou=employees']
    uid_attr = 'uid'
    email_attr = 'mail'

    ldap = LDAPUsers('ldap://localhost', base_dn, admin_dn, admin_passwd, user_rdn,
                     uid_attr, email_attr)

    self.ldap = ldap

  def tearDown(self):
    self.mockldap.stop()
    finished_database_for_testing(self)
    self.ctx.__exit__(True, None, None)

  def test_login(self):
    base_dn = ['dc=quay', 'dc=io']
    admin_dn = 'uid=testy,ou=employees,dc=quay,dc=io'
    admin_passwd = 'password'
    user_rdn = ['ou=employees']
    uid_attr = 'uid'
    email_attr = 'mail'
    # Verify we can login.
    (response, _) = self.ldap.verify_user('someuser', 'somepass')
    self.assertEquals(response.username, 'someuser')

    ldap = LDAPUsers('ldap://localhost', base_dn, admin_dn, admin_passwd, user_rdn,
                     uid_attr, email_attr)

    (response, _) = ldap.verify_user('someuser', 'somepass')
    # Verify we can confirm the user.
    (response, _) = self.ldap.confirm_existing_user('someuser', 'somepass')
    self.assertEquals(response.username, 'someuser')

  def test_missing_mail(self):
    base_dn = ['dc=quay', 'dc=io']
    admin_dn = 'uid=testy,ou=employees,dc=quay,dc=io'
    admin_passwd = 'password'
    user_rdn = ['ou=employees']
    uid_attr = 'uid'
    email_attr = 'mail'

    ldap = LDAPUsers('ldap://localhost', base_dn, admin_dn, admin_passwd, user_rdn,
                     uid_attr, email_attr)

    (response, err_msg) = ldap.verify_user('nomail', 'somepass')
    (response, err_msg) = self.ldap.verify_user('nomail', 'somepass')
    self.assertIsNone(response)
    self.assertEquals('Missing mail field "mail" in user record', err_msg)

  def test_confirm_different_username(self):
    # Verify that the user is logged in and their username was adjusted.
    (response, _) = self.ldap.verify_user('cool.user', 'somepass')
    self.assertEquals(response.username, 'cool_user')

    # Verify we can confirm the user's quay username.
    (response, _) = self.ldap.confirm_existing_user('cool_user', 'somepass')
    self.assertEquals(response.username, 'cool_user')

    # Verify that we *cannot* confirm the LDAP username.
    (response, _) = self.ldap.confirm_existing_user('cool.user', 'somepass')
    self.assertIsNone(response)

  def test_referral(self):
    (response, _) = self.ldap.verify_user('referred', 'somepass')
    self.assertEquals(response.username, 'cool_user')

    # Verify we can confirm the user's quay username.
    (response, _) = self.ldap.confirm_existing_user('cool_user', 'somepass')
    self.assertEquals(response.username, 'cool_user')

  def test_invalid_referral(self):
    (response, _) = self.ldap.verify_user('invalidreferred', 'somepass')
    self.assertIsNone(response)

  def test_multientry(self):
    (response, _) = self.ldap.verify_user('multientry', 'somepass')
    self.assertEquals(response.username, 'multientry')

if __name__ == '__main__':
  unittest.main()
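The hunk above moves the LDAPUsers construction out of the individual tests and into setUp, backed by mockldap, which patches ldap.initialize to serve a canned in-memory directory. A minimal sketch of that pattern, assuming the mockldap package; the directory contents here are abbreviated placeholders, not the test's real entries:

from mockldap import MockLdap

# A canned directory: DN -> attribute map. Entries abbreviated for illustration.
directory = {
  'dc=quay,dc=io': {'dc': ['quay', 'io']},
  'uid=testy,ou=employees,dc=quay,dc=io': {
    'uid': ['testy'],
    'userPassword': ['password'],
    'mail': ['testy@example.com'],  # hypothetical value
  },
}

mockldap = MockLdap(directory)
mockldap.start()   # patches ldap.initialize() process-wide
try:
  # ... exercise code that binds against ldap://localhost here ...
  pass
finally:
  mockldap.stop()  # always undo the patch, as tearDown does above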
29  test/test_trigger.py  Normal file

@@ -0,0 +1,29 @@
import unittest
import re

from endpoints.trigger import matches_ref

class TestRegex(unittest.TestCase):
  def assertDoesNotMatch(self, ref, filt):
    self.assertFalse(matches_ref(ref, re.compile(filt)))

  def assertMatches(self, ref, filt):
    self.assertTrue(matches_ref(ref, re.compile(filt)))

  def test_matches_ref(self):
    self.assertMatches('ref/heads/master', '.+')
    self.assertMatches('ref/heads/master', 'heads/.+')
    self.assertMatches('ref/heads/master', 'heads/master')

    self.assertDoesNotMatch('ref/heads/foobar', 'heads/master')
    self.assertDoesNotMatch('ref/heads/master', 'tags/master')

    self.assertMatches('ref/heads/master', '(((heads/alpha)|(heads/beta))|(heads/gamma))|(heads/master)')
    self.assertMatches('ref/heads/alpha', '(((heads/alpha)|(heads/beta))|(heads/gamma))|(heads/master)')
    self.assertMatches('ref/heads/beta', '(((heads/alpha)|(heads/beta))|(heads/gamma))|(heads/master)')
    self.assertMatches('ref/heads/gamma', '(((heads/alpha)|(heads/beta))|(heads/gamma))|(heads/master)')

    self.assertDoesNotMatch('ref/heads/delta', '(((heads/alpha)|(heads/beta))|(heads/gamma))|(heads/master)')

if __name__ == '__main__':
  unittest.main()
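matches_ref itself is not shown in this diff, but the tests pin down its contract: the leading segment of the ref ('ref/') is dropped, and the compiled filter must match the remainder in full. One plausible implementation consistent with these tests, offered as a sketch rather than the actual endpoints.trigger code:

import re

def matches_ref(ref, regex):
  # Drop the leading segment ('ref/heads/master' -> 'heads/master').
  match_string = ref.split('/', 1)[1]

  # Require the filter to consume the entire remainder, so 'heads/master'
  # rejects 'ref/heads/foobar' and 'tags/master' rejects heads refs.
  match = regex.match(match_string)
  return bool(match) and match.group(0) == match_string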
@@ -10,7 +10,7 @@ from flask import Flask, current_app
from flask_mail import Mail

def sendConfirmation(username):
  user = model.get_user(username)
  user = model.get_nonrobot_user(username)
  if not user:
    print 'No user found'
    return
@@ -10,7 +10,7 @@ from flask import Flask, current_app
from flask_mail import Mail

def sendReset(username):
  user = model.get_user(username)
  user = model.get_nonrobot_user(username)
  if not user:
    print 'No user found'
    return
@@ -29,6 +29,7 @@ def no_cache(f):
  @wraps(f)
  def add_no_cache(*args, **kwargs):
    response = f(*args, **kwargs)
    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    if response is not None:
      response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    return response
  return add_no_cache
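This fix matters because the old add_no_cache set the header before any None check, so a view returning None crashed with an AttributeError instead of passing the response through. The corrected decorator in full, reconstructed from the hunk above:

from functools import wraps

def no_cache(f):
  @wraps(f)
  def add_no_cache(*args, **kwargs):
    response = f(*args, **kwargs)
    # Guard first: a view may legitimately return None.
    if response is not None:
      response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    return response
  return add_no_cache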
@@ -11,7 +11,7 @@ def parse_basic_auth(header_value):
    return None

  try:
    basic_parts = base64.b64decode(parts[1]).split(':')
    basic_parts = base64.b64decode(parts[1]).split(':', 1)
    if len(basic_parts) != 2:
      return None
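The split(':', 1) change is the classic Basic-auth fix: the userid cannot contain a colon, but the password may, so only the first colon separates the two. A quick illustration of the difference (the credential values are made up):

import base64

encoded = base64.b64encode('someuser:pass:with:colons')

# Old behaviour: every colon splits, yielding 4 parts and a rejected login.
assert len(base64.b64decode(encoded).split(':')) == 4

# New behaviour: split once, preserving the full password.
user, password = base64.b64decode(encoded).split(':', 1)
assert (user, password) == ('someuser', 'pass:with:colons')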