From 027ada1f5c77ed66172dd68e76f1ec7b12c660ff Mon Sep 17 00:00:00 2001
From: Jake Moshenko
Date: Fri, 9 May 2014 17:39:43 -0400
Subject: [PATCH 01/34] First stab at LDAP integration.

---
 Dockerfile             |   3 +
 app.py                 |   4 ++
 config.py              |   3 +
 data/users.py          | 122 +++++++++++++++++++++++++++++++++++++++++
 endpoints/api/user.py  |   6 +-
 requirements-nover.txt |   1 +
 6 files changed, 136 insertions(+), 3 deletions(-)
 create mode 100644 data/users.py

diff --git a/Dockerfile b/Dockerfile
index e5a9d7e07..ec0e1f4e8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,6 +23,9 @@ RUN tar xjf phantomjs.tar.bz2 && ln -s `pwd`/phantomjs*/bin/phantomjs /usr/bin/p
 RUN apt-get install -y nodejs
 RUN npm install -g grunt-cli
 
+# LDAP
+RUN apt-get install -y libldap2-dev libsasl2-dev
+
 ADD binary_dependencies binary_dependencies
 RUN gdebi --n binary_dependencies/*.deb
 
diff --git a/app.py b/app.py
index aa663418c..b943f62d1 100644
--- a/app.py
+++ b/app.py
@@ -10,6 +10,7 @@ import features
 
 from storage import Storage
 from data.userfiles import Userfiles
+from data.users import UserAuthentication
 from util.analytics import Analytics
 from util.exceptionlog import Sentry
 from data.billing import Billing
@@ -46,3 +47,6 @@ userfiles = Userfiles(app)
 analytics = Analytics(app)
 billing = Billing(app)
 sentry = Sentry(app)
+
+from data import model
+authentication = UserAuthentication(app, model)
diff --git a/config.py b/config.py
index d5fc126cb..9202b26d7 100644
--- a/config.py
+++ b/config.py
@@ -72,6 +72,9 @@ class DefaultConfig(object):
   STORAGE_TYPE = 'LocalStorage'
   STORAGE_PATH = 'test/data/registry'
 
+  # Authentication
+  AUTHENTICATION_TYPE = 'Database'
+
   # Build logs
   BUILDLOGS = BuildLogs('logs.quay.io') # Change me
 
diff --git a/data/users.py b/data/users.py
new file mode 100644
index 000000000..895c6872c
--- /dev/null
+++ b/data/users.py
@@ -0,0 +1,122 @@
+import ldap
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class DatabaseUsers(object):
+  def __init__(self, app_db):
+    self._app_db = app_db
+
+  def verify_user(self, username_or_email, password):
+    """ Simply delegate to the model implementation. """
+    return self._app_db.verify_user(username_or_email, password)
+
+
+class LDAPConnection(object):
+  def __init__(self, ldap_uri, user_dn, user_pw):
+    self._ldap_uri = ldap_uri
+    self._user_dn = user_dn
+    self._user_pw = user_pw
+    self._conn = None
+
+  def __enter__(self):
+    self._conn = ldap.initialize(self._ldap_uri)
+    self._conn.simple_bind_s(self._user_dn, self._user_pw)
+    return self._conn
+
+  def __exit__(self, exc_type, value, tb):
+    self._conn.unbind_s()
+
+
+class LDAPUsers(object):
+  def __init__(self, app_db, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr,
+               email_attr, passwd_attr):
+    self._app_db = app_db
+    self._ldap_conn = LDAPConnection(ldap_uri, admin_dn, admin_passwd)
+    self._base_dn = base_dn
+    self._user_rdn = user_rdn
+    self._uid_attr = uid_attr
+    self._email_attr = email_attr
+    self._passwd_attr = passwd_attr
+
+  def verify_user(self, username_or_email, password):
+    """ Verify the credentials with LDAP and if they are valid, create or update the user
+        in our database.
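+        Returns the matching database user on success, or None if the lookup or the
+        password check fails.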
""" + + with self._ldap_conn as conn: + user_search_dn = ','.join(self._user_rdn + self._base_dn) + query = '(|({0}={2})({1}={2}))'.format(self._uid_attr, self._email_attr, + username_or_email) + user = conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query) + + if len(user) != 1: + return None + + found_dn, found_response = user[0] + + # First validate the password + valid_passwd = conn.compare_s(found_dn, self._passwd_attr, password) == 1 + if not valid_passwd: + return None + + logger.debug('LDAP Response: %s', found_response) + + # Now check if we have the same username in our DB + username = found_response[self._uid_attr][0] + email = found_response[self._email_attr][0] + password = found_response[self._passwd_attr][0] + db_user = self._app_db.get_user(username) + + logger.debug('Email: %s', email) + + if not db_user: + # We must create the user in our db + db_user = self._app_db.create_user(username, 'password_from_ldap', email) + db_user.verified = True + else: + # Update the db attributes from ldap + db_user.email = email + + db_user.save() + + return db_user + + +class UserAuthentication(object): + def __init__(self, app=None, model=None): + self.app = app + if app is not None: + self.state = self.init_app(app, model) + else: + self.state = None + + def init_app(self, app, model): + authentication_type = app.config.get('AUTHENTICATION_TYPE', 'Database') + + if authentication_type == 'Database': + users = DatabaseUsers(model) + elif authentication_type == 'LDAP': + ldap_uri = app.config.get('LDAP_URI', 'ldap://localhost') + base_dn = app.config.get('LDAP_BASE_DN') + admin_dn = app.config.get('LDAP_ADMIN_DN') + admin_passwd = app.config.get('LDAP_ADMIN_PASSWD') + user_rdn = app.config.get('LDAP_USER_RDN', []) + uid_attr = app.config.get('LDAP_UID_ATTR', 'uid') + email_attr = app.config.get('LDAP_EMAIL_ATTR', 'mail') + passwd_attr = app.config.get('LDAP_PASSWD_ATTR', 'userPassword') + + users = LDAPUsers(model, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, + email_attr, passwd_attr) + + else: + raise RuntimeError('Unknown authentication type: %s' % authentication_type) + + # register extension with app + app.extensions = getattr(app, 'extensions', {}) + app.extensions['authentication'] = users + return users + + def __getattr__(self, name): + return getattr(self.state, name, None) diff --git a/endpoints/api/user.py b/endpoints/api/user.py index 437f37450..4d54b3e50 100644 --- a/endpoints/api/user.py +++ b/endpoints/api/user.py @@ -5,7 +5,7 @@ from flask import request from flask.ext.login import logout_user from flask.ext.principal import identity_changed, AnonymousIdentity -from app import app, billing as stripe +from app import app, billing as stripe, authentication from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error, log_action, internal_only, NotFound, require_user_admin, InvalidToken, require_scope, format_date, hide_if, show_if) @@ -227,7 +227,7 @@ def conduct_signin(username_or_email, password): needs_email_verification = False invalid_credentials = False - verified = model.verify_user(username_or_email, password) + verified = authentication.verify_user(username_or_email, password) if verified: if common_login(verified): return {'success': True} @@ -289,7 +289,7 @@ class ConvertToOrganization(ApiResource): # Ensure that the sign in credentials work. 
admin_password = convert_data['adminPassword'] - if not model.verify_user(admin_username, admin_password): + if not authentication.verify_user(admin_username, admin_password): raise request_error(reason='invaliduser', message='The admin user credentials are not valid') diff --git a/requirements-nover.txt b/requirements-nover.txt index cc370da9d..efda6ebef 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -32,3 +32,4 @@ python-magic reportlab==2.7 blinker raven +python-ldap From 08ccad7fe45d0b62aa120322847359728f065819 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Fri, 9 May 2014 18:49:33 -0400 Subject: [PATCH 02/34] Add support for not using CDN-based resources. When USE_CDN = False, all CDN-based resources will instead be used from the local system. --- Dockerfile | 8 +++++ config.py | 4 +++ endpoints/common.py | 6 ++++ external_libraries.py | 77 +++++++++++++++++++++++++++++++++++++++++++ templates/base.html | 23 ++++--------- 5 files changed, 101 insertions(+), 17 deletions(-) create mode 100644 external_libraries.py diff --git a/Dockerfile b/Dockerfile index ec0e1f4e8..365adfec7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -53,6 +53,7 @@ ADD app.py app.py ADD application.py application.py ADD config.py config.py ADD initdb.py initdb.py +ADD external_libraries.py external_libraries.py ADD conf/init/mklogsdir.sh /etc/my_init.d/ ADD conf/init/gunicorn.sh /etc/service/gunicorn/run @@ -60,9 +61,16 @@ ADD conf/init/nginx.sh /etc/service/nginx/run ADD conf/init/diffsworker.sh /etc/service/diffsworker/run ADD conf/init/webhookworker.sh /etc/service/webhookworker/run +# Build the compiled binaries of JS and CSS RUN cd grunt && npm install RUN cd grunt && grunt +# Download any external libs. +RUN mkdir static/fonts +RUN mkdir static/ldn + +RUN venv/bin/python -m external_libraries + # Add the tests last because they're prone to accidental changes, then run them ADD test test RUN TEST=true venv/bin/python -m unittest discover diff --git a/config.py b/config.py index 9202b26d7..8d7cf43af 100644 --- a/config.py +++ b/config.py @@ -68,6 +68,10 @@ class DefaultConfig(object): DB_TRANSACTION_FACTORY = create_transaction + # If true, CDN URLs will be used for our external dependencies, rather than the local + # copies. 
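+  # When USE_CDN is False, the local copies downloaded by external_libraries.py are
+  # served instead.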
+ USE_CDN = True + # Data storage STORAGE_TYPE = 'LocalStorage' STORAGE_PATH = 'test/data/registry' diff --git a/endpoints/common.py b/endpoints/common.py index e9bd7b7c6..6a85ef5e6 100644 --- a/endpoints/common.py +++ b/endpoints/common.py @@ -17,6 +17,7 @@ from endpoints.api.discovery import swagger_route_data from werkzeug.routing import BaseConverter from functools import wraps from config import getFrontendVisibleConfig +from external_libraries import get_external_javascript, get_external_css import features @@ -147,7 +148,12 @@ def render_page_template(name, **kwargs): main_scripts = ['dist/quay-frontend.min.js'] cache_buster = random_string() + external_styles = get_external_css(local=not app.config.get('USE_CDN', True)) + external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True)) + resp = make_response(render_template(name, route_data=json.dumps(get_route_data()), + external_styles=external_styles, + external_scripts=external_scripts, main_styles=main_styles, library_styles=library_styles, main_scripts=main_scripts, diff --git a/external_libraries.py b/external_libraries.py new file mode 100644 index 000000000..d731d5ce1 --- /dev/null +++ b/external_libraries.py @@ -0,0 +1,77 @@ +import urllib2 +import re +import os + +LOCAL_DIRECTORY = 'static/ldn/' + +EXTERNAL_JS = [ + 'code.jquery.com/jquery.js', + 'netdna.bootstrapcdn.com/bootstrap/3.0.0/js/bootstrap.min.js', + 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular.min.js', + 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-route.min.js', + 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-sanitize.min.js', + 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-animate.min.js', + 'cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.2.0/js/bootstrap-datepicker.min.js', + 'cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3,momentjs', + 'cdn.ravenjs.com/1.1.14/jquery,native/raven.min.js', + 'checkout.stripe.com/checkout.js', +] + +EXTERNAL_CSS = [ + 'netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css', + 'netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css', + 'fonts.googleapis.com/css?family=Droid+Sans:400,700', +] + +EXTERNAL_FONTS = [ + 'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.woff?v=4.0.3', + 'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.ttf?v=4.0.3', + 'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.svg?v=4.0.3', +] + + +def get_external_javascript(local=False): + if local: + return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_JS] + + return ['//' + src for src in EXTERNAL_JS] + + +def get_external_css(local=False): + if local: + return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_CSS] + + return ['//' + src for src in EXTERNAL_CSS] + + +def format_local_name(url): + filename = url.split('/')[-1] + filename = re.sub(r'[+,?@=:]', '', filename) + if not filename.endswith('.css') and not filename.endswith('.js'): + if filename.find('css') >= 0: + filename = filename + '.css' + else: + filename = filename + '.js' + + return filename + + +if __name__ == '__main__': + for url in EXTERNAL_JS + EXTERNAL_CSS: + print 'Downloading %s' % url + response = urllib2.urlopen('https://' + url) + contents = response.read() + + filename = format_local_name(url) + print 'Writing %s' % filename + with open(LOCAL_DIRECTORY + filename, 'w') as f: + f.write(contents) + + + for url in EXTERNAL_FONTS: + print 'Downloading %s' % url + response 
= urllib2.urlopen('https://' + url) + + filename = os.path.basename(url).split('?')[0] + with open('static/fonts/' + filename, "wb") as local_file: + local_file.write(response.read()) diff --git a/templates/base.html b/templates/base.html index 01f0e6f6c..228cb2742 100644 --- a/templates/base.html +++ b/templates/base.html @@ -11,9 +11,9 @@ - - - + {% for style_url in external_styles %} + + {% endfor %} @@ -47,20 +47,9 @@ window.__token = '{{ csrf_token() }}'; - - - - - - - - - - - - - - + {% for script_url in external_scripts %} + + {% endfor %} {% for script_path in library_scripts %} From f12970469bd0937e5ff0b0407772283772b33328 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Mon, 12 May 2014 14:45:19 -0400 Subject: [PATCH 03/34] Add security tests for the superuser API --- test/test_api_security.py | 85 +++++++++++++++++++++++++++++++++++++++ test/testconfig.py | 3 ++ 2 files changed, 88 insertions(+) diff --git a/test/test_api_security.py b/test/test_api_security.py index f0a2d59f0..34fe7ee18 100644 --- a/test/test_api_security.py +++ b/test/test_api_security.py @@ -36,6 +36,9 @@ from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repos from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission, RepositoryTeamPermissionList, RepositoryUserPermissionList) +from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement + + try: app.register_blueprint(api_bp, url_prefix='/api') except ValueError: @@ -3275,5 +3278,87 @@ class TestUserAuthorization(ApiTestCase): self._run_test('DELETE', 404, 'devtable', None) +class TestSuperUserLogs(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(SuperUserLogs) + + def test_get_anonymous(self): + self._run_test('GET', 403, None, None) + + def test_get_freshuser(self): + self._run_test('GET', 403, 'freshuser', None) + + def test_get_reader(self): + self._run_test('GET', 403, 'reader', None) + + def test_get_devtable(self): + self._run_test('GET', 200, 'devtable', None) + + +class TestSuperUserList(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(SuperUserList) + + def test_get_anonymous(self): + self._run_test('GET', 403, None, None) + + def test_get_freshuser(self): + self._run_test('GET', 403, 'freshuser', None) + + def test_get_reader(self): + self._run_test('GET', 403, 'reader', None) + + def test_get_devtable(self): + self._run_test('GET', 200, 'devtable', None) + + + +class TestSuperUserManagement(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(SuperUserManagement, username='freshuser') + + def test_get_anonymous(self): + self._run_test('GET', 403, None, None) + + def test_get_freshuser(self): + self._run_test('GET', 403, 'freshuser', None) + + def test_get_reader(self): + self._run_test('GET', 403, 'reader', None) + + def test_get_devtable(self): + self._run_test('GET', 200, 'devtable', None) + + + def test_put_anonymous(self): + self._run_test('PUT', 403, None, {}) + + def test_put_freshuser(self): + self._run_test('PUT', 403, 'freshuser', {}) + + def test_put_reader(self): + self._run_test('PUT', 403, 'reader', {}) + + def test_put_devtable(self): + self._run_test('PUT', 200, 'devtable', {}) + + + def test_delete_anonymous(self): + self._run_test('DELETE', 403, None, None) + + def test_delete_freshuser(self): + self._run_test('DELETE', 403, 'freshuser', None) + + def test_delete_reader(self): + self._run_test('DELETE', 403, 'reader', None) + + def test_delete_devtable(self): + 
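+    # Unlike the GET and PUT cases above, a successful DELETE returns 204 (no content).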
self._run_test('DELETE', 204, 'devtable', None) + + + if __name__ == '__main__': unittest.main() diff --git a/test/testconfig.py b/test/testconfig.py index d012af469..f39ee42c2 100644 --- a/test/testconfig.py +++ b/test/testconfig.py @@ -28,3 +28,6 @@ class TestConfig(DefaultConfig): 'deadbeef-dead-beef-dead-beefdeadbeef') USERFILES_TYPE = 'FakeUserfiles' + + FEATURE_SUPER_USERS = True + SUPER_USERS = ['devtable'] From 0fd114df844cffab1687e98091f556d78fcbbaa8 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Mon, 12 May 2014 15:22:58 -0400 Subject: [PATCH 04/34] Add API usage tests for the superuser API --- test/test_api_usage.py | 62 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/test/test_api_usage.py b/test/test_api_usage.py index a1683b372..80d7eee0d 100644 --- a/test/test_api_usage.py +++ b/test/test_api_usage.py @@ -38,6 +38,7 @@ from endpoints.api.organization import (OrganizationList, OrganizationMember, from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission, RepositoryTeamPermissionList, RepositoryUserPermissionList) +from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement try: app.register_blueprint(api_bp, url_prefix='/api') @@ -1937,5 +1938,66 @@ class TestUserAuthorizations(ApiTestCase): self.getJsonResponse(UserAuthorization, params=dict(access_token_uuid = authorization['uuid']), expected_code=404) + +class TestSuperUserLogs(ApiTestCase): + def test_get_logs(self): + self.login(ADMIN_ACCESS_USER) + + json = self.getJsonResponse(SuperUserLogs) + + assert 'logs' in json + assert len(json['logs']) > 0 + + +class TestSuperUserList(ApiTestCase): + def test_get_users(self): + self.login(ADMIN_ACCESS_USER) + + json = self.getJsonResponse(SuperUserList) + + assert 'users' in json + assert len(json['users']) > 0 + + +class TestSuperUserManagement(ApiTestCase): + def test_get_user(self): + self.login(ADMIN_ACCESS_USER) + + json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser')) + self.assertEquals('freshuser', json['username']) + self.assertEquals('no@thanks.com', json['email']) + self.assertEquals(False, json['super_user']) + + def test_delete_user(self): + self.login(ADMIN_ACCESS_USER) + + # Verify the user exists. + json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser')) + self.assertEquals('freshuser', json['username']) + + # Delete the user. + self.deleteResponse(SuperUserManagement, params=dict(username = 'freshuser'), expected_code=204) + + # Verify the user no longer exists. + self.getResponse(SuperUserManagement, params=dict(username = 'freshuser'), expected_code=404) + + + def test_update_user(self): + self.login(ADMIN_ACCESS_USER) + + # Verify the user exists. + json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser')) + self.assertEquals('freshuser', json['username']) + self.assertEquals('no@thanks.com', json['email']) + + # Update the user. + self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(email='foo@bar.com')) + + # Verify the user was updated. 
+ json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser')) + self.assertEquals('freshuser', json['username']) + self.assertEquals('foo@bar.com', json['email']) + + if __name__ == '__main__': unittest.main() From 5fdccfe3e6567bebbcdad79bf4518237ec40676b Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 12:17:26 -0400 Subject: [PATCH 05/34] Add an alembic migration for the full initial database with the data. Switch LDAP to using bind and creating a federated login entry. Add LDAP support to the registry and index endpoints. Add a username transliteration and suggestion mechanism. Switch the database and model to require a manual initialization call. --- app.py | 8 +- auth/auth.py | 4 +- data/database.py | 12 +- data/migrations/env.py | 5 +- .../5a07499ce53f_set_up_initial_database.py | 607 ++++++++++++++++++ data/model/legacy.py | 69 +- data/users.py | 66 +- endpoints/index.py | 4 +- initdb.py | 1 + requirements-nover.txt | 1 + test/data/test.db | Bin 200704 -> 200704 bytes util/validation.py | 37 +- 12 files changed, 739 insertions(+), 75 deletions(-) create mode 100644 data/migrations/versions/5a07499ce53f_set_up_initial_database.py diff --git a/app.py b/app.py index b943f62d1..a27d7f058 100644 --- a/app.py +++ b/app.py @@ -9,6 +9,8 @@ from flask.ext.mail import Mail import features from storage import Storage +from data import model +from data import database from data.userfiles import Userfiles from data.users import UserAuthentication from util.analytics import Analytics @@ -47,6 +49,8 @@ userfiles = Userfiles(app) analytics = Analytics(app) billing = Billing(app) sentry = Sentry(app) +authentication = UserAuthentication(app) -from data import model -authentication = UserAuthentication(app, model) +database.configure(app.config) +model.config.app_config = app.config +model.config.store = storage diff --git a/auth/auth.py b/auth/auth.py index ac78102a4..0e5457a36 100644 --- a/auth/auth.py +++ b/auth/auth.py @@ -11,7 +11,7 @@ import scopes from data import model from data.model import oauth -from app import app +from app import app, authentication from permissions import QuayDeferredPermissionUser from auth_context import (set_authenticated_user, set_validated_token, set_authenticated_user_deferred, set_validated_oauth_token) @@ -108,7 +108,7 @@ def process_basic_auth(auth): logger.debug('Invalid robot or password for robot: %s' % credentials[0]) else: - authenticated = model.verify_user(credentials[0], credentials[1]) + authenticated = authentication.verify_user(credentials[0], credentials[1]) if authenticated: logger.debug('Successfully validated user: %s' % authenticated.username) diff --git a/data/database.py b/data/database.py index c8fa6cf25..5fd8abbf9 100644 --- a/data/database.py +++ b/data/database.py @@ -9,19 +9,19 @@ from playhouse.pool import PooledMySQLDatabase from sqlalchemy.engine.url import make_url from urlparse import urlparse -from app import app - logger = logging.getLogger(__name__) SCHEME_DRIVERS = { 'mysql': PooledMySQLDatabase, + 'mysql+pymysql': PooledMySQLDatabase, 'sqlite': SqliteDatabase, } +db = Proxy() -def generate_db(config_object): +def configure(config_object): db_kwargs = dict(config_object['DB_CONNECTION_ARGS']) parsed_url = make_url(config_object['DB_URI']) @@ -34,10 +34,8 @@ def generate_db(config_object): if parsed_url.password: db_kwargs['passwd'] = parsed_url.password - return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs) - - -db = generate_db(app.config) + real_db = 
SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs) + db.initialize(real_db) def random_string_generator(length=16): diff --git a/data/migrations/env.py b/data/migrations/env.py index 65f00819f..5b1564b50 100644 --- a/data/migrations/env.py +++ b/data/migrations/env.py @@ -2,6 +2,7 @@ from __future__ import with_statement from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig +from urllib import unquote from data.database import all_models from app import app @@ -10,7 +11,7 @@ from data.model.sqlalchemybridge import gen_sqlalchemy_metadata # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config -config.set_main_option('sqlalchemy.url', app.config['DB_URI']) +config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI'])) # Interpret the config file for Python logging. # This line sets up loggers basically. @@ -39,7 +40,7 @@ def run_migrations_offline(): script output. """ - url = app.config['DB_CONNECTION'] + url = unquote(app.config['DB_URI']) context.configure(url=url, target_metadata=target_metadata) with context.begin_transaction(): diff --git a/data/migrations/versions/5a07499ce53f_set_up_initial_database.py b/data/migrations/versions/5a07499ce53f_set_up_initial_database.py new file mode 100644 index 000000000..3c4a8de5d --- /dev/null +++ b/data/migrations/versions/5a07499ce53f_set_up_initial_database.py @@ -0,0 +1,607 @@ +"""Set up initial database + +Revision ID: 5a07499ce53f +Revises: None +Create Date: 2014-05-13 11:26:51.808426 + +""" + +# revision identifiers, used by Alembic. +revision = '5a07499ce53f' +down_revision = None + +from alembic import op +from data.model.sqlalchemybridge import gen_sqlalchemy_metadata +from data.database import all_models +import sqlalchemy as sa + + +def upgrade(): + schema = gen_sqlalchemy_metadata(all_models) + + ### commands auto generated by Alembic - please adjust! 
### + op.create_table('loginservice', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('loginservice_name', 'loginservice', ['name'], unique=True) + + op.bulk_insert(schema.tables['loginservice'], + [ + {'id':1, 'name':'github'}, + {'id':2, 'name':'quayrobot'}, + {'id':3, 'name':'ldap'}, + ]) + + op.create_table('imagestorage', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('checksum', sa.String(length=255), nullable=True), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('comment', sa.Text(), nullable=True), + sa.Column('command', sa.Text(), nullable=True), + sa.Column('image_size', sa.BigInteger(), nullable=True), + sa.Column('uploading', sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('queueitem', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('queue_name', sa.String(length=1024), nullable=False), + sa.Column('body', sa.Text(), nullable=False), + sa.Column('available_after', sa.DateTime(), nullable=False), + sa.Column('available', sa.Boolean(), nullable=False), + sa.Column('processing_expires', sa.DateTime(), nullable=True), + sa.Column('retries_remaining', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('queueitem_available', 'queueitem', ['available'], unique=False) + op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False) + op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False) + op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False) + op.create_table('role', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('role_name', 'role', ['name'], unique=False) + + op.bulk_insert(schema.tables['role'], + [ + {'id':1, 'name':'admin'}, + {'id':2, 'name':'write'}, + {'id':3, 'name':'read'}, + ]) + + op.create_table('logentrykind', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False) + + op.bulk_insert(schema.tables['logentrykind'], + [ + {'id':1, 'name':'account_change_plan'}, + {'id':2, 'name':'account_change_cc'}, + {'id':3, 'name':'account_change_password'}, + {'id':4, 'name':'account_convert'}, + + {'id':5, 'name':'create_robot'}, + {'id':6, 'name':'delete_robot'}, + + {'id':7, 'name':'create_repo'}, + {'id':8, 'name':'push_repo'}, + {'id':9, 'name':'pull_repo'}, + {'id':10, 'name':'delete_repo'}, + {'id':11, 'name':'create_tag'}, + {'id':12, 'name':'move_tag'}, + {'id':13, 'name':'delete_tag'}, + {'id':14, 'name':'add_repo_permission'}, + {'id':15, 'name':'change_repo_permission'}, + {'id':16, 'name':'delete_repo_permission'}, + {'id':17, 'name':'change_repo_visibility'}, + {'id':18, 'name':'add_repo_accesstoken'}, + {'id':19, 'name':'delete_repo_accesstoken'}, + {'id':20, 'name':'add_repo_webhook'}, + {'id':21, 'name':'delete_repo_webhook'}, + {'id':22, 'name':'set_repo_description'}, + + {'id':23, 'name':'build_dockerfile'}, + + {'id':24, 'name':'org_create_team'}, + {'id':25, 'name':'org_delete_team'}, + {'id':26, 'name':'org_add_team_member'}, + {'id':27, 'name':'org_remove_team_member'}, + {'id':28, 
'name':'org_set_team_description'}, + {'id':29, 'name':'org_set_team_role'}, + + {'id':30, 'name':'create_prototype_permission'}, + {'id':31, 'name':'modify_prototype_permission'}, + {'id':32, 'name':'delete_prototype_permission'}, + + {'id':33, 'name':'setup_repo_trigger'}, + {'id':34, 'name':'delete_repo_trigger'}, + + {'id':35, 'name':'create_application'}, + {'id':36, 'name':'update_application'}, + {'id':37, 'name':'delete_application'}, + {'id':38, 'name':'reset_application_client_secret'}, + ]) + + op.create_table('notificationkind', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False) + + op.bulk_insert(schema.tables['notificationkind'], + [ + {'id':1, 'name':'password_required'}, + {'id':2, 'name':'over_private_usage'}, + ]) + + op.create_table('teamrole', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('teamrole_name', 'teamrole', ['name'], unique=False) + + op.bulk_insert(schema.tables['teamrole'], + [ + {'id':1, 'name':'admin'}, + {'id':2, 'name':'creator'}, + {'id':3, 'name':'member'}, + ]) + + op.create_table('visibility', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('visibility_name', 'visibility', ['name'], unique=False) + + op.bulk_insert(schema.tables['visibility'], + [ + {'id':1, 'name':'public'}, + {'id':2, 'name':'private'}, + ]) + + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('password_hash', sa.String(length=255), nullable=True), + sa.Column('email', sa.String(length=255), nullable=False), + sa.Column('verified', sa.Boolean(), nullable=False), + sa.Column('stripe_id', sa.String(length=255), nullable=True), + sa.Column('organization', sa.Boolean(), nullable=False), + sa.Column('robot', sa.Boolean(), nullable=False), + sa.Column('invoice_email', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('user_email', 'user', ['email'], unique=True) + op.create_index('user_organization', 'user', ['organization'], unique=False) + op.create_index('user_robot', 'user', ['robot'], unique=False) + op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False) + op.create_index('user_username', 'user', ['username'], unique=True) + op.create_table('buildtriggerservice', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False) + + op.bulk_insert(schema.tables['buildtriggerservice'], + [ + {'id':1, 'name':'github'}, + ]) + + op.create_table('federatedlogin', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('service_id', sa.Integer(), nullable=False), + sa.Column('service_ident', sa.String(length=255, collation='utf8_general_ci'), nullable=False), + sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False) + 
op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True) + op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True) + op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False) + op.create_table('oauthapplication', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('client_id', sa.String(length=255), nullable=False), + sa.Column('client_secret', sa.String(length=255), nullable=False), + sa.Column('redirect_uri', sa.String(length=255), nullable=False), + sa.Column('application_uri', sa.String(length=255), nullable=False), + sa.Column('organization_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('gravatar_email', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False) + op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False) + op.create_table('notification', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('kind_id', sa.Integer(), nullable=False), + sa.Column('target_id', sa.Integer(), nullable=False), + sa.Column('metadata_json', sa.Text(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], ), + sa.ForeignKeyConstraint(['target_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('notification_created', 'notification', ['created'], unique=False) + op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False) + op.create_index('notification_target_id', 'notification', ['target_id'], unique=False) + op.create_index('notification_uuid', 'notification', ['uuid'], unique=False) + op.create_table('emailconfirmation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('pw_reset', sa.Boolean(), nullable=False), + sa.Column('new_email', sa.String(length=255), nullable=True), + sa.Column('email_confirm', sa.Boolean(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True) + op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False) + op.create_table('team', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('organization_id', sa.Integer(), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('team_name', 'team', ['name'], unique=False) + op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True) + op.create_index('team_organization_id', 'team', ['organization_id'], unique=False) + op.create_index('team_role_id', 
'team', ['role_id'], unique=False) + op.create_table('repository', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('namespace', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('visibility_id', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('badge_token', sa.String(length=255), nullable=False), + sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True) + op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False) + op.create_table('accesstoken', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('friendly_name', sa.String(length=255), nullable=True), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.Column('temporary', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True) + op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False) + op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False) + op.create_table('repositorypermission', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('team_id', sa.Integer(), nullable=True), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), + sa.ForeignKeyConstraint(['team_id'], ['team.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False) + op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False) + op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False) + op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True) + op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False) + op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True) + op.create_table('oauthaccesstoken', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('application_id', sa.Integer(), nullable=False), + sa.Column('authorized_user_id', sa.Integer(), nullable=False), + sa.Column('scope', sa.String(length=255), nullable=False), + sa.Column('access_token', sa.String(length=255), nullable=False), + sa.Column('token_type', sa.String(length=255), nullable=False), + sa.Column('expires_at', sa.DateTime(), nullable=False), + sa.Column('refresh_token', sa.String(length=255), nullable=True), + sa.Column('data', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ), + 
sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False) + op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False) + op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False) + op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False) + op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False) + op.create_table('teammember', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('team_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['team_id'], ['team.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False) + op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False) + op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True) + op.create_table('webhook', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('public_id', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('parameters', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('webhook_public_id', 'webhook', ['public_id'], unique=True) + op.create_index('webhook_repository_id', 'webhook', ['repository_id'], unique=False) + op.create_table('oauthauthorizationcode', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('application_id', sa.Integer(), nullable=False), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('scope', sa.String(length=255), nullable=False), + sa.Column('data', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False) + op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False) + op.create_table('image', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('docker_image_id', sa.String(length=255), nullable=False), + sa.Column('checksum', sa.String(length=255), nullable=True), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('comment', sa.Text(), nullable=True), + sa.Column('command', sa.Text(), nullable=True), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('image_size', sa.BigInteger(), nullable=True), + sa.Column('ancestors', sa.String(length=60535, collation='latin1_swedish_ci'), nullable=True), + sa.Column('storage_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('image_ancestors', 'image', ['ancestors'], unique=False) + op.create_index('image_repository_id', 'image', ['repository_id'], unique=False) + op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=False) + op.create_index('image_storage_id', 'image', ['storage_id'], unique=False) + 
op.create_table('permissionprototype', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('org_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('activating_user_id', sa.Integer(), nullable=True), + sa.Column('delegate_user_id', sa.Integer(), nullable=True), + sa.Column('delegate_team_id', sa.Integer(), nullable=True), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], ), + sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['org_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False) + op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False) + op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False) + op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False) + op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False) + op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False) + op.create_table('repositorytag', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('image_id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['image_id'], ['image.id'], ), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False) + op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False) + op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True) + op.create_table('logentry', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('kind_id', sa.Integer(), nullable=False), + sa.Column('account_id', sa.Integer(), nullable=False), + sa.Column('performer_id', sa.Integer(), nullable=True), + sa.Column('repository_id', sa.Integer(), nullable=True), + sa.Column('access_token_id', sa.Integer(), nullable=True), + sa.Column('datetime', sa.DateTime(), nullable=False), + sa.Column('ip', sa.String(length=255), nullable=True), + sa.Column('metadata_json', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ), + sa.ForeignKeyConstraint(['account_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], ), + sa.ForeignKeyConstraint(['performer_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False) + op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False) + op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False) + op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False) + op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False) + op.create_index('logentry_repository_id', 
'logentry', ['repository_id'], unique=False) + op.create_table('repositorybuildtrigger', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('service_id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('connected_user_id', sa.Integer(), nullable=False), + sa.Column('auth_token', sa.String(length=255), nullable=False), + sa.Column('config', sa.Text(), nullable=False), + sa.Column('write_token_id', sa.Integer(), nullable=True), + sa.Column('pull_robot_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], ), + sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False) + op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False) + op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False) + op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False) + op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False) + op.create_table('repositorybuild', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('access_token_id', sa.Integer(), nullable=False), + sa.Column('resource_key', sa.String(length=255), nullable=False), + sa.Column('job_config', sa.Text(), nullable=False), + sa.Column('phase', sa.String(length=255), nullable=False), + sa.Column('started', sa.DateTime(), nullable=False), + sa.Column('display_name', sa.String(length=255), nullable=False), + sa.Column('trigger_id', sa.Integer(), nullable=True), + sa.Column('pull_robot_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ), + sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), + sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False) + op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False) + op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False) + op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False) + op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False) + op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False) + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! 
### + op.drop_index('repositorybuild_uuid', table_name='repositorybuild') + op.drop_index('repositorybuild_trigger_id', table_name='repositorybuild') + op.drop_index('repositorybuild_resource_key', table_name='repositorybuild') + op.drop_index('repositorybuild_repository_id', table_name='repositorybuild') + op.drop_index('repositorybuild_pull_robot_id', table_name='repositorybuild') + op.drop_index('repositorybuild_access_token_id', table_name='repositorybuild') + op.drop_table('repositorybuild') + op.drop_index('repositorybuildtrigger_write_token_id', table_name='repositorybuildtrigger') + op.drop_index('repositorybuildtrigger_service_id', table_name='repositorybuildtrigger') + op.drop_index('repositorybuildtrigger_repository_id', table_name='repositorybuildtrigger') + op.drop_index('repositorybuildtrigger_pull_robot_id', table_name='repositorybuildtrigger') + op.drop_index('repositorybuildtrigger_connected_user_id', table_name='repositorybuildtrigger') + op.drop_table('repositorybuildtrigger') + op.drop_index('logentry_repository_id', table_name='logentry') + op.drop_index('logentry_performer_id', table_name='logentry') + op.drop_index('logentry_kind_id', table_name='logentry') + op.drop_index('logentry_datetime', table_name='logentry') + op.drop_index('logentry_account_id', table_name='logentry') + op.drop_index('logentry_access_token_id', table_name='logentry') + op.drop_table('logentry') + op.drop_index('repositorytag_repository_id_name', table_name='repositorytag') + op.drop_index('repositorytag_repository_id', table_name='repositorytag') + op.drop_index('repositorytag_image_id', table_name='repositorytag') + op.drop_table('repositorytag') + op.drop_index('permissionprototype_role_id', table_name='permissionprototype') + op.drop_index('permissionprototype_org_id_activating_user_id', table_name='permissionprototype') + op.drop_index('permissionprototype_org_id', table_name='permissionprototype') + op.drop_index('permissionprototype_delegate_user_id', table_name='permissionprototype') + op.drop_index('permissionprototype_delegate_team_id', table_name='permissionprototype') + op.drop_index('permissionprototype_activating_user_id', table_name='permissionprototype') + op.drop_table('permissionprototype') + op.drop_index('image_storage_id', table_name='image') + op.drop_index('image_repository_id_docker_image_id', table_name='image') + op.drop_index('image_repository_id', table_name='image') + op.drop_index('image_ancestors', table_name='image') + op.drop_table('image') + op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode') + op.drop_index('oauthauthorizationcode_application_id', table_name='oauthauthorizationcode') + op.drop_table('oauthauthorizationcode') + op.drop_index('webhook_repository_id', table_name='webhook') + op.drop_index('webhook_public_id', table_name='webhook') + op.drop_table('webhook') + op.drop_index('teammember_user_id_team_id', table_name='teammember') + op.drop_index('teammember_user_id', table_name='teammember') + op.drop_index('teammember_team_id', table_name='teammember') + op.drop_table('teammember') + op.drop_index('oauthaccesstoken_uuid', table_name='oauthaccesstoken') + op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken') + op.drop_index('oauthaccesstoken_authorized_user_id', table_name='oauthaccesstoken') + op.drop_index('oauthaccesstoken_application_id', table_name='oauthaccesstoken') + op.drop_index('oauthaccesstoken_access_token', table_name='oauthaccesstoken') + op.drop_table('oauthaccesstoken') + 
op.drop_index('repositorypermission_user_id_repository_id', table_name='repositorypermission') + op.drop_index('repositorypermission_user_id', table_name='repositorypermission') + op.drop_index('repositorypermission_team_id_repository_id', table_name='repositorypermission') + op.drop_index('repositorypermission_team_id', table_name='repositorypermission') + op.drop_index('repositorypermission_role_id', table_name='repositorypermission') + op.drop_index('repositorypermission_repository_id', table_name='repositorypermission') + op.drop_table('repositorypermission') + op.drop_index('accesstoken_role_id', table_name='accesstoken') + op.drop_index('accesstoken_repository_id', table_name='accesstoken') + op.drop_index('accesstoken_code', table_name='accesstoken') + op.drop_table('accesstoken') + op.drop_index('repository_visibility_id', table_name='repository') + op.drop_index('repository_namespace_name', table_name='repository') + op.drop_table('repository') + op.drop_index('team_role_id', table_name='team') + op.drop_index('team_organization_id', table_name='team') + op.drop_index('team_name_organization_id', table_name='team') + op.drop_index('team_name', table_name='team') + op.drop_table('team') + op.drop_index('emailconfirmation_user_id', table_name='emailconfirmation') + op.drop_index('emailconfirmation_code', table_name='emailconfirmation') + op.drop_table('emailconfirmation') + op.drop_index('notification_uuid', table_name='notification') + op.drop_index('notification_target_id', table_name='notification') + op.drop_index('notification_kind_id', table_name='notification') + op.drop_index('notification_created', table_name='notification') + op.drop_table('notification') + op.drop_index('oauthapplication_organization_id', table_name='oauthapplication') + op.drop_index('oauthapplication_client_id', table_name='oauthapplication') + op.drop_table('oauthapplication') + op.drop_index('federatedlogin_user_id', table_name='federatedlogin') + op.drop_index('federatedlogin_service_id_user_id', table_name='federatedlogin') + op.drop_index('federatedlogin_service_id_service_ident', table_name='federatedlogin') + op.drop_index('federatedlogin_service_id', table_name='federatedlogin') + op.drop_table('federatedlogin') + op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice') + op.drop_table('buildtriggerservice') + op.drop_index('user_username', table_name='user') + op.drop_index('user_stripe_id', table_name='user') + op.drop_index('user_robot', table_name='user') + op.drop_index('user_organization', table_name='user') + op.drop_index('user_email', table_name='user') + op.drop_table('user') + op.drop_index('visibility_name', table_name='visibility') + op.drop_table('visibility') + op.drop_index('teamrole_name', table_name='teamrole') + op.drop_table('teamrole') + op.drop_index('notificationkind_name', table_name='notificationkind') + op.drop_table('notificationkind') + op.drop_index('logentrykind_name', table_name='logentrykind') + op.drop_table('logentrykind') + op.drop_index('role_name', table_name='role') + op.drop_table('role') + op.drop_index('queueitem_queue_name', table_name='queueitem') + op.drop_index('queueitem_processing_expires', table_name='queueitem') + op.drop_index('queueitem_available_after', table_name='queueitem') + op.drop_index('queueitem_available', table_name='queueitem') + op.drop_table('queueitem') + op.drop_table('imagestorage') + op.drop_index('loginservice_name', table_name='loginservice') + op.drop_table('loginservice') + ### end Alembic commands ### 
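For orientation before the model changes below: after this patch, importing the data
modules no longer initializes anything, so callers have to wire up the database proxy,
the model config, and the authentication extension explicitly. A minimal sketch of that
flow, using only pieces introduced in this series; the DB_URI and LDAP_* values are
illustrative placeholders, not settings from this repo:

    from flask import Flask

    from data import database, model
    from data.users import UserAuthentication
    from storage import Storage

    app = Flask(__name__)
    app.config['DB_URI'] = 'sqlite:///test/data/test.db'        # placeholder
    app.config['DB_CONNECTION_ARGS'] = {}
    app.config['AUTHENTICATION_TYPE'] = 'LDAP'                  # or 'Database' (the default)
    app.config['LDAP_URI'] = 'ldap://ldap.example.com'          # placeholder host
    app.config['LDAP_ADMIN_DN'] = 'cn=admin,dc=example,dc=com'  # placeholder service account
    app.config['LDAP_ADMIN_PASSWD'] = 'changeme'                # placeholder
    app.config['LDAP_BASE_DN'] = ['dc=example', 'dc=com']       # rdn pieces, joined with ','
    app.config['LDAP_USER_RDN'] = ['ou=employees']              # prepended to the base dn

    # The database proxy and the model config are now populated manually.
    database.configure(app.config)
    model.config.app_config = app.config
    model.config.store = Storage(app)

    authentication = UserAuthentication(app)

    # Binds as the user to validate the password, then creates or links a
    # federated login record for the LDAP identity.
    user = authentication.verify_user('someuser', 'somepassword')
    print 'verified as: %s' % (user.username if user else None)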
diff --git a/data/model/legacy.py b/data/model/legacy.py index 1c207cb79..23b1bde77 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -8,11 +8,17 @@ from data.database import * from util.validation import * from util.names import format_robot_username -from app import storage as store - logger = logging.getLogger(__name__) -transaction_factory = app.config['DB_TRANSACTION_FACTORY'] + + +class Config(object): + def __init__(self): + self.app_config = None + self.store = None + +config = Config() + class DataModelException(Exception): pass @@ -58,7 +64,7 @@ class InvalidBuildTriggerException(DataModelException): pass -def create_user(username, password, email, is_organization=False): +def create_user(username, password, email, add_change_pw_notification=True): if not validate_email(email): raise InvalidEmailAddressException('Invalid email address: %s' % email) @@ -97,7 +103,7 @@ def create_user(username, password, email, is_organization=False): # If the password is None, then add a notification for the user to change # their password ASAP. - if not pw_hash and not is_organization: + if not pw_hash and add_change_pw_notification: create_notification('password_required', new_user) return new_user @@ -105,10 +111,18 @@ def create_user(username, password, email, is_organization=False): raise DataModelException(ex.message) +def is_username_unique(test_username): + try: + User.get((User.username == test_username)) + return False + except User.DoesNotExist: + return True + + def create_organization(name, email, creating_user): try: # Create the org - new_org = create_user(name, None, email, is_organization=True) + new_org = create_user(name, None, email, add_change_pw_notification=False) new_org.organization = True new_org.save() @@ -340,18 +354,16 @@ def attach_federated_login(user, service_name, service_id): def verify_federated_login(service_name, service_id): - selected = FederatedLogin.select(FederatedLogin, User) - with_service = selected.join(LoginService) - with_user = with_service.switch(FederatedLogin).join(User) - found = with_user.where(FederatedLogin.service_ident == service_id, - LoginService.name == service_name) - - found_list = list(found) - - if found_list: - return found_list[0].user - - return None + try: + found = (FederatedLogin + .select(FederatedLogin, User) + .join(LoginService) + .switch(FederatedLogin).join(User) + .where(FederatedLogin.service_ident == service_id, LoginService.name == service_name) + .get()) + return found.user + except FederatedLogin.DoesNotExist: + return None def list_federated_logins(user): @@ -935,7 +947,7 @@ def __translate_ancestry(old_ancestry, translations, repository, username): def find_create_or_link_image(docker_image_id, repository, username, translations): - with transaction_factory(db): + with config.app_config['DB_TRANSACTION_FACTORY'](db): repo_image = get_repo_image(repository.namespace, repository.name, docker_image_id) if repo_image: @@ -1018,7 +1030,7 @@ def set_image_size(docker_image_id, namespace_name, repository_name, def set_image_metadata(docker_image_id, namespace_name, repository_name, created_date_str, comment, command, parent=None): - with transaction_factory(db): + with config.app_config['DB_TRANSACTION_FACTORY'](db): query = (Image .select(Image, ImageStorage) .join(Repository) @@ -1098,10 +1110,10 @@ def garbage_collect_repository(namespace_name, repository_name): image_to_remove.storage.uuid) uuids_to_check_for_gc.add(image_to_remove.storage.uuid) else: - image_path = store.image_path(namespace_name, 
repository_name, - image_to_remove.docker_image_id, None) + image_path = config.store.image_path(namespace_name, repository_name, + image_to_remove.docker_image_id, None) logger.debug('Deleting image storage: %s', image_path) - store.remove(image_path) + config.store.remove(image_path) image_to_remove.delete_instance() @@ -1116,10 +1128,9 @@ def garbage_collect_repository(namespace_name, repository_name): for storage in storage_to_remove: logger.debug('Garbage collecting image storage: %s', storage.uuid) storage.delete_instance() - image_path = store.image_path(namespace_name, repository_name, - image_to_remove.docker_image_id, - storage.uuid) - store.remove(image_path) + image_path = config.store.image_path(namespace_name, repository_name, + image_to_remove.docker_image_id, storage.uuid) + config.store.remove(image_path) return len(to_remove) @@ -1489,8 +1500,8 @@ def get_pull_credentials(robotname): return { 'username': robot.username, 'password': login_info.service_ident, - 'registry': '%s://%s/v1/' % (app.config['PREFERRED_URL_SCHEME'], - app.config['SERVER_HOSTNAME']), + 'registry': '%s://%s/v1/' % (config.app_config['PREFERRED_URL_SCHEME'], + config.app_config['SERVER_HOSTNAME']), } diff --git a/data/users.py b/data/users.py index 895c6872c..e5064b6fd 100644 --- a/data/users.py +++ b/data/users.py @@ -1,17 +1,16 @@ import ldap import logging +from util.validation import generate_valid_usernames +from data import model logger = logging.getLogger(__name__) class DatabaseUsers(object): - def __init__(self, app_db): - self._app_db = app_db - def verify_user(self, username_or_email, password): """ Simply delegate to the model implementation. """ - return self._app_db.verify_user(username_or_email, password) + return model.verify_user(username_or_email, password) class LDAPConnection(object): @@ -31,10 +30,10 @@ class LDAPConnection(object): class LDAPUsers(object): - def __init__(self, app_db, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, - email_attr, passwd_attr): - self._app_db = app_db + def __init__(self, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr, + passwd_attr): self._ldap_conn = LDAPConnection(ldap_uri, admin_dn, admin_passwd) + self._ldap_uri = ldap_uri self._base_dn = base_dn self._user_rdn = user_rdn self._uid_attr = uid_attr @@ -45,36 +44,47 @@ class LDAPUsers(object): """ Verify the credentials with LDAP and if they are valid, create or update the user in our database. 
""" + # Make sure that even if the server supports anonymous binds, we don't allow it + if not password: + return None + with self._ldap_conn as conn: user_search_dn = ','.join(self._user_rdn + self._base_dn) - query = '(|({0}={2})({1}={2}))'.format(self._uid_attr, self._email_attr, - username_or_email) - user = conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query) + query = u'(|({0}={2})({1}={2}))'.format(self._uid_attr, self._email_attr, + username_or_email) + user = conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query.encode('utf-8')) if len(user) != 1: return None found_dn, found_response = user[0] - # First validate the password - valid_passwd = conn.compare_s(found_dn, self._passwd_attr, password) == 1 - if not valid_passwd: + # First validate the password by binding as the user + try: + with LDAPConnection(self._ldap_uri, found_dn, password.encode('utf-8')): + pass + except ldap.INVALID_CREDENTIALS: return None - logger.debug('LDAP Response: %s', found_response) - - # Now check if we have the same username in our DB - username = found_response[self._uid_attr][0] + # Now check if we have a federated login for this user + username = unicode(found_response[self._uid_attr][0].decode('utf-8')) email = found_response[self._email_attr][0] - password = found_response[self._passwd_attr][0] - db_user = self._app_db.get_user(username) - - logger.debug('Email: %s', email) + db_user = model.verify_federated_login('ldap', username) if not db_user: # We must create the user in our db - db_user = self._app_db.create_user(username, 'password_from_ldap', email) + valid_username = None + for valid_username in generate_valid_usernames(username): + if model.is_username_unique(valid_username): + break + + if not valid_username: + logger.error('Unable to pick a username for user: %s', username) + return None + + db_user = model.create_user(valid_username, None, email, add_change_pw_notification=False) db_user.verified = True + model.attach_federated_login(db_user, 'ldap', username) else: # Update the db attributes from ldap db_user.email = email @@ -85,18 +95,18 @@ class LDAPUsers(object): class UserAuthentication(object): - def __init__(self, app=None, model=None): + def __init__(self, app=None): self.app = app if app is not None: - self.state = self.init_app(app, model) + self.state = self.init_app(app) else: self.state = None - def init_app(self, app, model): + def init_app(self, app): authentication_type = app.config.get('AUTHENTICATION_TYPE', 'Database') if authentication_type == 'Database': - users = DatabaseUsers(model) + users = DatabaseUsers() elif authentication_type == 'LDAP': ldap_uri = app.config.get('LDAP_URI', 'ldap://localhost') base_dn = app.config.get('LDAP_BASE_DN') @@ -107,8 +117,8 @@ class UserAuthentication(object): email_attr = app.config.get('LDAP_EMAIL_ATTR', 'mail') passwd_attr = app.config.get('LDAP_PASSWD_ATTR', 'userPassword') - users = LDAPUsers(model, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, - email_attr, passwd_attr) + users = LDAPUsers(ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr, + passwd_attr) else: raise RuntimeError('Unknown authentication type: %s' % authentication_type) diff --git a/endpoints/index.py b/endpoints/index.py index 6ebec2d6c..f0d233414 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -9,7 +9,7 @@ from collections import OrderedDict from data import model from data.model import oauth from data.queue import webhook_queue -from app import analytics, app +from app import analytics, app, 
authentication from auth.auth import process_auth from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token from util.names import parse_repository_name @@ -97,7 +97,7 @@ def create_user(): existing_user = model.get_user(username) if existing_user: - verified = model.verify_user(username, password) + verified = authentication.verify_user(username, password) if verified: # Mark that the user was logged in. event = app.config['USER_EVENTS'].get_event(username) diff --git a/initdb.py b/initdb.py index 2570b7ca9..065e0f6a1 100644 --- a/initdb.py +++ b/initdb.py @@ -181,6 +181,7 @@ def initialize_database(): Visibility.create(name='private') LoginService.create(name='github') LoginService.create(name='quayrobot') + LoginService.create(name='ldap') BuildTriggerService.create(name='github') diff --git a/requirements-nover.txt b/requirements-nover.txt index efda6ebef..ee1329ae2 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -33,3 +33,4 @@ reportlab==2.7 blinker raven python-ldap +unidecode diff --git a/test/data/test.db b/test/data/test.db index 3ea2e8dfa3f00e22b1b077f0e2a6d5fc9d862abe..1e322ef232db1141f4f2fe5d50cea86223c5cf76 100644 GIT binary patch [two base85-encoded binary deltas for the test.db fixture (delta 5420 and delta 5359) omitted; the payload is machine-generated and carries no readable content]
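Earlier in this series the LDAP password check moved from comparing a stored attribute with conn.compare_s to re-binding against the directory as the DN found for the user, so the directory itself verifies the password. A minimal self-contained sketch of that bind-as-user check, with illustrative names rather than the patch's own class:

import ldap

def check_ldap_password(ldap_uri, user_dn, password):
    # Reject empty passwords up front: many directories treat an empty
    # password as a successful anonymous bind.
    if not password:
        return False

    conn = ldap.initialize(ldap_uri)
    try:
        # simple_bind_s raises INVALID_CREDENTIALS when the password is wrong.
        conn.simple_bind_s(user_dn, password.encode('utf-8'))
        return True
    except ldap.INVALID_CREDENTIALS:
        return False
    finally:
        conn.unbind_s()

Binding as the user keeps verification on the directory server, so the application never needs read access to a password attribute; that is what lets a later patch in this series drop the LDAP_PASSWD_ATTR setting entirely.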
[The binary payload above also corrupted the following patch boundary: the commit header and the opening of a diff against util/validation.py, including the new MIN_LENGTH, MAX_LENGTH, VALID_CHARACTERS, and INVALID_USERNAME_CHARACTERS definitions, are lost. The recoverable remainder of that diff follows.] - length_match = (len(username) >= 4 and len(username) <= 30) + length_match = (len(username) >= MIN_LENGTH and len(username) <= MAX_LENGTH) if not length_match: - return (False, 'Username must be between 4 and 30 characters in length') + return (False, 'Username must be between %s and %s characters in length' % + (MIN_LENGTH, MAX_LENGTH)) return (True, '') @@ -27,3 +37,24 @@ def validate_password(password): if re.search(r'\s', password): return False return len(password) > 7 + + +def _gen_filler_chars(num_filler_chars): + if num_filler_chars == 0: + yield '' + else: + for char in VALID_CHARACTERS: + for suffix in _gen_filler_chars(num_filler_chars - 1): + yield char + suffix + + +def generate_valid_usernames(input_username): + normalized = unidecode(input_username).strip().lower() + prefix = re.sub(INVALID_USERNAME_CHARACTERS, '_', normalized)[:30] + + num_filler_chars = max(0,
MIN_LENGTH - len(prefix)) + + while num_filler_chars + len(prefix) <= MAX_LENGTH: + for suffix in _gen_filler_chars(num_filler_chars): + yield prefix + suffix + num_filler_chars += 1 From 553ef36e9bab935c8f641bc38c01530199f043af Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 15:17:16 -0400 Subject: [PATCH 06/34] Fix a remaining direct usage of transaction factory in the data model. --- data/model/legacy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/model/legacy.py b/data/model/legacy.py index 23b1bde77..88189ab3a 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -1076,7 +1076,7 @@ def list_repository_tags(namespace_name, repository_name): def garbage_collect_repository(namespace_name, repository_name): - with transaction_factory(db): + with config.app_config['DB_TRANSACTION_FACTORY'](db): # Get a list of all images used by tags in the repository tag_query = (RepositoryTag .select(RepositoryTag, Image, ImageStorage) From f049f738da734368e9de012d6a5d2beebaa6978e Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 15:20:17 -0400 Subject: [PATCH 07/34] Run the db migrations on container start unless we're running against Sqlite. --- Dockerfile | 24 +++++++++++++++--------- conf/init/runmigration.sh | 5 +++++ data/migrations/env.py | 10 ++++++++-- 3 files changed, 28 insertions(+), 11 deletions(-) create mode 100755 conf/init/runmigration.sh diff --git a/Dockerfile b/Dockerfile index 365adfec7..c581c1004 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,7 +24,7 @@ RUN apt-get install -y nodejs RUN npm install -g grunt-cli # LDAP -RUN apt-get install libldap2-dev libsasl2-dev +RUN apt-get install -y libldap2-dev libsasl2-dev ADD binary_dependencies binary_dependencies RUN gdebi --n binary_dependencies/*.deb @@ -35,15 +35,19 @@ ADD requirements.txt requirements.txt RUN virtualenv --distribute venv RUN venv/bin/pip install -r requirements.txt +# Add the static assets and run grunt +ADD grunt grunt +ADD static static +RUN cd grunt && npm install +RUN cd grunt && grunt + +# Add the backend assets ADD auth auth ADD buildstatus buildstatus -ADD conf conf ADD data data ADD endpoints endpoints ADD features features -ADD grunt grunt ADD screenshots screenshots -ADD static static ADD storage storage ADD templates templates ADD util util @@ -54,17 +58,20 @@ ADD application.py application.py ADD config.py config.py ADD initdb.py initdb.py ADD external_libraries.py external_libraries.py +ADD alembic.ini alembic.ini + +# Add the config +ADD conf conf +RUN rm -rf /conf/stack ADD conf/init/mklogsdir.sh /etc/my_init.d/ +ADD conf/init/runmigration.sh /etc/my_init.d/ + ADD conf/init/gunicorn.sh /etc/service/gunicorn/run ADD conf/init/nginx.sh /etc/service/nginx/run ADD conf/init/diffsworker.sh /etc/service/diffsworker/run ADD conf/init/webhookworker.sh /etc/service/webhookworker/run -# Build the compiled binaries of JS and CSS -RUN cd grunt && npm install -RUN cd grunt && grunt - # Download any external libs. RUN mkdir static/fonts RUN mkdir static/ldn @@ -75,7 +82,6 @@ RUN venv/bin/python -m external_libraries ADD test test RUN TEST=true venv/bin/python -m unittest discover -RUN rm -rf /conf/stack VOLUME ["/conf/stack", "/mnt/logs"] EXPOSE 443 80 diff --git a/conf/init/runmigration.sh b/conf/init/runmigration.sh new file mode 100755 index 000000000..5a2ef5cae --- /dev/null +++ b/conf/init/runmigration.sh @@ -0,0 +1,5 @@ +#! /bin/bash +set -e + +# Run the database migration +PYTHONPATH=. 
venv/bin/alembic upgrade head diff --git a/data/migrations/env.py b/data/migrations/env.py index 5b1564b50..c267c2f50 100644 --- a/data/migrations/env.py +++ b/data/migrations/env.py @@ -3,8 +3,9 @@ from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig from urllib import unquote +from peewee import SqliteDatabase -from data.database import all_models +from data.database import all_models, db from app import app from data.model.sqlalchemybridge import gen_sqlalchemy_metadata @@ -41,7 +42,7 @@ def run_migrations_offline(): """ url = unquote(app.config['DB_URI']) - context.configure(url=url, target_metadata=target_metadata) + context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True) with context.begin_transaction(): context.run_migrations() @@ -53,6 +54,11 @@ def run_migrations_online(): and associate a connection with the context. """ + + if isinstance(db.obj, SqliteDatabase): + print ('Skipping Sqlite migration!') + return + engine = engine_from_config( config.get_section(config.config_ini_section), prefix='sqlalchemy.', From 2da8b4737e3449ea88925147e6dcab8c9927b249 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 15:22:31 -0400 Subject: [PATCH 08/34] Fix the registry to work with unicode usernames in LDAP. --- auth/auth.py | 2 +- data/users.py | 90 +++++++++++++++++++++++++++------------------- endpoints/index.py | 3 +- requirements.txt | 2 ++ 4 files changed, 57 insertions(+), 40 deletions(-) diff --git a/auth/auth.py b/auth/auth.py index 0e5457a36..715b5a0dd 100644 --- a/auth/auth.py +++ b/auth/auth.py @@ -70,7 +70,7 @@ def process_basic_auth(auth): logger.debug('Invalid basic auth format.') return - credentials = b64decode(normalized[1]).split(':', 1) + credentials = [part.decode('utf-8') for part in b64decode(normalized[1]).split(':', 1)] if len(credentials) != 2: logger.debug('Invalid basic auth credential format.') diff --git a/data/users.py b/data/users.py index e5064b6fd..7cb83a619 100644 --- a/data/users.py +++ b/data/users.py @@ -12,6 +12,9 @@ class DatabaseUsers(object): """ Simply delegate to the model implementation. """ return model.verify_user(username_or_email, password) + def user_exists(self, username): + return model.get_user(username) is not None + class LDAPConnection(object): def __init__(self, ldap_uri, user_dn, user_pw): @@ -40,15 +43,9 @@ class LDAPUsers(object): self._email_attr = email_attr self._passwd_attr = passwd_attr - def verify_user(self, username_or_email, password): - """ Verify the credentials with LDAP and if they are valid, create or update the user - in our database. 
""" - - # Make sure that even if the server supports anonymous binds, we don't allow it - if not password: - return None - + def _ldap_user_search(self, username_or_email): with self._ldap_conn as conn: + logger.debug('Incoming username or email param: %s', username_or_email.__repr__()) user_search_dn = ','.join(self._user_rdn + self._base_dn) query = u'(|({0}={2})({1}={2}))'.format(self._uid_attr, self._email_attr, username_or_email) @@ -57,41 +54,60 @@ class LDAPUsers(object): if len(user) != 1: return None - found_dn, found_response = user[0] + return user[0] - # First validate the password by binding as the user - try: - with LDAPConnection(self._ldap_uri, found_dn, password.encode('utf-8')): - pass - except ldap.INVALID_CREDENTIALS: + def verify_user(self, username_or_email, password): + """ Verify the credentials with LDAP and if they are valid, create or update the user + in our database. """ + + # Make sure that even if the server supports anonymous binds, we don't allow it + if not password: + return None + + found_user = self._ldap_user_search(username_or_email) + + if found_user is None: + return None + + found_dn, found_response = found_user + + # First validate the password by binding as the user + try: + with LDAPConnection(self._ldap_uri, found_dn, password.encode('utf-8')): + pass + except ldap.INVALID_CREDENTIALS: + return None + + # Now check if we have a federated login for this user + username = found_response[self._uid_attr][0].decode('utf-8') + email = found_response[self._email_attr][0] + db_user = model.verify_federated_login('ldap', username) + + if not db_user: + # We must create the user in our db + valid_username = None + for valid_username in generate_valid_usernames(username): + if model.is_username_unique(valid_username): + break + + if not valid_username: + logger.error('Unable to pick a username for user: %s', username) return None - # Now check if we have a federated login for this user - username = unicode(found_response[self._uid_attr][0].decode('utf-8')) - email = found_response[self._email_attr][0] - db_user = model.verify_federated_login('ldap', username) + db_user = model.create_user(valid_username, None, email, add_change_pw_notification=False) + db_user.verified = True + model.attach_federated_login(db_user, 'ldap', username) + else: + # Update the db attributes from ldap + db_user.email = email - if not db_user: - # We must create the user in our db - valid_username = None - for valid_username in generate_valid_usernames(username): - if model.is_username_unique(valid_username): - break + db_user.save() - if not valid_username: - logger.error('Unable to pick a username for user: %s', username) - return None + return db_user - db_user = model.create_user(valid_username, None, email, add_change_pw_notification=False) - db_user.verified = True - model.attach_federated_login(db_user, 'ldap', username) - else: - # Update the db attributes from ldap - db_user.email = email - - db_user.save() - - return db_user + def user_exists(self, username): + found_user = self._ldap_user_search(username) + return found_user is not None class UserAuthentication(object): diff --git a/endpoints/index.py b/endpoints/index.py index f0d233414..ea60ff56e 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -95,8 +95,7 @@ def create_user(): abort(400, 'Invalid robot account or password.', issue='robot-login-failure') - existing_user = model.get_user(username) - if existing_user: + if authentication.user_exists(username): verified = authentication.verify_user(username, 
password) if verified: # Mark that the user was logged in. diff --git a/requirements.txt b/requirements.txt index 7951f5dd8..264879298 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,7 @@ PyGithub==1.24.1 PyMySQL==0.6.2 PyPDF2==1.21 SQLAlchemy==0.9.4 +Unidecode==0.04.16 Werkzeug==0.9.4 alembic==0.6.4 aniso8601==0.82 @@ -40,6 +41,7 @@ pycrypto==2.6.1 python-daemon==1.6 python-dateutil==2.2 python-digitalocean==0.7 +python-ldap==2.4.15 python-magic==0.4.6 pytz==2014.2 raven==4.2.1 From f60c73463ac12218a624a530cd1be721fc5e0fc5 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 15:26:13 -0400 Subject: [PATCH 09/34] Fix the tests to work with the database proxy. --- initdb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/initdb.py b/initdb.py index 065e0f6a1..04212d43e 100644 --- a/initdb.py +++ b/initdb.py @@ -148,7 +148,7 @@ def setup_database_for_testing(testcase): # Sanity check to make sure we're not killing our prod db db = model.db - if not isinstance(model.db, SqliteDatabase): + if not isinstance(model.db.obj, SqliteDatabase): raise RuntimeError('Attempted to wipe production database!') global db_initialized_for_testing @@ -242,7 +242,7 @@ def wipe_database(): # Sanity check to make sure we're not killing our prod db db = model.db - if not isinstance(model.db, SqliteDatabase): + if not isinstance(model.db.obj, SqliteDatabase): raise RuntimeError('Attempted to wipe production database!') drop_model_tables(all_models, fail_silently=True) From 628d09afe079394ef0df136c9c78fe476e3a6562 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 15:52:20 -0400 Subject: [PATCH 10/34] Remove the passwd attr ldap config. --- data/users.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/data/users.py b/data/users.py index 7cb83a619..65fbff0d1 100644 --- a/data/users.py +++ b/data/users.py @@ -33,15 +33,13 @@ class LDAPConnection(object): class LDAPUsers(object): - def __init__(self, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr, - passwd_attr): + def __init__(self, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr): self._ldap_conn = LDAPConnection(ldap_uri, admin_dn, admin_passwd) self._ldap_uri = ldap_uri self._base_dn = base_dn self._user_rdn = user_rdn self._uid_attr = uid_attr self._email_attr = email_attr - self._passwd_attr = passwd_attr def _ldap_user_search(self, username_or_email): with self._ldap_conn as conn: @@ -131,10 +129,8 @@ class UserAuthentication(object): user_rdn = app.config.get('LDAP_USER_RDN', []) uid_attr = app.config.get('LDAP_UID_ATTR', 'uid') email_attr = app.config.get('LDAP_EMAIL_ATTR', 'mail') - passwd_attr = app.config.get('LDAP_PASSWD_ATTR', 'userPassword') - users = LDAPUsers(ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr, - passwd_attr) + users = LDAPUsers(ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr) else: raise RuntimeError('Unknown authentication type: %s' % authentication_type) From 53fa778cda9c5c3bbd5f8760713c05d5a6470ccf Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 13 May 2014 16:27:54 -0400 Subject: [PATCH 11/34] Export a volume for data storage. 
--- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c581c1004..e03eb3adb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -82,7 +82,7 @@ RUN venv/bin/python -m external_libraries ADD test test RUN TEST=true venv/bin/python -m unittest discover -VOLUME ["/conf/stack", "/mnt/logs"] +VOLUME ["/conf/stack", "/mnt/logs", "/datastorage"] EXPOSE 443 80 From d14798de1da3f337a342253bd6eca06f8239e1d5 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Wed, 21 May 2014 19:50:37 -0400 Subject: [PATCH 12/34] Add a queue capacity reporter plugin to the queue. Move the queue definitions to app. Add a cloudwatch reporter to the dockerfile build queue. --- app.py | 8 ++++ config.py | 5 +- data/queue.py | 93 +++++++++++++++++++++++++------------- endpoints/common.py | 3 +- endpoints/index.py | 3 +- endpoints/registry.py | 4 +- test/test_queue.py | 41 ++++++++++++++++- util/queuemetrics.py | 51 +++++++++++++++++++++ workers/diffsworker.py | 2 +- workers/dockerfilebuild.py | 3 +- workers/webhookworker.py | 2 +- 11 files changed, 171 insertions(+), 44 deletions(-) create mode 100644 util/queuemetrics.py diff --git a/app.py b/app.py index f1b029b55..3fa1c3961 100644 --- a/app.py +++ b/app.py @@ -12,8 +12,10 @@ from storage import Storage from data.userfiles import Userfiles from util.analytics import Analytics from util.exceptionlog import Sentry +from util.queuemetrics import QueueMetrics from data.billing import Billing from data.buildlogs import BuildLogs +from data.queue import WorkQueue OVERRIDE_CONFIG_FILENAME = 'conf/stack/config.py' @@ -48,3 +50,9 @@ analytics = Analytics(app) billing = Billing(app) sentry = Sentry(app) build_logs = BuildLogs(app) +queue_metrics = QueueMetrics(app) + +image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME']) +dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], + reporter=queue_metrics.report) +webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME']) diff --git a/config.py b/config.py index 54650e566..85d88239f 100644 --- a/config.py +++ b/config.py @@ -86,7 +86,10 @@ class DefaultConfig(object): USERFILES_PATH = 'test/data/registry/userfiles' # Analytics - ANALYTICS_TYPE = "FakeAnalytics" + ANALYTICS_TYPE = 'FakeAnalytics' + + # Build Queue Metrics + QUEUE_METRICS_TYPE = 'Null' # Exception logging EXCEPTION_LOG_TYPE = 'FakeSentry' diff --git a/data/queue.py b/data/queue.py index 61a03a631..4a074e44c 100644 --- a/data/queue.py +++ b/data/queue.py @@ -11,18 +11,53 @@ MINIMUM_EXTENSION = timedelta(seconds=20) class WorkQueue(object): - def __init__(self, queue_name, canonical_name_match_list=None): - self.queue_name = queue_name + def __init__(self, queue_name, canonical_name_match_list=None, reporter=None): + self._queue_name = queue_name + self._reporter = reporter if canonical_name_match_list is None: - self.canonical_name_match_list = [] + self._canonical_name_match_list = [] else: - self.canonical_name_match_list = canonical_name_match_list + self._canonical_name_match_list = canonical_name_match_list @staticmethod def _canonical_name(name_list): return '/'.join(name_list) + '/' + def _running_jobs(self, now, name_match_query): + return (QueueItem + .select(QueueItem.queue_name) + .where(QueueItem.available == False, + QueueItem.processing_expires > now, + QueueItem.queue_name ** name_match_query)) + + def _name_match_query(self): + return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list) + + def _report_queue_metrics(self): + if self._reporter is 
None: + return + + now = datetime.now() + name_match_query = self._name_match_query() + + total_jobs = (QueueItem + .select(QueueItem.queue_name) + .where(QueueItem.queue_name ** name_match_query, + QueueItem.available_after <= now, + ((QueueItem.available == True) | (QueueItem.processing_expires > now) | + (QueueItem.retries_remaining > 0))) + .distinct() + .count()) + + running = self._running_jobs(now, name_match_query).distinct().count() + + self._reporter(running, total_jobs) + + def update_metrics(self): + with transaction_factory(db): + self._report_queue_metrics() + def put(self, canonical_name_list, message, available_after=0, retries_remaining=5): """ Put an item, if it shouldn't be processed for some number of seconds, @@ -30,7 +65,7 @@ class WorkQueue(object): """ params = { - 'queue_name': self._canonical_name([self.queue_name] + canonical_name_list), + 'queue_name': self._canonical_name([self._queue_name] + canonical_name_list), 'body': message, 'retries_remaining': retries_remaining, } @@ -39,7 +74,9 @@ class WorkQueue(object): available_date = datetime.now() + timedelta(seconds=available_after) params['available_after'] = available_date - QueueItem.create(**params) + with transaction_factory(db): + QueueItem.create(**params) + self._report_queue_metrics() def get(self, processing_time=300): """ @@ -48,15 +85,10 @@ class WorkQueue(object): """ now = datetime.now() - name_match_query = '%s%%' % self._canonical_name([self.queue_name] + - self.canonical_name_match_list) + name_match_query = self._name_match_query() with transaction_factory(db): - running = (QueueItem - .select(QueueItem.queue_name) - .where(QueueItem.available == False, - QueueItem.processing_expires > now, - QueueItem.queue_name ** name_match_query)) + running = self._running_jobs(now, name_match_query) avail = QueueItem.select().where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now, @@ -67,6 +99,8 @@ class WorkQueue(object): found = list(avail.limit(1).order_by(QueueItem.id)) + item = None + if found: item = found[0] item.available = False @@ -74,24 +108,26 @@ class WorkQueue(object): item.retries_remaining -= 1 item.save() - return item + self._report_queue_metrics() - return None + return item - @staticmethod - def complete(completed_item): - completed_item.delete_instance() + def complete(self, completed_item): + with transaction_factory(db): + completed_item.delete_instance() + self._report_queue_metrics() - @staticmethod - def incomplete(incomplete_item, retry_after=300, restore_retry=False): - retry_date = datetime.now() + timedelta(seconds=retry_after) - incomplete_item.available_after = retry_date - incomplete_item.available = True + def incomplete(self, incomplete_item, retry_after=300, restore_retry=False): + with transaction_factory(db): + retry_date = datetime.now() + timedelta(seconds=retry_after) + incomplete_item.available_after = retry_date + incomplete_item.available = True - if restore_retry: - incomplete_item.retries_remaining += 1 + if restore_retry: + incomplete_item.retries_remaining += 1 - incomplete_item.save() + incomplete_item.save() + self._report_queue_metrics() @staticmethod def extend_processing(queue_item, seconds_from_now): @@ -101,8 +137,3 @@ class WorkQueue(object): if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION: queue_item.processing_expires = new_expiration queue_item.save() - - -image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME']) -dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME']) 
-webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME']) diff --git a/endpoints/common.py b/endpoints/common.py index e9bd7b7c6..ad2f3e66b 100644 --- a/endpoints/common.py +++ b/endpoints/common.py @@ -9,8 +9,7 @@ from flask.ext.principal import identity_changed from random import SystemRandom from data import model -from data.queue import dockerfile_build_queue -from app import app, login_manager +from app import app, login_manager, dockerfile_build_queue from auth.permissions import QuayDeferredPermissionUser from auth import scopes from endpoints.api.discovery import swagger_route_data diff --git a/endpoints/index.py b/endpoints/index.py index 6ebec2d6c..25013f05e 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -8,8 +8,7 @@ from collections import OrderedDict from data import model from data.model import oauth -from data.queue import webhook_queue -from app import analytics, app +from app import analytics, app, webhook_queue from auth.auth import process_auth from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token from util.names import parse_repository_name diff --git a/endpoints/registry.py b/endpoints/registry.py index 6c9800f5c..b2018a2b0 100644 --- a/endpoints/registry.py +++ b/endpoints/registry.py @@ -7,9 +7,7 @@ from functools import wraps from datetime import datetime from time import time -from data.queue import image_diff_queue - -from app import storage as store +from app import storage as store, image_diff_queue from auth.auth import process_auth, extract_namespace_repo_from_session from util import checksums, changes from util.http import abort diff --git a/test/test_queue.py b/test/test_queue.py index c29568951..433a350d8 100644 --- a/test/test_queue.py +++ b/test/test_queue.py @@ -9,12 +9,23 @@ from data.queue import WorkQueue QUEUE_NAME = 'testqueuename' +class SaveLastCountReporter(object): + def __init__(self): + self.running = None + self.total = None + + def __call__(self, running, total_jobs): + self.running = running + self.total = total_jobs + + class QueueTestCase(unittest.TestCase): TEST_MESSAGE_1 = json.dumps({'data': 1}) TEST_MESSAGE_2 = json.dumps({'data': 2}) def setUp(self): - self.queue = WorkQueue(QUEUE_NAME) + self.reporter = SaveLastCountReporter() + self.queue = WorkQueue(QUEUE_NAME, reporter=self.reporter) setup_database_for_testing(self) def tearDown(self): @@ -23,33 +34,52 @@ class QueueTestCase(unittest.TestCase): class TestQueue(QueueTestCase): def test_same_canonical_names(self): + self.assertEqual(self.reporter.running, None) + self.assertEqual(self.reporter.total, None) + self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) self.queue.put(['abc', 'def'], self.TEST_MESSAGE_2) + self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.total, 1) one = self.queue.get() self.assertNotEqual(None, one) self.assertEqual(self.TEST_MESSAGE_1, one.body) + self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.total, 1) two_fail = self.queue.get() self.assertEqual(None, two_fail) + self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.total, 1) self.queue.complete(one) + self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.total, 1) two = self.queue.get() self.assertNotEqual(None, two) self.assertEqual(self.TEST_MESSAGE_2, two.body) + self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.total, 1) def test_different_canonical_names(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) 
self.queue.put(['abc', 'ghi'], self.TEST_MESSAGE_2) + self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.total, 2) one = self.queue.get() self.assertNotEqual(None, one) self.assertEqual(self.TEST_MESSAGE_1, one.body) + self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.total, 2) two = self.queue.get() self.assertNotEqual(None, two) self.assertEqual(self.TEST_MESSAGE_2, two.body) + self.assertEqual(self.reporter.running, 2) + self.assertEqual(self.reporter.total, 2) def test_canonical_name(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) @@ -63,17 +93,26 @@ class TestQueue(QueueTestCase): def test_expiration(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) + self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.total, 1) one = self.queue.get(processing_time=0.5) self.assertNotEqual(None, one) + self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.total, 1) one_fail = self.queue.get() self.assertEqual(None, one_fail) time.sleep(1) + self.queue.update_metrics() + self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.total, 1) one_again = self.queue.get() self.assertNotEqual(None, one_again) + self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.total, 1) def test_specialized_queue(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) diff --git a/util/queuemetrics.py b/util/queuemetrics.py new file mode 100644 index 000000000..bc4784b4e --- /dev/null +++ b/util/queuemetrics.py @@ -0,0 +1,51 @@ +import logging +import boto + + +logger = logging.getLogger(__name__) + + +class NullReporter(object): + def report(self, running_count, total_count): + pass + + +class CloudWatchReporter(object): + def __init__(self, aws_access_key, aws_secret_key, namespace, name): + self._connection = boto.connect_cloudwatch(aws_access_key, aws_secret_key) + self._namespace = namespace + self._name = name + + def report(self, running_count, total_count): + need_capacity_count = total_count - running_count + self._connection.put_metric_data(self._namespace, self._name, need_capacity_count, + unit='Count') + + +class QueueMetrics(object): + def __init__(self, app=None): + self.app = app + if app is not None: + self.state = self.init_app(app) + else: + self.state = None + + def init_app(self, app): + analytics_type = app.config.get('QUEUE_METRICS_TYPE', 'Null') + + if analytics_type == 'CloudWatch': + access_key = app.config.get('QUEUE_METRICS_AWS_ACCESS_KEY', '') + secret_key = app.config.get('QUEUE_METRICS_AWS_SECRET_KEY', '') + namespace = app.config.get('QUEUE_METRICS_NAMESPACE', '') + name = app.config.get('QUEUE_METRICS_NAME', '') + reporter = CloudWatchReporter(access_key, secret_key, namespace, name) + else: + reporter = NullReporter() + + # register extension with app + app.extensions = getattr(app, 'extensions', {}) + app.extensions['queuemetrics'] = reporter + return reporter + + def __getattr__(self, name): + return getattr(self.state, name, None) diff --git a/workers/diffsworker.py b/workers/diffsworker.py index 85b615cbe..70f74f1db 100644 --- a/workers/diffsworker.py +++ b/workers/diffsworker.py @@ -1,7 +1,7 @@ import logging import argparse -from data.queue import image_diff_queue +from app import image_diff_queue from data.model import DataModelException from endpoints.registry import process_image_changes from workers.worker import Worker diff --git a/workers/dockerfilebuild.py b/workers/dockerfilebuild.py index dbd9ab3a4..bbd326775 100644 
--- a/workers/dockerfilebuild.py +++ b/workers/dockerfilebuild.py @@ -18,10 +18,9 @@ from threading import Event from uuid import uuid4 from collections import defaultdict -from data.queue import dockerfile_build_queue from data import model from workers.worker import Worker, WorkerUnhealthyException, JobException -from app import userfiles as user_files, build_logs, sentry +from app import userfiles as user_files, build_logs, sentry, dockerfile_build_queue from util.safetar import safe_extractall from util.dockerfileparse import parse_dockerfile, ParsedDockerfile, serialize_dockerfile diff --git a/workers/webhookworker.py b/workers/webhookworker.py index 2b785acb6..ccff884c2 100644 --- a/workers/webhookworker.py +++ b/workers/webhookworker.py @@ -3,7 +3,7 @@ import argparse import requests import json -from data.queue import webhook_queue +from app import webhook_queue from workers.worker import Worker From f4c488f9b675a1ba9babf7ec43978b7068c63945 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Thu, 22 May 2014 13:50:06 -0400 Subject: [PATCH 13/34] Fix the queue query for old jobs which won't run. --- data/queue.py | 40 ++++++++++++++++++---------------------- util/queuemetrics.py | 2 ++ 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/data/queue.py b/data/queue.py index 77868ad0a..be0c8301b 100644 --- a/data/queue.py +++ b/data/queue.py @@ -29,6 +29,13 @@ class WorkQueue(object): QueueItem.processing_expires > now, QueueItem.queue_name ** name_match_query)) + def _available_jobs(self, now, name_match_query, running_query): + return (QueueItem + .select() + .where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now, + ((QueueItem.available == True) | (QueueItem.processing_expires <= now)), + QueueItem.retries_remaining > 0, ~(QueueItem.queue_name << running_query))) + def _name_match_query(self): return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list) @@ -39,18 +46,14 @@ class WorkQueue(object): now = datetime.now() name_match_query = self._name_match_query() - total_jobs = (QueueItem - .select(QueueItem.queue_name) - .where(QueueItem.queue_name ** name_match_query, - QueueItem.available_after <= now, - ((QueueItem.available == True) | (QueueItem.processing_expires > now) | - (QueueItem.retries_remaining > 0))) - .distinct() - .count()) + running_query = self._running_jobs(now, name_match_query) + running_count =running_query.distinct().count() - running = self._running_jobs(now, name_match_query).distinct().count() + avialable_query = self._available_jobs(now, name_match_query, running_query) + available_count = avialable_query.select(QueueItem.queue_name).distinct().count() - self._reporter(running, total_jobs) + + self._reporter(running_count, running_count + available_count) def update_metrics(self): with self._transaction_factory(db): @@ -87,24 +90,17 @@ class WorkQueue(object): with self._transaction_factory(db): running = self._running_jobs(now, name_match_query) - - avail = QueueItem.select().where(QueueItem.queue_name ** name_match_query, - QueueItem.available_after <= now, - ((QueueItem.available == True) | - (QueueItem.processing_expires <= now)), - QueueItem.retries_remaining > 0, - ~(QueueItem.queue_name << running)) - - found = list(avail.limit(1).order_by(QueueItem.id)) + avail = self._available_jobs(now, name_match_query, running) item = None - - if found: - item = found[0] + try: + item = avail.order_by(QueueItem.id).get() item.available = False item.processing_expires = now + 
timedelta(seconds=processing_time) item.retries_remaining -= 1 item.save() + except QueueItem.DoesNotExist: + pass self._report_queue_metrics() diff --git a/util/queuemetrics.py b/util/queuemetrics.py index bc4784b4e..10b5caf3b 100644 --- a/util/queuemetrics.py +++ b/util/queuemetrics.py @@ -17,6 +17,8 @@ class CloudWatchReporter(object): self._name = name def report(self, running_count, total_count): + logger.debug('Worker indicated %s running count and %s total count', running_count, + total_count) need_capacity_count = total_count - running_count self._connection.put_metric_data(self._namespace, self._name, need_capacity_count, unit='Count') From a9a8105fbcc2ecb69b86175f6932b33c7b67d9c0 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Thu, 22 May 2014 16:52:51 -0400 Subject: [PATCH 14/34] Add credit card expiration date to the billing info screen --- endpoints/api/billing.py | 4 +++- static/css/quay.css | 12 ++++++++++++ static/directives/billing-options.html | 8 ++++++++ static/js/app.js | 7 +++++++ 4 files changed, 30 insertions(+), 1 deletion(-) diff --git a/endpoints/api/billing.py b/endpoints/api/billing.py index 6880308a0..3e13df6b6 100644 --- a/endpoints/api/billing.py +++ b/endpoints/api/billing.py @@ -36,7 +36,9 @@ def get_card(user): card_info = { 'owner': default_card.name, 'type': default_card.type, - 'last4': default_card.last4 + 'last4': default_card.last4, + 'exp_month': default_card.exp_month, + 'exp_year': default_card.exp_year } return {'card': card_info} diff --git a/static/css/quay.css b/static/css/quay.css index e6cf04f1e..68e317c70 100644 --- a/static/css/quay.css +++ b/static/css/quay.css @@ -478,6 +478,18 @@ i.toggle-icon:hover { color: black; } +.billing-options-element .current-card .expires:before { + content: "Expires:"; + color: #aaa; + font-size: 12px; +} + + +.billing-options-element .current-card .expires { + margin-left: 20px; + font-size: 12px; +} + .billing-options-element .current-card img { margin-right: 10px; vertical-align: middle; diff --git a/static/directives/billing-options.html b/static/directives/billing-options.html index 8ae5115d5..374715a2f 100644 --- a/static/directives/billing-options.html +++ b/static/directives/billing-options.html @@ -7,10 +7,18 @@
[The HTML markup of this billing-options.html hunk was stripped during extraction; only the text content is recoverable. The added lines render a warning banner reading "Your current credit card is expiring soon!" plus a new expiration readout, {{ currentCard.exp_month }} / {{ currentCard.exp_year }}, displayed alongside the masked card number ****-****-****-{{ currentCard.last4 }} and the existing "No credit card found" fallback.]
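The billing API now returns exp_month and exp_year with the default card, and the template additions summarized above surface them; the actual check is the isExpiringSoon helper in the app.js diff just below. A rough Python equivalent of that date arithmetic, assuming a card stays valid through the last day of its stamped month (JavaScript's zero-indexed months make new Date(exp_year, exp_month, 1) in app.js land on the first day of the month after expiration, which is exactly the expiry instant):

from datetime import datetime

def is_expiring_soon(exp_month, exp_year, now=None):
    """True when fewer than 60 days remain before the card expires."""
    now = now or datetime.utcnow()
    # A card stamped MM/YYYY is usable through the end of that month, so the
    # expiry instant is the first day of the following month.
    if exp_month == 12:
        expires = datetime(exp_year + 1, 1, 1)
    else:
        expires = datetime(exp_year, exp_month + 1, 1)
    return (expires - now).days < 60

For a card stamped 06/2014, expires is 2014-07-01 and the warning switches on in early May 2014; an already-expired card also reports True, since the remaining window is negative.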
diff --git a/static/js/app.js b/static/js/app.js index b20c4b082..75220520c 100644 --- a/static/js/app.js +++ b/static/js/app.js @@ -3293,6 +3293,13 @@ quayApp.directive('billingOptions', function () { PlanService.unregisterListener(this); }); + $scope.isExpiringSoon = function(cardInfo) { + var current = new Date(); + var expires = new Date(cardInfo.exp_year, cardInfo.exp_month, 1); + var difference = expires - current; + return difference < (60 * 60 * 24 * 60 * 1000 /* 60 days */); + }; + $scope.changeCard = function() { var previousCard = $scope.currentCard; $scope.changingCard = true; From 0b6552d6ccf2a08b46669459fdf95fc47ad6038c Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Fri, 23 May 2014 14:16:26 -0400 Subject: [PATCH 15/34] Fix the metrics so they are usable for scaling the workers down and up. Switch all datetimes which touch the database from now to utcnow. Fix the worker Dockerfile. --- Dockerfile.buildworker | 2 ++ auth/auth.py | 2 +- data/billing.py | 8 ++--- data/model/oauth.py | 6 ++-- data/queue.py | 51 ++++++++++++++--------------- test/test_queue.py | 66 +++++++++++++++++++++++++++----------- util/analytics.py | 2 +- util/queuemetrics.py | 59 ++++++++++++++++++++++++++-------- workers/dockerfilebuild.py | 12 +++---- workers/worker.py | 4 +++ 10 files changed, 137 insertions(+), 75 deletions(-) diff --git a/Dockerfile.buildworker b/Dockerfile.buildworker index 4ad6ba6ff..d49387dfa 100644 --- a/Dockerfile.buildworker +++ b/Dockerfile.buildworker @@ -8,6 +8,8 @@ RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev l ### End common section ### +RUN apt-get install -y libldap2-dev libsasl2-dev + RUN apt-get install -y lxc aufs-tools RUN usermod -v 100000-200000 -w 100000-200000 root diff --git a/auth/auth.py b/auth/auth.py index 715b5a0dd..3616792ad 100644 --- a/auth/auth.py +++ b/auth/auth.py @@ -41,7 +41,7 @@ def _validate_and_apply_oauth_token(token): } abort(401, message='OAuth access token could not be validated: %(token)s', issue='invalid-oauth-token', token=token, headers=authenticate_header) - elif validated.expires_at <= datetime.now(): + elif validated.expires_at <= datetime.utcnow(): logger.info('OAuth access with an expired token: %s', token) authenticate_header = { 'WWW-Authenticate': ('Bearer error="invalid_token", ' diff --git a/data/billing.py b/data/billing.py index 8872ad87f..723760210 100644 --- a/data/billing.py +++ b/data/billing.py @@ -131,10 +131,10 @@ class FakeStripe(object): FAKE_SUBSCRIPTION = AttrDict({ 'plan': FAKE_PLAN, - 'current_period_start': timegm(datetime.now().utctimetuple()), - 'current_period_end': timegm((datetime.now() + timedelta(days=30)).utctimetuple()), - 'trial_start': timegm(datetime.now().utctimetuple()), - 'trial_end': timegm((datetime.now() + timedelta(days=30)).utctimetuple()), + 'current_period_start': timegm(datetime.utcnow().utctimetuple()), + 'current_period_end': timegm((datetime.utcnow() + timedelta(days=30)).utctimetuple()), + 'trial_start': timegm(datetime.utcnow().utctimetuple()), + 'trial_end': timegm((datetime.utcnow() + timedelta(days=30)).utctimetuple()), }) FAKE_CARD = AttrDict({ diff --git a/data/model/oauth.py b/data/model/oauth.py index b99a9cb58..309e2122a 100644 --- a/data/model/oauth.py +++ b/data/model/oauth.py @@ -65,7 +65,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider): .switch(OAuthAccessToken) .join(User) .where(OAuthApplication.client_id == client_id, User.username == username, - OAuthAccessToken.expires_at > datetime.now())) + 
OAuthAccessToken.expires_at > datetime.utcnow())) found = list(found) logger.debug('Found %s matching tokens.', len(found)) long_scope_string = ','.join([token.scope for token in found]) @@ -116,7 +116,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider): raise RuntimeError('Username must be in the data field') app = OAuthApplication.get(client_id=client_id) - expires_at = datetime.now() + timedelta(seconds=expires_in) + expires_at = datetime.utcnow() + timedelta(seconds=expires_in) OAuthAccessToken.create(application=app, authorized_user=user, scope=scope, access_token=access_token, token_type=token_type, expires_at=expires_at, refresh_token=refresh_token, data=data) @@ -274,7 +274,7 @@ def list_applications_for_org(org): def create_access_token_for_testing(user, client_id, scope): - expires_at = datetime.now() + timedelta(seconds=10000) + expires_at = datetime.utcnow() + timedelta(seconds=10000) application = get_application_for_client_id(client_id) OAuthAccessToken.create(application=application, authorized_user=user, scope=scope, token_type='token', access_token='test', diff --git a/data/queue.py b/data/queue.py index be0c8301b..44d7ad531 100644 --- a/data/queue.py +++ b/data/queue.py @@ -12,6 +12,7 @@ class WorkQueue(object): self._queue_name = queue_name self._reporter = reporter self._transaction_factory = transaction_factory + self._currently_processing = False if canonical_name_match_list is None: self._canonical_name_match_list = [] @@ -39,25 +40,21 @@ class WorkQueue(object): def _name_match_query(self): return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list) - def _report_queue_metrics(self): - if self._reporter is None: - return - - now = datetime.now() - name_match_query = self._name_match_query() - - running_query = self._running_jobs(now, name_match_query) - running_count =running_query.distinct().count() - - avialable_query = self._available_jobs(now, name_match_query, running_query) - available_count = avialable_query.select(QueueItem.queue_name).distinct().count() - - - self._reporter(running_count, running_count + available_count) - def update_metrics(self): with self._transaction_factory(db): - self._report_queue_metrics() + if self._reporter is None: + return + + now = datetime.utcnow() + name_match_query = self._name_match_query() + + running_query = self._running_jobs(now, name_match_query) + running_count =running_query.distinct().count() + + avialable_query = self._available_jobs(now, name_match_query, running_query) + available_count = avialable_query.select(QueueItem.queue_name).distinct().count() + + self._reporter(self._currently_processing, running_count, running_count + available_count) def put(self, canonical_name_list, message, available_after=0, retries_remaining=5): """ @@ -72,19 +69,18 @@ class WorkQueue(object): } if available_after: - available_date = datetime.now() + timedelta(seconds=available_after) + available_date = datetime.utcnow() + timedelta(seconds=available_after) params['available_after'] = available_date with self._transaction_factory(db): QueueItem.create(**params) - self._report_queue_metrics() def get(self, processing_time=300): """ Get an available item and mark it as unavailable for the default of five minutes. 
""" - now = datetime.now() + now = datetime.utcnow() name_match_query = self._name_match_query() @@ -99,21 +95,22 @@ class WorkQueue(object): item.processing_expires = now + timedelta(seconds=processing_time) item.retries_remaining -= 1 item.save() - except QueueItem.DoesNotExist: - pass - self._report_queue_metrics() + self._currently_processing = True + except QueueItem.DoesNotExist: + self._currently_processing = False + pass return item def complete(self, completed_item): with self._transaction_factory(db): completed_item.delete_instance() - self._report_queue_metrics() + self._currently_processing = False def incomplete(self, incomplete_item, retry_after=300, restore_retry=False): with self._transaction_factory(db): - retry_date = datetime.now() + timedelta(seconds=retry_after) + retry_date = datetime.utcnow() + timedelta(seconds=retry_after) incomplete_item.available_after = retry_date incomplete_item.available = True @@ -121,11 +118,11 @@ class WorkQueue(object): incomplete_item.retries_remaining += 1 incomplete_item.save() - self._report_queue_metrics() + self._currently_processing = False @staticmethod def extend_processing(queue_item, seconds_from_now): - new_expiration = datetime.now() + timedelta(seconds=seconds_from_now) + new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now) # Only actually write the new expiration to the db if it moves the expiration some minimum if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION: diff --git a/test/test_queue.py b/test/test_queue.py index 024a00d72..6c1660eb7 100644 --- a/test/test_queue.py +++ b/test/test_queue.py @@ -2,6 +2,8 @@ import unittest import json import time +from functools import wraps + from app import app from initdb import setup_database_for_testing, finished_database_for_testing from data.queue import WorkQueue @@ -12,14 +14,36 @@ QUEUE_NAME = 'testqueuename' class SaveLastCountReporter(object): def __init__(self): - self.running = None + self.currently_processing = None + self.running_count = None self.total = None - def __call__(self, running, total_jobs): - self.running = running + def __call__(self, currently_processing, running_count, total_jobs): + self.currently_processing = currently_processing + self.running_count = running_count self.total = total_jobs +class AutoUpdatingQueue(object): + def __init__(self, queue_to_wrap): + self._queue = queue_to_wrap + + def _wrapper(self, func): + @wraps(func) + def wrapper(*args, **kwargs): + to_return = func(*args, **kwargs) + self._queue.update_metrics() + return to_return + return wrapper + + def __getattr__(self, attr_name): + method_or_attr = getattr(self._queue, attr_name) + if callable(method_or_attr): + return self._wrapper(method_or_attr) + else: + return method_or_attr + + class QueueTestCase(unittest.TestCase): TEST_MESSAGE_1 = json.dumps({'data': 1}) TEST_MESSAGE_2 = json.dumps({'data': 2}) @@ -27,7 +51,8 @@ class QueueTestCase(unittest.TestCase): def setUp(self): self.reporter = SaveLastCountReporter() self.transaction_factory = app.config['DB_TRANSACTION_FACTORY'] - self.queue = WorkQueue(QUEUE_NAME, self.transaction_factory, reporter=self.reporter) + self.queue = AutoUpdatingQueue(WorkQueue(QUEUE_NAME, self.transaction_factory, + reporter=self.reporter)) setup_database_for_testing(self) def tearDown(self): @@ -36,51 +61,56 @@ class QueueTestCase(unittest.TestCase): class TestQueue(QueueTestCase): def test_same_canonical_names(self): - self.assertEqual(self.reporter.running, None) + 
self.assertEqual(self.reporter.currently_processing, None) + self.assertEqual(self.reporter.running_count, None) self.assertEqual(self.reporter.total, None) self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) self.queue.put(['abc', 'def'], self.TEST_MESSAGE_2) - self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.currently_processing, False) + self.assertEqual(self.reporter.running_count, 0) self.assertEqual(self.reporter.total, 1) one = self.queue.get() self.assertNotEqual(None, one) self.assertEqual(self.TEST_MESSAGE_1, one.body) - self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.currently_processing, True) + self.assertEqual(self.reporter.running_count, 1) self.assertEqual(self.reporter.total, 1) two_fail = self.queue.get() self.assertEqual(None, two_fail) - self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.running_count, 1) self.assertEqual(self.reporter.total, 1) self.queue.complete(one) - self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.currently_processing, False) + self.assertEqual(self.reporter.running_count, 0) self.assertEqual(self.reporter.total, 1) two = self.queue.get() self.assertNotEqual(None, two) + self.assertEqual(self.reporter.currently_processing, True) self.assertEqual(self.TEST_MESSAGE_2, two.body) - self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.running_count, 1) self.assertEqual(self.reporter.total, 1) def test_different_canonical_names(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) self.queue.put(['abc', 'ghi'], self.TEST_MESSAGE_2) - self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.running_count, 0) self.assertEqual(self.reporter.total, 2) one = self.queue.get() self.assertNotEqual(None, one) self.assertEqual(self.TEST_MESSAGE_1, one.body) - self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.running_count, 1) self.assertEqual(self.reporter.total, 2) two = self.queue.get() self.assertNotEqual(None, two) self.assertEqual(self.TEST_MESSAGE_2, two.body) - self.assertEqual(self.reporter.running, 2) + self.assertEqual(self.reporter.running_count, 2) self.assertEqual(self.reporter.total, 2) def test_canonical_name(self): @@ -95,12 +125,12 @@ class TestQueue(QueueTestCase): def test_expiration(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) - self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.running_count, 0) self.assertEqual(self.reporter.total, 1) one = self.queue.get(processing_time=0.5) self.assertNotEqual(None, one) - self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.running_count, 1) self.assertEqual(self.reporter.total, 1) one_fail = self.queue.get() @@ -108,19 +138,19 @@ class TestQueue(QueueTestCase): time.sleep(1) self.queue.update_metrics() - self.assertEqual(self.reporter.running, 0) + self.assertEqual(self.reporter.running_count, 0) self.assertEqual(self.reporter.total, 1) one_again = self.queue.get() self.assertNotEqual(None, one_again) - self.assertEqual(self.reporter.running, 1) + self.assertEqual(self.reporter.running_count, 1) self.assertEqual(self.reporter.total, 1) def test_specialized_queue(self): self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1) self.queue.put(['def', 'def'], self.TEST_MESSAGE_2) - my_queue = WorkQueue(QUEUE_NAME, self.transaction_factory, ['def']) + my_queue = AutoUpdatingQueue(WorkQueue(QUEUE_NAME, self.transaction_factory, ['def'])) two = my_queue.get() 
self.assertNotEqual(None, two)
diff --git a/util/analytics.py b/util/analytics.py
index 4ae1d9db0..6dfdf923c 100644
--- a/util/analytics.py
+++ b/util/analytics.py
@@ -26,7 +26,7 @@ class SendToMixpanel(Process):
     self.daemon = True
 
   def run(self):
-    logger.debug('Starting sender process.')
+    logger.debug('Starting mixpanel sender process.')
     while True:
       mp_request = self._mp_queue.get()
       logger.debug('Got queued mixpanel reqeust.')
diff --git a/util/queuemetrics.py b/util/queuemetrics.py
index 10b5caf3b..5d969e9bb 100644
--- a/util/queuemetrics.py
+++ b/util/queuemetrics.py
@@ -1,27 +1,54 @@
 import logging
 import boto
 
+from multiprocessing import Process, Queue
+
 
 logger = logging.getLogger(__name__)
 
 
 class NullReporter(object):
-  def report(self, running_count, total_count):
+  def report(self, *args):
     pass
 
 
-class CloudWatchReporter(object):
-  def __init__(self, aws_access_key, aws_secret_key, namespace, name):
-    self._connection = boto.connect_cloudwatch(aws_access_key, aws_secret_key)
+class QueueingCloudWatchReporter(object):
+  def __init__(self, request_queue, namespace, need_capacity_name, build_percent_name):
     self._namespace = namespace
-    self._name = name
+    self._need_capacity_name = need_capacity_name
+    self._build_percent_name = build_percent_name
+    self._put_metrics_queue = request_queue
 
-  def report(self, running_count, total_count):
+  def _send_to_queue(self, *args, **kwargs):
+    self._put_metrics_queue.put((args, kwargs))
+
+  def report(self, currently_processing, running_count, total_count):
     logger.debug('Worker indicated %s running count and %s total count', running_count,
                  total_count)
+
     need_capacity_count = total_count - running_count
-    self._connection.put_metric_data(self._namespace, self._name, need_capacity_count,
-                                     unit='Count')
+    self._send_to_queue(self._namespace, self._need_capacity_name, need_capacity_count,
+                        unit='Count')
+
+    building_percent = 100 if currently_processing else 0
+    self._send_to_queue(self._namespace, self._build_percent_name, building_percent,
+                        unit='Percent')
+
+
+class SendToCloudWatch(Process):
+  def __init__(self, request_queue, aws_access_key, aws_secret_key):
+    Process.__init__(self)
+    self._aws_access_key = aws_access_key
+    self._aws_secret_key = aws_secret_key
+    self._put_metrics_queue = request_queue
+    self.daemon = True
+
+  def run(self):
+    logger.debug('Starting cloudwatch sender process.')
+    connection = boto.connect_cloudwatch(self._aws_access_key, self._aws_secret_key)
+    while True:
+      put_metric_args, kwargs = self._put_metrics_queue.get()
+      logger.debug('Got queued put metrics request.')
+      connection.put_metric_data(*put_metric_args, **kwargs)
 
 
 class QueueMetrics(object):
@@ -36,11 +63,17 @@ class QueueMetrics(object):
     analytics_type = app.config.get('QUEUE_METRICS_TYPE', 'Null')
 
     if analytics_type == 'CloudWatch':
-      access_key = app.config.get('QUEUE_METRICS_AWS_ACCESS_KEY', '')
-      secret_key = app.config.get('QUEUE_METRICS_AWS_SECRET_KEY', '')
-      namespace = app.config.get('QUEUE_METRICS_NAMESPACE', '')
-      name = app.config.get('QUEUE_METRICS_NAME', '')
-      reporter = CloudWatchReporter(access_key, secret_key, namespace, name)
+      access_key = app.config.get('QUEUE_METRICS_AWS_ACCESS_KEY')
+      secret_key = app.config.get('QUEUE_METRICS_AWS_SECRET_KEY')
+      namespace = app.config.get('QUEUE_METRICS_NAMESPACE')
+      req_capacity_name = app.config.get('QUEUE_METRICS_CAPACITY_SHORTAGE_NAME')
+      build_percent_name = app.config.get('QUEUE_METRICS_BUILD_PERCENT_NAME')
+
+      request_queue = Queue()
+      reporter = QueueingCloudWatchReporter(request_queue,
namespace, req_capacity_name, + build_percent_name) + sender = SendToCloudWatch(request_queue, access_key, secret_key) + sender.start() else: reporter = NullReporter() diff --git a/workers/dockerfilebuild.py b/workers/dockerfilebuild.py index bbd326775..477e0282a 100644 --- a/workers/dockerfilebuild.py +++ b/workers/dockerfilebuild.py @@ -1,3 +1,7 @@ +import logging.config + +logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False) + import logging import daemon import argparse @@ -25,12 +29,6 @@ from util.safetar import safe_extractall from util.dockerfileparse import parse_dockerfile, ParsedDockerfile, serialize_dockerfile -root_logger = logging.getLogger('') -root_logger.setLevel(logging.DEBUG) - -FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s' -formatter = logging.Formatter(FORMAT) - logger = logging.getLogger(__name__) TIMEOUT_PERIOD_MINUTES = 20 @@ -558,8 +556,6 @@ parser.add_argument('--cachegb', default=20, type=float, help='Maximum cache size in gigabytes.') args = parser.parse_args() -logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False) - worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue, reservation_seconds=RESERVATION_TIME) worker.start(start_status_server_port=8000) diff --git a/workers/worker.py b/workers/worker.py index 112c4f6bc..094f2154f 100644 --- a/workers/worker.py +++ b/workers/worker.py @@ -124,6 +124,9 @@ class Worker(object): if not self._stop.is_set(): logger.debug('No more work.') + def update_queue_metrics(self): + self._queue.update_metrics() + def start(self, start_status_server_port=None): if start_status_server_port is not None: # Start a status server on a thread @@ -140,6 +143,7 @@ class Worker(object): self._sched.start() self._sched.add_interval_job(self.poll_queue, seconds=self._poll_period_seconds, start_date=soon) + self._sched.add_interval_job(self.update_queue_metrics, seconds=60, start_date=soon) self._sched.add_interval_job(self.watchdog, seconds=self._watchdog_period_seconds) signal.signal(signal.SIGTERM, self.terminate) From 81f235e5d60f7de1235db289bc5337dfcbaba618 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Fri, 23 May 2014 15:20:40 -0400 Subject: [PATCH 16/34] Fix error when no token is returned by Github --- endpoints/callbacks.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/endpoints/callbacks.py b/endpoints/callbacks.py index 43aa2ac6b..4085798b0 100644 --- a/endpoints/callbacks.py +++ b/endpoints/callbacks.py @@ -35,7 +35,11 @@ def exchange_github_code_for_token(code, for_login=True): get_access_token = client.post(app.config['GITHUB_TOKEN_URL'], params=payload, headers=headers) - token = get_access_token.json()['access_token'] + json_data = get_access_token.json() + if not json_data: + return '' + + token = json_data.get('access_token', '') return token From 404055decca32252e408714ab0c55cf83d966cd3 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Tue, 27 May 2014 12:38:18 -0400 Subject: [PATCH 17/34] Make sure we always have a valid container element before calculating the dimensions of the various charts --- static/js/graphing.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/static/js/graphing.js b/static/js/graphing.js index df1405503..b5abf2d00 100644 --- a/static/js/graphing.js +++ b/static/js/graphing.js @@ -139,6 +139,7 @@ ImageHistoryTree.prototype.setupOverscroll_ = function() { ImageHistoryTree.prototype.updateDimensions_ = function() { var container = this.container_; var 
dimensions = this.calculateDimensions_(container);
+  if (!dimensions) { return; }
 
   var m = dimensions.m;
   var w = dimensions.w;
@@ -909,6 +910,10 @@ function FileTreeBase() {
  */
 FileTreeBase.prototype.calculateDimensions_ = function(container) {
   var containerElm = document.getElementById(container);
+  if (!containerElm) {
+    return null;
+  }
+
   var cw = containerElm ? containerElm.clientWidth : 1200;
   var barHeight = 20;
   var ch = (this.getNodesHeight() * barHeight) + 40;
@@ -940,6 +945,7 @@ FileTreeBase.prototype.updateDimensions_ = function() {
   var container = this.container_;
 
   var dimensions = this.calculateDimensions_(container);
+  if (!dimensions) { return; }
 
   var w = dimensions.w;
   var h = dimensions.h;
@@ -989,6 +995,7 @@ FileTreeBase.prototype.draw = function(container) {
   this.container_ = container;
 
   var dimensions = this.calculateDimensions_(container);
+  if (!dimensions) { return; }
 
   var w = dimensions.w;
   var h = dimensions.h;
@@ -1107,6 +1114,8 @@ FileTreeBase.prototype.getVisibleCount_ = function(node) {
  */
 FileTreeBase.prototype.getContainerHeight_ = function() {
   var dimensions = this.calculateDimensions_(this.container_);
+  if (!dimensions) { return; }
+
   var barHeight = this.barHeight_;
   var height = (this.getVisibleCount_(this.root_) * (barHeight + 2));
   return height + dimensions.m[0] + dimensions.m[2];

From e8355f301e957bb6eff6b8f5d7672195022282ab Mon Sep 17 00:00:00 2001
From: Jake Moshenko
Date: Tue, 27 May 2014 15:19:23 -0400
Subject: [PATCH 18/34] Remove our deploy key from the workers/Readme which gets
 included in the Docker image.

---
 README.md         | 45 ++++++++++++++++++++++++++++++++++++++++++---
 workers/README.md | 35 -----------------------------------
 2 files changed, 42 insertions(+), 38 deletions(-)
 delete mode 100644 workers/README.md

diff --git a/README.md b/README.md
index 61cd823ad..7a4dc4153 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,11 @@ sudo docker push quay.io/quay/quay
 
 to prepare a new host:
 
+Deploy cloud-init script from quayconfig/cloudconfig/webserver.yaml
+
+or
+
+
 ```
 curl -s https://get.docker.io/ubuntu/ | sudo sh
 sudo apt-get update && sudo apt-get install -y git
@@ -34,11 +39,45 @@ cd ~/gantryd
 sudo venv/bin/python gantry.py ../quayconfig/production/gantry.json update quay
 ```
 
-start the log shipper (DEPRECATED):
+to build and upload the builder to quay
 
 ```
-sudo docker pull quay.io/quay/logstash
-sudo docker run -d -e REDIS_PORT_6379_TCP_ADDR=logs.quay.io -v /mnt/logs:/mnt/logs quay.io/quay/logstash quay.conf
+curl -s https://get.docker.io/ubuntu/ | sudo sh
+sudo apt-get update && sudo apt-get install -y git
+git clone https://bitbucket.org/yackob03/quay.git
+cd quay
+rm Dockerfile
+ln -s Dockerfile.buildworker Dockerfile
+sudo docker build -t quay.io/quay/builder .
+sudo docker push quay.io/quay/builder +``` + +to run the builder from a fresh 14.04 server: + +Deploy cloud-init script from quayconfig/cloudconfig/builder.yaml + +or + + +``` +sudo apt-get update && sudo apt-get install -y git lxc linux-image-extra-`uname -r` +curl -s https://get.docker.io/ubuntu/ | sudo sh +git clone https://github.com/DevTable/gantryd.git +cd gantryd +cat requirements.system | xargs sudo apt-get install -y +virtualenv --distribute venv +venv/bin/pip install -r requirements.txt +sudo docker login -p 9Y1PX7D3IE4KPSGCIALH17EM5V3ZTMP8CNNHJNXAQ2NJGAS48BDH8J1PUOZ869ML -u 'quay+deploy' -e notused quay.io +``` + +start the worker + +``` +cd ~ +git clone https://bitbucket.org/yackob03/quayconfig.git +sudo docker pull quay.io/quay/builder +cd ~/gantryd +sudo venv/bin/python gantry.py ../quayconfig/production/gantry.json update builder ``` running the tests: diff --git a/workers/README.md b/workers/README.md deleted file mode 100644 index f65164d89..000000000 --- a/workers/README.md +++ /dev/null @@ -1,35 +0,0 @@ -to build and upload the builder to quay - -``` -curl -s https://get.docker.io/ubuntu/ | sudo sh -sudo apt-get update && sudo apt-get install -y git -git clone git clone https://bitbucket.org/yackob03/quay.git -cd quay -rm Dockerfile -ln -s Dockerfile.buildworker Dockerfile -sudo docker build -t quay.io/quay/builder . -sudo docker push quay.io/quay/builder -``` - -to run the code from a fresh 14.04 server: - -``` -sudo apt-get update && sudo apt-get install -y git lxc linux-image-extra-`uname -r` -curl -s https://get.docker.io/ubuntu/ | sudo sh -git clone https://github.com/DevTable/gantryd.git -cd gantryd -cat requirements.system | xargs sudo apt-get install -y -virtualenv --distribute venv -venv/bin/pip install -r requirements.txt -sudo docker login -p 9Y1PX7D3IE4KPSGCIALH17EM5V3ZTMP8CNNHJNXAQ2NJGAS48BDH8J1PUOZ869ML -u 'quay+deploy' -e notused quay.io -``` - -start the worker - -``` -cd ~ -git clone https://bitbucket.org/yackob03/quayconfig.git -sudo docker pull quay.io/quay/builder -cd ~/gantryd -sudo venv/bin/python gantry.py ../quayconfig/production/gantry.json update builder -``` From 0ef1902957c8936b81b84a9d5640a5aaa8dc6c77 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Tue, 27 May 2014 15:24:29 -0400 Subject: [PATCH 19/34] Strip Quay password from readme. --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7a4dc4153..5824fea10 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ cd gantryd cat requirements.system | xargs sudo apt-get install -y virtualenv --distribute venv venv/bin/pip install -r requirements.txt -sudo docker login -p 9Y1PX7D3IE4KPSGCIALH17EM5V3ZTMP8CNNHJNXAQ2NJGAS48BDH8J1PUOZ869ML -u 'quay+deploy' -e notused staging.quay.io +sudo docker login -u 'quay+deploy' -e notused staging.quay.io ``` start the quay processes: @@ -67,7 +67,7 @@ cd gantryd cat requirements.system | xargs sudo apt-get install -y virtualenv --distribute venv venv/bin/pip install -r requirements.txt -sudo docker login -p 9Y1PX7D3IE4KPSGCIALH17EM5V3ZTMP8CNNHJNXAQ2NJGAS48BDH8J1PUOZ869ML -u 'quay+deploy' -e notused quay.io +sudo docker login -u 'quay+deploy' -e notused quay.io ``` start the worker From 33b43b75c0d9ff7def2138b60014a266f9f8c5d1 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Wed, 28 May 2014 13:51:52 -0400 Subject: [PATCH 20/34] Eliminate a lot of the if cases in create_user by separating them out. Add a limit to the number of users which can be created based on the license. 
Add support for creating and loading licenses. --- Dockerfile.web | 1 + app.py | 9 +++++++ data/model/legacy.py | 57 ++++++++++++++++++++++++++--------------- data/users.py | 8 +++--- endpoints/callbacks.py | 2 +- license.py | 13 ++++++++++ license.pyc | Bin 0 -> 915 bytes requirements-nover.txt | 1 + tools/createlicense.py | 38 +++++++++++++++++++++++++++ 9 files changed, 103 insertions(+), 26 deletions(-) create mode 100644 license.py create mode 100644 license.pyc create mode 100644 tools/createlicense.py diff --git a/Dockerfile.web b/Dockerfile.web index a202491b7..82b0f6b4e 100644 --- a/Dockerfile.web +++ b/Dockerfile.web @@ -47,6 +47,7 @@ ADD templates templates ADD util util ADD workers workers +ADD license.pyc license.pyc ADD app.py app.py ADD application.py application.py ADD config.py config.py diff --git a/app.py b/app.py index 92d3c516b..a8b701c66 100644 --- a/app.py +++ b/app.py @@ -19,9 +19,12 @@ from util.queuemetrics import QueueMetrics from data.billing import Billing from data.buildlogs import BuildLogs from data.queue import WorkQueue +from license import load_license +from datetime import datetime OVERRIDE_CONFIG_FILENAME = 'conf/stack/config.py' +LICENSE_FILENAME = 'conf/stack/license.enc' app = Flask(__name__) @@ -41,6 +44,12 @@ else: logger.debug('Applying config file: %s', OVERRIDE_CONFIG_FILENAME) app.config.from_pyfile(OVERRIDE_CONFIG_FILENAME) + logger.debug('Applying license config from: %s', LICENSE_FILENAME) + app.config.update(load_license(LICENSE_FILENAME)) + + if app.config.get('LICENSE_EXPIRATION', datetime.min) < datetime.utcnow(): + raise RuntimeError('License has expired, please contact support@quay.io') + features.import_features(app.config) Principal(app, use_sessions=False) diff --git a/data/model/legacy.py b/data/model/legacy.py index 80b5892de..76d0123be 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -64,7 +64,33 @@ class InvalidBuildTriggerException(DataModelException): pass -def create_user(username, password, email, add_change_pw_notification=True): +class TooManyUsersException(DataModelException): + pass + + +def is_create_user_allowed(): + return get_active_user_count() < config.app_config['LICENSE_USER_LIMIT'] + + +def create_user(username, password, email): + """ Creates a regular user, if allowed. """ + if not validate_password(password): + raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE) + + if not is_create_user_allowed(): + raise TooManyUsersException() + + created = _create_user(username, email) + + # Store the password hash + pw_hash = bcrypt.hashpw(password, bcrypt.gensalt()) + created.password_hash = pw_hash + + created.save() + + return created + +def _create_user(username, email): if not validate_email(email): raise InvalidEmailAddressException('Invalid email address: %s' % email) @@ -72,10 +98,6 @@ def create_user(username, password, email, add_change_pw_notification=True): if not username_valid: raise InvalidUsernameException('Invalid username %s: %s' % (username, username_issue)) - # We allow password none for the federated login case. 
- if password is not None and not validate_password(password): - raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE) - try: existing = User.get((User.username == username) | (User.email == email)) @@ -94,18 +116,7 @@ def create_user(username, password, email, add_change_pw_notification=True): pass try: - pw_hash = None - if password is not None: - pw_hash = bcrypt.hashpw(password, bcrypt.gensalt()) - - new_user = User.create(username=username, password_hash=pw_hash, - email=email) - - # If the password is None, then add a notification for the user to change - # their password ASAP. - if not pw_hash and add_change_pw_notification: - create_notification('password_required', new_user) - + new_user = User.create(username=username, email=email) return new_user except Exception as ex: raise DataModelException(ex.message) @@ -122,7 +133,7 @@ def is_username_unique(test_username): def create_organization(name, email, creating_user): try: # Create the org - new_org = create_user(name, None, email, add_change_pw_notification=False) + new_org = _create_user(name, email) new_org.organization = True new_org.save() @@ -335,8 +346,11 @@ def set_team_org_permission(team, team_role_name, set_by_username): return team -def create_federated_user(username, email, service_name, service_id): - new_user = create_user(username, None, email) +def create_federated_user(username, email, service_name, service_id, set_password_notification): + if not is_create_user_allowed(): + raise TooManyUsersException() + + new_user = _create_user(username, email) new_user.verified = True new_user.save() @@ -344,6 +358,9 @@ def create_federated_user(username, email, service_name, service_id): FederatedLogin.create(user=new_user, service=service, service_ident=service_id) + if set_password_notification: + create_notification('password_required', new_user) + return new_user diff --git a/data/users.py b/data/users.py index 65fbff0d1..4bde0518b 100644 --- a/data/users.py +++ b/data/users.py @@ -92,14 +92,12 @@ class LDAPUsers(object): logger.error('Unable to pick a username for user: %s', username) return None - db_user = model.create_user(valid_username, None, email, add_change_pw_notification=False) - db_user.verified = True - model.attach_federated_login(db_user, 'ldap', username) + db_user = model.create_federated_user(valid_username, email, 'ldap', username, + set_password_notification=False) else: # Update the db attributes from ldap db_user.email = email - - db_user.save() + db_user.save() return db_user diff --git a/endpoints/callbacks.py b/endpoints/callbacks.py index 4085798b0..3292b349e 100644 --- a/endpoints/callbacks.py +++ b/endpoints/callbacks.py @@ -87,7 +87,7 @@ def github_oauth_callback(): # try to create the user try: to_login = model.create_federated_user(username, found_email, 'github', - github_id) + github_id, set_password_notification=True) # Success, tell analytics analytics.track(to_login.username, 'register', {'service': 'github'}) diff --git a/license.py b/license.py new file mode 100644 index 000000000..b45d90cf8 --- /dev/null +++ b/license.py @@ -0,0 +1,13 @@ +import pickle + +from Crypto.PublicKey import RSA + +n = 
24311791124264168943780535074639421876317270880681911499019414944027362498498429776192966738844514582251884695124256895677070273097239290537016363098432785034818859765271229653729724078304186025013011992335454557504431888746007324285000011384941749613875855493086506022340155196030616409545906383713728780211095701026770053812741971198465120292345817928060114890913931047021503727972067476586739126160044293621653486418983183727572502888923949587290840425930251185737996066354726953382305020440374552871209809125535533731995494145421279907938079885061852265339259634996180877443852561265066616143910755505151318370667L
+e = 65537L
+
+def load_license(license_path):
+  decryptor = RSA.construct((n, e))
+  with open(license_path, 'rb') as encrypted_license:
+    decrypted_data = decryptor.encrypt(encrypted_license.read(), 0)
+
+  return pickle.loads(decrypted_data[0])
diff --git a/license.pyc b/license.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..df6085268aa2ad376677ab36a11af0446fa01c9e
GIT binary patch
literal 915
[base85-encoded binary payload omitted]
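The diffstat for this patch also lists tools/createlicense.py, but its body is not reproduced above. As a rough sketch only, assuming the PyCrypto package that license.py imports, the signing side would pickle the license limits and apply the private-key RSA operation, which is what load_license undoes by applying the public exponent. Every name, path, and payload field below is hypothetical; only the two config keys come from this patch series (app.py and data/model/legacy.py read them):

```
# Hypothetical sketch of tools/createlicense.py -- NOT the shipped tool.
# Assumes PyCrypto and a made-up private key path; the real payload fields
# beyond LICENSE_USER_LIMIT and LICENSE_EXPIRATION are unknown.
import pickle

from Crypto.PublicKey import RSA


def create_license(private_key_path, license_path, user_limit, expiration):
  # Mirror the config keys that app.py merges in after load_license().
  payload = pickle.dumps({
    'LICENSE_USER_LIMIT': user_limit,
    'LICENSE_EXPIRATION': expiration,
  })

  with open(private_key_path, 'rb') as key_file:
    private_key = RSA.importKey(key_file.read())

  # Textbook RSA with the private exponent; the pickled payload must stay
  # smaller than the modulus. load_license() reverses this by calling
  # decryptor.encrypt(data, 0) on the public key.
  encrypted = private_key.decrypt(payload)

  with open(license_path, 'wb') as license_file:
    license_file.write(encrypted)
```

Because load_license unpickles whatever validates against the hardcoded public key, the license file is exactly as trustworthy as the private key that produced it.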
+{% if not has_billing %} + + +{% endif %} + {% endblock %} From 69be86be97f87acfaa87ccc9798ecef7b297a540 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 28 May 2014 15:53:53 -0400 Subject: [PATCH 26/34] Add extra seat check in the user API call and turn off user->org conversion when authentication is LDAP --- config.py | 2 +- endpoints/api/user.py | 8 +++++++- static/js/controllers.js | 8 ++++---- static/partials/organizations.html | 2 +- static/partials/user-admin.html | 6 ++++-- 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/config.py b/config.py index 1cd28f967..ca1d0ce65 100644 --- a/config.py +++ b/config.py @@ -18,7 +18,7 @@ def build_requests_session(): # values are set to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list. CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID', 'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY', - 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN'] + 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE'] def getFrontendVisibleConfig(config_dict): diff --git a/endpoints/api/user.py b/endpoints/api/user.py index f21b77926..ee5e2e5e5 100644 --- a/endpoints/api/user.py +++ b/endpoints/api/user.py @@ -229,7 +229,12 @@ def conduct_signin(username_or_email, password): needs_email_verification = False invalid_credentials = False - verified = authentication.verify_user(username_or_email, password) + verified = None + try: + verified = authentication.verify_user(username_or_email, password) + except model.TooManyUsersException as ex: + raise license_error(exception=ex) + if verified: if common_login(verified): return {'success': True} @@ -247,6 +252,7 @@ def conduct_signin(username_or_email, password): @resource('/v1/user/convert') @internal_only +@show_if(app.config['AUTHENTICATION_TYPE'] == 'Database') class ConvertToOrganization(ApiResource): """ Operations for converting a user to an organization. 
""" schemas = { diff --git a/static/js/controllers.js b/static/js/controllers.js index e063630df..3a0d4bd1c 100644 --- a/static/js/controllers.js +++ b/static/js/controllers.js @@ -1589,7 +1589,7 @@ function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, $routeParams } function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, UserService, CookieService, KeyService, - $routeParams, $http, UIService, Features) { + $routeParams, $http, UIService, Features, Config) { $scope.Features = Features; if ($routeParams['migrate']) { @@ -1597,11 +1597,9 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use } UserService.updateUserIn($scope, function(user) { - if (!Features.GITHUB_LOGIN) { return; } - $scope.cuser = jQuery.extend({}, user); - if ($scope.cuser.logins) { + if (Features.GITHUB_LOGIN && $scope.cuser.logins) { for (var i = 0; i < $scope.cuser.logins.length; i++) { if ($scope.cuser.logins[i].service == 'github') { var githubId = $scope.cuser.logins[i].service_identifier; @@ -1694,6 +1692,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use }; $scope.reallyConvert = function() { + if (Config.AUTHENTICATION_TYPE != 'Database') { return; } + $scope.loading = true; var data = { diff --git a/static/partials/organizations.html b/static/partials/organizations.html index ba591faae..4eaae8ca6 100644 --- a/static/partials/organizations.html +++ b/static/partials/organizations.html @@ -10,7 +10,7 @@ Create New Organization - + diff --git a/test/testconfig.py b/test/testconfig.py index bdf94391a..4aa289aec 100644 --- a/test/testconfig.py +++ b/test/testconfig.py @@ -38,4 +38,5 @@ class TestConfig(DefaultConfig): LICENSE_USER_LIMIT = 500 LICENSE_EXPIRATION = datetime.now() + timedelta(weeks=520) LICENSE_EXPIRATION_WARNING = datetime.now() + timedelta(weeks=520) - \ No newline at end of file + + FEATURE_GITHUB_BUILD = True From a6fb64005d0280abb24ff3bb887c29352a5aef61 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Fri, 30 May 2014 18:28:47 -0400 Subject: [PATCH 33/34] Disable the other convert to organization button when using LDAP login. --- static/partials/organizations.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/partials/organizations.html b/static/partials/organizations.html index 4eaae8ca6..1befae37d 100644 --- a/static/partials/organizations.html +++ b/static/partials/organizations.html @@ -139,7 +139,7 @@ Create New Organization - +