diff --git a/.dockerignore b/.dockerignore index fb1e0c080..86ed5ed2a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,10 +4,11 @@ tools test/data/registry venv .git +!.git/HEAD .gitignore Bobfile README.md requirements-nover.txt run-local.sh .DS_Store -*.pyc \ No newline at end of file +*.pyc diff --git a/.gitignore b/.gitignore index 00e24caf7..9354014ce 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ node_modules static/ldn static/fonts stack_local +GIT_HEAD diff --git a/CHANGELOG.md b/CHANGELOG.md index f59fc5333..aec3c5752 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,29 @@ +### v1.12.0 + +- Added experimental Dex login support (#447, #468) +- Fixed tag pagination in API (#463) +- Improved performance for archiving build logs (#462, #466) +- Optimized cloud storage copying (#460) +- Fixed bug where LDN directory was given a relative domain not absolute (#458) +- Allow robot account names to have underscores (#453) +- Added missing SuperUser aggregate logs endpoint (#449) +- Made JWT validation more strict (#446, #448) +- Added dialog around restarting the container after setup (#441) +- Added selection of Swift API version (#444) +- Improved UX around organization name validation (#437) +- Stopped relying on undocumented behavior for OAuth redirects (#432) +- Hardened against S3 upload failures (#434) +- Added experimental automatic storage replication (#191) +- Deduplicated logging to syslog (#431, #440) +- Added list org member permissions back to API (#429) +- Fixed bug in parsing unicode Dockerfiles (#426) +- Added CloudWatch metrics for multipart uploads (#419) +- Updated CloudWatch metrics to send the max metrics per API call (#412) +- Limited the items auto-loaded from GitHub in trigger setup to 30 (#382) +- Tweaked build UX (#381, #386, #384, #410, #420, #422) +- Changed webhook notifications to also send client SSL certs (#374) +- Improved internal test suite (#381, #374, #388, #455, #457) + ### v1.11.2 - Fixed security bug with LDAP login (#376) diff --git a/Dockerfile b/Dockerfile index ef834f2b6..6b7cbf557 100644 --- a/Dockerfile +++ b/Dockerfile @@ -43,6 +43,7 @@ ADD conf/init/doupdatelimits.sh /etc/my_init.d/ ADD conf/init/copy_syslog_config.sh /etc/my_init.d/ ADD conf/init/runmigration.sh /etc/my_init.d/ ADD conf/init/syslog-ng.conf /etc/syslog-ng/ +ADD conf/init/zz_release.sh /etc/my_init.d/ ADD conf/init/service/ /etc/service/ @@ -53,6 +54,9 @@ RUN mkdir static/fonts static/ldn RUN venv/bin/python -m external_libraries RUN mkdir /usr/local/nginx/logs/ +# TODO(ssewell): only works on a detached head, make work with ref +RUN cat .git/HEAD > GIT_HEAD + # Run the tests RUN TEST=true venv/bin/python -m unittest discover -f RUN TEST=true venv/bin/python -m test.registry_tests -f diff --git a/app.py b/app.py index 5f54a9752..f00d9d559 100644 --- a/app.py +++ b/app.py @@ -26,11 +26,13 @@ from util import get_app_url from util.saas.analytics import Analytics from util.saas.exceptionlog import Sentry from util.names import urn_generator -from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig +from util.config.oauth import (GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig, + DexOAuthConfig) + from util.security.signing import Signer from util.saas.cloudwatch import start_cloudwatch_sender from util.saas.metricqueue import MetricQueue -from util.config.provider import FileConfigProvider, TestConfigProvider +from util.config.provider import get_config_provider from util.config.configutil import generate_secret_key from 
util.config.superusermanager import SuperUserManager @@ -40,8 +42,6 @@ OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py' OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG' -CONFIG_PROVIDER = FileConfigProvider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml', 'config.py') - app = Flask(__name__) logger = logging.getLogger(__name__) @@ -54,10 +54,13 @@ class RegexConverter(BaseConverter): app.url_map.converters['regex'] = RegexConverter -# Instantiate the default configuration (for test or for normal operation). -if 'TEST' in os.environ: - CONFIG_PROVIDER = TestConfigProvider() +# Instantiate the configuration. +is_testing = 'TEST' in os.environ +is_kubernetes = 'KUBERNETES_SERVICE_HOST' in os.environ +config_provider = get_config_provider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml', 'config.py', + testing=is_testing, kubernetes=is_kubernetes) +if is_testing: from test.testconfig import TestConfig logger.debug('Loading test config.') app.config.from_object(TestConfig()) @@ -68,7 +71,7 @@ else: app.teardown_request(database.close_db_filter) # Load the override config via the provider. -CONFIG_PROVIDER.update_app_config(app.config) +config_provider.update_app_config(app.config) # Update any configuration found in the override environment variable. OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG' @@ -140,13 +143,16 @@ github_login = GithubOAuthConfig(app.config, 'GITHUB_LOGIN_CONFIG') github_trigger = GithubOAuthConfig(app.config, 'GITHUB_TRIGGER_CONFIG') gitlab_trigger = GitLabOAuthConfig(app.config, 'GITLAB_TRIGGER_CONFIG') google_login = GoogleOAuthConfig(app.config, 'GOOGLE_LOGIN_CONFIG') -oauth_apps = [github_login, github_trigger, gitlab_trigger, google_login] +dex_login = DexOAuthConfig(app.config, 'DEX_LOGIN_CONFIG') -image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf) -image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf) +oauth_apps = [github_login, github_trigger, gitlab_trigger, google_login, dex_login] + +image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf, metric_queue=metric_queue) +image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf, metric_queue=metric_queue) dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf, + metric_queue=metric_queue, reporter=MetricQueueReporter(metric_queue)) -notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf) +notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf, metric_queue=metric_queue) database.configure(app.config) model.config.app_config = app.config diff --git a/buildman/templates/cloudconfig.yaml b/buildman/templates/cloudconfig.yaml index 5a9946659..af83464ce 100644 --- a/buildman/templates/cloudconfig.yaml +++ b/buildman/templates/cloudconfig.yaml @@ -7,6 +7,7 @@ ssh_authorized_keys: - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAgEAo/JkbGO6R7g1ZxARi0xWVM7FOfN02snRAcIO6vT9M7xMUkWVLgD+hM/o91lk+UFiYdql0CATobpFWncRL36KaUqsbw9/1BlI40wg296XHXSSnxhxZ4L7ytf6G1tyN319HXlI2kh9vAf/fy++yDvkH8dI3k1oLoW+mZPET6Pff04/6AXXrRlS5mhmGv9irGwiDHtVKpj6lU8DN/UtOrv1tiQ0pgwEJq05fLGoQfgPNaBCnW2z4Ubpn2gyMcMBMpSwo4hCqJePd349e4bLmFcT+gXYg7Mnup1DoTDlowFFN56wpxQbdp96IxWzU+jYPaIAuRo+BJzCyOS8qBv0Z4RZrgop0qp2JYiVwmViO6TZhIDz6loQJXUOIleQmNgTbiZx8Bwv5GY2jMYoVwlBp7yy5bRjxfbFsJ0vU7TVzNAG7oEJy/74HmHmWzRQlSlQjesr8gRbm9zgR8wqc/L107UOWFg7Cgh8ZNjKuADbXqYuda1Y9m2upcfS26UPz5l5PW5uFRMHZSi8pb1XV6/0Z8H8vwsh37Ur6aLi/5jruRmKhdlsNrB1IiDicBsPW3yg7HHSIdPU4oBNPC77yDCT3l4CKr4el81RrZt7FbJPfY+Ig9Q5O+05f6I8+ZOlJGyZ/Qfyl2aVm1HnlJKuBqPxeic8tMng/9B5N7uZL6Y3k5jFU8c= quentin - ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQDI7LtxLItapmUbt3Gs+4Oxa1i22fkx1+aJDkAjiRWPSX3+cxOzuPfHX9uFzr+qj5hy4J7ErrPp8q9alu+il9lE26GQuUxOZiaUrXu4dRCXXdCqTHARWBxGUXjkxdMp2HIzFpBxmVqcRubrgM36LBzKapdDOqQdz7XnNm5Jmf0tH/N0+TgV60P0WVY1CxmTya+JHNFVgazhd+oIGEhTyW/eszMGcFUgZet7DQFytYIQXYSwwGpGdJ+0InKAJ2SzCt/yuUlSrhrVM8vSGeami1XYmgQiyth1zjteMd8uTrc9NREH7bZTNcMFBqVYE3BYQWGRrv8pMMgP9gxgLbxtVsUl barakmich-titania - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUWB4aSjSRHCz5/6H9/EJhJVvRmPThvEzyHinaWPsuM9prBSLci9NF9WneVl30nczkvllA+w34kycdrS3fKpjTbODaEOLHBobWl3bccY0I6kr86q5z67NZffjCm/P/RL+dBaOiBWS8PV8oiDF1P6YdMo8Jk46n9fozmLCXHUuCw5BJ8PGjQqbsEzA3qFMeKZYdJHOizOfeIfKfCWYrrumVRY9v6SAUDoFOl4PZEM7QdGp9EoRYb9MNLgKLnZ4RjbcLoFwiqxY4KEM4zfjZPNOECiLCuJqvHM2QawwuO1klJ16HpJk+FzOTWQoZtT47LoE/XNSOcNtAOiD+OQ449ia1EArhm7+1DnLXvHXKIl1JtuqJz+wFCsbNSdB7P562OHAGRIxYK3DfE+0CZH1BeHYl7xiRBeCtZ+OZMIocqeJtq8taIS7Un5wnGcQWxFtQnr/f65EgbIi7G2dxPcjhr6K+GWYezsiReVVKnIClq2MHhABG9QOncKDIa47L3nyx3pm4ZfMbC2jmnK2pFgGGSfYDy4487JnAUOG1mzZ9vm4gDhatT+vZFSBOwv1e4CErBh/wYXooF5I0nGmE6y6zkKFqP+ZolJ6iXmXQ7Ea2oaGeyaprweBjkhHgghi4KbwKbClope4Zo9X9JJYBLQSW33sEEuy8MlSBpdZAbz9t/FvJaw== mjibson +- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiNawWSZL2MF99zwG9cFjGmML6agsKwaacQEoTsjcjHGixyUnqHXaLdrGma5i/uphZPkI5XRBKiuIROACY/aRoIxJUpV7AQ1Zx87cILx6fDVePvU5lW2DdhlCDUdwjuzDb/WO/c/qMWjOPqRG4q8XvB7nhuORMMgdpDXWVH4LXPmFez1iIBCKNk04l6Se7wiEOQjaBnTDiBDYlWD78r6RdiAU5eIxpq+lKBDTcET0vegwcA/WE4YOlYBbOrgtHrgwWqG/pXxUu77aapDOmfjtDrgim6XP5kEnytg5gCaN9iLvIpT8b1wD/1Z+LoNSZg6m9gkcC2yTRI0apOBa2G8lz silas@pro.local write_files: - path: /root/overrides.list diff --git a/conf/init/zz_release.sh b/conf/init/zz_release.sh new file mode 100755 index 000000000..152494cff --- /dev/null +++ b/conf/init/zz_release.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +source venv/bin/activate + +export PYTHONPATH=. + +python /release.py diff --git a/config.py b/config.py index e27b07548..e4061b16c 100644 --- a/config.py +++ b/config.py @@ -152,6 +152,9 @@ class DefaultConfig(object): # Feature Flag: Whether Google login is supported. FEATURE_GOOGLE_LOGIN = False + # Feature Flag: Whether Dex login is supported. + FEATURE_DEX_LOGIN = False + # Feature flag, whether to enable olark chat FEATURE_OLARK_CHAT = False @@ -183,6 +186,9 @@ class DefaultConfig(object): # Feature Flag: Whether to automatically replicate between storage engines. FEATURE_STORAGE_REPLICATION = False + # Feature Flag: Whether users can directly login to the UI.
+ FEATURE_DIRECT_LOGIN = True + BUILD_MANAGER = ('enterprise', {}) DISTRIBUTED_STORAGE_CONFIG = { diff --git a/data/database.py b/data/database.py index 369649db3..fb2cb2d20 100644 --- a/data/database.py +++ b/data/database.py @@ -123,6 +123,7 @@ db = Proxy() read_slave = Proxy() db_random_func = CallableProxy() db_for_update = CallableProxy() +db_transaction = CallableProxy() def validate_database_url(url, db_kwargs, connect_timeout=5): @@ -168,6 +169,10 @@ def configure(config_object): if read_slave_uri is not None: read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs)) + def _db_transaction(): + return config_object['DB_TRANSACTION_FACTORY'](db) + + db_transaction.initialize(_db_transaction) def random_string_generator(length=16): def random_string(): @@ -377,14 +382,15 @@ class Repository(BaseModel): return sorted_models.index(cmp_fk.model_class.__name__) filtered_ops.sort(key=sorted_model_key) - for query, fk in filtered_ops: - model = fk.model_class - if fk.null and not delete_nullable: - model.update(**{fk.name: None}).where(query).execute() - else: - model.delete().where(query).execute() + with db_transaction(): + for query, fk in filtered_ops: + model = fk.model_class + if fk.null and not delete_nullable: + model.update(**{fk.name: None}).where(query).execute() + else: + model.delete().where(query).execute() - return self.delete().where(self._pk_expr()).execute() + return self.delete().where(self._pk_expr()).execute() class Star(BaseModel): user = ForeignKeyField(User, index=True) @@ -469,6 +475,9 @@ class RepositoryBuildTrigger(BaseModel): pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot', robot_null_delete=True) + # TODO(jschorr): Remove this column once we verify the backfill has succeeded. 
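# --- editor's aside (not part of the commit): usage sketch for the db_transaction
# CallableProxy introduced in data/database.py above. The proxy is initialized inside
# configure() from DB_TRANSACTION_FACTORY, so model code can open a transaction without
# reaching into application config (which is why data/model/__init__.py drops its own
# db_transaction helper later in this diff). The function below is hypothetical.
from data.database import db_transaction

def delete_rows_atomically(rows):
  # Everything in the block commits or rolls back as a unit, mirroring the new
  # transactional cascaded delete in Repository.delete_instance above.
  with db_transaction():
    for row in rows:
      row.delete_instance()
# --- end aside ---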
+ used_legacy_github = BooleanField(null=True, default=False) + class EmailConfirmation(BaseModel): code = CharField(default=random_string_generator(), unique=True, index=True) @@ -778,6 +787,33 @@ class BlobUpload(BaseModel): ) +class QuayService(BaseModel): + name = CharField(index=True, unique=True) + + +class QuayRegion(BaseModel): + name = CharField(index=True, unique=True) + + +class QuayRelease(BaseModel): + service = ForeignKeyField(QuayService) + version = CharField() + region = ForeignKeyField(QuayRegion) + reverted = BooleanField(default=False) + created = DateTimeField(default=datetime.now, index=True) + + class Meta: + database = db + read_slaves = (read_slave,) + indexes = ( + # unique release per region + (('service', 'version', 'region'), True), + + # get recent releases + (('service', 'region', 'created'), False), + ) + + all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission, Visibility, RepositoryTag, EmailConfirmation, FederatedLogin, LoginService, QueueItem, RepositoryBuild, Team, TeamMember, TeamRole, LogEntryKind, LogEntry, @@ -787,4 +823,5 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission, ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification, RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage, TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind, - AccessTokenKind, Star, RepositoryActionCount, TagManifest, BlobUpload, UserRegion] + AccessTokenKind, Star, RepositoryActionCount, TagManifest, UserRegion, + QuayService, QuayRegion, QuayRelease, BlobUpload] diff --git a/data/migrations/env.py b/data/migrations/env.py index 108c4c496..c53421f50 100644 --- a/data/migrations/env.py +++ b/data/migrations/env.py @@ -1,8 +1,11 @@ from __future__ import with_statement +import logging import os from alembic import context +from alembic.revision import ResolutionError +from alembic.util import CommandError from sqlalchemy import engine_from_config, pool from logging.config import fileConfig from urllib import unquote, quote @@ -11,6 +14,7 @@ from peewee import SqliteDatabase from data.database import all_models, db from app import app from data.model.sqlalchemybridge import gen_sqlalchemy_metadata +from release import GIT_HEAD, REGION, SERVICE from util.morecollections import AttrDict config = context.config @@ -21,6 +25,8 @@ config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI'])) if config.config_file_name: fileConfig(config.config_file_name) +logger = logging.getLogger(__name__) + # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel @@ -77,7 +83,23 @@ def run_migrations_online(): try: with context.begin_transaction(): - context.run_migrations(tables=tables) + try: + context.run_migrations(tables=tables) + except (CommandError, ResolutionError) as ex: + if 'No such revision' not in str(ex): + raise + + if not REGION or not GIT_HEAD: + raise + + from data.model.release import get_recent_releases + + # ignore revision error if we're running the previous release + releases = list(get_recent_releases(SERVICE, REGION).offset(1).limit(1)) + if releases and releases[0].version == GIT_HEAD: + logger.warn('Skipping database migration because revision not found') + else: + raise finally: connection.close() diff --git a/data/migrations/versions/1c0f6ede8992_quay_releases.py b/data/migrations/versions/1c0f6ede8992_quay_releases.py new file mode 100644 index 000000000..92583881d --- /dev/null +++ 
b/data/migrations/versions/1c0f6ede8992_quay_releases.py @@ -0,0 +1,55 @@ +"""Quay releases + +Revision ID: 1c0f6ede8992 +Revises: 545794454f49 +Create Date: 2015-09-15 15:46:09.784607 + +""" + +# revision identifiers, used by Alembic. +revision = '1c0f6ede8992' +down_revision = '545794454f49' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.create_table('quayregion', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_quayregion')) + ) + op.create_index('quayregion_name', 'quayregion', ['name'], unique=True) + op.create_table('quayservice', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_quayservice')) + ) + op.create_index('quayservice_name', 'quayservice', ['name'], unique=True) + op.create_table('quayrelease', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('service_id', sa.Integer(), nullable=False), + sa.Column('version', sa.String(length=255), nullable=False), + sa.Column('region_id', sa.Integer(), nullable=False), + sa.Column('reverted', sa.Boolean(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['region_id'], ['quayregion.id'], name=op.f('fk_quayrelease_region_id_quayregion')), + sa.ForeignKeyConstraint(['service_id'], ['quayservice.id'], name=op.f('fk_quayrelease_service_id_quayservice')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_quayrelease')) + ) + op.create_index('quayrelease_created', 'quayrelease', ['created'], unique=False) + op.create_index('quayrelease_region_id', 'quayrelease', ['region_id'], unique=False) + op.create_index('quayrelease_service_id', 'quayrelease', ['service_id'], unique=False) + op.create_index('quayrelease_service_id_region_id_created', 'quayrelease', ['service_id', 'region_id', 'created'], unique=False) + op.create_index('quayrelease_service_id_version_region_id', 'quayrelease', ['service_id', 'version', 'region_id'], unique=True) + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.drop_table('quayrelease') + op.drop_table('quayservice') + op.drop_table('quayregion') + ### end Alembic commands ### diff --git a/data/migrations/versions/3a3bb77e17d5_add_support_for_dex_login.py b/data/migrations/versions/3a3bb77e17d5_add_support_for_dex_login.py new file mode 100644 index 000000000..5e883237e --- /dev/null +++ b/data/migrations/versions/3a3bb77e17d5_add_support_for_dex_login.py @@ -0,0 +1,26 @@ +"""Add support for Dex login + +Revision ID: 3a3bb77e17d5 +Revises: 9512773a4a2 +Create Date: 2015-09-04 15:57:38.007822 + +""" + +# revision identifiers, used by Alembic. 
+revision = '3a3bb77e17d5' +down_revision = '9512773a4a2' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(tables): + op.bulk_insert(tables.loginservice, [{'id': 7, 'name': 'dex'}]) + + +def downgrade(tables): + op.execute( + tables.loginservice.delete() + .where(tables.loginservice.c.name == op.inline_literal('dex')) + ) + diff --git a/data/migrations/versions/3ff4fbc94644_migrate_github_triggers_to_use_deploy_.py b/data/migrations/versions/3ff4fbc94644_migrate_github_triggers_to_use_deploy_.py new file mode 100644 index 000000000..820b21548 --- /dev/null +++ b/data/migrations/versions/3ff4fbc94644_migrate_github_triggers_to_use_deploy_.py @@ -0,0 +1,28 @@ +"""Migrate GitHub triggers to use deploy keys + +Revision ID: 3ff4fbc94644 +Revises: 4d5f6716df0 +Create Date: 2015-09-16 17:50:22.034146 + +""" + +# revision identifiers, used by Alembic. +revision = '3ff4fbc94644' +down_revision = '4d5f6716df0' + +from alembic import op +import sqlalchemy as sa + +from util.migrate.migrategithubdeploykeys import backfill_github_deploykeys + + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + backfill_github_deploykeys() + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### diff --git a/data/migrations/versions/4d5f6716df0_add_legacy_column_for_github_backfill_.py b/data/migrations/versions/4d5f6716df0_add_legacy_column_for_github_backfill_.py new file mode 100644 index 000000000..17f4360b5 --- /dev/null +++ b/data/migrations/versions/4d5f6716df0_add_legacy_column_for_github_backfill_.py @@ -0,0 +1,26 @@ +"""Add legacy column for GitHub backfill tracking + +Revision ID: 4d5f6716df0 +Revises: 1c0f6ede8992 +Create Date: 2015-09-16 17:49:40.334540 + +""" + +# revision identifiers, used by Alembic. +revision = '4d5f6716df0' +down_revision = '1c0f6ede8992' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True)) + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.drop_column('repositorybuildtrigger', 'used_legacy_github') + ### end Alembic commands ### diff --git a/data/migrations/versions/545794454f49_migrate_image_data_back_to_image_table.py b/data/migrations/versions/545794454f49_migrate_image_data_back_to_image_table.py new file mode 100644 index 000000000..17af21eb3 --- /dev/null +++ b/data/migrations/versions/545794454f49_migrate_image_data_back_to_image_table.py @@ -0,0 +1,34 @@ +"""Migrate image data back to image table + +Revision ID: 545794454f49 +Revises: 3a3bb77e17d5 +Create Date: 2015-09-15 11:48:47.554255 + +""" + +# revision identifiers, used by Alembic. +revision = '545794454f49' +down_revision = '3a3bb77e17d5' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! 
### + op.add_column('image', sa.Column('aggregate_size', sa.BigInteger(), nullable=True)) + op.add_column('image', sa.Column('command', sa.Text(), nullable=True)) + op.add_column('image', sa.Column('comment', sa.Text(), nullable=True)) + op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True)) + op.add_column('image', sa.Column('v1_json_metadata', sa.Text(), nullable=True)) + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.drop_column('image', 'v1_json_metadata') + op.drop_column('image', 'created') + op.drop_column('image', 'comment') + op.drop_column('image', 'command') + op.drop_column('image', 'aggregate_size') + ### end Alembic commands ### diff --git a/data/model/__init__.py b/data/model/__init__.py index 771801009..0c8122c31 100644 --- a/data/model/__init__.py +++ b/data/model/__init__.py @@ -1,4 +1,4 @@ -from data.database import db +from data.database import db, db_transaction class DataModelException(Exception): @@ -88,10 +88,6 @@ class Config(object): config = Config() -def db_transaction(): - return config.app_config['DB_TRANSACTION_FACTORY'](db) - - # There MUST NOT be any circular dependencies between these subsections. If there are fix it by # moving the minimal number of things to _basequery # TODO document the methods and modules for each one of the submodules below. diff --git a/data/model/build.py b/data/model/build.py index 70501c5dc..6fc4324bb 100644 --- a/data/model/build.py +++ b/data/model/build.py @@ -4,7 +4,7 @@ from peewee import JOIN_LEFT_OUTER from datetime import timedelta, datetime from data.database import (BuildTriggerService, RepositoryBuildTrigger, Repository, Namespace, User, - RepositoryBuild, BUILD_PHASE, db_for_update) + RepositoryBuild, BUILD_PHASE, db_for_update, db_random_func) from data.model import (InvalidBuildTriggerException, InvalidRepositoryBuildException, db_transaction, user as user_model) @@ -163,11 +163,24 @@ def cancel_repository_build(build, work_queue): return True -def archivable_buildlogs_query(): +def get_archivable_build(): presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE - return (RepositoryBuild - .select() + candidates = (RepositoryBuild + .select(RepositoryBuild.id) .where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) | (RepositoryBuild.phase == BUILD_PHASE.ERROR) | (RepositoryBuild.started < presumed_dead_date), - RepositoryBuild.logs_archived == False)) + RepositoryBuild.logs_archived == False) + .limit(50) + .alias('candidates')) + + try: + found_id = (RepositoryBuild + .select(candidates.c.id) + .from_(candidates) + .order_by(db_random_func()) + .get()) + return RepositoryBuild.get(id=found_id) + except RepositoryBuild.DoesNotExist: + return None + diff --git a/data/model/image.py b/data/model/image.py index be1d5b589..922da8280 100644 --- a/data/model/image.py +++ b/data/model/image.py @@ -277,15 +277,24 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created # We cleanup any old checksum in case it's a retry after a fail fetched.storage.checksum = None - fetched.created = datetime.now() + now = datetime.now() + # TODO stop writing to storage when all readers are removed + fetched.storage.created = now + fetched.created = now if created_date_str is not None: try: - fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None) + # TODO stop writing to storage fields when all readers are removed + parsed_created_time = 
dateutil.parser.parse(created_date_str).replace(tzinfo=None) + fetched.created = parsed_created_time + fetched.storage.created = parsed_created_time except: # parse raises different exceptions, so we cannot use a specific kind of handler here. pass + # TODO stop writing to storage fields when all readers are removed + fetched.storage.comment = comment + fetched.storage.command = command fetched.comment = comment fetched.command = command fetched.v1_json_metadata = v1_json_metadata @@ -327,13 +336,18 @@ def set_image_size(docker_image_id, namespace_name, repository_name, image_size, .where(Image.id << ancestors) .scalar()) + image_size + # TODO stop writing to storage when all readers are removed image.storage.aggregate_size = total_size + image.aggregate_size = total_size except Image.DoesNotExist: pass else: + # TODO stop writing to storage when all readers are removed image.storage.aggregate_size = image_size + image.aggregate_size = image_size image.storage.save() + image.save() return image diff --git a/data/model/release.py b/data/model/release.py new file mode 100644 index 000000000..883ae146e --- /dev/null +++ b/data/model/release.py @@ -0,0 +1,23 @@ +from data.database import QuayRelease, QuayRegion, QuayService + + +def set_region_release(service_name, region_name, version): + service, _ = QuayService.create_or_get(name=service_name) + region, _ = QuayRegion.create_or_get(name=region_name) + + return QuayRelease.create_or_get(service=service, version=version, region=region) + + +def get_recent_releases(service_name, region_name): + return (QuayRelease + .select(QuayRelease) + .join(QuayService) + .switch(QuayRelease) + .join(QuayRegion) + .where( + QuayService.name == service_name, + QuayRegion.name == region_name, + QuayRelease.reverted == False, + ) + .order_by(QuayRelease.created.desc()) + ) diff --git a/data/model/tag.py b/data/model/tag.py index b314be476..9e050c095 100644 --- a/data/model/tag.py +++ b/data/model/tag.py @@ -135,6 +135,7 @@ def list_repository_tag_history(repo_obj, page=1, size=100, specific_tag=None): .where(RepositoryTag.repository == repo_obj) .where(RepositoryTag.hidden == False) .order_by(RepositoryTag.lifetime_start_ts.desc()) + .order_by(RepositoryTag.name) .paginate(page, size)) if specific_tag: diff --git a/data/queue.py b/data/queue.py index 289b99ada..b787d22be 100644 --- a/data/queue.py +++ b/data/queue.py @@ -26,9 +26,10 @@ class MetricQueueReporter(object): class WorkQueue(object): def __init__(self, queue_name, transaction_factory, - canonical_name_match_list=None, reporter=None): + canonical_name_match_list=None, reporter=None, metric_queue=None): self._queue_name = queue_name self._reporter = reporter + self._metric_queue = metric_queue self._transaction_factory = transaction_factory self._currently_processing = False @@ -86,12 +87,20 @@ class WorkQueue(object): return (running_count, available_not_running_count, available_count) def update_metrics(self): - if self._reporter is None: + if self._reporter is None and self._metric_queue is None: return (running_count, available_not_running_count, available_count) = self.get_metrics() - self._reporter(self._currently_processing, running_count, - running_count + available_not_running_count) + + if self._metric_queue: + dim = {'queue': self._queue_name} + self._metric_queue.put('Running', running_count, dimensions=dim) + self._metric_queue.put('AvailableNotRunning', available_not_running_count, dimensions=dim) + self._metric_queue.put('Available', available_count, dimensions=dim) + + if 
self._reporter: + self._reporter(self._currently_processing, running_count, + running_count + available_not_running_count) def has_retries_remaining(self, item_id): """ Returns whether the queue item with the given id has any retries remaining. If the @@ -185,7 +194,12 @@ class WorkQueue(object): def complete(self, completed_item): with self._transaction_factory(db): - completed_item_obj = self._item_by_id_for_update(completed_item.id) + try: + completed_item_obj = self._item_by_id_for_update(completed_item.id) + except QueueItem.DoesNotExist: + self._currently_processing = False + return + completed_item_obj.delete_instance(recursive=True) self._currently_processing = False diff --git a/data/userfiles.py b/data/userfiles.py index f4b786df5..6ad461df6 100644 --- a/data/userfiles.py +++ b/data/userfiles.py @@ -1,11 +1,13 @@ import os import logging import magic +import urlparse from uuid import uuid4 from flask import url_for, request, send_file, make_response, abort from flask.views import View from _pyio import BufferedReader +from util import get_app_url logger = logging.getLogger(__name__) @@ -77,7 +79,9 @@ class DelegateUserfiles(object): if url is None: with self._app.app_context() as ctx: ctx.url_adapter = self._build_url_adapter() - return (url_for(self._handler_name, file_id=file_id, _external=True), file_id) + file_relative_url = url_for(self._handler_name, file_id=file_id) + file_url = urlparse.urljoin(get_app_url(self._app.config), file_relative_url) + return (file_url, file_id) return (url, file_id) @@ -97,7 +101,8 @@ class DelegateUserfiles(object): if url is None: with self._app.app_context() as ctx: ctx.url_adapter = self._build_url_adapter() - return url_for(self._handler_name, file_id=file_id, _external=True) + file_relative_url = url_for(self._handler_name, file_id=file_id) + return urlparse.urljoin(get_app_url(self._app.config), file_relative_url) return url diff --git a/endpoints/api/billing.py b/endpoints/api/billing.py index d889c7c82..aae577908 100644 --- a/endpoints/api/billing.py +++ b/endpoints/api/billing.py @@ -12,12 +12,42 @@ from auth.permissions import AdministerOrganizationPermission from auth.auth_context import get_authenticated_user from auth import scopes from data import model -from data.billing import PLANS +from data.billing import PLANS, get_plan import features import uuid import json +def lookup_allowed_private_repos(namespace): + """ Returns false if the given namespace has used its allotment of private repositories. """ + # Lookup the namespace and verify it has a subscription. + namespace_user = model.user.get_namespace_user(namespace) + if namespace_user is None: + return False + + if not namespace_user.stripe_id: + return False + + # Ask Stripe for the subscribed plan. + # TODO: Can we cache this or make it faster somehow? + try: + cus = billing.Customer.retrieve(namespace_user.stripe_id) + except stripe.APIConnectionError: + abort(503, message='Cannot contact Stripe') + + if not cus.subscription: + return False + + # Find the number of private repositories used by the namespace and compare it to the + # plan subscribed. 
+ private_repos = model.user.get_private_repo_count(namespace) + current_plan = get_plan(cus.subscription.plan.id) + if current_plan is None: + return False + + return private_repos < current_plan['privateRepos'] + + def carderror_response(e): return {'carderror': e.message}, 402 diff --git a/endpoints/api/repository.py b/endpoints/api/repository.py index 215931785..b241a70a0 100644 --- a/endpoints/api/repository.py +++ b/endpoints/api/repository.py @@ -2,6 +2,7 @@ import logging import datetime +import features from datetime import timedelta @@ -15,7 +16,8 @@ from endpoints.api import (truthy_bool, format_date, nickname, log_action, valid require_repo_read, require_repo_write, require_repo_admin, RepositoryParamResource, resource, query_param, parse_args, ApiResource, request_error, require_scope, Unauthorized, NotFound, InvalidRequest, - path_param) + path_param, ExceedsLicenseException) +from endpoints.api.billing import lookup_allowed_private_repos from auth.permissions import (ModifyRepositoryPermission, AdministerRepositoryPermission, CreateRepositoryPermission) @@ -26,6 +28,18 @@ from auth import scopes logger = logging.getLogger(__name__) +def check_allowed_private_repos(namespace): + """ Checks to see if the given namespace has reached its private repository limit. If so, + raises an ExceedsLicenseException. + """ + # Not enabled if billing is disabled. + if not features.BILLING: + return + + if not lookup_allowed_private_repos(namespace): + raise ExceedsLicenseException() + + @resource('/v1/repository') class RepositoryList(ApiResource): """Operations for creating and listing repositories.""" @@ -87,6 +101,8 @@ class RepositoryList(ApiResource): raise request_error(message='Repository already exists') visibility = req['visibility'] + if visibility == 'private': + check_allowed_private_repos(namespace_name) repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility) repo.description = req['description'] @@ -339,7 +355,11 @@ class RepositoryVisibility(RepositoryParamResource): repo = model.repository.get_repository(namespace, repository) if repo: values = request.get_json() - model.repository.set_repository_visibility(repo, values['visibility']) + visibility = values['visibility'] + if visibility == 'private': + check_allowed_private_repos(namespace) + + model.repository.set_repository_visibility(repo, visibility) log_action('change_repo_visibility', namespace, {'repo': repository, 'visibility': values['visibility']}, repo=repo) diff --git a/endpoints/api/suconfig.py b/endpoints/api/suconfig.py index 1b24da70c..aaea5a309 100644 --- a/endpoints/api/suconfig.py +++ b/endpoints/api/suconfig.py @@ -9,7 +9,7 @@ from endpoints.api import (ApiResource, nickname, resource, internal_only, show_ require_fresh_login, request, validate_json_request, verify_not_prod) from endpoints.common import common_login -from app import app, CONFIG_PROVIDER, superusers +from app import app, config_provider, superusers from data import model from data.database import configure from auth.permissions import SuperUserPermission @@ -56,13 +56,13 @@ class SuperUserRegistryStatus(ApiResource): """ Returns the status of the registry. """ # If there is no conf/stack volume, then report that status. - if not CONFIG_PROVIDER.volume_exists(): + if not config_provider.volume_exists(): return { 'status': 'missing-config-dir' } # If there is no config file, we need to setup the database.
- if not CONFIG_PROVIDER.yaml_exists(): + if not config_provider.config_exists(): return { 'status': 'config-db' } @@ -76,7 +76,7 @@ class SuperUserRegistryStatus(ApiResource): # If we have SETUP_COMPLETE, then we're ready to go! if app.config.get('SETUP_COMPLETE', False): return { - 'requires_restart': CONFIG_PROVIDER.requires_restart(app.config), + 'requires_restart': config_provider.requires_restart(app.config), 'status': 'ready' } @@ -107,10 +107,10 @@ class SuperUserSetupDatabase(ApiResource): """ Invokes the alembic upgrade process. """ # Note: This method is called after the database configured is saved, but before the # database has any tables. Therefore, we only allow it to be run in that unique case. - if CONFIG_PROVIDER.yaml_exists() and not database_is_valid(): + if config_provider.config_exists() and not database_is_valid(): # Note: We need to reconfigure the database here as the config has changed. combined = dict(**app.config) - combined.update(CONFIG_PROVIDER.get_yaml()) + combined.update(config_provider.get_config()) configure(combined) app.config['DB_URI'] = combined['DB_URI'] @@ -185,7 +185,7 @@ class SuperUserConfig(ApiResource): def get(self): """ Returns the currently defined configuration, if any. """ if SuperUserPermission().can(): - config_object = CONFIG_PROVIDER.get_yaml() + config_object = config_provider.get_config() return { 'config': config_object } @@ -196,18 +196,18 @@ class SuperUserConfig(ApiResource): @verify_not_prod @validate_json_request('UpdateConfig') def put(self): - """ Updates the config.yaml file. """ + """ Updates the config override file. """ # Note: This method is called to set the database configuration before super users exists, # so we also allow it to be called if there is no valid registry configuration setup. - if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can(): + if not config_provider.config_exists() or SuperUserPermission().can(): config_object = request.get_json()['config'] hostname = request.get_json()['hostname'] # Add any enterprise defaults missing from the config. add_enterprise_config_defaults(config_object, app.config['SECRET_KEY'], hostname) - # Write the configuration changes to the YAML file. - CONFIG_PROVIDER.save_yaml(config_object) + # Write the configuration changes to the config override file. + config_provider.save_config(config_object) # If the authentication system is not the database, link the superuser account to the # the authentication system chosen. @@ -238,7 +238,7 @@ class SuperUserConfigFile(ApiResource): if SuperUserPermission().can(): return { - 'exists': CONFIG_PROVIDER.volume_file_exists(filename) + 'exists': config_provider.volume_file_exists(filename) } abort(403) @@ -252,12 +252,12 @@ class SuperUserConfigFile(ApiResource): # Note: This method can be called before the configuration exists # to upload the database SSL cert. - if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can(): + if not config_provider.config_exists() or SuperUserPermission().can(): uploaded_file = request.files['file'] if not uploaded_file: abort(400) - CONFIG_PROVIDER.save_volume_file(filename, uploaded_file) + config_provider.save_volume_file(filename, uploaded_file) return { 'status': True } @@ -309,7 +309,7 @@ class SuperUserCreateInitialSuperUser(ApiResource): # # We do this special security check because at the point this method is called, the database # is clean but does not (yet) have any super users for our permissions code to check against. 
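# --- editor's aside (not part of the commit): the endpoints above now go through the
# config_provider instance that app.py builds via get_config_provider(). A plausible
# sketch of that factory, reconstructed from its call site; KubernetesConfigProvider is
# an assumption (the implementations live under util/config/provider/), while
# TestConfigProvider and FileConfigProvider appear elsewhere in this diff.
def get_config_provider(config_volume, yaml_filename, py_filename,
                        testing=False, kubernetes=False):
  if testing:
    return TestConfigProvider()
  if kubernetes:
    return KubernetesConfigProvider(config_volume, yaml_filename, py_filename)
  return FileConfigProvider(config_volume, yaml_filename, py_filename)
# --- end aside ---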
- if CONFIG_PROVIDER.yaml_exists() and not database_has_users(): + if config_provider.config_exists() and not database_has_users(): data = request.get_json() username = data['username'] password = data['password'] @@ -319,9 +319,9 @@ class SuperUserCreateInitialSuperUser(ApiResource): superuser = model.user.create_user(username, password, email, auto_verify=True) # Add the user to the config. - config_object = CONFIG_PROVIDER.get_yaml() + config_object = config_provider.get_config() config_object['SUPER_USERS'] = [username] - CONFIG_PROVIDER.save_yaml(config_object) + config_provider.save_config(config_object) # Update the in-memory config for the new superuser. superusers.register_superuser(username) @@ -369,7 +369,7 @@ class SuperUserConfigValidate(ApiResource): # Note: This method is called to validate the database configuration before super users exists, # so we also allow it to be called if there is no valid registry configuration setup. Note that # this is also safe since this method does not access any information not given in the request. - if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can(): + if not config_provider.config_exists() or SuperUserPermission().can(): config = request.get_json()['config'] return validate_service_for_config(service, config, request.get_json().get('password', '')) diff --git a/endpoints/api/superuser.py b/endpoints/api/superuser.py index e2bbee4c5..8d66a2fce 100644 --- a/endpoints/api/superuser.py +++ b/endpoints/api/superuser.py @@ -13,7 +13,7 @@ from app import app, avatar, superusers, authentication from endpoints.api import (ApiResource, nickname, resource, validate_json_request, internal_only, require_scope, show_if, parse_args, query_param, abort, require_fresh_login, path_param, verify_not_prod) -from endpoints.api.logs import get_logs +from endpoints.api.logs import get_logs, get_aggregate_logs from data import model from auth.permissions import SuperUserPermission from auth import scopes @@ -83,6 +83,26 @@ class SuperUserSystemLogServices(ApiResource): abort(403) +@resource('/v1/superuser/aggregatelogs') +@internal_only +class SuperUserAggregateLogs(ApiResource): + """ Resource for fetching aggregated logs for the current user. """ + @require_fresh_login + @verify_not_prod + @nickname('listAllAggregateLogs') + @parse_args + @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str) + @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str) + def get(self, args): + """ Returns the aggregated logs for the current system. """ + if SuperUserPermission().can(): + start_time = args['starttime'] + end_time = args['endtime'] + + return get_aggregate_logs(start_time, end_time) + + abort(403) + @resource('/v1/superuser/logs') @internal_only @@ -93,9 +113,9 @@ class SuperUserLogs(ApiResource): @verify_not_prod @nickname('listAllLogs') @parse_args - @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str) - @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str) - @query_param('performer', 'Username for which to filter logs.', type=str) + @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str) + @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str) + @query_param('page', 'The page number for the logs', type=int, default=1) @require_scope(scopes.SUPERUSER) def get(self, args): """ List the usage logs for the current system. 
""" @@ -103,7 +123,7 @@ class SuperUserLogs(ApiResource): start_time = args['starttime'] end_time = args['endtime'] - return get_logs(start_time, end_time) + return get_logs(start_time, end_time, page=args['page']) abort(403) diff --git a/endpoints/api/tag.py b/endpoints/api/tag.py index 5246ee9c6..b8ad1906a 100644 --- a/endpoints/api/tag.py +++ b/endpoints/api/tag.py @@ -43,7 +43,7 @@ class ListRepositoryTags(RepositoryParamResource): specific_tag = args.get('specificTag') or None - page = min(1, args.get('start', 1)) + page = max(1, args.get('page', 1)) limit = min(100, max(1, args.get('limit', 50))) # Note: We ask for limit+1 here, so we can check to see if there are diff --git a/endpoints/api/user.py b/endpoints/api/user.py index de928597f..7c3094cc7 100644 --- a/endpoints/api/user.py +++ b/endpoints/api/user.py @@ -306,6 +306,7 @@ class User(ApiResource): return user_view(user) @show_if(features.USER_CREATION) + @show_if(features.DIRECT_LOGIN) @nickname('createNewUser') @internal_only @validate_json_request('NewUser') @@ -496,6 +497,7 @@ class ConvertToOrganization(ApiResource): @resource('/v1/signin') +@show_if(features.DIRECT_LOGIN) @internal_only class Signin(ApiResource): """ Operations for signing in the user. """ @@ -595,6 +597,7 @@ class Signout(ApiResource): @resource('/v1/detachexternal/') +@show_if(features.DIRECT_LOGIN) @internal_only class DetachExternal(ApiResource): """ Resource for detaching an external login. """ diff --git a/endpoints/decorated.py b/endpoints/decorated.py index b51e1ee2e..d1e654da0 100644 --- a/endpoints/decorated.py +++ b/endpoints/decorated.py @@ -4,7 +4,7 @@ import json from flask import make_response from app import app from util.useremails import CannotSendEmailException -from util.config.provider import CannotWriteConfigException +from util.config.provider.baseprovider import CannotWriteConfigException from data import model logger = logging.getLogger(__name__) diff --git a/endpoints/oauthlogin.py b/endpoints/oauthlogin.py index ae41af0ef..665801a6d 100644 --- a/endpoints/oauthlogin.py +++ b/endpoints/oauthlogin.py @@ -5,7 +5,7 @@ from flask import request, redirect, url_for, Blueprint from flask.ext.login import current_user from endpoints.common import render_page_template, common_login, route_show_if -from app import app, analytics, get_app_url, github_login, google_login +from app import app, analytics, get_app_url, github_login, google_login, dex_login from data import model from util.names import parse_repository_name from util.validation import generate_valid_usernames @@ -14,6 +14,7 @@ from auth.auth import require_session_login from peewee import IntegrityError import features +from util.security.strictjwt import decode, InvalidTokenError logger = logging.getLogger(__name__) client = app.config['HTTPCLIENT'] @@ -24,7 +25,7 @@ def render_ologin_error(service_name, return render_page_template('ologinerror.html', service_name=service_name, error_message=error_message, service_url=get_app_url(), - user_creation=features.USER_CREATION) + user_creation=features.USER_CREATION and features.DIRECT_LOGIN) def get_user(service, token): @@ -86,7 +87,7 @@ def conduct_oauth_login(service, user_id, username, email, metadata={}): return render_ologin_error(service_name) -def get_google_username(user_data): +def get_email_username(user_data): username = user_data['email'] at = username.find('@') if at > 0: @@ -108,7 +109,7 @@ def google_oauth_callback(): if not user_data or not user_data.get('id', None) or not user_data.get('email', None): return 
render_ologin_error('Google') - username = get_google_username(user_data) + username = get_email_username(user_data) metadata = { 'service_username': user_data['email'] } @@ -194,7 +195,7 @@ def google_oauth_attach(): google_id = user_data['id'] user_obj = current_user.db_user() - username = get_google_username(user_data) + username = get_email_username(user_data) metadata = { 'service_username': user_data['email'] } @@ -236,3 +237,83 @@ def github_oauth_attach(): return render_ologin_error('GitHub', err) return redirect(url_for('web.user')) + + +def decode_user_jwt(token, oidc_provider): + try: + return decode(token, oidc_provider.get_public_key(), algorithms=['RS256'], + audience=oidc_provider.client_id(), + issuer=oidc_provider.issuer) + except InvalidTokenError: + # Public key may have expired. Try to retrieve an updated public key and use it to decode. + return decode(token, oidc_provider.get_public_key(force_refresh=True), algorithms=['RS256'], + audience=oidc_provider.client_id(), + issuer=oidc_provider.issuer) + + +@oauthlogin.route('/dex/callback', methods=['GET', 'POST']) +@route_show_if(features.DEX_LOGIN) +def dex_oauth_callback(): + error = request.values.get('error', None) + if error: + return render_ologin_error(dex_login.public_title, error) + + code = request.values.get('code') + if not code: + return render_ologin_error(dex_login.public_title, 'Missing OAuth code') + + token = dex_login.exchange_code_for_token(app.config, client, code, client_auth=True, + form_encode=True) + + try: + payload = decode_user_jwt(token, dex_login) + except InvalidTokenError: + logger.exception('Exception when decoding returned JWT') + return render_ologin_error(dex_login.public_title, + 'Could not decode response. Please contact your system administrator about this error.') + + username = get_email_username(payload) + metadata = {} + + dex_id = payload['sub'] + email_address = payload['email'] + + if not payload.get('email_verified', False): + return render_ologin_error(dex_login.public_title, + 'A verified e-mail address is required for login. Please verify your ' + + 'e-mail address in %s and try again.' % dex_login.public_title) + + + return conduct_oauth_login(dex_login, dex_id, username, email_address, + metadata=metadata) + + +@oauthlogin.route('/dex/callback/attach', methods=['GET', 'POST']) +@route_show_if(features.DEX_LOGIN) +@require_session_login +def dex_oauth_attach(): + code = request.args.get('code') + token = dex_login.exchange_code_for_token(app.config, client, code, redirect_suffix='/attach', + client_auth=True, form_encode=True) + if not token: + return render_ologin_error(dex_login.public_title) + + try: + payload = decode_user_jwt(token, dex_login) + except InvalidTokenError: + logger.exception('Exception when decoding returned JWT') + return render_ologin_error(dex_login.public_title, + 'Could not decode response.
Please contact your system administrator about this error.') + + user_obj = current_user.db_user() + dex_id = payload['sub'] + metadata = {} + + try: + model.user.attach_federated_login(user_obj, 'dex', dex_id, metadata=metadata) + except IntegrityError: + err = '%s account is already attached to a %s account' % (dex_login.public_title, + app.config['REGISTRY_TITLE_SHORT']) + return render_ologin_error(dex_login.public_title, err) + + return redirect(url_for('web.user')) diff --git a/endpoints/trackhelper.py b/endpoints/trackhelper.py index b3e2d86eb..7635d2a19 100644 --- a/endpoints/trackhelper.py +++ b/endpoints/trackhelper.py @@ -1,4 +1,5 @@ import logging +import random from app import analytics, app, userevents from data import model @@ -7,7 +8,7 @@ from auth.auth_context import get_authenticated_user, get_validated_token, get_v logger = logging.getLogger(__name__) -def track_and_log(event_name, repo, **kwargs): +def track_and_log(event_name, repo, analytics_name=None, analytics_sample=1, **kwargs): repository = repo.name namespace = repo.namespace_user.username metadata = { @@ -62,8 +63,11 @@ def track_and_log(event_name, repo, **kwargs): event.publish_event_data('docker-cli', user_event_data) # Save the action to mixpanel. - logger.debug('Logging the %s to Mixpanel', event_name) - analytics.track(analytics_id, event_name, extra_params) + if random.random() < analytics_sample: + if analytics_name is None: + analytics_name = event_name + logger.debug('Logging the %s to Mixpanel', analytics_name) + analytics.track(analytics_id, analytics_name, extra_params) # Log the action to the database. logger.debug('Logging the %s to logs system', event_name) diff --git a/endpoints/v1/index.py b/endpoints/v1/index.py index 457ffd43d..4d701b918 100644 --- a/endpoints/v1/index.py +++ b/endpoints/v1/index.py @@ -270,7 +270,7 @@ def get_repository_images(namespace, repository): resp = make_response(json.dumps([]), 200) resp.mimetype = 'application/json' - track_and_log('pull_repo', repo) + track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01) return resp abort(403) diff --git a/endpoints/web.py b/endpoints/web.py index 8018b3666..154483faf 100644 --- a/endpoints/web.py +++ b/endpoints/web.py @@ -9,7 +9,7 @@ from health.healthcheck import get_healthchecker from data import model from data.database import db -from app import app, billing as stripe, build_logs, avatar, signer, log_archive +from app import app, billing as stripe, build_logs, avatar, signer, log_archive, config_provider from auth.auth import require_session_login, process_oauth from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission, SuperUserPermission, AdministerRepositoryPermission, @@ -209,7 +209,7 @@ def v1(): @web.route('/health/instance', methods=['GET']) @no_cache def instance_health(): - checker = get_healthchecker(app) + checker = get_healthchecker(app, config_provider) (data, status_code) = checker.check_instance() response = jsonify(dict(data=data, status_code=status_code)) response.status_code = status_code @@ -221,7 +221,7 @@ def instance_health(): @web.route('/health/endtoend', methods=['GET']) @no_cache def endtoend_health(): - checker = get_healthchecker(app) + checker = get_healthchecker(app, config_provider) (data, status_code) = checker.check_endtoend() response = jsonify(dict(data=data, status_code=status_code)) response.status_code = status_code diff --git a/external_libraries.py b/external_libraries.py index 74f398a02..b5055abeb 100644 --- 
a/external_libraries.py +++ b/external_libraries.py @@ -2,7 +2,7 @@ import urllib2 import re import os -LOCAL_DIRECTORY = 'static/ldn/' +LOCAL_DIRECTORY = '/static/ldn/' EXTERNAL_JS = [ 'code.jquery.com/jquery.js', diff --git a/health/healthcheck.py b/health/healthcheck.py index 98de22435..c212c694d 100644 --- a/health/healthcheck.py +++ b/health/healthcheck.py @@ -4,14 +4,15 @@ from health.services import check_all_services logger = logging.getLogger(__name__) -def get_healthchecker(app): +def get_healthchecker(app, config_provider): """ Returns a HealthCheck instance for the given app. """ - return HealthCheck.get_checker(app) + return HealthCheck.get_checker(app, config_provider) class HealthCheck(object): - def __init__(self, app): + def __init__(self, app, config_provider): self.app = app + self.config_provider = config_provider def check_instance(self): """ @@ -52,20 +53,21 @@ class HealthCheck(object): data = { 'services': service_statuses, 'notes': notes, - 'is_testing': self.app.config['TESTING'] + 'is_testing': self.app.config['TESTING'], + 'config_provider': self.config_provider.provider_id } return (data, 200 if is_healthy else 503) @classmethod - def get_checker(cls, app): + def get_checker(cls, app, config_provider): name = app.config['HEALTH_CHECKER'][0] parameters = app.config['HEALTH_CHECKER'][1] or {} for subc in cls.__subclasses__(): if subc.check_name() == name: - return subc(app, **parameters) + return subc(app, config_provider, **parameters) raise Exception('Unknown health check with name %s' % name) @@ -77,8 +79,8 @@ class LocalHealthCheck(HealthCheck): class ProductionHealthCheck(HealthCheck): - def __init__(self, app, access_key, secret_key, db_instance='quay'): - super(ProductionHealthCheck, self).__init__(app) + def __init__(self, app, config_provider, access_key, secret_key, db_instance='quay'): + super(ProductionHealthCheck, self).__init__(app, config_provider) self.access_key = access_key self.secret_key = secret_key self.db_instance = db_instance diff --git a/initdb.py b/initdb.py index c8c814832..311c3004e 100644 --- a/initdb.py +++ b/initdb.py @@ -226,6 +226,7 @@ def initialize_database(): LoginService.create(name='ldap') LoginService.create(name='jwtauthn') LoginService.create(name='keystone') + LoginService.create(name='dex') BuildTriggerService.create(name='github') BuildTriggerService.create(name='custom-git') diff --git a/release.py b/release.py new file mode 100644 index 000000000..91a46f796 --- /dev/null +++ b/release.py @@ -0,0 +1,26 @@ +import os + + +_GIT_HEAD_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'GIT_HEAD') + +SERVICE = 'quay' +GIT_HEAD = None +REGION = os.environ.get('QUAY_REGION') + + +# Load git head if available +if os.path.isfile(_GIT_HEAD_PATH): + with open(_GIT_HEAD_PATH) as f: + GIT_HEAD = f.read().strip() + + +def main(): + from app import app + from data.model.release import set_region_release + + if REGION and GIT_HEAD: + set_region_release(SERVICE, REGION, GIT_HEAD) + + +if __name__ == '__main__': + main() diff --git a/requirements-nover.txt b/requirements-nover.txt index 31da0c5b9..4e0b04d33 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -55,3 +55,4 @@ pyjwt toposort pyjwkest rfc3987 +pyjwkest diff --git a/requirements.txt b/requirements.txt index 598558965..f724fa38e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -59,6 +59,7 @@ pycrypto==2.6.1 pygpgme==0.3 pyjwkest==1.0.3 PyJWT==1.4.0 +pyjwkest==1.0.1 PyMySQL==0.6.6 pyOpenSSL==0.15.1 PyPDF2==1.24 diff --git 
a/static/css/directives/repo-view/repo-panel-settings.css b/static/css/directives/repo-view/repo-panel-settings.css index ff626b9ba..8925365eb 100644 --- a/static/css/directives/repo-view/repo-panel-settings.css +++ b/static/css/directives/repo-view/repo-panel-settings.css @@ -29,6 +29,18 @@ margin-top: -7px !important; } +.repo-panel-settings-element .repo-count-checker { + margin-top: 20px; +} + +.repo-panel-settings-element .co-alert { + margin-bottom: 0px; +} + +.repo-panel-settings-element .panel-body { + border-bottom: 0px; +} + @media (max-width: 767px) { .repo-panel-settings-element .delete-btn { float: none; diff --git a/static/css/directives/ui/external-login-button.css b/static/css/directives/ui/external-login-button.css index 390eda57d..8cf5ff69e 100644 --- a/static/css/directives/ui/external-login-button.css +++ b/static/css/directives/ui/external-login-button.css @@ -1,3 +1,8 @@ -.external-login-button i.fa { +.external-login-button i.fa, +.external-login-button img { margin-right: 4px; + width: 24px; + font-size: 18px; + text-align: center; + vertical-align: middle; } \ No newline at end of file diff --git a/static/css/directives/ui/external-logins-manager.css b/static/css/directives/ui/external-logins-manager.css index 2c5ca7302..947870dbd 100644 --- a/static/css/directives/ui/external-logins-manager.css +++ b/static/css/directives/ui/external-logins-manager.css @@ -6,6 +6,9 @@ font-size: 18px; } -.external-logins-manager .external-auth-provider td:first-child i.fa { +.external-logins-manager .external-auth-provider-title i.fa, +.external-logins-manager .external-auth-provider-title img { margin-right: 6px; + width: 24px; + text-align: center; } \ No newline at end of file diff --git a/static/css/directives/ui/repo-count-checker.css b/static/css/directives/ui/repo-count-checker.css new file mode 100644 index 000000000..d357afdba --- /dev/null +++ b/static/css/directives/ui/repo-count-checker.css @@ -0,0 +1,26 @@ +.repo-count-checker .btn { + margin-top: 0px !important; +} + +.repo-count-checker .co-alert { + margin-bottom: 6px !important; + padding-right: 120px; +} + +.repo-count-checker .co-alert .btn { + position: absolute; + top: 10px; + right: 10px; +} + +@media (max-width: 767px) { + .repo-count-checker .co-alert { + padding-right: 10px; + } + + .repo-count-checker .co-alert .btn { + position: relative; + margin-top: 20px; + margin-bottom: 10px; + } +} \ No newline at end of file diff --git a/static/css/directives/ui/signup-form.css b/static/css/directives/ui/signup-form.css index 5a3dede2f..1d71692ee 100644 --- a/static/css/directives/ui/signup-form.css +++ b/static/css/directives/ui/signup-form.css @@ -4,4 +4,14 @@ .signup-form-element .co-alert { color: black; +} + +.signup-form-element .single-sign-on a { + font-size: 24px; +} + +.signup-form-element .single-sign-on .external-login-button i.fa, +.signup-form-element .single-sign-on .external-login-button img { + width: 30px; + font-size: 24px; } \ No newline at end of file diff --git a/static/directives/config/config-setup-tool.html b/static/directives/config/config-setup-tool.html index ae85f6d48..9edbffd75 100644 --- a/static/directives/config/config-setup-tool.html +++ b/static/directives/config/config-setup-tool.html @@ -232,6 +232,9 @@ ng-selected="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name] == value">{{ value }} +
[The remaining hunks modify AngularJS HTML templates whose markup was lost in extraction; only the recoverable intent of each change is preserved here:
- static/directives/config/config-setup-tool.html: adds a "{{ field.help_text }}" help-text element and a "See Documentation for more information" link to the storage configuration fields.
- static/directives/external-login-button.html: replaces the hard-coded "Sign In with GitHub Enterprise" / "Attach to GitHub Enterprise Account" and "Sign In with Google" / "Attach to Google Account" buttons with a single provider-driven button: "Sign In with {{ providerInfo.title() }}" / "Attach to {{ providerInfo.title() }}".
- static/directives/external-logins-manager.html: collapses the separate GitHub Enterprise and Google table rows into one row repeated per provider, showing "Attached to {{ provider.title() }} account" with {{ provider.getUserInfo(externalLoginInfo[provider.id]).username }}, "Not attached to {{ provider.title() }}", and a generic "Detach Account" button.
- static/directives/header-bar.html: splits the single "Sign in" link into two variants (presumably gated on the new FEATURE_DIRECT_LOGIN flag) in both places it appears.
- static/directives/repo-count-checker.html (new file): prompts for a plan upgrade when making a repository private would exceed the namespace's limit ("In order to make this repository private ... you will need to upgrade your plan to {{ planRequired.title }} ... This will cost ${{ planRequired.price / 100 }}/month. Upgrade now"), optionally suggests the user's organization namespace, and for organizations at their limit shows "This organization has reached its private repository limit. Please contact your administrator."
- static/directives/repo-view/repo-panel-settings.html: restructures the settings panel ("Repository Settings" / "Repository Visibility" / "Delete Repository") and embeds the new repo-count-checker directive beside the visibility controls.]
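A closing note on how the release-tracking pieces above fit together: the Dockerfile snapshots .git/HEAD into GIT_HEAD at build time, zz_release.sh runs release.py at container start to record (service, region, version) via data.model.release.set_region_release(), and data/migrations/env.py consults that table to tolerate a "No such revision" error during rolling deploys. A minimal standalone restatement of that last decision, with the dependencies passed in rather than imported (the function name is the editor's, not the codebase's):

  def should_skip_unknown_revision(service, region, git_head, get_recent_releases):
    # The error is expected only when this container is running the immediately previous
    # release for its region, i.e. a newer version has already applied migrations that
    # this version's alembic history does not know about.
    if not region or not git_head:
      return False
    previous = list(get_recent_releases(service, region).offset(1).limit(1))
    return bool(previous) and previous[0].version == git_head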