Merge remote-tracking branch 'upstream/master' into python-registry-v2
commit 26cea9a07c
96 changed files with 2044 additions and 626 deletions
@@ -4,10 +4,11 @@ tools
+test/data/registry
 venv
 .git
 !.git/HEAD
 .gitignore
 Bobfile
 README.md
 requirements-nover.txt
 run-local.sh
 .DS_Store
 *.pyc
.gitignore (vendored, 1 change)
@@ -10,3 +10,4 @@ node_modules
 static/ldn
 static/fonts
 stack_local
+GIT_HEAD
CHANGELOG.md (26 changes)
@@ -1,3 +1,29 @@
+### v1.12.0
+
+- Added experimental Dex login support (#447, #468)
+- Fixed tag pagination in API (#463)
+- Improved performance for archiving build logs (#462, #466)
+- Optimized cloud storage copying (#460)
+- Fixed bug where LDN directory was given a relative domain not absolute (#458)
+- Allow robot account names to have underscores (#453)
+- Added missing SuperUser aggregate logs endpoint (#449)
+- Made JWT validation more strict (#446, #448)
+- Added dialog around restarting the container after setup (#441)
+- Added selection of Swift API version (#444)
+- Improved UX around organization name validation (#437)
+- Stopped relying on undocumented behavior for OAuth redirects (#432)
+- Hardened against S3 upload failures (#434)
+- Added experimental automatic storage replication (#191)
+- Deduplicated logging to syslog (#431, #440)
+- Added list org member permissions back to API (#429)
+- Fixed bug in parsing unicode Dockerfiles (#426)
+- Added CloudWatch metrics for multipart uploads (#419)
+- Updated CloudWatch metrics to send the max metrics per API call (#412)
+- Limited the items auto-loaded from GitHub in trigger setup to 30 (#382)
+- Tweaked build UX (#381, #386, #384, #410, #420, #422)
+- Changed webhook notifications to also send client SSL certs (#374)
+- Improved internal test suite (#381, #374, #388, #455, #457)
+
 ### v1.11.2
 
 - Fixed security bug with LDAP login (#376)
@@ -43,6 +43,7 @@ ADD conf/init/doupdatelimits.sh /etc/my_init.d/
 ADD conf/init/copy_syslog_config.sh /etc/my_init.d/
 ADD conf/init/runmigration.sh /etc/my_init.d/
 ADD conf/init/syslog-ng.conf /etc/syslog-ng/
+ADD conf/init/zz_release.sh /etc/my_init.d/
 
 ADD conf/init/service/ /etc/service/
 
@@ -53,6 +54,9 @@ RUN mkdir static/fonts static/ldn
 RUN venv/bin/python -m external_libraries
 RUN mkdir /usr/local/nginx/logs/
 
+# TODO(ssewell): only works on a detached head, make work with ref
+RUN cat .git/HEAD > GIT_HEAD
+
 # Run the tests
 RUN TEST=true venv/bin/python -m unittest discover -f
 RUN TEST=true venv/bin/python -m test.registry_tests -f
app.py (30 changes)
@@ -26,11 +26,13 @@ from util import get_app_url
 from util.saas.analytics import Analytics
 from util.saas.exceptionlog import Sentry
 from util.names import urn_generator
-from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
+from util.config.oauth import (GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig,
+                               DexOAuthConfig)
 
 from util.security.signing import Signer
 from util.saas.cloudwatch import start_cloudwatch_sender
 from util.saas.metricqueue import MetricQueue
-from util.config.provider import FileConfigProvider, TestConfigProvider
+from util.config.provider import get_config_provider
 from util.config.configutil import generate_secret_key
 from util.config.superusermanager import SuperUserManager
@@ -40,8 +42,6 @@ OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py'
 
 OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'
 
-CONFIG_PROVIDER = FileConfigProvider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml', 'config.py')
-
 app = Flask(__name__)
 logger = logging.getLogger(__name__)
@@ -54,10 +54,13 @@ class RegexConverter(BaseConverter):
 
 app.url_map.converters['regex'] = RegexConverter
 
-# Instantiate the default configuration (for test or for normal operation).
-if 'TEST' in os.environ:
-  CONFIG_PROVIDER = TestConfigProvider()
+# Instantiate the configuration.
+is_testing = 'TEST' in os.environ
+is_kubernetes = 'KUBERNETES_SERVICE_HOST' in os.environ
+config_provider = get_config_provider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml', 'config.py',
+                                      testing=is_testing, kubernetes=is_kubernetes)
 
+if is_testing:
   from test.testconfig import TestConfig
   logger.debug('Loading test config.')
   app.config.from_object(TestConfig())
@@ -68,7 +71,7 @@ else:
 app.teardown_request(database.close_db_filter)
 
 # Load the override config via the provider.
-CONFIG_PROVIDER.update_app_config(app.config)
+config_provider.update_app_config(app.config)
 
 # Update any configuration found in the override environment variable.
 OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'
@@ -140,13 +143,16 @@ github_login = GithubOAuthConfig(app.config, 'GITHUB_LOGIN_CONFIG')
 github_trigger = GithubOAuthConfig(app.config, 'GITHUB_TRIGGER_CONFIG')
 gitlab_trigger = GitLabOAuthConfig(app.config, 'GITLAB_TRIGGER_CONFIG')
 google_login = GoogleOAuthConfig(app.config, 'GOOGLE_LOGIN_CONFIG')
-oauth_apps = [github_login, github_trigger, gitlab_trigger, google_login]
+dex_login = DexOAuthConfig(app.config, 'DEX_LOGIN_CONFIG')
 
-image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
-image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf)
+oauth_apps = [github_login, github_trigger, gitlab_trigger, google_login, dex_login]
+
+image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf, metric_queue=metric_queue)
+image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf, metric_queue=metric_queue)
 dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
                                    metric_queue=metric_queue,
                                    reporter=MetricQueueReporter(metric_queue))
-notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf)
+notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf, metric_queue=metric_queue)
 
 database.configure(app.config)
 model.config.app_config = app.config
@@ -7,6 +7,7 @@ ssh_authorized_keys:
 - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAgEAo/JkbGO6R7g1ZxARi0xWVM7FOfN02snRAcIO6vT9M7xMUkWVLgD+hM/o91lk+UFiYdql0CATobpFWncRL36KaUqsbw9/1BlI40wg296XHXSSnxhxZ4L7ytf6G1tyN319HXlI2kh9vAf/fy++yDvkH8dI3k1oLoW+mZPET6Pff04/6AXXrRlS5mhmGv9irGwiDHtVKpj6lU8DN/UtOrv1tiQ0pgwEJq05fLGoQfgPNaBCnW2z4Ubpn2gyMcMBMpSwo4hCqJePd349e4bLmFcT+gXYg7Mnup1DoTDlowFFN56wpxQbdp96IxWzU+jYPaIAuRo+BJzCyOS8qBv0Z4RZrgop0qp2JYiVwmViO6TZhIDz6loQJXUOIleQmNgTbiZx8Bwv5GY2jMYoVwlBp7yy5bRjxfbFsJ0vU7TVzNAG7oEJy/74HmHmWzRQlSlQjesr8gRbm9zgR8wqc/L107UOWFg7Cgh8ZNjKuADbXqYuda1Y9m2upcfS26UPz5l5PW5uFRMHZSi8pb1XV6/0Z8H8vwsh37Ur6aLi/5jruRmKhdlsNrB1IiDicBsPW3yg7HHSIdPU4oBNPC77yDCT3l4CKr4el81RrZt7FbJPfY+Ig9Q5O+05f6I8+ZOlJGyZ/Qfyl2aVm1HnlJKuBqPxeic8tMng/9B5N7uZL6Y3k5jFU8c= quentin
 - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDI7LtxLItapmUbt3Gs+4Oxa1i22fkx1+aJDkAjiRWPSX3+cxOzuPfHX9uFzr+qj5hy4J7ErrPp8q9alu+il9lE26GQuUxOZiaUrXu4dRCXXdCqTHARWBxGUXjkxdMp2HIzFpBxmVqcRubrgM36LBzKapdDOqQdz7XnNm5Jmf0tH/N0+TgV60P0WVY1CxmTya+JHNFVgazhd+oIGEhTyW/eszMGcFUgZet7DQFytYIQXYSwwGpGdJ+0InKAJ2SzCt/yuUlSrhrVM8vSGeami1XYmgQiyth1zjteMd8uTrc9NREH7bZTNcMFBqVYE3BYQWGRrv8pMMgP9gxgLbxtVsUl barakmich-titania
 - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUWB4aSjSRHCz5/6H9/EJhJVvRmPThvEzyHinaWPsuM9prBSLci9NF9WneVl30nczkvllA+w34kycdrS3fKpjTbODaEOLHBobWl3bccY0I6kr86q5z67NZffjCm/P/RL+dBaOiBWS8PV8oiDF1P6YdMo8Jk46n9fozmLCXHUuCw5BJ8PGjQqbsEzA3qFMeKZYdJHOizOfeIfKfCWYrrumVRY9v6SAUDoFOl4PZEM7QdGp9EoRYb9MNLgKLnZ4RjbcLoFwiqxY4KEM4zfjZPNOECiLCuJqvHM2QawwuO1klJ16HpJk+FzOTWQoZtT47LoE/XNSOcNtAOiD+OQ449ia1EArhm7+1DnLXvHXKIl1JtuqJz+wFCsbNSdB7P562OHAGRIxYK3DfE+0CZH1BeHYl7xiRBeCtZ+OZMIocqeJtq8taIS7Un5wnGcQWxFtQnr/f65EgbIi7G2dxPcjhr6K+GWYezsiReVVKnIClq2MHhABG9QOncKDIa47L3nyx3pm4ZfMbC2jmnK2pFgGGSfYDy4487JnAUOG1mzZ9vm4gDhatT+vZFSBOwv1e4CErBh/wYXooF5I0nGmE6y6zkKFqP+ZolJ6iXmXQ7Ea2oaGeyaprweBjkhHgghi4KbwKbClope4Zo9X9JJYBLQSW33sEEuy8MlSBpdZAbz9t/FvJaw== mjibson
+- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiNawWSZL2MF99zwG9cFjGmML6agsKwaacQEoTsjcjHGixyUnqHXaLdrGma5i/uphZPkI5XRBKiuIROACY/aRoIxJUpV7AQ1Zx87cILx6fDVePvU5lW2DdhlCDUdwjuzDb/WO/c/qMWjOPqRG4q8XvB7nhuORMMgdpDXWVH4LXPmFez1iIBCKNk04l6Se7wiEOQjaBnTDiBDYlWD78r6RdiAU5eIxpq+lKBDTcET0vegwcA/WE4YOlYBbOrgtHrgwWqG/pXxUu77aapDOmfjtDrgim6XP5kEnytg5gCaN9iLvIpT8b1wD/1Z+LoNSZg6m9gkcC2yTRI0apOBa2G8lz silas@pro.local
 
 write_files:
 - path: /root/overrides.list
conf/init/zz_release.sh (new executable file, 8 lines)
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+
+source venv/bin/activate
+
+export PYTHONPATH=.
+
+python /release.py
@@ -152,6 +152,9 @@ class DefaultConfig(object):
 # Feature Flag: Whether Google login is supported.
 FEATURE_GOOGLE_LOGIN = False
 
+# Feature Flag: Whether Dex login is supported.
+FEATURE_DEX_LOGIN = False
+
 # Feature flag, whether to enable olark chat
 FEATURE_OLARK_CHAT = False
 
@@ -183,6 +186,9 @@ class DefaultConfig(object):
 # Feature Flag: Whether to automatically replicate between storage engines.
 FEATURE_STORAGE_REPLICATION = False
 
+# Feature Flag: Whether users can directly login to the UI.
+FEATURE_DIRECT_LOGIN = True
+
 BUILD_MANAGER = ('enterprise', {})
 
 DISTRIBUTED_STORAGE_CONFIG = {
@@ -123,6 +123,7 @@ db = Proxy()
 read_slave = Proxy()
 db_random_func = CallableProxy()
 db_for_update = CallableProxy()
+db_transaction = CallableProxy()
 
 
 def validate_database_url(url, db_kwargs, connect_timeout=5):
@@ -168,6 +169,10 @@ def configure(config_object):
   if read_slave_uri is not None:
     read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))
 
+  def _db_transaction():
+    return config_object['DB_TRANSACTION_FACTORY'](db)
+
+  db_transaction.initialize(_db_transaction)
 
 def random_string_generator(length=16):
   def random_string():
@@ -377,14 +382,15 @@ class Repository(BaseModel):
       return sorted_models.index(cmp_fk.model_class.__name__)
     filtered_ops.sort(key=sorted_model_key)
 
-    for query, fk in filtered_ops:
-      model = fk.model_class
-      if fk.null and not delete_nullable:
-        model.update(**{fk.name: None}).where(query).execute()
-      else:
-        model.delete().where(query).execute()
+    with db_transaction():
+      for query, fk in filtered_ops:
+        model = fk.model_class
+        if fk.null and not delete_nullable:
+          model.update(**{fk.name: None}).where(query).execute()
+        else:
+          model.delete().where(query).execute()
 
     return self.delete().where(self._pk_expr()).execute()
 
 
 class Star(BaseModel):
   user = ForeignKeyField(User, index=True)
@@ -469,6 +475,9 @@ class RepositoryBuildTrigger(BaseModel):
   pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot',
                              robot_null_delete=True)
 
+  # TODO(jschorr): Remove this column once we verify the backfill has succeeded.
+  used_legacy_github = BooleanField(null=True, default=False)
+
 
 class EmailConfirmation(BaseModel):
   code = CharField(default=random_string_generator(), unique=True, index=True)
@@ -778,6 +787,33 @@ class BlobUpload(BaseModel):
   )
 
 
+class QuayService(BaseModel):
+  name = CharField(index=True, unique=True)
+
+
+class QuayRegion(BaseModel):
+  name = CharField(index=True, unique=True)
+
+
+class QuayRelease(BaseModel):
+  service = ForeignKeyField(QuayService)
+  version = CharField()
+  region = ForeignKeyField(QuayRegion)
+  reverted = BooleanField(default=False)
+  created = DateTimeField(default=datetime.now, index=True)
+
+  class Meta:
+    database = db
+    read_slaves = (read_slave,)
+    indexes = (
+      # unique release per region
+      (('service', 'version', 'region'), True),
+
+      # get recent releases
+      (('service', 'region', 'created'), False),
+    )
+
+
 all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission, Visibility,
               RepositoryTag, EmailConfirmation, FederatedLogin, LoginService, QueueItem,
               RepositoryBuild, Team, TeamMember, TeamRole, LogEntryKind, LogEntry,
@@ -787,4 +823,5 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
               ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
               RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
               TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind,
-              AccessTokenKind, Star, RepositoryActionCount, TagManifest, BlobUpload, UserRegion]
+              AccessTokenKind, Star, RepositoryActionCount, TagManifest, UserRegion,
+              QuayService, QuayRegion, QuayRelease, BlobUpload]
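Editor's note: with db_transaction now exposed as a CallableProxy on data.database and initialized inside configure(), model code can open a transaction without reaching into app config itself. A minimal usage sketch, assuming configure() has already run (app_config stands in for the real application config):

    from data.database import configure, db_transaction

    configure(app_config)  # initializes the proxy from DB_TRANSACTION_FACTORY

    # Later, anywhere in model code: both writes commit or roll back together.
    with db_transaction():
        model_a.save()
        model_b.save()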
@@ -1,8 +1,11 @@
 from __future__ import with_statement
 
+import logging
 import os
+
 from alembic import context
+from alembic.revision import ResolutionError
+from alembic.util import CommandError
 from sqlalchemy import engine_from_config, pool
 from logging.config import fileConfig
 from urllib import unquote, quote
@@ -11,6 +14,7 @@ from peewee import SqliteDatabase
 from data.database import all_models, db
 from app import app
 from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
+from release import GIT_HEAD, REGION, SERVICE
 from util.morecollections import AttrDict
 
 config = context.config
@@ -21,6 +25,8 @@ config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI']))
 if config.config_file_name:
   fileConfig(config.config_file_name)
 
+logger = logging.getLogger(__name__)
+
 # add your model's MetaData object here
 # for 'autogenerate' support
 # from myapp import mymodel
@@ -77,7 +83,23 @@ def run_migrations_online():
 
   try:
     with context.begin_transaction():
-      context.run_migrations(tables=tables)
+      try:
+        context.run_migrations(tables=tables)
+      except (CommandError, ResolutionError) as ex:
+        if 'No such revision' not in str(ex):
+          raise
+
+        if not REGION or not GIT_HEAD:
+          raise
+
+        from data.model.release import get_recent_releases
+
+        # ignore revision error if we're running the previous release
+        releases = list(get_recent_releases(SERVICE, REGION).offset(1).limit(1))
+        if releases and releases[0].version == GIT_HEAD:
+          logger.warn('Skipping database migration because revision not found')
+        else:
+          raise
   finally:
     connection.close()
data/migrations/versions/1c0f6ede8992_quay_releases.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+"""Quay releases
+
+Revision ID: 1c0f6ede8992
+Revises: 545794454f49
+Create Date: 2015-09-15 15:46:09.784607
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '1c0f6ede8992'
+down_revision = '545794454f49'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.create_table('quayregion',
+                  sa.Column('id', sa.Integer(), nullable=False),
+                  sa.Column('name', sa.String(length=255), nullable=False),
+                  sa.PrimaryKeyConstraint('id', name=op.f('pk_quayregion'))
+                  )
+  op.create_index('quayregion_name', 'quayregion', ['name'], unique=True)
+  op.create_table('quayservice',
+                  sa.Column('id', sa.Integer(), nullable=False),
+                  sa.Column('name', sa.String(length=255), nullable=False),
+                  sa.PrimaryKeyConstraint('id', name=op.f('pk_quayservice'))
+                  )
+  op.create_index('quayservice_name', 'quayservice', ['name'], unique=True)
+  op.create_table('quayrelease',
+                  sa.Column('id', sa.Integer(), nullable=False),
+                  sa.Column('service_id', sa.Integer(), nullable=False),
+                  sa.Column('version', sa.String(length=255), nullable=False),
+                  sa.Column('region_id', sa.Integer(), nullable=False),
+                  sa.Column('reverted', sa.Boolean(), nullable=False),
+                  sa.Column('created', sa.DateTime(), nullable=False),
+                  sa.ForeignKeyConstraint(['region_id'], ['quayregion.id'], name=op.f('fk_quayrelease_region_id_quayregion')),
+                  sa.ForeignKeyConstraint(['service_id'], ['quayservice.id'], name=op.f('fk_quayrelease_service_id_quayservice')),
+                  sa.PrimaryKeyConstraint('id', name=op.f('pk_quayrelease'))
+                  )
+  op.create_index('quayrelease_created', 'quayrelease', ['created'], unique=False)
+  op.create_index('quayrelease_region_id', 'quayrelease', ['region_id'], unique=False)
+  op.create_index('quayrelease_service_id', 'quayrelease', ['service_id'], unique=False)
+  op.create_index('quayrelease_service_id_region_id_created', 'quayrelease', ['service_id', 'region_id', 'created'], unique=False)
+  op.create_index('quayrelease_service_id_version_region_id', 'quayrelease', ['service_id', 'version', 'region_id'], unique=True)
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_table('quayrelease')
+  op.drop_table('quayservice')
+  op.drop_table('quayregion')
+  ### end Alembic commands ###
@@ -0,0 +1,26 @@
+"""Add support for Dex login
+
+Revision ID: 3a3bb77e17d5
+Revises: 9512773a4a2
+Create Date: 2015-09-04 15:57:38.007822
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3a3bb77e17d5'
+down_revision = '9512773a4a2'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  op.bulk_insert(tables.loginservice, [{'id': 7, 'name': 'dex'}])
+
+
+def downgrade(tables):
+  op.execute(
+    tables.loginservice.delete()
+    .where(tables.loginservice.c.name == op.inline_literal('dex'))
+  )
@@ -0,0 +1,28 @@
+"""Migrate GitHub triggers to use deploy keys
+
+Revision ID: 3ff4fbc94644
+Revises: 4d5f6716df0
+Create Date: 2015-09-16 17:50:22.034146
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3ff4fbc94644'
+down_revision = '4d5f6716df0'
+
+from alembic import op
+import sqlalchemy as sa
+
+from util.migrate.migrategithubdeploykeys import backfill_github_deploykeys
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  backfill_github_deploykeys()
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  pass
+  ### end Alembic commands ###
@@ -0,0 +1,26 @@
+"""Add legacy column for GitHub backfill tracking
+
+Revision ID: 4d5f6716df0
+Revises: 1c0f6ede8992
+Create Date: 2015-09-16 17:49:40.334540
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4d5f6716df0'
+down_revision = '1c0f6ede8992'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True))
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('repositorybuildtrigger', 'used_legacy_github')
+  ### end Alembic commands ###
@@ -0,0 +1,34 @@
+"""Migrate image data back to image table
+
+Revision ID: 545794454f49
+Revises: 3a3bb77e17d5
+Create Date: 2015-09-15 11:48:47.554255
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '545794454f49'
+down_revision = '3a3bb77e17d5'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('image', sa.Column('aggregate_size', sa.BigInteger(), nullable=True))
+  op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
+  op.add_column('image', sa.Column('comment', sa.Text(), nullable=True))
+  op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
+  op.add_column('image', sa.Column('v1_json_metadata', sa.Text(), nullable=True))
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('image', 'v1_json_metadata')
+  op.drop_column('image', 'created')
+  op.drop_column('image', 'comment')
+  op.drop_column('image', 'command')
+  op.drop_column('image', 'aggregate_size')
+  ### end Alembic commands ###
@@ -1,4 +1,4 @@
-from data.database import db
+from data.database import db, db_transaction
 
 
 class DataModelException(Exception):
@@ -88,10 +88,6 @@ class Config(object):
 config = Config()
 
 
-def db_transaction():
-  return config.app_config['DB_TRANSACTION_FACTORY'](db)
-
-
 # There MUST NOT be any circular dependencies between these subsections. If there are fix it by
 # moving the minimal number of things to _basequery
 # TODO document the methods and modules for each one of the submodules below.
@@ -4,7 +4,7 @@ from peewee import JOIN_LEFT_OUTER
 from datetime import timedelta, datetime
 
 from data.database import (BuildTriggerService, RepositoryBuildTrigger, Repository, Namespace, User,
-                           RepositoryBuild, BUILD_PHASE, db_for_update)
+                           RepositoryBuild, BUILD_PHASE, db_for_update, db_random_func)
 from data.model import (InvalidBuildTriggerException, InvalidRepositoryBuildException,
                         db_transaction, user as user_model)
@@ -163,11 +163,24 @@ def cancel_repository_build(build, work_queue):
   return True
 
 
-def archivable_buildlogs_query():
+def get_archivable_build():
   presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
-  return (RepositoryBuild
-          .select()
+  candidates = (RepositoryBuild
+                .select(RepositoryBuild.id)
                 .where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
                        (RepositoryBuild.phase == BUILD_PHASE.ERROR) |
                        (RepositoryBuild.started < presumed_dead_date),
-                       RepositoryBuild.logs_archived == False))
+                       RepositoryBuild.logs_archived == False)
+                .limit(50)
+                .alias('candidates'))
+
+  try:
+    found_id = (RepositoryBuild
+                .select(candidates.c.id)
+                .from_(candidates)
+                .order_by(db_random_func())
+                .get())
+    return RepositoryBuild.get(id=found_id)
+  except RepositoryBuild.DoesNotExist:
+    return None
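Editor's note: the archiver now draws one random build from a bounded candidate set (at most 50 ids) instead of materializing the full query, which spreads contention across workers. A sketch of how a log-archiving worker might consume it; the loop and archive_build_logs are illustrative, not part of this diff:

    from data.model.build import get_archivable_build

    def archive_once():
        build = get_archivable_build()  # returns None when nothing is ready
        if build is None:
            return False
        archive_build_logs(build)       # hypothetical archival step
        return True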
@@ -277,15 +277,24 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created_date_str, ...):
 
   # We cleanup any old checksum in case it's a retry after a fail
   fetched.storage.checksum = None
-  fetched.created = datetime.now()
+  now = datetime.now()
+  # TODO stop writing to storage when all readers are removed
+  fetched.storage.created = now
+  fetched.created = now
 
   if created_date_str is not None:
     try:
-      fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
+      # TODO stop writing to storage fields when all readers are removed
+      parsed_created_time = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
+      fetched.created = parsed_created_time
+      fetched.storage.created = parsed_created_time
     except:
       # parse raises different exceptions, so we cannot use a specific kind of handler here.
       pass
 
+  # TODO stop writing to storage fields when all readers are removed
   fetched.storage.comment = comment
   fetched.storage.command = command
+  fetched.comment = comment
+  fetched.command = command
   fetched.v1_json_metadata = v1_json_metadata
@@ -327,13 +336,18 @@ def set_image_size(docker_image_id, namespace_name, repository_name, image_size,
                .where(Image.id << ancestors)
                .scalar()) + image_size
 
+      # TODO stop writing to storage when all readers are removed
       image.storage.aggregate_size = total_size
+      image.aggregate_size = total_size
     except Image.DoesNotExist:
       pass
   else:
+    # TODO stop writing to storage when all readers are removed
    image.storage.aggregate_size = image_size
+    image.aggregate_size = image_size
 
   image.storage.save()
+  image.save()
 
   return image
data/model/release.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+from data.database import QuayRelease, QuayRegion, QuayService
+
+
+def set_region_release(service_name, region_name, version):
+  service, _ = QuayService.create_or_get(name=service_name)
+  region, _ = QuayRegion.create_or_get(name=region_name)
+
+  return QuayRelease.create_or_get(service=service, version=version, region=region)
+
+
+def get_recent_releases(service_name, region_name):
+  return (QuayRelease
+          .select(QuayRelease)
+          .join(QuayService)
+          .switch(QuayRelease)
+          .join(QuayRegion)
+          .where(
+            QuayService.name == service_name,
+            QuayRegion.name == region_name,
+            QuayRelease.reverted == False,
+          )
+          .order_by(QuayRelease.created.desc())
+          )
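Editor's note: together with the QuayService/QuayRegion/QuayRelease models added in data/database.py, this forms a small per-region release ledger. A usage sketch with illustrative values:

    from data.model.release import set_region_release, get_recent_releases

    set_region_release('quay', 'us-east-1', 'abc123def')  # record a deploy
    latest = get_recent_releases('quay', 'us-east-1').get()
    print(latest.version)  # -> 'abc123def'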
@@ -135,6 +135,7 @@ def list_repository_tag_history(repo_obj, page=1, size=100, specific_tag=None):
          .where(RepositoryTag.repository == repo_obj)
          .where(RepositoryTag.hidden == False)
          .order_by(RepositoryTag.lifetime_start_ts.desc())
+         .order_by(RepositoryTag.name)
          .paginate(page, size))
 
   if specific_tag:
@@ -26,9 +26,10 @@ class MetricQueueReporter(object):
 
 class WorkQueue(object):
   def __init__(self, queue_name, transaction_factory,
-               canonical_name_match_list=None, reporter=None):
+               canonical_name_match_list=None, reporter=None, metric_queue=None):
     self._queue_name = queue_name
     self._reporter = reporter
+    self._metric_queue = metric_queue
     self._transaction_factory = transaction_factory
     self._currently_processing = False
 
@@ -86,12 +87,20 @@ class WorkQueue(object):
     return (running_count, available_not_running_count, available_count)
 
   def update_metrics(self):
-    if self._reporter is None:
+    if self._reporter is None and self._metric_queue is None:
       return
 
     (running_count, available_not_running_count, available_count) = self.get_metrics()
-    self._reporter(self._currently_processing, running_count,
-                   running_count + available_not_running_count)
+
+    if self._metric_queue:
+      dim = {'queue': self._queue_name}
+      self._metric_queue.put('Running', running_count, dimensions=dim)
+      self._metric_queue.put('AvailableNotRunning', available_not_running_count, dimensions=dim)
+      self._metric_queue.put('Available', available_count, dimensions=dim)
+
+    if self._reporter:
+      self._reporter(self._currently_processing, running_count,
+                     running_count + available_not_running_count)
 
   def has_retries_remaining(self, item_id):
     """ Returns whether the queue item with the given id has any retries remaining. If the
@@ -185,7 +194,12 @@ class WorkQueue(object):
 
   def complete(self, completed_item):
     with self._transaction_factory(db):
-      completed_item_obj = self._item_by_id_for_update(completed_item.id)
+      try:
+        completed_item_obj = self._item_by_id_for_update(completed_item.id)
+      except QueueItem.DoesNotExist:
+        self._currently_processing = False
+        return
+
      completed_item_obj.delete_instance(recursive=True)
      self._currently_processing = False
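Editor's note: update_metrics now fans out to both the optional reporter callback and the new metric queue, so queue depth lands in CloudWatch per queue. A rough wiring sketch (tf is the transaction factory and metric_queue the MetricQueue instance, both as constructed in app.py):

    queue = WorkQueue('dockerfilebuild', tf, metric_queue=metric_queue)
    queue.update_metrics()
    # emits 'Running', 'AvailableNotRunning' and 'Available' counts,
    # each tagged with the dimension {'queue': 'dockerfilebuild'}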
@@ -1,11 +1,13 @@
 import os
 import logging
 import magic
+import urlparse
 
 from uuid import uuid4
 from flask import url_for, request, send_file, make_response, abort
 from flask.views import View
 from _pyio import BufferedReader
+from util import get_app_url
 
 
 logger = logging.getLogger(__name__)
@@ -77,7 +79,9 @@ class DelegateUserfiles(object):
     if url is None:
       with self._app.app_context() as ctx:
         ctx.url_adapter = self._build_url_adapter()
-        return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
+        file_relative_url = url_for(self._handler_name, file_id=file_id)
+        file_url = urlparse.urljoin(get_app_url(self._app.config), file_relative_url)
+        return (file_url, file_id)
 
     return (url, file_id)
@@ -97,7 +101,8 @@ class DelegateUserfiles(object):
     if url is None:
       with self._app.app_context() as ctx:
         ctx.url_adapter = self._build_url_adapter()
-        return url_for(self._handler_name, file_id=file_id, _external=True)
+        file_relative_url = url_for(self._handler_name, file_id=file_id)
+        return urlparse.urljoin(get_app_url(self._app.config), file_relative_url)
 
     return url
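Editor's note: switching from url_for(..., _external=True) to urljoin against the configured app URL means the hostname comes from configuration rather than whatever host the request happened to arrive on. A quick check of the behavior (hostname illustrative):

    import urlparse  # Python 2, as used in this codebase

    urlparse.urljoin('https://quay.example.com', '/userfiles/1234')
    # -> 'https://quay.example.com/userfiles/1234'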
@@ -12,12 +12,42 @@ from auth.permissions import AdministerOrganizationPermission
 from auth.auth_context import get_authenticated_user
 from auth import scopes
 from data import model
-from data.billing import PLANS
+from data.billing import PLANS, get_plan
 
 import features
 import uuid
 import json
 
+def lookup_allowed_private_repos(namespace):
+  """ Returns false if the given namespace has used its allotment of private repositories. """
+  # Lookup the namespace and verify it has a subscription.
+  namespace_user = model.user.get_namespace_user(namespace)
+  if namespace_user is None:
+    return False
+
+  if not namespace_user.stripe_id:
+    return False
+
+  # Ask Stripe for the subscribed plan.
+  # TODO: Can we cache this or make it faster somehow?
+  try:
+    cus = billing.Customer.retrieve(namespace_user.stripe_id)
+  except stripe.APIConnectionError:
+    abort(503, message='Cannot contact Stripe')
+
+  if not cus.subscription:
+    return False
+
+  # Find the number of private repositories used by the namespace and compare it to the
+  # plan subscribed.
+  private_repos = model.user.get_private_repo_count(namespace)
+  current_plan = get_plan(cus.subscription.plan.id)
+  if current_plan is None:
+    return False
+
+  return private_repos < current_plan['privateRepos']
+
+
 def carderror_response(e):
   return {'carderror': e.message}, 402
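Editor's note: lookup_allowed_private_repos answers a single question: does the namespace still have room under its plan? The comparison it encodes, worked through for a hypothetical plan allowing 10 private repositories:

    # private_repos < current_plan['privateRepos']
    #   9 < 10  -> True  (one more private repo is allowed)
    #  10 < 10  -> False (allotment used up; callers raise ExceedsLicenseException)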
@@ -2,6 +2,7 @@
 
 import logging
 import datetime
+import features
 
 from datetime import timedelta
 
@@ -15,7 +16,8 @@ from endpoints.api import (truthy_bool, format_date, nickname, log_action, validate_json_request,
                            require_repo_read, require_repo_write, require_repo_admin,
                            RepositoryParamResource, resource, query_param, parse_args, ApiResource,
                            request_error, require_scope, Unauthorized, NotFound, InvalidRequest,
-                           path_param)
+                           path_param, ExceedsLicenseException)
+from endpoints.api.billing import lookup_allowed_private_repos
 
 from auth.permissions import (ModifyRepositoryPermission, AdministerRepositoryPermission,
                               CreateRepositoryPermission)
@@ -26,6 +28,18 @@ from auth import scopes
 logger = logging.getLogger(__name__)
 
 
+def check_allowed_private_repos(namespace):
+  """ Checks to see if the given namespace has reached its private repository limit. If so,
+      raises an ExceedsLicenseException.
+  """
+  # Not enabled if billing is disabled.
+  if not features.BILLING:
+    return
+
+  if not lookup_allowed_private_repos(namespace):
+    raise ExceedsLicenseException()
+
+
 @resource('/v1/repository')
 class RepositoryList(ApiResource):
   """Operations for creating and listing repositories."""
@@ -87,6 +101,8 @@ class RepositoryList(ApiResource):
       raise request_error(message='Repository already exists')
 
     visibility = req['visibility']
+    if visibility == 'private':
+      check_allowed_private_repos(namespace_name)
 
     repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility)
     repo.description = req['description']
@@ -339,7 +355,11 @@ class RepositoryVisibility(RepositoryParamResource):
     repo = model.repository.get_repository(namespace, repository)
     if repo:
       values = request.get_json()
-      model.repository.set_repository_visibility(repo, values['visibility'])
+      visibility = values['visibility']
+      if visibility == 'private':
+        check_allowed_private_repos(namespace)
+
+      model.repository.set_repository_visibility(repo, visibility)
       log_action('change_repo_visibility', namespace,
                  {'repo': repository, 'visibility': values['visibility']},
                  repo=repo)
@@ -9,7 +9,7 @@ from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if,
                            require_fresh_login, request, validate_json_request, verify_not_prod)
 
 from endpoints.common import common_login
-from app import app, CONFIG_PROVIDER, superusers
+from app import app, config_provider, superusers
 from data import model
 from data.database import configure
 from auth.permissions import SuperUserPermission
@@ -56,13 +56,13 @@ class SuperUserRegistryStatus(ApiResource):
     """ Returns the status of the registry. """
 
     # If there is no conf/stack volume, then report that status.
-    if not CONFIG_PROVIDER.volume_exists():
+    if not config_provider.volume_exists():
      return {
        'status': 'missing-config-dir'
      }
 
     # If there is no config file, we need to setup the database.
-    if not CONFIG_PROVIDER.yaml_exists():
+    if not config_provider.config_exists():
      return {
        'status': 'config-db'
      }
@@ -76,7 +76,7 @@ class SuperUserRegistryStatus(ApiResource):
     # If we have SETUP_COMPLETE, then we're ready to go!
     if app.config.get('SETUP_COMPLETE', False):
       return {
-        'requires_restart': CONFIG_PROVIDER.requires_restart(app.config),
+        'requires_restart': config_provider.requires_restart(app.config),
         'status': 'ready'
       }
@@ -107,10 +107,10 @@ class SuperUserSetupDatabase(ApiResource):
     """ Invokes the alembic upgrade process. """
     # Note: This method is called after the database configured is saved, but before the
     # database has any tables. Therefore, we only allow it to be run in that unique case.
-    if CONFIG_PROVIDER.yaml_exists() and not database_is_valid():
+    if config_provider.config_exists() and not database_is_valid():
       # Note: We need to reconfigure the database here as the config has changed.
       combined = dict(**app.config)
-      combined.update(CONFIG_PROVIDER.get_yaml())
+      combined.update(config_provider.get_config())
 
       configure(combined)
       app.config['DB_URI'] = combined['DB_URI']
@@ -185,7 +185,7 @@ class SuperUserConfig(ApiResource):
   def get(self):
     """ Returns the currently defined configuration, if any. """
     if SuperUserPermission().can():
-      config_object = CONFIG_PROVIDER.get_yaml()
+      config_object = config_provider.get_config()
       return {
        'config': config_object
      }
@@ -196,18 +196,18 @@ class SuperUserConfig(ApiResource):
   @verify_not_prod
   @validate_json_request('UpdateConfig')
   def put(self):
-    """ Updates the config.yaml file. """
+    """ Updates the config override file. """
     # Note: This method is called to set the database configuration before super users exists,
     # so we also allow it to be called if there is no valid registry configuration setup.
-    if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can():
+    if not config_provider.config_exists() or SuperUserPermission().can():
       config_object = request.get_json()['config']
       hostname = request.get_json()['hostname']
 
       # Add any enterprise defaults missing from the config.
       add_enterprise_config_defaults(config_object, app.config['SECRET_KEY'], hostname)
 
-      # Write the configuration changes to the YAML file.
-      CONFIG_PROVIDER.save_yaml(config_object)
+      # Write the configuration changes to the config override file.
+      config_provider.save_config(config_object)
 
       # If the authentication system is not the database, link the superuser account to the
       # the authentication system chosen.
@@ -238,7 +238,7 @@ class SuperUserConfigFile(ApiResource):
 
     if SuperUserPermission().can():
       return {
-        'exists': CONFIG_PROVIDER.volume_file_exists(filename)
+        'exists': config_provider.volume_file_exists(filename)
      }
 
     abort(403)
@@ -252,12 +252,12 @@ class SuperUserConfigFile(ApiResource):
 
     # Note: This method can be called before the configuration exists
     # to upload the database SSL cert.
-    if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can():
+    if not config_provider.config_exists() or SuperUserPermission().can():
      uploaded_file = request.files['file']
      if not uploaded_file:
        abort(400)
 
-      CONFIG_PROVIDER.save_volume_file(filename, uploaded_file)
+      config_provider.save_volume_file(filename, uploaded_file)
      return {
        'status': True
      }
@@ -309,7 +309,7 @@ class SuperUserCreateInitialSuperUser(ApiResource):
     #
     # We do this special security check because at the point this method is called, the database
     # is clean but does not (yet) have any super users for our permissions code to check against.
-    if CONFIG_PROVIDER.yaml_exists() and not database_has_users():
+    if config_provider.config_exists() and not database_has_users():
      data = request.get_json()
      username = data['username']
      password = data['password']
@@ -319,9 +319,9 @@ class SuperUserCreateInitialSuperUser(ApiResource):
      superuser = model.user.create_user(username, password, email, auto_verify=True)
 
      # Add the user to the config.
-      config_object = CONFIG_PROVIDER.get_yaml()
+      config_object = config_provider.get_config()
      config_object['SUPER_USERS'] = [username]
-      CONFIG_PROVIDER.save_yaml(config_object)
+      config_provider.save_config(config_object)
 
      # Update the in-memory config for the new superuser.
      superusers.register_superuser(username)
@@ -369,7 +369,7 @@ class SuperUserConfigValidate(ApiResource):
     # Note: This method is called to validate the database configuration before super users exists,
     # so we also allow it to be called if there is no valid registry configuration setup. Note that
     # this is also safe since this method does not access any information not given in the request.
-    if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can():
+    if not config_provider.config_exists() or SuperUserPermission().can():
      config = request.get_json()['config']
      return validate_service_for_config(service, config, request.get_json().get('password', ''))
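Editor's note: these endpoints now program against a generic provider interface (config_exists, get_config, save_config, plus the volume-file helpers) instead of the YAML-specific CONFIG_PROVIDER global. A minimal sketch of the shape the endpoints rely on — method names are taken from the diff, bodies are illustrative only:

    class InMemoryConfigProvider(object):
        """ Toy provider showing the interface the suconfig endpoints assume. """
        def __init__(self):
            self._config = None

        def config_exists(self):
            return self._config is not None

        def get_config(self):
            return self._config

        def save_config(self, config_object):
            self._config = config_object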
@@ -13,7 +13,7 @@ from app import app, avatar, superusers, authentication
 from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
                            internal_only, require_scope, show_if, parse_args,
                            query_param, abort, require_fresh_login, path_param, verify_not_prod)
-from endpoints.api.logs import get_logs
+from endpoints.api.logs import get_logs, get_aggregate_logs
 from data import model
 from auth.permissions import SuperUserPermission
 from auth import scopes
@@ -83,6 +83,26 @@ class SuperUserSystemLogServices(ApiResource):
     abort(403)
 
 
+@resource('/v1/superuser/aggregatelogs')
+@internal_only
+class SuperUserAggregateLogs(ApiResource):
+  """ Resource for fetching aggregated logs for the current user. """
+  @require_fresh_login
+  @verify_not_prod
+  @nickname('listAllAggregateLogs')
+  @parse_args
+  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  def get(self, args):
+    """ Returns the aggregated logs for the current system. """
+    if SuperUserPermission().can():
+      start_time = args['starttime']
+      end_time = args['endtime']
+
+      return get_aggregate_logs(start_time, end_time)
+
+    abort(403)
+
+
 @resource('/v1/superuser/logs')
 @internal_only
@@ -93,9 +113,9 @@ class SuperUserLogs(ApiResource):
   @verify_not_prod
   @nickname('listAllLogs')
   @parse_args
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('performer', 'Username for which to filter logs.', type=str)
+  @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
+  @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
+  @query_param('page', 'The page number for the logs', type=int, default=1)
   @require_scope(scopes.SUPERUSER)
   def get(self, args):
     """ List the usage logs for the current system. """
@@ -103,7 +123,7 @@ class SuperUserLogs(ApiResource):
      start_time = args['starttime']
      end_time = args['endtime']
 
-      return get_logs(start_time, end_time)
+      return get_logs(start_time, end_time, page=args['page'])
 
     abort(403)
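Editor's note: a sketch of exercising the new aggregated-logs resource from a superuser session. The /api prefix is how Quay conventionally mounts these resources — treat it, the hostname, and the cookie handling as assumptions, since none of them appear in this diff:

    import requests

    # cookies: a logged-in, fresh superuser session (assumed)
    resp = requests.get('https://quay.example.com/api/v1/superuser/aggregatelogs',
                        params={'starttime': '09/01/2015 UTC',
                                'endtime': '09/16/2015 UTC'},
                        cookies=cookies)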
@@ -43,7 +43,7 @@ class ListRepositoryTags(RepositoryParamResource):
 
     specific_tag = args.get('specificTag') or None
 
-    page = min(1, args.get('start', 1))
+    page = max(1, args.get('page', 1))
     limit = min(100, max(1, args.get('limit', 50)))
 
     # Note: We ask for limit+1 here, so we can check to see if there are
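Editor's note: this one-line change is the tag-pagination fix from the changelog (#463). min clamped every requested page down to 1, so callers could never see past the first page; max floors at 1 while honoring the request:

    min(1, 3)  # == 1 -> always page 1, regardless of what was requested
    max(1, 3)  # == 3 -> requested page, floored at 1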
@@ -306,6 +306,7 @@ class User(ApiResource):
     return user_view(user)
 
   @show_if(features.USER_CREATION)
+  @show_if(features.DIRECT_LOGIN)
   @nickname('createNewUser')
   @internal_only
   @validate_json_request('NewUser')
@@ -496,6 +497,7 @@ class ConvertToOrganization(ApiResource):
 
 
 @resource('/v1/signin')
+@show_if(features.DIRECT_LOGIN)
 @internal_only
 class Signin(ApiResource):
   """ Operations for signing in the user. """
@@ -595,6 +597,7 @@ class Signout(ApiResource):
 
 
 @resource('/v1/detachexternal/<servicename>')
+@show_if(features.DIRECT_LOGIN)
 @internal_only
 class DetachExternal(ApiResource):
   """ Resource for detaching an external login. """
@@ -4,7 +4,7 @@ import json
 from flask import make_response
 from app import app
 from util.useremails import CannotSendEmailException
-from util.config.provider import CannotWriteConfigException
+from util.config.provider.baseprovider import CannotWriteConfigException
 from data import model
 
 logger = logging.getLogger(__name__)
@@ -5,7 +5,7 @@ from flask import request, redirect, url_for, Blueprint
 from flask.ext.login import current_user
 
 from endpoints.common import render_page_template, common_login, route_show_if
-from app import app, analytics, get_app_url, github_login, google_login
+from app import app, analytics, get_app_url, github_login, google_login, dex_login
 from data import model
 from util.names import parse_repository_name
 from util.validation import generate_valid_usernames
@@ -14,6 +14,7 @@ from auth.auth import require_session_login
 from peewee import IntegrityError
 
 import features
+from util.security.strictjwt import decode, InvalidTokenError
 
 logger = logging.getLogger(__name__)
 client = app.config['HTTPCLIENT']
@@ -24,7 +25,7 @@ def render_ologin_error(service_name,
   return render_page_template('ologinerror.html', service_name=service_name,
                               error_message=error_message,
                               service_url=get_app_url(),
-                              user_creation=features.USER_CREATION)
+                              user_creation=features.USER_CREATION and features.DIRECT_LOGIN)
 
 
 def get_user(service, token):
@@ -86,7 +87,7 @@ def conduct_oauth_login(service, user_id, username, email, metadata={}):
 
   return render_ologin_error(service_name)
 
-def get_google_username(user_data):
+def get_email_username(user_data):
   username = user_data['email']
   at = username.find('@')
   if at > 0:
@@ -108,7 +109,7 @@ def google_oauth_callback():
   if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
     return render_ologin_error('Google')
 
-  username = get_google_username(user_data)
+  username = get_email_username(user_data)
   metadata = {
     'service_username': user_data['email']
   }
@@ -194,7 +195,7 @@ def google_oauth_attach():
   google_id = user_data['id']
   user_obj = current_user.db_user()
 
-  username = get_google_username(user_data)
+  username = get_email_username(user_data)
   metadata = {
     'service_username': user_data['email']
   }
@@ -236,3 +237,83 @@ def github_oauth_attach():
     return render_ologin_error('GitHub', err)
 
   return redirect(url_for('web.user'))
+
+
+def decode_user_jwt(token, oidc_provider):
+  try:
+    return decode(token, oidc_provider.get_public_key(), algorithms=['RS256'],
+                  audience=oidc_provider.client_id(),
+                  issuer=oidc_provider.issuer)
+  except InvalidTokenError:
+    # Public key may have expired. Try to retrieve an updated public key and use it to decode.
+    return decode(token, oidc_provider.get_public_key(force_refresh=True), algorithms=['RS256'],
+                  audience=oidc_provider.client_id(),
+                  issuer=oidc_provider.issuer)
+
+
+@oauthlogin.route('/dex/callback', methods=['GET', 'POST'])
+@route_show_if(features.DEX_LOGIN)
+def dex_oauth_callback():
+  error = request.values.get('error', None)
+  if error:
+    return render_ologin_error(dex_login.public_title, error)
+
+  code = request.values.get('code')
+  if not code:
+    return render_ologin_error(dex_login.public_title, 'Missing OAuth code')
+
+  token = dex_login.exchange_code_for_token(app.config, client, code, client_auth=True,
+                                            form_encode=True)
+
+  try:
+    payload = decode_user_jwt(token, dex_login)
+  except InvalidTokenError:
+    logger.exception('Exception when decoding returned JWT')
+    return render_ologin_error(dex_login.public_title,
+                               'Could not decode response. Please contact your system administrator about this error.')
+
+  username = get_email_username(payload)
+  metadata = {}
+
+  dex_id = payload['sub']
+  email_address = payload['email']
+
+  if not payload.get('email_verified', False):
+    return render_ologin_error(dex_login.public_title,
+                               'A verified e-mail address is required for login. Please verify your ' +
+                               'e-mail address in %s and try again.' % dex_login.public_title)
+
+  return conduct_oauth_login(dex_login, dex_id, username, email_address,
+                             metadata=metadata)
+
+
+@oauthlogin.route('/dex/callback/attach', methods=['GET', 'POST'])
+@route_show_if(features.DEX_LOGIN)
+@require_session_login
+def dex_oauth_attach():
+  code = request.args.get('code')
+  token = dex_login.exchange_code_for_token(app.config, client, code, redirect_suffix='/attach',
+                                            client_auth=True, form_encode=True)
+  if not token:
+    return render_ologin_error(dex_login.public_title)
+
+  try:
+    payload = decode_user_jwt(token, dex_login)
+  except InvalidTokenError:
+    logger.exception('Exception when decoding returned JWT')
+    return render_ologin_error(dex_login.public_title,
+                               'Could not decode response. Please contact your system administrator about this error.')
+
+  user_obj = current_user.db_user()
+  dex_id = payload['sub']
+  metadata = {}
+
+  try:
+    model.user.attach_federated_login(user_obj, 'dex', dex_id, metadata=metadata)
+  except IntegrityError:
+    err = '%s account is already attached to a %s account' % (dex_login.public_title,
+                                                              app.config['REGISTRY_TITLE_SHORT'])
+    return render_ologin_error(dex_login.public_title, err)
+
+  return redirect(url_for('web.user'))
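Editor's note: enabling the new Dex login path requires the feature flag plus an OAuth stanza for DexOAuthConfig. A hedged sketch in Python terms — the exact key names DexOAuthConfig reads are not shown in this diff, so treat them as placeholders:

    app.config['FEATURE_DEX_LOGIN'] = True
    app.config['DEX_LOGIN_CONFIG'] = {
        'CLIENT_ID': 'my-client-id',          # placeholder
        'CLIENT_SECRET': 'my-client-secret',  # placeholder
    }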
@@ -1,4 +1,5 @@
 import logging
+import random
 
 from app import analytics, app, userevents
 from data import model
@@ -7,7 +8,7 @@ from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
 
 logger = logging.getLogger(__name__)
 
-def track_and_log(event_name, repo, **kwargs):
+def track_and_log(event_name, repo, analytics_name=None, analytics_sample=1, **kwargs):
   repository = repo.name
   namespace = repo.namespace_user.username
   metadata = {
@@ -62,8 +63,11 @@ def track_and_log(event_name, repo, **kwargs):
     event.publish_event_data('docker-cli', user_event_data)
 
   # Save the action to mixpanel.
-  logger.debug('Logging the %s to Mixpanel', event_name)
-  analytics.track(analytics_id, event_name, extra_params)
+  if random.random() < analytics_sample:
+    if analytics_name is None:
+      analytics_name = event_name
+
+    logger.debug('Logging the %s to Mixpanel', analytics_name)
+    analytics.track(analytics_id, analytics_name, extra_params)
 
   # Log the action to the database.
   logger.debug('Logging the %s to logs system', event_name)
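Editor's note: analytics_sample lets high-volume events be down-sampled before they reach Mixpanel; at a rate of 0.01, roughly 1 in 100 calls is recorded, so each recorded event stands in for about 100 real ones. That is exactly how the next hunk uses it for repository pulls:

    track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01)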
@@ -270,7 +270,7 @@ def get_repository_images(namespace, repository):
     resp = make_response(json.dumps([]), 200)
     resp.mimetype = 'application/json'
 
-    track_and_log('pull_repo', repo)
+    track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01)
     return resp
 
   abort(403)
@@ -9,7 +9,7 @@ from health.healthcheck import get_healthchecker
 
 from data import model
 from data.database import db
-from app import app, billing as stripe, build_logs, avatar, signer, log_archive
+from app import app, billing as stripe, build_logs, avatar, signer, log_archive, config_provider
 from auth.auth import require_session_login, process_oauth
 from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
                               SuperUserPermission, AdministerRepositoryPermission,
@@ -209,7 +209,7 @@ def v1():
 @web.route('/health/instance', methods=['GET'])
 @no_cache
 def instance_health():
-  checker = get_healthchecker(app)
+  checker = get_healthchecker(app, config_provider)
   (data, status_code) = checker.check_instance()
   response = jsonify(dict(data=data, status_code=status_code))
   response.status_code = status_code
@@ -221,7 +221,7 @@ def instance_health():
 @web.route('/health/endtoend', methods=['GET'])
 @no_cache
 def endtoend_health():
-  checker = get_healthchecker(app)
+  checker = get_healthchecker(app, config_provider)
   (data, status_code) = checker.check_endtoend()
   response = jsonify(dict(data=data, status_code=status_code))
   response.status_code = status_code
@@ -2,7 +2,7 @@ import urllib2
 import re
 import os
 
-LOCAL_DIRECTORY = 'static/ldn/'
+LOCAL_DIRECTORY = '/static/ldn/'
 
 EXTERNAL_JS = [
   'code.jquery.com/jquery.js',
@@ -4,14 +4,15 @@ from health.services import check_all_services
 
 logger = logging.getLogger(__name__)
 
-def get_healthchecker(app):
+def get_healthchecker(app, config_provider):
   """ Returns a HealthCheck instance for the given app. """
-  return HealthCheck.get_checker(app)
+  return HealthCheck.get_checker(app, config_provider)
 
 
 class HealthCheck(object):
-  def __init__(self, app):
+  def __init__(self, app, config_provider):
     self.app = app
+    self.config_provider = config_provider
 
   def check_instance(self):
     """
@@ -52,20 +53,21 @@ class HealthCheck(object):
     data = {
       'services': service_statuses,
       'notes': notes,
-      'is_testing': self.app.config['TESTING']
+      'is_testing': self.app.config['TESTING'],
+      'config_provider': self.config_provider.provider_id
     }
 
     return (data, 200 if is_healthy else 503)
 
   @classmethod
-  def get_checker(cls, app):
+  def get_checker(cls, app, config_provider):
     name = app.config['HEALTH_CHECKER'][0]
     parameters = app.config['HEALTH_CHECKER'][1] or {}
 
     for subc in cls.__subclasses__():
       if subc.check_name() == name:
-        return subc(app, **parameters)
+        return subc(app, config_provider, **parameters)
 
     raise Exception('Unknown health check with name %s' % name)
@@ -77,8 +79,8 @@ class LocalHealthCheck(HealthCheck):
 
 
 class ProductionHealthCheck(HealthCheck):
-  def __init__(self, app, access_key, secret_key, db_instance='quay'):
-    super(ProductionHealthCheck, self).__init__(app)
+  def __init__(self, app, config_provider, access_key, secret_key, db_instance='quay'):
+    super(ProductionHealthCheck, self).__init__(app, config_provider)
     self.access_key = access_key
     self.secret_key = secret_key
     self.db_instance = db_instance
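Editor's note: the health checker now carries the config provider so that instance health can report which provider is active. A sketch of exercising it directly, mirroring what the /health/instance endpoint does (app and config_provider as in app.py):

    from health.healthcheck import get_healthchecker

    checker = get_healthchecker(app, config_provider)
    data, status_code = checker.check_instance()
    # data now includes a 'config_provider' key alongside 'services' and 'notes'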
@@ -226,6 +226,7 @@ def initialize_database():
   LoginService.create(name='ldap')
   LoginService.create(name='jwtauthn')
   LoginService.create(name='keystone')
+  LoginService.create(name='dex')
 
   BuildTriggerService.create(name='github')
   BuildTriggerService.create(name='custom-git')
release.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+import os
+
+
+_GIT_HEAD_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'GIT_HEAD')
+
+SERVICE = 'quay'
+GIT_HEAD = None
+REGION = os.environ.get('QUAY_REGION')
+
+
+# Load git head if available
+if os.path.isfile(_GIT_HEAD_PATH):
+  with open(_GIT_HEAD_PATH) as f:
+    GIT_HEAD = f.read().strip()
+
+
+def main():
+  from app import app
+  from data.model.release import set_region_release
+
+  if REGION and GIT_HEAD:
+    set_region_release(SERVICE, REGION, GIT_HEAD)
+
+
+if __name__ == '__main__':
+  main()
|
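set_region_release only appears at its call site here; a hedged, peewee-style sketch of the upsert it presumably performs (the RegionRelease model and its fields are assumptions, not the actual schema):

    def set_region_release(service_name, region, version):
      # Record the revision currently serving this service/region pair.
      release, created = RegionRelease.get_or_create(service=service_name, region=region,
                                                     defaults={'version': version})
      if not created and release.version != version:
        release.version = version
        release.save()
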
@ -55,3 +55,4 @@ pyjwt
toposort
pyjwkest
rfc3987
pyjwkest

@ -59,6 +59,7 @@ pycrypto==2.6.1
pygpgme==0.3
pyjwkest==1.0.3
PyJWT==1.4.0
pyjwkest==1.0.1
PyMySQL==0.6.6
pyOpenSSL==0.15.1
PyPDF2==1.24

@ -29,6 +29,18 @@
  margin-top: -7px !important;
}

.repo-panel-settings-element .repo-count-checker {
  margin-top: 20px;
}

.repo-panel-settings-element .co-alert {
  margin-bottom: 0px;
}

.repo-panel-settings-element .panel-body {
  border-bottom: 0px;
}

@media (max-width: 767px) {
  .repo-panel-settings-element .delete-btn {
    float: none;

@ -1,3 +1,8 @@
.external-login-button i.fa {
.external-login-button i.fa,
.external-login-button img {
  margin-right: 4px;
  width: 24px;
  font-size: 18px;
  text-align: center;
  vertical-align: middle;
}

@ -6,6 +6,9 @@
  font-size: 18px;
}

.external-logins-manager .external-auth-provider td:first-child i.fa {
.external-logins-manager .external-auth-provider-title i.fa,
.external-logins-manager .external-auth-provider-title img {
  margin-right: 6px;
  width: 24px;
  text-align: center;
}

26 static/css/directives/ui/repo-count-checker.css Normal file

@ -0,0 +1,26 @@
.repo-count-checker .btn {
  margin-top: 0px !important;
}

.repo-count-checker .co-alert {
  margin-bottom: 6px !important;
  padding-right: 120px;
}

.repo-count-checker .co-alert .btn {
  position: absolute;
  top: 10px;
  right: 10px;
}

@media (max-width: 767px) {
  .repo-count-checker .co-alert {
    padding-right: 10px;
  }

  .repo-count-checker .co-alert .btn {
    position: relative;
    margin-top: 20px;
    margin-bottom: 10px;
  }
}

@ -4,4 +4,14 @@

.signup-form-element .co-alert {
  color: black;
}

.signup-form-element .single-sign-on a {
  font-size: 24px;
}

.signup-form-element .single-sign-on .external-login-button i.fa,
.signup-form-element .single-sign-on .external-login-button img {
  width: 30px;
  font-size: 24px;
}

@ -232,6 +232,9 @@
      ng-selected="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name] == value">{{ value }}</option>
  </select>
</div>
<div class="help-text" ng-if="field.help_text">
  {{ field.help_text }}
</div>
<div class="help-text" ng-if="field.help_url">
  See <a href="{{ field.help_url }}" target="_blank">Documentation</a> for more information
</div>

@ -1,24 +1,14 @@
<span class="external-login-button-element">
  <span ng-if="provider == 'github'">
    <a href="javascript:void(0)" ng-class="isLink ? '' : 'btn btn-primary btn-block'" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px" ng-disabled="signingIn">
      <i class="fa fa-github fa-lg"></i>
      <span ng-if="action != 'attach'">
        Sign In with GitHub
        <span ng-if="isEnterprise('github')">Enterprise</span>
      </span>
      <span ng-if="action == 'attach'">
        Attach to GitHub
        <span ng-if="isEnterprise('github')">Enterprise</span>
        Account
      </span>
    </a>
  </span>

  <span ng-if="provider == 'google'">
    <a href="javascript:void(0)" ng-class="isLink ? '' : 'btn btn-primary btn-block'" quay-require="['GOOGLE_LOGIN']" ng-click="startSignin('google')" ng-disabled="signingIn">
      <i class="fa fa-google fa-lg"></i>
      <span ng-if="action != 'attach'">Sign In with Google</span>
      <span ng-if="action == 'attach'">Attach to Google Account</span>
    </a>
  </span>
  <a href="javascript:void(0)" ng-class="isLink ? '' : 'btn btn-primary btn-block'"
     ng-if="providerInfo.enabled" ng-click="startSignin()" style="margin-bottom: 10px"
     ng-disabled="signingIn">
    <img ng-src="{{ providerInfo.icon().url }}" ng-if="providerInfo.icon().url">
    <i class="fa" ng-class="providerInfo.icon().icon" ng-if="providerInfo.icon().icon"></i>
    <span ng-if="action != 'attach'">
      Sign In with {{ providerInfo.title() }}
    </span>
    <span ng-if="action == 'attach'">
      Attach to {{ providerInfo.title() }}
    </span>
  </a>
</span>

@ -9,52 +9,35 @@
<thead>
  <td>Provider</td>
  <td>Account Status</td>
  <td>Attach/Detach</td>
  <td quay-show="Features.DIRECT_LOGIN">Attach/Detach</td>
</thead>

<!-- GitHub Login -->
<tr class="external-auth-provider" ng-show="Features.GITHUB_LOGIN">
  <td>
    <i class="fa fa-github"></i> GitHub <span ng-if="KeyService.isEnterprise('github')">Enterprise</span>
<tr class="external-auth-provider" ng-repeat="provider in EXTERNAL_LOGINS">
  <td class="external-auth-provider-title">
    <img ng-src="{{ provider.icon().url }}" ng-if="provider.icon().url">
    <i class="fa" ng-class="provider.icon().icon" ng-if="provider.icon().icon"></i>
    {{ provider.title() }}
  </td>
  <td>
    <span ng-if="hasGithubLogin">
      Attached to GitHub <span ng-if="KeyService.isEnterprise('github')">Enterprise</span> account <b><a href="{{githubEndpoint}}{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
    <span ng-if="externalLoginInfo[provider.id]">
      Attached to {{ provider.title() }} account
      <b ng-if="provider.hasUserInfo">
        <a ng-href="{{ provider.getUserInfo(externalLoginInfo[provider.id]).endpoint }}" target="_blank">
          {{ provider.getUserInfo(externalLoginInfo[provider.id]).username }}
        </a>
      </b>
    </span>

    <span class="empty" ng-if="!hasGithubLogin">
      (Not attached to GitHub<span ng-if="KeyService.isEnterprise('github')"> Enterprise</span>)
    <span class="empty" ng-if="!externalLoginInfo[provider.id]">
      Not attached to {{ provider.title() }}
    </span>
  </td>

  <td>
    <span class="external-login-button" provider="github" action="attach" is-link="true"
          ng-if="!hasGithubLogin"></span>
    <a href="javascript:void(0)" ng-if="hasGithubLogin"
       ng-click="detachExternalLogin('github')">Detach Account</a>
  </td>
</tr>

<!-- Google Login -->
<tr class="external-auth-provider" ng-show="Features.GOOGLE_LOGIN">
  <td>
    <i class="fa fa-google"></i> Google Account
  </td>
  <td>
    <span ng-if="hasGoogleLogin">
      Attached to Google account <b>{{ googleLogin }}</b>
    </span>

    <span class="empty" ng-if="!hasGoogleLogin">
      (Not attached to a Google account)
    </span>
  </td>

  <td>
    <span class="external-login-button" provider="google" action="attach" is-link="true"
          ng-if="!hasGoogleLogin"></span>
    <a href="javascript:void(0)" ng-if="hasGoogleLogin"
       ng-click="detachExternalLogin('google')">Detach Account</a>
    <span class="external-login-button" provider="{{ provider.id }}" action="attach" is-link="true"
          ng-if="!externalLoginInfo[provider.id]"></span>
    <a href="javascript:void(0)" ng-if="externalLoginInfo[provider.id] && Features.DIRECT_LOGIN"
       ng-click="detachExternalLogin(provider.id)">Detach Account</a>
  </td>
</tr>
</table>

@ -42,7 +42,8 @@
  </a>
</li>
<li ng-switch-default>
  <a class="user-view" href="/signin/" target="{{ appLinkTarget() }}">Sign in</a>
  <a class="user-view" href="/signin/" target="{{ appLinkTarget() }}" ng-if="!externalSigninUrl">Sign in</a>
  <a class="user-view" ng-href="{{ externalSigninUrl }}" ng-if="externalSigninUrl">Sign in</a>
</li>
</ul>

@ -133,7 +134,8 @@
  </ul>
</li>
<li ng-switch-default>
  <a class="user-view" href="/signin/" target="{{ appLinkTarget() }}">Sign in</a>
  <a class="user-view" href="/signin/" target="{{ appLinkTarget() }}" ng-if="!externalSigninUrl">Sign in</a>
  <a class="user-view" ng-href="{{ externalSigninUrl }}" ng-if="externalSigninUrl">Sign in</a>
</li>
</ul>
</div><!-- /.navbar-collapse -->

24 static/directives/repo-count-checker.html Normal file

@ -0,0 +1,24 @@
<div class="repo-count-checker-element">
  <div class="required-plan" ng-show="isEnabled && planRequired && planRequired.title">
    <div class="co-alert co-alert-info">
      In order to make this repository private under
      <strong ng-if="isUserNamespace">your personal namespace</strong>
      <strong ng-if="!isUserNamespace">organization <b>{{ repo.namespace }}</b></strong>, you will need to upgrade your plan to
      <b style="border-bottom: 1px dotted black;" data-html="true"
         data-title="{{ '<b>' + planRequired.title + '</b><br>' + planRequired.privateRepos + ' private repositories' }}" bs-tooltip>
        {{ planRequired.title }}
      </b>.
      This will cost $<span>{{ planRequired.price / 100 }}</span>/month.
      <a class="btn btn-primary" ng-click="upgradePlan()" ng-show="!planChanging">Upgrade now</a>
    </div>
    <span ng-if="isUserNamespace && user.organizations.length == 1" style="margin-left: 6px; display: inline-block;">or did you mean to have this repository under the <b>{{ user.organizations[0].name }}</b> namespace?</span>
    <div class="cor-loader-inline" ng-show="planChanging"></div>
  </div>
  <div class="cor-loader-inline" ng-show="isEnabled && checkingPlan"></div>
  <div class="required-plan" ng-show="isEnabled && planRequired && !isUserNamespace && !planRequired.title">
    <div class="co-alert co-alert-warning">
      This organization has reached its private repository limit. Please contact your administrator.
    </div>
  </div>
</div>
</div>

@ -22,12 +22,10 @@
<div class="repository-events-table" repository="repository"
     is-enabled="isEnabled"></div>

<!-- Other settings -->
<!-- Visibility settings -->
<div class="co-panel">
  <div class="co-panel-heading"><i class="fa fa-gears"></i> Repository Settings</div>
  <div class="co-panel-heading"><i class="fa fa-unlock-alt"></i> Repository Visibility</div>
  <div class="cor-loader" ng-show="!repository"></div>

  <div ng-show="repository">
    <!-- Public/Private -->
    <div class="panel-body panel-section lock-section" ng-if="!repository.is_public">

@ -44,12 +42,23 @@

<div>This repository is currently <b>public</b> and is visible to all users, and may be pulled by all users.</div>

<button class="btn btn-default" ng-click="askChangeAccess('private')">
<button class="btn btn-default" ng-click="askChangeAccess('private')" ng-show="!planRequired">
  <i class="fa fa-lock"></i>Make Private
</button>
</div>

<!-- Delete Repository -->
<!-- Payment -->
<div class="repo-count-checker" namespace="repository.namespace" plan-required="planRequired"
     is-enabled="repository.is_public">
</div>
</div>
</div>
</div>

<!-- Delete repository -->
<div class="co-panel">
  <div class="co-panel-heading"><i class="fa fa-trash"></i> Delete Repository</div>
  <div class="cor-loader" ng-show="!repository"></div>
  <div ng-show="repository">
    <div class="panel-body panel-section">
      <div class="co-alert co-alert-danger">
        <button class="btn btn-danger delete-btn" ng-click="askDelete()">

@ -60,10 +69,16 @@
Deleting a repository <b>cannot be undone</b>. Here be dragons!
</div>
</div>
</div>
</div>

<!-- Build Status Badge -->
<div class="panel-body panel-section hidden-xs">

<!-- Build Status Badge -->
<div class="co-panel hidden-xs">
  <div class="co-panel-heading"><i class="fa fa-tasks"></i> Build Status Badge</div>
  <div class="cor-loader" ng-show="!repository"></div>
  <div ng-show="repository">
    <div class="panel-body panel-section">
      <!-- Token Info Banner -->
      <div class="co-alert co-alert-info" ng-if="!repository.is_public">
        Note: This badge contains a token so the badge can be seen by external users. The token does not grant any other access and is safe to share!

@ -1,25 +1,28 @@
<div class="signin-form-element" style="position: relative">
  <span class="cor-loader" ng-show="signingIn"></span>

  <form class="form-signin" ng-submit="signin();" ng-show="!signingIn">
    <input type="text" class="form-control input-lg" name="username"
           placeholder="Username or E-mail Address" ng-model="user.username" autofocus>
    <input type="password" class="form-control input-lg" name="password"
           placeholder="Password" ng-model="user.password">
    <div quay-show="Features.DIRECT_LOGIN">
      <input type="text" class="form-control input-lg" name="username"
             placeholder="Username or E-mail Address" ng-model="user.username" autofocus>
      <input type="password" class="form-control input-lg" name="password"
             placeholder="Password" ng-model="user.password">
    </div>

    <div class="co-alert co-alert-warning" ng-show="tryAgainSoon > 0">
      Too many attempts have been made to login. Please try again in {{ tryAgainSoon }} second<span ng-if="tryAgainSoon != 1">s</span>.
    </div>

    <span ng-show="tryAgainSoon == 0">
      <button class="btn btn-lg btn-primary btn-block" type="submit">Sign In</button>
      <button class="btn btn-lg btn-primary btn-block" type="submit" quay-show="Features.DIRECT_LOGIN">Sign In</button>

      <span class="social-alternate" quay-show="Features.GITHUB_LOGIN || Features.GOOGLE_LOGIN">
      <span class="social-alternate" quay-show="EXTERNAL_LOGINS.length && Features.DIRECT_LOGIN">
        <i class="fa fa-circle"></i>
        <span class="inner-text">OR</span>
      </span>

      <div class="external-login-button" provider="github" redirect-url="redirectUrl" sign-in-started="markStarted()"></div>
      <div class="external-login-button" provider="google" redirect-url="redirectUrl" sign-in-started="markStarted()"></div>
      <div class="external-login-button" provider="{{ provider.id }}" redirect-url="redirectUrl"
           sign-in-started="markStarted()" ng-repeat="provider in EXTERNAL_LOGINS"></div>
    </span>
  </form>

@ -1,32 +1,39 @@
<div class="signup-form-element"
     quay-show="Features.USER_CREATION && Config.AUTHENTICATION_TYPE == 'Database'">
  <form class="form-signup" name="signupForm" ng-submit="register()" ng-show="!awaitingConfirmation && !registering">
    <input type="text" class="form-control" placeholder="Create a username" name="username" ng-model="newUser.username" autofocus required ng-pattern="/^[a-z0-9_]{4,30}$/">
    <input type="email" class="form-control" placeholder="Email address" ng-model="newUser.email" required>
    <input type="password" class="form-control" placeholder="Create a password" ng-model="newUser.password" required
           ng-pattern="/^.{8,}$/">
    <input type="password" class="form-control" placeholder="Verify your password" ng-model="newUser.repeatPassword"
           match="newUser.password" required
           ng-pattern="/^.{8,}$/">
    <div class="form-group signin-buttons">
      <button id="signupButton"
              class="btn btn-primary btn-block landing-signup-button" ng-disabled="signupForm.$invalid" type="submit"
              analytics-on analytics-event="register">
        <span quay-show="Features.BILLING">Sign Up for Free!</span>
        <span quay-show="!Features.BILLING">Sign Up</span>
      </button>
      <span class="social-alternate" quay-require="['GITHUB_LOGIN']">
        <i class="fa fa-circle"></i>
        <span class="inner-text">OR</span>
      </span>
      <div class="external-login-button" provider="github"></div>
      <div class="external-login-button" provider="google"></div>
<div class="signup-form-element">
  <div quay-show="singleSigninUrl" class="single-sign-on">
    <div class="external-login-button" provider="{{ EXTERNAL_LOGINS[0].id }}"></div>
  </div>

  <div quay-show="Features.USER_CREATION && Config.AUTHENTICATION_TYPE == 'Database' && !singleSigninUrl">
    <form class="form-signup" name="signupForm" ng-submit="register()" ng-show="!awaitingConfirmation && !registering">
      <div quay-show="Features.DIRECT_LOGIN">
        <input type="text" class="form-control" placeholder="Create a username" name="username" ng-model="newUser.username" autofocus required ng-pattern="/^[a-z0-9_]{4,30}$/">
        <input type="email" class="form-control" placeholder="Email address" ng-model="newUser.email" required>
        <input type="password" class="form-control" placeholder="Create a password" ng-model="newUser.password" required
               ng-pattern="/^.{8,}$/">
        <input type="password" class="form-control" placeholder="Verify your password" ng-model="newUser.repeatPassword"
               match="newUser.password" required
               ng-pattern="/^.{8,}$/">
      </div>
      <div class="form-group signin-buttons">
        <button id="signupButton"
                class="btn btn-primary btn-block landing-signup-button" ng-disabled="signupForm.$invalid" type="submit"
                analytics-on analytics-event="register"
                quay-show="Features.DIRECT_LOGIN">
          <span quay-show="Features.BILLING">Sign Up for Free!</span>
          <span quay-show="!Features.BILLING">Sign Up</span>
        </button>
        <span class="social-alternate" quay-show="Features.DIRECT_LOGIN && EXTERNAL_LOGINS.length">
          <i class="fa fa-circle"></i>
          <span class="inner-text">OR</span>
        </span>
        <div class="external-login-button" provider="{{ provider.id }}" ng-repeat="provider in EXTERNAL_LOGINS"></div>
      </div>
    </form>
    <div class="cor-loader" ng-show="registering"></div>
    <div class="co-alert co-alert-info"
         ng-show="awaitingConfirmation && hideRegisteredMessage != 'true'">
      Thank you for registering! We have sent you an activation email.
      You must <b>verify your email address</b> before you can continue.
    </div>
  </form>
  <div class="cor-loader" ng-show="registering"></div>
  <div class="co-alert co-alert-info"
       ng-show="awaitingConfirmation && hideRegisteredMessage != 'true'">
    Thank you for registering! We have sent you an activation email.
    You must <b>verify your email address</b> before you can continue.
  </div>
</div>

@ -15,7 +15,7 @@
  </div>
</div>
<div class="panel panel-default"
     quay-show="Features.USER_CREATION && Config.AUTHENTICATION_TYPE == 'Database'">
     quay-show="Features.USER_CREATION && Config.AUTHENTICATION_TYPE == 'Database' && Features.DIRECT_LOGIN">
  <div class="panel-heading">
    <h6 class="panel-title accordion-title">
      <a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" data-target="#collapseRegister">

@ -30,7 +30,7 @@
  </div>
</div>
<div class="panel panel-default"
     quay-show="Features.MAILING && Config.AUTHENTICATION_TYPE == 'Database'">
     quay-show="Features.MAILING && Config.AUTHENTICATION_TYPE == 'Database' && Features.DIRECT_LOGIN">
  <div class="panel-heading">
    <h6 class="panel-title accordion-title">
      <a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" data-target="#collapseForgot">

@ -1,5 +1,5 @@
var TEAM_PATTERN = '^[a-zA-Z][a-zA-Z0-9]+$';
var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9]{3,29}$';
var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9_]{3,29}$';
var USER_PATTERN = '^[a-z0-9_]{4,30}$';

// Define the pages module.

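The relaxed robot pattern now admits underscores after the leading letter while keeping the 4-30 character length. Checked from Python for illustration (the example names are made up):

    import re

    ROBOT_PATTERN = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]{3,29}$')

    assert ROBOT_PATTERN.match('build_bot')   # underscores now allowed
    assert not ROBOT_PATTERN.match('_bot')    # must still start with a letter
    assert not ROBOT_PATTERN.match('bot')     # total length is 4 to 30 characters
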
@ -90,14 +90,23 @@ angular.module("core-config-setup", ['angularFileUpload'])

'SwiftStorage': [
  {'name': 'auth_version', 'title': 'Swift Version', 'kind': 'option', 'values': [1, 2]},
  {'name': 'auth_url', 'title': 'Swift Auth URL', 'placeholder': '', 'kind': 'text'},
  {'name': 'swift_container', 'title': 'Swift Container Name', 'placeholder': 'mycontainer', 'kind': 'text'},
  {'name': 'auth_url', 'title': 'Swift Auth URL', 'placeholder': 'http://swiftdomain/auth/v1.0', 'kind': 'text'},
  {'name': 'swift_container', 'title': 'Swift Container Name', 'placeholder': 'mycontainer', 'kind': 'text',
   'help_text': 'The swift container for all objects. Must already exist inside Swift.'},

  {'name': 'storage_path', 'title': 'Storage Path', 'placeholder': '/path/inside/container', 'kind': 'text'},

  {'name': 'swift_user', 'title': 'Username', 'placeholder': 'accesskeyhere', 'kind': 'text'},
  {'name': 'swift_password', 'title': 'Password/Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
  {'name': 'swift_user', 'title': 'Username', 'placeholder': 'accesskeyhere', 'kind': 'text',
   'help_text': 'Note: For Swift V1, this is "username:password" (-U on the CLI).'},
  {'name': 'swift_password', 'title': 'Key/Password', 'placeholder': 'secretkeyhere', 'kind': 'text',
   'help_text': 'Note: For Swift V1, this is the API token (-K on the CLI).'},

  {'name': 'ca_cert_path', 'title': 'CA Cert Filename', 'placeholder': 'conf/stack/swift.cert', 'kind': 'text', 'optional': true},

  {'name': 'temp_url_key', 'title': 'Temp URL Key (optional)', 'placeholder': 'key-here', 'kind': 'text', 'optional': true,
   'help_url': 'https://coreos.com/products/enterprise-registry/docs/latest/swift-temp-url.html',
   'help_text': 'If enabled, will allow for faster pulls directly from Swift.'},

  {'name': 'os_options', 'title': 'OS Options', 'kind': 'map',
   'keys': ['tenant_id', 'auth_token', 'service_type', 'endpoint_type', 'tenant_name', 'object_storage_url', 'region_name']}
]

@ -72,17 +72,16 @@ angular.module('quay').directive('buildLogsView', function () {
// Process the logs we've received.
$scope.logStartIndex = processLogs(logsData['logs'], logsData['start'], logsData['total']);

// If the build status is an error, open the last two log entries.
// If the build status is an error, automatically open the last command run.
var currentBuild = $scope.currentBuild;
if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {
  var openLogEntries = function(entry) {
    if (entry.logs) {
      entry.logs.setVisible(true);
if (currentBuild['phase'] == 'error') {
  for (var i = $scope.logEntries.length - 1; i >= 0; i--) {
    var currentEntry = $scope.logEntries[i];
    if (currentEntry['type'] == 'command') {
      currentEntry['logs'].setVisible(true);
      break;
    }
  };

  openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
  openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
  }
}

// If the build phase is an error or a complete, then we mark the channel

@ -15,14 +15,14 @@ angular.module('quay').directive('externalLoginButton', function () {
  'provider': '@provider',
  'action': '@action'
},
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) {
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, ExternalLoginService) {
  $scope.signingIn = false;
  $scope.isEnterprise = KeyService.isEnterprise;
  $scope.providerInfo = ExternalLoginService.getProvider($scope.provider);

  $scope.startSignin = function(service) {
    $scope.signInStarted({'service': service});
  $scope.startSignin = function() {
    $scope.signInStarted({'service': $scope.provider});

    var url = KeyService.getExternalLoginUrl(service, $scope.action || 'login');
    var url = ExternalLoginService.getLoginUrl($scope.provider, $scope.action || 'login');

    // Save the redirect URL in a cookie so that we can redirect back after the service returns to us.
    var redirectURL = $scope.redirectUrl || window.location.toString();

@ -11,41 +11,37 @@ angular.module('quay').directive('externalLoginsManager', function () {
scope: {
  'user': '=user',
},
controller: function($scope, $element, ApiService, UserService, Features, Config, KeyService) {
controller: function($scope, $element, ApiService, UserService, Features, Config, KeyService,
                     ExternalLoginService) {
  $scope.Features = Features;
  $scope.Config = Config;
  $scope.KeyService = KeyService;

  $scope.EXTERNAL_LOGINS = ExternalLoginService.EXTERNAL_LOGINS;
  $scope.externalLoginInfo = {};
  $scope.hasSingleSignin = ExternalLoginService.hasSingleSignin();

  UserService.updateUserIn($scope, function(user) {
    $scope.cuser = jQuery.extend({}, user);
    $scope.externalLoginInfo = {};

    if ($scope.cuser.logins) {
      for (var i = 0; i < $scope.cuser.logins.length; i++) {
        var login = $scope.cuser.logins[i];
        login.metadata = login.metadata || {};

        if (login.service == 'github') {
          $scope.hasGithubLogin = true;
          $scope.githubLogin = login.metadata['service_username'];
          $scope.githubEndpoint = KeyService['githubEndpoint'];
        }

        if (login.service == 'google') {
          $scope.hasGoogleLogin = true;
          $scope.googleLogin = login.metadata['service_username'];
        }
        $scope.externalLoginInfo[login.service] = login;
      }
    }
  });

  $scope.detachExternalLogin = function(kind) {
    if (!Features.DIRECT_LOGIN) { return; }

    var params = {
      'servicename': kind
    };

    ApiService.detachExternalLogin(null, params).then(function() {
      $scope.hasGithubLogin = false;
      $scope.hasGoogleLogin = false;
      UserService.load();
    }, ApiService.errorDisplay('Could not detach service'));
  };

@ -14,7 +14,10 @@ angular.module('quay').directive('headerBar', function () {
},
controller: function($rootScope, $scope, $element, $location, $timeout, hotkeys, UserService,
                     PlanService, ApiService, NotificationService, Config, CreateService, Features,
                     DocumentationService) {
                     DocumentationService, ExternalLoginService) {

  $scope.externalSigninUrl = ExternalLoginService.getSingleSigninUrl();

  var hotkeysAdded = false;
  var userUpdated = function(cUser) {
    $scope.searchingAllowed = Features.ANONYMOUS_ACCESS || !cUser.anonymous;

81 static/js/directives/ui/repo-count-checker.js Normal file

@ -0,0 +1,81 @@
/**
 * An element which displays a message when the maximum number of private repositories has been
 * reached.
 */
angular.module('quay').directive('repoCountChecker', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/repo-count-checker.html',
    replace: false,
    transclude: true,
    restrict: 'C',
    scope: {
      'namespace': '=namespace',
      'planRequired': '=planRequired',
      'isEnabled': '=isEnabled'
    },
    controller: function($scope, $element, ApiService, UserService, PlanService, Features) {
      var refresh = function() {
        $scope.planRequired = null;

        if (!$scope.isEnabled || !$scope.namespace || !Features.BILLING) {
          return;
        }

        $scope.checkingPlan = true;
        $scope.isUserNamespace = UserService.isUserNamespace($scope.namespace);

        ApiService.getPrivateAllowed($scope.isUserNamespace ? null : $scope.namespace).then(function(resp) {
          $scope.checkingPlan = false;

          if (resp['privateAllowed']) {
            $scope.planRequired = null;
            return;
          }

          if (resp['privateCount'] == null) {
            // Organization where we are not the admin.
            $scope.planRequired = {};
            return;
          }

          // Otherwise, lookup the matching plan.
          PlanService.getMinimumPlan(resp['privateCount'] + 1, !$scope.isUserNamespace, function(minimum) {
            $scope.planRequired = minimum;
          });
        });
      };

      var subscribedToPlan = function(sub) {
        $scope.planChanging = false;
        $scope.subscription = sub;

        PlanService.getPlan(sub.plan, function(subscribedPlan) {
          $scope.subscribedPlan = subscribedPlan;
          refresh();
        });
      };

      $scope.$watch('namespace', refresh);
      $scope.$watch('isEnabled', refresh);

      $scope.upgradePlan = function() {
        var callbacks = {
          'started': function() { $scope.planChanging = true; },
          'opened': function() { $scope.planChanging = true; },
          'closed': function() { $scope.planChanging = false; },
          'success': subscribedToPlan,
          'failure': function(resp) {
            $('#couldnotsubscribeModal').modal();
            $scope.planChanging = false;
          }
        };

        var isUserNamespace = UserService.isUserNamespace($scope.namespace);
        var namespace = isUserNamespace ? null : $scope.namespace;
        PlanService.changePlan($scope, namespace, $scope.planRequired.stripeId, callbacks);
      };
    }
  };
  return directiveDefinitionObject;
});

@ -14,10 +14,12 @@ angular.module('quay').directive('signinForm', function () {
  'signInStarted': '&signInStarted',
  'signedIn': '&signedIn'
},
controller: function($scope, $location, $timeout, $interval, ApiService, KeyService, UserService, CookieService, Features, Config) {
controller: function($scope, $location, $timeout, $interval, ApiService, KeyService, UserService, CookieService, Features, Config, ExternalLoginService) {
  $scope.tryAgainSoon = 0;
  $scope.tryAgainInterval = null;
  $scope.signingIn = false;
  $scope.EXTERNAL_LOGINS = ExternalLoginService.EXTERNAL_LOGINS;
  $scope.Features = Features;

  $scope.markStarted = function() {
    $scope.signingIn = true;

@ -45,7 +47,7 @@ angular.module('quay').directive('signinForm', function () {
  });

  $scope.signin = function() {
    if ($scope.tryAgainSoon > 0) { return; }
    if ($scope.tryAgainSoon > 0 || !Features.DIRECT_LOGIN) { return; }

    $scope.markStarted();
    $scope.cancelInterval();

@ -13,11 +13,13 @@ angular.module('quay').directive('signupForm', function () {
  'hideRegisteredMessage': '@hideRegisteredMessage',
  'userRegistered': '&userRegistered'
},
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService) {
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService, ExternalLoginService) {
  $('.form-signup').popover();

  $scope.awaitingConfirmation = false;
  $scope.registering = false;
  $scope.EXTERNAL_LOGINS = ExternalLoginService.EXTERNAL_LOGINS;
  $scope.singleSigninUrl = ExternalLoginService.getSingleSigninUrl();

  $scope.register = function() {
    UIService.hidePopover('#signupButton');

@ -22,22 +22,6 @@
  'initialize': ''
};

// Watch the namespace on the repo. If it changes, we update the plan and the public/private
// accordingly.
$scope.isUserNamespace = true;
$scope.$watch('repo.namespace', function(namespace) {
  // Note: Can initially be undefined.
  if (!namespace) { return; }

  var isUserNamespace = (namespace == $scope.user.username);

  $scope.planRequired = null;
  $scope.isUserNamespace = isUserNamespace;

  // Determine whether private repositories are allowed for the namespace.
  checkPrivateAllowed();
});

$scope.changeNamespace = function(namespace) {
  $scope.repo.namespace = namespace;
};

@ -108,65 +92,5 @@
    });
  });
};

$scope.upgradePlan = function() {
  var callbacks = {
    'started': function() { $scope.planChanging = true; },
    'opened': function() { $scope.planChanging = true; },
    'closed': function() { $scope.planChanging = false; },
    'success': subscribedToPlan,
    'failure': function(resp) {
      $('#couldnotsubscribeModal').modal();
      $scope.planChanging = false;
    }
  };

  var namespace = $scope.isUserNamespace ? null : $scope.repo.namespace;
  PlanService.changePlan($scope, namespace, $scope.planRequired.stripeId, callbacks);
};

var checkPrivateAllowed = function() {
  if (!$scope.repo || !$scope.repo.namespace) { return; }

  if (!Features.BILLING) {
    $scope.checkingPlan = false;
    $scope.planRequired = null;
    return;
  }

  $scope.checkingPlan = true;

  var isUserNamespace = $scope.isUserNamespace;
  ApiService.getPrivateAllowed(isUserNamespace ? null : $scope.repo.namespace).then(function(resp) {
    $scope.checkingPlan = false;

    if (resp['privateAllowed']) {
      $scope.planRequired = null;
      return;
    }

    if (resp['privateCount'] == null) {
      // Organization where we are not the admin.
      $scope.planRequired = {};
      return;
    }

    // Otherwise, lookup the matching plan.
    PlanService.getMinimumPlan(resp['privateCount'] + 1, !isUserNamespace, function(minimum) {
      $scope.planRequired = minimum;
    });
  });
};

var subscribedToPlan = function(sub) {
  $scope.planChanging = false;
  $scope.subscription = sub;

  PlanService.getPlan(sub.plan, function(subscribedPlan) {
    $scope.subscribedPlan = subscribedPlan;
    $scope.planRequired = null;
    checkPrivateAllowed();
  });
};
}
})();

@ -8,7 +8,12 @@
  });
}]);

function SignInCtrl($scope, $location) {
function SignInCtrl($scope, $location, ExternalLoginService, Features) {
  $scope.redirectUrl = '/';

  var singleUrl = ExternalLoginService.getSingleSigninUrl();
  if (singleUrl) {
    document.location = singleUrl;
  }
}
})();

@ -10,7 +10,7 @@
  })
}]);

function UserViewCtrl($scope, $routeParams, $timeout, ApiService, UserService, UIService, AvatarService, Config) {
function UserViewCtrl($scope, $routeParams, $timeout, ApiService, UserService, UIService, AvatarService, Config, ExternalLoginService) {
  var username = $routeParams.username;

  $scope.showInvoicesCounter = 0;

@ -18,6 +18,7 @@
  $scope.showRobotsCounter = 0;
  $scope.changeEmailInfo = {};
  $scope.changePasswordInfo = {};
  $scope.hasSingleSignin = ExternalLoginService.hasSingleSignin();

  UserService.updateUserIn($scope);

141 static/js/services/external-login-service.js Normal file

@ -0,0 +1,141 @@
/**
 * Service which exposes the supported external logins.
 */
angular.module('quay').factory('ExternalLoginService', ['KeyService', 'Features', 'Config',
  function(KeyService, Features, Config) {
    var externalLoginService = {};

    externalLoginService.getLoginUrl = function(service, action) {
      var serviceInfo = externalLoginService.getProvider(service);
      if (!serviceInfo) { return ''; }

      var stateClause = '';

      if (Config.MIXPANEL_KEY && window.mixpanel) {
        if (mixpanel.get_distinct_id !== undefined) {
          stateClause = "&state=" + encodeURIComponent(mixpanel.get_distinct_id());
        }
      }

      var loginUrl = KeyService.getConfiguration(serviceInfo.key, 'AUTHORIZE_ENDPOINT');
      var clientId = KeyService.getConfiguration(serviceInfo.key, 'CLIENT_ID');

      var scope = serviceInfo.scopes();
      var redirectUri = Config.getUrl('/oauth2/' + service + '/callback');

      if (action == 'attach') {
        redirectUri += '/attach';
      }

      var url = loginUrl + 'client_id=' + clientId + '&scope=' + scope + '&redirect_uri=' +
        redirectUri + stateClause;

      return url;
    };

    var DEX = {
      id: 'dex',
      key: 'DEX_LOGIN_CONFIG',

      title: function() {
        return KeyService.getConfiguration('DEX_LOGIN_CONFIG', 'OIDC_TITLE');
      },

      icon: function() {
        return {'url': KeyService.getConfiguration('DEX_LOGIN_CONFIG', 'OIDC_LOGO') };
      },

      scopes: function() {
        return 'openid email profile';
      },

      enabled: Features.DEX_LOGIN
    };

    var GITHUB = {
      id: 'github',
      key: 'GITHUB_LOGIN_CONFIG',

      title: function() {
        return KeyService.isEnterprise('github') ? 'GitHub Enterprise' : 'GitHub';
      },

      icon: function() {
        return {'icon': 'fa-github'};
      },

      hasUserInfo: true,
      getUserInfo: function(service_info) {
        var username = service_info['metadata']['service_username'];
        return {
          'username': username,
          'endpoint': KeyService['githubEndpoint'] + username
        };
      },

      scopes: function() {
        var scopes = 'user:email';
        if (KeyService.getConfiguration('GITHUB_LOGIN_CONFIG', 'ORG_RESTRICT')) {
          scopes += ' read:org';
        }

        return scopes;
      },

      enabled: Features.GITHUB_LOGIN
    };

    var GOOGLE = {
      id: 'google',
      key: 'GOOGLE_LOGIN_CONFIG',

      title: function() {
        return 'Google';
      },

      icon: function() {
        return {'icon': 'fa-google'};
      },

      scopes: function() {
        return 'openid email';
      },

      enabled: Features.GOOGLE_LOGIN
    };

    externalLoginService.ALL_EXTERNAL_LOGINS = [
      DEX, GITHUB, GOOGLE
    ];

    externalLoginService.EXTERNAL_LOGINS = externalLoginService.ALL_EXTERNAL_LOGINS.filter(function(el) {
      return el.enabled;
    });

    externalLoginService.getProvider = function(providerId) {
      for (var i = 0; i < externalLoginService.EXTERNAL_LOGINS.length; ++i) {
        var current = externalLoginService.EXTERNAL_LOGINS[i];
        if (current.id == providerId) {
          return current;
        }
      }

      return null;
    };

    externalLoginService.hasSingleSignin = function() {
      return externalLoginService.EXTERNAL_LOGINS.length == 1 && !Features.DIRECT_LOGIN;
    };

    externalLoginService.getSingleSigninUrl = function() {
      // If there is a single external login service and direct login is disabled,
      // then redirect to the external login directly.
      if (externalLoginService.hasSingleSignin()) {
        return externalLoginService.getLoginUrl(externalLoginService.EXTERNAL_LOGINS[0].id);
      }

      return null;
    };

    return externalLoginService;
  }]);

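getLoginUrl concatenates the OAuth parameters directly onto AUTHORIZE_ENDPOINT, which therefore has to end in '?' or '&'. The same construction written in Python for clarity (the values in the usage comment are illustrative):

    from urllib import quote

    def build_login_url(authorize_endpoint, client_id, scope, redirect_uri, state=None):
      # Mirrors the string concatenation in getLoginUrl; like the JS, no extra
      # encoding is applied to scope or redirect_uri.
      url = '%sclient_id=%s&scope=%s&redirect_uri=%s' % (authorize_endpoint, client_id,
                                                         scope, redirect_uri)
      if state:
        url += '&state=' + quote(state)
      return url

    # build_login_url('https://github.com/login/oauth/authorize?', 'my-client-id',
    #                 'user:email', 'https://quay.example.com/oauth2/github/callback')
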
@ -10,35 +10,26 @@ angular.module('quay').factory('KeyService', ['$location', 'Config', function($l

keyService['gitlabTriggerClientId'] = oauth['GITLAB_TRIGGER_CONFIG']['CLIENT_ID'];
keyService['githubTriggerClientId'] = oauth['GITHUB_TRIGGER_CONFIG']['CLIENT_ID'];
keyService['githubLoginClientId'] = oauth['GITHUB_LOGIN_CONFIG']['CLIENT_ID'];
keyService['googleLoginClientId'] = oauth['GOOGLE_LOGIN_CONFIG']['CLIENT_ID'];

keyService['gitlabRedirectUri'] = Config.getUrl('/oauth2/gitlab/callback');
keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback');
keyService['googleRedirectUri'] = Config.getUrl('/oauth2/google/callback');

keyService['githubLoginUrl'] = oauth['GITHUB_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
keyService['googleLoginUrl'] = oauth['GOOGLE_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];

keyService['githubEndpoint'] = oauth['GITHUB_LOGIN_CONFIG']['GITHUB_ENDPOINT'];

keyService['githubTriggerEndpoint'] = oauth['GITHUB_TRIGGER_CONFIG']['GITHUB_ENDPOINT'];
keyService['githubTriggerAuthorizeUrl'] = oauth['GITHUB_TRIGGER_CONFIG']['AUTHORIZE_ENDPOINT'];

keyService['gitlabTriggerEndpoint'] = oauth['GITLAB_TRIGGER_CONFIG']['GITLAB_ENDPOINT'];
keyService['gitlabTriggerAuthorizeUrl'] = oauth['GITLAB_TRIGGER_CONFIG']['AUTHORIZE_ENDPOINT'];

keyService['githubLoginScope'] = 'user:email';
if (oauth['GITHUB_LOGIN_CONFIG']['ORG_RESTRICT']) {
  keyService['githubLoginScope'] += ',read:org';
}

keyService['googleLoginScope'] = 'openid email';
keyService.getConfiguration = function(parent, key) {
  return oauth[parent][key];
};

keyService.isEnterprise = function(service) {
  switch (service) {
    case 'github':
      return keyService['githubLoginUrl'].indexOf('https://github.com/') < 0;
      var loginUrl = oauth['GITHUB_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
      return loginUrl.indexOf('https://github.com/') < 0;

    case 'github-trigger':
      return keyService['githubTriggerAuthorizeUrl'].indexOf('https://github.com/') < 0;

@ -47,26 +38,5 @@ angular.module('quay').factory('KeyService', ['$location', 'Config', function($l
  return false;
};

keyService.getExternalLoginUrl = function(service, action) {
  var state_clause = '';
  if (Config.MIXPANEL_KEY && window.mixpanel) {
    if (mixpanel.get_distinct_id !== undefined) {
      state_clause = "&state=" + encodeURIComponent(mixpanel.get_distinct_id());
    }
  }

  var client_id = keyService[service + 'LoginClientId'];
  var scope = keyService[service + 'LoginScope'];
  var redirect_uri = keyService[service + 'RedirectUri'];
  if (action == 'attach') {
    redirect_uri += '/attach';
  }

  var url = keyService[service + 'LoginUrl'] + 'client_id=' + client_id + '&scope=' + scope +
    '&redirect_uri=' + redirect_uri + state_clause;

  return url;
};

return keyService;
}]);

@ -126,6 +126,10 @@ function(ApiService, CookieService, $rootScope, Config) {
  return userResponse;
};

userService.isUserNamespace = function(namespace) {
  return namespace == userResponse.username;
};

// Update the user in the root scope.
userService.updateUserIn($rootScope);

@ -88,31 +88,9 @@
</div>

<!-- Payment -->
<div class="required-plan" ng-show="repo.is_public == '0' && planRequired && planRequired.title">
  <div class="co-alert co-alert-warning">
    In order to make this repository private under
    <strong ng-if="isUserNamespace">your personal namespace</strong>
    <strong ng-if="!isUserNamespace">organization <b>{{ repo.namespace }}</b></strong>, you will need to upgrade your plan to
    <b style="border-bottom: 1px dotted black;" data-html="true"
       data-title="{{ '<b>' + planRequired.title + '</b><br>' + planRequired.privateRepos + ' private repositories' }}" bs-tooltip>
      {{ planRequired.title }}
    </b>.
    This will cost $<span>{{ planRequired.price / 100 }}</span>/month.
  </div>
  <a class="btn btn-primary" ng-click="upgradePlan()" ng-show="!planChanging">Upgrade now</a>
  <span ng-if="isUserNamespace && user.organizations.length == 1" style="margin-left: 6px; display: inline-block;">or did you mean to create this repository
    under <a href="javascript:void(0)" ng-click="changeNamespace(user.organizations[0].name)"><b>{{ user.organizations[0].name }}</b></a>?</span>
  <div class="cor-loader-inline" ng-show="planChanging"></div>
<div class="repo-count-checker" namespace="repo.namespace" plan-required="planRequired"
     is-enabled="repo.is_public == '0'">
</div>

<div class="cor-loader-inline" ng-show="repo.is_public == '0' && checkingPlan"></div>

<div class="required-plan" ng-show="repo.is_public == '0' && planRequired && !isUserNamespace && !planRequired.title">
  <div class="co-alert co-alert-warning">
    This organization has reached its private repository limit. Please contact your administrator.
  </div>
</div>
</div>
</div>
</div>

@ -43,7 +43,8 @@
      tab-init="showInvoices()" quay-show="Features.BILLING">
  <i class="fa ci-invoice"></i>
</span>
<span class="cor-tab" tab-title="External Logins" tab-target="#external">
<span class="cor-tab" tab-title="External Logins" tab-target="#external"
      quay-show="!hasSingleSignin">
  <i class="fa fa-external-link-square"></i>
</span>
<span class="cor-tab" tab-title="Authorized Applications" tab-target="#applications"

@ -70,7 +71,7 @@
</div>

<!-- External Logins -->
<div id="external" class="tab-pane">
<div id="external" class="tab-pane" quay-show="!hasSingleSignin">
  <div class="external-logins-manager" user="viewuser"></div>
</div>

@ -50,8 +50,9 @@ class BaseStorage(StoragePaths):
    """ Called to perform any storage system setup. """
    pass

  def validate(self):
    """ Called to perform any custom storage system validation. """
  def validate(self, client):
    """ Called to perform any custom storage system validation. The client is an HTTP
        client to use for any external calls. """
    pass

  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):

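The diff does not show the caller of the new validate(client) hook; presumably the setup path builds one shared HTTP client and hands it to each configured engine. A sketch under that assumption:

    import requests

    def validate_storage(storage_engine):
      # One shared HTTP client for any external calls made during validation.
      client = requests.Session()
      storage_engine.setup()
      storage_engine.validate(client)  # expected to raise on misconfiguration
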
@ -257,6 +257,8 @@ class _CloudStorage(BaseStorageV2):
    return k.etag[1:-1][:7]

  def copy_to(self, destination, path):
    self._initialize_cloud_conn()

    # First try to copy directly via boto, but only if the storages are the
    # same type, with the same access information.
    if (self.__class__ == destination.__class__ and

@ -268,6 +270,7 @@ class _CloudStorage(BaseStorageV2):
      source_path = self._init_path(path)
      source_key = self._key_class(self._cloud_bucket, source_path)

      destination._initialize_cloud_conn()
      dest_path = destination._init_path(path)
      source_key.copy(destination._cloud_bucket, dest_path)
      return

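The hunk shows only the same-type fast path (a server-side boto key copy); when the engines differ, a streamed fallback presumably follows the early return. A hedged sketch of that fallback (stream_read_file and stream_write are assumed to be part of the storage interface, not confirmed by the diff):

    def copy_to_fallback(source, destination, path):
      # Stream the object through this process instead of a server-side copy.
      with source.stream_read_file(path) as handle:
        destination.stream_write(path, handle)
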
@ -123,7 +123,7 @@ class LocalStorage(BaseStorageV2):
    content_path = self._init_path(self._rel_upload_path(uuid))
    os.remove(content_path)

  def validate(self):
  def validate(self, client):
    # Load the set of disk mounts.
    try:
      mounts = psutil.disk_partitions(all=True)

@ -2,8 +2,12 @@
from swiftclient.client import Connection, ClientException
from storage.basestorage import BaseStorage
from util.registry.generatorfile import GeneratorFile

from urlparse import urlparse
from random import SystemRandom
from hashlib import sha1
from time import time

import hmac
import string
import logging


@ -12,9 +16,9 @@ logger = logging.getLogger(__name__)

class SwiftStorage(BaseStorage):
  def __init__(self, swift_container, storage_path, auth_url, swift_user,
               swift_password, auth_version=None, os_options=None, ca_cert_path=None):
               swift_password, auth_version=None, os_options=None, ca_cert_path=None,
               temp_url_key=None):
    super(SwiftStorage, self).__init__()

    self._swift_container = swift_container
    self._storage_path = storage_path

@ -24,6 +28,8 @@ class SwiftStorage(BaseStorage):
    self._swift_user = swift_user
    self._swift_password = swift_password

    self._temp_url_key = temp_url_key

    try:
      self._auth_version = int(auth_version or '2')
    except ValueError:

@ -53,8 +59,12 @@ class SwiftStorage(BaseStorage):

    return path

  def _normalize_path(self, path=None):
    path = self._storage_path + (path or '')
  def _normalize_path(self, object_path=None):
    path = self._storage_path
    if not path.endswith('/'):
      path = path + '/'

    path = path + (object_path or '')

    # Openstack does not like paths starting with '/' and we always normalize
    # to remove trailing '/'

@ -118,15 +128,66 @@ class SwiftStorage(BaseStorage):
      logger.exception('Could not head object: %s', path)
      return None

  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
  def get_direct_download_url(self, object_path, expires_in=60, requires_cors=False):
    if requires_cors:
      return None

    # TODO(jschorr): This method is not strictly necessary but would result in faster operations
    # when using this storage engine. However, the implementation (as seen in the link below)
    # is not clean, so we punt on this for now.
    # http://docs.openstack.org/juno/config-reference/content/object-storage-tempurl.html
    return None
    # Reference: http://docs.openstack.org/juno/config-reference/content/object-storage-tempurl.html
    if not self._temp_url_key:
      return None

    # Retrieve the auth details for the connection.
    try:
      object_url_value, _ = self._get_connection().get_auth()
    except ClientException:
      logger.exception('Got client exception when trying to load Swift auth')
      return None

    object_url = urlparse(object_url_value)
    scheme = object_url.scheme
    path = object_url.path
    hostname = object_url.netloc

    if not path.endswith('/'):
      path = path + '/'

    object_path = self._normalize_path(object_path)

    # Generate the signed HMAC body.
    method = 'GET'
    expires = int(time() + expires_in)
    full_path = '%s%s/%s' % (path, self._swift_container, object_path)

    hmac_body = '%s\n%s\n%s' % (method, expires, full_path)
    sig = hmac.new(self._temp_url_key.encode('utf-8'), hmac_body.encode('utf-8'), sha1).hexdigest()

    surl = '{scheme}://{host}{full_path}?temp_url_sig={sig}&temp_url_expires={expires}'
    return surl.format(scheme=scheme, host=hostname, full_path=full_path, sig=sig, expires=expires)

  def validate(self, client):
    if self._temp_url_key:
      # Add a file to test direct download.
      self.put_content('dd_path', 'testing 3456')

      # Generate a direct download URL.
      dd_url = self.get_direct_download_url('dd_path')

      if not dd_url:
        self.remove('dd_path')
        raise Exception('Could not validate direct download URL; the token may be invalid.')

      # Try to retrieve the direct download URL.
      response = client.get(dd_url, timeout=2)

      # Remove the test file.
      self.remove('dd_path')

      if response.status_code != 200:
        logger.debug('Direct download failure: %s => %s with body %s', dd_url,
                     response.status_code, response.text)

        msg = 'Direct download URL failed with status code %s. Please check your temp-url-key.'
        raise Exception(msg % response.status_code)

  def get_content(self, path):
    return self._get_object(path)

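The temp URL scheme above is the standard OpenStack one: an HMAC-SHA1 over 'METHOD\nexpires\npath' using the shared key. A standalone illustration of the same signature math (the key, account, and object path are made up):

    import hmac
    from hashlib import sha1
    from time import time

    temp_url_key = 'supersecretkey'
    full_path = '/v1/AUTH_account/mycontainer/some/object/path'
    expires = int(time() + 60)

    hmac_body = 'GET\n%s\n%s' % (expires, full_path)
    sig = hmac.new(temp_url_key, hmac_body, sha1).hexdigest()

    url = 'https://swift.example.com%s?temp_url_sig=%s&temp_url_expires=%s' % (full_path, sig, expires)
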
@ -11,7 +11,7 @@
<h2 style="margin-bottom: 20px;">There was an error logging in with {{ service_name }}.</h2>

{% if error_message %}
<div class="alert alert-danger">{{ error_message }}</div>
<div class="co-alert co-alert-danger">{{ error_message }}</div>
{% endif %}

{% if user_creation %}

Binary file not shown.
@@ -7,12 +7,14 @@ from flask.blueprints import Blueprint
from flask.ext.testing import LiveServerTestCase

from app import app
from data.database import close_db_filter, configure
from endpoints.v1 import v1_bp
from endpoints.v2 import v2_bp
from endpoints.v2.manifest import SignedManifestBuilder
from endpoints.api import api_bp
from initdb import wipe_database, initialize_database, populate_database
from endpoints.csrf import generate_csrf_token
from tempfile import NamedTemporaryFile

import endpoints.decorated
import json

@@ -20,6 +22,7 @@ import features
import hashlib

import tarfile
import shutil

from jwkest.jws import SIGNER_ALGS
from jwkest.jwk import RSAKey

@@ -37,7 +40,9 @@ except ValueError:
  pass


-# Add a test blueprint for generating CSRF tokens and setting feature flags.
+# Add a test blueprint for generating CSRF tokens, setting feature flags and reloading the
+# DB connection.

testbp = Blueprint('testbp', __name__)

@testbp.route('/csrf', methods=['GET'])

@@ -51,6 +56,15 @@ def set_feature(feature_name):
  features._FEATURES[feature_name].value = request.get_json()['value']
  return jsonify({'old_value': old_value})

@testbp.route('/reloaddb', methods=['POST'])
def reload_db():
  # Close any existing connection.
  close_db_filter(None)

  # Reload the database config.
  configure(app.config)
  return 'OK'

app.register_blueprint(testbp, url_prefix='/__test')


@@ -78,6 +92,62 @@ class TestFeature(object):
                        headers={'Content-Type': 'application/json'})


_PORT_NUMBER = 5001
_CLEAN_DATABASE_PATH = None


def get_new_database_uri():
  # If a clean copy of the database has not yet been created, create one now.
  global _CLEAN_DATABASE_PATH
  if not _CLEAN_DATABASE_PATH:
    wipe_database()
    initialize_database()
    populate_database()
    close_db_filter(None)

    # Save the path of the clean database.
    _CLEAN_DATABASE_PATH = app.config['TEST_DB_FILE'].name

  # Create a new temp file to be used as the actual backing database for the test.
  # Note that we have to close() the file to ensure we can copy to it via shutil.
  local_db_file = NamedTemporaryFile(delete=True)
  local_db_file.close()

  # Copy the clean database to the path.
  shutil.copy2(_CLEAN_DATABASE_PATH, local_db_file.name)
  return 'sqlite:///{0}'.format(local_db_file.name)


class RegistryTestCase(LiveServerTestCase):
  maxDiff = None

  def create_app(self):
    global _PORT_NUMBER
    _PORT_NUMBER = _PORT_NUMBER + 1
    app.config['TESTING'] = True
    app.config['LIVESERVER_PORT'] = _PORT_NUMBER
    app.config['DB_URI'] = get_new_database_uri()
    return app

  def setUp(self):
    self.clearSession()

    # Tell the remote running app to reload the database. By default, the app forks from the
    # current context and has already loaded the DB config with the *original* DB URL. We call
    # the remote reload method to force it to pick up the changes to DB_URI set in the create_app
    # method.
    self.conduct('POST', '/__test/reloaddb')

  def clearSession(self):
    self.session = requests.Session()
    self.signature = None
    self.docker_token = 'true'

    # Load the CSRF token.
    self.csrf_token = ''
    self.csrf_token = self.conduct('GET', '/__test/csrf').text


class BaseRegistryMixin(object):
  def conduct(self, method, url, headers=None, data=None, auth=None, params=None, expected_code=200):
    params = params or {}
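The create_app/get_new_database_uri pair above sidesteps re-populating SQLite for every test: the fixture database is built once, and each test then copies the file and points DB_URI at the copy. A minimal standalone sketch of the same template-copy trick (the schema and names here are illustrative, not from this repository):

import shutil
import sqlite3
from tempfile import NamedTemporaryFile

_TEMPLATE_PATH = None

def _template_database():
  # Build the expensive fixture database exactly once per process.
  global _TEMPLATE_PATH
  if _TEMPLATE_PATH is None:
    template = NamedTemporaryFile(suffix='.db', delete=False)
    template.close()
    conn = sqlite3.connect(template.name)
    conn.execute('CREATE TABLE repository (id INTEGER PRIMARY KEY, name TEXT)')
    conn.execute("INSERT INTO repository (name) VALUES ('simple')")
    conn.commit()
    conn.close()
    _TEMPLATE_PATH = template.name
  return _TEMPLATE_PATH

def fresh_database_uri():
  # Each test gets a byte-for-byte copy, so mutations never leak across tests.
  copy = NamedTemporaryFile(suffix='.db', delete=False)
  copy.close()
  shutil.copy2(_template_database(), copy.name)
  return 'sqlite:///{0}'.format(copy.name)

The /__test/reloaddb endpoint is still needed on top of this because the live server process has already configured its connection with the original URI by the time a test runs.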
@@ -47,7 +47,8 @@ from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPe
                                      RepositoryUserTransitivePermission)
from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement,
                                     SuperUserSendRecoveryEmail, ChangeLog,
-                                     SuperUserOrganizationManagement, SuperUserOrganizationList)
+                                     SuperUserOrganizationManagement, SuperUserOrganizationList,
+                                     SuperUserAggregateLogs)


try:

@@ -3928,6 +3929,24 @@ class TestUserAuthorization(ApiTestCase):
    self._run_test('DELETE', 404, 'devtable', None)


class TestSuperAggregateUserLogs(ApiTestCase):
  def setUp(self):
    ApiTestCase.setUp(self)
    self._set_url(SuperUserAggregateLogs)

  def test_get_anonymous(self):
    self._run_test('GET', 401, None, None)

  def test_get_freshuser(self):
    self._run_test('GET', 403, 'freshuser', None)

  def test_get_reader(self):
    self._run_test('GET', 403, 'reader', None)

  def test_get_devtable(self):
    self._run_test('GET', 200, 'devtable', None)


class TestSuperUserLogs(ApiTestCase):
  def setUp(self):
    ApiTestCase.setUp(self)
@@ -1,6 +1,7 @@
# coding=utf-8

import unittest
import datetime
import json as py_json

from urllib import urlencode

@@ -15,6 +16,7 @@ from endpoints.trigger import BuildTriggerHandler
from app import app
from initdb import setup_database_for_testing, finished_database_for_testing
from data import database, model
from data.database import RepositoryActionCount

from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, OrganizationTeam
from endpoints.api.tag import RepositoryTagImages, RepositoryTag, RevertTag, ListRepositoryTags

@@ -315,8 +317,18 @@ class TestGetUserPrivateAllowed(ApiTestCase):

  def test_allowed(self):
    self.login(ADMIN_ACCESS_USER)

    # Change the subscription of the namespace.
    self.putJsonResponse(UserPlan, data=dict(plan='personal-30'))

    json = self.getJsonResponse(PrivateRepositories)
    assert json['privateCount'] >= 6
    assert not json['privateAllowed']

    # Change the subscription of the namespace.
    self.putJsonResponse(UserPlan, data=dict(plan='bus-large-30'))

    json = self.getJsonResponse(PrivateRepositories)
    assert json['privateAllowed']


@@ -1435,6 +1447,36 @@ class TestUpdateRepo(ApiTestCase):

class TestChangeRepoVisibility(ApiTestCase):
  SIMPLE_REPO = ADMIN_ACCESS_USER + '/simple'

  def test_trychangevisibility(self):
    self.login(ADMIN_ACCESS_USER)

    # Make public.
    self.postJsonResponse(RepositoryVisibility,
                          params=dict(repository=self.SIMPLE_REPO),
                          data=dict(visibility='public'))

    # Verify the visibility.
    json = self.getJsonResponse(Repository,
                                params=dict(repository=self.SIMPLE_REPO))

    self.assertEquals(True, json['is_public'])

    # Change the subscription of the namespace.
    self.putJsonResponse(UserPlan, data=dict(plan='personal-30'))

    # Try to make private.
    self.postJsonResponse(RepositoryVisibility,
                          params=dict(repository=self.SIMPLE_REPO),
                          data=dict(visibility='private'),
                          expected_code=402)

    # Verify the visibility.
    json = self.getJsonResponse(Repository,
                                params=dict(repository=self.SIMPLE_REPO))

    self.assertEquals(True, json['is_public'])

  def test_changevisibility(self):
    self.login(ADMIN_ACCESS_USER)

@@ -1468,6 +1510,10 @@ class TestDeleteRepository(ApiTestCase):
  def test_deleterepo(self):
    self.login(ADMIN_ACCESS_USER)

    # Verify the repo exists.
    self.getResponse(Repository,
                     params=dict(repository=self.SIMPLE_REPO))

    self.deleteResponse(Repository, params=dict(repository=self.SIMPLE_REPO))

    # Verify the repo was deleted.

@@ -1478,6 +1524,10 @@ class TestDeleteRepository(ApiTestCase):
  def test_deleterepo2(self):
    self.login(ADMIN_ACCESS_USER)

    # Verify the repo exists.
    self.getResponse(Repository,
                     params=dict(repository=self.COMPLEX_REPO))

    self.deleteResponse(Repository, params=dict(repository=self.COMPLEX_REPO))

    # Verify the repo was deleted.

@@ -1488,7 +1538,11 @@ class TestDeleteRepository(ApiTestCase):
  def test_populate_and_delete_repo(self):
    self.login(ADMIN_ACCESS_USER)

-    # Make sure the repository has come images and tags.
+    # Verify the repo exists.
+    self.getResponse(Repository,
+                     params=dict(repository=self.COMPLEX_REPO))
+
+    # Make sure the repository has some images and tags.
    self.assertTrue(len(list(model.image.get_repository_images(ADMIN_ACCESS_USER, 'complex'))) > 0)
    self.assertTrue(len(list(model.tag.list_repository_tags(ADMIN_ACCESS_USER, 'complex'))) > 0)

@@ -1524,6 +1578,13 @@ class TestDeleteRepository(ApiTestCase):
    model.repository.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'a@b.com')
    model.repository.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'b@c.com')

    # Create some repository action count entries.
    RepositoryActionCount.create(repository=repository, date=datetime.datetime.now(), count=1)
    RepositoryActionCount.create(repository=repository,
                                 date=datetime.datetime.now() - datetime.timedelta(days=1), count=2)
    RepositoryActionCount.create(repository=repository,
                                 date=datetime.datetime.now() - datetime.timedelta(days=3), count=6)

    # Delete the repository.
    self.deleteResponse(Repository, params=dict(repository=self.COMPLEX_REPO))

@@ -2079,6 +2140,7 @@ class TestRevertTag(ApiTestCase):
    self.assertEquals(previous_image_id, json['tags'][0]['docker_image_id'])



class TestListAndDeleteTag(ApiTestCase):
  def test_listdeletecreateandmovetag(self):
    self.login(ADMIN_ACCESS_USER)

@@ -2166,6 +2228,19 @@ class TestListAndDeleteTag(ApiTestCase):

    self.assertEquals(prod_images, json['images'])

  def test_listtagpagination(self):
    self.login(ADMIN_ACCESS_USER)

    for i in xrange(1, 100):
      model.tag.create_or_update_tag(ADMIN_ACCESS_USER, "complex", "tag" + str(i),
                                     "1d8cbff4e0363d1826c6a0b64ef0bc501d8cbff4e0363d1826c6a0b64ef0bc50")

    json = self.getJsonResponse(ListRepositoryTags,
                                params=dict(repository=ADMIN_ACCESS_USER + '/complex', page=2))

    # Make sure that we're able to see the second page of results.
    assert json['page'] == 2
    assert len(json['tags']) == 50


class TestRepoPermissions(ApiTestCase):
  def listUserPermissions(self, namespace=ADMIN_ACCESS_USER, repo='simple'):
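The pagination test above creates 99 extra tags and asserts that page 2 holds exactly 50 of them, which implies a fixed page size of 50 on the tag-listing endpoint (inferred from the assertions; the limit itself is not stated anywhere in this diff). The slicing being exercised is the usual one-indexed offset arithmetic:

def page_slice(items, page, page_size=50):
  # Page 1 -> items[0:50], page 2 -> items[50:100], and so on.
  start = (page - 1) * page_size
  return items[start:start + page_size]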
@@ -2512,6 +2587,26 @@ class TestOrgRobots(ApiTestCase):
    return [r['name'] for r in self.getJsonResponse(OrgRobotList,
                                                    params=dict(orgname=ORGANIZATION))['robots']]

  def test_create_robot_with_underscores(self):
    self.login(ADMIN_ACCESS_USER)

    # Create the robot.
    self.putJsonResponse(OrgRobot,
                         params=dict(orgname=ORGANIZATION, robot_shortname='mr_bender'),
                         expected_code=201)

    # Add the robot to a team.
    membername = ORGANIZATION + '+mr_bender'
    self.putJsonResponse(TeamMember,
                         params=dict(orgname=ORGANIZATION, teamname='readers',
                                     membername=membername))

    # Retrieve the robot's details.
    self.getJsonResponse(OrgRobot,
                         params=dict(orgname=ORGANIZATION, robot_shortname='mr_bender'),
                         expected_code=200)


  def test_delete_robot_after_use(self):
    self.login(ADMIN_ACCESS_USER)
@@ -75,8 +75,8 @@ class TestGarbageCollection(unittest.TestCase):
      v1_metadata['parent'] = parent.docker_image_id

    # Set the ancestors for the image.
-    parent = model.image.set_image_metadata(image_id, namespace, name, '', '', '',
-                                            v1_metadata, parent=parent)
+    parent = model.image.set_image_metadata(image_id, namespace, name, '', '', '', v1_metadata,
+                                            parent=parent)

    # Set the tag for the image.
    model.tag.create_or_update_tag(namespace, name, tag_name, image_ids[-1])
test/test_queries.py (new file)
@@ -0,0 +1,49 @@
import unittest

from app import app
from initdb import setup_database_for_testing, finished_database_for_testing
from data import model
from data.database import RepositoryBuild

ADMIN_ACCESS_USER = 'devtable'
SIMPLE_REPO = 'simple'

class TestSpecificQueries(unittest.TestCase):
  def setUp(self):
    setup_database_for_testing(self)
    self.app = app.test_client()
    self.ctx = app.test_request_context()
    self.ctx.__enter__()

  def tearDown(self):
    finished_database_for_testing(self)
    self.ctx.__exit__(True, None, None)

  def test_archivable_buildlogs(self):
    # Make sure there are no archivable logs.
    result = model.build.get_archivable_build()
    self.assertIsNone(result)

    # Add a build that cannot (yet) be archived.
    repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    token = model.token.create_access_token(repo, 'write')
    created = RepositoryBuild.create(repository=repo, access_token=token,
                                     phase=model.build.BUILD_PHASE.WAITING,
                                     logs_archived=False, job_config='{}',
                                     display_name='')

    # Make sure there are no archivable logs.
    result = model.build.get_archivable_build()
    self.assertIsNone(result)

    # Change the build to being complete.
    created.phase = model.build.BUILD_PHASE.COMPLETE
    created.save()

    # Make sure we now find an archivable build.
    result = model.build.get_archivable_build()
    self.assertEquals(created.id, result.id)
    self.assertEquals(created.uuid, result.uuid)

if __name__ == '__main__':
  unittest.main()
@@ -1,7 +1,7 @@
from test.test_api_usage import ApiTestCase, READ_ACCESS_USER, ADMIN_ACCESS_USER
from endpoints.api.suconfig import (SuperUserRegistryStatus, SuperUserConfig, SuperUserConfigFile,
                                    SuperUserCreateInitialSuperUser, SuperUserConfigValidate)
-from app import CONFIG_PROVIDER
+from app import config_provider
from data.database import User

import unittest

@@ -10,11 +10,11 @@ import unittest
class ConfigForTesting(object):

  def __enter__(self):
-    CONFIG_PROVIDER.reset_for_test()
-    return CONFIG_PROVIDER
+    config_provider.reset_for_test()
+    return config_provider

  def __exit__(self, type, value, traceback):
-    CONFIG_PROVIDER.reset_for_test()
+    config_provider.reset_for_test()


class TestSuperUserRegistryStatus(ApiTestCase):

@@ -166,7 +166,7 @@ class TestSuperUserConfig(ApiTestCase):
    self.assertTrue(json['exists'])

    # Verify the config file exists.
-    self.assertTrue(config.yaml_exists())
+    self.assertTrue(config.config_exists())

    # Try writing it again. This should now fail, since the config.yaml exists.
    self.putResponse(SuperUserConfig, data=dict(config={}, hostname='barbaz'), expected_code=403)
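ConfigForTesting resets the in-memory provider on both entry and exit, so every test block starts from a pristine config. A hypothetical usage sketch (config_exists and save_config are the renamed provider methods exercised above):

with ConfigForTesting() as config:
  # The provider starts empty after reset_for_test(); saving flips config_exists().
  assert not config.config_exists()
  config.save_config({'SUPER_USERS': ['devtable']})
  assert config.config_exists()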
@@ -20,7 +20,9 @@ TEST_DB_FILE = NamedTemporaryFile(delete=True)
class TestConfig(DefaultConfig):
  TESTING = True
  SECRET_KEY = 'a36c9d7d-25a9-4d3f-a586-3d2f8dc40a83'
  BILLING_TYPE = 'FakeStripe'

  TEST_DB_FILE = TEST_DB_FILE
  DB_URI = os.environ.get('TEST_DATABASE_URI', 'sqlite:///{0}'.format(TEST_DB_FILE.name))
  DB_CONNECTION_ARGS = {
    'threadlocals': True,
@@ -1,5 +1,13 @@
import urlparse
import github
import json
import logging
import time

from cachetools.func import TTLCache
from jwkest.jwk import KEYS, keyrep

logger = logging.getLogger(__name__)

class OAuthConfig(object):
  def __init__(self, config, key_name):

@@ -38,10 +46,8 @@ class OAuthConfig(object):


  def exchange_code_for_token(self, app_config, http_client, code, form_encode=False,
-                              redirect_suffix=''):
+                              redirect_suffix='', client_auth=False):
    payload = {
-      'client_id': self.client_id(),
-      'client_secret': self.client_secret(),
      'code': code,
      'grant_type': 'authorization_code',
      'redirect_uri': self.get_redirect_uri(app_config, redirect_suffix)

@@ -51,11 +57,18 @@ class OAuthConfig(object):
      'Accept': 'application/json'
    }

+    auth = None
+    if client_auth:
+      auth = (self.client_id(), self.client_secret())
+    else:
+      payload['client_id'] = self.client_id()
+      payload['client_secret'] = self.client_secret()
+
    token_url = self.token_endpoint()
    if form_encode:
-      get_access_token = http_client.post(token_url, data=payload, headers=headers)
+      get_access_token = http_client.post(token_url, data=payload, headers=headers, auth=auth)
    else:
-      get_access_token = http_client.post(token_url, params=payload, headers=headers)
+      get_access_token = http_client.post(token_url, params=payload, headers=headers, auth=auth)

    json_data = get_access_token.json()
    if not json_data:
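The new client_auth flag selects between the two client-authentication styles RFC 6749 permits: credentials in an HTTP Basic Authorization header versus credentials in the request body. A standalone requests sketch of the same branch (the URL and credentials are placeholders):

import requests

def exchange_code(token_url, client_id, client_secret, code, redirect_uri, client_auth=False):
  payload = {'code': code, 'grant_type': 'authorization_code', 'redirect_uri': redirect_uri}
  auth = None
  if client_auth:
    # Credentials travel in the Authorization header (HTTP Basic).
    auth = (client_id, client_secret)
  else:
    # Credentials travel in the form body.
    payload['client_id'] = client_id
    payload['client_secret'] = client_secret
  resp = requests.post(token_url, data=payload, headers={'Accept': 'application/json'}, auth=auth)
  return resp.json()

OIDC providers commonly accept only the Basic-auth form, which appears to be why the flag arrives together with the OIDC support below.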
@@ -248,3 +261,102 @@ class GitLabOAuthConfig(OAuthConfig):
      'AUTHORIZE_ENDPOINT': self.authorize_endpoint(),
      'GITLAB_ENDPOINT': self._endpoint(),
    }


OIDC_WELLKNOWN = ".well-known/openid-configuration"
PUBLIC_KEY_CACHE_TTL = 3600  # 1 hour


class OIDCConfig(OAuthConfig):
  def __init__(self, config, key_name):
    super(OIDCConfig, self).__init__(config, key_name)

    self._public_key_cache = TTLCache(1, PUBLIC_KEY_CACHE_TTL, missing=self._get_public_key)
    self._oidc_config = {}
    self._http_client = config['HTTPCLIENT']

    if self.config.get('OIDC_SERVER'):
      self._load_via_discovery(config['DEBUGGING'])

  def _load_via_discovery(self, is_debugging):
    oidc_server = self.config['OIDC_SERVER']
    if not oidc_server.startswith('https://') and not is_debugging:
      raise Exception('OIDC server must be accessed over SSL')

    discovery_url = urlparse.urljoin(oidc_server, OIDC_WELLKNOWN)
    discovery = self._http_client.get(discovery_url, timeout=5)

    if discovery.status_code / 100 != 2:
      raise Exception("Could not load OIDC discovery information")

    try:
      self._oidc_config = json.loads(discovery.text)
    except ValueError:
      logger.exception('Could not parse OIDC discovery for url: %s', discovery_url)
      raise Exception("Could not parse OIDC discovery information")

  def authorize_endpoint(self):
    return self._oidc_config.get('authorization_endpoint', '') + '?'

  def token_endpoint(self):
    return self._oidc_config.get('token_endpoint')

  def user_endpoint(self):
    return None

  def validate_client_id_and_secret(self, http_client, app_config):
    pass

  def get_public_config(self):
    return {
      'CLIENT_ID': self.client_id(),
      'AUTHORIZE_ENDPOINT': self.authorize_endpoint()
    }

  @property
  def issuer(self):
    return self.config.get('OIDC_ISSUER', self.config['OIDC_SERVER'])

  def get_public_key(self, force_refresh=False):
    """ Retrieves the public key for this handler. """
    # If force_refresh is true, we expire all the items in the cache by setting the time to
    # the current time + the expiration TTL.
    if force_refresh:
      self._public_key_cache.expire(time=time.time() + PUBLIC_KEY_CACHE_TTL)

    # Retrieve the public key from the cache. If the cache does not contain the public key,
    # it will internally call _get_public_key to retrieve it and then save it. The None is
    # a random key chosen to be stored in the cache, and could be anything.
    return self._public_key_cache[None]

  def _get_public_key(self):
    """ Retrieves the public key for this handler. """
    keys_url = self._oidc_config['jwks_uri']

    keys = KEYS()
    keys.load_from_url(keys_url)

    if not list(keys):
      raise Exception('No keys provided by OIDC provider')

    rsa_key = list(keys)[0]
    rsa_key.deserialize()
    return rsa_key.key.exportKey('PEM')


class DexOAuthConfig(OIDCConfig):
  def service_name(self):
    return 'Dex'

  @property
  def public_title(self):
    return self.get_public_config()['OIDC_TITLE']

  def get_public_config(self):
    return {
      'CLIENT_ID': self.client_id(),
      'AUTHORIZE_ENDPOINT': self.authorize_endpoint(),

      # TODO(jschorr): This should ideally come from the Dex side.
      'OIDC_TITLE': 'Dex',
      'OIDC_LOGO': 'https://tectonic.com/assets/ico/favicon-96x96.png'
    }
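For context, a hedged sketch of how the Dex handler above might be wired up. OIDC_SERVER and OIDC_ISSUER are the keys the class actually reads; the DEX_LOGIN_CONFIG key name and the HTTPCLIENT/DEBUGGING entries are assumptions for illustration only:

import requests

app_config = {
  'HTTPCLIENT': requests.Session(),  # used for the discovery and JWKS fetches
  'DEBUGGING': False,                # outside debugging, OIDC_SERVER must be https://
  'DEX_LOGIN_CONFIG': {              # hypothetical key name
    'CLIENT_ID': 'quay',
    'CLIENT_SECRET': 'not-a-real-secret',
    'OIDC_SERVER': 'https://dex.example.com/',
  },
}

dex_login = DexOAuthConfig(app_config, 'DEX_LOGIN_CONFIG')

Instantiating it performs discovery against https://dex.example.com/.well-known/openid-configuration; the provider's public key is then fetched lazily and cached for an hour via the TTLCache above.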
util/config/provider.py (deleted)
@@ -1,181 +0,0 @@
import os
import yaml
import logging
import json
from StringIO import StringIO

logger = logging.getLogger(__name__)

class CannotWriteConfigException(Exception):
  """ Exception raised when the config cannot be written. """
  pass

def _import_yaml(config_obj, config_file):
  with open(config_file) as f:
    c = yaml.safe_load(f)
    if not c:
      logger.debug('Empty YAML config file')
      return

    if isinstance(c, str):
      raise Exception('Invalid YAML config file: ' + str(c))

    for key in c.iterkeys():
      if key.isupper():
        config_obj[key] = c[key]

  return config_obj


def _export_yaml(config_obj, config_file):
  try:
    with open(config_file, 'w') as f:
      f.write(yaml.safe_dump(config_obj, encoding='utf-8', allow_unicode=True))
  except IOError as ioe:
    raise CannotWriteConfigException(str(ioe))


class BaseProvider(object):
  """ A configuration provider helps to load, save, and handle config override in the application.
  """

  def update_app_config(self, app_config):
    """ Updates the given application config object with the loaded override config. """
    raise NotImplementedError

  def get_yaml(self):
    """ Returns the contents of the YAML config override file, or None if none. """
    raise NotImplementedError

  def save_yaml(self, config_object):
    """ Updates the contents of the YAML config override file to those given. """
    raise NotImplementedError

  def yaml_exists(self):
    """ Returns true if a YAML config override file exists in the config volume. """
    raise NotImplementedError

  def volume_exists(self):
    """ Returns whether the config override volume exists. """
    raise NotImplementedError

  def volume_file_exists(self, filename):
    """ Returns whether the file with the given name exists under the config override volume. """
    raise NotImplementedError

  def get_volume_file(self, filename, mode='r'):
    """ Returns a Python file referring to the given name under the config override volume. """
    raise NotImplementedError

  def save_volume_file(self, filename, flask_file):
    """ Saves the given flask file to the config override volume, with the given
        filename.
    """
    raise NotImplementedError

  def requires_restart(self, app_config):
    """ If true, the configuration loaded into memory for the app does not match that on disk,
        indicating that this container requires a restart.
    """
    raise NotImplementedError


class FileConfigProvider(BaseProvider):
  """ Implementation of the config provider that reads the data from the file system. """
  def __init__(self, config_volume, yaml_filename, py_filename):
    self.config_volume = config_volume
    self.yaml_filename = yaml_filename
    self.py_filename = py_filename

    self.yaml_path = os.path.join(config_volume, yaml_filename)
    self.py_path = os.path.join(config_volume, py_filename)

  def update_app_config(self, app_config):
    if os.path.exists(self.py_path):
      logger.debug('Applying config file: %s', self.py_path)
      app_config.from_pyfile(self.py_path)

    if os.path.exists(self.yaml_path):
      logger.debug('Applying config file: %s', self.yaml_path)
      _import_yaml(app_config, self.yaml_path)

  def get_yaml(self):
    if not os.path.exists(self.yaml_path):
      return None

    config_obj = {}
    _import_yaml(config_obj, self.yaml_path)
    return config_obj

  def save_yaml(self, config_obj):
    _export_yaml(config_obj, self.yaml_path)

  def yaml_exists(self):
    return self.volume_file_exists(self.yaml_filename)

  def volume_exists(self):
    return os.path.exists(self.config_volume)

  def volume_file_exists(self, filename):
    return os.path.exists(os.path.join(self.config_volume, filename))

  def get_volume_file(self, filename, mode='r'):
    return open(os.path.join(self.config_volume, filename), mode)

  def save_volume_file(self, filename, flask_file):
    try:
      flask_file.save(os.path.join(self.config_volume, filename))
    except IOError as ioe:
      raise CannotWriteConfigException(str(ioe))

  def requires_restart(self, app_config):
    file_config = self.get_yaml()
    if not file_config:
      return False

    for key in file_config:
      if app_config.get(key) != file_config[key]:
        return True

    return False

class TestConfigProvider(BaseProvider):
  """ Implementation of the config provider for testing. Everything is kept in-memory instead of
      on the real file system. """
  def __init__(self):
    self.files = {}
    self._config = None

  def update_app_config(self, app_config):
    self._config = app_config

  def get_yaml(self):
    if not 'config.yaml' in self.files:
      return None

    return json.loads(self.files.get('config.yaml', '{}'))

  def save_yaml(self, config_obj):
    self.files['config.yaml'] = json.dumps(config_obj)

  def yaml_exists(self):
    return 'config.yaml' in self.files

  def volume_exists(self):
    return True

  def volume_file_exists(self, filename):
    return filename in self.files

  def save_volume_file(self, filename, flask_file):
    self.files[filename] = ''

  def get_volume_file(self, filename, mode='r'):
    return StringIO(self.files[filename])

  def requires_restart(self, app_config):
    return False

  def reset_for_test(self):
    self._config['SUPER_USERS'] = ['devtable']
    self.files = {}
util/config/provider/__init__.py (new file)
@@ -0,0 +1,16 @@
from util.config.provider.fileprovider import FileConfigProvider
from util.config.provider.testprovider import TestConfigProvider
from util.config.provider.k8sprovider import KubernetesConfigProvider

import os

def get_config_provider(config_volume, yaml_filename, py_filename, testing=False, kubernetes=False):
  """ Loads and returns the config provider for the current environment. """
  if testing:
    return TestConfigProvider()

  if kubernetes:
    return KubernetesConfigProvider(config_volume, yaml_filename, py_filename)

  return FileConfigProvider(config_volume, yaml_filename, py_filename)
util/config/provider/baseprovider.py (new file)
@@ -0,0 +1,84 @@
import yaml
import logging

logger = logging.getLogger(__name__)

class CannotWriteConfigException(Exception):
  """ Exception raised when the config cannot be written. """
  pass

def import_yaml(config_obj, config_file):
  with open(config_file) as f:
    c = yaml.safe_load(f)
    if not c:
      logger.debug('Empty YAML config file')
      return

    if isinstance(c, str):
      raise Exception('Invalid YAML config file: ' + str(c))

    for key in c.iterkeys():
      if key.isupper():
        config_obj[key] = c[key]

  return config_obj


def get_yaml(config_obj):
  return yaml.safe_dump(config_obj, encoding='utf-8', allow_unicode=True)

def export_yaml(config_obj, config_file):
  try:
    with open(config_file, 'w') as f:
      f.write(get_yaml(config_obj))
  except IOError as ioe:
    raise CannotWriteConfigException(str(ioe))


class BaseProvider(object):
  """ A configuration provider helps to load, save, and handle config override in the application.
  """

  @property
  def provider_id(self):
    raise NotImplementedError

  def update_app_config(self, app_config):
    """ Updates the given application config object with the loaded override config. """
    raise NotImplementedError

  def get_config(self):
    """ Returns the contents of the config override file, or None if none. """
    raise NotImplementedError

  def save_config(self, config_object):
    """ Updates the contents of the config override file to those given. """
    raise NotImplementedError

  def config_exists(self):
    """ Returns true if a config override file exists in the config volume. """
    raise NotImplementedError

  def volume_exists(self):
    """ Returns whether the config override volume exists. """
    raise NotImplementedError

  def volume_file_exists(self, filename):
    """ Returns whether the file with the given name exists under the config override volume. """
    raise NotImplementedError

  def get_volume_file(self, filename, mode='r'):
    """ Returns a Python file referring to the given name under the config override volume. """
    raise NotImplementedError

  def save_volume_file(self, filename, flask_file):
    """ Saves the given flask file to the config override volume, with the given
        filename.
    """
    raise NotImplementedError

  def requires_restart(self, app_config):
    """ If true, the configuration loaded into memory for the app does not match that on disk,
        indicating that this container requires a restart.
    """
    raise NotImplementedError
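One subtlety in import_yaml above: only uppercase top-level keys are copied into the target, matching Flask's convention that configuration attributes are uppercase. A quick PyYAML demonstration (the same library the provider uses):

import yaml

raw = yaml.safe_load("""
SERVER_HOSTNAME: quay.example.com
lowercase_key: ignored
""")

config_obj = {}
for key in raw:
  if key.isupper():
    config_obj[key] = raw[key]

assert config_obj == {'SERVER_HOSTNAME': 'quay.example.com'}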
util/config/provider/fileprovider.py (new file)
@@ -0,0 +1,73 @@
import os
import logging

from util.config.provider.baseprovider import (BaseProvider, import_yaml, export_yaml,
                                               CannotWriteConfigException)

logger = logging.getLogger(__name__)

class FileConfigProvider(BaseProvider):
  """ Implementation of the config provider that reads the data from the file system. """
  def __init__(self, config_volume, yaml_filename, py_filename):
    self.config_volume = config_volume
    self.yaml_filename = yaml_filename
    self.py_filename = py_filename

    self.yaml_path = os.path.join(config_volume, yaml_filename)
    self.py_path = os.path.join(config_volume, py_filename)

  @property
  def provider_id(self):
    return 'file'

  def update_app_config(self, app_config):
    if os.path.exists(self.py_path):
      logger.debug('Applying config file: %s', self.py_path)
      app_config.from_pyfile(self.py_path)

    if os.path.exists(self.yaml_path):
      logger.debug('Applying config file: %s', self.yaml_path)
      import_yaml(app_config, self.yaml_path)

  def get_config(self):
    if not os.path.exists(self.yaml_path):
      return None

    config_obj = {}
    import_yaml(config_obj, self.yaml_path)
    return config_obj

  def save_config(self, config_obj):
    export_yaml(config_obj, self.yaml_path)

  def config_exists(self):
    return self.volume_file_exists(self.yaml_filename)

  def volume_exists(self):
    return os.path.exists(self.config_volume)

  def volume_file_exists(self, filename):
    return os.path.exists(os.path.join(self.config_volume, filename))

  def get_volume_file(self, filename, mode='r'):
    return open(os.path.join(self.config_volume, filename), mode)

  def save_volume_file(self, filename, flask_file):
    filepath = os.path.join(self.config_volume, filename)
    try:
      flask_file.save(filepath)
    except IOError as ioe:
      raise CannotWriteConfigException(str(ioe))

    return filepath

  def requires_restart(self, app_config):
    file_config = self.get_config()
    if not file_config:
      return False

    for key in file_config:
      if app_config.get(key) != file_config[key]:
        return True

    return False
util/config/provider/k8sprovider.py (new file)
@@ -0,0 +1,109 @@
import os
import logging
import json
import base64

from requests import Request, Session

from util.config.provider.baseprovider import get_yaml, CannotWriteConfigException
from util.config.provider.fileprovider import FileConfigProvider

logger = logging.getLogger(__name__)

KUBERNETES_API_HOST = 'kubernetes.default.svc.cluster.local'

SERVICE_ACCOUNT_TOKEN_PATH = '/var/run/secrets/kubernetes.io/serviceaccount/token'

ER_NAMESPACE = 'quay'
ER_CONFIG_SECRET = 'quay-config-secret'

class KubernetesConfigProvider(FileConfigProvider):
  """ Implementation of the config provider that reads and writes configuration
      data from a Kubernetes Secret. """
  def __init__(self, config_volume, yaml_filename, py_filename):
    super(KubernetesConfigProvider, self).__init__(config_volume, yaml_filename, py_filename)

    self.yaml_filename = yaml_filename

    # Load the service account token from the local store.
    if not os.path.exists(SERVICE_ACCOUNT_TOKEN_PATH):
      raise Exception('Cannot load Kubernetes service account token')

    with open(SERVICE_ACCOUNT_TOKEN_PATH, 'r') as f:
      self._service_token = f.read()

    # Make sure the configuration volume exists.
    if not self.volume_exists():
      os.makedirs(config_volume)

  @property
  def provider_id(self):
    return 'k8s'

  def save_config(self, config_obj):
    self._update_secret_file(self.yaml_filename, get_yaml(config_obj))
    super(KubernetesConfigProvider, self).save_config(config_obj)

  def save_volume_file(self, filename, flask_file):
    filepath = super(KubernetesConfigProvider, self).save_volume_file(filename, flask_file)

    try:
      with open(filepath, 'r') as f:
        self._update_secret_file(filename, f.read())
    except IOError as ioe:
      raise CannotWriteConfigException(str(ioe))

  def _assert_success(self, response):
    if response.status_code != 200:
      logger.error('K8s API call failed with response: %s => %s', response.status_code,
                   response.text)
      raise CannotWriteConfigException('K8s API call failed. Please report this to support')

  def _update_secret_file(self, filename, value):
    secret_data = {}
    secret_data[filename] = base64.b64encode(value)

    data = {
      "kind": "Secret",
      "apiVersion": "v1",
      "metadata": {
        "name": ER_CONFIG_SECRET
      },
      "data": secret_data
    }

    secret_url = 'namespaces/%s/secrets/%s' % (ER_NAMESPACE, ER_CONFIG_SECRET)
    secret = self._lookup_secret()
    if not secret:
      self._assert_success(self._execute_k8s_api('POST', secret_url, data))
      return

    if not 'data' in secret:
      secret['data'] = {}

    secret['data'][filename] = base64.b64encode(value)
    self._assert_success(self._execute_k8s_api('PUT', secret_url, secret))


  def _lookup_secret(self):
    secret_url = 'namespaces/%s/secrets/%s' % (ER_NAMESPACE, ER_CONFIG_SECRET)
    response = self._execute_k8s_api('GET', secret_url)
    if response.status_code != 200:
      return None

    return json.loads(response.text)

  def _execute_k8s_api(self, method, relative_url, data=None):
    headers = {
      'Authorization': 'Bearer ' + self._service_token
    }

    if data:
      headers['Content-Type'] = 'application/json'

    data = json.dumps(data) if data else None
    session = Session()
    url = 'https://%s/api/v1/%s' % (KUBERNETES_API_HOST, relative_url)

    request = Request(method, url, data=data, headers=headers)
    return session.send(request.prepare(), verify=False, timeout=2)
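The Kubernetes provider above persists every config file twice: once to the local volume (via the inherited FileConfigProvider) and once into a v1 Secret through the API, so the data survives pod rescheduling. The secret payload is just base64-encoded file contents; a standalone sketch of its construction (the constants mirror ER_NAMESPACE/ER_CONFIG_SECRET above):

import base64
import json

def secret_payload(filename, contents, secret_name='quay-config-secret'):
  # Kubernetes v1 Secrets carry file contents base64-encoded under .data.
  return json.dumps({
    'kind': 'Secret',
    'apiVersion': 'v1',
    'metadata': {'name': secret_name},
    'data': {filename: base64.b64encode(contents)},
  })

Note that _execute_k8s_api above passes verify=False, so the in-cluster API certificate is not checked; the service-account bearer token is the only authentication in play.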
util/config/provider/testprovider.py (new file)
@@ -0,0 +1,49 @@
import json
from StringIO import StringIO

from util.config.provider.baseprovider import BaseProvider

class TestConfigProvider(BaseProvider):
  """ Implementation of the config provider for testing. Everything is kept in-memory instead of
      on the real file system. """
  def __init__(self):
    self.files = {}
    self._config = None

  @property
  def provider_id(self):
    return 'test'

  def update_app_config(self, app_config):
    self._config = app_config

  def get_config(self):
    if not 'config.yaml' in self.files:
      return None

    return json.loads(self.files.get('config.yaml', '{}'))

  def save_config(self, config_obj):
    self.files['config.yaml'] = json.dumps(config_obj)

  def config_exists(self):
    return 'config.yaml' in self.files

  def volume_exists(self):
    return True

  def volume_file_exists(self, filename):
    return filename in self.files

  def save_volume_file(self, filename, flask_file):
    self.files[filename] = ''

  def get_volume_file(self, filename, mode='r'):
    return StringIO(self.files[filename])

  def requires_restart(self, app_config):
    return False

  def reset_for_test(self):
    self._config['SUPER_USERS'] = ['devtable']
    self.files = {}
@@ -19,7 +19,7 @@ from auth.auth_context import get_authenticated_user
from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
from bitbucket import BitBucket

-from app import app, CONFIG_PROVIDER, get_app_url, OVERRIDE_CONFIG_DIRECTORY
+from app import app, config_provider, get_app_url, OVERRIDE_CONFIG_DIRECTORY

logger = logging.getLogger(__name__)

@@ -83,7 +83,7 @@ def _validate_registry_storage(config, _):
  driver = get_storage_provider(config)

  # Run custom validation on the driver.
-  driver.validate()
+  driver.validate(app.config['HTTPCLIENT'])

  # Put and remove a temporary file to make sure the normal storage paths work.
  driver.put_content('_verify', 'testing 123')

@@ -223,10 +223,10 @@ def _validate_ssl(config, _):
    return

  for filename in SSL_FILENAMES:
-    if not CONFIG_PROVIDER.volume_file_exists(filename):
+    if not config_provider.volume_file_exists(filename):
      raise Exception('Missing required SSL file: %s' % filename)

-  with CONFIG_PROVIDER.get_volume_file(SSL_FILENAMES[0]) as f:
+  with config_provider.get_volume_file(SSL_FILENAMES[0]) as f:
    cert_contents = f.read()

  # Validate the certificate.

@@ -239,7 +239,7 @@ def _validate_ssl(config, _):
    raise Exception('The specified SSL certificate has expired.')

  private_key_path = None
-  with CONFIG_PROVIDER.get_volume_file(SSL_FILENAMES[1]) as f:
+  with config_provider.get_volume_file(SSL_FILENAMES[1]) as f:
    private_key_path = f.name

  if not private_key_path:
@@ -2,7 +2,8 @@ import logging
import json

from app import app
-from data.database import configure, RepositoryBuildTrigger, BuildTriggerService
+from data.database import configure, BaseModel, uuid_generator
+from peewee import *
from bitbucket import BitBucket
from endpoints.trigger import BitbucketBuildTrigger

@@ -10,6 +11,31 @@ configure(app.config)

logger = logging.getLogger(__name__)

# Note: We vendor the RepositoryBuildTrigger and its dependencies here
class Repository(BaseModel):
  pass

class BuildTriggerService(BaseModel):
  name = CharField(index=True, unique=True)

class AccessToken(BaseModel):
  pass

class User(BaseModel):
  pass

class RepositoryBuildTrigger(BaseModel):
  uuid = CharField(default=uuid_generator)
  service = ForeignKeyField(BuildTriggerService, index=True)
  repository = ForeignKeyField(Repository, index=True)
  connected_user = ForeignKeyField(User)
  auth_token = CharField(null=True)
  private_key = TextField(null=True)
  config = TextField(default='{}')
  write_token = ForeignKeyField(AccessToken, null=True)
  pull_robot = ForeignKeyField(User, related_name='triggerpullrobot')


def run_bitbucket_migration():
  bitbucket_trigger = BuildTriggerService.get(BuildTriggerService.name == "bitbucket")
util/migrate/migrategithubdeploykeys.py (new file)
@@ -0,0 +1,87 @@
import logging
import logging.config
import json

from data.database import RepositoryBuildTrigger, BuildTriggerService, db, db_for_update
from app import app
from endpoints.trigger import BuildTriggerHandler
from util.security.ssh import generate_ssh_keypair
from github import GithubException

logger = logging.getLogger(__name__)

def backfill_github_deploykeys():
  """ Generates and saves private deploy keys for any GitHub build triggers still relying on
      the old buildpack behavior. """
  logger.setLevel(logging.DEBUG)
  logger.debug('GitHub deploy key backfill: Began execution')

  encountered = set()
  github_service = BuildTriggerService.get(name='github')

  while True:
    build_trigger_ids = list(RepositoryBuildTrigger
                             .select(RepositoryBuildTrigger.id)
                             .where(RepositoryBuildTrigger.private_key >> None)
                             .where(RepositoryBuildTrigger.service == github_service)
                             .limit(10))

    filtered_ids = [trigger.id for trigger in build_trigger_ids if trigger.id not in encountered]
    if len(filtered_ids) == 0:
      # We're done!
      logger.debug('GitHub deploy key backfill: Backfill completed')
      return

    logger.debug('GitHub deploy key backfill: Found %s records to update', len(filtered_ids))
    for trigger_id in filtered_ids:
      encountered.add(trigger_id)
      logger.debug('Updating build trigger: %s', trigger_id)

      with app.config['DB_TRANSACTION_FACTORY'](db):
        try:
          query = RepositoryBuildTrigger.select(RepositoryBuildTrigger.id == trigger_id)
          trigger = db_for_update(query).get()
        except RepositoryBuildTrigger.DoesNotExist:
          logger.debug('Could not find build trigger %s', trigger_id)
          continue

        handler = BuildTriggerHandler.get_handler(trigger)

        config = handler.config
        build_source = config['build_source']
        gh_client = handler._get_client()

        # Find the GitHub repository.
        try:
          gh_repo = gh_client.get_repo(build_source)
        except GithubException:
          logger.exception('Cannot find repository %s for trigger %s', build_source, trigger.id)
          continue

        # Add a deploy key to the GitHub repository.
        public_key, private_key = generate_ssh_keypair()
        config['credentials'] = [
          {
            'name': 'SSH Public Key',
            'value': public_key,
          },
        ]

        logger.debug('Adding deploy key to build trigger %s', trigger.id)
        try:
          deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'], public_key)
          config['deploy_key_id'] = deploy_key.id
        except GithubException:
          logger.exception('Cannot add deploy key to repository %s for trigger %s', build_source, trigger.id)
          continue

        logger.debug('Saving deploy key for trigger %s', trigger.id)
        trigger.used_legacy_github = True
        trigger.private_key = private_key
        trigger.config = json.dumps(config)
        trigger.save()


if __name__ == "__main__":
  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
  backfill_github_deploykeys()
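The backfill loop above has one non-obvious guard: triggers that fail (missing repository, GitHub API error) are skipped with continue and never receive a private key, so the ten-row query would keep returning them indefinitely. The encountered set is what guarantees the loop still terminates. The same pattern in miniature:

def backfill(fetch_batch, process):
  # fetch_batch() returns ids still needing work; process(item_id) may fail
  # without fixing the row, so remember everything already attempted.
  encountered = set()
  while True:
    pending = [item_id for item_id in fetch_batch() if item_id not in encountered]
    if not pending:
      return
    for item_id in pending:
      encountered.add(item_id)
      process(item_id)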
util/seo.py
@@ -32,11 +32,13 @@ def render_snapshot(url):
  # Remove script tags
  logger.info('Removing script tags: %s' % url)

-  soup = BeautifulSoup(out_html.decode('utf8'))
-  to_extract = soup.findAll('script')
-  for item in to_extract:
-    item.extract()
-
-  logger.info('Snapshotted url: %s' % url)
+  try:
+    soup = BeautifulSoup(out_html.decode('utf8'), 'html.parser')
+    to_extract = soup.findAll('script')
+    for item in to_extract:
+      item.extract()
+  except:
+    logger.exception('Exception when trying to parse served HTML')
+    return out_html

  return str(soup)
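Two separate fixes land in render_snapshot above: parsing is wrapped in a broad try/except so a page BeautifulSoup cannot handle is served unmodified instead of failing the snapshot, and the parser is pinned to the stdlib html.parser rather than letting BeautifulSoup pick one. A minimal check of the pinned-parser call (bs4):

from bs4 import BeautifulSoup

soup = BeautifulSoup('<p>hi<script>alert(1)</script></p>', 'html.parser')
for item in soup.findAll('script'):
  item.extract()

assert str(soup) == '<p>hi</p>'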
@@ -24,34 +24,34 @@ class ArchiveBuildLogsWorker(Worker):
  def _archive_redis_buildlogs(self):
    """ Archive a single build, choosing a candidate at random. This process must be idempotent to
        avoid needing two-phase commit. """
-    try:
-      # Get a random build to archive
-      to_archive = model.build.archivable_buildlogs_query().order_by(db_random_func()).get()
-      logger.debug('Archiving: %s', to_archive.uuid)
-
-      length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
-      to_encode = {
-        'start': 0,
-        'total': length,
-        'logs': entries,
-      }
-
-      with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
-        with GzipFile('testarchive', fileobj=tempfile) as zipstream:
-          for chunk in StreamingJSONEncoder().iterencode(to_encode):
-            zipstream.write(chunk)
-
-        tempfile.seek(0)
-        log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
-                               file_id=to_archive.uuid)
-
-      to_archive.logs_archived = True
-      to_archive.save()
-
-      build_logs.expire_log_entries(to_archive.uuid)
-
-    except RepositoryBuild.DoesNotExist:
+    # Get a random build to archive
+    to_archive = model.build.get_archivable_build()
+    if to_archive is None:
+      logger.debug('No more builds to archive')
+      return
+
+    logger.debug('Archiving: %s', to_archive.uuid)
+
+    length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
+    to_encode = {
+      'start': 0,
+      'total': length,
+      'logs': entries,
+    }
+
+    with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
+      with GzipFile('testarchive', fileobj=tempfile) as zipstream:
+        for chunk in StreamingJSONEncoder().iterencode(to_encode):
+          zipstream.write(chunk)
+
+      tempfile.seek(0)
+      log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
+                             file_id=to_archive.uuid)
+
+    to_archive.logs_archived = True
+    to_archive.save()
+
+    build_logs.expire_log_entries(to_archive.uuid)


if __name__ == "__main__":
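The archiver never materializes the whole JSON document: chunks from an incremental encoder stream straight into a gzip wrapper over a SpooledTemporaryFile, which spills from memory to disk only past a size threshold. The stdlib json.JSONEncoder().iterencode offers the same incremental interface as the project's StreamingJSONEncoder; a sketch under that substitution:

import json
from gzip import GzipFile
from tempfile import SpooledTemporaryFile

def gzip_json(payload, memory_limit=64 * 1024):
  tempfile = SpooledTemporaryFile(memory_limit)
  with GzipFile('archive', fileobj=tempfile) as zipstream:
    for chunk in json.JSONEncoder().iterencode(payload):
      zipstream.write(chunk)  # str chunks on Python 2; .encode('utf-8') them on Python 3
  tempfile.seek(0)
  return tempfile  # positioned at 0, ready to hand to storage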
@@ -23,6 +23,12 @@ class DiffsWorker(QueueWorker):
      msg = ('Image does not exist in database \'%s\' for repo \'%s/\'%s\'' %
             (image_id, namespace, repository))
      logger.warning(msg)
    except IOError:
      # This exception is unrecoverable, and the item should continue and be
      # marked as complete.
      msg = ("Data could not be retrieved for image %s under repo %s/%s" %
             (image_id, namespace, repository))
      logger.exception(msg)

    return True