Merge branch 'master' into nomenclature
Conflicts: test/data/test.db
commit f4681f2c18
60 changed files with 1716 additions and 496 deletions
@@ -8,4 +8,5 @@ venv
 Bobfile
 README.md
 requirements-nover.txt
 run-local.sh
+.DS_Store
@@ -22,15 +22,18 @@ RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev li
 ADD binary_dependencies binary_dependencies
 RUN gdebi --n binary_dependencies/*.deb
 
-# Grunt
+# Install Grunt
 RUN ln -s /usr/bin/nodejs /usr/bin/node
 RUN npm install -g grunt-cli
 
+# Install Grunt depenencies
+ADD grunt grunt
+RUN cd grunt && npm install
+
 # Add all of the files!
 ADD . .
 
 # Run grunt
-RUN cd grunt && npm install
 RUN cd grunt && grunt
 
 ADD conf/init/svlogd_config /svlogd_config
app.py (6 changes)
@@ -19,6 +19,7 @@ from util.analytics import Analytics
 from util.exceptionlog import Sentry
 from util.queuemetrics import QueueMetrics
 from util.names import urn_generator
+from util.oauth import GoogleOAuthConfig, GithubOAuthConfig
 from data.billing import Billing
 from data.buildlogs import BuildLogs
 from data.archivedlogs import LogArchive
@@ -131,6 +132,11 @@ queue_metrics = QueueMetrics(app)
 authentication = UserAuthentication(app)
 userevents = UserEventsBuilderModule(app)
 
+github_login = GithubOAuthConfig(app, 'GITHUB_LOGIN_CONFIG')
+github_trigger = GithubOAuthConfig(app, 'GITHUB_TRIGGER_CONFIG')
+google_login = GoogleOAuthConfig(app, 'GOOGLE_LOGIN_CONFIG')
+oauth_apps = [github_login, github_trigger, google_login]
+
 tf = app.config['DB_TRANSACTION_FACTORY']
 image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
 dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
@@ -3,3 +3,6 @@ set -e
 
 # Run the database migration
 PYTHONPATH=. venv/bin/alembic upgrade head
+
+# Run the uncompressed size migration
+PYTHONPATH=. venv/bin/python -m util.uncompressedsize
@@ -58,5 +58,5 @@ location /v1/_ping {
 add_header Content-Type text/plain;
 add_header X-Docker-Registry-Version 0.6.0;
 add_header X-Docker-Registry-Standalone 0;
-return 200 'okay';
+return 200 'true';
 }
config.py (32 changes)
@@ -15,11 +15,10 @@ def build_requests_session():
 
 
 # The set of configuration key names that will be accessible in the client. Since these
-# values are set to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list.
-CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID',
-                    'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY',
-                    'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE',
-                    'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID',
+# values are sent to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list.
+CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'MIXPANEL_KEY',
+                    'STRIPE_PUBLISHABLE_KEY', 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN',
+                    'AUTHENTICATION_TYPE', 'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT',
                     'CONTACT_INFO']
 
 
@@ -108,22 +107,11 @@ class DefaultConfig(object):
   SENTRY_PUBLIC_DSN = None
 
   # Github Config
-  GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token'
-  GITHUB_USER_URL = 'https://api.github.com/user'
-  GITHUB_USER_EMAILS = GITHUB_USER_URL + '/emails'
-
-  GITHUB_CLIENT_ID = ''
-  GITHUB_CLIENT_SECRET = ''
-
-  GITHUB_LOGIN_CLIENT_ID = ''
-  GITHUB_LOGIN_CLIENT_SECRET = ''
+  GITHUB_LOGIN_CONFIG = None
+  GITHUB_TRIGGER_CONFIG = None
 
   # Google Config.
-  GOOGLE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
-  GOOGLE_USER_URL = 'https://www.googleapis.com/oauth2/v1/userinfo'
-
-  GOOGLE_LOGIN_CLIENT_ID = ''
-  GOOGLE_LOGIN_CLIENT_SECRET = ''
+  GOOGLE_LOGIN_CONFIG = None
 
   # Requests based HTTP client with a large request pool
   HTTPCLIENT = build_requests_session()
@@ -179,6 +167,9 @@ class DefaultConfig(object):
 
   DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
 
+  # Health checker.
+  HEALTH_CHECKER = ('LocalHealthCheck', {})
+
   # Userfiles
   USERFILES_LOCATION = 'local_us'
   USERFILES_PATH = 'userfiles/'
@@ -186,3 +177,6 @@ class DefaultConfig(object):
   # Build logs archive
   LOG_ARCHIVE_LOCATION = 'local_us'
   LOG_ARCHIVE_PATH = 'logarchive/'
+
+  # For enterprise:
+  MAXIMUM_REPOSITORY_USAGE = 20
data/database.py (116 changes)
@@ -35,6 +35,36 @@ class CallableProxy(Proxy):
       raise AttributeError('Cannot use uninitialized Proxy.')
     return self.obj(*args, **kwargs)
 
+
+class CloseForLongOperation(object):
+  """ Helper object which disconnects the database then reconnects after the nested operation
+      completes.
+  """
+
+  def __init__(self, config_object):
+    self.config_object = config_object
+
+  def __enter__(self):
+    close_db_filter(None)
+
+  def __exit__(self, type, value, traceback):
+    # Note: Nothing to do. The next SQL call will reconnect automatically.
+    pass
+
+
+class UseThenDisconnect(object):
+  """ Helper object for conducting work with a database and then tearing it down. """
+
+  def __init__(self, config_object):
+    self.config_object = config_object
+
+  def __enter__(self):
+    configure(self.config_object)
+
+  def __exit__(self, type, value, traceback):
+    close_db_filter(None)
+
+
 db = Proxy()
 read_slave = Proxy()
 db_random_func = CallableProxy()
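Note: the two helpers added above are plain context managers; after close_db_filter(None), peewee reconnects lazily on the next query. A minimal usage sketch (the worker function and do_long_build() are hypothetical, not part of this change):

    from data.database import CloseForLongOperation

    def perform_build(config_object, job):
      # Don't hold a DB connection across a slow external operation.
      with CloseForLongOperation(config_object):
        do_long_build(job)  # hypothetical long-running call
      # The next peewee query transparently re-opens the connection.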
@@ -56,6 +86,7 @@ def _db_from_url(url, db_kwargs):
 
 
 def configure(config_object):
+  logger.debug('Configuring database')
   db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
   write_db_uri = config_object['DB_URI']
   db.initialize(_db_from_url(write_db_uri, db_kwargs))
@@ -90,6 +121,15 @@ def close_db_filter(_):
     read_slave.close()
 
 
+class QuayUserField(ForeignKeyField):
+  def __init__(self, allows_robots=False, *args, **kwargs):
+    self.allows_robots = allows_robots
+    if not 'rel_model' in kwargs:
+      kwargs['rel_model'] = User
+
+    super(QuayUserField, self).__init__(*args, **kwargs)
+
+
 class BaseModel(ReadSlaveModel):
   class Meta:
     database = db
@@ -109,6 +149,19 @@ class User(BaseModel):
   invalid_login_attempts = IntegerField(default=0)
   last_invalid_login = DateTimeField(default=datetime.utcnow)
 
+  def delete_instance(self, recursive=False, delete_nullable=False):
+    # If we are deleting a robot account, only execute the subset of queries necessary.
+    if self.robot:
+      # For all the model dependencies, only delete those that allow robots.
+      for query, fk in self.dependencies(search_nullable=True):
+        if isinstance(fk, QuayUserField) and fk.allows_robots:
+          model = fk.model_class
+          model.delete().where(query).execute()
+
+      # Delete the instance itself.
+      super(User, self).delete_instance(recursive=False, delete_nullable=False)
+    else:
+      super(User, self).delete_instance(recursive=recursive, delete_nullable=delete_nullable)
 
 class TeamRole(BaseModel):
   name = CharField(index=True)
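Note: User.delete_instance leans on peewee's Model.dependencies(search_nullable=True), which yields a (where-clause, foreign-key-field) pair per referencing model; for robot accounts, only fields declared as QuayUserField(allows_robots=True) get cleaned up. A sketch of how a new model would opt in (BuildNote is hypothetical, not in this diff):

    class BuildNote(BaseModel):
      # allows_robots=True means rows here are deleted when a robot
      # account referenced by 'user' is deleted.
      user = QuayUserField(allows_robots=True, index=True)
      note = TextField(default='')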
@@ -116,7 +169,7 @@ class TeamRole(BaseModel):
 
 class Team(BaseModel):
   name = CharField(index=True)
-  organization = ForeignKeyField(User, index=True)
+  organization = QuayUserField(index=True)
   role = ForeignKeyField(TeamRole)
   description = TextField(default='')
 
@@ -130,7 +183,7 @@ class Team(BaseModel):
 
 
 class TeamMember(BaseModel):
-  user = ForeignKeyField(User, index=True)
+  user = QuayUserField(allows_robots=True, index=True)
   team = ForeignKeyField(Team, index=True)
 
   class Meta:
@@ -144,7 +197,7 @@ class TeamMember(BaseModel):
 
 class TeamMemberInvite(BaseModel):
   # Note: Either user OR email will be filled in, but not both.
-  user = ForeignKeyField(User, index=True, null=True)
+  user = QuayUserField(index=True, null=True)
   email = CharField(null=True)
   team = ForeignKeyField(Team, index=True)
   inviter = ForeignKeyField(User, related_name='inviter')
@@ -156,7 +209,7 @@ class LoginService(BaseModel):
 
 
 class FederatedLogin(BaseModel):
-  user = ForeignKeyField(User, index=True)
+  user = QuayUserField(allows_robots=True, index=True)
   service = ForeignKeyField(LoginService, index=True)
   service_ident = CharField()
   metadata_json = TextField(default='{}')
@@ -178,7 +231,7 @@ class Visibility(BaseModel):
 
 
 class Repository(BaseModel):
-  namespace_user = ForeignKeyField(User, null=True)
+  namespace_user = QuayUserField(null=True)
   name = CharField()
   visibility = ForeignKeyField(Visibility)
   description = TextField(null=True)
@@ -192,6 +245,24 @@ class Repository(BaseModel):
       (('namespace_user', 'name'), True),
     )
 
+  def delete_instance(self, recursive=False, delete_nullable=False):
+    # Note: peewee generates extra nested deletion statements here that are slow and unnecessary.
+    # Therefore, we define our own deletion order here and use the dependency system to verify it.
+    ordered_dependencies = [RepositoryAuthorizedEmail, RepositoryTag, Image, LogEntry,
+                            RepositoryBuild, RepositoryBuildTrigger, RepositoryNotification,
+                            RepositoryPermission, AccessToken]
+
+    for query, fk in self.dependencies(search_nullable=True):
+      model = fk.model_class
+      if not model in ordered_dependencies:
+        raise Exception('Missing repository deletion dependency: %s', model)
+
+    for model in ordered_dependencies:
+      model.delete().where(model.repository == self).execute()
+
+    # Delete the repository itself.
+    super(Repository, self).delete_instance(recursive=False, delete_nullable=False)
+
 
 class Role(BaseModel):
   name = CharField(index=True, unique=True)
@@ -199,7 +270,7 @@ class Role(BaseModel):
 
 class RepositoryPermission(BaseModel):
   team = ForeignKeyField(Team, index=True, null=True)
-  user = ForeignKeyField(User, index=True, null=True)
+  user = QuayUserField(allows_robots=True, index=True, null=True)
   repository = ForeignKeyField(Repository, index=True)
   role = ForeignKeyField(Role)
 
@@ -213,12 +284,12 @@ class RepositoryPermission(BaseModel):
 
 
 class PermissionPrototype(BaseModel):
-  org = ForeignKeyField(User, index=True, related_name='orgpermissionproto')
+  org = QuayUserField(index=True, related_name='orgpermissionproto')
   uuid = CharField(default=uuid_generator)
-  activating_user = ForeignKeyField(User, index=True, null=True,
-                                    related_name='userpermissionproto')
-  delegate_user = ForeignKeyField(User, related_name='receivingpermission',
-                                  null=True)
+  activating_user = QuayUserField(allows_robots=True, index=True, null=True,
+                                  related_name='userpermissionproto')
+  delegate_user = QuayUserField(allows_robots=True,related_name='receivingpermission',
+                                null=True)
   delegate_team = ForeignKeyField(Team, related_name='receivingpermission',
                                   null=True)
   role = ForeignKeyField(Role)
@@ -249,16 +320,16 @@ class RepositoryBuildTrigger(BaseModel):
   uuid = CharField(default=uuid_generator)
   service = ForeignKeyField(BuildTriggerService, index=True)
   repository = ForeignKeyField(Repository, index=True)
-  connected_user = ForeignKeyField(User)
+  connected_user = QuayUserField()
   auth_token = CharField()
   config = TextField(default='{}')
   write_token = ForeignKeyField(AccessToken, null=True)
-  pull_robot = ForeignKeyField(User, null=True, related_name='triggerpullrobot')
+  pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot')
 
 
 class EmailConfirmation(BaseModel):
   code = CharField(default=random_string_generator(), unique=True, index=True)
-  user = ForeignKeyField(User)
+  user = QuayUserField()
   pw_reset = BooleanField(default=False)
   new_email = CharField(null=True)
   email_confirm = BooleanField(default=False)
@@ -315,7 +386,7 @@ class Image(BaseModel):
   # to be globally unique we can't treat them as such for permissions and
   # security reasons. So rather than Repository <-> Image being many to many
   # each image now belongs to exactly one repository.
-  docker_image_id = CharField()
+  docker_image_id = CharField(index=True)
   repository = ForeignKeyField(Repository)
 
   # '/' separated list of ancestory ids, e.g. /1/2/6/7/10/
@@ -365,7 +436,7 @@ class RepositoryBuild(BaseModel):
   started = DateTimeField(default=datetime.now)
   display_name = CharField()
   trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
-  pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot')
+  pull_robot = QuayUserField(null=True, related_name='buildpullrobot')
   logs_archived = BooleanField(default=False)
 
 
@@ -384,11 +455,10 @@ class LogEntryKind(BaseModel):
 
 class LogEntry(BaseModel):
   kind = ForeignKeyField(LogEntryKind, index=True)
-  account = ForeignKeyField(User, index=True, related_name='account')
-  performer = ForeignKeyField(User, index=True, null=True,
-                              related_name='performer')
+  account = QuayUserField(index=True, related_name='account')
+  performer = QuayUserField(allows_robots=True, index=True, null=True,
+                            related_name='performer')
   repository = ForeignKeyField(Repository, index=True, null=True)
-  access_token = ForeignKeyField(AccessToken, null=True)
   datetime = DateTimeField(default=datetime.now, index=True)
   ip = CharField(null=True)
   metadata_json = TextField(default='{}')
@@ -399,7 +469,7 @@ class OAuthApplication(BaseModel):
   client_secret = CharField(default=random_string_generator(length=40))
   redirect_uri = CharField()
   application_uri = CharField()
-  organization = ForeignKeyField(User)
+  organization = QuayUserField()
 
   name = CharField()
   description = TextField(default='')
@@ -416,7 +486,7 @@ class OAuthAuthorizationCode(BaseModel):
 class OAuthAccessToken(BaseModel):
   uuid = CharField(default=uuid_generator, index=True)
   application = ForeignKeyField(OAuthApplication)
-  authorized_user = ForeignKeyField(User)
+  authorized_user = QuayUserField()
   scope = CharField()
   access_token = CharField(index=True)
   token_type = CharField(default='Bearer')
@@ -432,7 +502,7 @@ class NotificationKind(BaseModel):
 class Notification(BaseModel):
   uuid = CharField(default=uuid_generator, index=True)
   kind = ForeignKeyField(NotificationKind, index=True)
-  target = ForeignKeyField(User, index=True)
+  target = QuayUserField(index=True)
   metadata_json = TextField(default='{}')
   created = DateTimeField(default=datetime.now, index=True)
   dismissed = BooleanField(default=False)
@@ -5,7 +5,7 @@ import os
 from alembic import context
 from sqlalchemy import engine_from_config, pool
 from logging.config import fileConfig
-from urllib import unquote
+from urllib import unquote, quote
 from peewee import SqliteDatabase
 
 from data.database import all_models, db
@@ -24,6 +24,11 @@ if 'GENMIGRATE' in os.environ:
 else:
   db_uri = 'postgresql://postgres@%s/genschema' % (docker_host_ip)
 
+if 'DB_URI' in os.environ:
+  db_uri = os.environ['DB_URI']
+
+app.config['DB_URI'] = db_uri
+
 config = context.config
 config.set_main_option('sqlalchemy.url', db_uri)
 
@@ -69,7 +74,7 @@ def run_migrations_online():
 
   """
 
-  if isinstance(db.obj, SqliteDatabase) and not 'GENMIGRATE' in os.environ:
+  if isinstance(db.obj, SqliteDatabase) and not 'GENMIGRATE' in os.environ and not 'DB_URI' in os.environ:
     print ('Skipping Sqlite migration!')
     return
 
@@ -5,8 +5,8 @@ up_mysql() {
   docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
 
   # Sleep for 5s to get MySQL get started.
-  echo 'Sleeping for 5...'
-  sleep 5
+  echo 'Sleeping for 10...'
+  sleep 10
 
   # Add the database to mysql.
   docker run --rm --link mysql:mysql mysql sh -c 'echo "create database genschema" | mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -ppassword'
@@ -0,0 +1,28 @@
+"""Add log entry kind for verbs
+
+Revision ID: 204abf14783d
+Revises: 2430f55c41d5
+Create Date: 2014-10-29 15:38:06.100915
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '204abf14783d'
+down_revision = '2430f55c41d5'
+
+from alembic import op
+import sqlalchemy as sa
+
+def upgrade(tables):
+  op.bulk_insert(tables.logentrykind,
+                 [
+                   {'id': 46, 'name':'repo_verb'},
+                 ])
+
+
+def downgrade(tables):
+  op.execute(
+    (tables.logentrykind.delete()
+     .where(tables.logentrykind.c.name == op.inline_literal('repo_verb')))
+
+  )
@@ -16,7 +16,9 @@ from util.uncompressedsize import backfill_sizes_from_data
 
 
 def upgrade(tables):
-  backfill_sizes_from_data()
+  # Note: Doing non-alembic operations inside alembic can cause a deadlock. This call has been
+  # moved to runmigration.sh.
+  pass
 
 def downgrade(tables):
   pass
@@ -0,0 +1,26 @@
+"""Add an index to the docker_image_id field
+
+Revision ID: 313d297811c4
+Revises: 204abf14783d
+Create Date: 2014-11-13 12:40:57.414787
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '313d297811c4'
+down_revision = '204abf14783d'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+    ### commands auto generated by Alembic - please adjust! ###
+    op.create_index('image_docker_image_id', 'image', ['docker_image_id'], unique=False)
+    ### end Alembic commands ###
+
+
+def downgrade(tables):
+    ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('image_docker_image_id', table_name='image')
+    ### end Alembic commands ###
@@ -12,7 +12,6 @@ down_revision = '82297d834ad'
 
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import mysql
 
 def upgrade(tables):
   op.bulk_insert(tables.logentrykind,
@@ -17,7 +17,7 @@ from sqlalchemy.dialects import mysql
 def upgrade(tables):
     ### commands auto generated by Alembic - please adjust! ###
     op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0"))
-    op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False, server_default=sa.func.now()))
+    op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False))
     ### end Alembic commands ###
 
 
@@ -3,7 +3,7 @@ import logging
 import dateutil.parser
 import json
 
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
 
 from data.database import (User, Repository, Image, AccessToken, Role, RepositoryPermission,
                            Visibility, RepositoryTag, EmailConfirmation, FederatedLogin,
@@ -14,7 +14,7 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
                            ExternalNotificationEvent, ExternalNotificationMethod,
                            RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
                            DerivedImageStorage, ImageStorageTransformation, random_string_generator,
-                           db, BUILD_PHASE)
+                           db, BUILD_PHASE, QuayUserField)
 from peewee import JOIN_LEFT_OUTER, fn
 from util.validation import (validate_username, validate_email, validate_password,
                              INVALID_PASSWORD_MESSAGE)
@@ -288,6 +288,7 @@ def delete_robot(robot_username):
   try:
     robot = User.get(username=robot_username, robot=True)
+    robot.delete_instance(recursive=True, delete_nullable=True)
 
   except User.DoesNotExist:
     raise InvalidRobotException('Could not find robot with username: %s' %
                                 robot_username)
@@ -632,7 +633,7 @@ def get_matching_users(username_prefix, robot_namespace=None,
 
   query = (User
            .select(User.username, User.robot)
-           .group_by(User.username)
+           .group_by(User.username, User.robot)
            .where(direct_user_query))
 
   if organization:
@@ -829,8 +830,10 @@ def _filter_to_repos_for_user(query, username=None, namespace=None,
   if namespace:
     where_clause = where_clause & (Namespace.username == namespace)
 
+  # TODO(jschorr, jake): Figure out why the old join on Visibility was so darn slow and
+  # remove this hack.
   if include_public:
-    new_clause = (Visibility.name == 'public')
+    new_clause = (Repository.visibility == _get_public_repo_visibility())
     if where_clause:
       where_clause = where_clause | new_clause
     else:
@@ -839,6 +842,16 @@ def _filter_to_repos_for_user(query, username=None, namespace=None,
   return query.where(where_clause)
 
 
+_public_repo_visibility_cache = None
+def _get_public_repo_visibility():
+  global _public_repo_visibility_cache
+
+  if not _public_repo_visibility_cache:
+    _public_repo_visibility_cache = Visibility.get(name='public')
+
+  return _public_repo_visibility_cache
+
+
 def get_matching_repositories(repo_term, username=None):
   namespace_term = repo_term
   name_term = repo_term
@@ -1059,16 +1072,26 @@ def get_repository(namespace_name, repository_name):
   return None
 
 
-def get_repo_image(namespace_name, repository_name, image_id):
+def get_repo_image(namespace_name, repository_name, docker_image_id):
   def limit_to_image_id(query):
-    return query.where(Image.docker_image_id == image_id)
+    return query.where(Image.docker_image_id == docker_image_id).limit(1)
+
+  query = _get_repository_images(namespace_name, repository_name, limit_to_image_id)
+  try:
+    return query.get()
+  except Image.DoesNotExist:
+    return None
+
+
+def get_repo_image_extended(namespace_name, repository_name, docker_image_id):
+  def limit_to_image_id(query):
+    return query.where(Image.docker_image_id == docker_image_id).limit(1)
 
   images = _get_repository_images_base(namespace_name, repository_name, limit_to_image_id)
   if not images:
     return None
-  else:
-    return images[0]
+
+  return images[0]
 
 def repository_is_public(namespace_name, repository_name):
   try:
@@ -1161,20 +1184,21 @@ def __translate_ancestry(old_ancestry, translations, repository, username, prefe
   if old_ancestry == '/':
     return '/'
 
-  def translate_id(old_id):
+  def translate_id(old_id, docker_image_id):
     logger.debug('Translating id: %s', old_id)
     if old_id not in translations:
-      # Figure out which docker_image_id the old id refers to, then find a
-      # a local one
-      old = Image.select(Image.docker_image_id).where(Image.id == old_id).get()
-      image_in_repo = find_create_or_link_image(old.docker_image_id, repository, username,
+      image_in_repo = find_create_or_link_image(docker_image_id, repository, username,
                                                 translations, preferred_location)
       translations[old_id] = image_in_repo.id
 
     return translations[old_id]
 
+  # Select all the ancestor Docker IDs in a single query.
   old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]]
-  new_ids = [str(translate_id(old_id)) for old_id in old_ids]
+  query = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
+  old_images = {i.id: i.docker_image_id for i in query}
+
+  # Translate the old images into new ones.
+  new_ids = [str(translate_id(old_id, old_images[old_id])) for old_id in old_ids]
   return '/%s/' % '/'.join(new_ids)
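Note: the rewrite batches what used to be one SELECT per ancestor into a single IN query via peewee's << operator, then resolves each id from a dict. In isolation (sketch):

    # Before: N round trips, one per ancestor id.
    ids = [Image.select(Image.docker_image_id)
                .where(Image.id == old_id).get().docker_image_id
           for old_id in old_ids]

    # After: one query, then O(1) dict lookups.
    rows = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
    by_id = {row.id: row.docker_image_id for row in rows}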
@@ -1186,36 +1210,22 @@ def _create_storage(location_name):
   return storage
 
 
-def find_create_or_link_image(docker_image_id, repository, username, translations,
-                              preferred_location):
+def _find_or_link_image(existing_image, repository, username, translations, preferred_location):
   # TODO(jake): This call is currently recursively done under a single transaction. Can we make
   # it instead be done under a set of transactions?
   with config.app_config['DB_TRANSACTION_FACTORY'](db):
     # Check for an existing image, under the transaction, to make sure it doesn't already exist.
     repo_image = get_repo_image(repository.namespace_user.username, repository.name,
-                                docker_image_id)
+                                existing_image.docker_image_id)
     if repo_image:
       return repo_image
 
-    query = (Image
-             .select(Image, ImageStorage)
-             .distinct()
-             .join(ImageStorage)
-             .switch(Image)
-             .join(Repository)
-             .join(Visibility)
-             .switch(Repository)
-             .join(RepositoryPermission, JOIN_LEFT_OUTER)
-             .switch(Repository)
-             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
-             .where(ImageStorage.uploading == False))
-
-    query = (_filter_to_repos_for_user(query, username)
-             .where(Image.docker_image_id == docker_image_id))
-
-    new_image_ancestry = '/'
-    origin_image_id = None
+    # Make sure the existing base image still exists.
     try:
-      to_copy = query.get()
+      to_copy = Image.select().join(ImageStorage).where(Image.id == existing_image.id).get()
 
       msg = 'Linking image to existing storage with docker id: %s and uuid: %s'
-      logger.debug(msg, docker_image_id, to_copy.storage.uuid)
+      logger.debug(msg, existing_image.docker_image_id, to_copy.storage.uuid)
 
       new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repository,
                                                 username, preferred_location)
@@ -1223,25 +1233,71 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
       storage = to_copy.storage
       storage.locations = {placement.location.name
                            for placement in storage.imagestorageplacement_set}
-      origin_image_id = to_copy.id
+
+      new_image = Image.create(docker_image_id=existing_image.docker_image_id,
+                               repository=repository, storage=storage,
+                               ancestors=new_image_ancestry)
+
+      logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
+      translations[existing_image.id] = new_image.id
+      return new_image
     except Image.DoesNotExist:
-      logger.debug('Creating new storage for docker id: %s', docker_image_id)
-      storage = _create_storage(preferred_location)
-
-      logger.debug('Storage locations: %s', storage.locations)
-
-      new_image = Image.create(docker_image_id=docker_image_id,
-                               repository=repository, storage=storage,
-                               ancestors=new_image_ancestry)
-
-      logger.debug('new_image storage locations: %s', new_image.storage.locations)
-
-    if origin_image_id:
-      logger.debug('Storing translation %s -> %s', origin_image_id, new_image.id)
-      translations[origin_image_id] = new_image.id
-
-    return new_image
+      return None
+
+
+def find_create_or_link_image(docker_image_id, repository, username, translations,
+                              preferred_location):
+
+  # First check for the image existing in the repository. If found, we simply return it.
+  repo_image = get_repo_image(repository.namespace_user.username, repository.name,
+                              docker_image_id)
+  if repo_image:
+    return repo_image
+
+  # We next check to see if there is an existing storage the new image can link to.
+  existing_image_query = (Image
+                          .select(Image, ImageStorage)
+                          .distinct()
+                          .join(ImageStorage)
+                          .switch(Image)
+                          .join(Repository)
+                          .join(RepositoryPermission, JOIN_LEFT_OUTER)
+                          .switch(Repository)
+                          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+                          .where(ImageStorage.uploading == False))
+
+  existing_image_query = (_filter_to_repos_for_user(existing_image_query, username)
+                          .where(Image.docker_image_id == docker_image_id))
+
+  # If there is an existing image, we try to translate its ancestry and copy its storage.
+  new_image = None
+  try:
+    logger.debug('Looking up existing image for ID: %s', docker_image_id)
+    existing_image = existing_image_query.get()
+
+    logger.debug('Existing image %s found for ID: %s', existing_image.id, docker_image_id)
+    new_image = _find_or_link_image(existing_image, repository, username, translations,
+                                    preferred_location)
+    if new_image:
+      return new_image
+  except Image.DoesNotExist:
+    logger.debug('No existing image found for ID: %s', docker_image_id)
+    pass
+
+  # Otherwise, create a new storage directly.
+  with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # Final check for an existing image, under the transaction.
+    repo_image = get_repo_image(repository.namespace_user.username, repository.name,
+                                docker_image_id)
+    if repo_image:
+      return repo_image
+
+    logger.debug('Creating new storage for docker id: %s', docker_image_id)
+    storage = _create_storage(preferred_location)
+
+    return Image.create(docker_image_id=docker_image_id,
+                        repository=repository, storage=storage,
+                        ancestors='/')
 
 
 def find_or_create_derived_storage(source, transformation_name, preferred_location):
@@ -1355,6 +1411,15 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
     fetched.storage.save()
     return fetched
 
+def _get_repository_images(namespace_name, repository_name, query_modifier):
+  query = (Image
+           .select()
+           .join(Repository)
+           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+           .where(Repository.name == repository_name, Namespace.username == namespace_name))
+
+  query = query_modifier(query)
+  return query
 
 def _get_repository_images_base(namespace_name, repository_name, query_modifier):
   query = (ImageStoragePlacement
@@ -1391,6 +1456,20 @@ def _get_repository_images_base(namespace_name, repository_name, query_modifier)
   return images.values()
 
 
+def lookup_repository_images(namespace_name, repository_name, docker_image_ids):
+  return (Image
+          .select()
+          .join(Repository)
+          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+          .where(Repository.name == repository_name, Namespace.username == namespace_name,
+                 Image.docker_image_id << docker_image_ids))
+
+def get_matching_repository_images(namespace_name, repository_name, docker_image_ids):
+  def modify_query(q):
+    return q.where(Image.docker_image_id << docker_image_ids)
+
+  return _get_repository_images_base(namespace_name, repository_name, modify_query)
+
 def get_repository_images(namespace_name, repository_name):
   return _get_repository_images_base(namespace_name, repository_name, lambda q: q)
 
@@ -1406,7 +1485,12 @@ def list_repository_tags(namespace_name, repository_name):
 
 
 def garbage_collect_repository(namespace_name, repository_name):
+  storage_id_whitelist = {}
+
   with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # TODO (jake): We could probably select this and all the images in a single query using
+    # a different kind of join.
+
     # Get a list of all images used by tags in the repository
     tag_query = (RepositoryTag
                  .select(RepositoryTag, Image, ImageStorage)
@@ -1425,29 +1509,31 @@ def garbage_collect_repository(namespace_name, repository_name):
       referenced_anscestors = referenced_anscestors.union(set(ancestor_list))
       referenced_anscestors.add(tag.image.id)
 
-    all_repo_images = get_repository_images(namespace_name, repository_name)
+    all_repo_images = _get_repository_images(namespace_name, repository_name, lambda q: q)
     all_images = {int(img.id): img for img in all_repo_images}
     to_remove = set(all_images.keys()).difference(referenced_anscestors)
 
     if len(to_remove) > 0:
       logger.info('Cleaning up unreferenced images: %s', to_remove)
       storage_id_whitelist = {all_images[to_remove_id].storage.id for to_remove_id in to_remove}
 
       Image.delete().where(Image.id << list(to_remove)).execute()
 
-  garbage_collect_storage(storage_id_whitelist)
+  if len(to_remove) > 0:
+    logger.info('Garbage collecting storage for images: %s', to_remove)
+    garbage_collect_storage(storage_id_whitelist)
+
+  return len(to_remove)
 
 
 def garbage_collect_storage(storage_id_whitelist):
-  # We are going to make the conscious decision to not delete image storage inside the transaction
-  # This may end up producing garbage in s3, trading off for higher availability in the database
+  if len(storage_id_whitelist) == 0:
+    return
 
   def placements_query_to_paths_set(placements_query):
     return {(placement.location.name, config.store.image_path(placement.storage.uuid))
             for placement in placements_query}
 
-  def orphaned_storage_query(select_base_query, candidates):
+  def orphaned_storage_query(select_base_query, candidates, group_by):
     return (select_base_query
             .switch(ImageStorage)
             .join(Image, JOIN_LEFT_OUTER)
|
@ -1455,14 +1541,19 @@ def garbage_collect_storage(storage_id_whitelist):
|
|||
.join(DerivedImageStorage, JOIN_LEFT_OUTER,
|
||||
on=(ImageStorage.id == DerivedImageStorage.derivative))
|
||||
.where(ImageStorage.id << list(candidates))
|
||||
.group_by(ImageStorage)
|
||||
.group_by(*group_by)
|
||||
.having((fn.Count(Image.id) == 0) & (fn.Count(DerivedImageStorage.id) == 0)))
|
||||
|
||||
logger.debug('Garbage collecting storage from candidates: %s', storage_id_whitelist)
|
||||
# Note: We remove the derived image storage in its own transaction as a way to reduce the
|
||||
# time that the transaction holds on the database indicies. This could result in a derived
|
||||
# image storage being deleted for an image storage which is later reused during this time,
|
||||
# but since these are caches anyway, it isn't terrible and worth the tradeoff (for now).
|
||||
logger.debug('Garbage collecting derived storage from candidates: %s', storage_id_whitelist)
|
||||
with config.app_config['DB_TRANSACTION_FACTORY'](db):
|
||||
# Find out which derived storages will be removed, and add them to the whitelist
|
||||
orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
|
||||
storage_id_whitelist))
|
||||
storage_id_whitelist,
|
||||
(ImageStorage.id,)))
|
||||
|
||||
if len(orphaned_from_candidates) > 0:
|
||||
derived_to_remove = (ImageStorage
|
||||
|
@@ -1478,6 +1569,12 @@ def garbage_collect_storage(storage_id_whitelist):
           .where(DerivedImageStorage.source << orphaned_from_candidates)
           .execute())
 
+  # Note: Both of these deletes must occur in the same transaction (unfortunately) because a
+  # storage without any placement is invalid, and a placement cannot exist without a storage.
+  # TODO(jake): We might want to allow for null storages on placements, which would allow us to
+  # delete the storages, then delete the placements in a non-transaction.
+  logger.debug('Garbage collecting storages from candidates: %s', storage_id_whitelist)
+  with config.app_config['DB_TRANSACTION_FACTORY'](db):
     # Track all of the data that should be removed from blob storage
     placements_to_remove = orphaned_storage_query(ImageStoragePlacement
                                                   .select(ImageStoragePlacement,
@@ -1486,7 +1583,10 @@ def garbage_collect_storage(storage_id_whitelist):
                                                   .join(ImageStorageLocation)
                                                   .switch(ImageStoragePlacement)
                                                   .join(ImageStorage),
-                                                  storage_id_whitelist)
+                                                  storage_id_whitelist,
+                                                  (ImageStorage, ImageStoragePlacement,
+                                                   ImageStorageLocation))
 
     paths_to_remove = placements_query_to_paths_set(placements_to_remove.clone())
 
     # Remove the placements for orphaned storages
@@ -1499,14 +1599,17 @@ def garbage_collect_storage(storage_id_whitelist):
 
     # Remove the all orphaned storages
     orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
-                                                    storage_id_whitelist))
+                                                    storage_id_whitelist,
+                                                    (ImageStorage.id,)))
     if len(orphaned_storages) > 0:
       (ImageStorage
        .delete()
        .where(ImageStorage.id << orphaned_storages)
        .execute())
 
-  # Delete the actual blob storage
+  # We are going to make the conscious decision to not delete image storage blobs inside
+  # transactions.
+  # This may end up producing garbage in s3, trading off for higher availability in the database.
   for location_name, image_path in paths_to_remove:
     logger.debug('Removing %s from %s', image_path, location_name)
     config.store.remove({location_name}, image_path)
@@ -1527,7 +1630,7 @@ def get_tag_image(namespace_name, repository_name, tag_name):
 
 
 def get_image_by_id(namespace_name, repository_name, docker_image_id):
-  image = get_repo_image(namespace_name, repository_name, docker_image_id)
+  image = get_repo_image_extended(namespace_name, repository_name, docker_image_id)
   if not image:
     raise DataModelException('Unable to find image \'%s\' for repo \'%s/%s\'' %
                              (docker_image_id, namespace_name, repository_name))
@@ -1714,7 +1817,7 @@ def purge_repository(namespace_name, repository_name):
 
   # Delete the rest of the repository metadata
   fetched = _get_repository(namespace_name, repository_name)
-  fetched.delete_instance(recursive=True)
+  fetched.delete_instance(recursive=True, delete_nullable=True)
 
 
 def get_private_repo_count(username):
@@ -1758,11 +1861,10 @@ def get_repository_delegate_tokens(namespace_name, repository_name):
 
 def get_repo_delegate_token(namespace_name, repository_name, code):
   repo_query = get_repository_delegate_tokens(namespace_name, repository_name)
-  found = list(repo_query.where(AccessToken.code == code))
 
-  if found:
-    return found[0]
-  else:
+  try:
+    return repo_query.where(AccessToken.code == code).get()
+  except AccessToken.DoesNotExist:
     raise InvalidTokenException('Unable to find token with code: %s' % code)
 
 
@@ -1937,9 +2039,9 @@ def list_logs(start_time, end_time, performer=None, repository=None, namespace=N
   if namespace:
     joined = joined.where(User.username == namespace)
 
-  return joined.where(
+  return list(joined.where(
     LogEntry.datetime >= start_time,
-    LogEntry.datetime < end_time).order_by(LogEntry.datetime.desc())
+    LogEntry.datetime < end_time).order_by(LogEntry.datetime.desc()))
 
 
 def log_action(kind_name, user_or_organization_name, performer=None,
@@ -1951,7 +2053,7 @@ def log_action(kind_name, user_or_organization_name, performer=None,
   kind = LogEntryKind.get(LogEntryKind.name == kind_name)
   account = User.get(User.username == user_or_organization_name)
   LogEntry.create(kind=kind, account=account, performer=performer,
-                  repository=repository, access_token=access_token, ip=ip,
+                  repository=repository, ip=ip,
                   metadata_json=json.dumps(metadata), datetime=timestamp)
 
 
@@ -2239,6 +2341,18 @@ def confirm_team_invite(code, user):
   found.delete_instance()
   return (team, inviter)
 
+
+def get_repository_usage():
+  one_month_ago = date.today() - timedelta(weeks=4)
+  repo_pull = LogEntryKind.get(name = 'pull_repo')
+  repo_verb = LogEntryKind.get(name = 'repo_verb')
+  return (LogEntry.select(LogEntry.ip, LogEntry.repository)
+          .where((LogEntry.kind == repo_pull) | (LogEntry.kind == repo_verb))
+          .where(~(LogEntry.repository >> None))
+          .where(LogEntry.datetime >= one_month_ago)
+          .group_by(LogEntry.ip, LogEntry.repository)
+          .count())
+
 def archivable_buildlogs_query():
   presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
   return (RepositoryBuild.select()
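Note: get_repository_usage approximates "repository handles" as distinct (ip, repository) pairs over pull and verb log entries in the last four weeks, via group_by(...).count(). A sketch of how a caller might compare it against the new config knob:

    usage = model.get_repository_usage()
    allowed = app.config.get('MAXIMUM_REPOSITORY_USAGE', 20)
    if usage > allowed:
      logger.warning('Repository usage %s exceeds allowed %s', usage, allowed)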
@@ -42,10 +42,10 @@ class WorkQueue(object):
     return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list)
 
   def update_metrics(self):
-    if self._reporter is None:
-      return
-
     with self._transaction_factory(db):
+      if self._reporter is None:
+        return
+
       now = datetime.utcnow()
       name_match_query = self._name_match_query()
 
@@ -55,7 +55,7 @@ class WorkQueue(object):
       avialable_query = self._available_jobs(now, name_match_query, running_query)
       available_count = avialable_query.select(QueueItem.queue_name).distinct().count()
 
-    self._reporter(self._currently_processing, running_count, running_count + available_count)
+      self._reporter(self._currently_processing, running_count, running_count + available_count)
 
   def put(self, canonical_name_list, message, available_after=0, retries_remaining=5):
     """
@@ -73,7 +73,7 @@ class RepositoryImage(RepositoryParamResource):
   @nickname('getImage')
   def get(self, namespace, repository, image_id):
     """ Get the information available for the specified image. """
-    image = model.get_repo_image(namespace, repository, image_id)
+    image = model.get_repo_image_extended(namespace, repository, image_id)
     if not image:
       raise NotFound()
 
@@ -94,7 +94,7 @@ class RepositoryImageChanges(RepositoryParamResource):
   @nickname('getImageChanges')
   def get(self, namespace, repository, image_id):
     """ Get the list of changes for the specified image. """
-    image = model.get_repo_image(namespace, repository, image_id)
+    image = model.get_repo_image_extended(namespace, repository, image_id)
 
     if not image:
       raise NotFound()
@@ -52,6 +52,25 @@ def user_view(user):
     'super_user': user.username in app.config['SUPER_USERS']
   }
 
+
+@resource('/v1/superuser/usage/')
+@internal_only
+@show_if(features.SUPER_USERS)
+class UsageInformation(ApiResource):
+  """ Resource for returning the usage information for enterprise customers. """
+  @require_fresh_login
+  @nickname('getSystemUsage')
+  def get(self):
+    """ Returns the number of repository handles currently held. """
+    if SuperUserPermission().can():
+      return {
+        'usage': model.get_repository_usage(),
+        'allowed': app.config.get('MAXIMUM_REPOSITORY_USAGE', 20)
+      }
+
+    abort(403)
+
+
 @resource('/v1/superuser/users/')
 @internal_only
 @show_if(features.SUPER_USERS)
@@ -1,10 +1,11 @@
 import logging
+import requests
 
 from flask import request, redirect, url_for, Blueprint
 from flask.ext.login import current_user
 
 from endpoints.common import render_page_template, common_login, route_show_if
-from app import app, analytics, get_app_url
+from app import app, analytics, get_app_url, github_login, google_login, github_trigger
 from data import model
 from util.names import parse_repository_name
 from util.validation import generate_valid_usernames
@@ -29,20 +30,16 @@ def render_ologin_error(service_name,
                              service_url=get_app_url(),
                              user_creation=features.USER_CREATION)
 
-def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False,
-                            redirect_suffix=''):
+def exchange_code_for_token(code, service, form_encode=False, redirect_suffix=''):
   code = request.args.get('code')
-  id_config = service_name + '_LOGIN_CLIENT_ID' if for_login else service_name + '_CLIENT_ID'
-  secret_config = service_name + '_LOGIN_CLIENT_SECRET' if for_login else service_name + '_CLIENT_SECRET'
 
   payload = {
-    'client_id': app.config[id_config],
-    'client_secret': app.config[secret_config],
+    'client_id': service.client_id(),
+    'client_secret': service.client_secret(),
     'code': code,
     'grant_type': 'authorization_code',
     'redirect_uri': '%s://%s/oauth2/%s/callback%s' % (app.config['PREFERRED_URL_SCHEME'],
                                                       app.config['SERVER_HOSTNAME'],
-                                                      service_name.lower(),
+                                                      service.service_name().lower(),
                                                       redirect_suffix)
   }
@@ -50,12 +47,11 @@ def exchange_code_for_token(code, service, form_encode=False, redirect_suffix=''
     'Accept': 'application/json'
   }
 
+  token_url = service.token_endpoint()
   if form_encode:
-    get_access_token = client.post(app.config[service_name + '_TOKEN_URL'],
-                                   data=payload, headers=headers)
+    get_access_token = client.post(token_url, data=payload, headers=headers)
   else:
-    get_access_token = client.post(app.config[service_name + '_TOKEN_URL'],
-                                   params=payload, headers=headers)
+    get_access_token = client.post(token_url, params=payload, headers=headers)
 
   json_data = get_access_token.json()
   if not json_data:
@@ -65,25 +61,20 @@ def exchange_code_for_token(code, service, form_encode=False, redirect_suffix=''
   return token
 
 
-def get_github_user(token):
-  token_param = {
-    'access_token': token,
-  }
-  get_user = client.get(app.config['GITHUB_USER_URL'], params=token_param)
-
-  return get_user.json()
-
-
-def get_google_user(token):
+def get_user(service, token):
   token_param = {
     'access_token': token,
     'alt': 'json',
   }
+  get_user = client.get(service.user_endpoint(), params=token_param)
+  if get_user.status_code != requests.codes.ok:
+    return {}
 
-  get_user = client.get(app.config['GOOGLE_USER_URL'], params=token_param)
   return get_user.json()
 
-def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
+
+def conduct_oauth_login(service, user_id, username, email, metadata={}):
+  service_name = service.service_name()
   to_login = model.verify_federated_login(service_name.lower(), user_id)
   if not to_login:
     # See if we can create a new user.
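Note: after this refactor the callbacks depend only on a small service interface (service_name(), client_id(), client_secret(), token_endpoint(), user_endpoint(), email_endpoint()), provided by the OAuth config objects created in app.py; the implementations live in util/oauth.py, which is not shown in this diff. A hypothetical stand-in satisfying the same interface, e.g. for tests:

    class FakeOAuthService(object):
      def service_name(self):   return 'Fake'
      def client_id(self):      return 'fake-client-id'
      def client_secret(self):  return 'fake-client-secret'
      def token_endpoint(self): return 'https://fake.example/oauth/access_token'
      def user_endpoint(self):  return 'https://fake.example/api/user'
      def email_endpoint(self): return 'https://fake.example/api/user/emails'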
@@ -93,8 +84,15 @@ def conduct_oauth_login(service, user_id, username, email, metadata={}):
 
     # Try to create the user
     try:
-      valid = next(generate_valid_usernames(username))
-      to_login = model.create_federated_user(valid, email, service_name.lower(),
+      new_username = None
+      for valid in generate_valid_usernames(username):
+        if model.get_user_or_org(valid):
+          continue
+
+        new_username = valid
+        break
+
+      to_login = model.create_federated_user(new_username, email, service_name.lower(),
                                              user_id, set_password_notification=True,
                                              metadata=metadata)
 
@@ -138,8 +136,8 @@ def google_oauth_callback():
   if error:
     return render_ologin_error('Google', error)
 
-  token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE', form_encode=True)
-  user_data = get_google_user(token)
+  token = exchange_code_for_token(request.args.get('code'), google_login, form_encode=True)
+  user_data = get_user(google_login, token)
   if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
     return render_ologin_error('Google')
 
@@ -148,7 +146,7 @@ def google_oauth_callback():
     'service_username': user_data['email']
   }
 
-  return conduct_oauth_login('Google', user_data['id'], username, user_data['email'],
+  return conduct_oauth_login(google_login, user_data['id'], username, user_data['email'],
                              metadata=metadata)
 
 
@@ -159,8 +157,8 @@ def github_oauth_callback():
   if error:
     return render_ologin_error('GitHub', error)
 
-  token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
-  user_data = get_github_user(token)
+  token = exchange_code_for_token(request.args.get('code'), github_login)
+  user_data = get_user(github_login, token)
   if not user_data or not 'login' in user_data:
    return render_ologin_error('GitHub')
 
@@ -174,7 +172,7 @@ def github_oauth_callback():
   token_param = {
     'access_token': token,
   }
-  get_email = client.get(app.config['GITHUB_USER_EMAILS'], params=token_param,
+  get_email = client.get(github_login.email_endpoint(), params=token_param,
                          headers=v3_media_type)
 
   # We will accept any email, but we prefer the primary
@@ -188,17 +186,17 @@ def github_oauth_callback():
     'service_username': username
   }
 
-  return conduct_oauth_login('github', github_id, username, found_email, metadata=metadata)
+  return conduct_oauth_login(github_login, github_id, username, found_email, metadata=metadata)
 
 
 @callback.route('/google/callback/attach', methods=['GET'])
 @route_show_if(features.GOOGLE_LOGIN)
 @require_session_login
 def google_oauth_attach():
-  token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE',
+  token = exchange_code_for_token(request.args.get('code'), google_login,
                                   redirect_suffix='/attach', form_encode=True)
 
-  user_data = get_google_user(token)
+  user_data = get_user(google_login, token)
   if not user_data or not user_data.get('id', None):
     return render_ologin_error('Google')
 
@@ -224,8 +222,8 @@ def google_oauth_attach():
 @route_show_if(features.GITHUB_LOGIN)
 @require_session_login
 def github_oauth_attach():
-  token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
-  user_data = get_github_user(token)
+  token = exchange_code_for_token(request.args.get('code'), github_login)
+  user_data = get_user(github_login, token)
   if not user_data:
     return render_ologin_error('GitHub')
 
@@ -255,8 +253,7 @@ def github_oauth_attach():
 def attach_github_build_trigger(namespace, repository):
   permission = AdministerRepositoryPermission(namespace, repository)
   if permission.can():
-    token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB',
-                                    for_login=False)
+    token = exchange_code_for_token(request.args.get('code'), github_trigger)
     repo = model.get_repository(namespace, repository)
     if not repo:
       msg = 'Invalid repository: %s/%s' % (namespace, repository)
@@ -11,7 +11,8 @@ from random import SystemRandom
 
 from data import model
 from data.database import db
-from app import app, login_manager, dockerfile_build_queue, notification_queue
+from app import app, login_manager, dockerfile_build_queue, notification_queue, oauth_apps
+
 from auth.permissions import QuayDeferredPermissionUser
 from auth import scopes
 from endpoints.api.discovery import swagger_route_data
@@ -20,6 +21,7 @@ from functools import wraps
 from config import getFrontendVisibleConfig
 from external_libraries import get_external_javascript, get_external_css
 from endpoints.notificationhelper import spawn_notification
+from util.useremails import CannotSendEmailException
 
 import features
 
@@ -129,6 +131,10 @@ def handle_dme(ex):
   logger.exception(ex)
   return make_response(json.dumps({'message': ex.message}), 400)
 
+@app.errorhandler(CannotSendEmailException)
+def handle_emailexception(ex):
+  message = 'Could not send email. Please contact an administrator and report this problem.'
+  return make_response(json.dumps({'message': message}), 400)
 
 def random_string():
   random = SystemRandom()
@@ -171,6 +177,13 @@ def render_page_template(name, **kwargs):
   external_styles = get_external_css(local=not app.config.get('USE_CDN', True))
   external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True))
 
+  def get_oauth_config():
+    oauth_config = {}
+    for oauth_app in oauth_apps:
+      oauth_config[oauth_app.key_name] = oauth_app.get_public_config()
+
+    return oauth_config
+
   contact_href = None
   if len(app.config.get('CONTACT_INFO', [])) == 1:
     contact_href = app.config['CONTACT_INFO'][0]
@@ -184,6 +197,7 @@ def render_page_template(name, **kwargs):
                                library_scripts=library_scripts,
                                feature_set=json.dumps(features.get_features()),
                                config_set=json.dumps(getFrontendVisibleConfig(app.config)),
+                               oauth_set=json.dumps(get_oauth_config()),
                                mixpanel_key=app.config.get('MIXPANEL_KEY', ''),
                                google_analytics_key=app.config.get('GOOGLE_ANALYTICS_KEY', ''),
                                sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
@@ -265,5 +279,4 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
   spawn_notification(repository, 'build_queued', event_data,
                      subpage='build?current=%s' % build_request.uuid,
                      pathargs=['build', build_request.uuid])
-  return build_request
-
+  return build_request
@@ -8,7 +8,7 @@ from collections import OrderedDict
 
 from data import model
 from data.model import oauth
-from app import analytics, app, authentication, userevents, storage
+from app import app, authentication, userevents, storage
 from auth.auth import process_auth
 from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
 from util.names import parse_repository_name
@@ -17,6 +17,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
                               ReadRepositoryPermission, CreateRepositoryPermission)
 
 from util.http import abort
+from endpoints.trackhelper import track_and_log
 from endpoints.notificationhelper import spawn_notification
 
 import features
@@ -222,13 +223,20 @@ def create_repository(namespace, repository):
     repo = model.create_repository(namespace, repository,
                                    get_authenticated_user())
 
-  profile.debug('Determining added images')
-  added_images = OrderedDict([(desc['id'], desc)
-                              for desc in image_descriptions])
+  profile.debug('Determining already added images')
+  added_images = OrderedDict([(desc['id'], desc) for desc in image_descriptions])
   new_repo_images = dict(added_images)
 
-  for existing in model.get_repository_images(namespace, repository):
-    if existing.docker_image_id in new_repo_images:
+  # Optimization: Lookup any existing images in the repository with matching docker IDs and
+  # remove them from the added dict, so we don't need to look them up one-by-one.
+  def chunks(l, n):
+    for i in xrange(0, len(l), n):
+      yield l[i:i+n]
+
+  # Note: We do this in chunks in an effort to not hit the SQL query size limit.
+  for chunk in chunks(new_repo_images.keys(), 50):
+    existing_images = model.lookup_repository_images(namespace, repository, chunk)
+    for existing in existing_images:
       added_images.pop(existing.docker_image_id)
 
   profile.debug('Creating/Linking necessary images')
@ -240,49 +248,8 @@ def create_repository(namespace, repository):
|
|||
|
||||
|
||||
profile.debug('Created images')
|
||||
response = make_response('Created', 201)
|
||||
|
||||
extra_params = {
|
||||
'repository': '%s/%s' % (namespace, repository),
|
||||
}
|
||||
|
||||
metadata = {
|
||||
'repo': repository,
|
||||
'namespace': namespace
|
||||
}
|
||||
|
||||
if get_validated_oauth_token():
|
||||
analytics.track(username, 'push_repo', extra_params)
|
||||
|
||||
oauth_token = get_validated_oauth_token()
|
||||
metadata['oauth_token_id'] = oauth_token.id
|
||||
metadata['oauth_token_application_id'] = oauth_token.application.client_id
|
||||
metadata['oauth_token_application'] = oauth_token.application.name
|
||||
elif get_authenticated_user():
|
||||
username = get_authenticated_user().username
|
||||
|
||||
analytics.track(username, 'push_repo', extra_params)
|
||||
metadata['username'] = username
|
||||
|
||||
# Mark that the user has started pushing the repo.
|
||||
user_data = {
|
||||
'action': 'push_repo',
|
||||
'repository': repository,
|
||||
'namespace': namespace
|
||||
}
|
||||
|
||||
event = userevents.get_event(username)
|
||||
event.publish_event_data('docker-cli', user_data)
|
||||
|
||||
elif get_validated_token():
|
||||
analytics.track(get_validated_token().code, 'push_repo', extra_params)
|
||||
metadata['token'] = get_validated_token().friendly_name
|
||||
metadata['token_code'] = get_validated_token().code
|
||||
|
||||
model.log_action('push_repo', namespace, performer=get_authenticated_user(),
|
||||
ip=request.remote_addr, metadata=metadata, repository=repo)
|
||||
|
||||
return response
|
||||
track_and_log('push_repo', repo)
|
||||
return make_response('Created', 201)
|
||||
|
||||
|
||||
@index.route('/repositories/<path:repository>/images', methods=['PUT'])
|
||||
|
@ -360,38 +327,7 @@ def get_repository_images(namespace, repository):
|
|||
resp = make_response(json.dumps(all_images), 200)
|
||||
resp.mimetype = 'application/json'
|
||||
|
||||
metadata = {
|
||||
'repo': repository,
|
||||
'namespace': namespace,
|
||||
}
|
||||
|
||||
profile.debug('Logging the pull to Mixpanel and the log system')
|
||||
if get_validated_oauth_token():
|
||||
oauth_token = get_validated_oauth_token()
|
||||
metadata['oauth_token_id'] = oauth_token.id
|
||||
metadata['oauth_token_application_id'] = oauth_token.application.client_id
|
||||
metadata['oauth_token_application'] = oauth_token.application.name
|
||||
elif get_authenticated_user():
|
||||
metadata['username'] = get_authenticated_user().username
|
||||
elif get_validated_token():
|
||||
metadata['token'] = get_validated_token().friendly_name
|
||||
metadata['token_code'] = get_validated_token().code
|
||||
else:
|
||||
metadata['public'] = True
|
||||
|
||||
pull_username = 'anonymous'
|
||||
if get_authenticated_user():
|
||||
pull_username = get_authenticated_user().username
|
||||
|
||||
extra_params = {
|
||||
'repository': '%s/%s' % (namespace, repository),
|
||||
}
|
||||
|
||||
analytics.track(pull_username, 'pull_repo', extra_params)
|
||||
model.log_action('pull_repo', namespace,
|
||||
performer=get_authenticated_user(),
|
||||
ip=request.remote_addr, metadata=metadata,
|
||||
repository=repo)
|
||||
track_and_log('pull_repo', repo)
|
||||
return resp
|
||||
|
||||
abort(403)
|
||||
|
|
|
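The chunked lookup above bounds each `WHERE docker_image_id IN (...)` clause at 50 IDs instead of issuing one enormous query (or one query per image). The same pattern in isolation; fetch_existing is a hypothetical stand-in for model.lookup_repository_images:

  def chunks(l, n):
    # Yield successive n-sized slices of l (xrange, as in the Python 2 code above).
    for i in xrange(0, len(l), n):
      yield l[i:i+n]

  def fetch_existing(id_chunk):
    # Hypothetical batched lookup standing in for model.lookup_repository_images.
    return [image_id for image_id in id_chunk if image_id.endswith('0')]

  requested_ids = ['id%d' % i for i in xrange(200)]
  found = set()
  for chunk in chunks(requested_ids, 50):
    found.update(fetch_existing(chunk))  # at most 50 IDs per SQL statement

  missing = [image_id for image_id in requested_ids if image_id not in found]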
@@ -7,13 +7,13 @@ from functools import wraps
from datetime import datetime
from time import time

from app import storage as store, image_diff_queue
from app import storage as store, image_diff_queue, app
from auth.auth import process_auth, extract_namespace_repo_from_session
from util import checksums, changes
from util.http import abort, exact_abort
from auth.permissions import (ReadRepositoryPermission,
                              ModifyRepositoryPermission)
from data import model
from data import model, database
from util import gzipstream

@@ -59,7 +59,7 @@ def require_completion(f):
  @wraps(f)
  def wrapper(namespace, repository, *args, **kwargs):
    image_id = kwargs['image_id']
    repo_image = model.get_repo_image(namespace, repository, image_id)
    repo_image = model.get_repo_image_extended(namespace, repository, image_id)
    if image_is_uploading(repo_image):
      abort(400, 'Image %(image_id)s is being uploaded, retry later',
            issue='upload-in-progress', image_id=kwargs['image_id'])

@@ -103,7 +103,7 @@ def head_image_layer(namespace, repository, image_id, headers):
  profile.debug('Checking repo permissions')
  if permission.can() or model.repository_is_public(namespace, repository):
    profile.debug('Looking up repo image')
    repo_image = model.get_repo_image(namespace, repository, image_id)
    repo_image = model.get_repo_image_extended(namespace, repository, image_id)
    if not repo_image:
      profile.debug('Image not found')
      abort(404, 'Image %(image_id)s not found', issue='unknown-image',

@@ -136,7 +136,7 @@ def get_image_layer(namespace, repository, image_id, headers):
  profile.debug('Checking repo permissions')
  if permission.can() or model.repository_is_public(namespace, repository):
    profile.debug('Looking up repo image')
    repo_image = model.get_repo_image(namespace, repository, image_id)
    repo_image = model.get_repo_image_extended(namespace, repository, image_id)

    profile.debug('Looking up the layer path')
    try:

@@ -151,6 +151,10 @@ def get_image_layer(namespace, repository, image_id, headers):
        return resp

      profile.debug('Streaming layer data')

      # Close the database handle here for this process before we send the long download.
      database.close_db_filter(None)

      return Response(store.stream_read(repo_image.storage.locations, path), headers=headers)
    except (IOError, AttributeError):
      profile.debug('Image not found')

@@ -170,7 +174,7 @@ def put_image_layer(namespace, repository, image_id):
    abort(403)

  profile.debug('Retrieving image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, image_id)
  try:
    profile.debug('Retrieving image data')
    uuid = repo_image.storage.uuid

@@ -213,7 +217,8 @@ def put_image_layer(namespace, repository, image_id):
  sr.add_handler(sum_hndlr)

  # Stream write the data to storage.
  store.stream_write(repo_image.storage.locations, layer_path, sr)
  with database.CloseForLongOperation(app.config):
    store.stream_write(repo_image.storage.locations, layer_path, sr)

  # Append the computed checksum.
  csums = []

@@ -294,7 +299,7 @@ def put_image_checksum(namespace, repository, image_id):
          issue='missing-checksum-cookie', image_id=image_id)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, image_id)
  if not repo_image or not repo_image.storage:
    abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

@@ -350,7 +355,7 @@ def get_image_json(namespace, repository, image_id, headers):
    abort(403)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, image_id)

  profile.debug('Looking up repo layer data')
  try:

@@ -381,7 +386,7 @@ def get_image_ancestry(namespace, repository, image_id, headers):
    abort(403)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, image_id)

  profile.debug('Looking up image data')
  try:

@@ -445,7 +450,7 @@ def put_image_json(namespace, repository, image_id):
          issue='invalid-request', image_id=image_id)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, image_id)
  if not repo_image:
    profile.debug('Image not found')
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',

@@ -462,7 +467,7 @@ def put_image_json(namespace, repository, image_id):
  parent_image = None
  if parent_id:
    profile.debug('Looking up parent image')
    parent_image = model.get_repo_image(namespace, repository, parent_id)
    parent_image = model.get_repo_image_extended(namespace, repository, parent_id)

  parent_uuid = parent_image and parent_image.storage.uuid
  parent_locations = parent_image and parent_image.storage.locations

@@ -515,7 +520,7 @@ def put_image_json(namespace, repository, image_id):
def process_image_changes(namespace, repository, image_id):
  logger.debug('Generating diffs for image: %s' % image_id)

  repo_image = model.get_repo_image(namespace, repository, image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, image_id)
  if not repo_image:
    logger.warning('No image for id: %s', image_id)
    return None, None
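Besides the get_repo_image_extended rename, the recurring change in this file is releasing the database connection around slow I/O: close_db_filter(None) before streaming a layer out, and CloseForLongOperation around streaming one in, so a worker does not pin a DB connection for the length of a multi-hundred-megabyte transfer. A minimal sketch of the context-manager half, with illustrative internals (the real helper lives in data/database.py and is not reproduced here):

  def _release_connection():
    # Hypothetical stand-in for database.close_db_filter(None): hand the current
    # thread's connection back so it is not held open during slow I/O.
    pass

  class CloseForLongOperationSketch(object):
    # Illustrative sketch only, not the real data.database.CloseForLongOperation.
    def __init__(self, config):
      self.config = config

    def __enter__(self):
      _release_connection()

    def __exit__(self, exc_type, exc_value, traceback):
      # Nothing to reopen eagerly; the next query lazily reconnects.
      return False

  # Usage mirrors put_image_layer above:
  #   with CloseForLongOperationSketch(app.config):
  #     store.stream_write(repo_image.storage.locations, layer_path, sr)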
62  endpoints/trackhelper.py  (new file)

@@ -0,0 +1,62 @@
import logging

from app import analytics, app, userevents
from data import model
from flask import request
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token

logger = logging.getLogger(__name__)
profile = logging.getLogger('application.profiler')

def track_and_log(event_name, repo, **kwargs):
  repository = repo.name
  namespace = repo.namespace_user.username
  metadata = {
    'repo': repository,
    'namespace': namespace,
  }
  metadata.update(kwargs)

  analytics_id = 'anonymous'

  profile.debug('Logging the %s to Mixpanel and the log system', event_name)
  if get_validated_oauth_token():
    oauth_token = get_validated_oauth_token()
    metadata['oauth_token_id'] = oauth_token.id
    metadata['oauth_token_application_id'] = oauth_token.application.client_id
    metadata['oauth_token_application'] = oauth_token.application.name
    analytics_id = 'oauth:' + oauth_token.id
  elif get_authenticated_user():
    metadata['username'] = get_authenticated_user().username
    analytics_id = get_authenticated_user().username
  elif get_validated_token():
    metadata['token'] = get_validated_token().friendly_name
    metadata['token_code'] = get_validated_token().code
    analytics_id = 'token:' + get_validated_token().code
  else:
    metadata['public'] = True
    analytics_id = 'anonymous'

  extra_params = {
    'repository': '%s/%s' % (namespace, repository),
  }

  # Publish the user event (if applicable)
  if get_authenticated_user():
    user_event_data = {
      'action': event_name,
      'repository': repository,
      'namespace': namespace
    }

    event = userevents.get_event(get_authenticated_user().username)
    event.publish_event_data('docker-cli', user_event_data)

  # Save the action to mixpanel.
  analytics.track(analytics_id, event_name, extra_params)

  # Log the action to the database.
  model.log_action(event_name, namespace,
                   performer=get_authenticated_user(),
                   ip=request.remote_addr, metadata=metadata,
                   repository=repo)
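track_and_log collapses the near-identical blocks deleted from endpoints/index.py into one helper: the metadata and analytics identity are derived from whichever credential is on the auth context (OAuth token, authenticated user, or access token), the user event is published, Mixpanel is tracked, and the action is logged. Call sites shrink to one line plus optional metadata kwargs; the objects below are hypothetical stand-ins for the peewee models it actually receives:

  from collections import namedtuple

  # Hypothetical stand-ins: the real arguments are peewee rows exposing
  # .name and .namespace_user.username, exactly as read above.
  Namespace = namedtuple('Namespace', ['username'])
  Repo = namedtuple('Repo', ['name', 'namespace_user'])
  repo = Repo(name='mysql', namespace_user=Namespace(username='devtable'))

  # Inside a request context, the call sites become:
  #   track_and_log('push_repo', repo)
  #   track_and_log('pull_repo', repo)
  #   track_and_log('repo_verb', repo, tag='latest', verb='squash')
  # and the kwargs land in the logged metadata alongside repo/namespace:
  metadata = {'repo': repo.name, 'namespace': repo.namespace_user.username}
  metadata.update({'tag': 'latest', 'verb': 'squash'})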
@@ -8,7 +8,7 @@ import re
from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile

from app import app, userfiles as user_files
from app import app, userfiles as user_files, github_trigger
from util.tarfileappender import TarfileAppender

@@ -150,8 +150,8 @@ def raise_unsupported():
class GithubBuildTrigger(BuildTrigger):
  @staticmethod
  def _get_client(auth_token):
    return Github(auth_token, client_id=app.config['GITHUB_CLIENT_ID'],
                  client_secret=app.config['GITHUB_CLIENT_SECRET'])
    return Github(auth_token, client_id=github_trigger.client_id(),
                  client_secret=github_trigger.client_secret())

  @classmethod
  def service_name(cls):

@@ -231,15 +231,16 @@ class GithubBuildTrigger(BuildTrigger):

    return repos_by_org

  def matches_branch(self, branch_name, regex):
  def matches_ref(self, ref, regex):
    match_string = ref.split('/', 1)[1]
    if not regex:
      return False

    m = regex.match(branch_name)
    m = regex.match(match_string)
    if not m:
      return False

    return len(m.group(0)) == len(branch_name)
    return len(m.group(0)) == len(match_string)

  def list_build_subdirs(self, auth_token, config):
    gh_client = self._get_client(auth_token)

@@ -250,11 +251,11 @@ class GithubBuildTrigger(BuildTrigger):

    # Find the first matching branch.
    branches = None
    if 'branch_regex' in config:
    if 'branchtag_regex' in config:
      try:
        regex = re.compile(config['branch_regex'])
        regex = re.compile(config['branchtag_regex'])
        branches = [branch.name for branch in repo.get_branches()
                    if self.matches_branch(branch.name, regex)]
                    if self.matches_ref('refs/heads/' + branch.name, regex)]
      except:
        pass

@@ -370,14 +371,13 @@ class GithubBuildTrigger(BuildTrigger):
    commit_sha = payload['head_commit']['id']
    commit_message = payload['head_commit'].get('message', '')

    if 'branch_regex' in config:
    if 'branchtag_regex' in config:
      try:
        regex = re.compile(config['branch_regex'])
        regex = re.compile(config['branchtag_regex'])
      except:
        regex = re.compile('.*')

      branch = ref.split('/')[-1]
      if not self.matches_branch(branch, regex):
      if not self.matches_ref(ref, regex):
        raise SkipRequestException()

    if should_skip_commit(commit_message):

@@ -403,17 +403,31 @@ class GithubBuildTrigger(BuildTrigger):

      gh_client = self._get_client(auth_token)
      repo = gh_client.get_repo(source)
      master = repo.get_branch(repo.default_branch)
      master_sha = master.commit.sha
      short_sha = GithubBuildTrigger.get_display_name(master_sha)
      ref = 'refs/heads/%s' % (run_parameters.get('branch_name') or repo.default_branch)
      branch_name = run_parameters.get('branch_name') or repo.default_branch
      branch = repo.get_branch(branch_name)
      branch_sha = branch.commit.sha
      short_sha = GithubBuildTrigger.get_display_name(branch_sha)
      ref = 'refs/heads/%s' % (branch_name)

      return self._prepare_build(config, repo, master_sha, short_sha, ref)
      return self._prepare_build(config, repo, branch_sha, short_sha, ref)
    except GithubException as ghe:
      raise TriggerStartException(ghe.data['message'])

  def list_field_values(self, auth_token, config, field_name):
    if field_name == 'refs':
      branches = self.list_field_values(auth_token, config, 'branch_name')
      tags = self.list_field_values(auth_token, config, 'tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      return [tag.name for tag in repo.get_tags()]

    if field_name == 'branch_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
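matches_ref generalizes the old branch-only matching to full refs: the leading 'refs/' is stripped, the configured regex runs against 'heads/<branch>' or 'tags/<tag>', and the match must consume the whole string, so 'heads/master' cannot silently match 'heads/master-backup'. A standalone copy of the same logic:

  import re

  def matches_ref(ref, regex):
    # Same logic as GithubBuildTrigger.matches_ref above.
    match_string = ref.split('/', 1)[1]
    if not regex:
      return False

    m = regex.match(match_string)
    if not m:
      return False

    return len(m.group(0)) == len(match_string)

  regex = re.compile('heads/master|tags/v.+')
  assert matches_ref('refs/heads/master', regex)          # exact branch match
  assert not matches_ref('refs/heads/master-old', regex)  # a prefix match is not enough
  assert matches_ref('refs/tags/v1.0', regex)             # tags are now first-class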
@@ -2,13 +2,15 @@ import logging
import json
import hashlib

from flask import redirect, Blueprint, abort, send_file
from flask import redirect, Blueprint, abort, send_file, request

from app import app
from auth.auth import process_auth
from auth.auth_context import get_authenticated_user
from auth.permissions import ReadRepositoryPermission
from data import model
from data import database
from endpoints.trackhelper import track_and_log
from storage import Storage

from util.queuefile import QueueFile

@@ -16,28 +18,33 @@ from util.queueprocess import QueueProcess
from util.gzipwrap import GzipWrap
from util.dockerloadformat import build_docker_load_stream


verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)

def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_list):
def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_id_list):
  store = Storage(app)

  # For performance reasons, we load the full image list here, cache it, then disconnect from
  # the database.
  with database.UseThenDisconnect(app.config):
    image_list = list(model.get_matching_repository_images(namespace, repository, image_id_list))

  image_list.sort(key=lambda image: image_id_list.index(image.docker_image_id))

  def get_next_image():
    for current_image_id in image_list:
      yield model.get_repo_image(namespace, repository, current_image_id)
    for current_image in image_list:
      yield current_image

  def get_next_layer():
    for current_image_id in image_list:
      current_image_entry = model.get_repo_image(namespace, repository, current_image_id)
    for current_image_entry in image_list:
      current_image_path = store.image_layer_path(current_image_entry.storage.uuid)
      current_image_stream = store.stream_read_file(current_image_entry.storage.locations,
                                                    current_image_path)

      current_image_id = current_image_entry.id
      logger.debug('Returning image layer %s: %s' % (current_image_id, current_image_path))
      yield current_image_stream

  database.configure(app.config)
  stream = build_docker_load_stream(namespace, repository, tag, synthetic_image_id, image_json,
                                    get_next_image, get_next_layer)

@@ -45,12 +52,13 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, ima


def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
  database.configure(app.config)
  store = Storage(app)

  def handle_exception(ex):
    logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex)
    model.delete_derived_storage_by_uuid(linked_storage_uuid)

    with database.UseThenDisconnect(app.config):
      model.delete_derived_storage_by_uuid(linked_storage_uuid)

  queue_file.add_exception_handler(handle_exception)

@@ -59,9 +67,10 @@ def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, que
  queue_file.close()

  if not queue_file.raised_exception:
    done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
    done_uploading.uploading = False
    done_uploading.save()
    with database.UseThenDisconnect(app.config):
      done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
      done_uploading.uploading = False
      done_uploading.save()


@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])

@@ -76,10 +85,13 @@ def get_squashed_tag(namespace, repository, tag):
    abort(404)

  # Lookup the tag's image and storage.
  repo_image = model.get_repo_image(namespace, repository, tag_image.docker_image_id)
  repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
  if not repo_image:
    abort(404)

  # Log the action.
  track_and_log('repo_verb', repo_image.repository, tag=tag, verb='squash')

  store = Storage(app)
  derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
                                                 store.preferred_locations[0])

@@ -91,6 +103,9 @@ def get_squashed_tag(namespace, repository, tag):
    logger.debug('Redirecting to download URL for derived image %s', derived.uuid)
    return redirect(download_url)

  # Close the database handle here for this process before we send the long download.
  database.close_db_filter(None)

  logger.debug('Sending cached derived image %s', derived.uuid)
  return send_file(store.stream_read_file(derived.locations, derived_layer_path))

@@ -128,6 +143,9 @@ def get_squashed_tag(namespace, repository, tag):
  storage_args = (derived.uuid, derived.locations, storage_queue_file)
  QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)

  # Close the database handle here for this process before we send the long download.
  database.close_db_filter(None)

  # Return the client's data.
  return send_file(client_queue_file)
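One subtlety in the reworked _open_stream: get_matching_repository_images returns rows in whatever order the database yields them, so the list is re-sorted back into the caller's image_id_list (ancestry) order before the connection is dropped. The idiom in isolation:

  # Re-order fetched rows to match the requested ID order (the ancestry order here).
  requested = ['c3', 'a1', 'b2']
  rows = [{'docker_image_id': 'a1'}, {'docker_image_id': 'b2'}, {'docker_image_id': 'c3'}]

  rows.sort(key=lambda row: requested.index(row['docker_image_id']))
  assert [row['docker_image_id'] for row in rows] == ['c3', 'a1', 'b2']

  # Note: list.index is O(n) per row; for very deep ancestry chains a precomputed
  # {image_id: position} dict would avoid the quadratic scan.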
@@ -5,6 +5,7 @@ from flask import (abort, redirect, request, url_for, make_response, Response,
                   Blueprint, send_from_directory, jsonify)
from flask.ext.login import current_user
from urlparse import urlparse
from health.healthcheck import HealthCheck

from data import model
from data.model.oauth import DatabaseAuthorizationProvider

@@ -151,6 +152,20 @@ def v1():
  return index('')


@web.route('/health', methods=['GET'])
@no_cache
def health():
  db_healthy = model.check_health()
  buildlogs_healthy = build_logs.check_health()

  check = HealthCheck.get_check(app.config['HEALTH_CHECKER'][0], app.config['HEALTH_CHECKER'][1])
  (data, is_healthy) = check.conduct_healthcheck(db_healthy, buildlogs_healthy)

  response = jsonify(dict(data = data, is_healthy = is_healthy))
  response.status_code = 200 if is_healthy else 503
  return response


@web.route('/status', methods=['GET'])
@no_cache
def status():

@@ -160,6 +175,7 @@ def status():
  response = jsonify({
    'db_healthy': db_healthy,
    'buildlogs_healthy': buildlogs_healthy,
    'is_testing': app.config['TESTING'],
  })
  response.status_code = 200 if db_healthy and buildlogs_healthy else 503
0  health/__init__.py  (new file)

84  health/healthcheck.py  (new file)

@@ -0,0 +1,84 @@
import boto.rds2
import logging

logger = logging.getLogger(__name__)

class HealthCheck(object):
  def __init__(self):
    pass

  def conduct_healthcheck(self, db_healthy, buildlogs_healthy):
    """
    Conducts any custom healthcheck work, returning a dict representing the HealthCheck
    output and a boolean indicating whether the instance is healthy.
    """
    raise NotImplementedError

  @classmethod
  def get_check(cls, name, parameters):
    for subc in cls.__subclasses__():
      if subc.check_name() == name:
        return subc(**parameters)

    raise Exception('Unknown health check with name %s' % name)


class LocalHealthCheck(HealthCheck):
  def __init__(self):
    pass

  @classmethod
  def check_name(cls):
    return 'LocalHealthCheck'

  def conduct_healthcheck(self, db_healthy, buildlogs_healthy):
    data = {
      'db_healthy': db_healthy,
      'buildlogs_healthy': buildlogs_healthy
    }

    return (data, db_healthy and buildlogs_healthy)


class ProductionHealthCheck(HealthCheck):
  def __init__(self, access_key, secret_key):
    self.access_key = access_key
    self.secret_key = secret_key

  @classmethod
  def check_name(cls):
    return 'ProductionHealthCheck'

  def conduct_healthcheck(self, db_healthy, buildlogs_healthy):
    data = {
      'db_healthy': db_healthy,
      'buildlogs_healthy': buildlogs_healthy
    }

    # Only report unhealthy if the machine cannot connect to the DB. Redis isn't required for
    # mission critical/high availability operations.
    if not db_healthy:
      # If the database is marked as unhealthy, check the status of RDS directly. If RDS is
      # reporting as available, then the problem is with this instance. Otherwise, the problem is
      # with RDS, and we can keep this machine as 'healthy'.
      is_rds_working = False
      try:
        region = boto.rds2.connect_to_region('us-east-1',
          aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key)
        response = region.describe_db_instances()['DescribeDBInstancesResponse']
        result = response['DescribeDBInstancesResult']
        instances = result['DBInstances']
        status = instances[0]['DBInstanceStatus']
        is_rds_working = status == 'available'
      except:
        logger.exception("Exception while checking RDS status")
        pass

      data['db_available_checked'] = True
      data['db_available_status'] = is_rds_working

      # If RDS is down, then we still report the machine as healthy, so that it can handle
      # requests once RDS comes back up.
      return (data, not is_rds_working)

    return (data, db_healthy)
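get_check is a small registry-by-subclass factory: it scans HealthCheck.__subclasses__() for a matching check_name() and instantiates it with the configured parameters. Wired together with the /health endpoint above, app.config['HEALTH_CHECKER'] is a (name, parameters) pair; the sample values below are illustrative, not taken from the commit:

  from health.healthcheck import HealthCheck, LocalHealthCheck  # classes added above

  # e.g. HEALTH_CHECKER = ('LocalHealthCheck', {})
  #      HEALTH_CHECKER = ('ProductionHealthCheck', {'access_key': '...', 'secret_key': '...'})
  name, parameters = 'LocalHealthCheck', {}
  check = HealthCheck.get_check(name, parameters)

  data, is_healthy = check.conduct_healthcheck(True, False)
  assert data == {'db_healthy': True, 'buildlogs_healthy': False}
  assert not is_healthy  # LocalHealthCheck needs both subsystems; /health would return 503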
23  initdb.py

@@ -3,11 +3,12 @@ import json
import hashlib
import random
import calendar
import os

from datetime import datetime, timedelta
from email.utils import formatdate
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
                    savepoint_sqlite)
                    savepoint_sqlite, savepoint)
from uuid import UUID

from data.database import *

@@ -34,6 +35,8 @@ SAMPLE_CMDS = [["/bin/bash"],
REFERENCE_DATE = datetime(2013, 6, 23)
TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'

IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))

def __gen_checksum(image_id):
  h = hashlib.md5(image_id)
  return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()

@@ -144,7 +147,7 @@ def setup_database_for_testing(testcase):

  # Sanity check to make sure we're not killing our prod db
  db = model.db
  if not isinstance(model.db.obj, SqliteDatabase):
  if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  global db_initialized_for_testing

@@ -156,12 +159,18 @@ def setup_database_for_testing(testcase):
    initialize_database()
    populate_database()

    # Enable foreign key constraints.
    if not IS_TESTING_REAL_DATABASE:
      model.db.obj.execute_sql('PRAGMA foreign_keys = ON;')

    db_initialized_for_testing = True

  # Create a savepoint for the testcase.
  global testcases
  test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)

  global testcases
  testcases[testcase] = {}
  testcases[testcase]['savepoint'] = savepoint_sqlite(db)
  testcases[testcase]['savepoint'] = test_savepoint
  testcases[testcase]['savepoint'].__enter__()

def initialize_database():

@@ -240,6 +249,8 @@ def initialize_database():

  LogEntryKind.create(name='regenerate_robot_token')

  LogEntryKind.create(name='repo_verb')

  ImageStorageLocation.create(name='local_eu')
  ImageStorageLocation.create(name='local_us')

@@ -281,7 +292,7 @@ def wipe_database():

  # Sanity check to make sure we're not killing our prod db
  db = model.db
  if not isinstance(model.db.obj, SqliteDatabase):
  if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  drop_model_tables(all_models, fail_silently=True)

@@ -549,7 +560,7 @@ if __name__ == '__main__':
  log_level = getattr(logging, app.config['LOGGING_LEVEL'])
  logging.basicConfig(level=log_level)

  if not isinstance(model.db.obj, SqliteDatabase):
  if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to initialize production database!')

  initialize_database()
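The savepoint switch is what lets the test suite run against a real (non-SQLite) database: peewee's generic savepoint works on MySQL/Postgres, while savepoint_sqlite handles SQLite's transaction quirks. Each testcase enters a savepoint in setup and is expected to roll it back in teardown, leaving the populated fixture data untouched. A sketch of that round trip, assuming the peewee 2.x API imported above:

  from peewee import SqliteDatabase, savepoint_sqlite

  db = SqliteDatabase(':memory:')
  db.connect()

  # Equivalent of the setup/teardown pair around one testcase: raising out of the
  # context manager rolls the savepoint back, so writes made by the test vanish.
  try:
    with savepoint_sqlite(db):
      # ... testcase mutates the database here ...
      raise RuntimeError('force a rollback at teardown')
  except RuntimeError:
    pass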
@@ -19,6 +19,23 @@
  }
}

.scrollable-menu {
  max-height: 400px;
  overflow: auto;
}

.dropdown.input-group-addon {
  padding: 0px;
  border: 0px;
  background-color: transparent;
  text-align: left;
}

.dropdown.input-group-addon .dropdown-toggle {
  border-left: 0px;
  border-top-left-radius: 0px;
  border-bottom-left-radius: 0px;
}

#quay-logo {
  width: 100px;

@@ -3114,38 +3131,38 @@ p.editable:hover i {
  stroke-width: 1.5px;
}

.usage-chart {
.usage-chart-element {
  display: inline-block;
  vertical-align: middle;
  width: 200px;
  height: 200px;
}

.usage-chart .count-text {
.usage-chart-element .count-text {
  font-size: 22px;
}

.usage-chart.limit-at path.arc-0 {
.usage-chart-element.limit-at path.arc-0 {
  fill: #c09853;
}

.usage-chart.limit-over path.arc-0 {
.usage-chart-element.limit-over path.arc-0 {
  fill: #b94a48;
}

.usage-chart.limit-near path.arc-0 {
.usage-chart-element.limit-near path.arc-0 {
  fill: #468847;
}

.usage-chart.limit-over path.arc-1 {
.usage-chart-element.limit-over path.arc-1 {
  fill: #fcf8e3;
}

.usage-chart.limit-at path.arc-1 {
.usage-chart-element.limit-at path.arc-1 {
  fill: #f2dede;
}

.usage-chart.limit-near path.arc-1 {
.usage-chart-element.limit-near path.arc-1 {
  fill: #dff0d8;
}

@@ -4109,20 +4126,37 @@ pre.command:before {
  border-bottom-left-radius: 0px;
}

.trigger-setup-github-element .branch-reference.not-match {
  color: #ccc !important;
.trigger-setup-github-element .ref-reference {
  color: #ccc;
}

.trigger-setup-github-element .branch-reference.not-match a {
  color: #ccc !important;
.trigger-setup-github-element .ref-reference span {
  cursor: pointer;
  text-decoration: line-through;
}

.trigger-setup-github-element .branch-filter {
.trigger-setup-github-element .ref-reference:hover {
  color: #3276b1;
}

.trigger-setup-github-element .ref-reference:hover span {
  text-decoration: none;
}

.trigger-setup-github-element .ref-reference.match {
  color: black;
}

.trigger-setup-github-element .ref-reference.match span {
  text-decoration: none;
  cursor: default;
}

.trigger-setup-github-element .ref-filter {
  white-space: nowrap;
}

.trigger-setup-github-element .branch-filter span {
.trigger-setup-github-element .ref-filter span {
  display: inline-block;
}

@@ -4145,19 +4179,37 @@ pre.command:before {
  padding-left: 6px;
}

.trigger-setup-github-element .matching-branches {
.trigger-setup-github-element .matching-refs {
  margin: 0px;
  padding: 0px;
  margin-left: 10px;
  display: inline-block;
}

.trigger-setup-github-element .matching-branches li:before {
.trigger-setup-github-element .ref-matches {
  padding-left: 70px;
  position: relative;
  margin-bottom: 10px;
}

.trigger-setup-github-element .ref-matches .kind {
  font-weight: bold;
  position: absolute;
  top: 0px;
  left: 0px;
}

.trigger-setup-github-element .matching-refs.tags li:before {
  content: "\f02b";
  font-family: FontAwesome;
}

.trigger-setup-github-element .matching-refs.branches li:before {
  content: "\f126";
  font-family: FontAwesome;
}

.trigger-setup-github-element .matching-branches li {
.trigger-setup-github-element .matching-refs li {
  list-style: none;
  display: inline-block;
  margin-left: 10px;

@@ -4333,11 +4385,14 @@ pre.command:before {
}

.trigger-pull-credentials {
  margin-top: 4px;
  padding-left: 26px;
  font-size: 12px;
}

.trigger-pull-credentials .entity-reference {
  margin-left: 10px;
}

.trigger-pull-credentials .context-tooltip {
  color: gray;
  margin-right: 4px;

@@ -4345,7 +4400,8 @@ pre.command:before {

.trigger-description .trigger-description-subtitle {
  display: inline-block;
  margin-right: 34px;
  width: 100px;
  margin-bottom: 4px;
}

.trigger-option-section:not(:first-child) {
@@ -2,8 +2,15 @@
<span ng-if="provider == 'github'">
  <a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px" ng-disabled="signingIn">
    <i class="fa fa-github fa-lg"></i>
    <span ng-if="action != 'attach'">Sign In with GitHub</span>
    <span ng-if="action == 'attach'">Attach to GitHub Account</span>
    <span ng-if="action != 'attach'">
      Sign In with GitHub
      <span ng-if="isEnterprise('github')">Enterprise</span>
    </span>
    <span ng-if="action == 'attach'">
      Attach to GitHub
      <span ng-if="isEnterprise('github')">Enterprise</span>
      Account
    </span>
  </a>
</span>
@@ -24,10 +24,11 @@
</div>

<!-- Chart -->
<div>
  <div id="repository-usage-chart" class="usage-chart limit-{{limit}}"></div>
  <span class="usage-caption" ng-show="chart">Repository Usage</span>
</div>
<div class="usage-chart" total="subscribedPlan.privateRepos || 0"
     current="subscription.usedPrivateRepos || 0"
     limit="limit"
     usage-title="Repository Usage"
     ng-show="!planLoading"></div>

<!-- Plans Table -->
<table class="table table-hover plans-list-table" ng-show="!planLoading">
@@ -4,9 +4,9 @@
Push to GitHub repository <a href="https://github.com/{{ trigger.config.build_source }}" target="_new">{{ trigger.config.build_source }}</a>
<div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="!short">
  <div>
    <span class="trigger-description-subtitle">Branches:</span>
    <span ng-if="trigger.config.branch_regex">Matching Regular Expression {{ trigger.config.branch_regex }}</span>
    <span ng-if="!trigger.config.branch_regex">(All Branches)</span>
    <span class="trigger-description-subtitle">Branches/Tags:</span>
    <span ng-if="trigger.config.branchtag_regex">Matching Regular Expression {{ trigger.config.branchtag_regex }}</span>
    <span ng-if="!trigger.config.branchtag_regex">(All Branches and Tags)</span>
  </div>

  <div>
@@ -2,27 +2,27 @@
<!-- Current selected info -->
<div class="selected-info" ng-show="nextStepCounter > 0">
  <table style="width: 100%;">
    <tr ng-show="currentRepo && nextStepCounter > 0">
    <tr ng-show="state.currentRepo && nextStepCounter > 0">
      <td width="200px">
        Repository:
      </td>
      <td>
        <div class="current-repo">
          <img class="dropdown-select-icon github-org-icon"
               ng-src="{{ currentRepo.avatar_url ? currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
          {{ currentRepo.repo }}
               ng-src="{{ state.currentRepo.avatar_url ? state.currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
          <a ng-href="https://github.com/{{ state.currentRepo.repo }}" target="_blank">{{ state.currentRepo.repo }}</a>
        </div>
      </td>
    </tr>

    <tr ng-show="nextStepCounter > 1">
      <td>
        Branches:
        Branches and Tags:
      </td>
      <td>
        <div class="branch-filter">
          <span ng-if="!state.hasBranchFilter">(All Branches)</span>
          <span ng-if="state.hasBranchFilter">Regular Expression: <code>{{ state.branchFilter }}</code></span>
        <div class="ref-filter">
          <span ng-if="!state.hasBranchTagFilter">(Build All)</span>
          <span ng-if="state.hasBranchTagFilter">Regular Expression: <code>{{ state.branchTagFilter }}</code></span>
        </div>
      </td>
    </tr>

@@ -45,18 +45,18 @@
<div class="step-view" next-step-counter="nextStepCounter" current-step-valid="currentStepValid"
     steps-completed="stepsCompleted()">
  <!-- Repository select -->
  <div class="step-view-step" complete-condition="currentRepo" load-callback="loadRepositories(callback)"
  <div class="step-view-step" complete-condition="state.currentRepo" load-callback="loadRepositories(callback)"
       load-message="Loading Repositories">
    <div style="margin-bottom: 12px">Please choose the GitHub repository that will trigger the build:</div>
    <div class="dropdown-select" placeholder="'Select a repository'" selected-item="currentRepo"
         lookahead-items="repoLookahead">
    <div class="dropdown-select" placeholder="'Enter or select a repository'" selected-item="state.currentRepo"
         lookahead-items="repoLookahead" allow-custom-input="true">
      <!-- Icons -->
      <i class="dropdown-select-icon none-icon fa fa-github fa-lg"></i>
      <img class="dropdown-select-icon github-org-icon"
           ng-src="{{ currentRepo.avatar_url ? currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
           ng-src="{{ state.currentRepo.avatar_url ? state.currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">

      <!-- Dropdown menu -->
      <ul class="dropdown-select-menu" role="menu">
      <ul class="dropdown-select-menu scrollable-menu" role="menu">
        <li ng-repeat-start="org in orgs" role="presentation" class="dropdown-header github-org-header">
          <img ng-src="{{ org.info.avatar_url }}" class="github-org-icon">{{ org.info.name }}
        </li>

@@ -68,45 +68,76 @@
    </div>
  </div>

  <!-- Branch filter/select -->
  <div class="step-view-step" complete-condition="!state.hasBranchFilter || state.branchFilter"
       load-callback="loadBranches(callback)"
       load-message="Loading Branches">
  <!-- Branch/Tag filter/select -->
  <div class="step-view-step" complete-condition="!state.hasBranchTagFilter || state.branchTagFilter"
       load-callback="loadBranchesAndTags(callback)"
       load-message="Loading Branches and Tags">

    <div style="margin-bottom: 12px">Please choose the branches to which this trigger will apply:</div>
    <div style="margin-bottom: 12px">Please choose the branches and tags to which this trigger will apply:</div>
    <div style="margin-left: 20px;">
      <div class="btn-group btn-group-sm" style="margin-bottom: 12px">
        <button type="button" class="btn btn-default"
                ng-class="state.hasBranchFilter ? '' : 'active btn-info'" ng-click="state.hasBranchFilter = false">
          All Branches
                ng-class="state.hasBranchTagFilter ? '' : 'active btn-info'" ng-click="state.hasBranchTagFilter = false">
          All Branches and Tags
        </button>
        <button type="button" class="btn btn-default"
                ng-class="state.hasBranchFilter ? 'active btn-info' : ''" ng-click="state.hasBranchFilter = true">
                ng-class="state.hasBranchTagFilter ? 'active btn-info' : ''" ng-click="state.hasBranchTagFilter = true">
          Matching Regular Expression
        </button>
      </div>

      <div ng-show="state.hasBranchFilter" style="margin-top: 10px;">
      <div ng-show="state.hasBranchTagFilter" style="margin-top: 10px;">
        <form>
          <input class="form-control" type="text" ng-model="state.branchFilter"
                 placeholder="(Regular expression)" required>
          <div class="form-group">
            <div class="input-group">
              <input class="form-control" type="text" ng-model="state.branchTagFilter"
                     placeholder="(Regular expression. Examples: heads/branchname, tags/tagname)" required>
              <div class="dropdown input-group-addon">
                <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">
                  <span class="caret"></span>
                </button>
                <ul class="dropdown-menu pull-right">
                  <li><a href="javascript:void(0)" ng-click="state.branchTagFilter = 'heads/.+'">
                    <i class="fa fa-code-fork"></i>All Branches</a>
                  </li>
                  <li><a href="javascript:void(0)" ng-click="state.branchTagFilter = 'tags/.+'">
                    <i class="fa fa-tag"></i>All Tags</a>
                  </li>
                </ul>
              </div>
            </div>
          </div>
        </form>

        <div style="margin-top: 10px">
          <div ng-if="branchNames.length">
            Branches:
            <ul class="matching-branches">
          <div class="ref-matches" ng-if="branchNames.length">
            <span class="kind">Branches:</span>
            <ul class="matching-refs branches">
              <li ng-repeat="branchName in branchNames | limitTo:20"
                  class="branch-reference"
                  ng-class="isMatchingBranch(branchName, state.branchFilter) ? 'match' : 'not-match'">
                <a href="https://github.com/{{ currentRepo.repo }}/tree/{{ branchName }}" target="_blank">
                  class="ref-reference"
                  ng-class="isMatching('heads', branchName, state.branchTagFilter) ? 'match' : 'not-match'">
                <span ng-click="addRef('heads', branchName)" target="_blank">
                  {{ branchName }}
                </a>
                </span>
              </li>
            </ul>
            <span ng-if="branchNames.length > 20">...</span>
          </div>
          <div ng-if="state.branchFilter && !branchNames.length"
          <div class="ref-matches" ng-if="tagNames.length" style="margin-bottom: -20px">
            <span class="kind">Tags:</span>
            <ul class="matching-refs tags">
              <li ng-repeat="tagName in tagNames | limitTo:20"
                  class="ref-reference"
                  ng-class="isMatching('tags', tagName, state.branchTagFilter) ? 'match' : 'not-match'">
                <span ng-click="addRef('tags', tagName)" target="_blank">
                  {{ tagName }}
                </span>
              </li>
            </ul>
            <span ng-if="tagNames.length > 20">...</span>
          </div>
          <div ng-if="state.branchTagFilter && !branchNames.length"
               style="margin-top: 10px">
            <strong>Warning:</strong> No branches found
          </div>

@@ -147,7 +178,7 @@

  <div class="quay-spinner" ng-show="!locations && !locationError"></div>
  <div class="alert alert-warning" ng-show="locations && !locations.length">
    Warning: No Dockerfiles were found in {{ currentRepo.repo }}
    Warning: No Dockerfiles were found in {{ state.currentRepo.repo }}
  </div>
  <div class="alert alert-warning" ng-show="locationError">
    {{ locationError }}
static/directives/usage-chart.html
Normal file
2
static/directives/usage-chart.html
Normal file
|
@ -0,0 +1,2 @@
|
|||
<span id="usage-chart-element" class="usage-chart-element" ng-class="'limit-' + limit" ng-show="total != null"></span>
|
||||
<span class="usage-caption" ng-show="total != null && usageTitle">{{ usageTitle }}</span>
|
260  static/js/app.js

@@ -621,7 +621,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
  }]);


  $provide.factory('TriggerService', ['UtilService', '$sanitize', function(UtilService, $sanitize) {
  $provide.factory('TriggerService', ['UtilService', '$sanitize', 'KeyService',
      function(UtilService, $sanitize, KeyService) {
    var triggerService = {};

    var triggerTypes = {

@@ -640,10 +641,29 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
          'type': 'option',
          'name': 'branch_name'
        }
      ]
      ],

      'get_redirect_url': function(namespace, repository) {
        var redirect_uri = KeyService['githubRedirectUri'] + '/trigger/' +
                           namespace + '/' + repository;

        var authorize_url = KeyService['githubTriggerAuthorizeUrl'];
        var client_id = KeyService['githubTriggerClientId'];

        return authorize_url + 'client_id=' + client_id +
               '&scope=repo,user:email&redirect_uri=' + redirect_uri;
      }
    }
  }

  triggerService.getRedirectUrl = function(name, namespace, repository) {
    var type = triggerTypes[name];
    if (!type) {
      return '';
    }
    return type['get_redirect_url'](namespace, repository);
  };

  triggerService.getDescription = function(name, config) {
    var type = triggerTypes[name];
    if (!type) {

@@ -1313,29 +1333,37 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
      'id': 'repo_push',
      'title': 'Push to Repository',
      'icon': 'fa-upload'
    },
    {
      'id': 'build_queued',
      'title': 'Dockerfile Build Queued',
      'icon': 'fa-tasks'
    },
    {
      'id': 'build_start',
      'title': 'Dockerfile Build Started',
      'icon': 'fa-circle-o-notch'
    },
    {
      'id': 'build_success',
      'title': 'Dockerfile Build Successfully Completed',
      'icon': 'fa-check-circle-o'
    },
    {
      'id': 'build_failure',
      'title': 'Dockerfile Build Failed',
      'icon': 'fa-times-circle-o'
    }
  ];

  if (Features.BUILD_SUPPORT) {
    var buildEvents = [
      {
        'id': 'build_queued',
        'title': 'Dockerfile Build Queued',
        'icon': 'fa-tasks'
      },
      {
        'id': 'build_start',
        'title': 'Dockerfile Build Started',
        'icon': 'fa-circle-o-notch'
      },
      {
        'id': 'build_success',
        'title': 'Dockerfile Build Successfully Completed',
        'icon': 'fa-check-circle-o'
      },
      {
        'id': 'build_failure',
        'title': 'Dockerfile Build Failed',
        'icon': 'fa-times-circle-o'
      }];

    for (var i = 0; i < buildEvents.length; ++i) {
      events.push(buildEvents[i]);
    }
  }

  var methods = [
    {
      'id': 'quay_notification',
@@ -1538,7 +1566,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
  if (metadata.updated_tags && Object.getOwnPropertyNames(metadata.updated_tags).length) {
    return 'Repository {repository} has been pushed with the following tags updated: {updated_tags}';
  } else {
    return 'Repository {repository} has been pushed';
  }
},
'page': function(metadata) {
|
@ -1686,21 +1714,31 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
|
||||
$provide.factory('KeyService', ['$location', 'Config', function($location, Config) {
|
||||
var keyService = {}
|
||||
var oauth = window.__oauth;
|
||||
|
||||
keyService['stripePublishableKey'] = Config['STRIPE_PUBLISHABLE_KEY'];
|
||||
|
||||
keyService['githubClientId'] = Config['GITHUB_CLIENT_ID'];
|
||||
keyService['githubLoginClientId'] = Config['GITHUB_LOGIN_CLIENT_ID'];
|
||||
keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback');
|
||||
keyService['githubTriggerClientId'] = oauth['GITHUB_TRIGGER_CONFIG']['CLIENT_ID'];
|
||||
keyService['githubLoginClientId'] = oauth['GITHUB_LOGIN_CONFIG']['CLIENT_ID'];
|
||||
keyService['googleLoginClientId'] = oauth['GOOGLE_LOGIN_CONFIG']['CLIENT_ID'];
|
||||
|
||||
keyService['googleLoginClientId'] = Config['GOOGLE_LOGIN_CLIENT_ID'];
|
||||
keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback');
|
||||
keyService['googleRedirectUri'] = Config.getUrl('/oauth2/google/callback');
|
||||
|
||||
keyService['googleLoginUrl'] = 'https://accounts.google.com/o/oauth2/auth?response_type=code&';
|
||||
keyService['githubLoginUrl'] = 'https://github.com/login/oauth/authorize?';
|
||||
keyService['githubLoginUrl'] = oauth['GITHUB_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
|
||||
keyService['googleLoginUrl'] = oauth['GOOGLE_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
|
||||
|
||||
keyService['githubEndpoint'] = oauth['GITHUB_LOGIN_CONFIG']['GITHUB_ENDPOINT'];
|
||||
|
||||
keyService['githubTriggerAuthorizeUrl'] = oauth['GITHUB_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
|
||||
|
||||
keyService['googleLoginScope'] = 'openid email';
|
||||
keyService['githubLoginScope'] = 'user:email';
|
||||
keyService['googleLoginScope'] = 'openid email';
|
||||
|
||||
keyService.isEnterprise = function(service) {
|
||||
var isGithubEnterprise = keyService['githubLoginUrl'].indexOf('https://github.com/') < 0;
|
||||
return service == 'github' && isGithubEnterprise;
|
||||
};
|
||||
|
||||
keyService.getExternalLoginUrl = function(service, action) {
|
||||
var state_clause = '';
|
||||
|
@ -2548,7 +2586,10 @@ quayApp.directive('focusablePopoverContent', ['$timeout', '$popover', function (
|
|||
$body = $('body');
|
||||
var hide = function() {
|
||||
$body.off('click');
|
||||
|
||||
if (!scope) { return; }
|
||||
scope.$apply(function() {
|
||||
if (!scope) { return; }
|
||||
scope.$hide();
|
||||
});
|
||||
};
|
||||
|
@ -2645,9 +2686,9 @@ quayApp.directive('userSetup', function () {
|
|||
$scope.errorMessage = '';
|
||||
$scope.sent = true;
|
||||
$scope.sendingRecovery = false;
|
||||
}, function(result) {
|
||||
}, function(resp) {
|
||||
$scope.invalidRecovery = true;
|
||||
$scope.errorMessage = result.data;
|
||||
$scope.errorMessage = ApiService.getErrorMessage(resp, 'Cannot send recovery email');
|
||||
$scope.sent = false;
|
||||
$scope.sendingRecovery = false;
|
||||
});
|
||||
|
@ -2681,6 +2722,8 @@ quayApp.directive('externalLoginButton', function () {
|
|||
},
|
||||
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) {
|
||||
$scope.signingIn = false;
|
||||
$scope.isEnterprise = KeyService.isEnterprise;
|
||||
|
||||
$scope.startSignin = function(service) {
|
||||
$scope.signInStarted({'service': service});
|
||||
|
||||
|
@ -3137,6 +3180,22 @@ quayApp.directive('logsView', function () {
|
|||
'delete_robot': 'Delete Robot Account: {robot}',
|
||||
'create_repo': 'Create Repository: {repo}',
|
||||
'push_repo': 'Push to repository: {repo}',
|
||||
'repo_verb': function(metadata) {
|
||||
var prefix = '';
|
||||
if (metadata.verb == 'squash') {
|
||||
prefix = 'Pull of squashed tag {tag}'
|
||||
}
|
||||
|
||||
if (metadata.token) {
|
||||
prefix += ' via token {token}';
|
||||
} else if (metadata.username) {
|
||||
prefix += ' by {username}';
|
||||
} else {
|
||||
prefix += ' by {_ip}';
|
||||
}
|
||||
|
||||
return prefix;
|
||||
},
|
||||
'pull_repo': function(metadata) {
|
||||
if (metadata.token) {
|
||||
return 'Pull repository {repo} via token {token}';
|
||||
|
@ -3267,6 +3326,7 @@ quayApp.directive('logsView', function () {
|
|||
'delete_robot': 'Delete Robot Account',
|
||||
'create_repo': 'Create Repository',
|
||||
'push_repo': 'Push to repository',
|
||||
'repo_verb': 'Pull Repo Verb',
|
||||
'pull_repo': 'Pull repository',
|
||||
'delete_repo': 'Delete repository',
|
||||
'change_repo_permission': 'Change repository permission',
|
||||
|
@ -3358,7 +3418,6 @@ quayApp.directive('logsView', function () {
|
|||
$scope.logsPath = '/api/v1/' + url;
|
||||
|
||||
if (!$scope.chart) {
|
||||
window.console.log('creating chart');
|
||||
$scope.chart = new LogUsageChart(logKinds);
|
||||
$($scope.chart).bind('filteringChanged', function(e) {
|
||||
$scope.$apply(function() { $scope.kindsAllowed = e.allowed; });
|
||||
|
@ -4541,23 +4600,6 @@ quayApp.directive('planManager', function () {
|
|||
$scope.planChanged({ 'plan': subscribedPlan });
|
||||
}
|
||||
|
||||
if (sub.usedPrivateRepos > $scope.subscribedPlan.privateRepos) {
|
||||
$scope.limit = 'over';
|
||||
} else if (sub.usedPrivateRepos == $scope.subscribedPlan.privateRepos) {
|
||||
$scope.limit = 'at';
|
||||
} else if (sub.usedPrivateRepos >= $scope.subscribedPlan.privateRepos * 0.7) {
|
||||
$scope.limit = 'near';
|
||||
} else {
|
||||
$scope.limit = 'none';
|
||||
}
|
||||
|
||||
if (!$scope.chart) {
|
||||
$scope.chart = new UsageChart();
|
||||
$scope.chart.draw('repository-usage-chart');
|
||||
}
|
||||
|
||||
$scope.chart.update(sub.usedPrivateRepos || 0, $scope.subscribedPlan.privateRepos || 0);
|
||||
|
||||
$scope.planChanging = false;
|
||||
$scope.planLoading = false;
|
||||
});
|
||||
|
@ -4800,11 +4842,14 @@ quayApp.directive('stepView', function ($compile) {
|
|||
|
||||
this.next = function() {
|
||||
if (this.currentStepIndex >= 0) {
|
||||
if (!this.getCurrentStep().scope.completeCondition) {
|
||||
var currentStep = this.getCurrentStep();
|
||||
if (!currentStep || !currentStep.scope) { return; }
|
||||
|
||||
if (!currentStep.scope.completeCondition) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.getCurrentStep().element.hide();
|
||||
currentStep.element.hide();
|
||||
|
||||
if (this.unwatch) {
|
||||
this.unwatch();
|
||||
|
@ -5259,25 +5304,42 @@ quayApp.directive('triggerSetupGithub', function () {
|
|||
controller: function($scope, $element, ApiService) {
|
||||
$scope.analyzeCounter = 0;
|
||||
$scope.setupReady = false;
|
||||
|
||||
$scope.refs = null;
|
||||
$scope.branchNames = null;
|
||||
$scope.tagNames = null;
|
||||
|
||||
$scope.state = {
|
||||
'branchFilter': '',
|
||||
'hasBranchFilter': false,
|
||||
'currentRepo': null,
|
||||
'branchTagFilter': '',
|
||||
'hasBranchTagFilter': false,
|
||||
'isInvalidLocation': true,
|
||||
'currentLocation': null
|
||||
};
|
||||
|
||||
$scope.isMatchingBranch = function(branchName, filter) {
|
||||
$scope.isMatching = function(kind, name, filter) {
|
||||
try {
|
||||
var patt = new RegExp(filter);
|
||||
} catch (ex) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var m = branchName.match(patt);
|
||||
return m && m[0].length == branchName.length;
|
||||
var fullname = (kind + '/' + name);
|
||||
var m = fullname.match(patt);
|
||||
return m && m[0].length == fullname.length;
|
||||
}
|
||||
|
||||
$scope.addRef = function(kind, name) {
|
||||
if ($scope.isMatching(kind, name, $scope.state.branchTagFilter)) {
|
||||
return;
|
||||
}
|
||||
|
||||
var newFilter = kind + '/' + name;
|
||||
var existing = $scope.state.branchTagFilter;
|
||||
if (existing) {
|
||||
$scope.state.branchTagFilter = '(' + existing + ')|(' + newFilter + ')';
|
||||
} else {
|
||||
$scope.state.branchTagFilter = newFilter;
|
||||
}
|
||||
}
|
||||
|
||||
$scope.stepsCompleted = function() {
|
||||
|
@ -5297,17 +5359,29 @@ quayApp.directive('triggerSetupGithub', function () {
|
|||
}, ApiService.errorDisplay('Cannot load repositories'));
|
||||
};
|
||||
|
||||
$scope.loadBranches = function(callback) {
|
||||
$scope.loadBranchesAndTags = function(callback) {
|
||||
var params = {
|
||||
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
|
||||
'trigger_uuid': $scope.trigger['id'],
|
||||
'field_name': 'branch_name'
|
||||
'field_name': 'refs'
|
||||
};
|
||||
|
||||
ApiService.listTriggerFieldValues($scope.trigger['config'], params).then(function(resp) {
|
||||
$scope.branchNames = resp['values'];
|
||||
$scope.refs = resp['values'];
|
||||
$scope.branchNames = [];
|
||||
$scope.tagNames = [];
|
||||
|
||||
for (var i = 0; i < $scope.refs.length; ++i) {
|
||||
var ref = $scope.refs[i];
|
||||
if (ref.kind == 'branch') {
|
||||
$scope.branchNames.push(ref.name);
|
||||
} else {
|
||||
$scope.tagNames.push(ref.name);
|
||||
}
|
||||
}
|
||||
|
||||
callback();
|
||||
}, ApiService.errorDisplay('Cannot load branch names'));
|
||||
}, ApiService.errorDisplay('Cannot load branch and tag names'));
|
||||
};
|
||||
|
||||
$scope.loadLocations = function(callback) {
|
||||
|
@ -5357,7 +5431,7 @@ quayApp.directive('triggerSetupGithub', function () {
|
|||
};
|
||||
|
||||
$scope.selectRepo = function(repo, org) {
|
||||
$scope.currentRepo = {
|
||||
$scope.state.currentRepo = {
|
||||
'repo': repo,
|
||||
'avatar_url': org['info']['avatar_url'],
|
||||
'toString': function() {
|
||||
|
@ -5408,19 +5482,19 @@ quayApp.directive('triggerSetupGithub', function () {
|
|||
$scope.repoLookahead = repos;
|
||||
};
|
||||
|
||||
$scope.$watch('currentRepo', function(repo) {
|
||||
if (repo) {
|
||||
$scope.$watch('state.currentRepo', function(repo) {
|
||||
if (repo) {
|
||||
$scope.selectRepoInternal(repo);
|
||||
}
|
||||
});
|
||||
|
||||
$scope.$watch('state.branchFilter', function(bf) {
|
||||
$scope.$watch('state.branchTagFilter', function(bf) {
|
||||
if (!$scope.trigger) { return; }
|
||||
|
||||
if ($scope.state.hasBranchFilter) {
|
||||
$scope.trigger['config']['branch_regex'] = bf;
|
||||
if ($scope.state.hasBranchTagFilter) {
|
||||
$scope.trigger['config']['branchtag_regex'] = bf;
|
||||
} else {
|
||||
delete $scope.trigger['config']['branch_regex'];
|
||||
delete $scope.trigger['config']['branchtag_regex'];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -5901,6 +5975,54 @@ quayApp.directive('notificationsBubble', function () {
|
|||
});
|
||||
|
||||
|
||||
quayApp.directive('usageChart', function () {
|
||||
var directiveDefinitionObject = {
|
||||
priority: 0,
|
||||
templateUrl: '/static/directives/usage-chart.html',
|
||||
replace: false,
|
||||
transclude: false,
|
||||
restrict: 'C',
|
||||
scope: {
|
||||
'current': '=current',
|
||||
'total': '=total',
|
||||
'limit': '=limit',
|
||||
'usageTitle': '@usageTitle'
|
||||
},
|
||||
controller: function($scope, $element) {
|
||||
$scope.limit = "";
|
||||
|
||||
var chart = null;
|
||||
|
||||
var update = function() {
|
||||
if ($scope.current == null || $scope.total == null) { return; }
|
||||
if (!chart) {
|
||||
chart = new UsageChart();
|
||||
chart.draw('usage-chart-element');
|
||||
}
|
||||
|
||||
var current = $scope.current || 0;
|
||||
var total = $scope.total || 0;
|
||||
if (current > total) {
|
||||
$scope.limit = 'over';
|
||||
} else if (current == total) {
|
||||
$scope.limit = 'at';
|
||||
} else if (current >= total * 0.7) {
|
||||
$scope.limit = 'near';
|
||||
} else {
|
||||
$scope.limit = 'none';
|
||||
}
|
||||
|
||||
chart.update($scope.current, $scope.total);
|
||||
};
|
||||
|
||||
$scope.$watch('current', update);
|
||||
$scope.$watch('total', update);
|
||||
}
|
||||
};
|
||||
return directiveDefinitionObject;
|
||||
});
|
||||
|
||||
|
||||
quayApp.directive('notificationView', function () {
|
||||
var directiveDefinitionObject = {
|
||||
priority: 0,
|
||||
|
|
|
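The filter logic above is the heart of the branch/tag rename: a trigger's regex is now matched against the full "kind/name" ref string, and clicking a ref ORs it into the existing filter. A minimal Python sketch of both behaviors (the function names and the 'heads'/'tags' kinds are illustrative, not taken from the diff):

  import re

  def is_matching(kind, name, filter_regex):
    # As in $scope.isMatching: an invalid pattern matches nothing, and a
    # match only counts if it covers the entire "kind/name" string.
    try:
      patt = re.compile(filter_regex)
    except re.error:
      return False
    fullname = '%s/%s' % (kind, name)
    match = patt.match(fullname)
    return bool(match) and match.end() == len(fullname)

  def add_ref(current_filter, kind, name):
    # As in $scope.addRef: refs that already match are skipped; otherwise
    # the new ref is OR-ed onto the existing filter.
    if is_matching(kind, name, current_filter):
      return current_filter
    new_filter = '%s/%s' % (kind, name)
    if current_filter:
      return '(%s)|(%s)' % (current_filter, new_filter)
    return new_filter

  assert is_matching('heads', 'master', 'heads/master')
  assert not is_matching('heads', 'master', 'master')  # bare branch names no longer match
  assert add_ref('heads/master', 'tags', '1.0') == '(heads/master)|(tags/1.0)'

This is also why the migration tool further down has to rewrite stored 'branch_regex' values: a filter like 'master' silently stops matching once the branch is presented as 'heads/master'.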
@@ -1330,15 +1330,13 @@ function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, TriggerService
   var name = $routeParams.name;

   $scope.Features = Features;
-  $scope.permissions = {'team': [], 'user': []};
+  $scope.TriggerService = TriggerService;
+
+  $scope.permissions = {'team': [], 'user': [], 'loading': 2};
   $scope.logsShown = 0;
   $scope.deleting = false;

   $scope.permissionCache = {};

-  $scope.githubRedirectUri = KeyService.githubRedirectUri;
-  $scope.githubClientId = KeyService.githubClientId;
-
   $scope.showTriggerSetupCounter = 0;

   $scope.getBadgeFormat = function(format, repo) {
@@ -1680,6 +1678,7 @@ function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, TriggerService
   var permissionsFetch = Restangular.one('repository/' + namespace + '/' + name + '/permissions/' + kind + '/');
   permissionsFetch.get().then(function(resp) {
     $scope.permissions[kind] = resp.permissions;
+    $scope.permissions['loading']--;
   }, function() {
     $scope.permissions[kind] = null;
   });
@@ -1739,6 +1738,7 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, UserService
     if (login.service == 'github') {
       $scope.hasGithubLogin = true;
       $scope.githubLogin = login.metadata['service_username'];
+      $scope.githubEndpoint = KeyService['githubEndpoint'];
     }

     if (login.service == 'google') {
@@ -2049,12 +2049,10 @@ function V1Ctrl($scope, $location, UserService) {
   UserService.updateUserIn($scope);
 }

-function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService, PlanService, KeyService, Features) {
+function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService, PlanService, TriggerService, Features) {
   UserService.updateUserIn($scope);

   $scope.Features = Features;
-  $scope.githubRedirectUri = KeyService.githubRedirectUri;
-  $scope.githubClientId = KeyService.githubClientId;

   $scope.repo = {
     'is_public': 0,
@@ -2133,9 +2131,7 @@ function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService

     // Conduct the Github redirect if applicable.
     if ($scope.repo.initialize == 'github') {
-      window.location = 'https://github.com/login/oauth/authorize?client_id=' + $scope.githubClientId +
-        '&scope=repo,user:email&redirect_uri=' + $scope.githubRedirectUri + '/trigger/' +
-        repo.namespace + '/' + repo.name;
+      window.location = TriggerService.getRedirectUrl('github', repo.namespace, repo.name);
       return;
     }

@@ -2808,6 +2804,15 @@ function SuperUserAdminCtrl($scope, ApiService, Features, UserService) {
   $scope.logsCounter = 0;
   $scope.newUser = {};
   $scope.createdUsers = [];
+  $scope.systemUsage = null;
+
+  $scope.getUsage = function() {
+    if ($scope.systemUsage) { return; }
+
+    ApiService.getSystemUsage().then(function(resp) {
+      $scope.systemUsage = resp;
+    }, ApiService.errorDisplay('Cannot load system usage. Please contact support.'))
+  }

   $scope.loadLogs = function() {
     $scope.logsCounter++;
@@ -377,6 +377,23 @@ ImageHistoryTree.prototype.expandCollapsed_ = function(imageNode) {
 };


+/**
+ * Returns the level of the node in the tree. Recursively computes and updates
+ * if necessary.
+ */
+ImageHistoryTree.prototype.calculateLevel_ = function(node) {
+  if (node['level'] != null) {
+    return node['level'];
+  }
+
+  if (node['parent'] == null) {
+    return node['level'] = 0;
+  }
+
+  return node['level'] = (this.calculateLevel_(node['parent']) + 1);
+};
+
+
 /**
  * Builds the root node for the tree.
  */
@@ -392,11 +409,16 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
   var imageByDockerId = {};
   for (var i = 0; i < this.images_.length; ++i) {
     var image = this.images_[i];
+
+    // Skip images that are currently uploading.
+    if (image.uploading) { continue; }
+
     var imageNode = {
       "name": image.id.substr(0, 12),
       "children": [],
       "image": image,
-      "tags": image.tags
+      "tags": image.tags,
+      "level": null
     };
     imageByDockerId[image.id] = imageNode;
   }
@@ -405,6 +427,7 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
   // For each node, attach it to its immediate parent. If there is no immediate parent,
   // then the node is the root.
   var roots = [];
+  var nodeCountsByLevel = {};
   for (var i = 0; i < this.images_.length; ++i) {
     var image = this.images_[i];

@@ -420,10 +443,27 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
       imageNode.parent = parent;
       parent.children.push(imageNode);
     } else {
+      imageNode['level'] = 0;
       roots.push(imageNode);
     }
   }

+  // Calculate each node's level.
+  for (var i = 0; i < this.images_.length; ++i) {
+    var image = this.images_[i];
+
+    // Skip images that are currently uploading.
+    if (image.uploading) { continue; }
+
+    var imageNode = imageByDockerId[image.id];
+    var level = this.calculateLevel_(imageNode);
+    if (nodeCountsByLevel[level] == null) {
+      nodeCountsByLevel[level] = 1;
+    } else {
+      nodeCountsByLevel[level]++;
+    }
+  }
+
   // If there are multiple root nodes, then there is at least one branch without shared
   // ancestry and we use the virtual node. Otherwise, we use the root node found.
   var root = {
@@ -438,16 +478,12 @@ ImageHistoryTree.prototype.buildRoot_ = function() {

   // Determine the maximum number of nodes at a particular level. This is used to size
   // the width of the tree properly.
-  var maxChildCount = roots.length;
-  for (var i = 0; i < this.images_.length; ++i) {
-    var image = this.images_[i];
-
-    // Skip images that are currently uploading.
-    if (image.uploading) { continue; }
-
-    var imageNode = imageByDockerId[image.id];
-    maxChildCount = Math.max(maxChildCount, this.determineMaximumChildCount_(imageNode));
-  }
+  var maxChildCount = 0;
+  var maxChildHeight = 0;
+  Object.keys(nodeCountsByLevel).forEach(function(key){
+    maxChildCount = Math.max(maxChildCount, nodeCountsByLevel[key]);
+    maxChildHeight = Math.max(maxChildHeight, key);
+  });

   // Compact the graph so that any single chain of three (or more) images becomes a collapsed
   // section. We only do this if the max width is > 1 (since for a single width tree, no long
@@ -456,22 +492,21 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
     this.collapseNodes_(root);
   }

-  // Determine the maximum height of the tree.
-  var maxHeight = this.determineMaximumHeight_(root);
+  // Determine the maximum height of the tree, with collapsed nodes.
+  var maxCollapsedHeight = this.determineMaximumHeight_(root);

   // Finally, set the root node and return.
   this.root_ = root;

   return {
     'maxWidth': maxChildCount + 1,
-    'maxHeight': maxHeight
+    'maxHeight': maxCollapsedHeight
   };
 };


 /**
- * Collapses long single chains of nodes (3 or more) into single nodes to make the graph more
- * compact.
+ * Determines the height of the tree at its longest chain.
  */
 ImageHistoryTree.prototype.determineMaximumHeight_ = function(node) {
   var maxHeight = 0;
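The new calculateLevel_ helper is a memoized recursion: a node's level is cached on the node itself, roots sit at level 0, and every other node is one level below its parent. The same idea in Python (the dict-based nodes are illustrative):

  def calculate_level(node):
    # Return the cached level if it was already computed.
    if node.get('level') is not None:
      return node['level']

    # Roots have no parent and sit at level 0.
    if node.get('parent') is None:
      node['level'] = 0
      return 0

    # Otherwise the node is one level below its parent; cache and return.
    node['level'] = calculate_level(node['parent']) + 1
    return node['level']

  root = {'level': None, 'parent': None}
  child = {'level': None, 'parent': root}
  grandchild = {'level': None, 'parent': child}
  assert calculate_level(grandchild) == 2

Counting nodes per level (the nodeCountsByLevel map) then yields the tree's true maximum width in one pass, which is what the sizing code above switches to in place of the old per-node determineMaximumChildCount_ walk.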
@@ -97,7 +97,7 @@
       </div>
     </div>

-    <div class="row">
+    <div class="row" ng-show="Features.BUILD_SUPPORT">
       <div class="col-md-12">
         <div class="section">
@@ -92,8 +92,9 @@
         <i class="info-icon fa fa-info-circle" data-placement="left" data-content="Allow any number of users, robot accounts or teams to read, write or administer this repository"></i>
       </div>
       <div class="panel-body">
-        <table class="permissions">
+        <!-- Throbber -->
+        <span class="quay-spinner" ng-show="permissions.loading > 0"></span>
+        <table class="permissions" ng-show="permissions.loading <= 0">
           <thead>
             <tr>
               <td style="min-width: 400px;">User<span ng-show="repo.is_organization">/Team</span>/Robot Account</td>
@@ -260,7 +261,7 @@
               <span class="entity-reference" entity="trigger.pull_robot"></span>
             </div>
           </td>
-          <td style="white-space: nowrap;">
+          <td style="white-space: nowrap;" valign="top">
            <div class="dropdown" style="display: inline-block" ng-visible="trigger.is_active">
              <button class="btn btn-default dropdown-toggle" data-toggle="dropdown" data-title="Build History" bs-tooltip="tooltip.title" data-container="body"
                      ng-click="loadTriggerBuildHistory(trigger)">
@@ -305,7 +306,11 @@
             <b class="caret"></b>
           </button>
           <ul class="dropdown-menu dropdown-menu-right pull-right">
-            <li><a href="https://github.com/login/oauth/authorize?client_id={{ githubClientId }}&scope=repo,user:email&redirect_uri={{ githubRedirectUri }}/trigger/{{ repo.namespace }}/{{ repo.name }}"><i class="fa fa-github fa-lg"></i>GitHub - Repository Push</a></li>
+            <li>
+              <a href="{{ TriggerService.getRedirectUrl('github', repo.namespace, repo.name) }}">
+                <i class="fa fa-github fa-lg"></i>GitHub - Repository Push
+              </a>
+            </li>
           </ul>
         </div>
       </div>
@@ -13,6 +13,9 @@
         <li>
           <a href="javascript:void(0)" data-toggle="tab" data-target="#create-user">Create User</a>
         </li>
+        <li>
+          <a href="javascript:void(0)" data-toggle="tab" data-target="#usage-counter" ng-click="getUsage()">System Usage</a>
+        </li>
         <li>
           <a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">System Logs</a>
         </li>
@@ -27,6 +30,29 @@
           <div class="logsView" makevisible="logsCounter" all-logs="true"></div>
         </div>

+        <!-- Usage tab -->
+        <div id="usage-counter" class="tab-pane">
+          <div class="quay-spinner" ng-show="systemUsage == null"></div>
+          <div class="usage-chart" total="systemUsage.allowed" limit="systemUsageLimit"
+               current="systemUsage.usage" usage-title="Deployed Repositories"></div>
+
+          <!-- Alerts -->
+          <div class="alert alert-danger" ng-show="systemUsageLimit == 'over' && systemUsage">
+            You have deployed more repositories than your plan allows. Please
+            upgrade your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
+          </div>
+
+          <div class="alert alert-warning" ng-show="systemUsageLimit == 'at' && systemUsage">
+            You are at your current plan's number of allowed repositories. It might be time to think about
+            upgrading your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
+          </div>
+
+          <div class="alert alert-success" ng-show="systemUsageLimit == 'near' && systemUsage">
+            You are nearing the number of allowed deployed repositories. It might be time to think about
+            upgrading your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
+          </div>
+        </div>
+
         <!-- Create user tab -->
         <div id="create-user" class="tab-pane">
           <span class="quay-spinner" ng-show="creatingUser"></span>
@@ -177,7 +177,7 @@
       <div class="panel-body">
         <div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8">
           <i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
-          <b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
+          <b><a href="{{githubEndpoint}}{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
           <span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
                 perform-delete="detachExternalLogin('github')"></span>
         </div>
@@ -123,7 +123,7 @@
   <!-- Content view -->
   <div class="repo-content" ng-show="currentTag.image_id || currentImage">
     <!-- Image History -->
-    <div id="image-history" style="max-height: 10px;">
+    <div id="image-history">
      <div class="row">
        <!-- Tree View container -->
        <div class="col-md-8">
@@ -44,6 +44,7 @@
     window.__endpoints = {{ route_data|safe }}.apis;
     window.__features = {{ feature_set|safe }};
     window.__config = {{ config_set|safe }};
+    window.__oauth = {{ oauth_set|safe }};
     window.__token = '{{ csrf_token() }}';
   </script>

Binary file not shown.

test/fulldbtest.sh (new executable file, 60 lines)
@@ -0,0 +1,60 @@
+set -e
+
+up_mysql() {
+  # Run a MySQL database on port 3306 inside of Docker.
+  docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
+
+  # Sleep for 10s to let MySQL get started.
+  echo 'Sleeping for 10...'
+  sleep 10
+
+  # Add the database to mysql.
+  docker run --rm --link mysql:mysql mysql sh -c 'echo "create database genschema" | mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -ppassword'
+}
+
+down_mysql() {
+  docker kill mysql
+  docker rm mysql
+}
+
+up_postgres() {
+  # Run a Postgres database on port 5432 inside of Docker.
+  docker run --name postgres -p 5432:5432 -d postgres
+
+  # Sleep for 5s to let Postgres get started.
+  echo 'Sleeping for 5...'
+  sleep 5
+
+  # Add the database to postgres.
+  docker run --rm --link postgres:postgres postgres sh -c 'echo "create database genschema" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres'
+}
+
+down_postgres() {
+  docker kill postgres
+  docker rm postgres
+}
+
+run_tests() {
+  TEST_DATABASE_URI=$1 TEST=true python -m unittest discover
+}
+
+# Test (and generate, if requested) via MySQL.
+echo '> Starting MySQL'
+up_mysql
+
+echo '> Running Full Test Suite (mysql)'
+set +e
+run_tests "mysql+pymysql://root:password@192.168.59.103/genschema"
+set -e
+down_mysql
+
+# Test via Postgres.
+echo '> Starting Postgres'
+up_postgres
+
+echo '> Running Full Test Suite (postgres)'
+set +e
+run_tests "postgresql://postgres@192.168.59.103/genschema"
+set -e
+down_postgres
@@ -43,7 +43,7 @@ from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
                                       RepositoryTeamPermissionList, RepositoryUserPermissionList)

 from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement,
-                                     SuperUserSendRecoveryEmail)
+                                     SuperUserSendRecoveryEmail, UsageInformation)


 try:
@@ -3636,6 +3636,24 @@ class TestTeamMemberInvite(ApiTestCase):
     self._run_test('DELETE', 400, 'devtable', None)


+class TestUsageInformation(ApiTestCase):
+  def setUp(self):
+    ApiTestCase.setUp(self)
+    self._set_url(UsageInformation)
+
+  def test_get_anonymous(self):
+    self._run_test('GET', 401, None, None)
+
+  def test_get_freshuser(self):
+    self._run_test('GET', 403, 'freshuser', None)
+
+  def test_get_reader(self):
+    self._run_test('GET', 403, 'reader', None)
+
+  def test_get_devtable(self):
+    self._run_test('GET', 200, 'devtable', None)
+
+
 class TestSuperUserList(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)
@@ -43,7 +43,8 @@ from endpoints.api.organization import (OrganizationList, OrganizationMember,
 from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository
 from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
                                       RepositoryTeamPermissionList, RepositoryUserPermissionList)
-from endpoints.api.superuser import SuperUserLogs, SuperUserList, SuperUserManagement
+from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement,
+                                     UsageInformation)

 try:
   app.register_blueprint(api_bp, url_prefix='/api')
@@ -1139,6 +1140,8 @@ class TestChangeRepoVisibility(ApiTestCase):

 class TestDeleteRepository(ApiTestCase):
   SIMPLE_REPO = ADMIN_ACCESS_USER + '/simple'
+  COMPLEX_REPO = ADMIN_ACCESS_USER + '/complex'
+
   def test_deleterepo(self):
     self.login(ADMIN_ACCESS_USER)

@@ -1150,6 +1153,64 @@ class TestDeleteRepository(ApiTestCase):
                         params=dict(repository=self.SIMPLE_REPO),
                         expected_code=404)

+  def test_deleterepo2(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    self.deleteResponse(Repository,
+                        params=dict(repository=self.COMPLEX_REPO))
+
+    # Verify the repo was deleted.
+    self.getResponse(Repository,
+                     params=dict(repository=self.COMPLEX_REPO),
+                     expected_code=404)
+
+  def test_populate_and_delete_repo(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    # Make sure the repository has some images and tags.
+    self.assertTrue(len(list(model.get_repository_images(ADMIN_ACCESS_USER, 'complex'))) > 0)
+    self.assertTrue(len(list(model.list_repository_tags(ADMIN_ACCESS_USER, 'complex'))) > 0)
+
+    # Add some data for the repository, in addition to its already existing images and tags.
+    repository = model.get_repository(ADMIN_ACCESS_USER, 'complex')
+
+    # Create some access tokens.
+    access_token = model.create_access_token(repository, 'read')
+    model.create_access_token(repository, 'write')
+
+    delegate_token = model.create_delegate_token(ADMIN_ACCESS_USER, 'complex', 'sometoken', 'read')
+    model.create_delegate_token(ADMIN_ACCESS_USER, 'complex', 'sometoken', 'write')
+
+    # Create some repository builds.
+    model.create_repository_build(repository, access_token, {}, 'someid', 'foobar')
+    model.create_repository_build(repository, delegate_token, {}, 'someid2', 'foobar2')
+
+    # Create some notifications.
+    model.create_repo_notification(repository, 'repo_push', 'hipchat', {})
+    model.create_repo_notification(repository, 'build_queued', 'slack', {})
+
+    # Create some logs.
+    model.log_action('push_repo', ADMIN_ACCESS_USER, repository=repository)
+    model.log_action('push_repo', ADMIN_ACCESS_USER, repository=repository)
+
+    # Create some build triggers.
+    user = model.get_user(ADMIN_ACCESS_USER)
+    model.create_build_trigger(repository, 'github', 'sometoken', user)
+    model.create_build_trigger(repository, 'github', 'anothertoken', user)
+
+    # Create some email authorizations.
+    model.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'a@b.com')
+    model.create_email_authorization_for_repo(ADMIN_ACCESS_USER, 'complex', 'b@c.com')
+
+    # Delete the repository.
+    self.deleteResponse(Repository,
+                        params=dict(repository=self.COMPLEX_REPO))
+
+    # Verify the repo was deleted.
+    self.getResponse(Repository,
+                     params=dict(repository=self.COMPLEX_REPO),
+                     expected_code=404)
+

 class TestGetRepository(ApiTestCase):
   PUBLIC_REPO = PUBLIC_USER + '/publicrepo'

@@ -1267,7 +1328,9 @@ class TestRepoBuilds(ApiTestCase):
     status_json = self.getJsonResponse(RepositoryBuildStatus,
                                        params=dict(repository=ADMIN_ACCESS_USER + '/building', build_uuid=build['id']))

-    self.assertEquals(status_json, build)
+    self.assertEquals(status_json['id'], build['id'])
+    self.assertEquals(status_json['resource_key'], build['resource_key'])
+    self.assertEquals(status_json['trigger'], build['trigger'])

 class TestRequestRepoBuild(ApiTestCase):
   def test_requestrepobuild(self):
@@ -1779,7 +1842,7 @@ class TestOrgSubscription(ApiTestCase):

 class TestUserRobots(ApiTestCase):
   def getRobotNames(self):
-    return [r['name'] for r in self.getJsonResponse(UserRobotList)['robots']]
+    return [r['name'] for r in self.getJsonResponse(UserRobotList)['robots']]

   def test_robots(self):
     self.login(NO_ACCESS_USER)
@@ -1833,6 +1896,65 @@ class TestOrgRobots(ApiTestCase):
     return [r['name'] for r in self.getJsonResponse(OrgRobotList,
                                                     params=dict(orgname=ORGANIZATION))['robots']]

+  def test_delete_robot_after_use(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    # Create the robot.
+    self.putJsonResponse(OrgRobot,
+                         params=dict(orgname=ORGANIZATION, robot_shortname='bender'),
+                         expected_code=201)
+
+    # Add the robot to a team.
+    membername = ORGANIZATION + '+bender'
+    self.putJsonResponse(TeamMember,
+                         params=dict(orgname=ORGANIZATION, teamname='readers',
+                                     membername=membername))
+
+    # Add a repository permission.
+    self.putJsonResponse(RepositoryUserPermission,
+                         params=dict(repository=ORGANIZATION + '/' + ORG_REPO, username=membername),
+                         data=dict(role='read'))
+
+    # Add a permission prototype with the robot as the activating user.
+    self.postJsonResponse(PermissionPrototypeList,
+                          params=dict(orgname=ORGANIZATION),
+                          data=dict(role='read',
+                                    activating_user={'name': membername},
+                                    delegate={'kind': 'user',
+                                              'name': membername}))
+
+    # Add a permission prototype with the robot as the delegating user.
+    self.postJsonResponse(PermissionPrototypeList,
+                          params=dict(orgname=ORGANIZATION),
+                          data=dict(role='read',
+                                    delegate={'kind': 'user',
+                                              'name': membername}))
+
+    # Add a build trigger with the robot as the pull robot.
+    database.BuildTriggerService.create(name='fakeservice')
+
+    # Add a new fake trigger.
+    repo = model.get_repository(ORGANIZATION, ORG_REPO)
+    user = model.get_user(ADMIN_ACCESS_USER)
+    pull_robot = model.get_user(membername)
+    model.create_build_trigger(repo, 'fakeservice', 'sometoken', user, pull_robot=pull_robot)
+
+    # Delete the robot and verify it works.
+    self.deleteResponse(OrgRobot,
+                        params=dict(orgname=ORGANIZATION, robot_shortname='bender'))
+
+    # All the above records should now be deleted, along with the robot. We verify a few of the
+    # critical ones below.
+
+    # Check the team.
+    team = model.get_organization_team(ORGANIZATION, 'readers')
+    members = [member.username for member in model.get_organization_team_members(team.id)]
+    self.assertFalse(membername in members)
+
+    # Check the robot itself.
+    self.assertIsNone(model.get_user(membername))
+
   def test_robots(self):
     self.login(ADMIN_ACCESS_USER)

@@ -1931,7 +2053,14 @@ class TestOrganizationApplications(ApiTestCase):
     json = self.getJsonResponse(OrganizationApplications, params=dict(orgname=ORGANIZATION))

     self.assertEquals(2, len(json['applications']))
-    self.assertEquals(FAKE_APPLICATION_CLIENT_ID, json['applications'][0]['client_id'])
+
+    found = False
+    for application in json['applications']:
+      if application['client_id'] == FAKE_APPLICATION_CLIENT_ID:
+        found = True
+        break
+
+    self.assertTrue(found)

     # Add a new application.
     json = self.postJsonResponse(OrganizationApplications, params=dict(orgname=ORGANIZATION),
@@ -1943,7 +2072,6 @@ class TestOrganizationApplications(ApiTestCase):
     # Retrieve the apps list again
     list_json = self.getJsonResponse(OrganizationApplications, params=dict(orgname=ORGANIZATION))
     self.assertEquals(3, len(list_json['applications']))
-    self.assertEquals(json, list_json['applications'][2])


 class TestOrganizationApplicationResource(ApiTestCase):
@@ -2358,6 +2486,15 @@ class TestSuperUserList(ApiTestCase):
     assert len(json['users']) > 0


+class TestUsageInformation(ApiTestCase):
+  def test_get_usage(self):
+    self.login(ADMIN_ACCESS_USER)
+    json = self.getJsonResponse(UsageInformation)
+
+    assert 'usage' in json
+    assert 'allowed' in json
+
+
 class TestSuperUserManagement(ApiTestCase):
   def test_get_user(self):
     self.login(ADMIN_ACCESS_USER)
@@ -34,6 +34,17 @@ class TestGarbageColection(unittest.TestCase):
     for i in range(0, 2):
       model.find_or_create_derived_storage(image.storage, 'squash', preferred)

+    # Add some additional placements to the image.
+    for location_name in ['local_eu']:
+      location = database.ImageStorageLocation.get(name=location_name)
+
+      try:
+        database.ImageStoragePlacement.get(location=location, storage=image.storage)
+      except:
+        continue
+
+      database.ImageStoragePlacement.create(location=location, storage=image.storage)
+
     return image.storage

   def createRepository(self, namespace=ADMIN_ACCESS_USER, name=REPO, **kwargs):
@@ -3,6 +3,7 @@ import tarfile

 from StringIO import StringIO
 from util.streamlayerformat import StreamLayerMerger, AUFS_WHITEOUT
+from util.tarlayerformat import TarLayerReadException

 class TestStreamLayerMerger(unittest.TestCase):
   def create_layer(self, **kwargs):
@@ -30,6 +31,9 @@ class TestStreamLayerMerger(unittest.TestCase):

     return output.getvalue()

+  def create_empty_layer(self):
+    return ''
+
   def squash_layers(self, layers):
     def get_layers():
       return [StringIO(layer) for layer in layers]
@@ -337,5 +341,59 @@ class TestStreamLayerMerger(unittest.TestCase):
     self.assertHasFile(squashed, 'foobar/baz/some_file', 'foo')
     self.assertDoesNotHaveFile(squashed, 'foo/another_file')

+
+  def test_delete_root_directory(self):
+    third_layer = self.create_layer(
+      foo = 'build/first_file',
+      bar = 'build/second_file')
+
+    second_layer = self.create_layer(
+      _ = 'build')
+
+    squashed = self.squash_layers([second_layer, third_layer])
+
+    self.assertDoesNotHaveFile(squashed, 'build/first_file')
+    self.assertDoesNotHaveFile(squashed, 'build/second_file')
+
+
+  def test_tar_empty_layer(self):
+    third_layer = self.create_layer(
+      foo = 'build/first_file',
+      bar = 'build/second_file')
+
+    empty_layer = self.create_layer()
+
+    squashed = self.squash_layers([empty_layer, third_layer])
+
+    self.assertHasFile(squashed, 'build/first_file', 'foo')
+    self.assertHasFile(squashed, 'build/second_file', 'bar')
+
+
+  def test_data_empty_layer(self):
+    third_layer = self.create_layer(
+      foo = 'build/first_file',
+      bar = 'build/second_file')
+
+    empty_layer = self.create_empty_layer()
+
+    squashed = self.squash_layers([empty_layer, third_layer])
+
+    self.assertHasFile(squashed, 'build/first_file', 'foo')
+    self.assertHasFile(squashed, 'build/second_file', 'bar')
+
+
+  def test_broken_layer(self):
+    third_layer = self.create_layer(
+      foo = 'build/first_file',
+      bar = 'build/second_file')
+
+    broken_layer = 'not valid data'
+
+    try:
+      self.squash_layers([broken_layer, third_layer])
+      self.fail('Expected exception')
+    except TarLayerReadException as ex:
+      self.assertEquals('Could not read layer', ex.message)
+
 if __name__ == '__main__':
   unittest.main()
@@ -1,3 +1,5 @@
+import os
+
 from datetime import datetime, timedelta

 from config import DefaultConfig
@@ -14,8 +16,11 @@ class FakeTransaction(object):
 class TestConfig(DefaultConfig):
   TESTING = True

-  DB_URI = 'sqlite:///:memory:'
-  DB_CONNECTION_ARGS = {}
+  DB_URI = os.environ.get('TEST_DATABASE_URI', 'sqlite:///:memory:')
+  DB_CONNECTION_ARGS = {
+    'threadlocals': True,
+    'autorollback': True
+  }

   @staticmethod
   def create_transaction(db):
@@ -23,7 +28,7 @@ class TestConfig(DefaultConfig):

   DB_TRANSACTION_FACTORY = create_transaction

-  DISTRIBUTED_STORAGE_CONFIG = {'local_us': ['FakeStorage', {}]}
+  DISTRIBUTED_STORAGE_CONFIG = {'local_us': ['FakeStorage', {}], 'local_eu': ['FakeStorage', {}]}
   DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']

 BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs')
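Because DB_URI now honors TEST_DATABASE_URI, the full-database harness can retarget the whole suite without code changes. A sketch of the invocation (the URI is illustrative; test/fulldbtest.sh points at a boot2docker host address):

  import os
  import subprocess

  # Run the suite against an external database, mirroring run_tests in
  # test/fulldbtest.sh. With TEST_DATABASE_URI unset, the suite falls
  # back to in-memory SQLite as before.
  env = dict(os.environ,
             TEST_DATABASE_URI='mysql+pymysql://root:password@127.0.0.1/genschema',
             TEST='true')
  subprocess.check_call(['python', '-m', 'unittest', 'discover'], env=env)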
@@ -27,7 +27,7 @@ bad_count = 0
 good_count = 0

 def resolve_or_create(repo, docker_image_id, new_ancestry):
-  existing = model.get_repo_image(repo.namespace_user.username, repo.name, docker_image_id)
+  existing = model.get_repo_image_extended(repo.namespace_user.username, repo.name, docker_image_id)
   if existing:
     logger.debug('Found existing image: %s, %s', existing.id, docker_image_id)
     return existing
@@ -63,7 +63,7 @@ def all_ancestors_exist(ancestors):
   cant_fix = []
   for img in query:
     try:
-      with_locations = model.get_repo_image(img.repository.namespace_user.username,
+      with_locations = model.get_repo_image_extended(img.repository.namespace_user.username,
                                             img.repository.name, img.docker_image_id)
       ancestry_storage = store.image_ancestry_path(img.storage.uuid)
       if store.exists(with_locations.storage.locations, ancestry_storage):
tools/migratebranchregex.py (new file, 50 lines)

@@ -0,0 +1,50 @@
+import argparse
+import logging
+import json
+
+from app import app
+from data import model
+from data.database import RepositoryBuildTrigger, configure
+
+configure(app.config)
+
+logger = logging.getLogger(__name__)
+
+def run_branchregex_migration():
+  encountered = set()
+  while True:
+    found = list(RepositoryBuildTrigger.select().where(RepositoryBuildTrigger.config ** "%branch_regex%",
+                                                       ~(RepositoryBuildTrigger.config ** "%branchtag_regex%")))
+    found = [f for f in found if not f.uuid in encountered]
+
+    if not found:
+      logger.debug('No additional records found')
+      return
+
+    logger.debug('Found %s records to be changed', len(found))
+    for trigger in found:
+      encountered.add(trigger.uuid)
+
+      try:
+        config = json.loads(trigger.config)
+      except:
+        logging.error("Cannot parse config for trigger %s", trigger.uuid)
+        continue
+
+      logger.debug("Checking trigger %s", trigger.uuid)
+      existing_regex = config['branch_regex']
+      logger.debug("Found branch regex '%s'", existing_regex)
+
+      sub_regex = existing_regex.split('|')
+      new_regex = '|'.join(['heads/' + sub for sub in sub_regex])
+      config['branchtag_regex'] = new_regex
+
+      logger.debug("Updating to branchtag regex '%s'", new_regex)
+      trigger.config = json.dumps(config)
+      trigger.save()
+
+if __name__ == "__main__":
+  logging.basicConfig(level=logging.DEBUG)
+  logging.getLogger('boto').setLevel(logging.CRITICAL)
+
+  run_branchregex_migration()
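The rewrite itself is purely textual: each top-level alternative in the old branch-only regex gets a 'heads/' prefix so it keeps matching once refs are presented as "kind/name". For example (values are illustrative):

  existing_regex = 'master|release-.*'
  new_regex = '|'.join(['heads/' + sub for sub in existing_regex.split('|')])
  assert new_regex == 'heads/master|heads/release-.*'

Splitting on '|' assumes the alternation sits at the top level of the pattern; a '|' nested inside a group would be rewritten incorrectly, which is presumably an acceptable trade-off for the simple filters users actually store.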
tools/parsebuildpack.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from app import userfiles as user_files
+
+import workers.dockerfilebuild
+import requests
+
+w = workers.dockerfilebuild.DockerfileBuildWorker(100, None)
+
+resource_key = '5c0a985c-405d-4161-b0ac-603c3757b5f9'
+resource_url = user_files.get_file_url(resource_key, requires_cors=False)
+print resource_url
+
+docker_resource = requests.get(resource_url, stream=True)
+c_type = docker_resource.headers['content-type']
+
+if ';' in c_type:
+  c_type = c_type.split(';')[0]
+
+build_dir = w._mime_processors[c_type](docker_resource)
+print build_dir
@@ -21,3 +21,6 @@ if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
   raise Exception('Missing FROM command in Dockerfile')

 print serialize_dockerfile(parsed_dockerfile)
+
+with open('Dockerfile.test.out', 'w') as dockerfileobj:
+  dockerfileobj.write(serialize_dockerfile(parsed_dockerfile))
util/oauth.py (new file, 95 lines)

@@ -0,0 +1,95 @@
+import urlparse
+
+class OAuthConfig(object):
+  def __init__(self, app, key_name):
+    self.key_name = key_name
+    self.config = app.config.get(key_name) or {}
+
+  def service_name(self):
+    raise NotImplementedError
+
+  def token_endpoint(self):
+    raise NotImplementedError
+
+  def user_endpoint(self):
+    raise NotImplementedError
+
+  def login_endpoint(self):
+    raise NotImplementedError
+
+  def client_id(self):
+    return self.config.get('CLIENT_ID')
+
+  def client_secret(self):
+    return self.config.get('CLIENT_SECRET')
+
+  def _get_url(self, endpoint, *args):
+    for arg in args:
+      endpoint = urlparse.urljoin(endpoint, arg)
+
+    return endpoint
+
+
+class GithubOAuthConfig(OAuthConfig):
+  def __init__(self, app, key_name):
+    super(GithubOAuthConfig, self).__init__(app, key_name)
+
+  def service_name(self):
+    return 'GitHub'
+
+  def _endpoint(self):
+    endpoint = self.config.get('GITHUB_ENDPOINT', 'https://github.com')
+    if not endpoint.endswith('/'):
+      endpoint = endpoint + '/'
+    return endpoint
+
+  def authorize_endpoint(self):
+    return self._get_url(self._endpoint(), '/login/oauth/authorize') + '?'
+
+  def token_endpoint(self):
+    return self._get_url(self._endpoint(), '/login/oauth/access_token')
+
+  def _api_endpoint(self):
+    return self.config.get('API_ENDPOINT', self._get_url(self._endpoint(), '/api/v3/'))
+
+  def user_endpoint(self):
+    api_endpoint = self._api_endpoint()
+    return self._get_url(api_endpoint, 'user')
+
+  def email_endpoint(self):
+    api_endpoint = self._api_endpoint()
+    return self._get_url(api_endpoint, 'user/emails')
+
+  def get_public_config(self):
+    return {
+      'CLIENT_ID': self.client_id(),
+      'AUTHORIZE_ENDPOINT': self.authorize_endpoint(),
+      'GITHUB_ENDPOINT': self._endpoint()
+    }
+
+
+class GoogleOAuthConfig(OAuthConfig):
+  def __init__(self, app, key_name):
+    super(GoogleOAuthConfig, self).__init__(app, key_name)
+
+  def service_name(self):
+    return 'Google'
+
+  def authorize_endpoint(self):
+    return 'https://accounts.google.com/o/oauth2/auth?response_type=code&'
+
+  def token_endpoint(self):
+    return 'https://accounts.google.com/o/oauth2/token'
+
+  def user_endpoint(self):
+    return 'https://www.googleapis.com/oauth2/v1/userinfo'
+
+  def get_public_config(self):
+    return {
+      'CLIENT_ID': self.client_id(),
+      'AUTHORIZE_ENDPOINT': self.authorize_endpoint()
+    }
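A sketch of how these config objects are consumed (the Flask app and config values are illustrative; the 'GITHUB_LOGIN_CONFIG' key mirrors the one app.py now uses):

  from flask import Flask
  from util.oauth import GithubOAuthConfig

  app = Flask(__name__)
  app.config['GITHUB_LOGIN_CONFIG'] = {
    'CLIENT_ID': 'some-client-id',
    'CLIENT_SECRET': 'some-secret',
    # GITHUB_ENDPOINT defaults to https://github.com; override it to
    # point at a GitHub Enterprise install.
  }

  github_login = GithubOAuthConfig(app, 'GITHUB_LOGIN_CONFIG')

  # get_public_config() is what can safely reach the frontend (via the new
  # window.__oauth block in the base template): the client ID and the
  # endpoints, but never the client secret.
  print github_login.get_public_config()
  print github_login.authorize_endpoint()  # https://github.com/login/oauth/authorize?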
@@ -1,6 +1,11 @@
 import os
 import tarfile

+class TarLayerReadException(Exception):
+  """ Exception raised when reading a layer has failed. """
+  pass
+
+
 class TarLayerFormat(object):
   """ Class which creates a generator of the combined TAR data. """
   def __init__(self, tar_iterator):
@@ -10,9 +15,14 @@ class TarLayerFormat(object):
     for current_tar in self.tar_iterator():
       # Read the current TAR. If it is empty, we just continue
       # to the next one.
-      tar_file = tarfile.open(mode='r|*', fileobj=current_tar)
+      tar_file = None
+      try:
+        tar_file = tarfile.open(mode='r|*', fileobj=current_tar)
+      except tarfile.ReadError as re:
+        if re.message != 'empty file':
+          raise TarLayerReadException('Could not read layer')
+
+      if not tar_file:
+        continue

       # For each of the tar entries, yield them IF and ONLY IF we have not
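The 'empty file' check works because tarfile raises ReadError('empty file') when handed a zero-byte stream, and only that specific failure is swallowed; anything else becomes a TarLayerReadException. A small standalone demonstration of the underlying behavior (Python 2, matching the codebase):

  import tarfile
  from StringIO import StringIO

  try:
    tarfile.open(mode='r|*', fileobj=StringIO(''))
  except tarfile.ReadError as re:
    # A zero-byte layer surfaces as ReadError('empty file'); the merger
    # treats it as "skip this layer" rather than a hard failure.
    assert re.message == 'empty file'

  try:
    tarfile.open(mode='r|*', fileobj=StringIO('not valid data'))
  except tarfile.ReadError:
    # Garbage input also raises ReadError, but with a different message,
    # which is why the code inspects re.message before swallowing it.
    pass

This is exactly the split that the new test_data_empty_layer and test_broken_layer tests above exercise.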
@@ -3,7 +3,7 @@ import zlib
 import sys

 from data import model
-from data.database import ImageStorage
+from data.database import ImageStorage, configure
 from app import app, storage as store
 from data.database import db, db_random_func
 from util.gzipstream import ZLIB_GZIP_WINDOW
@@ -14,31 +14,53 @@ logger = logging.getLogger(__name__)

 CHUNK_SIZE = 5 * 1024 * 1024


 def backfill_sizes_from_data():
-  logger.setLevel(logging.DEBUG)
-  logger.debug('Starting uncompressed image size backfill')
-
   formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

   ch = logging.StreamHandler(sys.stdout)
   ch.setFormatter(formatter)
   logger.addHandler(ch)
+  logger.setLevel(logging.DEBUG)
+
+  logger.debug('Starting uncompressed image size backfill')
+  logger.debug('NOTE: This can be a LONG RUNNING OPERATION. Please wait!')
+
+  # Make sure we have a reference to the current DB.
+  configure(app.config)
+
+  logger.debug('Uncompressed backfill: Database configured')
+
+  # Check for any uncompressed images.
+  has_images = bool(list(ImageStorage
+                         .select(ImageStorage.uuid)
+                         .where(ImageStorage.uncompressed_size >> None,
+                                ImageStorage.image_size > 0,
+                                ImageStorage.uploading == False)
+                         .limit(1)))
+
+  if not has_images:
+    logger.debug('Uncompressed backfill: No migration needed')
+    return
+
+  logger.debug('Uncompressed backfill: Starting migration')
+  encountered = set()
   while True:
     # Load the record from the DB.
     batch_ids = list(ImageStorage
                      .select(ImageStorage.uuid)
                      .where(ImageStorage.uncompressed_size >> None,
                             ImageStorage.image_size > 0,
                             ImageStorage.uploading == False)
                      .limit(100)
                      .order_by(db_random_func()))

+    batch_ids = set([s.uuid for s in batch_ids]) - encountered
     logger.debug('Found %s images to process', len(batch_ids))
     if len(batch_ids) == 0:
       # We're done!
       return

-    for record in batch_ids:
-      uuid = record.uuid
+    counter = 1
+    for uuid in batch_ids:
+      encountered.add(uuid)
+
+      logger.debug('Processing image ID %s (%s/%s)', uuid, counter, len(batch_ids))
+      counter = counter + 1

       try:
         with_locs = model.get_storage_by_uuid(uuid)
@@ -76,6 +98,8 @@ def backfill_sizes_from_data():

     except model.InvalidImageException:
       logger.warning('Storage with uuid no longer exists: %s', uuid)
+    except IOError:
+      logger.warning('IOError on %s', uuid)
+    except MemoryError:
+      logger.warning('MemoryError on %s', uuid)
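The elided middle of this loop streams each stored layer through a gzip-capable zlib decompressor and records how many bytes come out. A hedged sketch of that measurement (the chunk source is illustrative; the real code reads CHUNK_SIZE pieces via the store API and writes the result to ImageStorage.uncompressed_size):

  import zlib

  def compute_uncompressed_size(chunks):
    # Decompress the layer incrementally, summing the output length; the
    # wbits value below decodes the gzip format, as util.gzipstream's
    # ZLIB_GZIP_WINDOW does.
    decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)
    size = 0
    for chunk in chunks:
      size += len(decompressor.decompress(chunk))
    size += len(decompressor.flush())
    return size

Note how the batches are drawn in random order and remembered in the encountered set: a record that keeps failing (IOError, MemoryError) is never retried, so the migration cannot wedge itself on one bad row.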
@@ -1,3 +1,6 @@
+import logging
+import traceback
+
 from flask.ext.mail import Message

 from app import mail, app, get_app_url
@@ -5,8 +8,12 @@ from data import model
 from util.gravatar import compute_hash
 from util.jinjautil import get_template_env

+logger = logging.getLogger(__name__)
 template_env = get_template_env("emails")

+class CannotSendEmailException(Exception):
+  pass
+
 def send_email(recipient, subject, template_file, parameters):
   app_title = app.config['REGISTRY_TITLE_SHORT']
   app_url = get_app_url()
@@ -30,8 +37,12 @@ def send_email(recipient, subject, template_file, parameters):

   msg = Message('[%s] %s' % (app_title, subject), recipients=[recipient])
   msg.html = rendered_html
-  mail.send(msg)
+
+  try:
+    mail.send(msg)
+  except Exception as ex:
+    logger.exception('Error while trying to send email to %s', recipient)
+    raise CannotSendEmailException(ex.message)

 def send_password_changed(username, email):
   send_email(email, 'Account password changed', 'passwordchanged', {
@@ -122,6 +122,8 @@ class Worker(object):
       with self._current_item_lock:
         current_queue_item = self.current_queue_item
         if current_queue_item is None:
+          # Close the db handle.
+          self._close_db_handle()
           break

       logger.debug('Queue gave us some work: %s', current_queue_item.body)
@@ -143,7 +145,7 @@ class Worker(object):
       self._stop.set()

     finally:
-      # Close the db handle periodically
+      # Close the db handle.
       self._close_db_handle()

       if not self._stop.is_set():