Merge branch 'master' into better-emails
commit b212dbb2ab
65 changed files with 2314 additions and 407 deletions
@@ -1,10 +1,10 @@
FROM phusion/baseimage:0.9.11
FROM phusion/baseimage:0.9.13

ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root

# Install the dependencies.
RUN apt-get update # 21AUG2014
RUN apt-get update # 10SEP2014

# New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev
@@ -1,10 +1,10 @@
FROM phusion/baseimage:0.9.11
FROM phusion/baseimage:0.9.13

ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root

# Install the dependencies.
RUN apt-get update # 21AUG2014
RUN apt-get update # 10SEP2014

# New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev

@@ -38,6 +38,7 @@ ADD conf/init/gunicorn /etc/service/gunicorn
ADD conf/init/nginx /etc/service/nginx
ADD conf/init/diffsworker /etc/service/diffsworker
ADD conf/init/notificationworker /etc/service/notificationworker
ADD conf/init/buildlogsarchiver /etc/service/buildlogsarchiver

# Download any external libs.
RUN mkdir static/fonts static/ldn

@@ -46,7 +47,7 @@ RUN venv/bin/python -m external_libraries
# Run the tests
RUN TEST=true venv/bin/python -m unittest discover

VOLUME ["/conf/stack", "/var/log", "/datastorage"]
VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp"]

EXPOSE 443 80
4	app.py

@@ -20,6 +20,7 @@ from util.exceptionlog import Sentry
from util.queuemetrics import QueueMetrics
from data.billing import Billing
from data.buildlogs import BuildLogs
from data.archivedlogs import LogArchive
from data.queue import WorkQueue
from data.userevent import UserEventsBuilderModule
from datetime import datetime

@@ -88,7 +89,8 @@ Principal(app, use_sessions=False)
login_manager = LoginManager(app)
mail = Mail(app)
storage = Storage(app)
userfiles = Userfiles(app)
userfiles = Userfiles(app, storage)
log_archive = LogArchive(app, storage)
analytics = Analytics(app)
billing = Billing(app)
sentry = Sentry(app)
@@ -135,8 +135,15 @@ def process_token(auth):
    logger.warning('Invalid token format: %s' % auth)
    abort(401, message='Invalid token format: %(auth)s', issue='invalid-auth-token', auth=auth)

  token_vals = {val[0]: val[1] for val in
  def safe_get(lst, index, default_value):
    try:
      return lst[index]
    except IndexError:
      return default_value

  token_vals = {val[0]: safe_get(val, 1, '') for val in
                (detail.split('=') for detail in token_details)}

  if 'signature' not in token_vals:
    logger.warning('Token does not contain signature: %s' % auth)
    abort(401, message='Token does not contain a valid signature: %(auth)s',
|
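For context on the hunk above: the old comprehension assumed every token detail had a value after its '=', so a bare key raised IndexError. A minimal sketch of the new behavior, using a made-up Authorization header payload:

token_details = ['signature=abc123', 'repository=devtable/simple', 'readonly']

def safe_get(lst, index, default_value):
  try:
    return lst[index]
  except IndexError:
    return default_value

# 'readonly' has no value, so safe_get falls back to '' instead of raising.
token_vals = {val[0]: safe_get(val, 1, '') for val in
              (detail.split('=') for detail in token_details)}
# {'signature': 'abc123', 'repository': 'devtable/simple', 'readonly': ''}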
2	conf/init/buildlogsarchiver/log/run	Executable file

@@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/buildlogsarchiver/

8	conf/init/buildlogsarchiver/run	Executable file

@@ -0,0 +1,8 @@
#! /bin/bash

echo 'Starting build logs archiver worker'

cd /
venv/bin/python -m workers.buildlogsarchiver 2>&1

echo 'Diffs worker exited'
12	config.py

@@ -89,10 +89,6 @@ class DefaultConfig(object):
  # Stripe config
  BILLING_TYPE = 'FakeStripe'

  # Userfiles
  USERFILES_TYPE = 'LocalUserfiles'
  USERFILES_PATH = 'test/data/registry/userfiles'

  # Analytics
  ANALYTICS_TYPE = 'FakeAnalytics'

@@ -172,3 +168,11 @@ class DefaultConfig(object):
  }

  DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']

  # Userfiles
  USERFILES_LOCATION = 'local_us'
  USERFILES_PATH = 'userfiles/'

  # Build logs archive
  LOG_ARCHIVE_LOCATION = 'local_us'
  LOG_ARCHIVE_PATH = 'logarchive/'
|
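For a deployment that keeps user files and archived build logs somewhere other than the default location, a hypothetical override of the new keys above might look like the following (the 's3_us_east' location name is an assumption, not a value from this commit):

class ProductionConfig(DefaultConfig):
  # Prefer a remote location first, falling back to local storage.
  DISTRIBUTED_STORAGE_PREFERENCE = ['s3_us_east', 'local_us']

  # Userfiles
  USERFILES_LOCATION = 's3_us_east'
  USERFILES_PATH = 'userfiles/'

  # Build logs archive
  LOG_ARCHIVE_LOCATION = 's3_us_east'
  LOG_ARCHIVE_PATH = 'logarchive/'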
56	data/archivedlogs.py	Normal file

@@ -0,0 +1,56 @@
import logging

from gzip import GzipFile
from flask import send_file, abort
from cStringIO import StringIO

from data.userfiles import DelegateUserfiles, UserfilesHandlers


JSON_MIMETYPE = 'application/json'


logger = logging.getLogger(__name__)


class LogArchiveHandlers(UserfilesHandlers):
  def get(self, file_id):
    path = self._files.get_file_id_path(file_id)
    try:
      with self._storage.stream_read_file(self._locations, path) as gzip_stream:
        with GzipFile(fileobj=gzip_stream) as unzipped:
          unzipped_buffer = StringIO(unzipped.read())
          return send_file(unzipped_buffer, mimetype=JSON_MIMETYPE)
    except IOError:
      abort(404)


class LogArchive(object):
  def __init__(self, app=None, distributed_storage=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app, distributed_storage)
    else:
      self.state = None

  def init_app(self, app, distributed_storage):
    location = app.config.get('LOG_ARCHIVE_LOCATION')
    path = app.config.get('LOG_ARCHIVE_PATH', None)

    handler_name = 'logarchive_handlers'

    log_archive = DelegateUserfiles(app, distributed_storage, location, path, handler_name)

    app.add_url_rule('/logarchive/<file_id>',
                     view_func=LogArchiveHandlers.as_view(handler_name,
                                                          distributed_storage=distributed_storage,
                                                          location=location,
                                                          files=log_archive))

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['log_archive'] = log_archive
    return log_archive

  def __getattr__(self, name):
    return getattr(self.state, name, None)
|
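The handler above expects each archived log to be a gzip-compressed JSON blob stored under the build's id. A rough sketch of the producing side (the archiver worker itself is not part of this excerpt, so the function name and payload shape are assumptions):

import json
from gzip import GzipFile
from cStringIO import StringIO

from app import log_archive

def archive_build_logs(build, entries):
  # Serialize the log entries and gzip them before handing the blob to the
  # DelegateUserfiles-backed store; LogArchiveHandlers.get decompresses the
  # stream and serves it back with the JSON mimetype.
  compressed = StringIO()
  with GzipFile(fileobj=compressed, mode='wb') as gzipped:
    gzipped.write(json.dumps({'logs': entries}))
  compressed.seek(0)

  log_archive.store_file(compressed, 'application/json',
                         content_encoding='gzip', file_id=build.uuid)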
@@ -3,7 +3,7 @@ import stripe
from datetime import datetime, timedelta
from calendar import timegm

from util.collections import AttrDict
from util.morecollections import AttrDict

PLANS = [
  # Deprecated Plans
@@ -2,6 +2,11 @@ import redis
import json

from util.dynamic import import_class
from datetime import timedelta


ONE_DAY = timedelta(days=1)


class BuildStatusRetrievalError(Exception):
  pass

@@ -25,7 +30,7 @@ class RedisBuildLogs(object):
    """
    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))

  def append_log_message(self, build_id, log_message, log_type=None):
  def append_log_message(self, build_id, log_message, log_type=None, log_data=None):
    """
    Wraps the message in an envelope and push it to the end of the log entry
    list and returns the index at which it was inserted.

@@ -37,6 +42,9 @@ class RedisBuildLogs(object):
    if log_type:
      log_obj['type'] = log_type

    if log_data:
      log_obj['data'] = log_data

    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1

  def get_log_entries(self, build_id, start_index):

@@ -51,6 +59,13 @@ class RedisBuildLogs(object):
    except redis.ConnectionError:
      raise BuildStatusRetrievalError('Cannot retrieve build logs')

  def expire_log_entries(self, build_id):
    """
    Sets the log entry to expire in 1 day.
    """
    self._redis.expire(self._logs_key(build_id), ONE_DAY)

  @staticmethod
  def _status_key(build_id):
    return 'builds/%s/status' % build_id

@@ -106,4 +121,4 @@ class BuildLogs(object):
    return buildlogs

  def __getattr__(self, name):
    return getattr(self.state, name, None)
    return getattr(self.state, name, None)
|
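A small usage sketch of the build-log changes above (the build id and payload are illustrative): log_data rides along in the log envelope, and expire_log_entries lets the archiver age out the Redis copy once the log has been persisted elsewhere.

from app import build_logs

build_id = 'some-build-uuid'  # hypothetical

# The optional log_data argument stores structured details (for example the
# failed pull information) alongside the human-readable message.
index = build_logs.append_log_message(build_id, 'HTTP code: 403',
                                      log_type='error',
                                      log_data={'base_image': 'devtable/simple'})

# Once the entries have been archived, the Redis key is set to expire after
# one day instead of being deleted immediately.
build_logs.expire_log_entries(build_id)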
@@ -8,7 +8,7 @@ from peewee import *
from data.read_slave import ReadSlaveModel
from sqlalchemy.engine.url import make_url
from urlparse import urlparse

from util.names import urn_generator

logger = logging.getLogger(__name__)

@@ -21,8 +21,24 @@ SCHEME_DRIVERS = {
  'postgresql+psycopg2': PostgresqlDatabase,
}

SCHEME_RANDOM_FUNCTION = {
  'mysql': fn.Rand,
  'mysql+pymysql': fn.Rand,
  'sqlite': fn.Random,
  'postgresql': fn.Random,
  'postgresql+psycopg2': fn.Random,
}

class CallableProxy(Proxy):
  def __call__(self, *args, **kwargs):
    if self.obj is None:
      raise AttributeError('Cannot use uninitialized Proxy.')
    return self.obj(*args, **kwargs)

db = Proxy()
read_slave = Proxy()
db_random_func = CallableProxy()


def _db_from_url(url, db_kwargs):
  parsed_url = make_url(url)

@@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs):

  return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)


def configure(config_object):
  db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
  write_db_uri = config_object['DB_URI']
  db.initialize(_db_from_url(write_db_uri, db_kwargs))

  parsed_write_uri = make_url(write_db_uri)
  db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])

  read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
  if read_slave_uri is not None:
    read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))

@@ -112,6 +132,15 @@ class TeamMember(BaseModel):
    )


class TeamMemberInvite(BaseModel):
  # Note: Either user OR email will be filled in, but not both.
  user = ForeignKeyField(User, index=True, null=True)
  email = CharField(null=True)
  team = ForeignKeyField(Team, index=True)
  inviter = ForeignKeyField(User, related_name='inviter')
  invite_token = CharField(default=urn_generator(['teaminvite']))


class LoginService(BaseModel):
  name = CharField(unique=True, index=True)

@@ -289,6 +318,16 @@ class RepositoryTag(BaseModel):
    )


class BUILD_PHASE(object):
  """ Build phases enum """
  ERROR = 'error'
  UNPACKING = 'unpacking'
  PULLING = 'pulling'
  BUILDING = 'building'
  PUSHING = 'pushing'
  COMPLETE = 'complete'


class RepositoryBuild(BaseModel):
  uuid = CharField(default=uuid_generator, index=True)
  repository = ForeignKeyField(Repository, index=True)

@@ -300,6 +339,7 @@ class RepositoryBuild(BaseModel):
  display_name = CharField()
  trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
  pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot')
  logs_archived = BooleanField(default=False)


class QueueItem(BaseModel):

@@ -410,4 +450,4 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
              OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind,
              Notification, ImageStorageLocation, ImageStoragePlacement,
              ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
              RepositoryAuthorizedEmail]
              RepositoryAuthorizedEmail, TeamMemberInvite]
|
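The CallableProxy above exists so call sites can get a driver-appropriate SQL random() without knowing which engine is configured: configure() binds db_random_func to fn.Rand for MySQL or fn.Random for SQLite/Postgres. An illustrative query built on it (not part of this commit):

from data.database import Repository, db_random_func

# After configure() has run, this orders rows with RAND() or RANDOM()
# depending on the active database driver.
random_repo = (Repository
               .select()
               .order_by(db_random_func())
               .get())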
@@ -8,7 +8,7 @@ from peewee import SqliteDatabase
from data.database import all_models, db
from app import app
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from util.collections import AttrDict
from util.morecollections import AttrDict

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -0,0 +1,26 @@
"""Add support for build log migration.

Revision ID: 34fd69f63809
Revises: 4a0c94399f38
Create Date: 2014-09-12 11:50:09.217777

"""

# revision identifiers, used by Alembic.
revision = '34fd69f63809'
down_revision = '4a0c94399f38'

from alembic import op
import sqlalchemy as sa


def upgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('repositorybuild', sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
    ### end Alembic commands ###


def downgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('repositorybuild', 'logs_archived')
    ### end Alembic commands ###
@@ -0,0 +1,81 @@
"""Email invites for joining a team.

Revision ID: 51d04d0e7e6f
Revises: 34fd69f63809
Create Date: 2014-09-15 23:51:35.478232

"""

# revision identifiers, used by Alembic.
revision = '51d04d0e7e6f'
down_revision = '34fd69f63809'

from alembic import op
import sqlalchemy as sa


def upgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('teammemberinvite',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('team_id', sa.Integer(), nullable=False),
        sa.Column('inviter_id', sa.Integer(), nullable=False),
        sa.Column('invite_token', sa.String(length=255), nullable=False),
        sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False)
    op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False)
    op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False)
    ### end Alembic commands ###

    # Manually add the new logentrykind types
    op.bulk_insert(tables.logentrykind,
                   [
                     {'id':42, 'name':'org_invite_team_member'},
                     {'id':43, 'name':'org_team_member_invite_accepted'},
                     {'id':44, 'name':'org_team_member_invite_declined'},
                     {'id':45, 'name':'org_delete_team_member_invite'},
                   ])

    op.bulk_insert(tables.notificationkind,
                   [
                     {'id':10, 'name':'org_team_invite'},
                   ])


def downgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.execute(
      (tables.logentrykind.delete()
       .where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member')))
    )

    op.execute(
      (tables.logentrykind.delete()
       .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted')))
    )

    op.execute(
      (tables.logentrykind.delete()
       .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined')))
    )

    op.execute(
      (tables.logentrykind.delete()
       .where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite')))
    )

    op.execute(
      (tables.notificationkind.delete()
       .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
    )

    op.drop_index('teammemberinvite_user_id', table_name='teammemberinvite')
    op.drop_index('teammemberinvite_team_id', table_name='teammemberinvite')
    op.drop_index('teammemberinvite_inviter_id', table_name='teammemberinvite')
    op.drop_table('teammemberinvite')
    ### end Alembic commands ###
@ -12,6 +12,7 @@ from util.backoff import exponential_backoff
|
|||
|
||||
|
||||
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
|
||||
PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -48,6 +49,9 @@ class InvalidRobotException(DataModelException):
|
|||
class InvalidTeamException(DataModelException):
|
||||
pass
|
||||
|
||||
class InvalidTeamMemberException(DataModelException):
|
||||
pass
|
||||
|
||||
|
||||
class InvalidPasswordException(DataModelException):
|
||||
pass
|
||||
|
@ -73,6 +77,10 @@ class TooManyUsersException(DataModelException):
|
|||
pass
|
||||
|
||||
|
||||
class UserAlreadyInTeam(DataModelException):
|
||||
pass
|
||||
|
||||
|
||||
class TooManyLoginAttemptsException(Exception):
|
||||
def __init__(self, message, retry_after):
|
||||
super(TooManyLoginAttemptsException, self).__init__(message)
|
||||
|
@ -332,12 +340,42 @@ def remove_team(org_name, team_name, removed_by_username):
|
|||
team.delete_instance(recursive=True, delete_nullable=True)
|
||||
|
||||
|
||||
def add_or_invite_to_team(inviter, team, user=None, email=None):
|
||||
# If the user is a member of the organization, then we simply add the
|
||||
# user directly to the team. Otherwise, an invite is created for the user/email.
|
||||
# We return None if the user was directly added and the invite object if the user was invited.
|
||||
requires_invite = True
|
||||
if user:
|
||||
orgname = team.organization.username
|
||||
|
||||
# If the user is part of the organization (or a robot), then no invite is required.
|
||||
if user.robot:
|
||||
requires_invite = False
|
||||
if not user.username.startswith(orgname + '+'):
|
||||
raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
|
||||
'as it is not a member of the organization')
|
||||
else:
|
||||
Org = User.alias()
|
||||
found = User.select(User.username)
|
||||
found = found.where(User.username == user.username).join(TeamMember).join(Team)
|
||||
found = found.join(Org, on=(Org.username == orgname)).limit(1)
|
||||
requires_invite = not any(found)
|
||||
|
||||
# If we have a valid user and no invite is required, simply add the user to the team.
|
||||
if user and not requires_invite:
|
||||
add_user_to_team(user, team)
|
||||
return None
|
||||
|
||||
email_address = email if not user else None
|
||||
return TeamMemberInvite.create(user=user, email=email_address, team=team, inviter=inviter)
|
||||
|
||||
|
||||
def add_user_to_team(user, team):
|
||||
try:
|
||||
return TeamMember.create(user=user, team=team)
|
||||
except Exception:
|
||||
raise DataModelException('Unable to add user \'%s\' to team: \'%s\'' %
|
||||
(user.username, team.name))
|
||||
raise UserAlreadyInTeam('User \'%s\' is already a member of team \'%s\'' %
|
||||
(user.username, team.name))
|
||||
|
||||
|
||||
def remove_user_from_team(org_name, team_name, username, removed_by_username):
|
||||
|
@ -512,6 +550,13 @@ def get_user(username):
|
|||
return None
|
||||
|
||||
|
||||
def get_user_or_org(username):
|
||||
try:
|
||||
return User.get(User.username == username, User.robot == False)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_user_or_org_by_customer_id(customer_id):
|
||||
try:
|
||||
return User.get(User.stripe_id == customer_id)
|
||||
|
@ -635,6 +680,10 @@ def get_organization_team_members(teamid):
|
|||
query = joined.where(Team.id == teamid)
|
||||
return query
|
||||
|
||||
def get_organization_team_member_invites(teamid):
|
||||
joined = TeamMemberInvite.select().join(Team).join(User)
|
||||
query = joined.where(Team.id == teamid)
|
||||
return query
|
||||
|
||||
def get_organization_member_set(orgname):
|
||||
Org = User.alias()
|
||||
|
@ -1824,6 +1873,32 @@ def delete_notifications_by_kind(target, kind_name):
|
|||
Notification.delete().where(Notification.target == target,
|
||||
Notification.kind == kind_ref).execute()
|
||||
|
||||
def delete_matching_notifications(target, kind_name, **kwargs):
|
||||
kind_ref = NotificationKind.get(name=kind_name)
|
||||
|
||||
# Load all notifications for the user with the given kind.
|
||||
notifications = Notification.select().where(
|
||||
Notification.target == target,
|
||||
Notification.kind == kind_ref)
|
||||
|
||||
# For each, match the metadata to the specified values.
|
||||
for notification in notifications:
|
||||
matches = True
|
||||
try:
|
||||
metadata = json.loads(notification.metadata_json)
|
||||
except:
|
||||
continue
|
||||
|
||||
for (key, value) in kwargs.iteritems():
|
||||
if not key in metadata or metadata[key] != value:
|
||||
matches = False
|
||||
break
|
||||
|
||||
if not matches:
|
||||
continue
|
||||
|
||||
notification.delete_instance()
|
||||
|
||||
|
||||
def get_active_users():
|
||||
return User.select().where(User.organization == False, User.robot == False)
|
||||
|
@ -1831,6 +1906,16 @@ def get_active_users():
|
|||
def get_active_user_count():
|
||||
return get_active_users().count()
|
||||
|
||||
|
||||
def detach_external_login(user, service_name):
|
||||
try:
|
||||
service = LoginService.get(name = service_name)
|
||||
except LoginService.DoesNotExist:
|
||||
return
|
||||
|
||||
FederatedLogin.delete().where(FederatedLogin.user == user,
|
||||
FederatedLogin.service == service).execute()
|
||||
|
||||
def delete_user(user):
|
||||
user.delete_instance(recursive=True, delete_nullable=True)
|
||||
|
||||
|
@ -1879,3 +1964,72 @@ def confirm_email_authorization_for_repo(code):
|
|||
found.save()
|
||||
|
||||
return found
|
||||
|
||||
|
||||
def delete_team_email_invite(team, email):
|
||||
found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
|
||||
found.delete_instance()
|
||||
|
||||
def delete_team_user_invite(team, user):
|
||||
try:
|
||||
found = TeamMemberInvite.get(TeamMemberInvite.user == user, TeamMemberInvite.team == team)
|
||||
except TeamMemberInvite.DoesNotExist:
|
||||
return False
|
||||
|
||||
found.delete_instance()
|
||||
return True
|
||||
|
||||
def lookup_team_invites(user):
|
||||
return TeamMemberInvite.select().where(TeamMemberInvite.user == user)
|
||||
|
||||
def lookup_team_invite(code, user=None):
|
||||
# Lookup the invite code.
|
||||
try:
|
||||
found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code)
|
||||
except TeamMemberInvite.DoesNotExist:
|
||||
raise DataModelException('Invalid confirmation code.')
|
||||
|
||||
if user and found.user != user:
|
||||
raise DataModelException('Invalid confirmation code.')
|
||||
|
||||
return found
|
||||
|
||||
def delete_team_invite(code, user=None):
|
||||
found = lookup_team_invite(code, user)
|
||||
|
||||
team = found.team
|
||||
inviter = found.inviter
|
||||
|
||||
found.delete_instance()
|
||||
|
||||
return (team, inviter)
|
||||
|
||||
|
||||
def confirm_team_invite(code, user):
|
||||
found = lookup_team_invite(code)
|
||||
|
||||
# If the invite is for a specific user, we have to confirm that here.
|
||||
if found.user is not None and found.user != user:
|
||||
message = """This invite is intended for user "%s".
|
||||
Please login to that account and try again.""" % found.user.username
|
||||
raise DataModelException(message)
|
||||
|
||||
# Add the user to the team.
|
||||
try:
|
||||
add_user_to_team(user, found.team)
|
||||
except UserAlreadyInTeam:
|
||||
# Ignore.
|
||||
pass
|
||||
|
||||
# Delete the invite and return the team.
|
||||
team = found.team
|
||||
inviter = found.inviter
|
||||
found.delete_instance()
|
||||
return (team, inviter)
|
||||
|
||||
def archivable_buildlogs_query():
|
||||
presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
|
||||
return (RepositoryBuild.select()
|
||||
.where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
|
||||
(RepositoryBuild.phase == BUILD_PHASE.ERROR) |
|
||||
(RepositoryBuild.started < presumed_dead_date), RepositoryBuild.logs_archived == False))
|
||||
|
|
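The new add_or_invite_to_team above returns None when the user could be added to the team directly and the created TeamMemberInvite row otherwise. A rough usage sketch (the organization, team, and user names are illustrative):

from data import model

team = model.get_organization_team('buynlarge', 'owners')
inviter = model.get_user('devtable')

invite = model.add_or_invite_to_team(inviter, team, email='someone@example.com')
if invite is None:
  print('added to the team directly')
else:
  print('invite created with token %s' % invite.invite_token)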
|
@@ -46,7 +46,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
  def validate_redirect_uri(self, client_id, redirect_uri):
    try:
      app = OAuthApplication.get(client_id=client_id)
      if app.redirect_uri and redirect_uri.startswith(app.redirect_uri):
      if app.redirect_uri and redirect_uri and redirect_uri.startswith(app.redirect_uri):
        return True
      return False
    except OAuthApplication.DoesNotExist:
|
@ -1,110 +1,35 @@
|
|||
import boto
|
||||
import os
|
||||
import logging
|
||||
import hashlib
|
||||
import magic
|
||||
|
||||
from boto.s3.key import Key
|
||||
from uuid import uuid4
|
||||
from flask import url_for, request, send_file, make_response, abort
|
||||
from flask.views import View
|
||||
from _pyio import BufferedReader
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FakeUserfiles(object):
|
||||
def prepare_for_drop(self, mime_type):
|
||||
return ('http://fake/url', uuid4())
|
||||
|
||||
def store_file(self, file_like_obj, content_type):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_file_url(self, file_id, expires_in=300):
|
||||
return ('http://fake/url')
|
||||
|
||||
def get_file_checksum(self, file_id):
|
||||
return 'abcdefg'
|
||||
|
||||
|
||||
class S3FileWriteException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class S3Userfiles(object):
|
||||
def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
|
||||
self._initialized = False
|
||||
self._bucket_name = bucket_name
|
||||
self._access_key = s3_access_key
|
||||
self._secret_key = s3_secret_key
|
||||
self._prefix = path
|
||||
self._s3_conn = None
|
||||
self._bucket = None
|
||||
|
||||
def _initialize_s3(self):
|
||||
if not self._initialized:
|
||||
self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
|
||||
self._bucket = self._s3_conn.get_bucket(self._bucket_name)
|
||||
self._initialized = True
|
||||
|
||||
def prepare_for_drop(self, mime_type):
|
||||
""" Returns a signed URL to upload a file to our bucket. """
|
||||
self._initialize_s3()
|
||||
logger.debug('Requested upload url with content type: %s' % mime_type)
|
||||
file_id = str(uuid4())
|
||||
full_key = os.path.join(self._prefix, file_id)
|
||||
k = Key(self._bucket, full_key)
|
||||
url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
|
||||
encrypt_key=True)
|
||||
return (url, file_id)
|
||||
|
||||
def store_file(self, file_like_obj, content_type):
|
||||
self._initialize_s3()
|
||||
file_id = str(uuid4())
|
||||
full_key = os.path.join(self._prefix, file_id)
|
||||
k = Key(self._bucket, full_key)
|
||||
logger.debug('Setting s3 content type to: %s' % content_type)
|
||||
k.set_metadata('Content-Type', content_type)
|
||||
bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
|
||||
rewind=True)
|
||||
|
||||
if bytes_written == 0:
|
||||
raise S3FileWriteException('Unable to write file to S3')
|
||||
|
||||
return file_id
|
||||
|
||||
def get_file_url(self, file_id, expires_in=300, mime_type=None):
|
||||
self._initialize_s3()
|
||||
full_key = os.path.join(self._prefix, file_id)
|
||||
k = Key(self._bucket, full_key)
|
||||
headers = None
|
||||
if mime_type:
|
||||
headers={'Content-Type': mime_type}
|
||||
|
||||
return k.generate_url(expires_in, headers=headers)
|
||||
|
||||
def get_file_checksum(self, file_id):
|
||||
self._initialize_s3()
|
||||
full_key = os.path.join(self._prefix, file_id)
|
||||
k = self._bucket.lookup(full_key)
|
||||
return k.etag[1:-1][:7]
|
||||
|
||||
|
||||
class UserfilesHandlers(View):
|
||||
methods = ['GET', 'PUT']
|
||||
|
||||
def __init__(self, local_userfiles):
|
||||
self._userfiles = local_userfiles
|
||||
def __init__(self, distributed_storage, location, files):
|
||||
self._storage = distributed_storage
|
||||
self._files = files
|
||||
self._locations = {location}
|
||||
self._magic = magic.Magic(mime=True)
|
||||
|
||||
def get(self, file_id):
|
||||
path = self._userfiles.file_path(file_id)
|
||||
if not os.path.exists(path):
|
||||
path = self._files.get_file_id_path(file_id)
|
||||
try:
|
||||
file_stream = self._storage.stream_read_file(self._locations, path)
|
||||
buffered = BufferedReader(file_stream)
|
||||
file_header_bytes = buffered.peek(1024)
|
||||
return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
|
||||
except IOError:
|
||||
abort(404)
|
||||
|
||||
logger.debug('Sending path: %s' % path)
|
||||
return send_file(path, mimetype=self._magic.from_file(path))
|
||||
|
||||
def put(self, file_id):
|
||||
input_stream = request.stream
|
||||
if request.headers.get('transfer-encoding') == 'chunked':
|
||||
|
@ -112,7 +37,10 @@ class UserfilesHandlers(View):
|
|||
# encoding (Gunicorn)
|
||||
input_stream = request.environ['wsgi.input']
|
||||
|
||||
self._userfiles.store_stream(input_stream, file_id)
|
||||
c_type = request.headers.get('Content-Type', None)
|
||||
|
||||
path = self._files.get_file_id_path(file_id)
|
||||
self._storage.stream_write(self._locations, path, input_stream, c_type)
|
||||
|
||||
return make_response('Okay')
|
||||
|
||||
|
@ -123,99 +51,82 @@ class UserfilesHandlers(View):
|
|||
return self.put(file_id)
|
||||
|
||||
|
||||
class LocalUserfiles(object):
|
||||
def __init__(self, app, path):
|
||||
self._root_path = path
|
||||
self._buffer_size = 64 * 1024 # 64 KB
|
||||
class DelegateUserfiles(object):
|
||||
def __init__(self, app, distributed_storage, location, path, handler_name):
|
||||
self._app = app
|
||||
self._storage = distributed_storage
|
||||
self._locations = {location}
|
||||
self._prefix = path
|
||||
self._handler_name = handler_name
|
||||
|
||||
def _build_url_adapter(self):
|
||||
return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
|
||||
script_name=self._app.config['APPLICATION_ROOT'] or '/',
|
||||
url_scheme=self._app.config['PREFERRED_URL_SCHEME'])
|
||||
|
||||
def prepare_for_drop(self, mime_type):
|
||||
def get_file_id_path(self, file_id):
|
||||
return os.path.join(self._prefix, file_id)
|
||||
|
||||
def prepare_for_drop(self, mime_type, requires_cors=True):
|
||||
""" Returns a signed URL to upload a file to our bucket. """
|
||||
logger.debug('Requested upload url with content type: %s' % mime_type)
|
||||
file_id = str(uuid4())
|
||||
with self._app.app_context() as ctx:
|
||||
ctx.url_adapter = self._build_url_adapter()
|
||||
return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
|
||||
path = self.get_file_id_path(file_id)
|
||||
url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors)
|
||||
|
||||
def file_path(self, file_id):
|
||||
if '..' in file_id or file_id.startswith('/'):
|
||||
raise RuntimeError('Invalid Filename')
|
||||
return os.path.join(self._root_path, file_id)
|
||||
if url is None:
|
||||
with self._app.app_context() as ctx:
|
||||
ctx.url_adapter = self._build_url_adapter()
|
||||
return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
|
||||
|
||||
def store_stream(self, stream, file_id):
|
||||
path = self.file_path(file_id)
|
||||
dirname = os.path.dirname(path)
|
||||
if not os.path.exists(dirname):
|
||||
os.makedirs(dirname)
|
||||
return (url, file_id)
|
||||
|
||||
with open(path, 'w') as to_write:
|
||||
while True:
|
||||
try:
|
||||
buf = stream.read(self._buffer_size)
|
||||
if not buf:
|
||||
break
|
||||
to_write.write(buf)
|
||||
except IOError:
|
||||
break
|
||||
def store_file(self, file_like_obj, content_type, content_encoding=None, file_id=None):
|
||||
if file_id is None:
|
||||
file_id = str(uuid4())
|
||||
|
||||
def store_file(self, file_like_obj, content_type):
|
||||
file_id = str(uuid4())
|
||||
|
||||
# Rewind the file to match what s3 does
|
||||
file_like_obj.seek(0, os.SEEK_SET)
|
||||
|
||||
self.store_stream(file_like_obj, file_id)
|
||||
path = self.get_file_id_path(file_id)
|
||||
self._storage.stream_write(self._locations, path, file_like_obj, content_type,
|
||||
content_encoding)
|
||||
return file_id
|
||||
|
||||
def get_file_url(self, file_id, expires_in=300):
|
||||
with self._app.app_context() as ctx:
|
||||
ctx.url_adapter = self._build_url_adapter()
|
||||
return url_for('userfiles_handlers', file_id=file_id, _external=True)
|
||||
def get_file_url(self, file_id, expires_in=300, requires_cors=False):
|
||||
path = self.get_file_id_path(file_id)
|
||||
url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors)
|
||||
|
||||
if url is None:
|
||||
with self._app.app_context() as ctx:
|
||||
ctx.url_adapter = self._build_url_adapter()
|
||||
return url_for(self._handler_name, file_id=file_id, _external=True)
|
||||
|
||||
return url
|
||||
|
||||
def get_file_checksum(self, file_id):
|
||||
path = self.file_path(file_id)
|
||||
sha_hash = hashlib.sha256()
|
||||
with open(path, 'r') as to_hash:
|
||||
while True:
|
||||
buf = to_hash.read(self._buffer_size)
|
||||
if not buf:
|
||||
break
|
||||
sha_hash.update(buf)
|
||||
return sha_hash.hexdigest()[:7]
|
||||
path = self.get_file_id_path(file_id)
|
||||
return self._storage.get_checksum(self._locations, path)
|
||||
|
||||
|
||||
class Userfiles(object):
|
||||
def __init__(self, app=None):
|
||||
def __init__(self, app=None, distributed_storage=None):
|
||||
self.app = app
|
||||
if app is not None:
|
||||
self.state = self.init_app(app)
|
||||
self.state = self.init_app(app, distributed_storage)
|
||||
else:
|
||||
self.state = None
|
||||
|
||||
def init_app(self, app):
|
||||
storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
|
||||
path = app.config.get('USERFILES_PATH', '')
|
||||
def init_app(self, app, distributed_storage):
|
||||
location = app.config.get('USERFILES_LOCATION')
|
||||
path = app.config.get('USERFILES_PATH', None)
|
||||
|
||||
if storage_type == 'LocalUserfiles':
|
||||
userfiles = LocalUserfiles(app, path)
|
||||
app.add_url_rule('/userfiles/<file_id>',
|
||||
view_func=UserfilesHandlers.as_view('userfiles_handlers',
|
||||
local_userfiles=userfiles))
|
||||
handler_name = 'userfiles_handlers'
|
||||
|
||||
elif storage_type == 'S3Userfiles':
|
||||
access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
|
||||
secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
|
||||
bucket = app.config.get('USERFILES_S3_BUCKET', '')
|
||||
userfiles = S3Userfiles(path, access_key, secret_key, bucket)
|
||||
userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
|
||||
|
||||
elif storage_type == 'FakeUserfiles':
|
||||
userfiles = FakeUserfiles()
|
||||
|
||||
else:
|
||||
raise RuntimeError('Unknown userfiles type: %s' % storage_type)
|
||||
app.add_url_rule('/userfiles/<file_id>',
|
||||
view_func=UserfilesHandlers.as_view(handler_name,
|
||||
distributed_storage=distributed_storage,
|
||||
location=location,
|
||||
files=userfiles))
|
||||
|
||||
# register extension with app
|
||||
app.extensions = getattr(app, 'extensions', {})
|
||||
|
|
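With the DelegateUserfiles rewrite above, callers no longer care whether user files live on local disk or in a remote store; they ask the extension for a URL and may request CORS support when the browser will fetch it directly. A small consumer sketch (the file id is illustrative):

from app import userfiles as user_files

file_id = 'some-dockerfile-archive-id'  # hypothetical

# Returns a signed direct-download URL when the storage engine supports it,
# otherwise a /userfiles/<file_id> URL served by UserfilesHandlers.
archive_url = user_files.get_file_url(file_id, requires_cors=True)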
17	emails/teaminvite.html	Normal file

@@ -0,0 +1,17 @@
{% extends "base.html" %}

{% block content %}

<h3>Invitation to join team {{ teamname }}</h3>

{{ inviter | user_reference }} has invited you to join team <b>{{ teamname }}</b> under organization {{ organization | user_reference }}.

<br><br>

To join the team, please click the following link:<br>
{{ app_link('confirminvite?code=' + token) }}

<br><br>
If you were not expecting this invitation, you can ignore this email.

{% endblock %}
@@ -1,9 +1,9 @@
import logging
import json

from flask import request
from flask import request, redirect

from app import app, userfiles as user_files, build_logs
from app import app, userfiles as user_files, build_logs, log_archive
from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
                           require_repo_read, require_repo_write, validate_json_request,
                           ApiResource, internal_only, format_date, api, Unauthorized, NotFound)

@@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False):
  }

  if can_write:
    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key)
    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)

  return resp

@@ -215,6 +215,10 @@ class RepositoryBuildLogs(RepositoryParamResource):

    build = model.get_repository_build(namespace, repository, build_uuid)

    # If the logs have been archived, just redirect to the completed archive
    if build.logs_archived:
      return redirect(log_archive.get_file_url(build.uuid))

    start = int(request.args.get('start', 0))

    try:

@@ -257,7 +261,7 @@ class FileDropResource(ApiResource):
  def post(self):
    """ Request a URL to which a file may be uploaded. """
    mime_type = request.get_json()['mimeType']
    (url, file_id) = user_files.prepare_for_drop(mime_type)
    (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True)
    return {
      'url': url,
      'file_id': str(file_id),
|
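The FileDropResource change means the returned upload URL is now CORS-enabled, so a browser can PUT the build context straight to storage. A rough client-side sketch (the host and endpoint path are assumptions; the real path comes from the @resource decorator, which is outside this excerpt):

import requests

resp = requests.post('https://quay.example.com/api/v1/filedrop/',
                     json={'mimeType': 'application/gzip'})
drop = resp.json()

# Upload the file directly to the signed (or app-served) URL.
with open('context.tar.gz', 'rb') as payload:
  requests.put(drop['url'], data=payload,
               headers={'Content-Type': 'application/gzip'})

print('uploaded file id: %s' % drop['file_id'])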
|
@ -1,12 +1,48 @@
|
|||
from flask import request
|
||||
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
|
||||
log_action, Unauthorized, NotFound, internal_only, require_scope)
|
||||
log_action, Unauthorized, NotFound, internal_only, require_scope,
|
||||
query_param, truthy_bool, parse_args, require_user_admin)
|
||||
from auth.permissions import AdministerOrganizationPermission, ViewTeamPermission
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from data import model
|
||||
from util.useremails import send_org_invite_email
|
||||
from util.gravatar import compute_hash
|
||||
|
||||
def try_accept_invite(code, user):
|
||||
(team, inviter) = model.confirm_team_invite(code, user)
|
||||
|
||||
model.delete_matching_notifications(user, 'org_team_invite', code=code)
|
||||
|
||||
orgname = team.organization.username
|
||||
log_action('org_team_member_invite_accepted', orgname, {
|
||||
'member': user.username,
|
||||
'team': team.name,
|
||||
'inviter': inviter.username
|
||||
})
|
||||
|
||||
return team
|
||||
|
||||
|
||||
def handle_addinvite_team(inviter, team, user=None, email=None):
|
||||
invite = model.add_or_invite_to_team(inviter, team, user, email)
|
||||
if not invite:
|
||||
# User was added to the team directly.
|
||||
return
|
||||
|
||||
orgname = team.organization.username
|
||||
if user:
|
||||
model.create_notification('org_team_invite', user, metadata = {
|
||||
'code': invite.invite_token,
|
||||
'inviter': inviter.username,
|
||||
'org': orgname,
|
||||
'team': team.name
|
||||
})
|
||||
|
||||
send_org_invite_email(user.username if user else email, user.email if user else email,
|
||||
orgname, team.name, inviter.username, invite.invite_token)
|
||||
return invite
|
||||
|
||||
def team_view(orgname, team):
|
||||
view_permission = ViewTeamPermission(orgname, team.name)
|
||||
|
@ -19,14 +55,28 @@ def team_view(orgname, team):
|
|||
'role': role
|
||||
}
|
||||
|
||||
def member_view(member):
|
||||
def member_view(member, invited=False):
|
||||
return {
|
||||
'name': member.username,
|
||||
'kind': 'user',
|
||||
'is_robot': member.robot,
|
||||
'gravatar': compute_hash(member.email) if not member.robot else None,
|
||||
'invited': invited,
|
||||
}
|
||||
|
||||
|
||||
def invite_view(invite):
|
||||
if invite.user:
|
||||
return member_view(invite.user, invited=True)
|
||||
else:
|
||||
return {
|
||||
'email': invite.email,
|
||||
'kind': 'invite',
|
||||
'gravatar': compute_hash(invite.email),
|
||||
'invited': True
|
||||
}
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>')
|
||||
@internal_only
|
||||
class OrganizationTeam(ApiResource):
|
||||
|
@ -114,8 +164,10 @@ class OrganizationTeam(ApiResource):
|
|||
@internal_only
|
||||
class TeamMemberList(ApiResource):
|
||||
""" Resource for managing the list of members for a team. """
|
||||
@parse_args
|
||||
@query_param('includePending', 'Whether to include pending members', type=truthy_bool, default=False)
|
||||
@nickname('getOrganizationTeamMembers')
|
||||
def get(self, orgname, teamname):
|
||||
def get(self, args, orgname, teamname):
|
||||
""" Retrieve the list of members for the specified team. """
|
||||
view_permission = ViewTeamPermission(orgname, teamname)
|
||||
edit_permission = AdministerOrganizationPermission(orgname)
|
||||
|
@ -128,11 +180,18 @@ class TeamMemberList(ApiResource):
|
|||
raise NotFound()
|
||||
|
||||
members = model.get_organization_team_members(team.id)
|
||||
return {
|
||||
'members': {m.username : member_view(m) for m in members},
|
||||
invites = []
|
||||
|
||||
if args['includePending'] and edit_permission.can():
|
||||
invites = model.get_organization_team_member_invites(team.id)
|
||||
|
||||
data = {
|
||||
'members': [member_view(m) for m in members] + [invite_view(i) for i in invites],
|
||||
'can_edit': edit_permission.can()
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
raise Unauthorized()
|
||||
|
||||
|
||||
|
@ -142,7 +201,7 @@ class TeamMember(ApiResource):
|
|||
@require_scope(scopes.ORG_ADMIN)
|
||||
@nickname('updateOrganizationTeamMember')
|
||||
def put(self, orgname, teamname, membername):
|
||||
""" Add a member to an existing team. """
|
||||
""" Adds or invites a member to an existing team. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
team = None
|
||||
|
@ -159,23 +218,149 @@ class TeamMember(ApiResource):
|
|||
if not user:
|
||||
raise request_error(message='Unknown user')
|
||||
|
||||
# Add the user to the team.
|
||||
model.add_user_to_team(user, team)
|
||||
log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname})
|
||||
return member_view(user)
|
||||
# Add or invite the user to the team.
|
||||
inviter = get_authenticated_user()
|
||||
invite = handle_addinvite_team(inviter, team, user=user)
|
||||
if not invite:
|
||||
log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname})
|
||||
return member_view(user, invited=False)
|
||||
|
||||
# User was invited.
|
||||
log_action('org_invite_team_member', orgname, {
|
||||
'user': membername,
|
||||
'member': membername,
|
||||
'team': teamname
|
||||
})
|
||||
return member_view(user, invited=True)
|
||||
|
||||
raise Unauthorized()
|
||||
|
||||
@require_scope(scopes.ORG_ADMIN)
|
||||
@nickname('deleteOrganizationTeamMember')
|
||||
def delete(self, orgname, teamname, membername):
|
||||
""" Delete an existing member of a team. """
|
||||
""" Delete a member of a team. If the user is merely invited to join
|
||||
the team, then the invite is removed instead.
|
||||
"""
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
# Remove the user from the team.
|
||||
invoking_user = get_authenticated_user().username
|
||||
|
||||
# Find the team.
|
||||
try:
|
||||
team = model.get_organization_team(orgname, teamname)
|
||||
except model.InvalidTeamException:
|
||||
raise NotFound()
|
||||
|
||||
# Find the member.
|
||||
member = model.get_user(membername)
|
||||
if not member:
|
||||
raise NotFound()
|
||||
|
||||
# First attempt to delete an invite for the user to this team. If none found,
|
||||
# then we try to remove the user directly.
|
||||
if model.delete_team_user_invite(team, member):
|
||||
log_action('org_delete_team_member_invite', orgname, {
|
||||
'user': membername,
|
||||
'team': teamname,
|
||||
'member': membername
|
||||
})
|
||||
return 'Deleted', 204
|
||||
|
||||
model.remove_user_from_team(orgname, teamname, membername, invoking_user)
|
||||
log_action('org_remove_team_member', orgname, {'member': membername, 'team': teamname})
|
||||
return 'Deleted', 204
|
||||
|
||||
raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>/invite/<email>')
|
||||
class InviteTeamMember(ApiResource):
|
||||
""" Resource for inviting a team member via email address. """
|
||||
@require_scope(scopes.ORG_ADMIN)
|
||||
@nickname('inviteTeamMemberEmail')
|
||||
def put(self, orgname, teamname, email):
|
||||
""" Invites an email address to an existing team. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
team = None
|
||||
|
||||
# Find the team.
|
||||
try:
|
||||
team = model.get_organization_team(orgname, teamname)
|
||||
except model.InvalidTeamException:
|
||||
raise NotFound()
|
||||
|
||||
# Invite the email to the team.
|
||||
inviter = get_authenticated_user()
|
||||
invite = handle_addinvite_team(inviter, team, email=email)
|
||||
log_action('org_invite_team_member', orgname, {
|
||||
'email': email,
|
||||
'team': teamname,
|
||||
'member': email
|
||||
})
|
||||
return invite_view(invite)
|
||||
|
||||
raise Unauthorized()
|
||||
|
||||
@require_scope(scopes.ORG_ADMIN)
|
||||
@nickname('deleteTeamMemberEmailInvite')
|
||||
def delete(self, orgname, teamname, email):
|
||||
""" Delete an invite of an email address to join a team. """
|
||||
permission = AdministerOrganizationPermission(orgname)
|
||||
if permission.can():
|
||||
team = None
|
||||
|
||||
# Find the team.
|
||||
try:
|
||||
team = model.get_organization_team(orgname, teamname)
|
||||
except model.InvalidTeamException:
|
||||
raise NotFound()
|
||||
|
||||
# Delete the invite.
|
||||
model.delete_team_email_invite(team, email)
|
||||
log_action('org_delete_team_member_invite', orgname, {
|
||||
'email': email,
|
||||
'team': teamname,
|
||||
'member': email
|
||||
})
|
||||
return 'Deleted', 204
|
||||
|
||||
raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/teaminvite/<code>')
|
||||
@internal_only
|
||||
class TeamMemberInvite(ApiResource):
|
||||
""" Resource for managing invites to jon a team. """
|
||||
@require_user_admin
|
||||
@nickname('acceptOrganizationTeamInvite')
|
||||
def put(self, code):
|
||||
""" Accepts an invite to join a team in an organization. """
|
||||
# Accept the invite for the current user.
|
||||
team = try_accept_invite(code, get_authenticated_user())
|
||||
if not team:
|
||||
raise NotFound()
|
||||
|
||||
orgname = team.organization.username
|
||||
return {
|
||||
'org': orgname,
|
||||
'team': team.name
|
||||
}
|
||||
|
||||
@nickname('declineOrganizationTeamInvite')
|
||||
@require_user_admin
|
||||
def delete(self, code):
|
||||
""" Delete an existing member of a team. """
|
||||
(team, inviter) = model.delete_team_invite(code, get_authenticated_user())
|
||||
|
||||
model.delete_matching_notifications(get_authenticated_user(), 'org_team_invite', code=code)
|
||||
|
||||
orgname = team.organization.username
|
||||
log_action('org_team_member_invite_declined', orgname, {
|
||||
'member': get_authenticated_user().username,
|
||||
'team': team.name,
|
||||
'inviter': inviter.username
|
||||
})
|
||||
|
||||
return 'Deleted', 204
|
||||
|
|
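With the TeamMemberList change above, pending invites can be returned alongside real members when the caller has admin rights on the organization. A sketch of what a client might see (the member-list URL is an assumption, since its @resource decorator is outside this excerpt, and the org/team names are illustrative):

import requests

resp = requests.get('https://quay.example.com/api/v1/organization/buynlarge/team/owners/members',
                    params={'includePending': 'true'})
data = resp.json()

for member in data['members']:
  if member.get('kind') == 'invite':
    print('pending email invite: %s' % member['email'])
  elif member['invited']:
    print('invited user: %s' % member['name'])
  else:
    print('member: %s' % member['name'])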
|
@@ -12,6 +12,8 @@ from endpoints.api import (ApiResource, nickname, resource, validate_json_reques
                           license_error, require_fresh_login)
from endpoints.api.subscribe import subscribe
from endpoints.common import common_login
from endpoints.api.team import try_accept_invite

from data import model
from data.billing import get_plan
from auth.permissions import (AdministerOrganizationPermission, CreateRepositoryPermission,

@@ -20,6 +22,7 @@ from auth.auth_context import get_authenticated_user
from auth import scopes
from util.gravatar import compute_hash
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email, send_password_changed)
from util.names import parse_single_urn

import features

@@ -188,11 +191,15 @@ class User(ApiResource):
    return user_view(user)

  @nickname('createNewUser')
  @parse_args
  @query_param('inviteCode', 'Invitation code given for creating the user.', type=str,
               default='')
  @internal_only
  @validate_json_request('NewUser')
  def post(self):
  def post(self, args):
    """ Create a new user. """
    user_data = request.get_json()
    invite_code = args['inviteCode']

    existing_user = model.get_user(user_data['username'])
    if existing_user:

@@ -203,6 +210,17 @@ class User(ApiResource):
                                   user_data['email'])
      code = model.create_confirm_email_code(new_user)
      send_confirmation_email(new_user.username, new_user.email, code.code)

      # Handle any invite codes.
      parsed_invite = parse_single_urn(invite_code)
      if parsed_invite is not None:
        if parsed_invite[0] == 'teaminvite':
          # Add the user to the team.
          try:
            try_accept_invite(invite_code, new_user)
          except model.DataModelException:
            pass

      return 'Created', 201
    except model.TooManyUsersException as ex:
      raise license_error(exception=ex)

@@ -409,6 +427,19 @@ class Signout(ApiResource):
    return {'success': True}


@resource('/v1/detachexternal/<servicename>')
@internal_only
class DetachExternal(ApiResource):
  """ Resource for detaching an external login. """
  @require_user_admin
  @nickname('detachExternalLogin')
  def post(self, servicename):
    """ Request that the current user be detached from the external login service. """
    model.detach_external_login(get_authenticated_user(), servicename)
    return {'success': True}


@resource("/v1/recovery")
@internal_only
class Recovery(ApiResource):
|
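The createNewUser change above lets the signup call carry the team-invite code from the confirmation link, so the new account joins the team as soon as it is created. A rough request sketch (the endpoint path and body fields are assumptions inferred from the handler, and 'teaminvite:SOME-TOKEN' stands in for the real URN produced by urn_generator):

import requests

requests.post('https://quay.example.com/api/v1/user/',
              params={'inviteCode': 'teaminvite:SOME-TOKEN'},
              json={'username': 'newdeveloper',
                    'password': 'a-password',
                    'email': 'newdeveloper@example.com'})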
|
@@ -66,6 +66,9 @@ def generate_headers(role='read'):
@index.route('/users/', methods=['POST'])
def create_user():
  user_data = request.get_json()
  if not 'username' in user_data:
    abort(400, 'Missing username')

  username = user_data['username']
  password = user_data.get('password', '')
|
@@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers):

  extra_headers = {}

  # Add the Accept-Ranges header if the storage engine supports resumeable
  # Add the Accept-Ranges header if the storage engine supports resumable
  # downloads.
  if store.get_supports_resumeable_downloads(repo_image.storage.locations):
    profile.debug('Storage supports resumeable downloads')
  if store.get_supports_resumable_downloads(repo_image.storage.locations):
    profile.debug('Storage supports resumable downloads')
    extra_headers['Accept-Ranges'] = 'bytes'

  resp = make_response('')
|
@@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger):
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Seek to position 0 to make boto multipart happy
      tarball.seek(0)

      dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)

      logger.debug('Successfully prepared job')
|
@@ -33,8 +33,8 @@ STATUS_TAGS = app.config['STATUS_TAGS']
@web.route('/', methods=['GET'], defaults={'path': ''})
@web.route('/organization/<path:path>', methods=['GET'])
@no_cache
def index(path):
  return render_page_template('index.html')
def index(path, **kwargs):
  return render_page_template('index.html', **kwargs)


@web.route('/500', methods=['GET'])

@@ -102,7 +102,7 @@ def superuser():

@web.route('/signin/')
@no_cache
def signin():
def signin(redirect=None):
  return index('')


@@ -124,6 +124,13 @@ def new():
  return index('')


@web.route('/confirminvite')
@no_cache
def confirm_invite():
  code = request.values['code']
  return index('', code=code)


@web.route('/repository/', defaults={'path': ''})
@web.route('/repository/<path:path>', methods=['GET'])
@no_cache
|
|
@@ -214,7 +214,11 @@ def initialize_database():

  LogEntryKind.create(name='org_create_team')
  LogEntryKind.create(name='org_delete_team')
  LogEntryKind.create(name='org_invite_team_member')
  LogEntryKind.create(name='org_delete_team_member_invite')
  LogEntryKind.create(name='org_add_team_member')
  LogEntryKind.create(name='org_team_member_invite_accepted')
  LogEntryKind.create(name='org_team_member_invite_declined')
  LogEntryKind.create(name='org_remove_team_member')
  LogEntryKind.create(name='org_set_team_description')
  LogEntryKind.create(name='org_set_team_role')

@@ -269,6 +273,7 @@ def initialize_database():
  NotificationKind.create(name='over_private_usage')
  NotificationKind.create(name='expiring_license')
  NotificationKind.create(name='maintenance')
  NotificationKind.create(name='org_team_invite')

  NotificationKind.create(name='test_notification')

@@ -300,7 +305,7 @@ def populate_database():
  new_user_2.verified = True
  new_user_2.save()

  new_user_3 = model.create_user('freshuser', 'password', 'no@thanks.com')
  new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
  new_user_3.verified = True
  new_user_3.save()
|
|
@ -21,8 +21,7 @@
|
|||
|
||||
|
||||
#quay-logo {
|
||||
width: 80px;
|
||||
margin-right: 30px;
|
||||
width: 100px;
|
||||
}
|
||||
|
||||
#padding-container {
|
||||
|
@ -145,6 +144,15 @@ nav.navbar-default .navbar-nav>li>a.active {
|
|||
max-width: 320px;
|
||||
}
|
||||
|
||||
.notification-view-element .right-controls button {
|
||||
margin-left: 10px;
|
||||
}
|
||||
|
||||
.notification-view-element .message i.fa {
|
||||
margin-right: 6px;
|
||||
}
|
||||
|
||||
|
||||
.notification-view-element .orginfo {
|
||||
margin-top: 8px;
|
||||
float: left;
|
||||
|
@ -2559,7 +2567,7 @@ p.editable:hover i {
|
|||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.repo-build .build-log-error-element {
|
||||
.repo-build .build-log-error-element .error-message-container {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
margin: 10px;
|
||||
|
@ -2569,7 +2577,7 @@ p.editable:hover i {
|
|||
margin-left: 22px;
|
||||
}
|
||||
|
||||
.repo-build .build-log-error-element i.fa {
|
||||
.repo-build .build-log-error-element .error-message-container i.fa {
|
||||
color: red;
|
||||
position: absolute;
|
||||
top: 13px;
|
||||
|
@ -3559,6 +3567,12 @@ p.editable:hover i {
|
|||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.tt-message {
|
||||
padding: 10px;
|
||||
font-size: 12px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.tt-suggestion p {
|
||||
margin: 0;
|
||||
}
|
||||
|
@@ -4638,4 +4652,57 @@ i.slack-icon {

.external-notification-view-element:hover .side-controls button {
border: 1px solid #eee;
}

.member-listing {
width: 100%;
}

.member-listing .section-header {
color: #ccc;
margin-top: 20px;
margin-bottom: 10px;
}

.member-listing .gravatar {
vertical-align: middle;
margin-right: 10px;
}

.member-listing .entity-reference {
margin-bottom: 10px;
display: inline-block;
}

.member-listing .invite-listing {
margin-bottom: 10px;
display: inline-block;
}

.team-view .organization-header .popover {
max-width: none !important;
}

.team-view .organization-header .popover.bottom-right .arrow:after {
border-bottom-color: #f7f7f7;
top: 2px;
}

.team-view .organization-header .popover-content {
font-size: 14px;
padding-top: 6px;
}

.team-view .organization-header .popover-content input {
background: white;
}

.team-view .team-view-add-element .help-text {
font-size: 13px;
color: #ccc;
margin-top: 10px;
}

.team-view .organization-header .popover-content {
min-width: 500px;
}

@@ -1,4 +1,23 @@
<span bindonce class="build-log-error-element">
<i class="fa fa-exclamation-triangle"></i>
<span class="error-message" bo-text="error.message"></span>
</span>
<div bindonce class="build-log-error-element">
<span class="error-message-container">
<i class="fa fa-exclamation-triangle"></i>
<span class="error-message" bo-text="error.message"></span>
<span ng-if="error.message == 'HTTP code: 403' && getLocalPullInfo().isLocal">
caused by attempting to pull private repository <a href="/repository/{{ getLocalPullInfo().repo }}">{{ getLocalPullInfo().repo }}</a>
<span ng-if="getLocalPullInfo().login">with inaccessible credentials</span>
<span ng-if="!getLocalPullInfo().login">without credentials</span>
</span>
</span>

<div class="alert alert-danger" ng-if="error.message == 'HTTP code: 403' && getLocalPullInfo().isLocal">
<div ng-if="getLocalPullInfo().login">
Note: The credentials <b>{{ getLocalPullInfo().login.username }}</b> for registry <b>{{ getLocalPullInfo().login.registry }}</b> cannot
access repository <a href="/repository/{{ getLocalPullInfo().repo }}">{{ getLocalPullInfo().repo }}</a>.
</div>
<div ng-if="!getLocalPullInfo().login">
Note: No robot account is specified for this build. Without such credentials, this pull will always fail. Please set up a new
build trigger with a robot account that has access to <a href="/repository/{{ getLocalPullInfo().repo }}">{{ getLocalPullInfo().repo }}</a> or make that repository public.
</div>
</div>
</div>

@@ -2,7 +2,7 @@
<div class="current-item">
<div class="dropdown-select-icon-transclude"></div>
<input type="text" class="lookahead-input form-control" placeholder="{{ placeholder }}"
ng-readonly="!lookaheadItems || !lookaheadItems.length"></input>
ng-readonly="!allowCustomInput"></input>
</div>
<div class="dropdown">
<button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">

@@ -7,15 +7,19 @@
</span>
</span>
<span ng-if="entity.kind == 'org'">
<img src="//www.gravatar.com/avatar/{{ entity.gravatar }}?s=16&d=identicon">
<img ng-src="//www.gravatar.com/avatar/{{ entity.gravatar }}?s={{ gravatarSize || '16' }}&d=identicon">
<span class="entity-name">
<span ng-if="!getIsAdmin(entity.name)">{{entity.name}}</span>
<span ng-if="getIsAdmin(entity.name)"><a href="/organization/{{ entity.name }}">{{entity.name}}</a></span>
</span>
</span>
<span ng-if="entity.kind != 'team' && entity.kind != 'org'">
<i class="fa fa-user" ng-show="!entity.is_robot" data-title="User" bs-tooltip="tooltip.title" data-container="body"></i>
<i class="fa fa-wrench" ng-show="entity.is_robot" data-title="Robot Account" bs-tooltip="tooltip.title" data-container="body"></i>
<img class="gravatar" ng-if="showGravatar == 'true' && entity.gravatar" ng-src="//www.gravatar.com/avatar/{{ entity.gravatar }}?s={{ gravatarSize || '16' }}&d=identicon">
<span ng-if="showGravatar != 'true' || !entity.gravatar">
<i class="fa fa-user" ng-show="!entity.is_robot" data-title="User" bs-tooltip="tooltip.title" data-container="body"></i>
<i class="fa fa-wrench" ng-show="entity.is_robot" data-title="Robot Account" bs-tooltip="tooltip.title" data-container="body"></i>
</span>

<span class="entity-name" ng-if="entity.is_robot">
<a href="{{ getRobotUrl(entity.name) }}" ng-if="getIsAdmin(getPrefix(entity.name))">
<span class="prefix">{{ getPrefix(entity.name) }}+</span><span>{{ getShortenedName(entity.name) }}</span>

@@ -5,7 +5,7 @@
ng-click="lazyLoad()">
<span class="caret"></span>
</button>
<ul class="dropdown-menu" role="menu" aria-labelledby="entityDropdownMenu">
<ul class="dropdown-menu" ng-class="pullRight == 'true' ? 'pull-right': ''" role="menu" aria-labelledby="entityDropdownMenu">
<li ng-show="lazyLoading" style="padding: 10px"><div class="quay-spinner"></div></li>

<li role="presentation" class="dropdown-header" ng-show="!lazyLoading && !robots && !isAdmin && !teams">

@@ -1,6 +1,6 @@
<span class="external-login-button-element">
<span ng-if="provider == 'github'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px" ng-disabled="signingIn">
<i class="fa fa-github fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with GitHub</span>
<span ng-if="action == 'attach'">Attach to GitHub Account</span>

@@ -8,7 +8,7 @@
</span>

<span ng-if="provider == 'google'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GOOGLE_LOGIN']" ng-click="startSignin('google')">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GOOGLE_LOGIN']" ng-click="startSignin('google')" ng-disabled="signingIn">
<i class="fa fa-google fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with Google</span>
<span ng-if="action == 'attach'">Attach to Google Account</span>

@@ -4,7 +4,7 @@
≡
</button>
<a class="navbar-brand" href="/" target="{{ appLinkTarget() }}">
<img id="quay-logo" src="/static/img/black-horizontal.svg">
<img id="quay-logo" src="/static/img/quay-logo.png">
</a>
</div>

@@ -7,10 +7,13 @@
<span class="orgname">{{ notification.organization }}</span>
</div>
</div>
<div class="datetime">{{ parseDate(notification.created) | date:'medium'}}</div>
<div class="right-controls">
<a href="javascript:void(0)" ng-if="canDismiss(notification)" ng-click="dismissNotification(notification)">
Dismiss Notification
</a>
<button class="btn" ng-class="'btn-' + action.kind" ng-repeat="action in getActions(notification)" ng-click="action.handler(notification)">
{{ action.title }}
</button>
</div>
<div class="datetime">{{ parseDate(notification.created) | date:'medium'}}</div>
</div>

@@ -1,5 +1,6 @@
<div class="signin-form-element">
<form class="form-signin" ng-submit="signin();">
<span class="quay-spinner" ng-show="signingIn"></span>
<form class="form-signin" ng-submit="signin();" ng-show="!signingIn">
<input type="text" class="form-control input-lg" name="username"
placeholder="Username or E-mail Address" ng-model="user.username" autofocus>
<input type="password" class="form-control input-lg" name="password"

@@ -1,5 +1,5 @@
<div class="signup-form-element">
<form class="form-signup" name="signupForm" ng-submit="register()" ngshow="!awaitingConfirmation && !registering">
<form class="form-signup" name="signupForm" ng-submit="register()" ng-show="!awaitingConfirmation && !registering">
<input type="text" class="form-control" placeholder="Create a username" name="username" ng-model="newUser.username" autofocus required ng-pattern="/^[a-z0-9_]{4,30}$/">
<input type="email" class="form-control" placeholder="Email address" ng-model="newUser.email" required>
<input type="password" class="form-control" placeholder="Create a password" ng-model="newUser.password" required

17
static/directives/team-view-add.html
Normal file
@@ -0,0 +1,17 @@
<div class="team-view-add-element" focusable-popover-content>
<div class="entity-search"
namespace="orgname" placeholder="'Add a registered user or robot...'"
entity-selected="addNewMember(entity)"
email-selected="inviteEmail(email)"
current-entity="selectedMember"
auto-clear="true"
allowed-entities="['user', 'robot']"
pull-right="true"
allow-emails="true"
email-message="Press enter to invite the entered e-mail address to this team"
ng-show="!addingMember"></div>
<div class="quay-spinner" ng-show="addingMember"></div>
<div class="help-text" ng-show="!addingMember">
Search by Quay.io username or robot account name
</div>
</div>

@@ -29,7 +29,8 @@
<div class="slideinout" ng-show="currentRepo">
<div style="margin-top: 10px">Dockerfile Location:</div>
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)">
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)"
allow-custom-input="true">
<!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i>

@@ -24,7 +24,7 @@
</div>
<div id="collapseRegister" class="panel-collapse collapse" ng-class="hasSignedIn() ? 'out' : 'in'">
<div class="panel-body">
<div class="signup-form"></div>
<div class="signup-form" user-registered="handleUserRegistered(username)" invite-code="inviteCode"></div>
</div>
</div>
</div>

344
static/js/app.js
|
@ -153,6 +153,14 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
this.currentIndex_ = 0;
|
||||
}
|
||||
|
||||
_ViewArray.prototype.length = function() {
|
||||
return this.entries.length;
|
||||
};
|
||||
|
||||
_ViewArray.prototype.get = function(index) {
|
||||
return this.entries[index];
|
||||
};
|
||||
|
||||
_ViewArray.prototype.push = function(elem) {
|
||||
this.entries.push(elem);
|
||||
this.hasEntries = true;
|
||||
|
@ -215,6 +223,78 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
return service;
|
||||
}]);
|
||||
|
||||
/**
|
||||
* Specialized class for conducting an HTTP poll, while properly preventing multiple calls.
|
||||
*/
|
||||
$provide.factory('AngularPollChannel', ['ApiService', '$timeout', function(ApiService, $timeout) {
|
||||
var _PollChannel = function(scope, requester, opt_sleeptime) {
|
||||
this.scope_ = scope;
|
||||
this.requester_ = requester;
|
||||
this.sleeptime_ = opt_sleeptime || (60 * 1000 /* 60s */);
|
||||
this.timer_ = null;
|
||||
|
||||
this.working = false;
|
||||
this.polling = false;
|
||||
|
||||
var that = this;
|
||||
scope.$on('$destroy', function() {
|
||||
that.stop();
|
||||
});
|
||||
};
|
||||
|
||||
_PollChannel.prototype.stop = function() {
|
||||
if (this.timer_) {
|
||||
$timeout.cancel(this.timer_);
|
||||
this.timer_ = null;
|
||||
this.polling = false;
|
||||
}
|
||||
|
||||
this.working = false;
|
||||
};
|
||||
|
||||
_PollChannel.prototype.start = function() {
|
||||
// Make sure we invoke call outside the normal digest cycle, since
|
||||
// we'll call $scope.$apply ourselves.
|
||||
var that = this;
|
||||
setTimeout(function() { that.call_(); }, 0);
|
||||
};
|
||||
|
||||
_PollChannel.prototype.call_ = function() {
|
||||
if (this.working) { return; }
|
||||
|
||||
var that = this;
|
||||
this.working = true;
|
||||
this.scope_.$apply(function() {
|
||||
that.requester_(function(status) {
|
||||
if (status) {
|
||||
that.working = false;
|
||||
that.setupTimer_();
|
||||
} else {
|
||||
that.stop();
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
_PollChannel.prototype.setupTimer_ = function() {
|
||||
if (this.timer_) { return; }
|
||||
|
||||
var that = this;
|
||||
this.polling = true;
|
||||
this.timer_ = $timeout(function() {
|
||||
that.timer_ = null;
|
||||
that.call_();
|
||||
}, this.sleeptime_)
|
||||
};
|
||||
|
||||
var service = {
|
||||
'create': function(scope, requester, opt_sleeptime) {
|
||||
return new _PollChannel(scope, requester, opt_sleeptime);
|
||||
}
|
||||
};
|
||||
|
||||
return service;
|
||||
}]);
|
||||
|
||||
$provide.factory('DataFileService', [function() {
|
||||
var dataFileService = {};
|
||||
|
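(A minimal usage sketch of the AngularPollChannel factory above, for orientation only. The controller name and the ApiService call are placeholder assumptions; only create(), start(), stop() and the boolean "keep polling" callback come from the factory itself.)

// Hypothetical controller that polls a status endpoint every 10 seconds until it finishes.
function ExampleStatusCtrl($scope, ApiService, AngularPollChannel) {
  var pollStatus = function(callback) {
    // callback(true) schedules another poll after the sleep interval; callback(false) closes the channel.
    ApiService.getSomeStatus(/* placeholder API method */).then(function(resp) {
      $scope.status = resp;
      callback(resp.phase != 'complete' && resp.phase != 'error');
    }, function() {
      callback(false);
    });
  };

  // The channel stops itself when the scope is destroyed, so no manual cleanup is needed here.
  $scope.pollChannel = AngularPollChannel.create($scope, pollStatus, 10 * 1000 /* 10s */);
  $scope.pollChannel.start();
}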
@ -419,6 +499,11 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
$provide.factory('UtilService', ['$sanitize', function($sanitize) {
|
||||
var utilService = {};
|
||||
|
||||
utilService.isEmailAddress = function(val) {
|
||||
var emailRegex = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$/;
|
||||
return emailRegex.test(val);
|
||||
};
|
||||
|
||||
utilService.escapeHtmlString = function(text) {
|
||||
var adjusted = text.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
|
||||
|
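(A quick illustration of the UtilService.isEmailAddress helper above; the sample inputs are invented, and the results follow directly from the regular expression.)

UtilService.isEmailAddress('jane.doe+test@example.com'); // true: dots and plus-addressing are accepted
UtilService.isEmailAddress('someuser');                  // false: no @domain part
UtilService.isEmailAddress('myorg+builder');             // false: robot account names are not e-mail addresses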
@ -476,7 +561,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
// If an error occurred, report it and done.
|
||||
if (ping < 0) {
|
||||
cached['pings'] = [-1];
|
||||
invokeCallback($scope, pings, callback);
|
||||
invokeCallback($scope, [-1], callback);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -595,7 +680,10 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
|
||||
stringBuilderService.buildString = function(value_or_func, metadata) {
|
||||
var fieldIcons = {
|
||||
'inviter': 'user',
|
||||
'username': 'user',
|
||||
'user': 'user',
|
||||
'email': 'envelope',
|
||||
'activating_username': 'user',
|
||||
'delegate_user': 'user',
|
||||
'delegate_team': 'group',
|
||||
|
@ -796,6 +884,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
// We already have /api/v1/ on the URLs, so remove them from the paths.
|
||||
path = path.substr('/api/v1/'.length, path.length);
|
||||
|
||||
// Build the path, adjusted with the inline parameters.
|
||||
var used = {};
|
||||
var url = '';
|
||||
for (var i = 0; i < path.length; ++i) {
|
||||
var c = path[i];
|
||||
|
@ -807,6 +897,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
throw new Error('Missing parameter: ' + varName);
|
||||
}
|
||||
|
||||
used[varName] = true;
|
||||
url += parameters[varName];
|
||||
i = end;
|
||||
continue;
|
||||
|
@ -815,6 +906,20 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
url += c;
|
||||
}
|
||||
|
||||
// Append any query parameters.
|
||||
var isFirst = true;
|
||||
for (var paramName in parameters) {
|
||||
if (!parameters.hasOwnProperty(paramName)) { continue; }
|
||||
if (used[paramName]) { continue; }
|
||||
|
||||
var value = parameters[paramName];
|
||||
if (value) {
|
||||
url += isFirst ? '?' : '&';
|
||||
url += paramName + '=' + encodeURIComponent(value)
|
||||
isFirst = false;
|
||||
}
|
||||
}
|
||||
|
||||
return url;
|
||||
};
|
||||
|
||||
|
@ -1280,7 +1385,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
'name': 'token',
|
||||
'type': 'string',
|
||||
'title': 'Token',
|
||||
'help_url': 'https://{subdomain}.slack.com/services/new/outgoing-webhook'
|
||||
'help_url': 'https://{subdomain}.slack.com/services/new/incoming-webhook'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -1316,8 +1421,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
return externalNotificationData;
|
||||
}]);
|
||||
|
||||
$provide.factory('NotificationService', ['$rootScope', '$interval', 'UserService', 'ApiService', 'StringBuilderService', 'PlanService', 'UserService', 'Config',
|
||||
function($rootScope, $interval, UserService, ApiService, StringBuilderService, PlanService, UserService, Config) {
|
||||
$provide.factory('NotificationService', ['$rootScope', '$interval', 'UserService', 'ApiService', 'StringBuilderService', 'PlanService', 'UserService', 'Config', '$location',
|
||||
function($rootScope, $interval, UserService, ApiService, StringBuilderService, PlanService, UserService, Config, $location) {
|
||||
var notificationService = {
|
||||
'user': null,
|
||||
'notifications': [],
|
||||
|
@ -1335,6 +1440,28 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
'page': '/about/',
|
||||
'dismissable': true
|
||||
},
|
||||
'org_team_invite': {
|
||||
'level': 'primary',
|
||||
'message': '{inviter} is inviting you to join team {team} under organization {org}',
|
||||
'actions': [
|
||||
{
|
||||
'title': 'Join team',
|
||||
'kind': 'primary',
|
||||
'handler': function(notification) {
|
||||
window.location = '/confirminvite?code=' + notification.metadata['code'];
|
||||
}
|
||||
},
|
||||
{
|
||||
'title': 'Decline',
|
||||
'kind': 'default',
|
||||
'handler': function(notification) {
|
||||
ApiService.declineOrganizationTeamInvite(null, {'code': notification.metadata['code']}).then(function() {
|
||||
notificationService.update();
|
||||
});
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
'password_required': {
|
||||
'level': 'error',
|
||||
'message': 'In order to begin pushing and pulling repositories, a password must be set for your account',
|
||||
|
@ -1429,6 +1556,15 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
}
|
||||
};
|
||||
|
||||
notificationService.getActions = function(notification) {
|
||||
var kindInfo = notificationKinds[notification['kind']];
|
||||
if (!kindInfo) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return kindInfo['actions'] || [];
|
||||
};
|
||||
|
||||
notificationService.canDismiss = function(notification) {
|
||||
var kindInfo = notificationKinds[notification['kind']];
|
||||
if (!kindInfo) {
|
||||
|
@ -1438,7 +1574,12 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
};
|
||||
|
||||
notificationService.getPage = function(notification) {
|
||||
var page = notificationKinds[notification['kind']]['page'];
|
||||
var kindInfo = notificationKinds[notification['kind']];
|
||||
if (!kindInfo) {
|
||||
return null;
|
||||
}
|
||||
|
||||
var page = kindInfo['page'];
|
||||
if (typeof page != 'string') {
|
||||
page = page(notification['metadata']);
|
||||
}
|
||||
|
@ -1964,7 +2105,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
templateUrl: '/static/partials/plans.html', controller: PlansCtrl}).
|
||||
when('/security/', {title: 'Security', description: 'Security features used when transmitting and storing data',
|
||||
templateUrl: '/static/partials/security.html'}).
|
||||
when('/signin/', {title: 'Sign In', description: 'Sign into ' + title, templateUrl: '/static/partials/signin.html'}).
|
||||
when('/signin/', {title: 'Sign In', description: 'Sign into ' + title, templateUrl: '/static/partials/signin.html', controller: SignInCtrl, reloadOnSearch: false}).
|
||||
when('/new/', {title: 'Create new repository', description: 'Create a new public or private docker repository, optionally constructing from a dockerfile',
|
||||
templateUrl: '/static/partials/new-repo.html', controller: NewRepoCtrl}).
|
||||
when('/organizations/', {title: 'Organizations', description: 'Private docker repository hosting for businesses and organizations',
|
||||
|
@ -1985,6 +2126,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
|
|||
when('/tour/features', {title: title + ' Features', templateUrl: '/static/partials/tour.html', controller: TourCtrl}).
|
||||
when('/tour/enterprise', {title: 'Enterprise Edition', templateUrl: '/static/partials/tour.html', controller: TourCtrl}).
|
||||
|
||||
when('/confirminvite', {title: 'Confirm Invite', templateUrl: '/static/partials/confirm-invite.html', controller: ConfirmInviteCtrl, reloadOnSearch: false}).
|
||||
|
||||
when('/', {title: 'Hosted Private Docker Registry', templateUrl: '/static/partials/landing.html', controller: LandingCtrl,
|
||||
pageClass: 'landing-page'}).
|
||||
otherwise({redirectTo: '/'});
|
||||
|
@ -2073,6 +2216,19 @@ quayApp.directive('quayShow', function($animate, Features, Config) {
|
|||
});
|
||||
|
||||
|
||||
quayApp.directive('ngIfMedia', function ($animate) {
|
||||
return {
|
||||
transclude: 'element',
|
||||
priority: 600,
|
||||
terminal: true,
|
||||
restrict: 'A',
|
||||
link: buildConditionalLinker($animate, 'ngIfMedia', function(value) {
|
||||
return window.matchMedia(value).matches;
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
|
||||
quayApp.directive('quaySection', function($animate, $location, $rootScope) {
|
||||
return {
|
||||
priority: 590,
|
||||
|
@ -2206,7 +2362,9 @@ quayApp.directive('entityReference', function () {
|
|||
restrict: 'C',
|
||||
scope: {
|
||||
'entity': '=entity',
|
||||
'namespace': '=namespace'
|
||||
'namespace': '=namespace',
|
||||
'showGravatar': '@showGravatar',
|
||||
'gravatarSize': '@gravatarSize'
|
||||
},
|
||||
controller: function($scope, $element, UserService, UtilService) {
|
||||
$scope.getIsAdmin = function(namespace) {
|
||||
|
@ -2343,6 +2501,36 @@ quayApp.directive('repoBreadcrumb', function () {
|
|||
return directiveDefinitionObject;
|
||||
});
|
||||
|
||||
quayApp.directive('focusablePopoverContent', ['$timeout', '$popover', function ($timeout, $popover) {
|
||||
return {
|
||||
restrict: "A",
|
||||
link: function (scope, element, attrs) {
|
||||
$body = $('body');
|
||||
var hide = function() {
|
||||
$body.off('click');
|
||||
scope.$apply(function() {
|
||||
scope.$hide();
|
||||
});
|
||||
};
|
||||
|
||||
scope.$on('$destroy', function() {
|
||||
$body.off('click');
|
||||
});
|
||||
|
||||
$timeout(function() {
|
||||
$body.on('click', function(evt) {
|
||||
var target = evt.target;
|
||||
var isPanelMember = $(element).has(target).length > 0 || target == element;
|
||||
if (!isPanelMember) {
|
||||
hide();
|
||||
}
|
||||
});
|
||||
|
||||
$(element).find('input').focus();
|
||||
}, 100);
|
||||
}
|
||||
};
|
||||
}]);
|
||||
|
||||
quayApp.directive('repoCircle', function () {
|
||||
var directiveDefinitionObject = {
|
||||
|
@ -2401,8 +2589,12 @@ quayApp.directive('userSetup', function () {
|
|||
restrict: 'C',
|
||||
scope: {
|
||||
'redirectUrl': '=redirectUrl',
|
||||
|
||||
'inviteCode': '=inviteCode',
|
||||
|
||||
'signInStarted': '&signInStarted',
|
||||
'signedIn': '&signedIn'
|
||||
'signedIn': '&signedIn',
|
||||
'userRegistered': '&userRegistered'
|
||||
},
|
||||
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService) {
|
||||
$scope.sendRecovery = function() {
|
||||
|
@ -2417,6 +2609,10 @@ quayApp.directive('userSetup', function () {
|
|||
});
|
||||
};
|
||||
|
||||
$scope.handleUserRegistered = function(username) {
|
||||
$scope.userRegistered({'username': username});
|
||||
};
|
||||
|
||||
$scope.hasSignedIn = function() {
|
||||
return UserService.hasEverLoggedIn();
|
||||
};
|
||||
|
@ -2440,6 +2636,7 @@ quayApp.directive('externalLoginButton', function () {
|
|||
'action': '@action'
|
||||
},
|
||||
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) {
|
||||
$scope.signingIn = false;
|
||||
$scope.startSignin = function(service) {
|
||||
$scope.signInStarted({'service': service});
|
||||
|
||||
|
@ -2451,6 +2648,7 @@ quayApp.directive('externalLoginButton', function () {
|
|||
|
||||
// Needed to ensure that UI work done by the started callback is finished before the location
|
||||
// changes.
|
||||
$scope.signingIn = true;
|
||||
$timeout(function() {
|
||||
document.location = url;
|
||||
}, 250);
|
||||
|
@ -2476,8 +2674,10 @@ quayApp.directive('signinForm', function () {
|
|||
controller: function($scope, $location, $timeout, $interval, ApiService, KeyService, UserService, CookieService, Features, Config) {
|
||||
$scope.tryAgainSoon = 0;
|
||||
$scope.tryAgainInterval = null;
|
||||
$scope.signingIn = false;
|
||||
|
||||
$scope.markStarted = function() {
|
||||
$scope.signingIn = true;
|
||||
if ($scope.signInStarted != null) {
|
||||
$scope.signInStarted();
|
||||
}
|
||||
|
@ -2508,25 +2708,30 @@ quayApp.directive('signinForm', function () {
|
|||
$scope.cancelInterval();
|
||||
|
||||
ApiService.signinUser($scope.user).then(function() {
|
||||
$scope.signingIn = false;
|
||||
$scope.needsEmailVerification = false;
|
||||
$scope.invalidCredentials = false;
|
||||
|
||||
if ($scope.signedIn != null) {
|
||||
$scope.signedIn();
|
||||
}
|
||||
|
||||
|
||||
// Load the newly created user.
|
||||
UserService.load();
|
||||
|
||||
// Redirect to the specified page or the landing page
|
||||
// Note: The timeout of 500ms is needed to ensure dialogs containing sign in
|
||||
// forms get removed before the location changes.
|
||||
$timeout(function() {
|
||||
if ($scope.redirectUrl == $location.path()) {
|
||||
return;
|
||||
}
|
||||
$location.path($scope.redirectUrl ? $scope.redirectUrl : '/');
|
||||
var redirectUrl = $scope.redirectUrl;
|
||||
if (redirectUrl == $location.path() || redirectUrl == null) {
|
||||
return;
|
||||
}
|
||||
window.location = (redirectUrl ? redirectUrl : '/');
|
||||
}, 500);
|
||||
}, function(result) {
|
||||
$scope.signingIn = false;
|
||||
|
||||
if (result.status == 429 /* try again later */) {
|
||||
$scope.needsEmailVerification = false;
|
||||
$scope.invalidCredentials = false;
|
||||
|
@ -2560,7 +2765,9 @@ quayApp.directive('signupForm', function () {
|
|||
transclude: true,
|
||||
restrict: 'C',
|
||||
scope: {
|
||||
'inviteCode': '=inviteCode',
|
||||
|
||||
'userRegistered': '&userRegistered'
|
||||
},
|
||||
controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService) {
|
||||
$('.form-signup').popover();
|
||||
|
@ -2572,6 +2779,10 @@ quayApp.directive('signupForm', function () {
|
|||
UIService.hidePopover('#signupButton');
|
||||
$scope.registering = true;
|
||||
|
||||
if ($scope.inviteCode) {
|
||||
$scope.newUser['inviteCode'] = $scope.inviteCode;
|
||||
}
|
||||
|
||||
ApiService.createNewUser($scope.newUser).then(function() {
|
||||
$scope.registering = false;
|
||||
$scope.awaitingConfirmation = true;
|
||||
|
@ -2579,6 +2790,8 @@ quayApp.directive('signupForm', function () {
|
|||
if (Config.MIXPANEL_KEY) {
|
||||
mixpanel.alias($scope.newUser.username);
|
||||
}
|
||||
|
||||
$scope.userRegistered({'username': $scope.newUser.username});
|
||||
}, function(result) {
|
||||
$scope.registering = false;
|
||||
UIService.showFormError('#signupButton', result);
|
||||
|
@ -2696,7 +2909,7 @@ quayApp.directive('dockerAuthDialog', function (Config) {
|
|||
$scope.downloadCfg = function() {
|
||||
var auth = $.base64.encode($scope.username + ":" + $scope.token);
|
||||
config = {}
|
||||
config[Config.getUrl('/v1/')] = {
|
||||
config[Config['SERVER_HOSTNAME']] = {
|
||||
"auth": auth,
|
||||
"email": ""
|
||||
};
|
||||
|
@ -2900,6 +3113,24 @@ quayApp.directive('logsView', function () {
|
|||
'org_delete_team': 'Delete team: {team}',
|
||||
'org_add_team_member': 'Add member {member} to team {team}',
|
||||
'org_remove_team_member': 'Remove member {member} from team {team}',
|
||||
'org_invite_team_member': function(metadata) {
|
||||
if (metadata.user) {
|
||||
return 'Invite {user} to team {team}';
|
||||
} else {
|
||||
return 'Invite {email} to team {team}';
|
||||
}
|
||||
},
|
||||
'org_delete_team_member_invite': function(metadata) {
|
||||
if (metadata.user) {
|
||||
return 'Rescind invite of {user} to team {team}';
|
||||
} else {
|
||||
return 'Rescind invite of {email} to team {team}';
|
||||
}
|
||||
},
|
||||
|
||||
'org_team_member_invite_accepted': 'User {member}, invited by {inviter}, joined team {team}',
|
||||
'org_team_member_invite_declined': 'User {member}, invited by {inviter}, declined to join team {team}',
|
||||
|
||||
'org_set_team_description': 'Change description of team {team}: {description}',
|
||||
'org_set_team_role': 'Change permission of team {team} to {role}',
|
||||
'create_prototype_permission': function(metadata) {
|
||||
|
@ -2980,7 +3211,11 @@ quayApp.directive('logsView', function () {
|
|||
'org_create_team': 'Create team',
|
||||
'org_delete_team': 'Delete team',
|
||||
'org_add_team_member': 'Add team member',
|
||||
'org_invite_team_member': 'Invite team member',
|
||||
'org_delete_team_member_invite': 'Rescind team member invitation',
|
||||
'org_remove_team_member': 'Remove team member',
|
||||
'org_team_member_invite_accepted': 'Team invite accepted',
|
||||
'org_team_member_invite_declined': 'Team invite declined',
|
||||
'org_set_team_description': 'Change team description',
|
||||
'org_set_team_role': 'Change team permission',
|
||||
'create_prototype_permission': 'Create default permission',
|
||||
|
@ -3689,7 +3924,9 @@ quayApp.directive('entitySearch', function () {
|
|||
'allowedEntities': '=allowedEntities',
|
||||
|
||||
'currentEntity': '=currentEntity',
|
||||
|
||||
'entitySelected': '&entitySelected',
|
||||
'emailSelected': '&emailSelected',
|
||||
|
||||
// When set to true, the contents of the control will be cleared as soon
|
||||
// as an entity is selected.
|
||||
|
@ -3697,8 +3934,15 @@ quayApp.directive('entitySearch', function () {
|
|||
|
||||
// Set this property to immediately clear the contents of the control.
|
||||
'clearValue': '=clearValue',
|
||||
|
||||
// Whether e-mail addresses are allowed.
|
||||
'allowEmails': '@allowEmails',
|
||||
'emailMessage': '@emailMessage',
|
||||
|
||||
// True if the menu should pull right.
|
||||
'pullRight': '@pullRight'
|
||||
},
|
||||
controller: function($rootScope, $scope, $element, Restangular, UserService, ApiService, Config) {
|
||||
controller: function($rootScope, $scope, $element, Restangular, UserService, ApiService, UtilService, Config) {
|
||||
$scope.lazyLoading = true;
|
||||
|
||||
$scope.teams = null;
|
||||
|
@ -3895,8 +4139,12 @@ quayApp.directive('entitySearch', function () {
|
|||
return null;
|
||||
}
|
||||
|
||||
if (val.indexOf('@') > 0) {
|
||||
return '<div class="tt-empty">A ' + Config.REGISTRY_TITLE_SHORT + ' username (not an e-mail address) must be specified</div>';
|
||||
if (UtilService.isEmailAddress(val)) {
|
||||
if ($scope.allowEmails) {
|
||||
return '<div class="tt-message">' + $scope.emailMessage + '</div>';
|
||||
} else {
|
||||
return '<div class="tt-empty">A ' + Config.REGISTRY_TITLE_SHORT + ' username (not an e-mail address) must be specified</div>';
|
||||
}
|
||||
}
|
||||
|
||||
var classes = [];
|
||||
|
@ -3952,6 +4200,16 @@ quayApp.directive('entitySearch', function () {
|
|||
}}
|
||||
});
|
||||
|
||||
$(input).on('keypress', function(e) {
|
||||
var val = $(input).val();
|
||||
var code = e.keyCode || e.which;
|
||||
if (code == 13 && $scope.allowEmails && UtilService.isEmailAddress(val)) {
|
||||
$scope.$apply(function() {
|
||||
$scope.emailSelected({'email': val});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
$(input).on('input', function(e) {
|
||||
$scope.$apply(function() {
|
||||
$scope.clearEntityInternal();
|
||||
|
@ -4368,9 +4626,48 @@ quayApp.directive('buildLogError', function () {
|
|||
transclude: false,
|
||||
restrict: 'C',
|
||||
scope: {
|
||||
'error': '=error'
|
||||
'error': '=error',
|
||||
'entries': '=entries'
|
||||
},
|
||||
controller: function($scope, $element) {
|
||||
controller: function($scope, $element, Config) {
|
||||
$scope.getLocalPullInfo = function() {
|
||||
if ($scope.entries.__localpull !== undefined) {
|
||||
return $scope.entries.__localpull;
|
||||
}
|
||||
|
||||
var localInfo = {
|
||||
'isLocal': false
|
||||
};
|
||||
|
||||
// Find the 'pulling' phase entry, and then extract any metadata found under
// it.
|
||||
for (var i = 0; i < $scope.entries.length; ++i) {
|
||||
var entry = $scope.entries[i];
|
||||
if (entry.type == 'phase' && entry.message == 'pulling') {
|
||||
for (var j = 0; j < entry.logs.length(); ++j) {
|
||||
var log = entry.logs.get(j);
|
||||
if (log.data && log.data.phasestep == 'login') {
|
||||
localInfo['login'] = log.data;
|
||||
}
|
||||
|
||||
if (log.data && log.data.phasestep == 'pull') {
|
||||
var repo_url = log.data['repo_url'];
|
||||
var repo_and_tag = repo_url.substring(Config.SERVER_HOSTNAME.length + 1);
|
||||
var tagIndex = repo_and_tag.lastIndexOf(':');
|
||||
var repo = repo_and_tag.substring(0, tagIndex);
|
||||
|
||||
localInfo['repo_url'] = repo_url;
|
||||
localInfo['repo'] = repo;
|
||||
|
||||
localInfo['isLocal'] = repo_url.indexOf(Config.SERVER_HOSTNAME + '/') == 0;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return $scope.entries.__localpull = localInfo;
|
||||
};
|
||||
}
|
||||
};
|
||||
return directiveDefinitionObject;
|
||||
|
@ -4406,6 +4703,9 @@ quayApp.directive('dropdownSelect', function ($compile) {
|
|||
'selectedItem': '=selectedItem',
|
||||
'placeholder': '=placeholder',
|
||||
'lookaheadItems': '=lookaheadItems',
|
||||
|
||||
'allowCustomInput': '@allowCustomInput',
|
||||
|
||||
'handleItemSelected': '&handleItemSelected',
|
||||
'handleInput': '&handleInput',
|
||||
|
||||
|
@ -5386,6 +5686,10 @@ quayApp.directive('notificationView', function () {
|
|||
$scope.getClass = function(notification) {
|
||||
return NotificationService.getClass(notification);
|
||||
};
|
||||
|
||||
$scope.getActions = function(notification) {
|
||||
return NotificationService.getActions(notification);
|
||||
};
|
||||
}
|
||||
};
|
||||
return directiveDefinitionObject;
|
||||
|
@ -5601,7 +5905,7 @@ quayApp.directive('dockerfileBuildForm', function () {
|
|||
var data = {
|
||||
'mimeType': mimeType
|
||||
};
|
||||
|
||||
|
||||
var getUploadUrl = ApiService.getFiledropUrl(data).then(function(resp) {
|
||||
conductUpload(file, resp.url, resp.file_id, mimeType);
|
||||
}, function() {
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
function SignInCtrl($scope, $location) {
|
||||
$scope.redirectUrl = '/';
|
||||
}
|
||||
|
||||
function GuideCtrl() {
|
||||
}
|
||||
|
||||
|
@ -978,14 +982,9 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou
|
|||
}
|
||||
|
||||
function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize,
|
||||
ansi2html, AngularViewArray) {
|
||||
ansi2html, AngularViewArray, AngularPollChannel) {
|
||||
var namespace = $routeParams.namespace;
|
||||
var name = $routeParams.name;
|
||||
var pollTimerHandle = null;
|
||||
|
||||
$scope.$on('$destroy', function() {
|
||||
stopPollTimer();
|
||||
});
|
||||
|
||||
// Watch for changes to the current parameter.
|
||||
$scope.$on('$routeUpdate', function(){
|
||||
|
@ -995,8 +994,7 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
|
|||
});
|
||||
|
||||
$scope.builds = null;
|
||||
$scope.polling = false;
|
||||
|
||||
$scope.pollChannel = null;
|
||||
$scope.buildDialogShowCounter = 0;
|
||||
|
||||
$scope.showNewBuildDialog = function() {
|
||||
|
@ -1081,8 +1079,6 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
|
|||
$scope.setCurrentBuildInternal = function(index, build, opt_updateURL) {
|
||||
if (build == $scope.currentBuild) { return; }
|
||||
|
||||
stopPollTimer();
|
||||
|
||||
$scope.logEntries = null;
|
||||
$scope.logStartIndex = null;
|
||||
$scope.currentParentEntry = null;
|
||||
|
@ -1103,47 +1099,35 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
|
|||
$scope.adjustLogHeight();
|
||||
}, 1);
|
||||
|
||||
// Load the first set of logs.
|
||||
getBuildStatusAndLogs();
|
||||
|
||||
// If the build is currently processing, start the build timer.
|
||||
checkPollTimer();
|
||||
};
|
||||
|
||||
var checkPollTimer = function() {
|
||||
var build = $scope.currentBuild;
|
||||
if (!build) {
|
||||
stopPollTimer();
|
||||
return;
|
||||
// Stop any existing polling.
|
||||
if ($scope.pollChannel) {
|
||||
$scope.pollChannel.stop();
|
||||
}
|
||||
|
||||
// Create a new channel for polling the build status and logs.
|
||||
var conductStatusAndLogRequest = function(callback) {
|
||||
getBuildStatusAndLogs(build, callback);
|
||||
};
|
||||
|
||||
if (build['phase'] != 'complete' && build['phase'] != 'error') {
|
||||
startPollTimer();
|
||||
return true;
|
||||
} else {
|
||||
stopPollTimer();
|
||||
return false;
|
||||
}
|
||||
$scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
|
||||
$scope.pollChannel.start();
|
||||
};
|
||||
|
||||
var stopPollTimer = function() {
|
||||
$interval.cancel(pollTimerHandle);
|
||||
};
|
||||
|
||||
var startPollTimer = function() {
|
||||
stopPollTimer();
|
||||
pollTimerHandle = $interval(getBuildStatusAndLogs, 2000);
|
||||
};
|
||||
|
||||
var processLogs = function(logs, startIndex) {
|
||||
var processLogs = function(logs, startIndex, endIndex) {
|
||||
if (!$scope.logEntries) { $scope.logEntries = []; }
|
||||
|
||||
// If the start index given is less than that requested, then we've received a larger
|
||||
// pool of logs, and we need to only consider the new ones.
|
||||
if (startIndex < $scope.logStartIndex) {
|
||||
logs = logs.slice($scope.logStartIndex - startIndex);
|
||||
}
|
||||
|
||||
for (var i = 0; i < logs.length; ++i) {
|
||||
var entry = logs[i];
|
||||
var type = entry['type'] || 'entry';
|
||||
if (type == 'command' || type == 'phase' || type == 'error') {
|
||||
entry['logs'] = AngularViewArray.create();
|
||||
entry['index'] = startIndex + i;
|
||||
entry['index'] = $scope.logStartIndex + i;
|
||||
|
||||
$scope.logEntries.push(entry);
|
||||
$scope.currentParentEntry = entry;
|
||||
|
@ -1151,18 +1135,19 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
|
|||
$scope.currentParentEntry['logs'].push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
return endIndex;
|
||||
};
|
||||
|
||||
var getBuildStatusAndLogs = function() {
|
||||
if (!$scope.currentBuild || $scope.polling) { return; }
|
||||
$scope.polling = true;
|
||||
|
||||
var getBuildStatusAndLogs = function(build, callback) {
|
||||
var params = {
|
||||
'repository': namespace + '/' + name,
|
||||
'build_uuid': $scope.currentBuild.id
|
||||
'build_uuid': build.id
|
||||
};
|
||||
|
||||
ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
|
||||
if (build != $scope.currentBuild) { callback(false); return; }
|
||||
|
||||
// Note: We use extend here rather than replacing as Angular is depending on the
|
||||
// root build object to remain the same object.
|
||||
var matchingBuilds = $.grep($scope.builds, function(elem) {
|
||||
|
@ -1177,22 +1162,16 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
|
|||
$scope.builds.push(currentBuild);
|
||||
}
|
||||
|
||||
checkPollTimer();
|
||||
|
||||
// Load the updated logs for the build.
|
||||
var options = {
|
||||
'start': $scope.logStartIndex
|
||||
};
|
||||
|
||||
ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
|
||||
if ($scope.logStartIndex != null && resp['start'] != $scope.logStartIndex) {
|
||||
$scope.polling = false;
|
||||
return;
|
||||
}
|
||||
ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
|
||||
if (build != $scope.currentBuild) { callback(false); return; }
|
||||
|
||||
processLogs(resp['logs'], resp['start']);
|
||||
$scope.logStartIndex = resp['total'];
|
||||
$scope.polling = false;
|
||||
// Process the logs we've received.
|
||||
$scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']);
|
||||
|
||||
// If the build status is an error, open the last two log entries.
|
||||
if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {
|
||||
|
@ -1205,9 +1184,15 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
|
|||
openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
|
||||
openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
|
||||
}
|
||||
|
||||
// If the build phase is an error or a complete, then we mark the channel
|
||||
// as closed.
|
||||
callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete');
|
||||
}, function() {
|
||||
$scope.polling = false;
|
||||
callback(false);
|
||||
});
|
||||
}, function() {
|
||||
callback(false);
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -1647,14 +1632,17 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
|
|||
|
||||
if ($scope.cuser.logins) {
|
||||
for (var i = 0; i < $scope.cuser.logins.length; i++) {
|
||||
if ($scope.cuser.logins[i].service == 'github') {
|
||||
var login = $scope.cuser.logins[i];
|
||||
login.metadata = login.metadata || {};
|
||||
|
||||
if (login.service == 'github') {
|
||||
$scope.hasGithubLogin = true;
|
||||
$scope.githubLogin = $scope.cuser.logins[i].metadata['service_username'];
|
||||
$scope.githubLogin = login.metadata['service_username'];
|
||||
}
|
||||
|
||||
if ($scope.cuser.logins[i].service == 'google') {
|
||||
if (login.service == 'google') {
|
||||
$scope.hasGoogleLogin = true;
|
||||
$scope.googleLogin = $scope.cuser.logins[i].metadata['service_username'];
|
||||
$scope.googleLogin = login.metadata['service_username'];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1797,6 +1785,18 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
|
|||
UIService.showFormError('#changePasswordForm', result);
|
||||
});
|
||||
};
|
||||
|
||||
$scope.detachExternalLogin = function(kind) {
|
||||
var params = {
|
||||
'servicename': kind
|
||||
};
|
||||
|
||||
ApiService.detachExternalLogin(null, params).then(function() {
|
||||
$scope.hasGithubLogin = false;
|
||||
$scope.hasGoogleLogin = false;
|
||||
UserService.load();
|
||||
}, ApiService.errorDisplay('Could not detach service'));
|
||||
};
|
||||
}
|
||||
|
||||
function ImageViewCtrl($scope, $routeParams, $rootScope, $timeout, ApiService, ImageMetadataService) {
|
||||
|
@ -2283,29 +2283,91 @@ function OrgAdminCtrl($rootScope, $scope, $timeout, Restangular, $routeParams, U
|
|||
loadOrganization();
|
||||
}
|
||||
|
||||
function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams) {
|
||||
function TeamViewCtrl($rootScope, $scope, $timeout, Restangular, ApiService, $routeParams) {
|
||||
var teamname = $routeParams.teamname;
|
||||
var orgname = $routeParams.orgname;
|
||||
|
||||
$scope.orgname = orgname;
|
||||
$scope.teamname = teamname;
|
||||
$scope.addingMember = false;
|
||||
$scope.memberMap = null;
|
||||
|
||||
$rootScope.title = 'Loading...';
|
||||
|
||||
$scope.addNewMember = function(member) {
|
||||
if (!member || $scope.members[member.name]) { return; }
|
||||
$scope.filterFunction = function(invited, robots) {
|
||||
return function(item) {
|
||||
// Note: The !! is needed because is_robot will be undefined for invites.
|
||||
var robot_check = (!!item.is_robot == robots);
|
||||
return robot_check && item.invited == invited;
|
||||
};
|
||||
};
|
||||
|
||||
$scope.inviteEmail = function(email) {
|
||||
if (!email || $scope.memberMap[email]) { return; }
|
||||
|
||||
$scope.addingMember = true;
|
||||
|
||||
var params = {
|
||||
'orgname': orgname,
|
||||
'teamname': teamname,
|
||||
'email': email
|
||||
};
|
||||
|
||||
var errorHandler = ApiService.errorDisplay('Cannot invite team member', function() {
|
||||
$scope.addingMember = false;
|
||||
});
|
||||
|
||||
ApiService.inviteTeamMemberEmail(null, params).then(function(resp) {
|
||||
$scope.members.push(resp);
|
||||
$scope.memberMap[resp.email] = resp;
|
||||
$scope.addingMember = false;
|
||||
}, errorHandler);
|
||||
};
|
||||
|
||||
$scope.addNewMember = function(member) {
|
||||
if (!member || $scope.memberMap[member.name]) { return; }
|
||||
|
||||
var params = {
|
||||
'orgname': orgname,
|
||||
'teamname': teamname,
|
||||
'membername': member.name
|
||||
};
|
||||
|
||||
ApiService.updateOrganizationTeamMember(null, params).then(function(resp) {
|
||||
$scope.members[member.name] = resp;
|
||||
}, function() {
|
||||
$('#cannotChangeMembersModal').modal({});
|
||||
var errorHandler = ApiService.errorDisplay('Cannot add team member', function() {
|
||||
$scope.addingMember = false;
|
||||
});
|
||||
|
||||
$scope.addingMember = true;
|
||||
ApiService.updateOrganizationTeamMember(null, params).then(function(resp) {
|
||||
$scope.members.push(resp);
|
||||
$scope.memberMap[resp.name] = resp;
|
||||
$scope.addingMember = false;
|
||||
}, errorHandler);
|
||||
};
|
||||
|
||||
$scope.revokeInvite = function(inviteInfo) {
|
||||
if (inviteInfo.kind == 'invite') {
|
||||
// E-mail invite.
|
||||
$scope.revokeEmailInvite(inviteInfo.email);
|
||||
} else {
|
||||
// User invite.
|
||||
$scope.removeMember(inviteInfo.name);
|
||||
}
|
||||
};
|
||||
|
||||
$scope.revokeEmailInvite = function(email) {
|
||||
var params = {
|
||||
'orgname': orgname,
|
||||
'teamname': teamname,
|
||||
'email': email
|
||||
};
|
||||
|
||||
ApiService.deleteTeamMemberEmailInvite(null, params).then(function(resp) {
|
||||
if (!$scope.memberMap[email]) { return; }
|
||||
var index = $.inArray($scope.memberMap[email], $scope.members);
|
||||
$scope.members.splice(index, 1);
|
||||
delete $scope.memberMap[email];
|
||||
}, ApiService.errorDisplay('Cannot revoke team invite'));
|
||||
};
|
||||
|
||||
$scope.removeMember = function(username) {
|
||||
|
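(An illustration of how the filterFunction above partitions the combined members list. The member objects are invented examples; the invited/is_robot checks and the filter calls mirror the controller and template in this change.)

// Hypothetical entries as returned with includePending=true: a user, a robot, and a pending e-mail invite.
var members = [
  { name: 'alice', invited: false },
  { name: 'myorg+builder', is_robot: true, invited: false },
  { kind: 'invite', email: 'new.dev@example.com', invited: true }
];

// filterFunction(invited, robots) returns a predicate, usable with Array.filter or an ng-repeat filter:
members.filter($scope.filterFunction(false, false)); // -> the plain user
members.filter($scope.filterFunction(false, true));  // -> the robot account
members.filter($scope.filterFunction(true, false));  // -> the pending e-mail invite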
@ -2316,10 +2378,11 @@ function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams)
|
|||
};
|
||||
|
||||
ApiService.deleteOrganizationTeamMember(null, params).then(function(resp) {
|
||||
delete $scope.members[username];
|
||||
}, function() {
|
||||
$('#cannotChangeMembersModal').modal({});
|
||||
});
|
||||
if (!$scope.memberMap[username]) { return; }
|
||||
var index = $.inArray($scope.memberMap[username], $scope.members);
|
||||
$scope.members.splice(index, 1);
|
||||
delete $scope.memberMap[username];
|
||||
}, ApiService.errorDisplay('Cannot remove team member'));
|
||||
};
|
||||
|
||||
$scope.updateForDescription = function(content) {
|
||||
|
@ -2351,7 +2414,8 @@ function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams)
|
|||
var loadMembers = function() {
|
||||
var params = {
|
||||
'orgname': orgname,
|
||||
'teamname': teamname
|
||||
'teamname': teamname,
|
||||
'includePending': true
|
||||
};
|
||||
|
||||
$scope.membersResource = ApiService.getOrganizationTeamMembersAsResource(params).get(function(resp) {
|
||||
|
@ -2363,6 +2427,12 @@ function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams)
|
|||
'html': true
|
||||
});
|
||||
|
||||
$scope.memberMap = {};
|
||||
for (var i = 0; i < $scope.members.length; ++i) {
|
||||
var current = $scope.members[i];
|
||||
$scope.memberMap[current.name || current.email] = current;
|
||||
}
|
||||
|
||||
return resp.members;
|
||||
});
|
||||
};
|
||||
|
@ -2688,3 +2758,32 @@ function SuperUserAdminCtrl($scope, ApiService, Features, UserService) {
|
|||
function TourCtrl($scope, $location) {
|
||||
$scope.kind = $location.path().substring('/tour/'.length);
|
||||
}
|
||||
|
||||
function ConfirmInviteCtrl($scope, $location, UserService, ApiService, NotificationService) {
|
||||
// Monitor any user changes and place the current user into the scope.
|
||||
$scope.loading = false;
|
||||
$scope.inviteCode = $location.search()['code'] || '';
|
||||
|
||||
UserService.updateUserIn($scope, function(user) {
|
||||
if (!user.anonymous && !$scope.loading) {
|
||||
// Make sure to not redirect now that we have logged in. We'll conduct the redirect
|
||||
// manually.
|
||||
$scope.redirectUrl = null;
|
||||
$scope.loading = true;
|
||||
|
||||
var params = {
|
||||
'code': $location.search()['code']
|
||||
};
|
||||
|
||||
ApiService.acceptOrganizationTeamInvite(null, params).then(function(resp) {
|
||||
NotificationService.update();
|
||||
$location.path('/organization/' + resp.org + '/teams/' + resp.team);
|
||||
}, function(resp) {
|
||||
$scope.loading = false;
|
||||
$scope.invalid = ApiService.getErrorMessage(resp, 'Invalid confirmation code');
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
$scope.redirectUrl = window.location.href;
|
||||
}
|
||||
|
|
15
static/partials/confirm-invite.html
Normal file
15
static/partials/confirm-invite.html
Normal file
|
@ -0,0 +1,15 @@
|
|||
<div class="confirm-invite">
|
||||
<div class="container signin-container">
|
||||
<div class="row">
|
||||
<div class="col-sm-6 col-sm-offset-3">
|
||||
<div class="user-setup" ng-show="user.anonymous" redirect-url="redirectUrl"
|
||||
invite-code="inviteCode">
|
||||
</div>
|
||||
<div class="quay-spinner" ng-show="!user.anonymous && loading"></div>
|
||||
<div class="alert alert-danger" ng-show="!user.anonymous && invalid">
|
||||
{{ invalid }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
|
@ -77,7 +77,7 @@
|
|||
<span class="container-content build-log-phase" phase="container"></span>
|
||||
</div>
|
||||
<div ng-switch-when="error">
|
||||
<span class="container-content build-log-error" error="container"></span>
|
||||
<span class="container-content build-log-error" error="container" entries="logEntries"></span>
|
||||
</div>
|
||||
<div ng-switch-when="command">
|
||||
<span class="container-content build-log-command" command="container"></span>
|
||||
|
@ -94,7 +94,7 @@
|
|||
</div>
|
||||
</div>
|
||||
<div style="margin-top: 10px">
|
||||
<span class="quay-spinner" ng-show="polling"></span>
|
||||
<span class="quay-spinner" ng-show="pollChannel.working"></span>
|
||||
<button class="btn" ng-show="(build.phase == 'error' || build.phase == 'complete') && build.resource_key"
|
||||
ng-class="build.phase == 'error' ? 'btn-success' : 'btn-default'"
|
||||
ng-click="askRestartBuild(build)">
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
<div class="container signin-container">
|
||||
<div class="row">
|
||||
<div class="col-sm-6 col-sm-offset-3">
|
||||
<div class="user-setup" redirect-url="'/'"></div>
|
||||
<div class="user-setup" redirect-url="redirectUrl"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -1,40 +1,92 @@
|
|||
<div class="resource-view" resource="orgResource" error-message="'No matching organization'">
|
||||
<div class="team-view container">
|
||||
<div class="organization-header" organization="organization" team-name="teamname"></div>
|
||||
<div class="organization-header" organization="organization" team-name="teamname">
|
||||
<div ng-show="canEditMembers" class="side-controls">
|
||||
<div class="hidden-sm hidden-xs">
|
||||
<button class="btn btn-success"
|
||||
id="showAddMember"
|
||||
data-title="Add Team Member"
|
||||
data-content-template="/static/directives/team-view-add.html"
|
||||
data-placement="bottom-right"
|
||||
bs-popover="bs-popover">
|
||||
<i class="fa fa-plus"></i>
|
||||
Add Team Member
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="resource-view" resource="membersResource" error-message="'No matching team found'">
|
||||
<div class="description markdown-input" content="team.description" can-write="organization.is_admin"
|
||||
content-changed="updateForDescription" field-title="'team description'"></div>
|
||||
|
||||
<div class="panel panel-default">
|
||||
<div class="panel-heading">Team Members
|
||||
<i class="info-icon fa fa-info-circle" data-placement="left" data-content="Users that inherit all permissions delegated to this team"></i>
|
||||
</div>
|
||||
<div class="panel-body">
|
||||
<table class="permissions">
|
||||
<tr ng-repeat="(name, member) in members">
|
||||
<td class="user entity">
|
||||
<span class="entity-reference" entity="member" namespace="organization.name"></span>
|
||||
</td>
|
||||
<td>
|
||||
<span class="delete-ui" delete-title="'Remove User From Team'" button-title="'Remove'"
|
||||
perform-delete="removeMember(member.name)" ng-if="canEditMembers"></span>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr ng-show="canEditMembers">
|
||||
<td colspan="3">
|
||||
<div class="entity-search" style="width: 100%"
|
||||
namespace="orgname" placeholder="'Add a registered user or robot...'"
|
||||
entity-selected="addNewMember(entity)"
|
||||
current-entity="selectedMember"
|
||||
auto-clear="true"
|
||||
allowed-entities="['user', 'robot']"></div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
<div class="empty-message" ng-if="!members.length">
|
||||
This team has no members
|
||||
</div>
|
||||
|
||||
<div class="empty-message" ng-if="members.length && !(members | filter:search).length">
|
||||
No matching team members found
|
||||
</div>
|
||||
|
||||
<table class="member-listing" style="margin-top: -20px" ng-show="members.length">
|
||||
<!-- Members -->
|
||||
<tr ng-if="(members | filter:search | filter: filterFunction(false, false)).length">
|
||||
<td colspan="2"><div class="section-header">Team Members</div></td>
|
||||
</tr>
|
||||
|
||||
<tr ng-repeat="member in members | filter:search | filter: filterFunction(false, false) | orderBy: 'name'">
|
||||
<td class="user entity">
|
||||
<span class="entity-reference" entity="member" namespace="organization.name" show-gravatar="true" gravatar-size="32"></span>
|
||||
</td>
|
||||
<td>
|
||||
<span class="delete-ui" delete-title="'Remove ' + member.name + ' from team'" button-title="'Remove'"
|
||||
perform-delete="removeMember(member.name)" ng-if="canEditMembers"></span>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Robots -->
|
||||
<tr ng-if="(members | filter:search | filter: filterFunction(false, true)).length">
|
||||
<td colspan="2"><div class="section-header">Robot Accounts</div></td>
|
||||
</tr>
|
||||
|
||||
<tr ng-repeat="member in members | filter:search | filter: filterFunction(false, true) | orderBy: 'name'">
|
||||
<td class="user entity">
|
||||
<span class="entity-reference" entity="member" namespace="organization.name"></span>
|
||||
</td>
|
||||
<td>
|
||||
<span class="delete-ui" delete-title="'Remove ' + member.name + ' from team'" button-title="'Remove'"
|
||||
perform-delete="removeMember(member.name)" ng-if="canEditMembers"></span>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Invited -->
|
||||
<tr ng-if="(members | filter:search | filter: filterFunction(true, false)).length">
|
||||
<td colspan="2"><div class="section-header">Invited To Join</div></td>
|
||||
</tr>
|
||||
|
||||
<tr ng-repeat="member in members | filter:search | filter: filterFunction(true, false) | orderBy: 'name'">
|
||||
<td class="user entity">
|
||||
<span ng-if="member.kind != 'invite'">
|
||||
<span class="entity-reference" entity="member" namespace="organization.name" show-gravatar="true" gravatar-size="32"></span>
|
||||
</span>
|
||||
<span class="invite-listing" ng-if="member.kind == 'invite'">
|
||||
<img class="gravatar"ng-src="//www.gravatar.com/avatar/{{ member.gravatar }}?s=32&d=identicon">
|
||||
{{ member.email }}
|
||||
</span>
|
||||
</td>
|
||||
<td>
|
||||
<span class="delete-ui" delete-title="'Revoke invite to join team'" button-title="'Revoke'"
|
||||
perform-delete="revokeInvite(member)" ng-if="canEditMembers"></span>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<div ng-show="canEditMembers">
|
||||
<div ng-if-media="'(max-width: 560px)'">
|
||||
<div ng-include="'/static/directives/team-view-add.html'"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -177,10 +177,14 @@
|
|||
<div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8">
|
||||
<i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
|
||||
<b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
|
||||
<span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
|
||||
perform-delete="detachExternalLogin('github')"></span>
|
||||
</div>
|
||||
<div ng-show="hasGithubLogin && !githubLogin" class="lead col-md-8">
|
||||
<i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
|
||||
Account attached to GitHub Account
|
||||
<span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
|
||||
perform-delete="detachExternalLogin('github')"></span>
|
||||
</div>
|
||||
<div ng-show="!hasGithubLogin" class="col-md-4">
|
||||
<span class="external-login-button" provider="github" action="attach"></span>
|
||||
|
@ -197,10 +201,14 @@
|
|||
<div ng-show="hasGoogleLogin && googleLogin" class="lead col-md-8">
|
||||
<i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
|
||||
<b>{{ googleLogin }}</b>
|
||||
<span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
|
||||
perform-delete="detachExternalLogin('google')"></span>
|
||||
</div>
|
||||
<div ng-show="hasGoogleLogin && !googleLogin" class="lead col-md-8">
|
||||
<i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
|
||||
Account attached to Google Account
|
||||
<span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
|
||||
perform-delete="detachExternalLogin('google')"></span>
|
||||
</div>
|
||||
<div ng-show="!hasGoogleLogin" class="col-md-4">
|
||||
<span class="external-login-button" provider="google" action="attach"></span>
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from storage.local import LocalStorage
|
||||
from storage.cloud import S3Storage, GoogleCloudStorage
|
||||
from storage.cloud import S3Storage, GoogleCloudStorage, RadosGWStorage
|
||||
from storage.fakestorage import FakeStorage
|
||||
from storage.distributedstorage import DistributedStorage
|
||||
|
||||
|
@ -8,6 +8,7 @@ STORAGE_DRIVER_CLASSES = {
|
|||
'LocalStorage': LocalStorage,
|
||||
'S3Storage': S3Storage,
|
||||
'GoogleCloudStorage': GoogleCloudStorage,
|
||||
'RadosGWStorage': RadosGWStorage,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -54,10 +54,13 @@ class BaseStorage(StoragePaths):
|
|||
# Set the IO buffer to 64kB
|
||||
buffer_size = 64 * 1024
|
||||
|
||||
def get_direct_download_url(self, path, expires_in=60):
|
||||
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
|
||||
return None
|
||||
|
||||
def get_supports_resumeable_downloads(self):
|
||||
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
|
||||
return None
|
||||
|
||||
def get_supports_resumable_downloads(self):
|
||||
return False
|
||||
|
||||
def get_content(self, path):
|
||||
|
@ -72,7 +75,7 @@ class BaseStorage(StoragePaths):
|
|||
def stream_read_file(self, path):
|
||||
raise NotImplementedError
|
||||
|
||||
def stream_write(self, path, fp):
|
||||
def stream_write(self, path, fp, content_type=None, content_encoding=None):
|
||||
raise NotImplementedError
|
||||
|
||||
def list_directory(self, path=None):
|
||||
|
@ -83,3 +86,6 @@ class BaseStorage(StoragePaths):
|
|||
|
||||
def remove(self, path):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_checksum(self, path):
|
||||
raise NotImplementedError
|
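The widened BaseStorage surface above (CORS-aware direct URLs, content metadata on stream_write, the resumable-downloads rename) is easiest to see from the driver side. The following is a minimal, hypothetical sketch only: NullStorage is an invented name, not part of this commit, and a real driver would also implement the remaining abstract methods.

# Hypothetical driver against the updated BaseStorage interface; NullStorage
# is illustrative and does not exist in this commit.
from storage.basestorage import BaseStorage

class NullStorage(BaseStorage):
    def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
        # Drivers that cannot emit CORS-enabled URLs may return None, as the
        # RadosGW driver later in this diff does; callers then fall back to proxying.
        return None

    def get_direct_upload_url(self, path, mime_type, requires_cors=True):
        return None

    def stream_write(self, path, fp, content_type=None, content_encoding=None):
        # content_type/content_encoding let callers store gzip-encoded JSON
        # (see the build-log archiver later in this diff) with correct metadata.
        # This sketch simply drains the stream.
        while fp.read(self.buffer_size):
            pass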
115
storage/cloud.py
115
storage/cloud.py
|
@ -7,36 +7,39 @@ import boto.gs.connection
|
|||
import boto.s3.key
|
||||
import boto.gs.key
|
||||
|
||||
from io import BufferedIOBase
|
||||
|
||||
from storage.basestorage import BaseStorage
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StreamReadKeyAsFile(object):
|
||||
class StreamReadKeyAsFile(BufferedIOBase):
|
||||
def __init__(self, key):
|
||||
self._key = key
|
||||
self._finished = False
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, tb):
|
||||
self._key.close(fast=True)
|
||||
|
||||
def read(self, amt=None):
|
||||
if self._finished:
|
||||
if self.closed:
|
||||
return None
|
||||
|
||||
resp = self._key.read(amt)
|
||||
if not resp:
|
||||
self._finished = True
|
||||
return resp
|
||||
|
||||
def readable(self):
|
||||
return True
|
||||
|
||||
@property
|
||||
def closed(self):
|
||||
return self._key.closed
|
||||
|
||||
def close(self):
|
||||
self._key.close(fast=True)
|
||||
|
||||
|
||||
class _CloudStorage(BaseStorage):
|
||||
def __init__(self, connection_class, key_class, upload_params, storage_path, access_key,
|
||||
secret_key, bucket_name):
|
||||
def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path,
|
||||
access_key, secret_key, bucket_name):
|
||||
self._initialized = False
|
||||
self._bucket_name = bucket_name
|
||||
self._access_key = access_key
|
||||
|
@ -45,12 +48,14 @@ class _CloudStorage(BaseStorage):
|
|||
self._connection_class = connection_class
|
||||
self._key_class = key_class
|
||||
self._upload_params = upload_params
|
||||
self._connect_kwargs = connect_kwargs
|
||||
self._cloud_conn = None
|
||||
self._cloud_bucket = None
|
||||
|
||||
def _initialize_cloud_conn(self):
|
||||
if not self._initialized:
|
||||
self._cloud_conn = self._connection_class(self._access_key, self._secret_key)
|
||||
self._cloud_conn = self._connection_class(self._access_key, self._secret_key,
|
||||
**self._connect_kwargs)
|
||||
self._cloud_bucket = self._cloud_conn.get_bucket(self._bucket_name)
|
||||
self._initialized = True
|
||||
|
||||
|
@ -87,15 +92,22 @@ class _CloudStorage(BaseStorage):
|
|||
key.set_contents_from_string(content, **self._upload_params)
|
||||
return path
|
||||
|
||||
def get_supports_resumeable_downloads(self):
|
||||
def get_supports_resumable_downloads(self):
|
||||
return True
|
||||
|
||||
def get_direct_download_url(self, path, expires_in=60):
|
||||
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
|
||||
self._initialize_cloud_conn()
|
||||
path = self._init_path(path)
|
||||
k = self._key_class(self._cloud_bucket, path)
|
||||
return k.generate_url(expires_in)
|
||||
|
||||
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
|
||||
self._initialize_cloud_conn()
|
||||
path = self._init_path(path)
|
||||
key = self._key_class(self._cloud_bucket, path)
|
||||
url = key.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, encrypt_key=True)
|
||||
return url
|
||||
|
||||
def stream_read(self, path):
|
||||
self._initialize_cloud_conn()
|
||||
path = self._init_path(path)
|
||||
|
@ -116,14 +128,23 @@ class _CloudStorage(BaseStorage):
|
|||
raise IOError('No such key: \'{0}\''.format(path))
|
||||
return StreamReadKeyAsFile(key)
|
||||
|
||||
def stream_write(self, path, fp):
|
||||
def stream_write(self, path, fp, content_type=None, content_encoding=None):
|
||||
# Minimum size of upload part size on S3 is 5MB
|
||||
self._initialize_cloud_conn()
|
||||
buffer_size = 5 * 1024 * 1024
|
||||
if self.buffer_size > buffer_size:
|
||||
buffer_size = self.buffer_size
|
||||
path = self._init_path(path)
|
||||
mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params)
|
||||
|
||||
metadata = {}
|
||||
if content_type is not None:
|
||||
metadata['Content-Type'] = content_type
|
||||
|
||||
if content_encoding is not None:
|
||||
metadata['Content-Encoding'] = content_encoding
|
||||
|
||||
mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
|
||||
**self._upload_params)
|
||||
num_part = 1
|
||||
while True:
|
||||
try:
|
||||
|
@ -179,25 +200,73 @@ class _CloudStorage(BaseStorage):
|
|||
for key in self._cloud_bucket.list(prefix=path):
|
||||
key.delete()
|
||||
|
||||
def get_checksum(self, path):
|
||||
self._initialize_cloud_conn()
|
||||
path = self._init_path(path)
|
||||
key = self._key_class(self._cloud_bucket, path)
|
||||
k = self._cloud_bucket.lookup(key)
|
||||
if k is None:
|
||||
raise IOError('No such key: \'{0}\''.format(path))
|
||||
|
||||
return k.etag[1:-1][:7]
|
||||
|
||||
|
||||
class S3Storage(_CloudStorage):
|
||||
def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
|
||||
upload_params = {
|
||||
'encrypt_key': True,
|
||||
}
|
||||
connect_kwargs = {}
|
||||
super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
|
||||
upload_params, storage_path, s3_access_key, s3_secret_key,
|
||||
s3_bucket)
|
||||
connect_kwargs, upload_params, storage_path, s3_access_key,
|
||||
s3_secret_key, s3_bucket)
|
||||
|
||||
|
||||
class GoogleCloudStorage(_CloudStorage):
|
||||
def __init__(self, storage_path, access_key, secret_key, bucket_name):
|
||||
super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key, {},
|
||||
storage_path, access_key, secret_key, bucket_name)
|
||||
upload_params = {}
|
||||
connect_kwargs = {}
|
||||
super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key,
|
||||
connect_kwargs, upload_params, storage_path,
|
||||
access_key, secret_key, bucket_name)
|
||||
|
||||
def stream_write(self, path, fp):
|
||||
def stream_write(self, path, fp, content_type=None, content_encoding=None):
|
||||
# Google Cloud Storage does not need a multipart upload here; stream the contents directly.
|
||||
self._initialize_cloud_conn()
|
||||
path = self._init_path(path)
|
||||
key = self._key_class(self._cloud_bucket, path)
|
||||
|
||||
if content_type is not None:
|
||||
key.set_metadata('Content-Type', content_type)
|
||||
|
||||
if content_encoding is not None:
|
||||
key.set_metadata('Content-Encoding', content_encoding)
|
||||
|
||||
key.set_contents_from_stream(fp)
|
||||
|
||||
|
||||
class RadosGWStorage(_CloudStorage):
|
||||
def __init__(self, hostname, is_secure, storage_path, access_key, secret_key, bucket_name):
|
||||
upload_params = {}
|
||||
connect_kwargs = {
|
||||
'host': hostname,
|
||||
'is_secure': is_secure,
|
||||
'calling_format': boto.s3.connection.OrdinaryCallingFormat(),
|
||||
}
|
||||
super(RadosGWStorage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
|
||||
connect_kwargs, upload_params, storage_path, access_key,
|
||||
secret_key, bucket_name)
|
||||
|
||||
# TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
|
||||
def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
|
||||
if requires_cors:
|
||||
return None
|
||||
|
||||
return super(RadosGWStorage, self).get_direct_download_url(path, expires_in, requires_cors)
|
||||
|
||||
# TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
|
||||
def get_direct_upload_url(self, path, mime_type, requires_cors=True):
|
||||
if requires_cors:
|
||||
return None
|
||||
|
||||
return super(RadosGWStorage, self).get_direct_upload_url(path, mime_type, requires_cors)
|
||||
|
|
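The new connect_kwargs plumbing is what lets RadosGWStorage point boto at a Ceph gateway with path-style (OrdinaryCallingFormat) addressing. A hedged construction example; hostnames, keys, and bucket names below are placeholders, not real configuration.

# Illustrative driver construction matching the new constructor signatures;
# all credentials and names are placeholders.
from storage.cloud import S3Storage, RadosGWStorage

s3 = S3Storage('/registry', 'AKIA-PLACEHOLDER', 'secret-placeholder', 'quay-bucket')
rados = RadosGWStorage('ceph-gateway.internal', True, '/registry',
                       'access-placeholder', 'secret-placeholder', 'quay-bucket')

# RadosGW cannot yet serve CORS-enabled URLs, so a direct upload URL request
# that requires CORS returns None and the caller must proxy the upload itself.
print(rados.get_direct_upload_url('uploads/some-id', 'application/octet-stream',
                                  requires_cors=True))  # None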
|
@ -31,6 +31,7 @@ class DistributedStorage(StoragePaths):
|
|||
self.preferred_locations = list(preferred_locations)
|
||||
|
||||
get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url)
|
||||
get_direct_upload_url = _location_aware(BaseStorage.get_direct_upload_url)
|
||||
get_content = _location_aware(BaseStorage.get_content)
|
||||
put_content = _location_aware(BaseStorage.put_content)
|
||||
stream_read = _location_aware(BaseStorage.stream_read)
|
||||
|
@ -39,4 +40,5 @@ class DistributedStorage(StoragePaths):
|
|||
list_directory = _location_aware(BaseStorage.list_directory)
|
||||
exists = _location_aware(BaseStorage.exists)
|
||||
remove = _location_aware(BaseStorage.remove)
|
||||
get_supports_resumeable_downloads = _location_aware(BaseStorage.get_supports_resumeable_downloads)
|
||||
get_checksum = _location_aware(BaseStorage.get_checksum)
|
||||
get_supports_resumable_downloads = _location_aware(BaseStorage.get_supports_resumable_downloads)
|
||||
|
|
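These additions only work because every entry is routed through the _location_aware dispatcher defined earlier in storage/distributedstorage.py. The reconstruction below is hypothetical and for orientation only; the real dispatcher (and its attribute names such as _storages) may differ.

# Hypothetical reconstruction of the _location_aware decorator.
def _location_aware(unbound_func):
    def wrapper(self, locations, *args, **kwargs):
        # Prefer a configured location, falling back to any location that
        # actually holds the data.
        for preferred in self.preferred_locations:
            if preferred in locations:
                return unbound_func(self._storages[preferred], *args, **kwargs)
        return unbound_func(self._storages[list(locations)[0]], *args, **kwargs)
    return wrapper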
|
@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
|
|||
def stream_read(self, path):
|
||||
yield ''
|
||||
|
||||
def stream_write(self, path, fp):
|
||||
def stream_write(self, path, fp, content_type=None, content_encoding=None):
|
||||
pass
|
||||
|
||||
def remove(self, path):
|
||||
|
@ -22,3 +22,6 @@ class FakeStorage(BaseStorage):
|
|||
|
||||
def exists(self, path):
|
||||
return False
|
||||
|
||||
def get_checksum(self, path):
|
||||
return 'abcdefg'
|
|
@ -1,6 +1,7 @@
|
|||
|
||||
import os
|
||||
import shutil
|
||||
import hashlib
|
||||
import io
|
||||
|
||||
from storage.basestorage import BaseStorage
|
||||
|
||||
|
@ -40,9 +41,9 @@ class LocalStorage(BaseStorage):
|
|||
|
||||
def stream_read_file(self, path):
|
||||
path = self._init_path(path)
|
||||
return open(path, mode='rb')
|
||||
return io.open(path, mode='rb')
|
||||
|
||||
def stream_write(self, path, fp):
|
||||
def stream_write(self, path, fp, content_type=None, content_encoding=None):
|
||||
# Size is mandatory
|
||||
path = self._init_path(path, create=True)
|
||||
with open(path, mode='wb') as f:
|
||||
|
@ -80,3 +81,14 @@ class LocalStorage(BaseStorage):
|
|||
os.remove(path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def get_checksum(self, path):
|
||||
path = self._init_path(path)
|
||||
sha_hash = hashlib.sha256()
|
||||
with open(path, 'rb') as to_hash:
|
||||
while True:
|
||||
buf = to_hash.read(self.buffer_size)
|
||||
if not buf:
|
||||
break
|
||||
sha_hash.update(buf)
|
||||
return sha_hash.hexdigest()[:7]
|
||||
|
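The 7-character hex prefix matches the truncated etag that the cloud drivers return from get_checksum, so callers can compare checksums across backends. A trivial standalone illustration with an arbitrary payload:

# Standalone illustration of the truncated-checksum convention; the payload is made up.
import hashlib

payload = b'some layer bytes'
print(hashlib.sha256(payload).hexdigest()[:7])  # 7-hex-digit prefix, as get_checksum returns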
|
Binary file not shown.
|
@ -8,7 +8,7 @@ from app import app
|
|||
from initdb import setup_database_for_testing, finished_database_for_testing
|
||||
from endpoints.api import api_bp, api
|
||||
|
||||
from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam
|
||||
from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam, TeamMemberInvite
|
||||
from endpoints.api.tag import RepositoryTagImages, RepositoryTag
|
||||
from endpoints.api.search import FindRepositories, EntitySearch
|
||||
from endpoints.api.image import RepositoryImageChanges, RepositoryImage, RepositoryImageList
|
||||
|
@ -24,7 +24,7 @@ from endpoints.api.repoemail import RepositoryAuthorizedEmail
|
|||
from endpoints.api.repositorynotification import RepositoryNotification, RepositoryNotificationList
|
||||
from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Recovery, Signout,
|
||||
Signin, User, UserAuthorizationList, UserAuthorization, UserNotification,
|
||||
VerifyUser)
|
||||
VerifyUser, DetachExternal)
|
||||
from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList
|
||||
from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList
|
||||
from endpoints.api.logs import UserLogs, OrgLogs, RepositoryLogs
|
||||
|
@ -435,6 +435,24 @@ class TestSignin(ApiTestCase):
|
|||
self._run_test('POST', 403, 'devtable', {u'username': 'E9RY', u'password': 'LQ0N'})
|
||||
|
||||
|
||||
class TestDetachExternal(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
self._set_url(DetachExternal, servicename='someservice')
|
||||
|
||||
def test_post_anonymous(self):
|
||||
self._run_test('POST', 401, None, {})
|
||||
|
||||
def test_post_freshuser(self):
|
||||
self._run_test('POST', 200, 'freshuser', {})
|
||||
|
||||
def test_post_reader(self):
|
||||
self._run_test('POST', 200, 'reader', {})
|
||||
|
||||
def test_post_devtable(self):
|
||||
self._run_test('POST', 200, 'devtable', {})
|
||||
|
||||
|
||||
class TestVerifyUser(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
|
@ -3509,6 +3527,36 @@ class TestSuperUserLogs(ApiTestCase):
|
|||
self._run_test('GET', 200, 'devtable', None)
|
||||
|
||||
|
||||
class TestTeamMemberInvite(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
self._set_url(TeamMemberInvite, code='foobarbaz')
|
||||
|
||||
def test_put_anonymous(self):
|
||||
self._run_test('PUT', 401, None, None)
|
||||
|
||||
def test_put_freshuser(self):
|
||||
self._run_test('PUT', 400, 'freshuser', None)
|
||||
|
||||
def test_put_reader(self):
|
||||
self._run_test('PUT', 400, 'reader', None)
|
||||
|
||||
def test_put_devtable(self):
|
||||
self._run_test('PUT', 400, 'devtable', None)
|
||||
|
||||
def test_delete_anonymous(self):
|
||||
self._run_test('DELETE', 401, None, None)
|
||||
|
||||
def test_delete_freshuser(self):
|
||||
self._run_test('DELETE', 400, 'freshuser', None)
|
||||
|
||||
def test_delete_reader(self):
|
||||
self._run_test('DELETE', 400, 'reader', None)
|
||||
|
||||
def test_delete_devtable(self):
|
||||
self._run_test('DELETE', 400, 'devtable', None)
|
||||
|
||||
|
||||
class TestSuperUserList(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
|
@ -3527,7 +3575,6 @@ class TestSuperUserList(ApiTestCase):
|
|||
self._run_test('GET', 200, 'devtable', None)
|
||||
|
||||
|
||||
|
||||
class TestSuperUserManagement(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
|
|
|
@ -11,7 +11,7 @@ from app import app
|
|||
from initdb import setup_database_for_testing, finished_database_for_testing
|
||||
from data import model, database
|
||||
|
||||
from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam
|
||||
from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, OrganizationTeam
|
||||
from endpoints.api.tag import RepositoryTagImages, RepositoryTag
|
||||
from endpoints.api.search import FindRepositories, EntitySearch
|
||||
from endpoints.api.image import RepositoryImage, RepositoryImageList
|
||||
|
@ -131,6 +131,10 @@ class ApiTestCase(unittest.TestCase):
|
|||
|
||||
def deleteResponse(self, resource_name, params={}, expected_code=204):
|
||||
rv = self.app.delete(self.url_for(resource_name, params))
|
||||
|
||||
if rv.status_code != expected_code:
|
||||
print 'Mismatch data for resource DELETE %s: %s' % (resource_name, rv.data)
|
||||
|
||||
self.assertEquals(rv.status_code, expected_code)
|
||||
return rv.data
|
||||
|
||||
|
@ -162,6 +166,13 @@ class ApiTestCase(unittest.TestCase):
|
|||
parsed = py_json.loads(data)
|
||||
return parsed
|
||||
|
||||
def assertInTeam(self, data, membername):
|
||||
for memberData in data['members']:
|
||||
if memberData['name'] == membername:
|
||||
return
|
||||
|
||||
self.fail(membername + ' not found in team: ' + json.dumps(data))
|
||||
|
||||
def login(self, username, password='password'):
|
||||
return self.postJsonResponse(Signin, data=dict(username=username, password=password))
|
||||
|
||||
|
@ -380,6 +391,28 @@ class TestCreateNewUser(ApiTestCase):
|
|||
expected_code=201)
|
||||
self.assertEquals('"Created"', data)
|
||||
|
||||
def test_createuser_withteaminvite(self):
|
||||
inviter = model.get_user(ADMIN_ACCESS_USER)
|
||||
team = model.get_organization_team(ORGANIZATION, 'owners')
|
||||
invite = model.add_or_invite_to_team(inviter, team, None, 'foo@example.com')
|
||||
|
||||
details = {
|
||||
'inviteCode': invite.invite_token
|
||||
}
|
||||
details.update(NEW_USER_DETAILS)
|
||||
|
||||
data = self.postResponse(User,
|
||||
data=details,
|
||||
expected_code=201)
|
||||
self.assertEquals('"Created"', data)
|
||||
|
||||
# Make sure the user was added to the team.
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
json = self.getJsonResponse(TeamMemberList,
|
||||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='owners'))
|
||||
self.assertInTeam(json, NEW_USER_DETAILS['username'])
|
||||
|
||||
|
||||
class TestSignout(ApiTestCase):
|
||||
def test_signout(self):
|
||||
|
@ -741,16 +774,43 @@ class TestGetOrganizationTeamMembers(ApiTestCase):
|
|||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='readers'))
|
||||
|
||||
assert READ_ACCESS_USER in json['members']
|
||||
self.assertEquals(READ_ACCESS_USER, json['members'][1]['name'])
|
||||
|
||||
|
||||
class TestUpdateOrganizationTeamMember(ApiTestCase):
|
||||
def test_addmember(self):
|
||||
def test_addmember_alreadyteammember(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
membername = READ_ACCESS_USER
|
||||
self.putResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='readers',
|
||||
membername=membername),
|
||||
expected_code=400)
|
||||
|
||||
|
||||
def test_addmember_orgmember(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
membername = READ_ACCESS_USER
|
||||
self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='owners',
|
||||
membername=membername))
|
||||
|
||||
# Verify the user was added to the team.
|
||||
json = self.getJsonResponse(TeamMemberList,
|
||||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='owners'))
|
||||
|
||||
self.assertInTeam(json, membername)
|
||||
|
||||
|
||||
def test_addmember_robot(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
membername = ORGANIZATION + '+coolrobot'
|
||||
self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='readers',
|
||||
membername=NO_ACCESS_USER))
|
||||
membername=membername))
|
||||
|
||||
|
||||
# Verify the user was added to the team.
|
||||
|
@ -758,10 +818,168 @@ class TestUpdateOrganizationTeamMember(ApiTestCase):
|
|||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='readers'))
|
||||
|
||||
assert NO_ACCESS_USER in json['members']
|
||||
self.assertInTeam(json, membername)
|
||||
|
||||
|
||||
def test_addmember_invalidrobot(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
membername = 'freshuser+anotherrobot'
|
||||
self.putResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='readers',
|
||||
membername=membername),
|
||||
expected_code=400)
|
||||
|
||||
|
||||
def test_addmember_nonorgmember(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
membername = NO_ACCESS_USER
|
||||
response = self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='owners',
|
||||
membername=membername))
|
||||
|
||||
|
||||
self.assertEquals(True, response['invited'])
|
||||
|
||||
# Make sure the user is not (yet) part of the team.
|
||||
json = self.getJsonResponse(TeamMemberList,
|
||||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='readers'))
|
||||
|
||||
for member in json['members']:
|
||||
self.assertNotEqual(membername, member['name'])
|
||||
|
||||
|
||||
class TestAcceptTeamMemberInvite(ApiTestCase):
|
||||
def assertInTeam(self, data, membername):
|
||||
for memberData in data['members']:
|
||||
if memberData['name'] == membername:
|
||||
return
|
||||
|
||||
self.fail(membername + ' not found in team: ' + json.dumps(data))
|
||||
|
||||
def test_accept(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
# Create the invite.
|
||||
membername = NO_ACCESS_USER
|
||||
response = self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='owners',
|
||||
membername=membername))
|
||||
|
||||
self.assertEquals(True, response['invited'])
|
||||
|
||||
# Login as the user.
|
||||
self.login(membername)
|
||||
|
||||
# Accept the invite.
|
||||
user = model.get_user(membername)
|
||||
invites = list(model.lookup_team_invites(user))
|
||||
self.assertEquals(1, len(invites))
|
||||
|
||||
self.putJsonResponse(TeamMemberInvite,
|
||||
params=dict(code=invites[0].invite_token))
|
||||
|
||||
# Verify the user is now on the team.
|
||||
json = self.getJsonResponse(TeamMemberList,
|
||||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='owners'))
|
||||
|
||||
self.assertInTeam(json, membername)
|
||||
|
||||
# Verify the accept now fails.
|
||||
self.putResponse(TeamMemberInvite,
|
||||
params=dict(code=invites[0].invite_token),
|
||||
expected_code=400)
|
||||
|
||||
|
||||
|
||||
class TestDeclineTeamMemberInvite(ApiTestCase):
|
||||
def test_decline_wronguser(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
# Create the invite.
|
||||
membername = NO_ACCESS_USER
|
||||
response = self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='owners',
|
||||
membername=membername))
|
||||
|
||||
self.assertEquals(True, response['invited'])
|
||||
|
||||
# Try to decline the invite.
|
||||
user = model.get_user(membername)
|
||||
invites = list(model.lookup_team_invites(user))
|
||||
self.assertEquals(1, len(invites))
|
||||
|
||||
self.deleteResponse(TeamMemberInvite,
|
||||
params=dict(code=invites[0].invite_token),
|
||||
expected_code=400)
|
||||
|
||||
|
||||
def test_decline(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
# Create the invite.
|
||||
membername = NO_ACCESS_USER
|
||||
response = self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='owners',
|
||||
membername=membername))
|
||||
|
||||
self.assertEquals(True, response['invited'])
|
||||
|
||||
# Login as the user.
|
||||
self.login(membername)
|
||||
|
||||
# Decline the invite.
|
||||
user = model.get_user(membername)
|
||||
invites = list(model.lookup_team_invites(user))
|
||||
self.assertEquals(1, len(invites))
|
||||
|
||||
self.deleteResponse(TeamMemberInvite,
|
||||
params=dict(code=invites[0].invite_token))
|
||||
|
||||
# Make sure the invite was deleted.
|
||||
self.deleteResponse(TeamMemberInvite,
|
||||
params=dict(code=invites[0].invite_token),
|
||||
expected_code=400)
|
||||
|
||||
|
||||
class TestDeleteOrganizationTeamMember(ApiTestCase):
|
||||
def test_deletememberinvite(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
membername = NO_ACCESS_USER
|
||||
response = self.putJsonResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='readers',
|
||||
membername=membername))
|
||||
|
||||
|
||||
self.assertEquals(True, response['invited'])
|
||||
|
||||
# Verify the invite was added.
|
||||
json = self.getJsonResponse(TeamMemberList,
|
||||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='readers',
|
||||
includePending=True))
|
||||
|
||||
assert len(json['members']) == 3
|
||||
|
||||
# Delete the invite.
|
||||
self.deleteResponse(TeamMember,
|
||||
params=dict(orgname=ORGANIZATION, teamname='readers',
|
||||
membername=membername))
|
||||
|
||||
|
||||
# Verify the user was removed from the team.
|
||||
json = self.getJsonResponse(TeamMemberList,
|
||||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='readers',
|
||||
includePending=True))
|
||||
|
||||
assert len(json['members']) == 2
|
||||
|
||||
|
||||
def test_deletemember(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
|
@ -775,7 +993,7 @@ class TestDeleteOrganizationTeamMember(ApiTestCase):
|
|||
params=dict(orgname=ORGANIZATION,
|
||||
teamname='readers'))
|
||||
|
||||
assert not READ_ACCESS_USER in json['members']
|
||||
assert len(json['members']) == 1
|
||||
|
||||
|
||||
class TestCreateRepo(ApiTestCase):
|
||||
|
@ -2120,7 +2338,7 @@ class TestSuperUserManagement(ApiTestCase):
|
|||
|
||||
json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser'))
|
||||
self.assertEquals('freshuser', json['username'])
|
||||
self.assertEquals('no@thanks.com', json['email'])
|
||||
self.assertEquals('jschorr+test@devtable.com', json['email'])
|
||||
self.assertEquals(False, json['super_user'])
|
||||
|
||||
def test_delete_user(self):
|
||||
|
@ -2143,7 +2361,7 @@ class TestSuperUserManagement(ApiTestCase):
|
|||
# Verify the user exists.
|
||||
json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser'))
|
||||
self.assertEquals('freshuser', json['username'])
|
||||
self.assertEquals('no@thanks.com', json['email'])
|
||||
self.assertEquals('jschorr+test@devtable.com', json['email'])
|
||||
|
||||
# Update the user.
|
||||
self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(email='foo@bar.com'))
|
||||
|
|
|
@ -30,7 +30,7 @@ class TestConfig(DefaultConfig):
|
|||
BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs')
|
||||
BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef', False]
|
||||
|
||||
USERFILES_TYPE = 'FakeUserfiles'
|
||||
USERFILES_LOCATION = 'local_us'
|
||||
|
||||
FEATURE_SUPER_USERS = True
|
||||
FEATURE_BILLING = True
|
||||
|
|
|
@ -198,3 +198,11 @@ class TestBuildLogs(RedisBuildLogs):
|
|||
return None
|
||||
else:
|
||||
return super(TestBuildLogs, self).get_status(build_id)
|
||||
|
||||
def expire_log_entries(self, build_id):
|
||||
if build_id == self.test_build_id:
|
||||
return
|
||||
if not self.allow_delegate:
|
||||
return None
|
||||
else:
|
||||
return super(TestBuildLogs, self).expire_log_entries(build_id)
|
||||
|
|
23
tools/reparsedockerfile.py
Normal file
23
tools/reparsedockerfile.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
from util.dockerfileparse import parse_dockerfile, ParsedDockerfile, serialize_dockerfile
|
||||
|
||||
with open('Dockerfile.test', 'r') as dockerfileobj:
|
||||
parsed_dockerfile = parse_dockerfile(dockerfileobj.read())
|
||||
|
||||
quay_reponame = 'something'
|
||||
env_command = {
|
||||
'command': 'ENV',
|
||||
'parameters': 'QUAY_REPOSITORY %s' % quay_reponame
|
||||
}
|
||||
|
||||
for index, command in reversed(list(enumerate(parsed_dockerfile.commands))):
|
||||
if command['command'] == 'FROM':
|
||||
new_command_index = index + 1
|
||||
parsed_dockerfile.commands.insert(new_command_index, env_command)
|
||||
break
|
||||
|
||||
image_and_tag_tuple = parsed_dockerfile.get_image_and_tag()
|
||||
print image_and_tag_tuple
|
||||
if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
|
||||
raise Exception('Missing FROM command in Dockerfile')
|
||||
|
||||
print serialize_dockerfile(parsed_dockerfile)
|
|
@ -1,6 +1,6 @@
|
|||
import re
|
||||
|
||||
LINE_CONTINUATION_REGEX = re.compile('\s*\\\s*\n')
|
||||
LINE_CONTINUATION_REGEX = re.compile(r'(\s)*\\(\s)*\n')
|
||||
COMMAND_REGEX = re.compile('([A-Za-z]+)\s(.*)')
|
||||
|
||||
COMMENT_CHARACTER = '#'
|
||||
|
|
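The raw-string rewrite matters because, in the old non-raw literal, the backslash escapes collapse and the pattern ends up requiring literal 's' characters after the continuation backslash instead of whitespace. A small demonstration under Python 2, which this codebase targets; the sample Dockerfile line is made up.

import re

OLD = re.compile('\s*\\\s*\n')   # escapes collapse to \s*  \\  s*  newline: literal 's' after the backslash
NEW = re.compile(r'(\s)*\\(\s)*\n')

line = 'RUN apt-get update \\ \n  && apt-get install -y git\n'
print(bool(OLD.search(line)))  # False: a space between '\' and the newline breaks the old pattern
print(bool(NEW.search(line)))  # True: trailing whitespace after the continuation '\' is now allowed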
|
@ -34,6 +34,27 @@ def parse_robot_username(robot_username):
|
|||
return robot_username.split('+', 2)
|
||||
|
||||
|
||||
def parse_urn(urn):
|
||||
""" Parses a URN, returning a pair that contains a list of URN
|
||||
namespace parts, followed by the URN's unique ID.
|
||||
"""
|
||||
if not urn.startswith('urn:'):
|
||||
return None
|
||||
|
||||
parts = urn[len('urn:'):].split(':')
|
||||
return (parts[0:len(parts) - 1], parts[len(parts) - 1])
|
||||
|
||||
|
||||
def parse_single_urn(urn):
|
||||
""" Parses a URN, returning a pair that contains the first
|
||||
namespace part, followed by the URN's unique ID.
|
||||
"""
|
||||
result = parse_urn(urn)
|
||||
if result is None or not len(result[0]):
|
||||
return None
|
||||
|
||||
return (result[0][0], result[1])
|
||||
|
||||
uuid_generator = lambda: str(uuid4())
|
||||
|
||||
|
||||
|
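A hedged usage example of the new URN helpers; the URN value is invented and the module path is assumed rather than shown in this hunk.

from util.names import parse_urn, parse_single_urn  # module path assumed

print(parse_urn('urn:quay:notification:1234'))         # (['quay', 'notification'], '1234')
print(parse_single_urn('urn:quay:notification:1234'))  # ('quay', '1234')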
|
267
util/streamingjsonencoder.py
Normal file
267
util/streamingjsonencoder.py
Normal file
|
@ -0,0 +1,267 @@
|
|||
# Adapted from https://gist.github.com/akaihola/1415730#file-streamingjson-py
|
||||
|
||||
# Copyright (c) Django Software Foundation and individual contributors.
|
||||
# All rights reserved.
|
||||
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
|
||||
# 2. Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
|
||||
# 3. Neither the name of Django nor the names of its contributors may be used
|
||||
# to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import collections
|
||||
import json
|
||||
from json.encoder import encode_basestring, encode_basestring_ascii, FLOAT_REPR, INFINITY
|
||||
from types import GeneratorType
|
||||
|
||||
|
||||
class StreamingJSONEncoder(json.JSONEncoder):
|
||||
def iterencode(self, o, _one_shot=False):
|
||||
"""Encode the given object and yield each string
|
||||
representation as available.
|
||||
|
||||
For example::
|
||||
|
||||
for chunk in StreamingJSONEncoder().iterencode(bigobject):
|
||||
mysocket.write(chunk)
|
||||
|
||||
This method is a verbatim copy of
|
||||
:meth:`json.JSONEncoder.iterencode`. It is
|
||||
needed because we need to call our patched
|
||||
:func:`streamingjsonencoder._make_iterencode`.
|
||||
"""
|
||||
if self.check_circular:
|
||||
markers = {}
|
||||
else:
|
||||
markers = None
|
||||
if self.ensure_ascii:
|
||||
_encoder = encode_basestring_ascii
|
||||
else:
|
||||
_encoder = encode_basestring
|
||||
if self.encoding != 'utf-8':
|
||||
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
|
||||
if isinstance(o, str):
|
||||
o = o.decode(_encoding)
|
||||
return _orig_encoder(o)
|
||||
|
||||
def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
|
||||
# Check for specials. Note that this type of test is processor- and/or
|
||||
# platform-specific, so do tests which don't depend on the internals.
|
||||
|
||||
if o != o:
|
||||
text = 'NaN'
|
||||
elif o == _inf:
|
||||
text = 'Infinity'
|
||||
elif o == _neginf:
|
||||
text = '-Infinity'
|
||||
else:
|
||||
return _repr(o)
|
||||
|
||||
if not allow_nan:
|
||||
raise ValueError("Out of range float values are not JSON compliant: %r"
|
||||
% (o,))
|
||||
|
||||
return text
|
||||
|
||||
_iterencode = _make_iterencode(
|
||||
markers, self.default, _encoder, self.indent, floatstr,
|
||||
self.key_separator, self.item_separator, self.sort_keys,
|
||||
self.skipkeys, _one_shot)
|
||||
return _iterencode(o, 0)
|
||||
|
||||
|
||||
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator,
|
||||
_item_separator, _sort_keys, _skipkeys, _one_shot, False=False, True=True,
|
||||
ValueError=ValueError, basestring=basestring, dict=dict, float=float,
|
||||
GeneratorType=GeneratorType, id=id, int=int, isinstance=isinstance, list=list,
|
||||
long=long, str=str, tuple=tuple):
|
||||
"""
|
||||
This is a patched version of
|
||||
:func:`django.utils.simplejson.encoder.iterencode`. Whenever it encounters
|
||||
a generator in the data structure, it encodes it as a JSON list.
|
||||
"""
|
||||
def _iterencode_list(lst, _current_indent_level):
|
||||
if not lst:
|
||||
# note: empty generators aren't caught here, see below
|
||||
yield '[]'
|
||||
return
|
||||
if markers is not None:
|
||||
markerid = id(lst)
|
||||
if markerid in markers:
|
||||
raise ValueError("Circular reference detected")
|
||||
markers[markerid] = lst
|
||||
buf = '['
|
||||
if _indent is not None:
|
||||
_current_indent_level += 1
|
||||
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
|
||||
separator = _item_separator + newline_indent
|
||||
buf += newline_indent
|
||||
else:
|
||||
newline_indent = None
|
||||
separator = _item_separator
|
||||
first = True
|
||||
for value in lst:
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
buf = separator
|
||||
if isinstance(value, basestring):
|
||||
yield buf + _encoder(value)
|
||||
elif value is None:
|
||||
yield buf + 'null'
|
||||
elif value is True:
|
||||
yield buf + 'true'
|
||||
elif value is False:
|
||||
yield buf + 'false'
|
||||
elif isinstance(value, (int, long)):
|
||||
yield buf + str(value)
|
||||
elif isinstance(value, float):
|
||||
yield buf + _floatstr(value)
|
||||
else:
|
||||
yield buf
|
||||
if isinstance(value, (list, tuple, GeneratorType)):
|
||||
chunks = _iterencode_list(value, _current_indent_level)
|
||||
elif isinstance(value, dict):
|
||||
chunks = _iterencode_dict(value, _current_indent_level)
|
||||
else:
|
||||
chunks = _iterencode(value, _current_indent_level)
|
||||
for chunk in chunks:
|
||||
yield chunk
|
||||
if first:
|
||||
# we had an empty generator
|
||||
yield buf
|
||||
if newline_indent is not None:
|
||||
_current_indent_level -= 1
|
||||
yield '\n' + (' ' * (_indent * _current_indent_level))
|
||||
yield ']'
|
||||
if markers is not None:
|
||||
del markers[markerid]
|
||||
|
||||
def _iterencode_dict(dct, _current_indent_level):
|
||||
if not dct:
|
||||
yield '{}'
|
||||
return
|
||||
if markers is not None:
|
||||
markerid = id(dct)
|
||||
if markerid in markers:
|
||||
raise ValueError("Circular reference detected")
|
||||
markers[markerid] = dct
|
||||
yield '{'
|
||||
if _indent is not None:
|
||||
_current_indent_level += 1
|
||||
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
|
||||
item_separator = _item_separator + newline_indent
|
||||
yield newline_indent
|
||||
else:
|
||||
newline_indent = None
|
||||
item_separator = _item_separator
|
||||
first = True
|
||||
if _sort_keys:
|
||||
items = dct.items()
|
||||
items.sort(key=lambda kv: kv[0])
|
||||
else:
|
||||
items = dct.iteritems()
|
||||
for key, value in items:
|
||||
if isinstance(key, basestring):
|
||||
pass
|
||||
# JavaScript is weakly typed for these, so it makes sense to
|
||||
# also allow them. Many encoders seem to do something like this.
|
||||
elif isinstance(key, float):
|
||||
key = _floatstr(key)
|
||||
elif isinstance(key, (int, long)):
|
||||
key = str(key)
|
||||
elif key is True:
|
||||
key = 'true'
|
||||
elif key is False:
|
||||
key = 'false'
|
||||
elif key is None:
|
||||
key = 'null'
|
||||
elif _skipkeys:
|
||||
continue
|
||||
else:
|
||||
raise TypeError("key %r is not a string" % (key,))
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
yield item_separator
|
||||
yield _encoder(key)
|
||||
yield _key_separator
|
||||
if isinstance(value, basestring):
|
||||
yield _encoder(value)
|
||||
elif value is None:
|
||||
yield 'null'
|
||||
elif value is True:
|
||||
yield 'true'
|
||||
elif value is False:
|
||||
yield 'false'
|
||||
elif isinstance(value, (int, long)):
|
||||
yield str(value)
|
||||
elif isinstance(value, float):
|
||||
yield _floatstr(value)
|
||||
else:
|
||||
if isinstance(value, collections.Mapping):
|
||||
chunks = _iterencode_dict(value, _current_indent_level)
|
||||
elif isinstance(value, collections.Iterable):
|
||||
chunks = _iterencode_list(value, _current_indent_level)
|
||||
else:
|
||||
chunks = _iterencode(value, _current_indent_level)
|
||||
for chunk in chunks:
|
||||
yield chunk
|
||||
if newline_indent is not None:
|
||||
_current_indent_level -= 1
|
||||
yield '\n' + (' ' * (_indent * _current_indent_level))
|
||||
yield '}'
|
||||
if markers is not None:
|
||||
del markers[markerid]
|
||||
|
||||
def _iterencode(o, _current_indent_level):
|
||||
if isinstance(o, basestring):
|
||||
yield _encoder(o)
|
||||
elif o is None:
|
||||
yield 'null'
|
||||
elif o is True:
|
||||
yield 'true'
|
||||
elif o is False:
|
||||
yield 'false'
|
||||
elif isinstance(o, (int, long)):
|
||||
yield str(o)
|
||||
elif isinstance(o, float):
|
||||
yield _floatstr(o)
|
||||
elif isinstance(o, collections.Mapping):
|
||||
for chunk in _iterencode_dict(o, _current_indent_level):
|
||||
yield chunk
|
||||
elif isinstance(o, collections.Iterable):
|
||||
for chunk in _iterencode_list(o, _current_indent_level):
|
||||
yield chunk
|
||||
else:
|
||||
if markers is not None:
|
||||
markerid = id(o)
|
||||
if markerid in markers:
|
||||
raise ValueError("Circular reference detected")
|
||||
markers[markerid] = o
|
||||
o = _default(o)
|
||||
for chunk in _iterencode(o, _current_indent_level):
|
||||
yield chunk
|
||||
if markers is not None:
|
||||
del markers[markerid]
|
||||
|
||||
return _iterencode
|
|
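The point of the patched encoder is that generators embedded in the payload are emitted as JSON arrays without ever being materialized in memory. A minimal usage sketch under Python 2 (the encoder relies on iteritems and the encoding attribute); the payload shape mirrors the build-log archiver below, but the values are invented.

# Minimal usage sketch; chunks can be written to a socket or gzip stream as they arrive.
from util.streamingjsonencoder import StreamingJSONEncoder

def log_entries():
    for i in range(3):
        yield {'message': 'step %d' % i}

payload = {'start': 0, 'total': 3, 'logs': log_entries()}
for chunk in StreamingJSONEncoder().iterencode(payload):
    print(chunk)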
@ -6,7 +6,7 @@ from data import model
|
|||
from util.gravatar import compute_hash
|
||||
|
||||
def user_reference(username):
|
||||
user = model.get_user(username)
|
||||
user = model.get_user_or_org(username)
|
||||
if not user:
|
||||
return username
|
||||
|
||||
|
@ -123,6 +123,14 @@ def send_payment_failed(email, username):
|
|||
'username': username
|
||||
})
|
||||
|
||||
def send_org_invite_email(member_name, member_email, orgname, team, adder, code):
|
||||
send_email(member_email, 'Invitation to join team', 'teaminvite', {
|
||||
'inviter': adder,
|
||||
'token': code,
|
||||
'organization': orgname,
|
||||
'teamname': team
|
||||
})
|
||||
|
||||
|
||||
def send_invoice_email(email, contents):
|
||||
# Note: This completely generates the contents of the email, so we don't use the
|
||||
|
|
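A hypothetical call site for the new invite helper; all argument values are placeholders and the module path is assumed rather than shown in this hunk.

from util.useremails import send_org_invite_email  # module path assumed

send_org_invite_email(member_name='newdev', member_email='newdev@example.com',
                      orgname='buynlarge', team='owners', adder='devtable',
                      code='generated-invite-token')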
56
workers/buildlogsarchiver.py
Normal file
56
workers/buildlogsarchiver.py
Normal file
|
@ -0,0 +1,56 @@
|
|||
import logging
|
||||
|
||||
from apscheduler.schedulers.blocking import BlockingScheduler
|
||||
from peewee import fn
|
||||
from tempfile import SpooledTemporaryFile
|
||||
from gzip import GzipFile
|
||||
|
||||
from data import model
|
||||
from data.archivedlogs import JSON_MIMETYPE
|
||||
from data.database import RepositoryBuild, db_random_func
|
||||
from app import build_logs, log_archive
|
||||
from util.streamingjsonencoder import StreamingJSONEncoder
|
||||
|
||||
POLL_PERIOD_SECONDS = 30
|
||||
MEMORY_TEMPFILE_SIZE = 64 * 1024 # Large enough to handle approximately 99% of builds in memory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
sched = BlockingScheduler()
|
||||
|
||||
@sched.scheduled_job(trigger='interval', seconds=POLL_PERIOD_SECONDS)
|
||||
def archive_redis_buildlogs():
|
||||
""" Archive a single build, choosing a candidate at random. This process must be idempotent to
|
||||
avoid needing two-phase commit. """
|
||||
try:
|
||||
# Get a random build to archive
|
||||
to_archive = model.archivable_buildlogs_query().order_by(db_random_func()).get()
|
||||
logger.debug('Archiving: %s', to_archive.uuid)
|
||||
|
||||
length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
|
||||
to_encode = {
|
||||
'start': 0,
|
||||
'total': length,
|
||||
'logs': entries,
|
||||
}
|
||||
|
||||
with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
|
||||
with GzipFile('testarchive', fileobj=tempfile) as zipstream:
|
||||
for chunk in StreamingJSONEncoder().iterencode(to_encode):
|
||||
zipstream.write(chunk)
|
||||
|
||||
tempfile.seek(0)
|
||||
log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
|
||||
file_id=to_archive.uuid)
|
||||
|
||||
to_archive.logs_archived = True
|
||||
to_archive.save()
|
||||
|
||||
build_logs.expire_log_entries(to_archive.uuid)
|
||||
|
||||
except RepositoryBuild.DoesNotExist:
|
||||
logger.debug('No more builds to archive')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
sched.start()
|
|
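The gzip-into-a-SpooledTemporaryFile pattern above keeps small archives entirely in memory and spills larger ones to disk before handing them to the log archive. A standalone, stdlib-only round-trip sketch of the same pattern, with an invented payload:

import gzip
import json
from tempfile import SpooledTemporaryFile

to_encode = {'start': 0, 'total': 1, 'logs': [{'message': 'hello'}]}

with SpooledTemporaryFile(64 * 1024) as tempfile:
    # Write gzip-compressed JSON into the spooled buffer, as the worker does.
    with gzip.GzipFile('archive', mode='wb', fileobj=tempfile) as zipstream:
        zipstream.write(json.dumps(to_encode).encode('utf-8'))

    # Read it back to confirm the round trip.
    tempfile.seek(0)
    with gzip.GzipFile(mode='rb', fileobj=tempfile) as unzipped:
        print(json.loads(unzipped.read().decode('utf-8')) == to_encode)  # True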
@ -1,6 +1,7 @@
|
|||
import logging.config
|
||||
|
||||
logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
|
||||
if __name__ == "__main__":
|
||||
logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
|
||||
|
||||
import logging
|
||||
import argparse
|
||||
|
@ -23,6 +24,7 @@ from collections import defaultdict
|
|||
from requests.exceptions import ConnectionError
|
||||
|
||||
from data import model
|
||||
from data.database import BUILD_PHASE
|
||||
from workers.worker import Worker, WorkerUnhealthyException, JobException
|
||||
from app import userfiles as user_files, build_logs, sentry, dockerfile_build_queue
|
||||
from endpoints.notificationhelper import spawn_notification
|
||||
|
@ -223,6 +225,13 @@ class DockerfileBuildContext(object):
|
|||
if self._pull_credentials:
|
||||
logger.debug('Logging in with pull credentials: %s@%s',
|
||||
self._pull_credentials['username'], self._pull_credentials['registry'])
|
||||
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag, log_data = {
|
||||
'phasestep': 'login',
|
||||
'username': self._pull_credentials['username'],
|
||||
'registry': self._pull_credentials['registry']
|
||||
})
|
||||
|
||||
self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
|
||||
registry=self._pull_credentials['registry'], reauth=True)
|
||||
|
||||
|
@ -233,7 +242,12 @@ class DockerfileBuildContext(object):
|
|||
raise JobException('Missing FROM command in Dockerfile')
|
||||
|
||||
image_and_tag = ':'.join(image_and_tag_tuple)
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag)
|
||||
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag, log_data = {
|
||||
'phasestep': 'pull',
|
||||
'repo_url': image_and_tag
|
||||
})
|
||||
|
||||
pull_status = self._build_cl.pull(image_and_tag, stream=True)
|
||||
|
||||
self.__monitor_completion(pull_status, 'Downloading', self._status, 'pull_completion')
|
||||
|
@ -495,7 +509,7 @@ class DockerfileBuildWorker(Worker):
|
|||
|
||||
job_config = json.loads(repository_build.job_config)
|
||||
|
||||
resource_url = user_files.get_file_url(repository_build.resource_key)
|
||||
resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
|
||||
tag_names = job_config['docker_tags']
|
||||
build_subdir = job_config['build_subdir']
|
||||
repo = job_config['repository']
|
||||
|
@ -545,7 +559,7 @@ class DockerfileBuildWorker(Worker):
|
|||
|
||||
if c_type not in self._mime_processors:
|
||||
log_appender('error', build_logs.PHASE)
|
||||
repository_build.phase = 'error'
|
||||
repository_build.phase = BUILD_PHASE.ERROR
|
||||
repository_build.save()
|
||||
message = 'Unknown mime-type: %s' % c_type
|
||||
log_appender(message, build_logs.ERROR)
|
||||
|
@ -554,7 +568,7 @@ class DockerfileBuildWorker(Worker):
|
|||
|
||||
# Try to build the build directory package from the buildpack.
|
||||
log_appender('unpacking', build_logs.PHASE)
|
||||
repository_build.phase = 'unpacking'
|
||||
repository_build.phase = BUILD_PHASE.UNPACKING
|
||||
repository_build.save()
|
||||
|
||||
build_dir = None
|
||||
|
@ -572,20 +586,20 @@ class DockerfileBuildWorker(Worker):
|
|||
repository_build.uuid, self._cache_size_gb,
|
||||
pull_credentials) as build_ctxt:
|
||||
log_appender('pulling', build_logs.PHASE)
|
||||
repository_build.phase = 'pulling'
|
||||
repository_build.phase = BUILD_PHASE.PULLING
|
||||
repository_build.save()
|
||||
build_ctxt.pull()
|
||||
|
||||
self.extend_processing(RESERVATION_TIME)
|
||||
|
||||
log_appender('building', build_logs.PHASE)
|
||||
repository_build.phase = 'building'
|
||||
repository_build.phase = BUILD_PHASE.BUILDING
|
||||
repository_build.save()
|
||||
built_image = build_ctxt.build(self.extend_processing)
|
||||
|
||||
if not built_image:
|
||||
log_appender('error', build_logs.PHASE)
|
||||
repository_build.phase = 'error'
|
||||
repository_build.phase = BUILD_PHASE.ERROR
|
||||
repository_build.save()
|
||||
|
||||
message = 'Unable to build dockerfile.'
|
||||
|
@ -598,13 +612,13 @@ class DockerfileBuildWorker(Worker):
|
|||
self.extend_processing(RESERVATION_TIME)
|
||||
|
||||
log_appender('pushing', build_logs.PHASE)
|
||||
repository_build.phase = 'pushing'
|
||||
repository_build.phase = BUILD_PHASE.PUSHING
|
||||
repository_build.save()
|
||||
|
||||
build_ctxt.push(built_image)
|
||||
|
||||
log_appender('complete', build_logs.PHASE)
|
||||
repository_build.phase = 'complete'
|
||||
repository_build.phase = BUILD_PHASE.COMPLETE
|
||||
repository_build.save()
|
||||
|
||||
# Spawn a notification that the build has completed.
|
||||
|
@ -641,20 +655,20 @@ class DockerfileBuildWorker(Worker):
|
|||
sentry.client.captureException()
|
||||
log_appender('error', build_logs.PHASE)
|
||||
logger.exception('Exception when processing request.')
|
||||
repository_build.phase = 'error'
|
||||
repository_build.phase = BUILD_PHASE.ERROR
|
||||
repository_build.save()
|
||||
log_appender(str(exc), build_logs.ERROR)
|
||||
|
||||
# Raise the exception to the queue.
|
||||
raise JobException(str(exc))
|
||||
|
||||
if __name__ == "__main__":
|
||||
desc = 'Worker daemon to monitor dockerfile build'
|
||||
parser = argparse.ArgumentParser(description=desc)
|
||||
parser.add_argument('--cachegb', default=20, type=float,
|
||||
help='Maximum cache size in gigabytes.')
|
||||
args = parser.parse_args()
|
||||
|
||||
desc = 'Worker daemon to monitor dockerfile build'
|
||||
parser = argparse.ArgumentParser(description=desc)
|
||||
parser.add_argument('--cachegb', default=20, type=float,
|
||||
help='Maximum cache size in gigabytes.')
|
||||
args = parser.parse_args()
|
||||
|
||||
worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
|
||||
reservation_seconds=RESERVATION_TIME)
|
||||
worker.start(start_status_server_port=8000)
|
||||
worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
|
||||
reservation_seconds=RESERVATION_TIME)
|
||||
worker.start(start_status_server_port=8000)
|
||||
|
|