diff --git a/Dockerfile.buildworker b/Dockerfile.buildworker index 04efe38f0..159c7867c 100644 --- a/Dockerfile.buildworker +++ b/Dockerfile.buildworker @@ -1,10 +1,10 @@ -FROM phusion/baseimage:0.9.11 +FROM phusion/baseimage:0.9.13 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root # Install the dependencies. -RUN apt-get update # 21AUG2014 +RUN apt-get update # 10SEP2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev diff --git a/Dockerfile.web b/Dockerfile.web index e1d253632..605b088b3 100644 --- a/Dockerfile.web +++ b/Dockerfile.web @@ -1,10 +1,10 @@ -FROM phusion/baseimage:0.9.11 +FROM phusion/baseimage:0.9.13 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root # Install the dependencies. -RUN apt-get update # 21AUG2014 +RUN apt-get update # 10SEP2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev @@ -38,6 +38,7 @@ ADD conf/init/gunicorn /etc/service/gunicorn ADD conf/init/nginx /etc/service/nginx ADD conf/init/diffsworker /etc/service/diffsworker ADD conf/init/notificationworker /etc/service/notificationworker +ADD conf/init/buildlogsarchiver /etc/service/buildlogsarchiver # Download any external libs. RUN mkdir static/fonts static/ldn @@ -46,7 +47,7 @@ RUN venv/bin/python -m external_libraries # Run the tests RUN TEST=true venv/bin/python -m unittest discover -VOLUME ["/conf/stack", "/var/log", "/datastorage"] +VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp"] EXPOSE 443 80 diff --git a/app.py b/app.py index 81c59a30c..8f0a57d62 100644 --- a/app.py +++ b/app.py @@ -20,6 +20,7 @@ from util.exceptionlog import Sentry from util.queuemetrics import QueueMetrics from data.billing import Billing from data.buildlogs import BuildLogs +from data.archivedlogs import LogArchive from data.queue import WorkQueue from data.userevent import UserEventsBuilderModule from datetime import datetime @@ -88,7 +89,8 @@ Principal(app, use_sessions=False) login_manager = LoginManager(app) mail = Mail(app) storage = Storage(app) -userfiles = Userfiles(app) +userfiles = Userfiles(app, storage) +log_archive = LogArchive(app, storage) analytics = Analytics(app) billing = Billing(app) sentry = Sentry(app) diff --git a/auth/auth.py b/auth/auth.py index 3616792ad..a81876e54 100644 --- a/auth/auth.py +++ b/auth/auth.py @@ -135,8 +135,15 @@ def process_token(auth): logger.warning('Invalid token format: %s' % auth) abort(401, message='Invalid token format: %(auth)s', issue='invalid-auth-token', auth=auth) - token_vals = {val[0]: val[1] for val in + def safe_get(lst, index, default_value): + try: + return lst[index] + except IndexError: + return default_value + + token_vals = {val[0]: safe_get(val, 1, '') for val in (detail.split('=') for detail in token_details)} + if 'signature' not in token_vals: logger.warning('Token does not contain signature: %s' % auth) abort(401, message='Token does not contain a valid signature: %(auth)s', diff --git a/conf/init/buildlogsarchiver/log/run b/conf/init/buildlogsarchiver/log/run new file mode 100755 index 000000000..c35fb1fb9 --- /dev/null +++ b/conf/init/buildlogsarchiver/log/run @@ -0,0 +1,2 @@ +#!/bin/sh 
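The `process_token` change in `auth/auth.py` above exists because a token segment without an `=` (e.g. a bare `signature`) previously raised an uncaught `IndexError` inside the dict comprehension. A standalone sketch of the hardened parsing — names mirror the hunk above; the surrounding Flask handler is omitted:

```python
def safe_get(lst, index, default_value):
    # Return lst[index], falling back to default_value when out of range.
    try:
        return lst[index]
    except IndexError:
        return default_value


def parse_token_details(token_details):
    # Split each 'key=value' segment; a bare 'key' now maps to the empty
    # string instead of aborting the whole comprehension.
    return {val[0]: safe_get(val, 1, '') for val in
            (detail.split('=') for detail in token_details)}


assert parse_token_details(['signature=abc', 'repository']) == \
       {'signature': 'abc', 'repository': ''}
```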
+exec svlogd /var/log/buildlogsarchiver/ \ No newline at end of file diff --git a/conf/init/buildlogsarchiver/run new file mode 100755 index 000000000..df3d4b05f --- /dev/null +++ b/conf/init/buildlogsarchiver/run @@ -0,0 +1,8 @@ +#! /bin/bash + +echo 'Starting build logs archiver worker' + +cd / +venv/bin/python -m workers.buildlogsarchiver 2>&1 + +echo 'Build logs archiver worker exited' \ No newline at end of file diff --git a/config.py index f797cb36a..f810007e8 100644 --- a/config.py +++ b/config.py @@ -89,10 +89,6 @@ class DefaultConfig(object): # Stripe config BILLING_TYPE = 'FakeStripe' - # Userfiles - USERFILES_TYPE = 'LocalUserfiles' - USERFILES_PATH = 'test/data/registry/userfiles' - # Analytics ANALYTICS_TYPE = 'FakeAnalytics' @@ -172,3 +168,11 @@ class DefaultConfig(object): } DISTRIBUTED_STORAGE_PREFERENCE = ['local_us'] + + # Userfiles + USERFILES_LOCATION = 'local_us' + USERFILES_PATH = 'userfiles/' + + # Build logs archive + LOG_ARCHIVE_LOCATION = 'local_us' + LOG_ARCHIVE_PATH = 'logarchive/' diff --git a/data/archivedlogs.py new file mode 100644 index 000000000..e190b9782 --- /dev/null +++ b/data/archivedlogs.py @@ -0,0 +1,56 @@ +import logging + +from gzip import GzipFile +from flask import send_file, abort +from cStringIO import StringIO + +from data.userfiles import DelegateUserfiles, UserfilesHandlers + + +JSON_MIMETYPE = 'application/json' + + +logger = logging.getLogger(__name__) + + +class LogArchiveHandlers(UserfilesHandlers): + def get(self, file_id): + path = self._files.get_file_id_path(file_id) + try: + with self._storage.stream_read_file(self._locations, path) as gzip_stream: + with GzipFile(fileobj=gzip_stream) as unzipped: + unzipped_buffer = StringIO(unzipped.read()) + return send_file(unzipped_buffer, mimetype=JSON_MIMETYPE) + except IOError: + abort(404) + + +class LogArchive(object): + def __init__(self, app=None, distributed_storage=None): + self.app = app + if app is not None: + self.state = self.init_app(app, distributed_storage) + else: + self.state = None + + def init_app(self, app, distributed_storage): + location = app.config.get('LOG_ARCHIVE_LOCATION') + path = app.config.get('LOG_ARCHIVE_PATH', None) + + handler_name = 'logarchive_handlers' + + log_archive = DelegateUserfiles(app, distributed_storage, location, path, handler_name) + + app.add_url_rule('/logarchive/<file_id>', + view_func=LogArchiveHandlers.as_view(handler_name, + distributed_storage=distributed_storage, + location=location, + files=log_archive)) + + # register extension with app + app.extensions = getattr(app, 'extensions', {}) + app.extensions['log_archive'] = log_archive + return log_archive + + def __getattr__(self, name): + return getattr(self.state, name, None) diff --git a/data/billing.py index 8c604aac2..e1510c054 100644 --- a/data/billing.py +++ b/data/billing.py @@ -3,7 +3,7 @@ import stripe from datetime import datetime, timedelta from calendar import timegm -from util.collections import AttrDict +from util.morecollections import AttrDict PLANS = [ # Deprecated Plans diff --git a/data/buildlogs.py index 2ccd03899..9128390af 100644 --- a/data/buildlogs.py +++ b/data/buildlogs.py @@ -2,6 +2,11 @@ import redis import json from util.dynamic import import_class +from datetime import timedelta + + +ONE_DAY = timedelta(days=1) + class BuildStatusRetrievalError(Exception): pass @@ -25,7 +30,7 @@ class RedisBuildLogs(object): """ return self._redis.rpush(self._logs_key(build_id),
json.dumps(log_obj)) - def append_log_message(self, build_id, log_message, log_type=None): + def append_log_message(self, build_id, log_message, log_type=None, log_data=None): """ Wraps the message in an envelope and push it to the end of the log entry list and returns the index at which it was inserted. @@ -37,6 +42,9 @@ class RedisBuildLogs(object): if log_type: log_obj['type'] = log_type + if log_data: + log_obj['data'] = log_data + return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1 def get_log_entries(self, build_id, start_index): @@ -51,6 +59,13 @@ class RedisBuildLogs(object): except redis.ConnectionError: raise BuildStatusRetrievalError('Cannot retrieve build logs') + def expire_log_entries(self, build_id): + """ + Sets the log entry to expire in 1 day. + """ + self._redis.expire(self._logs_key(build_id), ONE_DAY) + + @staticmethod def _status_key(build_id): return 'builds/%s/status' % build_id @@ -106,4 +121,4 @@ class BuildLogs(object): return buildlogs def __getattr__(self, name): - return getattr(self.state, name, None) \ No newline at end of file + return getattr(self.state, name, None) diff --git a/data/database.py b/data/database.py index 4731a06bb..b94badd21 100644 --- a/data/database.py +++ b/data/database.py @@ -8,7 +8,7 @@ from peewee import * from data.read_slave import ReadSlaveModel from sqlalchemy.engine.url import make_url from urlparse import urlparse - +from util.names import urn_generator logger = logging.getLogger(__name__) @@ -21,8 +21,24 @@ SCHEME_DRIVERS = { 'postgresql+psycopg2': PostgresqlDatabase, } +SCHEME_RANDOM_FUNCTION = { + 'mysql': fn.Rand, + 'mysql+pymysql': fn.Rand, + 'sqlite': fn.Random, + 'postgresql': fn.Random, + 'postgresql+psycopg2': fn.Random, +} + +class CallableProxy(Proxy): + def __call__(self, *args, **kwargs): + if self.obj is None: + raise AttributeError('Cannot use uninitialized Proxy.') + return self.obj(*args, **kwargs) + db = Proxy() read_slave = Proxy() +db_random_func = CallableProxy() + def _db_from_url(url, db_kwargs): parsed_url = make_url(url) @@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs): return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs) + def configure(config_object): db_kwargs = dict(config_object['DB_CONNECTION_ARGS']) write_db_uri = config_object['DB_URI'] db.initialize(_db_from_url(write_db_uri, db_kwargs)) + parsed_write_uri = make_url(write_db_uri) + db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername]) + read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None) if read_slave_uri is not None: read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs)) @@ -112,6 +132,15 @@ class TeamMember(BaseModel): ) +class TeamMemberInvite(BaseModel): + # Note: Either user OR email will be filled in, but not both. 
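`CallableProxy` above lets `db_random_func` be imported at module load time and only bound to a driver-specific SQL function (`fn.Rand` for MySQL, `fn.Random` elsewhere) once `configure` runs. A self-contained sketch of the pattern, using a minimal stand-in for peewee's `Proxy`:

```python
class Proxy(object):
    # Minimal stand-in for peewee.Proxy: holds an object assigned later.
    def __init__(self):
        self.obj = None

    def initialize(self, obj):
        self.obj = obj


class CallableProxy(Proxy):
    # As in the diff: forward calls to the late-bound object.
    def __call__(self, *args, **kwargs):
        if self.obj is None:
            raise AttributeError('Cannot use uninitialized Proxy.')
        return self.obj(*args, **kwargs)


db_random_func = CallableProxy()
db_random_func.initialize(lambda: 'RANDOM()')  # configure() binds the real fn
assert db_random_func() == 'RANDOM()'
```

Callers can then write e.g. `query.order_by(db_random_func())` without knowing which database scheme was configured.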
+ user = ForeignKeyField(User, index=True, null=True) + email = CharField(null=True) + team = ForeignKeyField(Team, index=True) + inviter = ForeignKeyField(User, related_name='inviter') + invite_token = CharField(default=urn_generator(['teaminvite'])) + + class LoginService(BaseModel): name = CharField(unique=True, index=True) @@ -289,6 +318,16 @@ class RepositoryTag(BaseModel): ) +class BUILD_PHASE(object): + """ Build phases enum """ + ERROR = 'error' + UNPACKING = 'unpacking' + PULLING = 'pulling' + BUILDING = 'building' + PUSHING = 'pushing' + COMPLETE = 'complete' + + class RepositoryBuild(BaseModel): uuid = CharField(default=uuid_generator, index=True) repository = ForeignKeyField(Repository, index=True) @@ -300,6 +339,7 @@ class RepositoryBuild(BaseModel): display_name = CharField() trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True) pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot') + logs_archived = BooleanField(default=False) class QueueItem(BaseModel): @@ -410,4 +450,4 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission, OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind, Notification, ImageStorageLocation, ImageStoragePlacement, ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification, - RepositoryAuthorizedEmail] + RepositoryAuthorizedEmail, TeamMemberInvite] diff --git a/data/migrations/env.py b/data/migrations/env.py index 863e3d98f..d64cf4ee7 100644 --- a/data/migrations/env.py +++ b/data/migrations/env.py @@ -8,7 +8,7 @@ from peewee import SqliteDatabase from data.database import all_models, db from app import app from data.model.sqlalchemybridge import gen_sqlalchemy_metadata -from util.collections import AttrDict +from util.morecollections import AttrDict # this is the Alembic Config object, which provides # access to the values within the .ini file in use. diff --git a/data/migrations/versions/34fd69f63809_add_support_for_build_log_migration.py b/data/migrations/versions/34fd69f63809_add_support_for_build_log_migration.py new file mode 100644 index 000000000..a731d0158 --- /dev/null +++ b/data/migrations/versions/34fd69f63809_add_support_for_build_log_migration.py @@ -0,0 +1,26 @@ +"""Add support for build log migration. + +Revision ID: 34fd69f63809 +Revises: 4a0c94399f38 +Create Date: 2014-09-12 11:50:09.217777 + +""" + +# revision identifiers, used by Alembic. +revision = '34fd69f63809' +down_revision = '4a0c94399f38' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.add_column('repositorybuild', sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false())) + ### end Alembic commands ### + + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.drop_column('repositorybuild', 'logs_archived') + ### end Alembic commands ### diff --git a/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py b/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py new file mode 100644 index 000000000..2e4242d8a --- /dev/null +++ b/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py @@ -0,0 +1,81 @@ +"""Email invites for joining a team. + +Revision ID: 51d04d0e7e6f +Revises: 34fd69f63809 +Create Date: 2014-09-15 23:51:35.478232 + +""" + +# revision identifiers, used by Alembic. 
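`TeamMemberInvite.invite_token` defaults to `urn_generator(['teaminvite'])`, and `endpoints/api/user.py` later parses such codes with `parse_single_urn`; neither helper from `util.names` appears in this diff, so the following is a hypothetical reconstruction of the shape their usage implies (a factory returning a per-row token callable, and its inverse):

```python
import uuid


def urn_generator(namespace_portions):
    # Hypothetical: build a callable producing tokens like
    # 'urn:teaminvite:<random>'; peewee invokes it once per new row.
    prefix = 'urn:%s:' % ':'.join(namespace_portions)

    def generate():
        return prefix + uuid.uuid4().hex
    return generate


def parse_single_urn(urn):
    # Hypothetical inverse: ('teaminvite', payload), or None if malformed.
    parts = urn.split(':', 2)
    if len(parts) != 3 or parts[0] != 'urn':
        return None
    return (parts[1], parts[2])


token = urn_generator(['teaminvite'])()
assert parse_single_urn(token)[0] == 'teaminvite'
```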
+revision = '51d04d0e7e6f' +down_revision = '34fd69f63809' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.create_table('teammemberinvite', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('email', sa.String(length=255), nullable=True), + sa.Column('team_id', sa.Integer(), nullable=False), + sa.Column('inviter_id', sa.Integer(), nullable=False), + sa.Column('invite_token', sa.String(length=255), nullable=False), + sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['team_id'], ['team.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False) + op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False) + op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False) + ### end Alembic commands ### + + # Manually add the new logentrykind types + op.bulk_insert(tables.logentrykind, + [ + {'id':42, 'name':'org_invite_team_member'}, + {'id':43, 'name':'org_team_member_invite_accepted'}, + {'id':44, 'name':'org_team_member_invite_declined'}, + {'id':45, 'name':'org_delete_team_member_invite'}, + ]) + + op.bulk_insert(tables.notificationkind, + [ + {'id':10, 'name':'org_team_invite'}, + ]) + +def downgrade(tables): + ### commands auto generated by Alembic - please adjust! ### + op.execute( + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member'))) + ) + + op.execute( + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted'))) + ) + + op.execute( + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined'))) + ) + + op.execute( + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite'))) + ) + + op.execute( + (tables.notificationkind.delete() + .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite'))) + ) + + op.drop_index('teammemberinvite_user_id', table_name='teammemberinvite') + op.drop_index('teammemberinvite_team_id', table_name='teammemberinvite') + op.drop_index('teammemberinvite_inviter_id', table_name='teammemberinvite') + op.drop_table('teammemberinvite') + ### end Alembic commands ### diff --git a/data/model/legacy.py b/data/model/legacy.py index bc49a585e..667c2fa97 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -12,6 +12,7 @@ from util.backoff import exponential_backoff EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1) +PRESUMED_DEAD_BUILD_AGE = timedelta(days=15) logger = logging.getLogger(__name__) @@ -48,6 +49,9 @@ class InvalidRobotException(DataModelException): class InvalidTeamException(DataModelException): pass +class InvalidTeamMemberException(DataModelException): + pass + class InvalidPasswordException(DataModelException): pass @@ -73,6 +77,10 @@ class TooManyUsersException(DataModelException): pass +class UserAlreadyInTeam(DataModelException): + pass + + class TooManyLoginAttemptsException(Exception): def __init__(self, message, retry_after): super(TooManyLoginAttemptsException, self).__init__(message) @@ -332,12 +340,42 @@ def remove_team(org_name, team_name, removed_by_username): 
team.delete_instance(recursive=True, delete_nullable=True) +def add_or_invite_to_team(inviter, team, user=None, email=None): + # If the user is a member of the organization, then we simply add the + # user directly to the team. Otherwise, an invite is created for the user/email. + # We return None if the user was directly added and the invite object if the user was invited. + requires_invite = True + if user: + orgname = team.organization.username + + # If the user is part of the organization (or a robot), then no invite is required. + if user.robot: + requires_invite = False + if not user.username.startswith(orgname + '+'): + raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' + + 'as it is not a member of the organization') + else: + Org = User.alias() + found = User.select(User.username) + found = found.where(User.username == user.username).join(TeamMember).join(Team) + found = found.join(Org, on=(Org.username == orgname)).limit(1) + requires_invite = not any(found) + + # If we have a valid user and no invite is required, simply add the user to the team. + if user and not requires_invite: + add_user_to_team(user, team) + return None + + email_address = email if not user else None + return TeamMemberInvite.create(user=user, email=email_address, team=team, inviter=inviter) + + def add_user_to_team(user, team): try: return TeamMember.create(user=user, team=team) except Exception: - raise DataModelException('Unable to add user \'%s\' to team: \'%s\'' % - (user.username, team.name)) + raise UserAlreadyInTeam('User \'%s\' is already a member of team \'%s\'' % + (user.username, team.name)) def remove_user_from_team(org_name, team_name, username, removed_by_username): @@ -512,6 +550,13 @@ def get_user(username): return None +def get_user_or_org(username): + try: + return User.get(User.username == username, User.robot == False) + except User.DoesNotExist: + return None + + def get_user_or_org_by_customer_id(customer_id): try: return User.get(User.stripe_id == customer_id) @@ -635,6 +680,10 @@ def get_organization_team_members(teamid): query = joined.where(Team.id == teamid) return query +def get_organization_team_member_invites(teamid): + joined = TeamMemberInvite.select().join(Team).join(User) + query = joined.where(Team.id == teamid) + return query def get_organization_member_set(orgname): Org = User.alias() @@ -1824,6 +1873,32 @@ def delete_notifications_by_kind(target, kind_name): Notification.delete().where(Notification.target == target, Notification.kind == kind_ref).execute() +def delete_matching_notifications(target, kind_name, **kwargs): + kind_ref = NotificationKind.get(name=kind_name) + + # Load all notifications for the user with the given kind. + notifications = Notification.select().where( + Notification.target == target, + Notification.kind == kind_ref) + + # For each, match the metadata to the specified values. 
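`delete_matching_notifications` (continued below) treats its `kwargs` as a subset filter over each notification's JSON metadata; the invite accept/decline paths use it to clear `org_team_invite` notifications carrying a given invite `code`. The matching rule in isolation:

```python
import json


def metadata_matches(metadata_json, **kwargs):
    # Every keyword pair must be present and equal in the stored metadata;
    # unparseable metadata never matches (mirroring the guarded load below).
    try:
        metadata = json.loads(metadata_json)
    except ValueError:
        return False
    return all(key in metadata and metadata[key] == value
               for key, value in kwargs.items())


assert metadata_matches('{"code": "xyz", "org": "buynlarge"}', code='xyz')
assert not metadata_matches('{"code": "abc"}', code='xyz')
assert not metadata_matches('not json', code='xyz')
```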
+ for notification in notifications: + matches = True + try: + metadata = json.loads(notification.metadata_json) + except: + continue + + for (key, value) in kwargs.iteritems(): + if not key in metadata or metadata[key] != value: + matches = False + break + + if not matches: + continue + + notification.delete_instance() + def get_active_users(): return User.select().where(User.organization == False, User.robot == False) @@ -1831,6 +1906,16 @@ def get_active_users(): def get_active_user_count(): return get_active_users().count() + +def detach_external_login(user, service_name): + try: + service = LoginService.get(name = service_name) + except LoginService.DoesNotExist: + return + + FederatedLogin.delete().where(FederatedLogin.user == user, + FederatedLogin.service == service).execute() + def delete_user(user): user.delete_instance(recursive=True, delete_nullable=True) @@ -1879,3 +1964,72 @@ def confirm_email_authorization_for_repo(code): found.save() return found + + +def delete_team_email_invite(team, email): + found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team) + found.delete_instance() + +def delete_team_user_invite(team, user): + try: + found = TeamMemberInvite.get(TeamMemberInvite.user == user, TeamMemberInvite.team == team) + except TeamMemberInvite.DoesNotExist: + return False + + found.delete_instance() + return True + +def lookup_team_invites(user): + return TeamMemberInvite.select().where(TeamMemberInvite.user == user) + +def lookup_team_invite(code, user=None): + # Lookup the invite code. + try: + found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code) + except TeamMemberInvite.DoesNotExist: + raise DataModelException('Invalid confirmation code.') + + if user and found.user != user: + raise DataModelException('Invalid confirmation code.') + + return found + +def delete_team_invite(code, user=None): + found = lookup_team_invite(code, user) + + team = found.team + inviter = found.inviter + + found.delete_instance() + + return (team, inviter) + + +def confirm_team_invite(code, user): + found = lookup_team_invite(code) + + # If the invite is for a specific user, we have to confirm that here. + if found.user is not None and found.user != user: + message = """This invite is intended for user "%s". + Please login to that account and try again.""" % found.user.username + raise DataModelException(message) + + # Add the user to the team. + try: + add_user_to_team(user, found.team) + except UserAlreadyInTeam: + # Ignore. + pass + + # Delete the invite and return the team. 
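`add_or_invite_to_team` above returns `None` for a direct add and the new `TeamMemberInvite` row otherwise; `handle_addinvite_team` in `endpoints/api/team.py` branches on exactly that. The decision reduces to a small predicate — a condensed sketch, assuming `user.robot` and organization membership behave as in the model code:

```python
from collections import namedtuple

FakeUser = namedtuple('FakeUser', ['username', 'robot'])


def requires_invite(user, orgname, is_org_member):
    # Email-only targets (no user record) always get an invite.
    if user is None:
        return True
    # Robots must belong to the organization's namespace and are added
    # directly; foreign robots are rejected outright.
    if user.robot:
        if not user.username.startswith(orgname + '+'):
            raise ValueError('Robot is not a member of the organization')
        return False
    # Existing organization members join directly; outsiders are invited.
    return not is_org_member


assert requires_invite(None, 'buynlarge', False)
assert not requires_invite(FakeUser('buynlarge+builder', True), 'buynlarge', False)
assert requires_invite(FakeUser('outsider', False), 'buynlarge', False)
```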
+ team = found.team + inviter = found.inviter + found.delete_instance() + return (team, inviter) + +def archivable_buildlogs_query(): + presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE + return (RepositoryBuild.select() + .where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) | + (RepositoryBuild.phase == BUILD_PHASE.ERROR) | + (RepositoryBuild.started < presumed_dead_date), RepositoryBuild.logs_archived == False)) diff --git a/data/model/oauth.py b/data/model/oauth.py index 309e2122a..51bfc053e 100644 --- a/data/model/oauth.py +++ b/data/model/oauth.py @@ -46,7 +46,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider): def validate_redirect_uri(self, client_id, redirect_uri): try: app = OAuthApplication.get(client_id=client_id) - if app.redirect_uri and redirect_uri.startswith(app.redirect_uri): + if app.redirect_uri and redirect_uri and redirect_uri.startswith(app.redirect_uri): return True return False except OAuthApplication.DoesNotExist: diff --git a/data/userfiles.py b/data/userfiles.py index 79fbcb507..f4b786df5 100644 --- a/data/userfiles.py +++ b/data/userfiles.py @@ -1,110 +1,35 @@ -import boto import os import logging -import hashlib import magic -from boto.s3.key import Key from uuid import uuid4 from flask import url_for, request, send_file, make_response, abort from flask.views import View +from _pyio import BufferedReader logger = logging.getLogger(__name__) -class FakeUserfiles(object): - def prepare_for_drop(self, mime_type): - return ('http://fake/url', uuid4()) - - def store_file(self, file_like_obj, content_type): - raise NotImplementedError() - - def get_file_url(self, file_id, expires_in=300): - return ('http://fake/url') - - def get_file_checksum(self, file_id): - return 'abcdefg' - - -class S3FileWriteException(Exception): - pass - - -class S3Userfiles(object): - def __init__(self, path, s3_access_key, s3_secret_key, bucket_name): - self._initialized = False - self._bucket_name = bucket_name - self._access_key = s3_access_key - self._secret_key = s3_secret_key - self._prefix = path - self._s3_conn = None - self._bucket = None - - def _initialize_s3(self): - if not self._initialized: - self._s3_conn = boto.connect_s3(self._access_key, self._secret_key) - self._bucket = self._s3_conn.get_bucket(self._bucket_name) - self._initialized = True - - def prepare_for_drop(self, mime_type): - """ Returns a signed URL to upload a file to our bucket. 
""" - self._initialize_s3() - logger.debug('Requested upload url with content type: %s' % mime_type) - file_id = str(uuid4()) - full_key = os.path.join(self._prefix, file_id) - k = Key(self._bucket, full_key) - url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, - encrypt_key=True) - return (url, file_id) - - def store_file(self, file_like_obj, content_type): - self._initialize_s3() - file_id = str(uuid4()) - full_key = os.path.join(self._prefix, file_id) - k = Key(self._bucket, full_key) - logger.debug('Setting s3 content type to: %s' % content_type) - k.set_metadata('Content-Type', content_type) - bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True, - rewind=True) - - if bytes_written == 0: - raise S3FileWriteException('Unable to write file to S3') - - return file_id - - def get_file_url(self, file_id, expires_in=300, mime_type=None): - self._initialize_s3() - full_key = os.path.join(self._prefix, file_id) - k = Key(self._bucket, full_key) - headers = None - if mime_type: - headers={'Content-Type': mime_type} - - return k.generate_url(expires_in, headers=headers) - - def get_file_checksum(self, file_id): - self._initialize_s3() - full_key = os.path.join(self._prefix, file_id) - k = self._bucket.lookup(full_key) - return k.etag[1:-1][:7] - - class UserfilesHandlers(View): methods = ['GET', 'PUT'] - def __init__(self, local_userfiles): - self._userfiles = local_userfiles + def __init__(self, distributed_storage, location, files): + self._storage = distributed_storage + self._files = files + self._locations = {location} self._magic = magic.Magic(mime=True) def get(self, file_id): - path = self._userfiles.file_path(file_id) - if not os.path.exists(path): + path = self._files.get_file_id_path(file_id) + try: + file_stream = self._storage.stream_read_file(self._locations, path) + buffered = BufferedReader(file_stream) + file_header_bytes = buffered.peek(1024) + return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes)) + except IOError: abort(404) - logger.debug('Sending path: %s' % path) - return send_file(path, mimetype=self._magic.from_file(path)) - def put(self, file_id): input_stream = request.stream if request.headers.get('transfer-encoding') == 'chunked': @@ -112,7 +37,10 @@ class UserfilesHandlers(View): # encoding (Gunicorn) input_stream = request.environ['wsgi.input'] - self._userfiles.store_stream(input_stream, file_id) + c_type = request.headers.get('Content-Type', None) + + path = self._files.get_file_id_path(file_id) + self._storage.stream_write(self._locations, path, input_stream, c_type) return make_response('Okay') @@ -123,99 +51,82 @@ class UserfilesHandlers(View): return self.put(file_id) -class LocalUserfiles(object): - def __init__(self, app, path): - self._root_path = path - self._buffer_size = 64 * 1024 # 64 KB +class DelegateUserfiles(object): + def __init__(self, app, distributed_storage, location, path, handler_name): self._app = app + self._storage = distributed_storage + self._locations = {location} + self._prefix = path + self._handler_name = handler_name def _build_url_adapter(self): return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'], script_name=self._app.config['APPLICATION_ROOT'] or '/', url_scheme=self._app.config['PREFERRED_URL_SCHEME']) - def prepare_for_drop(self, mime_type): + def get_file_id_path(self, file_id): + return os.path.join(self._prefix, file_id) + + def prepare_for_drop(self, mime_type, requires_cors=True): + """ Returns a signed URL to upload a file to our bucket. 
""" + logger.debug('Requested upload url with content type: %s' % mime_type) file_id = str(uuid4()) - with self._app.app_context() as ctx: - ctx.url_adapter = self._build_url_adapter() - return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id) + path = self.get_file_id_path(file_id) + url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors) - def file_path(self, file_id): - if '..' in file_id or file_id.startswith('/'): - raise RuntimeError('Invalid Filename') - return os.path.join(self._root_path, file_id) + if url is None: + with self._app.app_context() as ctx: + ctx.url_adapter = self._build_url_adapter() + return (url_for(self._handler_name, file_id=file_id, _external=True), file_id) - def store_stream(self, stream, file_id): - path = self.file_path(file_id) - dirname = os.path.dirname(path) - if not os.path.exists(dirname): - os.makedirs(dirname) + return (url, file_id) - with open(path, 'w') as to_write: - while True: - try: - buf = stream.read(self._buffer_size) - if not buf: - break - to_write.write(buf) - except IOError: - break + def store_file(self, file_like_obj, content_type, content_encoding=None, file_id=None): + if file_id is None: + file_id = str(uuid4()) - def store_file(self, file_like_obj, content_type): - file_id = str(uuid4()) - - # Rewind the file to match what s3 does - file_like_obj.seek(0, os.SEEK_SET) - - self.store_stream(file_like_obj, file_id) + path = self.get_file_id_path(file_id) + self._storage.stream_write(self._locations, path, file_like_obj, content_type, + content_encoding) return file_id - def get_file_url(self, file_id, expires_in=300): - with self._app.app_context() as ctx: - ctx.url_adapter = self._build_url_adapter() - return url_for('userfiles_handlers', file_id=file_id, _external=True) + def get_file_url(self, file_id, expires_in=300, requires_cors=False): + path = self.get_file_id_path(file_id) + url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors) + + if url is None: + with self._app.app_context() as ctx: + ctx.url_adapter = self._build_url_adapter() + return url_for(self._handler_name, file_id=file_id, _external=True) + + return url def get_file_checksum(self, file_id): - path = self.file_path(file_id) - sha_hash = hashlib.sha256() - with open(path, 'r') as to_hash: - while True: - buf = to_hash.read(self._buffer_size) - if not buf: - break - sha_hash.update(buf) - return sha_hash.hexdigest()[:7] + path = self.get_file_id_path(file_id) + return self._storage.get_checksum(self._locations, path) class Userfiles(object): - def __init__(self, app=None): + def __init__(self, app=None, distributed_storage=None): self.app = app if app is not None: - self.state = self.init_app(app) + self.state = self.init_app(app, distributed_storage) else: self.state = None - def init_app(self, app): - storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles') - path = app.config.get('USERFILES_PATH', '') + def init_app(self, app, distributed_storage): + location = app.config.get('USERFILES_LOCATION') + path = app.config.get('USERFILES_PATH', None) - if storage_type == 'LocalUserfiles': - userfiles = LocalUserfiles(app, path) - app.add_url_rule('/userfiles/', - view_func=UserfilesHandlers.as_view('userfiles_handlers', - local_userfiles=userfiles)) + handler_name = 'userfiles_handlers' - elif storage_type == 'S3Userfiles': - access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '') - secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '') - bucket = 
app.config.get('USERFILES_S3_BUCKET', '') - userfiles = S3Userfiles(path, access_key, secret_key, bucket) + userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name) - elif storage_type == 'FakeUserfiles': - userfiles = FakeUserfiles() - - else: - raise RuntimeError('Unknown userfiles type: %s' % storage_type) + app.add_url_rule('/userfiles/', + view_func=UserfilesHandlers.as_view(handler_name, + distributed_storage=distributed_storage, + location=location, + files=userfiles)) # register extension with app app.extensions = getattr(app, 'extensions', {}) diff --git a/emails/teaminvite.html b/emails/teaminvite.html new file mode 100644 index 000000000..948899d42 --- /dev/null +++ b/emails/teaminvite.html @@ -0,0 +1,17 @@ +{% extends "base.html" %} + +{% block content %} + +

+Invitation to join team {{ teamname }}
+
+{{ inviter | user_reference }} has invited you to join team {{ teamname }} under organization {{ organization | user_reference }}.
+
+To join the team, please click the following link:
+{{ app_link('confirminvite?code=' + token) }}
+
+If you were not expecting this invitation, you can ignore this email. + +{% endblock %} diff --git a/endpoints/api/build.py b/endpoints/api/build.py index 21d554069..d792234dd 100644 --- a/endpoints/api/build.py +++ b/endpoints/api/build.py @@ -1,9 +1,9 @@ import logging import json -from flask import request +from flask import request, redirect -from app import app, userfiles as user_files, build_logs +from app import app, userfiles as user_files, build_logs, log_archive from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource, require_repo_read, require_repo_write, validate_json_request, ApiResource, internal_only, format_date, api, Unauthorized, NotFound) @@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False): } if can_write: - resp['archive_url'] = user_files.get_file_url(build_obj.resource_key) + resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True) return resp @@ -215,6 +215,10 @@ class RepositoryBuildLogs(RepositoryParamResource): build = model.get_repository_build(namespace, repository, build_uuid) + # If the logs have been archived, just redirect to the completed archive + if build.logs_archived: + return redirect(log_archive.get_file_url(build.uuid)) + start = int(request.args.get('start', 0)) try: @@ -257,7 +261,7 @@ class FileDropResource(ApiResource): def post(self): """ Request a URL to which a file may be uploaded. """ mime_type = request.get_json()['mimeType'] - (url, file_id) = user_files.prepare_for_drop(mime_type) + (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True) return { 'url': url, 'file_id': str(file_id), diff --git a/endpoints/api/team.py b/endpoints/api/team.py index 0631cc028..a1c687af9 100644 --- a/endpoints/api/team.py +++ b/endpoints/api/team.py @@ -1,12 +1,48 @@ from flask import request from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error, - log_action, Unauthorized, NotFound, internal_only, require_scope) + log_action, Unauthorized, NotFound, internal_only, require_scope, + query_param, truthy_bool, parse_args, require_user_admin) from auth.permissions import AdministerOrganizationPermission, ViewTeamPermission from auth.auth_context import get_authenticated_user from auth import scopes from data import model +from util.useremails import send_org_invite_email +from util.gravatar import compute_hash +def try_accept_invite(code, user): + (team, inviter) = model.confirm_team_invite(code, user) + + model.delete_matching_notifications(user, 'org_team_invite', code=code) + + orgname = team.organization.username + log_action('org_team_member_invite_accepted', orgname, { + 'member': user.username, + 'team': team.name, + 'inviter': inviter.username + }) + + return team + + +def handle_addinvite_team(inviter, team, user=None, email=None): + invite = model.add_or_invite_to_team(inviter, team, user, email) + if not invite: + # User was added to the team directly. 
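Both `prepare_for_drop(mime_type, requires_cors=True)` and `get_file_url(..., requires_cors=True)` above rely on the same `DelegateUserfiles` convention: ask the distributed storage engine for a signed direct URL first, and fall back to the app's own `/userfiles/` (or `/logarchive/`) handler route when the engine returns `None` (as local storage does). A schematic of the upload half, with `storage_engine` and `app_route_url` standing in for the storage driver and Flask's `url_for`:

```python
import os
from uuid import uuid4


def prepare_for_drop(storage_engine, app_route_url, prefix, mime_type,
                     requires_cors=True):
    # Returns (upload_url, file_id): a signed direct-upload URL when the
    # backend supports one, else the app's own PUT handler route.
    file_id = str(uuid4())
    path = os.path.join(prefix, file_id)
    url = storage_engine.get_direct_upload_url(path, mime_type, requires_cors)
    if url is None:
        url = app_route_url(file_id)  # e.g. url_for('userfiles_handlers', ...)
    return (url, file_id)
```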
+ return + + orgname = team.organization.username + if user: + model.create_notification('org_team_invite', user, metadata = { + 'code': invite.invite_token, + 'inviter': inviter.username, + 'org': orgname, + 'team': team.name + }) + + send_org_invite_email(user.username if user else email, user.email if user else email, + orgname, team.name, inviter.username, invite.invite_token) + return invite def team_view(orgname, team): view_permission = ViewTeamPermission(orgname, team.name) @@ -19,14 +55,28 @@ def team_view(orgname, team): 'role': role } -def member_view(member): +def member_view(member, invited=False): return { 'name': member.username, 'kind': 'user', 'is_robot': member.robot, + 'gravatar': compute_hash(member.email) if not member.robot else None, + 'invited': invited, } +def invite_view(invite): + if invite.user: + return member_view(invite.user, invited=True) + else: + return { + 'email': invite.email, + 'kind': 'invite', + 'gravatar': compute_hash(invite.email), + 'invited': True + } + + @resource('/v1/organization//team/') @internal_only class OrganizationTeam(ApiResource): @@ -114,8 +164,10 @@ class OrganizationTeam(ApiResource): @internal_only class TeamMemberList(ApiResource): """ Resource for managing the list of members for a team. """ + @parse_args + @query_param('includePending', 'Whether to include pending members', type=truthy_bool, default=False) @nickname('getOrganizationTeamMembers') - def get(self, orgname, teamname): + def get(self, args, orgname, teamname): """ Retrieve the list of members for the specified team. """ view_permission = ViewTeamPermission(orgname, teamname) edit_permission = AdministerOrganizationPermission(orgname) @@ -128,11 +180,18 @@ class TeamMemberList(ApiResource): raise NotFound() members = model.get_organization_team_members(team.id) - return { - 'members': {m.username : member_view(m) for m in members}, + invites = [] + + if args['includePending'] and edit_permission.can(): + invites = model.get_organization_team_member_invites(team.id) + + data = { + 'members': [member_view(m) for m in members] + [invite_view(i) for i in invites], 'can_edit': edit_permission.can() } + return data + raise Unauthorized() @@ -142,7 +201,7 @@ class TeamMember(ApiResource): @require_scope(scopes.ORG_ADMIN) @nickname('updateOrganizationTeamMember') def put(self, orgname, teamname, membername): - """ Add a member to an existing team. """ + """ Adds or invites a member to an existing team. """ permission = AdministerOrganizationPermission(orgname) if permission.can(): team = None @@ -159,23 +218,149 @@ class TeamMember(ApiResource): if not user: raise request_error(message='Unknown user') - # Add the user to the team. - model.add_user_to_team(user, team) - log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname}) - return member_view(user) + # Add or invite the user to the team. + inviter = get_authenticated_user() + invite = handle_addinvite_team(inviter, team, user=user) + if not invite: + log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname}) + return member_view(user, invited=False) + + # User was invited. + log_action('org_invite_team_member', orgname, { + 'user': membername, + 'member': membername, + 'team': teamname + }) + return member_view(user, invited=True) raise Unauthorized() @require_scope(scopes.ORG_ADMIN) @nickname('deleteOrganizationTeamMember') def delete(self, orgname, teamname, membername): - """ Delete an existing member of a team. """ + """ Delete a member of a team. 
If the user is merely invited to join + the team, then the invite is removed instead. + """ permission = AdministerOrganizationPermission(orgname) if permission.can(): # Remove the user from the team. invoking_user = get_authenticated_user().username + + # Find the team. + try: + team = model.get_organization_team(orgname, teamname) + except model.InvalidTeamException: + raise NotFound() + + # Find the member. + member = model.get_user(membername) + if not member: + raise NotFound() + + # First attempt to delete an invite for the user to this team. If none found, + # then we try to remove the user directly. + if model.delete_team_user_invite(team, member): + log_action('org_delete_team_member_invite', orgname, { + 'user': membername, + 'team': teamname, + 'member': membername + }) + return 'Deleted', 204 + model.remove_user_from_team(orgname, teamname, membername, invoking_user) log_action('org_remove_team_member', orgname, {'member': membername, 'team': teamname}) return 'Deleted', 204 raise Unauthorized() + + +@resource('/v1/organization/<orgname>/team/<teamname>/invite/<email>') +class InviteTeamMember(ApiResource): + """ Resource for inviting a team member via email address. """ + @require_scope(scopes.ORG_ADMIN) + @nickname('inviteTeamMemberEmail') + def put(self, orgname, teamname, email): + """ Invites an email address to an existing team. """ + permission = AdministerOrganizationPermission(orgname) + if permission.can(): + team = None + + # Find the team. + try: + team = model.get_organization_team(orgname, teamname) + except model.InvalidTeamException: + raise NotFound() + + # Invite the email to the team. + inviter = get_authenticated_user() + invite = handle_addinvite_team(inviter, team, email=email) + log_action('org_invite_team_member', orgname, { + 'email': email, + 'team': teamname, + 'member': email + }) + return invite_view(invite) + + raise Unauthorized() + + @require_scope(scopes.ORG_ADMIN) + @nickname('deleteTeamMemberEmailInvite') + def delete(self, orgname, teamname, email): + """ Delete an invite of an email address to join a team. """ + permission = AdministerOrganizationPermission(orgname) + if permission.can(): + team = None + + # Find the team. + try: + team = model.get_organization_team(orgname, teamname) + except model.InvalidTeamException: + raise NotFound() + + # Delete the invite. + model.delete_team_email_invite(team, email) + log_action('org_delete_team_member_invite', orgname, { + 'email': email, + 'team': teamname, + 'member': email + }) + return 'Deleted', 204 + + raise Unauthorized() + + +@resource('/v1/teaminvite/<code>') +@internal_only +class TeamMemberInvite(ApiResource): + """ Resource for managing invites to join a team. """ + @require_user_admin + @nickname('acceptOrganizationTeamInvite') + def put(self, code): + """ Accepts an invite to join a team in an organization. """ + # Accept the invite for the current user. + team = try_accept_invite(code, get_authenticated_user()) + if not team: + raise NotFound() + + orgname = team.organization.username + return { + 'org': orgname, + 'team': team.name + } + + @nickname('declineOrganizationTeamInvite') + @require_user_admin + def delete(self, code): + """ Decline an invite to join a team. 
""" + (team, inviter) = model.delete_team_invite(code, get_authenticated_user()) + + model.delete_matching_notifications(get_authenticated_user(), 'org_team_invite', code=code) + + orgname = team.organization.username + log_action('org_team_member_invite_declined', orgname, { + 'member': get_authenticated_user().username, + 'team': team.name, + 'inviter': inviter.username + }) + + return 'Deleted', 204 diff --git a/endpoints/api/user.py b/endpoints/api/user.py index 4a5df20ee..f81e7ffba 100644 --- a/endpoints/api/user.py +++ b/endpoints/api/user.py @@ -12,6 +12,8 @@ from endpoints.api import (ApiResource, nickname, resource, validate_json_reques license_error, require_fresh_login) from endpoints.api.subscribe import subscribe from endpoints.common import common_login +from endpoints.api.team import try_accept_invite + from data import model from data.billing import get_plan from auth.permissions import (AdministerOrganizationPermission, CreateRepositoryPermission, @@ -20,6 +22,7 @@ from auth.auth_context import get_authenticated_user from auth import scopes from util.gravatar import compute_hash from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email, send_password_changed) +from util.names import parse_single_urn import features @@ -188,11 +191,15 @@ class User(ApiResource): return user_view(user) @nickname('createNewUser') + @parse_args + @query_param('inviteCode', 'Invitation code given for creating the user.', type=str, + default='') @internal_only @validate_json_request('NewUser') - def post(self): + def post(self, args): """ Create a new user. """ user_data = request.get_json() + invite_code = args['inviteCode'] existing_user = model.get_user(user_data['username']) if existing_user: @@ -203,6 +210,17 @@ class User(ApiResource): user_data['email']) code = model.create_confirm_email_code(new_user) send_confirmation_email(new_user.username, new_user.email, code.code) + + # Handle any invite codes. + parsed_invite = parse_single_urn(invite_code) + if parsed_invite is not None: + if parsed_invite[0] == 'teaminvite': + # Add the user to the team. + try: + try_accept_invite(invite_code, new_user) + except model.DataModelException: + pass + return 'Created', 201 except model.TooManyUsersException as ex: raise license_error(exception=ex) @@ -409,6 +427,19 @@ class Signout(ApiResource): return {'success': True} + +@resource('/v1/detachexternal/') +@internal_only +class DetachExternal(ApiResource): + """ Resource for detaching an external login. """ + @require_user_admin + @nickname('detachExternalLogin') + def post(self, servicename): + """ Request that the current user be detached from the external login service. 
""" + model.detach_external_login(get_authenticated_user(), servicename) + return {'success': True} + + @resource("/v1/recovery") @internal_only class Recovery(ApiResource): diff --git a/endpoints/index.py b/endpoints/index.py index 4017d47e9..5c8d7058a 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -66,6 +66,9 @@ def generate_headers(role='read'): @index.route('/users/', methods=['POST']) def create_user(): user_data = request.get_json() + if not 'username' in user_data: + abort(400, 'Missing username') + username = user_data['username'] password = user_data.get('password', '') diff --git a/endpoints/registry.py b/endpoints/registry.py index 72633939e..94719905a 100644 --- a/endpoints/registry.py +++ b/endpoints/registry.py @@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers): extra_headers = {} - # Add the Accept-Ranges header if the storage engine supports resumeable + # Add the Accept-Ranges header if the storage engine supports resumable # downloads. - if store.get_supports_resumeable_downloads(repo_image.storage.locations): - profile.debug('Storage supports resumeable downloads') + if store.get_supports_resumable_downloads(repo_image.storage.locations): + profile.debug('Storage supports resumable downloads') extra_headers['Accept-Ranges'] = 'bytes' resp = make_response('') diff --git a/endpoints/trigger.py b/endpoints/trigger.py index ab7aa9065..ae0b4b2b7 100644 --- a/endpoints/trigger.py +++ b/endpoints/trigger.py @@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger): with tarfile.open(fileobj=tarball) as archive: tarball_subdir = archive.getnames()[0] + # Seek to position 0 to make boto multipart happy + tarball.seek(0) + dockerfile_id = user_files.store_file(tarball, TARBALL_MIME) logger.debug('Successfully prepared job') diff --git a/endpoints/web.py b/endpoints/web.py index c538e703d..c0da52bdb 100644 --- a/endpoints/web.py +++ b/endpoints/web.py @@ -33,8 +33,8 @@ STATUS_TAGS = app.config['STATUS_TAGS'] @web.route('/', methods=['GET'], defaults={'path': ''}) @web.route('/organization/', methods=['GET']) @no_cache -def index(path): - return render_page_template('index.html') +def index(path, **kwargs): + return render_page_template('index.html', **kwargs) @web.route('/500', methods=['GET']) @@ -102,7 +102,7 @@ def superuser(): @web.route('/signin/') @no_cache -def signin(): +def signin(redirect=None): return index('') @@ -124,6 +124,13 @@ def new(): return index('') +@web.route('/confirminvite') +@no_cache +def confirm_invite(): + code = request.values['code'] + return index('', code=code) + + @web.route('/repository/', defaults={'path': ''}) @web.route('/repository/', methods=['GET']) @no_cache diff --git a/initdb.py b/initdb.py index 34b1c0a08..72f529491 100644 --- a/initdb.py +++ b/initdb.py @@ -214,7 +214,11 @@ def initialize_database(): LogEntryKind.create(name='org_create_team') LogEntryKind.create(name='org_delete_team') + LogEntryKind.create(name='org_invite_team_member') + LogEntryKind.create(name='org_delete_team_member_invite') LogEntryKind.create(name='org_add_team_member') + LogEntryKind.create(name='org_team_member_invite_accepted') + LogEntryKind.create(name='org_team_member_invite_declined') LogEntryKind.create(name='org_remove_team_member') LogEntryKind.create(name='org_set_team_description') LogEntryKind.create(name='org_set_team_role') @@ -269,6 +273,7 @@ def initialize_database(): NotificationKind.create(name='over_private_usage') NotificationKind.create(name='expiring_license') 
NotificationKind.create(name='maintenance') + NotificationKind.create(name='org_team_invite') NotificationKind.create(name='test_notification') @@ -300,7 +305,7 @@ def populate_database(): new_user_2.verified = True new_user_2.save() - new_user_3 = model.create_user('freshuser', 'password', 'no@thanks.com') + new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com') new_user_3.verified = True new_user_3.save() diff --git a/static/css/quay.css b/static/css/quay.css index 84d89811a..78053c1e8 100644 --- a/static/css/quay.css +++ b/static/css/quay.css @@ -21,8 +21,7 @@ #quay-logo { - width: 80px; - margin-right: 30px; + width: 100px; } #padding-container { @@ -145,6 +144,15 @@ nav.navbar-default .navbar-nav>li>a.active { max-width: 320px; } +.notification-view-element .right-controls button { + margin-left: 10px; +} + +.notification-view-element .message i.fa { + margin-right: 6px; +} + + .notification-view-element .orginfo { margin-top: 8px; float: left; @@ -2559,7 +2567,7 @@ p.editable:hover i { margin-top: 10px; } -.repo-build .build-log-error-element { +.repo-build .build-log-error-element .error-message-container { position: relative; display: inline-block; margin: 10px; @@ -2569,7 +2577,7 @@ p.editable:hover i { margin-left: 22px; } -.repo-build .build-log-error-element i.fa { +.repo-build .build-log-error-element .error-message-container i.fa { color: red; position: absolute; top: 13px; @@ -3559,6 +3567,12 @@ p.editable:hover i { white-space: nowrap; } +.tt-message { + padding: 10px; + font-size: 12px; + white-space: nowrap; +} + .tt-suggestion p { margin: 0; } @@ -4638,4 +4652,57 @@ i.slack-icon { .external-notification-view-element:hover .side-controls button { border: 1px solid #eee; +} + +.member-listing { + width: 100%; +} + +.member-listing .section-header { + color: #ccc; + margin-top: 20px; + margin-bottom: 10px; +} + +.member-listing .gravatar { + vertical-align: middle; + margin-right: 10px; +} + +.member-listing .entity-reference { + margin-bottom: 10px; + display: inline-block; +} + +.member-listing .invite-listing { + margin-bottom: 10px; + display: inline-block; +} + +.team-view .organization-header .popover { + max-width: none !important; +} + +.team-view .organization-header .popover.bottom-right .arrow:after { + border-bottom-color: #f7f7f7; + top: 2px; +} + +.team-view .organization-header .popover-content { + font-size: 14px; + padding-top: 6px; +} + +.team-view .organization-header .popover-content input { + background: white; +} + +.team-view .team-view-add-element .help-text { + font-size: 13px; + color: #ccc; + margin-top: 10px; +} + +.team-view .organization-header .popover-content { + min-width: 500px; } \ No newline at end of file diff --git a/static/directives/build-log-error.html b/static/directives/build-log-error.html index 095f8edd0..13b399bb9 100644 --- a/static/directives/build-log-error.html +++ b/static/directives/build-log-error.html @@ -1,4 +1,23 @@ - - - - +
+caused by attempting to pull private repository {{ getLocalPullInfo().repo }}
+with inaccessible credentials
+without credentials
+
+Note: The credentials {{ getLocalPullInfo().login.username }} for registry {{ getLocalPullInfo().login.registry }} cannot
+access repository {{ getLocalPullInfo().repo }}.
+
+Note: No robot account is specified for this build. Without such credentials, this pull will always fail. Please set up a new
+build trigger with a robot account that has access to {{ getLocalPullInfo().repo }} or make that repository public.
diff --git a/static/directives/dropdown-select.html b/static/directives/dropdown-select.html index c1157e3d0..69404e161 100644 --- a/static/directives/dropdown-select.html +++ b/static/directives/dropdown-select.html @@ -2,7 +2,7 @@
+ ng-readonly="!allowCustomInput">
diff --git a/static/directives/notification-view.html b/static/directives/notification-view.html index f03133e55..fbdb4b419 100644 --- a/static/directives/notification-view.html +++ b/static/directives/notification-view.html @@ -7,10 +7,13 @@ {{ notification.organization }} -
{{ parseDate(notification.created) | date:'medium'}}
Dismiss Notification +
+
{{ parseDate(notification.created) | date:'medium'}}
diff --git a/static/directives/signin-form.html b/static/directives/signin-form.html index de67c18f6..59551b51c 100644 --- a/static/directives/signin-form.html +++ b/static/directives/signin-form.html @@ -1,5 +1,6 @@ diff --git a/static/directives/trigger-setup-github.html b/static/directives/trigger-setup-github.html index 9b0e194ab..48ac359f9 100644 --- a/static/directives/trigger-setup-github.html +++ b/static/directives/trigger-setup-github.html @@ -29,7 +29,8 @@
Dockerfile Location:
- +
diff --git a/static/js/app.js b/static/js/app.js index d798f94e6..5457b46d2 100644 --- a/static/js/app.js +++ b/static/js/app.js @@ -153,6 +153,14 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading this.currentIndex_ = 0; } + _ViewArray.prototype.length = function() { + return this.entries.length; + }; + + _ViewArray.prototype.get = function(index) { + return this.entries[index]; + }; + _ViewArray.prototype.push = function(elem) { this.entries.push(elem); this.hasEntries = true; @@ -215,6 +223,78 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading return service; }]); + /** + * Specialized class for conducting an HTTP poll, while properly preventing multiple calls. + */ + $provide.factory('AngularPollChannel', ['ApiService', '$timeout', function(ApiService, $timeout) { + var _PollChannel = function(scope, requester, opt_sleeptime) { + this.scope_ = scope; + this.requester_ = requester; + this.sleeptime_ = opt_sleeptime || (60 * 1000 /* 60s */); + this.timer_ = null; + + this.working = false; + this.polling = false; + + var that = this; + scope.$on('$destroy', function() { + that.stop(); + }); + }; + + _PollChannel.prototype.stop = function() { + if (this.timer_) { + $timeout.cancel(this.timer_); + this.timer_ = null; + this.polling_ = false; + } + + this.working = false; + }; + + _PollChannel.prototype.start = function() { + // Make sure we invoke call outside the normal digest cycle, since + // we'll call $scope.$apply ourselves. + var that = this; + setTimeout(function() { that.call_(); }, 0); + }; + + _PollChannel.prototype.call_ = function() { + if (this.working) { return; } + + var that = this; + this.working = true; + this.scope_.$apply(function() { + that.requester_(function(status) { + if (status) { + that.working = false; + that.setupTimer_(); + } else { + that.stop(); + } + }); + }); + }; + + _PollChannel.prototype.setupTimer_ = function() { + if (this.timer_) { return; } + + var that = this; + this.polling = true; + this.timer_ = $timeout(function() { + that.timer_ = null; + that.call_(); + }, this.sleeptime_) + }; + + var service = { + 'create': function(scope, requester, opt_sleeptime) { + return new _PollChannel(scope, requester, opt_sleeptime); + } + }; + + return service; + }]); $provide.factory('DataFileService', [function() { var dataFileService = {}; @@ -419,6 +499,11 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading $provide.factory('UtilService', ['$sanitize', function($sanitize) { var utilService = {}; + utilService.isEmailAddress = function(val) { + var emailRegex = /^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$/; + return emailRegex.test(val); + }; + utilService.escapeHtmlString = function(text) { var adjusted = text.replace(/&/g, "&") .replace(/ 0 || target == element; + if (!isPanelMember) { + hide(); + } + }); + + $(element).find('input').focus(); + }, 100); + } + }; +}]); quayApp.directive('repoCircle', function () { var directiveDefinitionObject = { @@ -2401,8 +2589,12 @@ quayApp.directive('userSetup', function () { restrict: 'C', scope: { 'redirectUrl': '=redirectUrl', + + 'inviteCode': '=inviteCode', + 'signInStarted': '&signInStarted', - 'signedIn': '&signedIn' + 'signedIn': '&signedIn', + 'userRegistered': '&userRegistered' }, controller: function($scope, $location, $timeout, ApiService, KeyService, UserService) { $scope.sendRecovery = function() { @@ -2417,6 +2609,10 @@ quayApp.directive('userSetup', function () { }); }; + 
$scope.handleUserRegistered = function(username) { + $scope.userRegistered({'username': username}); + }; + $scope.hasSignedIn = function() { return UserService.hasEverLoggedIn(); }; @@ -2440,6 +2636,7 @@ quayApp.directive('externalLoginButton', function () { 'action': '@action' }, controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) { + $scope.signingIn = false; $scope.startSignin = function(service) { $scope.signInStarted({'service': service}); @@ -2451,6 +2648,7 @@ quayApp.directive('externalLoginButton', function () { // Needed to ensure that UI work done by the started callback is finished before the location // changes. + $scope.signingIn = true; $timeout(function() { document.location = url; }, 250); @@ -2476,8 +2674,10 @@ quayApp.directive('signinForm', function () { controller: function($scope, $location, $timeout, $interval, ApiService, KeyService, UserService, CookieService, Features, Config) { $scope.tryAgainSoon = 0; $scope.tryAgainInterval = null; + $scope.signingIn = false; $scope.markStarted = function() { + $scope.signingIn = true; if ($scope.signInStarted != null) { $scope.signInStarted(); } @@ -2508,25 +2708,30 @@ quayApp.directive('signinForm', function () { $scope.cancelInterval(); ApiService.signinUser($scope.user).then(function() { + $scope.signingIn = false; $scope.needsEmailVerification = false; $scope.invalidCredentials = false; if ($scope.signedIn != null) { $scope.signedIn(); } - + + // Load the newly created user. UserService.load(); // Redirect to the specified page or the landing page // Note: The timeout of 500ms is needed to ensure dialogs containing sign in // forms get removed before the location changes. $timeout(function() { - if ($scope.redirectUrl == $location.path()) { - return; - } - $location.path($scope.redirectUrl ? $scope.redirectUrl : '/'); + var redirectUrl = $scope.redirectUrl; + if (redirectUrl == $location.path() || redirectUrl == null) { + return; + } + window.location = (redirectUrl ? 
redirectUrl : '/'); }, 500); }, function(result) { + $scope.signingIn = false; + if (result.status == 429 /* try again later */) { $scope.needsEmailVerification = false; $scope.invalidCredentials = false; @@ -2560,7 +2765,9 @@ quayApp.directive('signupForm', function () { transclude: true, restrict: 'C', scope: { + 'inviteCode': '=inviteCode', + 'userRegistered': '&userRegistered' }, controller: function($scope, $location, $timeout, ApiService, KeyService, UserService, Config, UIService) { $('.form-signup').popover(); @@ -2572,6 +2779,10 @@ quayApp.directive('signupForm', function () { UIService.hidePopover('#signupButton'); $scope.registering = true; + if ($scope.inviteCode) { + $scope.newUser['inviteCode'] = $scope.inviteCode; + } + ApiService.createNewUser($scope.newUser).then(function() { $scope.registering = false; $scope.awaitingConfirmation = true; @@ -2579,6 +2790,8 @@ quayApp.directive('signupForm', function () { if (Config.MIXPANEL_KEY) { mixpanel.alias($scope.newUser.username); } + + $scope.userRegistered({'username': $scope.newUser.username}); }, function(result) { $scope.registering = false; UIService.showFormError('#signupButton', result); @@ -2696,7 +2909,7 @@ quayApp.directive('dockerAuthDialog', function (Config) { $scope.downloadCfg = function() { var auth = $.base64.encode($scope.username + ":" + $scope.token); config = {} - config[Config.getUrl('/v1/')] = { + config[Config['SERVER_HOSTNAME']] = { "auth": auth, "email": "" }; @@ -2900,6 +3113,24 @@ quayApp.directive('logsView', function () { 'org_delete_team': 'Delete team: {team}', 'org_add_team_member': 'Add member {member} to team {team}', 'org_remove_team_member': 'Remove member {member} from team {team}', + 'org_invite_team_member': function(metadata) { + if (metadata.user) { + return 'Invite {user} to team {team}'; + } else { + return 'Invite {email} to team {team}'; + } + }, + 'org_delete_team_member_invite': function(metadata) { + if (metadata.user) { + return 'Rescind invite of {user} to team {team}'; + } else { + return 'Rescind invite of {email} to team {team}'; + } + }, + + 'org_team_member_invite_accepted': 'User {member}, invited by {inviter}, joined team {team}', + 'org_team_member_invite_declined': 'User {member}, invited by {inviter}, declined to join team {team}', + 'org_set_team_description': 'Change description of team {team}: {description}', 'org_set_team_role': 'Change permission of team {team} to {role}', 'create_prototype_permission': function(metadata) { @@ -2980,7 +3211,11 @@ quayApp.directive('logsView', function () { 'org_create_team': 'Create team', 'org_delete_team': 'Delete team', 'org_add_team_member': 'Add team member', + 'org_invite_team_member': 'Invite team member', + 'org_delete_team_member_invite': 'Rescind team member invitation', 'org_remove_team_member': 'Remove team member', + 'org_team_member_invite_accepted': 'Team invite accepted', + 'org_team_member_invite_declined': 'Team invite declined', 'org_set_team_description': 'Change team description', 'org_set_team_role': 'Change team permission', 'create_prototype_permission': 'Create default permission', @@ -3689,7 +3924,9 @@ quayApp.directive('entitySearch', function () { 'allowedEntities': '=allowedEntities', 'currentEntity': '=currentEntity', + 'entitySelected': '&entitySelected', + 'emailSelected': '&emailSelected', // When set to true, the contents of the control will be cleared as soon // as an entity is selected. 
@@ -3697,8 +3934,15 @@ // Set this property to immediately clear the contents of the control. 'clearValue': '=clearValue', + + // Whether e-mail addresses are allowed. + 'allowEmails': '@allowEmails', + 'emailMessage': '@emailMessage', + + // True if the menu should pull right. + 'pullRight': '@pullRight' }, - controller: function($rootScope, $scope, $element, Restangular, UserService, ApiService, Config) { + controller: function($rootScope, $scope, $element, Restangular, UserService, ApiService, UtilService, Config) { $scope.lazyLoading = true; $scope.teams = null; @@ -3895,8 +4139,12 @@ return null; } - if (val.indexOf('@') > 0) { - return 'A ' + Config.REGISTRY_TITLE_SHORT + ' username (not an e-mail address) must be specified'; + if (UtilService.isEmailAddress(val)) { + if ($scope.allowEmails) { + return $scope.emailMessage; + } else { + return 'A ' + Config.REGISTRY_TITLE_SHORT + ' username (not an e-mail address) must be specified
'; + } } var classes = []; @@ -3952,6 +4200,16 @@ quayApp.directive('entitySearch', function () { }} }); + $(input).on('keypress', function(e) { + var val = $(input).val(); + var code = e.keyCode || e.which; + if (code == 13 && $scope.allowEmails && UtilService.isEmailAddress(val)) { + $scope.$apply(function() { + $scope.emailSelected({'email': val}); + }); + } + }); + $(input).on('input', function(e) { $scope.$apply(function() { $scope.clearEntityInternal(); @@ -4368,9 +4626,48 @@ quayApp.directive('buildLogError', function () { transclude: false, restrict: 'C', scope: { - 'error': '=error' + 'error': '=error', + 'entries': '=entries' }, - controller: function($scope, $element) { + controller: function($scope, $element, Config) { + $scope.getLocalPullInfo = function() { + if ($scope.entries.__localpull !== undefined) { + return $scope.entries.__localpull; + } + + var localInfo = { + 'isLocal': false + }; + + // Find the 'pulling' phase entry, and then extra any metadata found under + // it. + for (var i = 0; i < $scope.entries.length; ++i) { + var entry = $scope.entries[i]; + if (entry.type == 'phase' && entry.message == 'pulling') { + for (var j = 0; j < entry.logs.length(); ++j) { + var log = entry.logs.get(j); + if (log.data && log.data.phasestep == 'login') { + localInfo['login'] = log.data; + } + + if (log.data && log.data.phasestep == 'pull') { + var repo_url = log.data['repo_url']; + var repo_and_tag = repo_url.substring(Config.SERVER_HOSTNAME.length + 1); + var tagIndex = repo_and_tag.lastIndexOf(':'); + var repo = repo_and_tag.substring(0, tagIndex); + + localInfo['repo_url'] = repo_url; + localInfo['repo'] = repo; + + localInfo['isLocal'] = repo_url.indexOf(Config.SERVER_HOSTNAME + '/') == 0; + } + } + break; + } + } + + return $scope.entries.__localpull = localInfo; + }; } }; return directiveDefinitionObject; @@ -4406,6 +4703,9 @@ quayApp.directive('dropdownSelect', function ($compile) { 'selectedItem': '=selectedItem', 'placeholder': '=placeholder', 'lookaheadItems': '=lookaheadItems', + + 'allowCustomInput': '@allowCustomInput', + 'handleItemSelected': '&handleItemSelected', 'handleInput': '&handleInput', @@ -5386,6 +5686,10 @@ quayApp.directive('notificationView', function () { $scope.getClass = function(notification) { return NotificationService.getClass(notification); }; + + $scope.getActions = function(notification) { + return NotificationService.getActions(notification); + }; } }; return directiveDefinitionObject; @@ -5601,7 +5905,7 @@ quayApp.directive('dockerfileBuildForm', function () { var data = { 'mimeType': mimeType }; - + var getUploadUrl = ApiService.getFiledropUrl(data).then(function(resp) { conductUpload(file, resp.url, resp.file_id, mimeType); }, function() { diff --git a/static/js/controllers.js b/static/js/controllers.js index e4e364c87..a4849e342 100644 --- a/static/js/controllers.js +++ b/static/js/controllers.js @@ -1,3 +1,7 @@ +function SignInCtrl($scope, $location) { + $scope.redirectUrl = '/'; +} + function GuideCtrl() { } @@ -978,14 +982,9 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou } function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize, - ansi2html, AngularViewArray) { + ansi2html, AngularViewArray, AngularPollChannel) { var namespace = $routeParams.namespace; var name = $routeParams.name; - var pollTimerHandle = null; - - $scope.$on('$destroy', function() { - stopPollTimer(); - }); // Watch for changes to the current parameter. 
$scope.$on('$routeUpdate', function(){ @@ -995,8 +994,7 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope }); $scope.builds = null; - $scope.polling = false; - + $scope.pollChannel = null; $scope.buildDialogShowCounter = 0; $scope.showNewBuildDialog = function() { @@ -1081,8 +1079,6 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope $scope.setCurrentBuildInternal = function(index, build, opt_updateURL) { if (build == $scope.currentBuild) { return; } - stopPollTimer(); - $scope.logEntries = null; $scope.logStartIndex = null; $scope.currentParentEntry = null; @@ -1103,47 +1099,35 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope $scope.adjustLogHeight(); }, 1); - // Load the first set of logs. - getBuildStatusAndLogs(); - - // If the build is currently processing, start the build timer. - checkPollTimer(); - }; - - var checkPollTimer = function() { - var build = $scope.currentBuild; - if (!build) { - stopPollTimer(); - return; + // Stop any existing polling. + if ($scope.pollChannel) { + $scope.pollChannel.stop(); } + + // Create a new channel for polling the build status and logs. + var conductStatusAndLogRequest = function(callback) { + getBuildStatusAndLogs(build, callback); + }; - if (build['phase'] != 'complete' && build['phase'] != 'error') { - startPollTimer(); - return true; - } else { - stopPollTimer(); - return false; - } + $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */); + $scope.pollChannel.start(); }; - var stopPollTimer = function() { - $interval.cancel(pollTimerHandle); - }; - - var startPollTimer = function() { - stopPollTimer(); - pollTimerHandle = $interval(getBuildStatusAndLogs, 2000); - }; - - var processLogs = function(logs, startIndex) { + var processLogs = function(logs, startIndex, endIndex) { if (!$scope.logEntries) { $scope.logEntries = []; } + // If the start index given is less than that requested, then we've received a larger + // pool of logs, and we need to only consider the new ones. + if (startIndex < $scope.logStartIndex) { + logs = logs.slice($scope.logStartIndex - startIndex); + } + for (var i = 0; i < logs.length; ++i) { var entry = logs[i]; var type = entry['type'] || 'entry'; if (type == 'command' || type == 'phase' || type == 'error') { entry['logs'] = AngularViewArray.create(); - entry['index'] = startIndex + i; + entry['index'] = $scope.logStartIndex + i; $scope.logEntries.push(entry); $scope.currentParentEntry = entry; @@ -1151,18 +1135,19 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope $scope.currentParentEntry['logs'].push(entry); } } + + return endIndex; }; - var getBuildStatusAndLogs = function() { - if (!$scope.currentBuild || $scope.polling) { return; } - $scope.polling = true; - + var getBuildStatusAndLogs = function(build, callback) { var params = { 'repository': namespace + '/' + name, - 'build_uuid': $scope.currentBuild.id + 'build_uuid': build.id }; ApiService.getRepoBuildStatus(null, params, true).then(function(resp) { + if (build != $scope.currentBuild) { callback(false); return; } + // Note: We use extend here rather than replacing as Angular is depending on the // root build object to remain the same object. 
var matchingBuilds = $.grep($scope.builds, function(elem) { @@ -1177,22 +1162,16 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope $scope.builds.push(currentBuild); } - checkPollTimer(); - // Load the updated logs for the build. var options = { 'start': $scope.logStartIndex }; - ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) { - if ($scope.logStartIndex != null && resp['start'] != $scope.logStartIndex) { - $scope.polling = false; - return; - } + ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) { + if (build != $scope.currentBuild) { callback(false); return; } - processLogs(resp['logs'], resp['start']); - $scope.logStartIndex = resp['total']; - $scope.polling = false; + // Process the logs we've received. + $scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']); // If the build status is an error, open the last two log entries. if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) { @@ -1205,9 +1184,15 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope openLogEntries($scope.logEntries[$scope.logEntries.length - 2]); openLogEntries($scope.logEntries[$scope.logEntries.length - 1]); } + + // If the build phase is an error or a complete, then we mark the channel + // as closed. + callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete'); }, function() { - $scope.polling = false; + callback(false); }); + }, function() { + callback(false); }); }; @@ -1647,14 +1632,17 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use if ($scope.cuser.logins) { for (var i = 0; i < $scope.cuser.logins.length; i++) { - if ($scope.cuser.logins[i].service == 'github') { + var login = $scope.cuser.logins[i]; + login.metadata = login.metadata || {}; + + if (login.service == 'github') { $scope.hasGithubLogin = true; - $scope.githubLogin = $scope.cuser.logins[i].metadata['service_username']; + $scope.githubLogin = login.metadata['service_username']; } - if ($scope.cuser.logins[i].service == 'google') { + if (login.service == 'google') { $scope.hasGoogleLogin = true; - $scope.googleLogin = $scope.cuser.logins[i].metadata['service_username']; + $scope.googleLogin = login.metadata['service_username']; } } } @@ -1797,6 +1785,18 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use UIService.showFormError('#changePasswordForm', result); }); }; + + $scope.detachExternalLogin = function(kind) { + var params = { + 'servicename': kind + }; + + ApiService.detachExternalLogin(null, params).then(function() { + $scope.hasGithubLogin = false; + $scope.hasGoogleLogin = false; + UserService.load(); + }, ApiService.errorDisplay('Count not detach service')); + }; } function ImageViewCtrl($scope, $routeParams, $rootScope, $timeout, ApiService, ImageMetadataService) { @@ -2283,29 +2283,91 @@ function OrgAdminCtrl($rootScope, $scope, $timeout, Restangular, $routeParams, U loadOrganization(); } -function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams) { +function TeamViewCtrl($rootScope, $scope, $timeout, Restangular, ApiService, $routeParams) { var teamname = $routeParams.teamname; var orgname = $routeParams.orgname; $scope.orgname = orgname; $scope.teamname = teamname; + $scope.addingMember = false; + $scope.memberMap = null; $rootScope.title = 'Loading...'; - $scope.addNewMember = function(member) { - if (!member || 
$scope.members[member.name]) { return; } + $scope.filterFunction = function(invited, robots) { + return function(item) { + // Note: The !! is needed because is_robot will be undefined for invites. + var robot_check = (!!item.is_robot == robots); + return robot_check && item.invited == invited; + }; + }; + $scope.inviteEmail = function(email) { + if (!email || $scope.memberMap[email]) { return; } + + $scope.addingMember = true; + + var params = { + 'orgname': orgname, + 'teamname': teamname, + 'email': email + }; + + var errorHandler = ApiService.errorDisplay('Cannot invite team member', function() { + $scope.addingMember = false; + }); + + ApiService.inviteTeamMemberEmail(null, params).then(function(resp) { + $scope.members.push(resp); + $scope.memberMap[resp.email] = resp; + $scope.addingMember = false; + }, errorHandler); + }; + + $scope.addNewMember = function(member) { + if (!member || $scope.memberMap[member.name]) { return; } + var params = { 'orgname': orgname, 'teamname': teamname, 'membername': member.name }; - ApiService.updateOrganizationTeamMember(null, params).then(function(resp) { - $scope.members[member.name] = resp; - }, function() { - $('#cannotChangeMembersModal').modal({}); + var errorHandler = ApiService.errorDisplay('Cannot add team member', function() { + $scope.addingMember = false; }); + + $scope.addingMember = true; + ApiService.updateOrganizationTeamMember(null, params).then(function(resp) { + $scope.members.push(resp); + $scope.memberMap[resp.name] = resp; + $scope.addingMember = false; + }, errorHandler); + }; + + $scope.revokeInvite = function(inviteInfo) { + if (inviteInfo.kind == 'invite') { + // E-mail invite. + $scope.revokeEmailInvite(inviteInfo.email); + } else { + // User invite. + $scope.removeMember(inviteInfo.name); + } + }; + + $scope.revokeEmailInvite = function(email) { + var params = { + 'orgname': orgname, + 'teamname': teamname, + 'email': email + }; + + ApiService.deleteTeamMemberEmailInvite(null, params).then(function(resp) { + if (!$scope.memberMap[email]) { return; } + var index = $.inArray($scope.memberMap[email], $scope.members); + $scope.members.splice(index, 1); + delete $scope.memberMap[email]; + }, ApiService.errorDisplay('Cannot revoke team invite')); }; $scope.removeMember = function(username) { @@ -2316,10 +2378,11 @@ function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams) }; ApiService.deleteOrganizationTeamMember(null, params).then(function(resp) { - delete $scope.members[username]; - }, function() { - $('#cannotChangeMembersModal').modal({}); - }); + if (!$scope.memberMap[username]) { return; } + var index = $.inArray($scope.memberMap[username], $scope.members); + $scope.members.splice(index, 1); + delete $scope.memberMap[username]; + }, ApiService.errorDisplay('Cannot remove team member')); }; $scope.updateForDescription = function(content) { @@ -2351,7 +2414,8 @@ function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams) var loadMembers = function() { var params = { 'orgname': orgname, - 'teamname': teamname + 'teamname': teamname, + 'includePending': true }; $scope.membersResource = ApiService.getOrganizationTeamMembersAsResource(params).get(function(resp) { @@ -2363,6 +2427,12 @@ function TeamViewCtrl($rootScope, $scope, Restangular, ApiService, $routeParams) 'html': true }); + $scope.memberMap = {}; + for (var i = 0; i < $scope.members.length; ++i) { + var current = $scope.members[i]; + $scope.memberMap[current.name || current.email] = current; + } + return resp.members; }); }; 
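The RepoBuildCtrl rewrite earlier in this diff reduces polling to a single contract: AngularPollChannel invokes the requester, and the requester answers callback(true) to schedule the next poll or callback(false) to close the channel, so a new request can never overlap one still in flight. A language-neutral sketch of that contract in Python (threading.Timer standing in for $timeout; all names here are illustrative, not part of this change):

    import threading

    class PollChannel(object):
        """Single-flight poller: never issues a request while one is in progress."""
        def __init__(self, requester, sleeptime=60.0):
            self._requester = requester
            self._sleeptime = sleeptime
            self._timer = None
            self.working = False

        def start(self):
            self._call()

        def stop(self):
            if self._timer is not None:
                self._timer.cancel()
                self._timer = None
            self.working = False

        def _call(self):
            if self.working:
                return  # a request is already in flight
            self.working = True
            # The requester reports back: True -> poll again, False -> stop.
            self._requester(self._callback)

        def _callback(self, should_continue):
            self.working = False
            if should_continue:
                self._timer = threading.Timer(self._sleeptime, self._call)
                self._timer.start()
            else:
                self.stop()

In the controller, getBuildStatusAndLogs plays the requester role, answering with whether the build phase is still neither 'error' nor 'complete'.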
@@ -2688,3 +2758,32 @@ function SuperUserAdminCtrl($scope, ApiService, Features, UserService) { function TourCtrl($scope, $location) { $scope.kind = $location.path().substring('/tour/'.length); } + +function ConfirmInviteCtrl($scope, $location, UserService, ApiService, NotificationService) { + // Monitor any user changes and place the current user into the scope. + $scope.loading = false; + $scope.inviteCode = $location.search()['code'] || ''; + + UserService.updateUserIn($scope, function(user) { + if (!user.anonymous && !$scope.loading) { + // Make sure to not redirect now that we have logged in. We'll conduct the redirect + // manually. + $scope.redirectUrl = null; + $scope.loading = true; + + var params = { + 'code': $location.search()['code'] + }; + + ApiService.acceptOrganizationTeamInvite(null, params).then(function(resp) { + NotificationService.update(); + $location.path('/organization/' + resp.org + '/teams/' + resp.team); + }, function(resp) { + $scope.loading = false; + $scope.invalid = ApiService.getErrorMessage(resp, 'Invalid confirmation code'); + }); + } + }); + + $scope.redirectUrl = window.location.href; +} diff --git a/static/partials/confirm-invite.html b/static/partials/confirm-invite.html new file mode 100644 index 000000000..e3d883e53 --- /dev/null +++ b/static/partials/confirm-invite.html @@ -0,0 +1,15 @@ [15 added lines of template markup lost in extraction]
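ConfirmInviteCtrl only ferries the ?code= value into acceptOrganizationTeamInvite; the server-side round trip is easiest to see through the data-model helpers that the new tests exercise later in this diff. A sketch under those helpers (the user and org names here are placeholders, not values taken from this change):

    from data import model

    # An org admin invites an e-mail address to a team; the returned invite
    # carries the token that the confirm-invite page ultimately submits.
    inviter = model.get_user('some_admin')
    team = model.get_organization_team('some_org', 'owners')
    invite = model.add_or_invite_to_team(inviter, team, None, 'invitee@example.com')
    code = invite.invite_token

    # Invites addressed to an existing user can be enumerated directly.
    user = model.get_user('some_user')
    pending = list(model.lookup_team_invites(user))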
diff --git a/static/partials/repo-build.html b/static/partials/repo-build.html index 3afe87508..c8a352b5f 100644 --- a/static/partials/repo-build.html +++ b/static/partials/repo-build.html @@ -77,7 +77,7 @@ [template markup lost in extraction] @@ -94,7 +94,7 @@ [template markup lost in extraction]
[A further partial diff follows whose file header did not survive extraction. Its recoverable fragments rebuild the team view into separate "Team Members", "Robot Accounts", and "Invited To Join" tables (the latter listing {{ member.email }} for e-mail invites), with the empty states "This team has no members" and "No matching team members found".]
diff --git a/static/partials/user-admin.html b/static/partials/user-admin.html index c4d3b94a0..7d6dd8dfb 100644 --- a/static/partials/user-admin.html +++ b/static/partials/user-admin.html @@ -177,10 +177,14 @@ [template markup lost in extraction; the added lines around "Account attached to Github Account" correspond to the new detachExternalLogin action in controllers.js] @@ -197,10 +201,14 @@ [template markup lost in extraction; shows {{ googleLogin }} and the matching detach action beside "Account attached to Google Account"]
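All three of those tables are driven by the same members payload; TeamViewCtrl's filterFunction (earlier in this diff) partitions it by the invited and is_robot flags, coercing is_robot because it is undefined on e-mail invites. The same partitioning in Python, over illustrative records shaped like the team-members API response:

    members = [
        {'name': 'alice', 'is_robot': False, 'invited': False},
        {'name': 'some_org+builder', 'is_robot': True, 'invited': False},
        {'kind': 'invite', 'email': 'bob@example.com', 'invited': True},
    ]

    def matches(invited, robots):
        # bool() mirrors the JS !! coercion: is_robot is absent on invites.
        return lambda member: (bool(member.get('is_robot')) == robots and
                               member['invited'] == invited)

    team_members = filter(matches(False, False), members)    # alice
    robot_accounts = filter(matches(False, True), members)   # some_org+builder
    invited_to_join = filter(matches(True, False), members)  # bob@example.com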
diff --git a/storage/__init__.py b/storage/__init__.py index 6700dab0b..4d1134d4b 100644 --- a/storage/__init__.py +++ b/storage/__init__.py @@ -1,5 +1,5 @@ from storage.local import LocalStorage -from storage.cloud import S3Storage, GoogleCloudStorage +from storage.cloud import S3Storage, GoogleCloudStorage, RadosGWStorage from storage.fakestorage import FakeStorage from storage.distributedstorage import DistributedStorage @@ -8,6 +8,7 @@ STORAGE_DRIVER_CLASSES = { 'LocalStorage': LocalStorage, 'S3Storage': S3Storage, 'GoogleCloudStorage': GoogleCloudStorage, + 'RadosGWStorage': RadosGWStorage, } diff --git a/storage/basestorage.py b/storage/basestorage.py index 2d3727a5b..675c3a738 100644 --- a/storage/basestorage.py +++ b/storage/basestorage.py @@ -54,10 +54,13 @@ class BaseStorage(StoragePaths): # Set the IO buffer to 64kB buffer_size = 64 * 1024 - def get_direct_download_url(self, path, expires_in=60): + def get_direct_download_url(self, path, expires_in=60, requires_cors=False): return None - def get_supports_resumeable_downloads(self): + def get_direct_upload_url(self, path, mime_type, requires_cors=True): + return None + + def get_supports_resumable_downloads(self): return False def get_content(self, path): @@ -72,7 +75,7 @@ class BaseStorage(StoragePaths): def stream_read_file(self, path): raise NotImplementedError - def stream_write(self, path, fp): + def stream_write(self, path, fp, content_type=None, content_encoding=None): raise NotImplementedError def list_directory(self, path=None): @@ -83,3 +86,6 @@ class BaseStorage(StoragePaths): def remove(self, path): raise NotImplementedError + + def get_checksum(self, path): + raise NotImplementedError \ No newline at end of file diff --git a/storage/cloud.py b/storage/cloud.py index d64415410..06dd8a2a9 100644 --- a/storage/cloud.py +++ b/storage/cloud.py @@ -7,36 +7,39 @@ import boto.gs.connection import boto.s3.key import boto.gs.key +from io import BufferedIOBase + from storage.basestorage import BaseStorage logger = logging.getLogger(__name__) -class StreamReadKeyAsFile(object): +class StreamReadKeyAsFile(BufferedIOBase): def __init__(self, key): self._key = key - self._finished = False - - def __enter__(self): - return self - - def __exit__(self, type, value, tb): - self._key.close(fast=True) def read(self, amt=None): - if self._finished: + if self.closed: return None resp = self._key.read(amt) - if not resp: - self._finished = True return resp + def readable(self): + return True + + @property + def closed(self): + return self._key.closed + + def close(self): + self._key.close(fast=True) + class _CloudStorage(BaseStorage): - def __init__(self, connection_class, key_class, upload_params, storage_path, access_key, - secret_key, bucket_name): + def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path, + access_key, secret_key, bucket_name): self._initialized = False self._bucket_name = bucket_name self._access_key = access_key @@ -45,12 +48,14 @@ class _CloudStorage(BaseStorage): self._connection_class = connection_class self._key_class = key_class self._upload_params = upload_params + self._connect_kwargs = connect_kwargs self._cloud_conn = None self._cloud_bucket = None def _initialize_cloud_conn(self): if not self._initialized: - self._cloud_conn = self._connection_class(self._access_key, self._secret_key) + self._cloud_conn = self._connection_class(self._access_key, self._secret_key, + **self._connect_kwargs) self._cloud_bucket = self._cloud_conn.get_bucket(self._bucket_name) self._initialized 
= True @@ -87,15 +92,22 @@ class _CloudStorage(BaseStorage): key.set_contents_from_string(content, **self._upload_params) return path - def get_supports_resumeable_downloads(self): + def get_supports_resumable_downloads(self): return True - def get_direct_download_url(self, path, expires_in=60): + def get_direct_download_url(self, path, expires_in=60, requires_cors=False): self._initialize_cloud_conn() path = self._init_path(path) k = self._key_class(self._cloud_bucket, path) return k.generate_url(expires_in) + def get_direct_upload_url(self, path, mime_type, requires_cors=True): + self._initialize_cloud_conn() + path = self._init_path(path) + key = self._key_class(self._cloud_bucket, path) + url = key.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, encrypt_key=True) + return url + def stream_read(self, path): self._initialize_cloud_conn() path = self._init_path(path) @@ -116,14 +128,23 @@ class _CloudStorage(BaseStorage): raise IOError('No such key: \'{0}\''.format(path)) return StreamReadKeyAsFile(key) - def stream_write(self, path, fp): + def stream_write(self, path, fp, content_type=None, content_encoding=None): # Minimum size of upload part size on S3 is 5MB self._initialize_cloud_conn() buffer_size = 5 * 1024 * 1024 if self.buffer_size > buffer_size: buffer_size = self.buffer_size path = self._init_path(path) - mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params) + + metadata = {} + if content_type is not None: + metadata['Content-Type'] = content_type + + if content_encoding is not None: + metadata['Content-Encoding'] = content_encoding + + mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata, + **self._upload_params) num_part = 1 while True: try: @@ -179,25 +200,73 @@ class _CloudStorage(BaseStorage): for key in self._cloud_bucket.list(prefix=path): key.delete() + def get_checksum(self, path): + self._initialize_cloud_conn() + path = self._init_path(path) + key = self._key_class(self._cloud_bucket, path) + k = self._cloud_bucket.lookup(key) + if k is None: + raise IOError('No such key: \'{0}\''.format(path)) + + return k.etag[1:-1][:7] + class S3Storage(_CloudStorage): def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket): upload_params = { 'encrypt_key': True, } + connect_kwargs = {} super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key, - upload_params, storage_path, s3_access_key, s3_secret_key, - s3_bucket) + connect_kwargs, upload_params, storage_path, s3_access_key, + s3_secret_key, s3_bucket) class GoogleCloudStorage(_CloudStorage): def __init__(self, storage_path, access_key, secret_key, bucket_name): - super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key, {}, - storage_path, access_key, secret_key, bucket_name) + upload_params = {} + connect_kwargs = {} + super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key, + connect_kwargs, upload_params, storage_path, + access_key, secret_key, bucket_name) - def stream_write(self, path, fp): + def stream_write(self, path, fp, content_type=None, content_encoding=None): # Minimum size of upload part size on S3 is 5MB self._initialize_cloud_conn() path = self._init_path(path) key = self._key_class(self._cloud_bucket, path) + + if content_type is not None: + key.set_metadata('Content-Type', content_type) + + if content_encoding is not None: + key.set_metadata('Content-Encoding', content_encoding) + key.set_contents_from_stream(fp) + + +class 
RadosGWStorage(_CloudStorage): + def __init__(self, hostname, is_secure, storage_path, access_key, secret_key, bucket_name): + upload_params = {} + connect_kwargs = { + 'host': hostname, + 'is_secure': is_secure, + 'calling_format': boto.s3.connection.OrdinaryCallingFormat(), + } + super(RadosGWStorage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key, + connect_kwargs, upload_params, storage_path, access_key, + secret_key, bucket_name) + + # TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624 + def get_direct_download_url(self, path, expires_in=60, requires_cors=False): + if requires_cors: + return None + + return super(RadosGWStorage, self).get_direct_download_url(path, expires_in, requires_cors) + + # TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624 + def get_direct_upload_url(self, path, mime_type, requires_cors=True): + if requires_cors: + return None + + return super(RadosGWStorage, self).get_direct_upload_url(path, mime_type, requires_cors) diff --git a/storage/distributedstorage.py b/storage/distributedstorage.py index 9941f0fa5..1544d9725 100644 --- a/storage/distributedstorage.py +++ b/storage/distributedstorage.py @@ -31,6 +31,7 @@ class DistributedStorage(StoragePaths): self.preferred_locations = list(preferred_locations) get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url) + get_direct_upload_url = _location_aware(BaseStorage.get_direct_upload_url) get_content = _location_aware(BaseStorage.get_content) put_content = _location_aware(BaseStorage.put_content) stream_read = _location_aware(BaseStorage.stream_read) @@ -39,4 +40,5 @@ class DistributedStorage(StoragePaths): list_directory = _location_aware(BaseStorage.list_directory) exists = _location_aware(BaseStorage.exists) remove = _location_aware(BaseStorage.remove) - get_supports_resumeable_downloads = _location_aware(BaseStorage.get_supports_resumeable_downloads) + get_checksum = _location_aware(BaseStorage.get_checksum) + get_supports_resumable_downloads = _location_aware(BaseStorage.get_supports_resumable_downloads) diff --git a/storage/fakestorage.py b/storage/fakestorage.py index 597d22af4..cebed1e80 100644 --- a/storage/fakestorage.py +++ b/storage/fakestorage.py @@ -14,7 +14,7 @@ class FakeStorage(BaseStorage): def stream_read(self, path): yield '' - def stream_write(self, path, fp): + def stream_write(self, path, fp, content_type=None, content_encoding=None): pass def remove(self, path): @@ -22,3 +22,6 @@ class FakeStorage(BaseStorage): def exists(self, path): return False + + def get_checksum(self, path): + return 'abcdefg' \ No newline at end of file diff --git a/storage/local.py b/storage/local.py index 361d76403..056c68c05 100644 --- a/storage/local.py +++ b/storage/local.py @@ -1,6 +1,7 @@ - import os import shutil +import hashlib +import io from storage.basestorage import BaseStorage @@ -40,9 +41,9 @@ class LocalStorage(BaseStorage): def stream_read_file(self, path): path = self._init_path(path) - return open(path, mode='rb') + return io.open(path, mode='rb') - def stream_write(self, path, fp): + def stream_write(self, path, fp, content_type=None, content_encoding=None): # Size is mandatory path = self._init_path(path, create=True) with open(path, mode='wb') as f: @@ -80,3 +81,14 @@ class LocalStorage(BaseStorage): os.remove(path) except OSError: pass + + def get_checksum(self, path): + path = self._init_path(path) + sha_hash = hashlib.sha256() + with open(path, 'r') as to_hash: + while True: + 
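# Hash in buffer_size chunks (64kB, inherited from BaseStorage) so an + # arbitrarily large file never needs to be held in memory at once. +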
buf = to_hash.read(self.buffer_size) + if not buf: + break + sha_hash.update(buf) + return sha_hash.hexdigest()[:7] diff --git a/test/data/test.db b/test/data/test.db index 2c16ee353..29f6e1444 100644 Binary files a/test/data/test.db and b/test/data/test.db differ diff --git a/test/test_api_security.py b/test/test_api_security.py index 3c33ad712..04498ad2a 100644 --- a/test/test_api_security.py +++ b/test/test_api_security.py @@ -8,7 +8,7 @@ from app import app from initdb import setup_database_for_testing, finished_database_for_testing from endpoints.api import api_bp, api -from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam +from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam, TeamMemberInvite from endpoints.api.tag import RepositoryTagImages, RepositoryTag from endpoints.api.search import FindRepositories, EntitySearch from endpoints.api.image import RepositoryImageChanges, RepositoryImage, RepositoryImageList @@ -24,7 +24,7 @@ from endpoints.api.repoemail import RepositoryAuthorizedEmail from endpoints.api.repositorynotification import RepositoryNotification, RepositoryNotificationList from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Recovery, Signout, Signin, User, UserAuthorizationList, UserAuthorization, UserNotification, - VerifyUser) + VerifyUser, DetachExternal) from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList from endpoints.api.logs import UserLogs, OrgLogs, RepositoryLogs @@ -435,6 +435,24 @@ class TestSignin(ApiTestCase): self._run_test('POST', 403, 'devtable', {u'username': 'E9RY', u'password': 'LQ0N'}) +class TestDetachExternal(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(DetachExternal, servicename='someservice') + + def test_post_anonymous(self): + self._run_test('POST', 401, None, {}) + + def test_post_freshuser(self): + self._run_test('POST', 200, 'freshuser', {}) + + def test_post_reader(self): + self._run_test('POST', 200, 'reader', {}) + + def test_post_devtable(self): + self._run_test('POST', 200, 'devtable', {}) + + class TestVerifyUser(ApiTestCase): def setUp(self): ApiTestCase.setUp(self) @@ -3509,6 +3527,36 @@ class TestSuperUserLogs(ApiTestCase): self._run_test('GET', 200, 'devtable', None) +class TestTeamMemberInvite(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(TeamMemberInvite, code='foobarbaz') + + def test_put_anonymous(self): + self._run_test('PUT', 401, None, None) + + def test_put_freshuser(self): + self._run_test('PUT', 400, 'freshuser', None) + + def test_put_reader(self): + self._run_test('PUT', 400, 'reader', None) + + def test_put_devtable(self): + self._run_test('PUT', 400, 'devtable', None) + + def test_delete_anonymous(self): + self._run_test('DELETE', 401, None, None) + + def test_delete_freshuser(self): + self._run_test('DELETE', 400, 'freshuser', None) + + def test_delete_reader(self): + self._run_test('DELETE', 400, 'reader', None) + + def test_delete_devtable(self): + self._run_test('DELETE', 400, 'devtable', None) + + class TestSuperUserList(ApiTestCase): def setUp(self): ApiTestCase.setUp(self) @@ -3527,7 +3575,6 @@ class TestSuperUserList(ApiTestCase): self._run_test('GET', 200, 'devtable', None) - class TestSuperUserManagement(ApiTestCase): def setUp(self): ApiTestCase.setUp(self) diff --git a/test/test_api_usage.py b/test/test_api_usage.py index 004f20651..4a9f1fbdc 100644 --- a/test/test_api_usage.py 
+++ b/test/test_api_usage.py @@ -11,7 +11,7 @@ from app import app from initdb import setup_database_for_testing, finished_database_for_testing from data import model, database -from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam +from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, OrganizationTeam from endpoints.api.tag import RepositoryTagImages, RepositoryTag from endpoints.api.search import FindRepositories, EntitySearch from endpoints.api.image import RepositoryImage, RepositoryImageList @@ -131,6 +131,10 @@ class ApiTestCase(unittest.TestCase): def deleteResponse(self, resource_name, params={}, expected_code=204): rv = self.app.delete(self.url_for(resource_name, params)) + + if rv.status_code != expected_code: + print 'Mismatch data for resource DELETE %s: %s' % (resource_name, rv.data) + self.assertEquals(rv.status_code, expected_code) return rv.data @@ -162,6 +166,13 @@ class ApiTestCase(unittest.TestCase): parsed = py_json.loads(data) return parsed + def assertInTeam(self, data, membername): + for memberData in data['members']: + if memberData['name'] == membername: + return + + self.fail(membername + ' not found in team: ' + json.dumps(data)) + def login(self, username, password='password'): return self.postJsonResponse(Signin, data=dict(username=username, password=password)) @@ -380,6 +391,28 @@ class TestCreateNewUser(ApiTestCase): expected_code=201) self.assertEquals('"Created"', data) + def test_createuser_withteaminvite(self): + inviter = model.get_user(ADMIN_ACCESS_USER) + team = model.get_organization_team(ORGANIZATION, 'owners') + invite = model.add_or_invite_to_team(inviter, team, None, 'foo@example.com') + + details = { + 'inviteCode': invite.invite_token + } + details.update(NEW_USER_DETAILS); + + data = self.postResponse(User, + data=details, + expected_code=201) + self.assertEquals('"Created"', data) + + # Make sure the user was added to the team. + self.login(ADMIN_ACCESS_USER) + json = self.getJsonResponse(TeamMemberList, + params=dict(orgname=ORGANIZATION, + teamname='owners')) + self.assertInTeam(json, NEW_USER_DETAILS['username']) + class TestSignout(ApiTestCase): def test_signout(self): @@ -741,16 +774,43 @@ class TestGetOrganizationTeamMembers(ApiTestCase): params=dict(orgname=ORGANIZATION, teamname='readers')) - assert READ_ACCESS_USER in json['members'] + self.assertEquals(READ_ACCESS_USER, json['members'][1]['name']) class TestUpdateOrganizationTeamMember(ApiTestCase): - def test_addmember(self): + def test_addmember_alreadyteammember(self): self.login(ADMIN_ACCESS_USER) + membername = READ_ACCESS_USER + self.putResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='readers', + membername=membername), + expected_code=400) + + + def test_addmember_orgmember(self): + self.login(ADMIN_ACCESS_USER) + + membername = READ_ACCESS_USER + self.putJsonResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='owners', + membername=membername)) + + # Verify the user was added to the team. + json = self.getJsonResponse(TeamMemberList, + params=dict(orgname=ORGANIZATION, + teamname='owners')) + + self.assertInTeam(json, membername) + + + def test_addmember_robot(self): + self.login(ADMIN_ACCESS_USER) + + membername = ORGANIZATION + '+coolrobot' self.putJsonResponse(TeamMember, params=dict(orgname=ORGANIZATION, teamname='readers', - membername=NO_ACCESS_USER)) + membername=membername)) # Verify the user was added to the team. 
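test_addmember_robot leans on the namespace+shortname convention for robot accounts, which util/names.py (later in this diff) takes apart with parse_robot_username. For illustration (the org name is a placeholder):

    from util.names import parse_robot_username

    # Robot account names are '<namespace>+<shortname>'.
    namespace, shortname = parse_robot_username('some_org+coolrobot')
    # namespace == 'some_org', shortname == 'coolrobot'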
@@ -758,10 +818,168 @@ class TestUpdateOrganizationTeamMember(ApiTestCase): params=dict(orgname=ORGANIZATION, teamname='readers')) - assert NO_ACCESS_USER in json['members'] + self.assertInTeam(json, membername) + + + def test_addmember_invalidrobot(self): + self.login(ADMIN_ACCESS_USER) + + membername = 'freshuser+anotherrobot' + self.putResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='readers', + membername=membername), + expected_code=400) + + + def test_addmember_nonorgmember(self): + self.login(ADMIN_ACCESS_USER) + + membername = NO_ACCESS_USER + response = self.putJsonResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='owners', + membername=membername)) + + + self.assertEquals(True, response['invited']) + + # Make sure the user is not (yet) part of the team. + json = self.getJsonResponse(TeamMemberList, + params=dict(orgname=ORGANIZATION, + teamname='readers')) + + for member in json['members']: + self.assertNotEqual(membername, member['name']) + + +class TestAcceptTeamMemberInvite(ApiTestCase): + def assertInTeam(self, data, membername): + for memberData in data['members']: + if memberData['name'] == membername: + return + + self.fail(membername + ' not found in team: ' + json.dumps(data)) + + def test_accept(self): + self.login(ADMIN_ACCESS_USER) + + # Create the invite. + membername = NO_ACCESS_USER + response = self.putJsonResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='owners', + membername=membername)) + + self.assertEquals(True, response['invited']) + + # Login as the user. + self.login(membername) + + # Accept the invite. + user = model.get_user(membername) + invites = list(model.lookup_team_invites(user)) + self.assertEquals(1, len(invites)) + + self.putJsonResponse(TeamMemberInvite, + params=dict(code=invites[0].invite_token)) + + # Verify the user is now on the team. + json = self.getJsonResponse(TeamMemberList, + params=dict(orgname=ORGANIZATION, + teamname='owners')) + + self.assertInTeam(json, membername) + + # Verify the accept now fails. + self.putResponse(TeamMemberInvite, + params=dict(code=invites[0].invite_token), + expected_code=400) + + + +class TestDeclineTeamMemberInvite(ApiTestCase): + def test_decline_wronguser(self): + self.login(ADMIN_ACCESS_USER) + + # Create the invite. + membername = NO_ACCESS_USER + response = self.putJsonResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='owners', + membername=membername)) + + self.assertEquals(True, response['invited']) + + # Try to decline the invite. + user = model.get_user(membername) + invites = list(model.lookup_team_invites(user)) + self.assertEquals(1, len(invites)) + + self.deleteResponse(TeamMemberInvite, + params=dict(code=invites[0].invite_token), + expected_code=400) + + + def test_decline(self): + self.login(ADMIN_ACCESS_USER) + + # Create the invite. + membername = NO_ACCESS_USER + response = self.putJsonResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='owners', + membername=membername)) + + self.assertEquals(True, response['invited']) + + # Login as the user. + self.login(membername) + + # Decline the invite. + user = model.get_user(membername) + invites = list(model.lookup_team_invites(user)) + self.assertEquals(1, len(invites)) + + self.deleteResponse(TeamMemberInvite, + params=dict(code=invites[0].invite_token)) + + # Make sure the invite was deleted. 
+ self.deleteResponse(TeamMemberInvite, + params=dict(code=invites[0].invite_token), + expected_code=400) class TestDeleteOrganizationTeamMember(ApiTestCase): + def test_deletememberinvite(self): + self.login(ADMIN_ACCESS_USER) + + membername = NO_ACCESS_USER + response = self.putJsonResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='readers', + membername=membername)) + + + self.assertEquals(True, response['invited']) + + # Verify the invite was added. + json = self.getJsonResponse(TeamMemberList, + params=dict(orgname=ORGANIZATION, + teamname='readers', + includePending=True)) + + assert len(json['members']) == 3 + + # Delete the invite. + self.deleteResponse(TeamMember, + params=dict(orgname=ORGANIZATION, teamname='readers', + membername=membername)) + + + # Verify the user was removed from the team. + json = self.getJsonResponse(TeamMemberList, + params=dict(orgname=ORGANIZATION, + teamname='readers', + includePending=True)) + + assert len(json['members']) == 2 + + def test_deletemember(self): self.login(ADMIN_ACCESS_USER) @@ -775,7 +993,7 @@ class TestDeleteOrganizationTeamMember(ApiTestCase): params=dict(orgname=ORGANIZATION, teamname='readers')) - assert not READ_ACCESS_USER in json['members'] + assert len(json['members']) == 1 class TestCreateRepo(ApiTestCase): @@ -2120,7 +2338,7 @@ class TestSuperUserManagement(ApiTestCase): json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser')) self.assertEquals('freshuser', json['username']) - self.assertEquals('no@thanks.com', json['email']) + self.assertEquals('jschorr+test@devtable.com', json['email']) self.assertEquals(False, json['super_user']) def test_delete_user(self): @@ -2143,7 +2361,7 @@ class TestSuperUserManagement(ApiTestCase): # Verify the user exists. json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser')) self.assertEquals('freshuser', json['username']) - self.assertEquals('no@thanks.com', json['email']) + self.assertEquals('jschorr+test@devtable.com', json['email']) # Update the user. 
self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(email='foo@bar.com')) diff --git a/test/testconfig.py b/test/testconfig.py index c74e5712a..35c96a803 100644 --- a/test/testconfig.py +++ b/test/testconfig.py @@ -30,7 +30,7 @@ class TestConfig(DefaultConfig): BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs') BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef', False] - USERFILES_TYPE = 'FakeUserfiles' + USERFILES_LOCATION = 'local_us' FEATURE_SUPER_USERS = True FEATURE_BILLING = True diff --git a/test/testlogs.py b/test/testlogs.py index 27fe7c47b..7fd9eac21 100644 --- a/test/testlogs.py +++ b/test/testlogs.py @@ -198,3 +198,11 @@ class TestBuildLogs(RedisBuildLogs): return None else: return super(TestBuildLogs, self).get_status(build_id) + + def expire_log_entries(self, build_id): + if build_id == self.test_build_id: + return + if not self.allow_delegate: + return None + else: + return super(TestBuildLogs, self).expire_log_entries(build_id) diff --git a/tools/reparsedockerfile.py b/tools/reparsedockerfile.py new file mode 100644 index 000000000..09ac3955f --- /dev/null +++ b/tools/reparsedockerfile.py @@ -0,0 +1,23 @@ +from util.dockerfileparse import parse_dockerfile, ParsedDockerfile, serialize_dockerfile + +with open('Dockerfile.test', 'r') as dockerfileobj: + parsed_dockerfile = parse_dockerfile(dockerfileobj.read()) + +quay_reponame = 'something' +env_command = { + 'command': 'ENV', + 'parameters': 'QUAY_REPOSITORY %s' % quay_reponame +} + +for index, command in reversed(list(enumerate(parsed_dockerfile.commands))): + if command['command'] == 'FROM': + new_command_index = index + 1 + parsed_dockerfile.commands.insert(new_command_index, env_command) + break + +image_and_tag_tuple = parsed_dockerfile.get_image_and_tag() +print image_and_tag_tuple +if image_and_tag_tuple is None or image_and_tag_tuple[0] is None: + raise Exception('Missing FROM command in Dockerfile') + +print serialize_dockerfile(parsed_dockerfile) diff --git a/util/dockerfileparse.py b/util/dockerfileparse.py index cabaf6b16..398673a5d 100644 --- a/util/dockerfileparse.py +++ b/util/dockerfileparse.py @@ -1,6 +1,6 @@ import re -LINE_CONTINUATION_REGEX = re.compile('\s*\\\s*\n') +LINE_CONTINUATION_REGEX = re.compile(r'(\s)*\\(\s)*\n') COMMAND_REGEX = re.compile('([A-Za-z]+)\s(.*)') COMMENT_CHARACTER = '#' diff --git a/util/collections.py b/util/morecollections.py similarity index 100% rename from util/collections.py rename to util/morecollections.py diff --git a/util/names.py b/util/names.py index 2eb622737..31546d450 100644 --- a/util/names.py +++ b/util/names.py @@ -34,6 +34,27 @@ def parse_robot_username(robot_username): return robot_username.split('+', 2) +def parse_urn(urn): + """ Parses a URN, returning a pair that contains a list of URN + namespace parts, followed by the URN's unique ID. + """ + if not urn.startswith('urn:'): + return None + + parts = urn[len('urn:'):].split(':') + return (parts[0:len(parts) - 1], parts[len(parts) - 1]) + + +def parse_single_urn(urn): + """ Parses a URN, returning a pair that contains the first + namespace part, followed by the URN's unique ID. 
+ """ + result = parse_urn(urn) + if result is None or not len(result[0]): + return None + + return (result[0][0], result[1]) + uuid_generator = lambda: str(uuid4()) diff --git a/util/streamingjsonencoder.py b/util/streamingjsonencoder.py new file mode 100644 index 000000000..f51a4ec9b --- /dev/null +++ b/util/streamingjsonencoder.py @@ -0,0 +1,267 @@ +# Adapted from https://gist.github.com/akaihola/1415730#file-streamingjson-py + +# Copyright (c) Django Software Foundation and individual contributors. +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: + +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. + +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. + +# 3. Neither the name of Django nor the names of its contributors may be used +# to endorse or promote products derived from this software without +# specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import collections +import json +from json.encoder import encode_basestring, encode_basestring_ascii, FLOAT_REPR, INFINITY +from types import GeneratorType + + +class StreamingJSONEncoder(json.JSONEncoder): + def iterencode(self, o, _one_shot=False): + """Encode the given object and yield each string + representation as available. + + For example:: + + for chunk in StreamingJSONEncoder().iterencode(bigobject): + mysocket.write(chunk) + + This method is a verbatim copy of + :meth:`json.JSONEncoder.iterencode`. It is + needed because we need to call our patched + :func:`streamingjsonencoder._make_iterencode`. + """ + if self.check_circular: + markers = {} + else: + markers = None + if self.ensure_ascii: + _encoder = encode_basestring_ascii + else: + _encoder = encode_basestring + if self.encoding != 'utf-8': + def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding): + if isinstance(o, str): + o = o.decode(_encoding) + return _orig_encoder(o) + + def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY): + # Check for specials. Note that this type of test is processor- and/or + # platform-specific, so do tests which don't depend on the internals. 
+ + if o != o: + text = 'NaN' + elif o == _inf: + text = 'Infinity' + elif o == _neginf: + text = '-Infinity' + else: + return _repr(o) + + if not allow_nan: + raise ValueError("Out of range float values are not JSON compliant: %r" + % (o,)) + + return text + + _iterencode = _make_iterencode( + markers, self.default, _encoder, self.indent, floatstr, + self.key_separator, self.item_separator, self.sort_keys, + self.skipkeys, _one_shot) + return _iterencode(o, 0) + + +def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, + _item_separator, _sort_keys, _skipkeys, _one_shot, False=False, True=True, + ValueError=ValueError, basestring=basestring, dict=dict, float=float, + GeneratorType=GeneratorType, id=id, int=int, isinstance=isinstance, list=list, + long=long, str=str, tuple=tuple): + """ + This is a patched version of + :func:`django.utils.simplejson.encoder.iterencode`. Whenever it encounters + a generator in the data structure, it encodes it as a JSON list. + """ + def _iterencode_list(lst, _current_indent_level): + if not lst: + # note: empty generators aren't caught here, see below + yield '[]' + return + if markers is not None: + markerid = id(lst) + if markerid in markers: + raise ValueError("Circular reference detected") + markers[markerid] = lst + buf = '[' + if _indent is not None: + _current_indent_level += 1 + newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) + separator = _item_separator + newline_indent + buf += newline_indent + else: + newline_indent = None + separator = _item_separator + first = True + for value in lst: + if first: + first = False + else: + buf = separator + if isinstance(value, basestring): + yield buf + _encoder(value) + elif value is None: + yield buf + 'null' + elif value is True: + yield buf + 'true' + elif value is False: + yield buf + 'false' + elif isinstance(value, (int, long)): + yield buf + str(value) + elif isinstance(value, float): + yield buf + _floatstr(value) + else: + yield buf + if isinstance(value, (list, tuple, GeneratorType)): + chunks = _iterencode_list(value, _current_indent_level) + elif isinstance(value, dict): + chunks = _iterencode_dict(value, _current_indent_level) + else: + chunks = _iterencode(value, _current_indent_level) + for chunk in chunks: + yield chunk + if first: + # we had an empty generator + yield buf + if newline_indent is not None: + _current_indent_level -= 1 + yield '\n' + (' ' * (_indent * _current_indent_level)) + yield ']' + if markers is not None: + del markers[markerid] + + def _iterencode_dict(dct, _current_indent_level): + if not dct: + yield '{}' + return + if markers is not None: + markerid = id(dct) + if markerid in markers: + raise ValueError("Circular reference detected") + markers[markerid] = dct + yield '{' + if _indent is not None: + _current_indent_level += 1 + newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) + item_separator = _item_separator + newline_indent + yield newline_indent + else: + newline_indent = None + item_separator = _item_separator + first = True + if _sort_keys: + items = dct.items() + items.sort(key=lambda kv: kv[0]) + else: + items = dct.iteritems() + for key, value in items: + if isinstance(key, basestring): + pass + # JavaScript is weakly typed for these, so it makes sense to + # also allow them. Many encoders seem to do something like this. 
+ elif isinstance(key, float): + key = _floatstr(key) + elif isinstance(key, (int, long)): + key = str(key) + elif key is True: + key = 'true' + elif key is False: + key = 'false' + elif key is None: + key = 'null' + elif _skipkeys: + continue + else: + raise TypeError("key %r is not a string" % (key,)) + if first: + first = False + else: + yield item_separator + yield _encoder(key) + yield _key_separator + if isinstance(value, basestring): + yield _encoder(value) + elif value is None: + yield 'null' + elif value is True: + yield 'true' + elif value is False: + yield 'false' + elif isinstance(value, (int, long)): + yield str(value) + elif isinstance(value, float): + yield _floatstr(value) + else: + if isinstance(value, collections.Mapping): + chunks = _iterencode_dict(value, _current_indent_level) + elif isinstance(value, collections.Iterable): + chunks = _iterencode_list(value, _current_indent_level) + else: + chunks = _iterencode(value, _current_indent_level) + for chunk in chunks: + yield chunk + if newline_indent is not None: + _current_indent_level -= 1 + yield '\n' + (' ' * (_indent * _current_indent_level)) + yield '}' + if markers is not None: + del markers[markerid] + + def _iterencode(o, _current_indent_level): + if isinstance(o, basestring): + yield _encoder(o) + elif o is None: + yield 'null' + elif o is True: + yield 'true' + elif o is False: + yield 'false' + elif isinstance(o, (int, long)): + yield str(o) + elif isinstance(o, float): + yield _floatstr(o) + elif isinstance(o, collections.Mapping): + for chunk in _iterencode_dict(o, _current_indent_level): + yield chunk + elif isinstance(o, collections.Iterable): + for chunk in _iterencode_list(o, _current_indent_level): + yield chunk + else: + if markers is not None: + markerid = id(o) + if markerid in markers: + raise ValueError("Circular reference detected") + markers[markerid] = o + o = _default(o) + for chunk in _iterencode(o, _current_indent_level): + yield chunk + if markers is not None: + del markers[markerid] + + return _iterencode \ No newline at end of file diff --git a/util/useremails.py b/util/useremails.py index f280e276b..74e98f429 100644 --- a/util/useremails.py +++ b/util/useremails.py @@ -6,7 +6,7 @@ from data import model from util.gravatar import compute_hash def user_reference(username): - user = model.get_user(username) + user = model.get_user_or_org(username) if not user: return username @@ -123,6 +123,14 @@ def send_payment_failed(email, username): 'username': username }) +def send_org_invite_email(member_name, member_email, orgname, team, adder, code): + send_email(member_email, 'Invitation to join team', 'teaminvite', { + 'inviter': adder, + 'token': code, + 'organization': orgname, + 'teamname': team + }) + def send_invoice_email(email, contents): # Note: This completely generates the contents of the email, so we don't use the diff --git a/workers/buildlogsarchiver.py b/workers/buildlogsarchiver.py new file mode 100644 index 000000000..460fd7f4e --- /dev/null +++ b/workers/buildlogsarchiver.py @@ -0,0 +1,56 @@ +import logging + +from apscheduler.schedulers.blocking import BlockingScheduler +from peewee import fn +from tempfile import SpooledTemporaryFile +from gzip import GzipFile + +from data import model +from data.archivedlogs import JSON_MIMETYPE +from data.database import RepositoryBuild, db_random_func +from app import build_logs, log_archive +from util.streamingjsonencoder import StreamingJSONEncoder + +POLL_PERIOD_SECONDS = 30 +MEMORY_TEMPFILE_SIZE = 64 * 1024 # Large enough to handle 
diff --git a/workers/buildlogsarchiver.py b/workers/buildlogsarchiver.py
new file mode 100644
index 000000000..460fd7f4e
--- /dev/null
+++ b/workers/buildlogsarchiver.py
@@ -0,0 +1,56 @@
+import logging
+
+from apscheduler.schedulers.blocking import BlockingScheduler
+from peewee import fn
+from tempfile import SpooledTemporaryFile
+from gzip import GzipFile
+
+from data import model
+from data.archivedlogs import JSON_MIMETYPE
+from data.database import RepositoryBuild, db_random_func
+from app import build_logs, log_archive
+from util.streamingjsonencoder import StreamingJSONEncoder
+
+POLL_PERIOD_SECONDS = 30
+MEMORY_TEMPFILE_SIZE = 64 * 1024  # Large enough to handle approximately 99% of builds in memory
+
+logger = logging.getLogger(__name__)
+sched = BlockingScheduler()
+
+@sched.scheduled_job(trigger='interval', seconds=POLL_PERIOD_SECONDS)
+def archive_redis_buildlogs():
+  """ Archive a single build, choosing a candidate at random. This process must be idempotent to
+      avoid needing two-phase commit. """
+  try:
+    # Get a random build to archive
+    to_archive = model.archivable_buildlogs_query().order_by(db_random_func()).get()
+    logger.debug('Archiving: %s', to_archive.uuid)
+
+    length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
+    to_encode = {
+      'start': 0,
+      'total': length,
+      'logs': entries,
+    }
+
+    with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
+      with GzipFile('testarchive', fileobj=tempfile) as zipstream:
+        for chunk in StreamingJSONEncoder().iterencode(to_encode):
+          zipstream.write(chunk)
+
+      tempfile.seek(0)
+      log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
+                             file_id=to_archive.uuid)
+
+    to_archive.logs_archived = True
+    to_archive.save()
+
+    build_logs.expire_log_entries(to_archive.uuid)
+
+  except RepositoryBuild.DoesNotExist:
+    logger.debug('No more builds to archive')
+
+
+if __name__ == "__main__":
+  logging.basicConfig(level=logging.DEBUG)
+  sched.start()
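Review note: SpooledTemporaryFile keeps the compressed archive entirely in memory until it grows past MEMORY_TEMPFILE_SIZE and only then spills to disk, so the common small build never touches the filesystem. A standalone sketch of the same gzip-then-rewind pattern used above (stdlib only; the payload is made up):

from tempfile import SpooledTemporaryFile
from gzip import GzipFile

with SpooledTemporaryFile(64 * 1024) as buf:
  # the GzipFile must be closed before reading so its trailer is flushed;
  # the with-block guarantees that, mirroring the worker above
  with GzipFile('archive', 'wb', fileobj=buf) as gz:
    gz.write('{"start": 0, "total": 0, "logs": []}')
  buf.seek(0)
  compressed = buf.read()  # complete gzip stream, ready to hand to storage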
diff --git a/workers/dockerfilebuild.py b/workers/dockerfilebuild.py
index b373a00a9..e43336718 100644
--- a/workers/dockerfilebuild.py
+++ b/workers/dockerfilebuild.py
@@ -1,6 +1,7 @@
 import logging.config
 
-logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
+if __name__ == "__main__":
+  logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
 
 import logging
 import argparse
@@ -23,6 +24,7 @@ from collections import defaultdict
 from requests.exceptions import ConnectionError
 
 from data import model
+from data.database import BUILD_PHASE
 from workers.worker import Worker, WorkerUnhealthyException, JobException
 from app import userfiles as user_files, build_logs, sentry, dockerfile_build_queue
 from endpoints.notificationhelper import spawn_notification
@@ -223,6 +225,13 @@ class DockerfileBuildContext(object):
     if self._pull_credentials:
       logger.debug('Logging in with pull credentials: %s@%s',
                    self._pull_credentials['username'], self._pull_credentials['registry'])
+
+      self._build_logger('Logging in with pull credentials: %s@%s' %
+                         (self._pull_credentials['username'], self._pull_credentials['registry']),
+                         log_data={'phasestep': 'login',
+                                   'username': self._pull_credentials['username'],
+                                   'registry': self._pull_credentials['registry']})
+
       self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
                            registry=self._pull_credentials['registry'], reauth=True)
 
@@ -233,7 +242,12 @@ class DockerfileBuildContext(object):
       raise JobException('Missing FROM command in Dockerfile')
 
     image_and_tag = ':'.join(image_and_tag_tuple)
-    self._build_logger('Pulling base image: %s' % image_and_tag)
+
+    self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
+      'phasestep': 'pull',
+      'repo_url': image_and_tag
+    })
+
     pull_status = self._build_cl.pull(image_and_tag, stream=True)
 
     self.__monitor_completion(pull_status, 'Downloading', self._status, 'pull_completion')
@@ -495,7 +509,7 @@ class DockerfileBuildWorker(Worker):
 
     job_config = json.loads(repository_build.job_config)
 
-    resource_url = user_files.get_file_url(repository_build.resource_key)
+    resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
     tag_names = job_config['docker_tags']
     build_subdir = job_config['build_subdir']
     repo = job_config['repository']
@@ -545,7 +559,7 @@ class DockerfileBuildWorker(Worker):
 
     if c_type not in self._mime_processors:
       log_appender('error', build_logs.PHASE)
-      repository_build.phase = 'error'
+      repository_build.phase = BUILD_PHASE.ERROR
       repository_build.save()
       message = 'Unknown mime-type: %s' % c_type
       log_appender(message, build_logs.ERROR)
@@ -554,7 +568,7 @@ class DockerfileBuildWorker(Worker):
 
     # Try to build the build directory package from the buildpack.
     log_appender('unpacking', build_logs.PHASE)
-    repository_build.phase = 'unpacking'
+    repository_build.phase = BUILD_PHASE.UNPACKING
     repository_build.save()
 
     build_dir = None
@@ -572,20 +586,20 @@ class DockerfileBuildWorker(Worker):
                                 repository_build.uuid, self._cache_size_gb,
                                 pull_credentials) as build_ctxt:
       log_appender('pulling', build_logs.PHASE)
-      repository_build.phase = 'pulling'
+      repository_build.phase = BUILD_PHASE.PULLING
       repository_build.save()
       build_ctxt.pull()
 
       self.extend_processing(RESERVATION_TIME)
 
       log_appender('building', build_logs.PHASE)
-      repository_build.phase = 'building'
+      repository_build.phase = BUILD_PHASE.BUILDING
       repository_build.save()
       built_image = build_ctxt.build(self.extend_processing)
 
       if not built_image:
         log_appender('error', build_logs.PHASE)
-        repository_build.phase = 'error'
+        repository_build.phase = BUILD_PHASE.ERROR
         repository_build.save()
 
         message = 'Unable to build dockerfile.'
@@ -598,13 +612,13 @@ class DockerfileBuildWorker(Worker):
       self.extend_processing(RESERVATION_TIME)
 
       log_appender('pushing', build_logs.PHASE)
-      repository_build.phase = 'pushing'
+      repository_build.phase = BUILD_PHASE.PUSHING
       repository_build.save()
 
       build_ctxt.push(built_image)
 
       log_appender('complete', build_logs.PHASE)
-      repository_build.phase = 'complete'
+      repository_build.phase = BUILD_PHASE.COMPLETE
       repository_build.save()
 
       # Spawn a notification that the build has completed.
@@ -641,20 +655,20 @@ class DockerfileBuildWorker(Worker):
       sentry.client.captureException()
       log_appender('error', build_logs.PHASE)
       logger.exception('Exception when processing request.')
-      repository_build.phase = 'error'
+      repository_build.phase = BUILD_PHASE.ERROR
       repository_build.save()
       log_appender(str(exc), build_logs.ERROR)
 
       # Raise the exception to the queue.
       raise JobException(str(exc))
 
+if __name__ == "__main__":
+  desc = 'Worker daemon to monitor dockerfile build'
+  parser = argparse.ArgumentParser(description=desc)
+  parser.add_argument('--cachegb', default=20, type=float,
+                      help='Maximum cache size in gigabytes.')
+  args = parser.parse_args()
 
-desc = 'Worker daemon to monitor dockerfile build'
-parser = argparse.ArgumentParser(description=desc)
-parser.add_argument('--cachegb', default=20, type=float,
-                    help='Maximum cache size in gigabytes.')
-args = parser.parse_args()
-
-worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
-                               reservation_seconds=RESERVATION_TIME)
-worker.start(start_status_server_port=8000)
+  worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
+                                 reservation_seconds=RESERVATION_TIME)
+  worker.start(start_status_server_port=8000)
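Review note: BUILD_PHASE is imported from data.database, whose definition is not included in this diff. Judging from the usages above, it is presumably a simple constants class along these lines (a guess, not the actual code); keeping the constant values identical to the old string literals means existing RepositoryBuild rows need no migration:

class BUILD_PHASE(object):
  """ Hypothetical sketch; the real class lives in data/database.py. """
  ERROR = 'error'
  UNPACKING = 'unpacking'
  PULLING = 'pulling'
  BUILDING = 'building'
  PUSHING = 'pushing'
  COMPLETE = 'complete'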