diff --git a/Dockerfile.buildworker b/Dockerfile.buildworker index 159c7867c..d81b70d69 100644 --- a/Dockerfile.buildworker +++ b/Dockerfile.buildworker @@ -1,4 +1,4 @@ -FROM phusion/baseimage:0.9.13 +FROM phusion/baseimage:0.9.15 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root @@ -7,13 +7,15 @@ ENV HOME /root RUN apt-get update # 10SEP2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands -RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev +RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev # Build the python dependencies ADD requirements.txt requirements.txt RUN virtualenv --distribute venv RUN venv/bin/pip install -r requirements.txt +RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev + ### End common section ### RUN apt-get install -y lxc aufs-tools @@ -30,6 +32,10 @@ ADD conf/init/preplogsdir.sh /etc/my_init.d/ ADD conf/init/tutumdocker /etc/service/tutumdocker ADD conf/init/dockerfilebuild /etc/service/dockerfilebuild +RUN apt-get remove -y --auto-remove nodejs npm git phantomjs +RUN apt-get autoremove -y +RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + VOLUME ["/var/lib/docker", "/var/lib/lxc", "/conf/stack", "/var/log"] CMD ["/sbin/my_init"] diff --git a/Dockerfile.web b/Dockerfile.web index 605b088b3..3bda5ef4e 100644 --- a/Dockerfile.web +++ b/Dockerfile.web @@ -1,4 +1,4 @@ -FROM phusion/baseimage:0.9.13 +FROM phusion/baseimage:0.9.15 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root @@ -7,13 +7,17 @@ ENV HOME /root RUN apt-get update # 10SEP2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands -RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev +RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev # Build the python dependencies ADD requirements.txt requirements.txt RUN virtualenv --distribute venv RUN venv/bin/pip install -r requirements.txt +RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev + +### End common section ### + # Install the binary dependencies ADD binary_dependencies binary_dependencies RUN gdebi --n binary_dependencies/*.deb @@ -34,7 +38,9 @@ ADD conf/init/doupdatelimits.sh /etc/my_init.d/ ADD conf/init/preplogsdir.sh /etc/my_init.d/ ADD conf/init/runmigration.sh /etc/my_init.d/ -ADD conf/init/gunicorn /etc/service/gunicorn +ADD conf/init/gunicorn_web /etc/service/gunicorn_web +ADD conf/init/gunicorn_registry /etc/service/gunicorn_registry +ADD conf/init/gunicorn_verbs /etc/service/gunicorn_verbs ADD conf/init/nginx /etc/service/nginx ADD conf/init/diffsworker /etc/service/diffsworker ADD conf/init/notificationworker /etc/service/notificationworker @@ -44,6 +50,9 @@ ADD conf/init/buildlogsarchiver /etc/service/buildlogsarchiver RUN mkdir static/fonts static/ldn RUN venv/bin/python -m 
external_libraries +RUN apt-get autoremove -y +RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + # Run the tests RUN TEST=true venv/bin/python -m unittest discover diff --git a/app.py b/app.py index 8f0a57d62..3bca06cd4 100644 --- a/app.py +++ b/app.py @@ -3,7 +3,7 @@ import os import json import yaml -from flask import Flask as BaseFlask, Config as BaseConfig +from flask import Flask as BaseFlask, Config as BaseConfig, request, Request from flask.ext.principal import Principal from flask.ext.login import LoginManager from flask.ext.mail import Mail @@ -18,12 +18,12 @@ from data.users import UserAuthentication from util.analytics import Analytics from util.exceptionlog import Sentry from util.queuemetrics import QueueMetrics +from util.names import urn_generator from data.billing import Billing from data.buildlogs import BuildLogs from data.archivedlogs import LogArchive from data.queue import WorkQueue from data.userevent import UserEventsBuilderModule -from datetime import datetime class Config(BaseConfig): @@ -60,6 +60,7 @@ LICENSE_FILENAME = 'conf/stack/license.enc' app = Flask(__name__) logger = logging.getLogger(__name__) +profile = logging.getLogger('profile') if 'TEST' in os.environ: @@ -82,6 +83,37 @@ else: environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}')) app.config.update(environ_config) + app.teardown_request(database.close_db_filter) + + +class RequestWithId(Request): + request_gen = staticmethod(urn_generator(['request'])) + + def __init__(self, *args, **kwargs): + super(RequestWithId, self).__init__(*args, **kwargs) + self.request_id = self.request_gen() + + +@app.before_request +def _request_start(): + profile.debug('Starting request: %s', request.path) + + +@app.after_request +def _request_end(r): + profile.debug('Ending request: %s', request.path) + return r + + +class InjectingFilter(logging.Filter): + def filter(self, record): + record.msg = '[%s] %s' % (request.request_id, record.msg) + return True + +profile.addFilter(InjectingFilter()) + +app.request_class = RequestWithId + features.import_features(app.config) Principal(app, use_sessions=False) @@ -105,9 +137,6 @@ dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf reporter=queue_metrics.report) notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf) -# TODO: Remove this in the prod push following the notifications change. 
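The request-ID plumbing added to app.py above (RequestWithId plus InjectingFilter) can be exercised outside Flask; here is a minimal standalone sketch using only the standard library, with a hard-coded example URN standing in for the util.names.urn_generator callable:

import logging

class InjectingFilter(logging.Filter):
  # Prefix every record with the current request's ID, as app.py does.
  def __init__(self, request_id):
    logging.Filter.__init__(self)
    self.request_id = request_id

  def filter(self, record):
    record.msg = '[%s] %s' % (self.request_id, record.msg)
    return True

profile = logging.getLogger('profile')
profile.setLevel(logging.DEBUG)
profile.addHandler(logging.StreamHandler())
profile.addFilter(InjectingFilter('urn:request:example'))
profile.debug('Starting request: %s', '/v1/repositories')
# prints: [urn:request:example] Starting request: /v1/repositories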
-webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'], tf) - database.configure(app.config) model.config.app_config = app.config model.config.store = storage diff --git a/application.py b/application.py index 4c0adb9b9..a9bd0df6e 100644 --- a/application.py +++ b/application.py @@ -1,90 +1,14 @@ import logging import logging.config -import uuid - -from peewee import Proxy from app import app as application -from flask import request, Request -from util.names import urn_generator -from data.database import db as model_db, read_slave - -# Turn off debug logging for boto -logging.getLogger('boto').setLevel(logging.CRITICAL) - -from endpoints.api import api_bp -from endpoints.index import index -from endpoints.web import web -from endpoints.tags import tags -from endpoints.registry import registry -from endpoints.verbs import verbs -from endpoints.webhooks import webhooks -from endpoints.realtime import realtime -from endpoints.callbacks import callback - -from logentries import LogentriesHandler -logger = logging.getLogger(__name__) +# Bind all of the blueprints +import web +import verbs +import registry -werkzeug = logging.getLogger('werkzeug') -werkzeug.setLevel(logging.DEBUG) - -profile = logging.getLogger('profile') -profile.setLevel(logging.DEBUG) - -logentries_key = application.config.get('LOGENTRIES_KEY', None) -if logentries_key: - logger.debug('Initializing logentries with key: %s' % logentries_key) - werkzeug.addHandler(LogentriesHandler(logentries_key)) - profile.addHandler(LogentriesHandler(logentries_key)) - -application.register_blueprint(web) -application.register_blueprint(callback, url_prefix='/oauth2') -application.register_blueprint(index, url_prefix='/v1') -application.register_blueprint(tags, url_prefix='/v1') -application.register_blueprint(registry, url_prefix='/v1') -application.register_blueprint(verbs, url_prefix='/c1') -application.register_blueprint(api_bp, url_prefix='/api') -application.register_blueprint(webhooks, url_prefix='/webhooks') -application.register_blueprint(realtime, url_prefix='/realtime') - -class RequestWithId(Request): - request_gen = staticmethod(urn_generator(['request'])) - - def __init__(self, *args, **kwargs): - super(RequestWithId, self).__init__(*args, **kwargs) - self.request_id = self.request_gen() - -@application.before_request -def _request_start(): - profile.debug('Starting request: %s', request.path) - - -@application.after_request -def _request_end(r): - profile.debug('Ending request: %s', request.path) - return r - -class InjectingFilter(logging.Filter): - def filter(self, record): - record.msg = '[%s] %s' % (request.request_id, record.msg) - return True - -profile.addFilter(InjectingFilter()) - -def close_db(exc): - db = model_db - if not db.is_closed(): - logger.debug('Disconnecting from database.') - db.close() - - if read_slave.obj is not None and not read_slave.is_closed(): - logger.debug('Disconnecting from read slave.') - read_slave.close() - -application.teardown_request(close_db) -application.request_class = RequestWithId if __name__ == '__main__': logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False) diff --git a/conf/gunicorn_config.py b/conf/gunicorn_registry.py similarity index 66% rename from conf/gunicorn_config.py rename to conf/gunicorn_registry.py index ca8ad5363..3a7d4462b 100644 --- a/conf/gunicorn_config.py +++ b/conf/gunicorn_registry.py @@ -1,5 +1,5 @@ -bind = 'unix:/tmp/gunicorn.sock' -workers = 16 +bind = 'unix:/tmp/gunicorn_registry.sock' +workers = 8 worker_class = 'gevent' 
timeout = 2000 logconfig = 'conf/logging.conf' diff --git a/conf/gunicorn_verbs.py b/conf/gunicorn_verbs.py new file mode 100644 index 000000000..c23b3f304 --- /dev/null +++ b/conf/gunicorn_verbs.py @@ -0,0 +1,6 @@ +bind = 'unix:/tmp/gunicorn_verbs.sock' +workers = 4 +timeout = 2000 +logconfig = 'conf/logging.conf' +pythonpath = '.' +preload_app = True \ No newline at end of file diff --git a/conf/gunicorn_web.py b/conf/gunicorn_web.py new file mode 100644 index 000000000..919dfc88d --- /dev/null +++ b/conf/gunicorn_web.py @@ -0,0 +1,7 @@ +bind = 'unix:/tmp/gunicorn_web.sock' +workers = 2 +worker_class = 'gevent' +timeout = 30 +logconfig = 'conf/logging.conf' +pythonpath = '.' +preload_app = True \ No newline at end of file diff --git a/conf/http-base.conf b/conf/http-base.conf index bfa1a85f2..ad3d9f178 100644 --- a/conf/http-base.conf +++ b/conf/http-base.conf @@ -14,8 +14,12 @@ gzip_types text/plain text/xml text/css text/javascript application/x-javascript application/octet-stream; -upstream app_server { - server unix:/tmp/gunicorn.sock fail_timeout=0; - # For a TCP configuration: - # server 192.168.0.7:8000 fail_timeout=0; +upstream web_app_server { + server unix:/tmp/gunicorn_web.sock fail_timeout=0; +} +upstream verbs_app_server { + server unix:/tmp/gunicorn_verbs.sock fail_timeout=0; +} +upstream registry_app_server { + server unix:/tmp/gunicorn_registry.sock fail_timeout=0; } diff --git a/conf/init/gunicorn/log/run b/conf/init/gunicorn/log/run deleted file mode 100755 index 106d6c4f8..000000000 --- a/conf/init/gunicorn/log/run +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -exec svlogd /var/log/gunicorn/ \ No newline at end of file diff --git a/conf/init/gunicorn/run b/conf/init/gunicorn/run deleted file mode 100755 index a61e7c651..000000000 --- a/conf/init/gunicorn/run +++ /dev/null @@ -1,8 +0,0 @@ -#! /bin/bash - -echo 'Starting gunicon' - -cd / -venv/bin/gunicorn -c conf/gunicorn_config.py application:application - -echo 'Gunicorn exited' \ No newline at end of file diff --git a/conf/init/gunicorn_registry/log/run b/conf/init/gunicorn_registry/log/run new file mode 100755 index 000000000..1896ef533 --- /dev/null +++ b/conf/init/gunicorn_registry/log/run @@ -0,0 +1,2 @@ +#!/bin/sh +exec svlogd /var/log/gunicorn_registry/ \ No newline at end of file diff --git a/conf/init/gunicorn_registry/run b/conf/init/gunicorn_registry/run new file mode 100755 index 000000000..a0a09f5a2 --- /dev/null +++ b/conf/init/gunicorn_registry/run @@ -0,0 +1,8 @@ +#! /bin/bash + +echo 'Starting gunicon' + +cd / +venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application + +echo 'Gunicorn exited' \ No newline at end of file diff --git a/conf/init/gunicorn_verbs/log/run b/conf/init/gunicorn_verbs/log/run new file mode 100755 index 000000000..2b061e193 --- /dev/null +++ b/conf/init/gunicorn_verbs/log/run @@ -0,0 +1,2 @@ +#!/bin/sh +exec svlogd /var/log/gunicorn_verbs/ \ No newline at end of file diff --git a/conf/init/gunicorn_verbs/run b/conf/init/gunicorn_verbs/run new file mode 100755 index 000000000..1cf2ee51c --- /dev/null +++ b/conf/init/gunicorn_verbs/run @@ -0,0 +1,8 @@ +#! 
/bin/bash + +echo 'Starting gunicon' + +cd / +nice -10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application + +echo 'Gunicorn exited' \ No newline at end of file diff --git a/conf/init/gunicorn_web/log/run b/conf/init/gunicorn_web/log/run new file mode 100755 index 000000000..de17cdf61 --- /dev/null +++ b/conf/init/gunicorn_web/log/run @@ -0,0 +1,2 @@ +#!/bin/sh +exec svlogd /var/log/gunicorn_web/ \ No newline at end of file diff --git a/conf/init/gunicorn_web/run b/conf/init/gunicorn_web/run new file mode 100755 index 000000000..86d107618 --- /dev/null +++ b/conf/init/gunicorn_web/run @@ -0,0 +1,8 @@ +#! /bin/bash + +echo 'Starting gunicon' + +cd / +venv/bin/gunicorn -c conf/gunicorn_web.py web:application + +echo 'Gunicorn exited' \ No newline at end of file diff --git a/conf/logging.conf b/conf/logging.conf index 4023e7743..d009f08ee 100644 --- a/conf/logging.conf +++ b/conf/logging.conf @@ -1,5 +1,5 @@ [loggers] -keys=root, gunicorn.error, gunicorn.access, application.profiler +keys=root, gunicorn.error, gunicorn.access, application.profiler, boto, werkzeug [handlers] keys=console @@ -17,6 +17,18 @@ qualname=application.profiler level=DEBUG handlers=console +[logger_boto] +level=INFO +handlers=console +propagate=0 +qualname=boto + +[logger_werkzeug] +level=DEBUG +handlers=console +propagate=0 +qualname=werkzeug + [logger_gunicorn.error] level=INFO handlers=console diff --git a/conf/server-base.conf b/conf/server-base.conf index 4636afdde..01cf74ce4 100644 --- a/conf/server-base.conf +++ b/conf/server-base.conf @@ -1,4 +1,3 @@ -client_max_body_size 20G; client_body_temp_path /var/log/nginx/client_body 1 2; server_name _; @@ -11,17 +10,41 @@ if ($args ~ "_escaped_fragment_") { rewrite ^ /snapshot$uri; } +proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; +proxy_set_header X-Forwarded-Proto $scheme; +proxy_set_header Host $http_host; +proxy_redirect off; + +proxy_set_header Transfer-Encoding $http_transfer_encoding; + location / { - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Host $http_host; - proxy_redirect off; + proxy_pass http://web_app_server; +} + +location /realtime { + proxy_pass http://web_app_server; + proxy_buffering off; + proxy_request_buffering off; +} + +location /v1/ { proxy_buffering off; proxy_request_buffering off; - proxy_set_header Transfer-Encoding $http_transfer_encoding; - proxy_pass http://app_server; + proxy_pass http://registry_app_server; + proxy_read_timeout 2000; + proxy_temp_path /var/log/nginx/proxy_temp 1 2; + + client_max_body_size 20G; +} + +location /c1/ { + proxy_buffering off; + + proxy_request_buffering off; + + proxy_pass http://verbs_app_server; proxy_read_timeout 2000; proxy_temp_path /var/log/nginx/proxy_temp 1 2; } diff --git a/config.py b/config.py index 6742d1a43..5a8b24b42 100644 --- a/config.py +++ b/config.py @@ -19,7 +19,8 @@ def build_requests_session(): CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID', 'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY', 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE', - 'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID'] + 'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID', + 'CONTACT_INFO'] def getFrontendVisibleConfig(config_dict): @@ -48,6 +49,12 @@ class DefaultConfig(object): REGISTRY_TITLE = 'Quay.io' REGISTRY_TITLE_SHORT = 'Quay.io' + CONTACT_INFO = [ + 'mailto:support@quay.io', + 
'irc://chat.freenode.net:6665/quayio', + 'tel:+1-888-930-3475', + 'https://twitter.com/quayio', + ] # Mail config MAIL_SERVER = '' @@ -55,7 +62,7 @@ class DefaultConfig(object): MAIL_PORT = 587 MAIL_USERNAME = '' MAIL_PASSWORD = '' - DEFAULT_MAIL_SENDER = '' + MAIL_DEFAULT_SENDER = 'support@quay.io' MAIL_FAIL_SILENTLY = False TESTING = True @@ -80,11 +87,11 @@ class DefaultConfig(object): AUTHENTICATION_TYPE = 'Database' # Build logs - BUILDLOGS_REDIS = {'host': 'logs.quay.io'} + BUILDLOGS_REDIS = {'host': 'localhost'} BUILDLOGS_OPTIONS = [] # Real-time user events - USER_EVENTS_REDIS = {'host': 'logs.quay.io'} + USER_EVENTS_REDIS = {'host': 'localhost'} # Stripe config BILLING_TYPE = 'FakeStripe' @@ -132,9 +139,6 @@ class DefaultConfig(object): DIFFS_QUEUE_NAME = 'imagediff' DOCKERFILE_BUILD_QUEUE_NAME = 'dockerfilebuild' - # TODO: Remove this in the prod push following the notifications change. - WEBHOOK_QUEUE_NAME = 'webhook' - # Super user config. Note: This MUST BE an empty list for the default config. SUPER_USERS = [] diff --git a/data/database.py b/data/database.py index a57b6cfb9..1914a954c 100644 --- a/data/database.py +++ b/data/database.py @@ -7,9 +7,9 @@ from datetime import datetime from peewee import * from data.read_slave import ReadSlaveModel from sqlalchemy.engine.url import make_url -from urlparse import urlparse from util.names import urn_generator + logger = logging.getLogger(__name__) @@ -80,6 +80,16 @@ def uuid_generator(): return str(uuid.uuid4()) +def close_db_filter(_): + if not db.is_closed(): + logger.debug('Disconnecting from database.') + db.close() + + if read_slave.obj is not None and not read_slave.is_closed(): + logger.debug('Disconnecting from read slave.') + read_slave.close() + + class BaseModel(ReadSlaveModel): class Meta: database = db diff --git a/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py b/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py index dc01f8676..87d668fa3 100644 --- a/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py +++ b/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py @@ -21,6 +21,10 @@ def upgrade(tables): sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragetransformation')) ) op.create_index('imagestoragetransformation_name', 'imagestoragetransformation', ['name'], unique=True) + op.bulk_insert(tables.imagestoragetransformation, + [ + {'id':1, 'name':'squash'}, + ]) op.create_table('derivedimagestorage', sa.Column('id', sa.Integer(), nullable=False), sa.Column('source_id', sa.Integer(), nullable=True), diff --git a/data/model/legacy.py b/data/model/legacy.py index d0233972d..6466e4171 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -13,7 +13,8 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor Notification, ImageStorageLocation, ImageStoragePlacement, ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite, - DerivedImageStorage, random_string_generator, db, BUILD_PHASE) + DerivedImageStorage, ImageStorageTransformation, random_string_generator, + db, BUILD_PHASE) from peewee import JOIN_LEFT_OUTER, fn from util.validation import (validate_username, validate_email, validate_password, INVALID_PASSWORD_MESSAGE) @@ -578,6 +579,13 @@ def get_user(username): return None +def get_namespace_user(username): + try: + return User.get(User.username == username) + except User.DoesNotExist: + return None + + def 
get_user_or_org(username): try: return User.get(User.username == username, User.robot == False) @@ -1247,6 +1255,20 @@ def find_or_create_derived_storage(source, transformation_name, preferred_locati return new_storage +def delete_derived_storage_by_uuid(storage_uuid): + try: + image_storage = get_storage_by_uuid(storage_uuid) + except InvalidImageException: + return + + try: + DerivedImageStorage.get(derivative=image_storage) + except DerivedImageStorage.DoesNotExist: + return + + image_storage.delete_instance(recursive=True) + + def get_storage_by_uuid(storage_uuid): placements = list(ImageStoragePlacement .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation) @@ -1305,7 +1327,15 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created # We cleanup any old checksum in case it's a retry after a fail fetched.storage.checksum = None - fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None) + fetched.storage.created = datetime.now() + + if created_date_str is not None: + try: + fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None) + except: + # parse raises different exceptions, so we cannot use a specific kind of handler here. + pass + fetched.storage.comment = comment fetched.storage.command = command @@ -1390,48 +1420,87 @@ def garbage_collect_repository(namespace_name, repository_name): all_images = {int(img.id): img for img in all_repo_images} to_remove = set(all_images.keys()).difference(referenced_anscestors) - logger.info('Cleaning up unreferenced images: %s', to_remove) + if len(to_remove) > 0: + logger.info('Cleaning up unreferenced images: %s', to_remove) + storage_id_whitelist = {all_images[to_remove_id].storage.id for to_remove_id in to_remove} - uuids_to_check_for_gc = set() - for image_id_to_remove in to_remove: - image_to_remove = all_images[image_id_to_remove] + Image.delete().where(Image.id << list(to_remove)).execute() - logger.debug('Adding image storage to the gc list: %s', - image_to_remove.storage.uuid) - uuids_to_check_for_gc.add(image_to_remove.storage.uuid) + garbage_collect_storage(storage_id_whitelist) - image_to_remove.delete_instance() + return len(to_remove) - def remove_storages(query): - for storage in query: - logger.debug('Garbage collecting image storage: %s', storage.uuid) - image_path = config.store.image_path(storage.uuid) - for placement in storage.imagestorageplacement_set: - location_name = placement.location.name - placement.delete_instance() - config.store.remove({location_name}, image_path) +def garbage_collect_storage(storage_id_whitelist): + # We are going to make the conscious decision to not delete image storage inside the transaction + # This may end up producing garbage in s3, trading off for higher availability in the database + def placements_query_to_paths_set(placements_query): + return {(placement.location.name, config.store.image_path(placement.storage.uuid)) + for placement in placements_query} - storage.delete_instance(recursive=True) + def orphaned_storage_query(select_base_query, candidates): + return (select_base_query + .switch(ImageStorage) + .join(Image, JOIN_LEFT_OUTER) + .switch(ImageStorage) + .join(DerivedImageStorage, JOIN_LEFT_OUTER, + on=(ImageStorage.id == DerivedImageStorage.derivative)) + .where(ImageStorage.id << list(candidates)) + .group_by(ImageStorage) + .having((fn.Count(Image.id) == 0) & (fn.Count(DerivedImageStorage.id) == 0))) - if uuids_to_check_for_gc: - storage_to_remove = (ImageStorage - .select() - 
.join(Image, JOIN_LEFT_OUTER) - .group_by(ImageStorage) - .where(ImageStorage.uuid << list(uuids_to_check_for_gc)) - .having(fn.Count(Image.id) == 0)) + logger.debug('Garbage collecting storage from candidates: %s', storage_id_whitelist) + with config.app_config['DB_TRANSACTION_FACTORY'](db): + # Find out which derived storages will be removed, and add them to the whitelist + orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id), + storage_id_whitelist)) - remove_storages(storage_to_remove) + if len(orphaned_from_candidates) > 0: + derived_to_remove = (ImageStorage + .select(ImageStorage.id) + .join(DerivedImageStorage, + on=(ImageStorage.id == DerivedImageStorage.derivative)) + .where(DerivedImageStorage.source << orphaned_from_candidates)) + storage_id_whitelist.update({derived.id for derived in derived_to_remove}) - # Now remove any derived image storages whose sources have been removed - derived_storages_to_remove = (ImageStorage - .select() - .join(DerivedImageStorage, on=(ImageStorage.id == DerivedImageStorage.derivative)) - .where(DerivedImageStorage.source >> None)) - remove_storages(derived_storages_to_remove) + # Remove the dervived image storages with sources of orphaned storages + (DerivedImageStorage + .delete() + .where(DerivedImageStorage.source << orphaned_from_candidates) + .execute()) - return len(to_remove) + # Track all of the data that should be removed from blob storage + placements_to_remove = orphaned_storage_query(ImageStoragePlacement + .select(ImageStoragePlacement, + ImageStorage, + ImageStorageLocation) + .join(ImageStorageLocation) + .switch(ImageStoragePlacement) + .join(ImageStorage), + storage_id_whitelist) + paths_to_remove = placements_query_to_paths_set(placements_to_remove.clone()) + + # Remove the placements for orphaned storages + placements_subquery = list(placements_to_remove.clone().select(ImageStoragePlacement.id)) + if len(placements_subquery) > 0: + (ImageStoragePlacement + .delete() + .where(ImageStoragePlacement.id << list(placements_subquery)) + .execute()) + + # Remove the all orphaned storages + orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id), + storage_id_whitelist)) + if len(orphaned_storages) > 0: + (ImageStorage + .delete() + .where(ImageStorage.id << orphaned_storages) + .execute()) + + # Delete the actual blob storage + for location_name, image_path in paths_to_remove: + logger.debug('Removing %s from %s', image_path, location_name) + config.store.remove({location_name}, image_path) def get_tag_image(namespace_name, repository_name, tag_name): diff --git a/data/userevent.py b/data/userevent.py index b45d4e4fa..508ea572f 100644 --- a/data/userevent.py +++ b/data/userevent.py @@ -30,7 +30,7 @@ class UserEventsBuilderModule(object): if not redis_config: # This is the old key name. redis_config = { - 'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME') + 'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME'), } user_events = UserEventBuilder(redis_config) @@ -45,7 +45,7 @@ class UserEventsBuilderModule(object): class UserEvent(object): - """ + """ Defines a helper class for publishing to realtime user events as backed by Redis. 
""" @@ -74,7 +74,7 @@ class UserEvent(object): thread = threading.Thread(target=conduct) thread.start() - + class UserEventListener(object): """ Defines a helper class for subscribing to realtime user events as @@ -90,7 +90,7 @@ class UserEventListener(object): @staticmethod def _user_event_key(username, event_id): return 'user/%s/events/%s' % (username, event_id) - + def event_stream(self): """ Starts listening for events on the channel(s), yielding for each event diff --git a/endpoints/api/trigger.py b/endpoints/api/trigger.py index 081641e00..be119499d 100644 --- a/endpoints/api/trigger.py +++ b/endpoints/api/trigger.py @@ -317,7 +317,7 @@ class BuildTriggerAnalyze(RepositoryParamResource): if not found_repository: return { 'status': 'error', - 'message': 'Repository "%s" was not found' % (base_image) + 'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image) } # If the repository is private and the user cannot see that repo, then @@ -326,7 +326,7 @@ class BuildTriggerAnalyze(RepositoryParamResource): if found_repository.visibility.name != 'public' and not can_read: return { 'status': 'error', - 'message': 'Repository "%s" was not found' % (base_image) + 'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image) } # Check to see if the repository is public. If not, we suggest the @@ -450,18 +450,18 @@ class BuildTriggerFieldValues(RepositoryParamResource): """ Custom verb to fetch a values list for a particular field name. """ @require_repo_admin @nickname('listTriggerFieldValues') - def get(self, namespace, repository, trigger_uuid, field_name): + def post(self, namespace, repository, trigger_uuid, field_name): """ List the field values for a custom run field. """ try: trigger = model.get_build_trigger(namespace, repository, trigger_uuid) except model.InvalidBuildTriggerException: raise NotFound() + config = request.get_json() or json.loads(trigger.config) user_permission = UserAdminPermission(trigger.connected_user.username) if user_permission.can(): trigger_handler = BuildTriggerBase.get_trigger_for_service(trigger.service.name) - values = trigger_handler.list_field_values(trigger.auth_token, json.loads(trigger.config), - field_name) + values = trigger_handler.list_field_values(trigger.auth_token, config, field_name) if values is None: raise NotFound() diff --git a/endpoints/callbacks.py b/endpoints/callbacks.py index 637033ab6..95fdaa5d5 100644 --- a/endpoints/callbacks.py +++ b/endpoints/callbacks.py @@ -106,7 +106,15 @@ def conduct_oauth_login(service_name, user_id, username, email, metadata={}): logger.debug('Aliasing with state: %s' % state) analytics.alias(to_login.username, state) - except model.DataModelException, ex: + except model.InvalidEmailAddressException as ieex: + message = "The e-mail address %s is already associated " % (email, ) + message = message + "with an existing %s account." % (app.config['REGISTRY_TITLE_SHORT'], ) + message = message + "\nPlease log in with your username and password and " + message = message + "associate your %s account to use it in the future." 
% (service_name, ) + + return render_ologin_error(service_name, message) + + except model.DataModelException as ex: return render_ologin_error(service_name, ex.message) if common_login(to_login): diff --git a/endpoints/common.py b/endpoints/common.py index c96a19c1d..ad23c42f6 100644 --- a/endpoints/common.py +++ b/endpoints/common.py @@ -10,6 +10,7 @@ from flask.ext.principal import identity_changed from random import SystemRandom from data import model +from data.database import db from app import app, login_manager, dockerfile_build_queue, notification_queue from auth.permissions import QuayDeferredPermissionUser from auth import scopes @@ -170,6 +171,10 @@ def render_page_template(name, **kwargs): external_styles = get_external_css(local=not app.config.get('USE_CDN', True)) external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True)) + contact_href = None + if len(app.config.get('CONTACT_INFO', [])) == 1: + contact_href = app.config['CONTACT_INFO'][0] + resp = make_response(render_template(name, route_data=json.dumps(get_route_data()), external_styles=external_styles, external_scripts=external_scripts, @@ -186,6 +191,7 @@ def render_page_template(name, **kwargs): show_chat=features.OLARK_CHAT, cache_buster=cache_buster, has_billing=features.BILLING, + contact_href=contact_href, **kwargs)) resp.headers['X-FRAME-OPTIONS'] = 'DENY' @@ -217,14 +223,15 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual, 'build_subdir': subdir } - build_request = model.create_repository_build(repository, token, job_config, - dockerfile_id, build_name, - trigger, pull_robot_name=pull_robot_name) + with app.config['DB_TRANSACTION_FACTORY'](db): + build_request = model.create_repository_build(repository, token, job_config, + dockerfile_id, build_name, + trigger, pull_robot_name=pull_robot_name) - dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({ - 'build_uuid': build_request.uuid, - 'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None - }), retries_remaining=1) + dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({ + 'build_uuid': build_request.uuid, + 'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None + }), retries_remaining=1) # Add the build to the repo's log. metadata = { diff --git a/endpoints/index.py b/endpoints/index.py index eb52971cf..1fa5010cb 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -70,7 +70,7 @@ def create_user(): abort(400, 'User creation is disabled. Please speak to your administrator.') user_data = request.get_json() - if not 'username' in user_data: + if not user_data or not 'username' in user_data: abort(400, 'Missing username') username = user_data['username'] @@ -299,13 +299,6 @@ def update_images(namespace, repository): # Make sure the repo actually exists. 
   abort(404, message='Unknown repository', issue='unknown-repo')
 
-  profile.debug('Parsing image data')
-  image_with_checksums = json.loads(request.data.decode('utf8'))
-
-  updated_tags = {}
-  for image in image_with_checksums:
-    updated_tags[image['Tag']] = image['id']
-
   if get_authenticated_user():
     profile.debug('Publishing push event')
     username = get_authenticated_user().username
@@ -326,12 +319,11 @@ def update_images(namespace, repository):
 
   # Generate a job for each notification that has been added to this repo
   profile.debug('Adding notifications for repository')
+  updated_tags = session.get('pushed_tags', {})
   event_data = {
     'updated_tags': updated_tags,
-    'pushed_image_count': len(image_with_checksums),
     'pruned_image_count': num_removed
   }
-
   spawn_notification(repo, 'repo_push', event_data)
 
   return make_response('Updated', 204)
diff --git a/endpoints/notificationevent.py b/endpoints/notificationevent.py
index 4a195fbd7..617beb177 100644
--- a/endpoints/notificationevent.py
+++ b/endpoints/notificationevent.py
@@ -1,7 +1,9 @@
 import logging
 
 from notificationhelper import build_event_data
+from util.jinjautil import get_template_env
 
+template_env = get_template_env("events")
 logger = logging.getLogger(__name__)
 
 class InvalidNotificationEventException(Exception):
@@ -14,7 +16,7 @@ class NotificationEvent(object):
   def get_level(self, event_data, notification_data):
     """
     Returns a 'level' representing the severity of the event.
-    Valid values are: 'info', 'warning', 'error', 'primary'
+    Valid values are: 'info', 'warning', 'error', 'primary', 'success'
     """
     raise NotImplementedError
 
@@ -28,7 +30,10 @@ class NotificationEvent(object):
     """
     Returns a human readable HTML message for the given notification data.
     """
-    raise NotImplementedError
+    return template_env.get_template(self.event_name() + '.html').render({
+      'event_data': event_data,
+      'notification_data': notification_data
+    })
 
   def get_sample_data(self, repository=None):
     """
@@ -59,32 +64,14 @@ class RepoPushEvent(NotificationEvent):
     return 'repo_push'
 
   def get_level(self, event_data, notification_data):
-    return 'info'
+    return 'primary'
 
   def get_summary(self, event_data, notification_data):
     return 'Repository %s updated' % (event_data['repository'])
 
-  def get_message(self, event_data, notification_data):
-    if not event_data.get('updated_tags', {}).keys():
-      html = """
-      Repository <a href="%s">%s</a> has been updated via a push.
-      """ % (event_data['homepage'],
-             event_data['repository'])
-    else:
-      html = """
-      Repository <a href="%s">%s</a> has been updated via a push.
-      <br><br>
-      Tags Updated: %s
-      """ % (event_data['homepage'],
-             event_data['repository'],
-             ', '.join(event_data['updated_tags'].keys()))
-
-    return html
-
   def get_sample_data(self, repository):
     return build_event_data(repository, {
       'updated_tags': {'latest': 'someimageid', 'foo': 'anotherimage'},
-      'pushed_image_count': 10,
       'pruned_image_count': 3
     })
 
@@ -109,26 +96,7 @@ class BuildQueueEvent(NotificationEvent):
     }, subpage='/build?current=%s' % build_uuid)
 
   def get_summary(self, event_data, notification_data):
-    return 'Build queued for repository %s' % (event_data['repository'])
-
-  def get_message(self, event_data, notification_data):
-    is_manual = event_data['is_manual']
-    if is_manual:
-      html = """
-      A new build has been manually queued to start on repository <a href="%s">%s</a>.
-      <br><br>
-      Build ID: %s
-      """ % (event_data['homepage'], event_data['repository'], event_data['build_id'])
-    else:
-      html = """
-      A new build has been queued via a <a href="%s">%s</a> trigger to start on repository %s.
-      <br><br>
-      Build ID: %s
-      """ % (event_data['homepage'], event_data['trigger_kind'],
-             event_data['repository'], event_data['build_id'])
-
-    return html
-
+    return 'Build queued for repository %s' % (event_data['repository'])
 
 class BuildStartEvent(NotificationEvent):
@@ -152,15 +120,6 @@ class BuildStartEvent(NotificationEvent):
   def get_summary(self, event_data, notification_data):
     return 'Build started for repository %s' % (event_data['repository'])
 
-  def get_message(self, event_data, notification_data):
-    html = """
-    A new build has started on repository <a href="%s">%s</a>.
-    <br><br>
-    Build ID: %s
-    """ % (event_data['homepage'], event_data['repository'], event_data['build_id'])
-
-    return html
-
 
 class BuildSuccessEvent(NotificationEvent):
   @classmethod
@@ -168,7 +127,7 @@ def event_name(cls):
     return 'build_success'
 
   def get_level(self, event_data, notification_data):
-    return 'primary'
+    return 'success'
 
   def get_sample_data(self, repository):
     build_uuid = 'fake-build-id'
@@ -183,15 +142,6 @@ class BuildSuccessEvent(NotificationEvent):
   def get_summary(self, event_data, notification_data):
     return 'Build succeeded for repository %s' % (event_data['repository'])
 
-  def get_message(self, event_data, notification_data):
-    html = """
-    A build has finished on repository <a href="%s">%s</a>.
-    <br><br>
-    Build ID: %s
-    """ % (event_data['homepage'], event_data['repository'], event_data['build_id'])
-
-    return html
-
 
 class BuildFailureEvent(NotificationEvent):
@@ -215,13 +165,3 @@ class BuildFailureEvent(NotificationEvent):
   def get_summary(self, event_data, notification_data):
     return 'Build failure for repository %s' % (event_data['repository'])
 
-  def get_message(self, event_data, notification_data):
-    html = """
-    A build has failed on repository <a href="%s">%s</a>.
-    <br><br>
-    Reason: %s<br>
-    Build ID: %s<br>
- """ % (event_data['homepage'], event_data['repository'], - event_data['error_message'], event_data['build_id']) - - return html diff --git a/endpoints/notificationhelper.py b/endpoints/notificationhelper.py index cde307c30..8ac23e985 100644 --- a/endpoints/notificationhelper.py +++ b/endpoints/notificationhelper.py @@ -1,5 +1,6 @@ from app import app, notification_queue from data import model +from auth.auth_context import get_authenticated_user, get_validated_oauth_token import json @@ -27,19 +28,35 @@ def build_event_data(repo, extra_data={}, subpage=None): event_data.update(extra_data) return event_data -def build_notification_data(notification, event_data): +def build_notification_data(notification, event_data, performer_data=None): + if not performer_data: + performer_data = {} + + oauth_token = get_validated_oauth_token() + if oauth_token: + performer_data['oauth_token_id'] = oauth_token.id + performer_data['oauth_token_application_id'] = oauth_token.application.client_id + performer_data['oauth_token_application'] = oauth_token.application.name + + performer_user = get_authenticated_user() + if performer_user: + performer_data['entity_id'] = performer_user.id + performer_data['entity_name'] = performer_user.username + return { 'notification_uuid': notification.uuid, - 'event_data': event_data + 'event_data': event_data, + 'performer_data': performer_data, } -def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[]): +def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[], + performer_data=None): event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage) notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name, event_name=event_name) - for notification in notifications: - notification_data = build_notification_data(notification, event_data) + for notification in list(notifications): + notification_data = build_notification_data(notification, event_data, performer_data) path = [str(repo.namespace_user.id), repo.name, event_name] + pathargs notification_queue.put(path, json.dumps(notification_data)) diff --git a/endpoints/notificationmethod.py b/endpoints/notificationmethod.py index 589ebd06d..0d43498f2 100644 --- a/endpoints/notificationmethod.py +++ b/endpoints/notificationmethod.py @@ -211,7 +211,7 @@ class FlowdockMethod(NotificationMethod): if not token: return - owner = model.get_user(notification.repository.namespace_user.username) + owner = model.get_user_or_org(notification.repository.namespace_user.username) if not owner: # Something went wrong. return @@ -267,7 +267,7 @@ class HipchatMethod(NotificationMethod): if not token or not room_id: return - owner = model.get_user(notification.repository.namespace_user.username) + owner = model.get_user_or_org(notification.repository.namespace_user.username) if not owner: # Something went wrong. 
return @@ -279,6 +279,7 @@ class HipchatMethod(NotificationMethod): 'info': 'gray', 'warning': 'yellow', 'error': 'red', + 'success': 'green', 'primary': 'purple' }.get(level, 'gray') @@ -303,6 +304,56 @@ class HipchatMethod(NotificationMethod): raise NotificationMethodPerformException(ex.message) +from HTMLParser import HTMLParser + +class SlackAdjuster(HTMLParser): + def __init__(self): + self.reset() + self.result = [] + + def handle_data(self, d): + self.result.append(d) + + def get_attr(self, attrs, name): + for attr in attrs: + if attr[0] == name: + return attr[1] + + return '' + + def handle_starttag(self, tag, attrs): + if tag == 'a': + self.result.append('<%s|' % (self.get_attr(attrs, 'href'), )) + + if tag == 'i': + self.result.append('_') + + if tag == 'b' or tag == 'strong': + self.result.append('*') + + if tag == 'img': + self.result.append(self.get_attr(attrs, 'alt')) + self.result.append(' ') + + def handle_endtag(self, tag): + if tag == 'a': + self.result.append('>') + + if tag == 'b' or tag == 'strong': + self.result.append('*') + + if tag == 'i': + self.result.append('_') + + def get_data(self): + return ''.join(self.result) + +def adjust_tags(html): + s = SlackAdjuster() + s.feed(html) + return s.get_data() + + class SlackMethod(NotificationMethod): """ Method for sending notifications to Slack via the API: https://api.slack.com/docs/attachments @@ -318,12 +369,11 @@ class SlackMethod(NotificationMethod): if not config_data.get('subdomain', '').isalnum(): raise CannotValidateNotificationMethodException('Missing Slack Subdomain Name') - def formatForSlack(self, message): + def format_for_slack(self, message): message = message.replace('\n', '') message = re.sub(r'\s+', ' ', message) message = message.replace('
', '\n') - message = re.sub(r'(.+)', '<\\1|\\2>', message) - return message + return adjust_tags(message) def perform(self, notification, event_handler, notification_data): config_data = json.loads(notification.config_json) @@ -334,7 +384,7 @@ class SlackMethod(NotificationMethod): if not token or not subdomain: return - owner = model.get_user(notification.repository.namespace_user.username) + owner = model.get_user_or_org(notification.repository.namespace_user.username) if not owner: # Something went wrong. return @@ -346,6 +396,7 @@ class SlackMethod(NotificationMethod): 'info': '#ffffff', 'warning': 'warning', 'error': 'danger', + 'success': 'good', 'primary': 'good' }.get(level, '#ffffff') @@ -359,8 +410,9 @@ class SlackMethod(NotificationMethod): 'attachments': [ { 'fallback': summary, - 'text': self.formatForSlack(message), - 'color': color + 'text': self.format_for_slack(message), + 'color': color, + 'mrkdwn_in': ["text"] } ] } diff --git a/endpoints/registry.py b/endpoints/registry.py index 751c2a0ff..1dcaa00c2 100644 --- a/endpoints/registry.py +++ b/endpoints/registry.py @@ -197,12 +197,15 @@ def put_image_layer(namespace, repository, image_id): # Create a socket reader to read the input stream containing the layer data. sr = SocketReader(input_stream) - # Add a handler that store the data in storage. - tmp, store_hndlr = store.temp_store_handler() - sr.add_handler(store_hndlr) + # Add a handler that copies the data into a temp file. This is used to calculate the tarsum, + # which is only needed for older versions of Docker. + requires_tarsum = session.get('checksum_format') == 'tarsum' + if requires_tarsum: + tmp, tmp_hndlr = store.temp_store_handler() + sr.add_handler(tmp_hndlr) - # Add a handler to compute the uncompressed size of the layer. - uncompressed_size_info, size_hndlr = gzipstream.calculate_size_handler() + # Add a handler to compute the compressed and uncompressed sizes of the layer. + size_info, size_hndlr = gzipstream.calculate_size_handler() sr.add_handler(size_hndlr) # Add a handler which computes the checksum. @@ -217,14 +220,15 @@ def put_image_layer(namespace, repository, image_id): csums.append('sha256:{0}'.format(h.hexdigest())) try: - image_size = tmp.tell() - # Save the size of the image. - model.set_image_size(image_id, namespace, repository, image_size, uncompressed_size_info.size) + model.set_image_size(image_id, namespace, repository, size_info.compressed_size, + size_info.uncompressed_size) + + if requires_tarsum: + tmp.seek(0) + csums.append(checksums.compute_tarsum(tmp, json_data)) + tmp.close() - tmp.seek(0) - csums.append(checksums.compute_tarsum(tmp, json_data)) - tmp.close() except (IOError, checksums.TarError) as e: logger.debug('put_image_layer: Error when computing tarsum ' '{0}'.format(e)) @@ -268,7 +272,19 @@ def put_image_checksum(namespace, repository, image_id): if not permission.can(): abort(403) - checksum = request.headers.get('X-Docker-Checksum') + # Docker Version < 0.10 (tarsum+sha): + old_checksum = request.headers.get('X-Docker-Checksum') + + # Docker Version >= 0.10 (sha): + new_checksum = request.headers.get('X-Docker-Checksum-Payload') + + # Store whether we need to calculate the tarsum. 
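Read together with put_image_layer above, the negotiation stores a per-session checksum format; the decision reduces to a tiny function (header names as they appear in this diff, standalone and illustrative):

def checksum_format(headers):
  # Docker >= 0.10 sends a plain sha256 payload checksum; older daemons
  # send only a tarsum, which forces buffering the layer to a temp file.
  if headers.get('X-Docker-Checksum-Payload'):
    return 'sha256'
  return 'tarsum'

assert checksum_format({'X-Docker-Checksum-Payload': 'sha256:ab12'}) == 'sha256'
assert checksum_format({'X-Docker-Checksum': 'tarsum+sha256:cd34'}) == 'tarsum'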
+ if new_checksum: + session['checksum_format'] = 'sha256' + else: + session['checksum_format'] = 'tarsum' + + checksum = new_checksum or old_checksum if not checksum: abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum', image_id=image_id) @@ -279,6 +295,9 @@ def put_image_checksum(namespace, repository, image_id): profile.debug('Looking up repo image') repo_image = model.get_repo_image(namespace, repository, image_id) + if not repo_image or not repo_image.storage: + abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id) + uuid = repo_image.storage.uuid profile.debug('Looking up repo layer data') diff --git a/endpoints/tags.py b/endpoints/tags.py index a4ed782ab..9e61bf030 100644 --- a/endpoints/tags.py +++ b/endpoints/tags.py @@ -2,7 +2,7 @@ import logging import json -from flask import abort, request, jsonify, make_response, Blueprint +from flask import abort, request, jsonify, make_response, Blueprint, session from app import app from util.names import parse_repository_name @@ -59,6 +59,12 @@ def put_tag(namespace, repository, tag): docker_image_id = json.loads(request.data) model.create_or_update_tag(namespace, repository, tag, docker_image_id) + # Store the updated tag. + if not 'pushed_tags' in session: + session['pushed_tags'] = {} + + session['pushed_tags'][tag] = docker_image_id + return make_response('Created', 200) abort(403) diff --git a/endpoints/trigger.py b/endpoints/trigger.py index c7c47db79..053d47d57 100644 --- a/endpoints/trigger.py +++ b/endpoints/trigger.py @@ -3,11 +3,13 @@ import io import os.path import tarfile import base64 +import re from github import Github, UnknownObjectException, GithubException from tempfile import SpooledTemporaryFile from app import app, userfiles as user_files +from util.tarfileappender import TarfileAppender client = app.config['HTTPCLIENT'] @@ -229,13 +231,35 @@ class GithubBuildTrigger(BuildTrigger): return repos_by_org + def matches_branch(self, branch_name, regex): + if not regex: + return False + + m = regex.match(branch_name) + if not m: + return False + + return len(m.group(0)) == len(branch_name) + def list_build_subdirs(self, auth_token, config): gh_client = self._get_client(auth_token) source = config['build_source'] - try: + try: repo = gh_client.get_repo(source) - default_commit = repo.get_branch(repo.default_branch or 'master').commit + + # Find the first matching branch. + branches = None + if 'branch_regex' in config: + try: + regex = re.compile(config['branch_regex']) + branches = [branch.name for branch in repo.get_branches() + if self.matches_branch(branch.name, regex)] + except: + pass + + branches = branches or [repo.default_branch or 'master'] + default_commit = repo.get_branch(branches[0]).commit commit_tree = repo.get_git_tree(default_commit.sha, recursive=True) return [os.path.dirname(elem.path) for elem in commit_tree.tree @@ -301,10 +325,17 @@ class GithubBuildTrigger(BuildTrigger): with tarfile.open(fileobj=tarball) as archive: tarball_subdir = archive.getnames()[0] - # Seek to position 0 to make boto multipart happy + # Seek to position 0 to make tarfile happy. 
tarball.seek(0) - dockerfile_id = user_files.store_file(tarball, TARBALL_MIME) + entries = { + tarball_subdir + '/.git/HEAD': commit_sha, + tarball_subdir + '/.git/objects/': None, + tarball_subdir + '/.git/refs/': None + } + + appender = TarfileAppender(tarball, entries).get_stream() + dockerfile_id = user_files.store_file(appender, TARBALL_MIME) logger.debug('Successfully prepared job') @@ -330,7 +361,7 @@ class GithubBuildTrigger(BuildTrigger): payload = request.get_json() if not payload or payload.get('head_commit') is None: raise SkipRequestException() - + if 'zen' in payload: raise ValidationRequestException() @@ -339,6 +370,16 @@ class GithubBuildTrigger(BuildTrigger): commit_sha = payload['head_commit']['id'] commit_message = payload['head_commit'].get('message', '') + if 'branch_regex' in config: + try: + regex = re.compile(config['branch_regex']) + except: + regex = re.compile('.*') + + branch = ref.split('/')[-1] + if not self.matches_branch(branch, regex): + raise SkipRequestException() + if should_skip_commit(commit_message): raise SkipRequestException() diff --git a/endpoints/verbs.py b/endpoints/verbs.py index 91b9f30a9..581da0a17 100644 --- a/endpoints/verbs.py +++ b/endpoints/verbs.py @@ -4,11 +4,12 @@ import hashlib from flask import redirect, Blueprint, abort, send_file -from app import storage as store, app +from app import app from auth.auth import process_auth from auth.permissions import ReadRepositoryPermission from data import model from data import database +from storage import Storage from util.queuefile import QueueFile from util.queueprocess import QueueProcess @@ -19,8 +20,9 @@ from util.dockerloadformat import build_docker_load_stream verbs = Blueprint('verbs', __name__) logger = logging.getLogger(__name__) - def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_list): + store = Storage(app) + def get_next_image(): for current_image_id in image_list: yield model.get_repo_image(namespace, repository, current_image_id) @@ -43,14 +45,23 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, ima def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file): + database.configure(app.config) + store = Storage(app) + + def handle_exception(ex): + logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex) + model.delete_derived_storage_by_uuid(linked_storage_uuid) + + queue_file.add_exception_handler(handle_exception) + image_path = store.image_layer_path(linked_storage_uuid) store.stream_write(linked_locations, image_path, queue_file) queue_file.close() - database.configure(app.config) - done_uploading = model.get_storage_by_uuid(linked_storage_uuid) - done_uploading.uploading = False - done_uploading.save() + if not queue_file.raised_exception: + done_uploading = model.get_storage_by_uuid(linked_storage_uuid) + done_uploading.uploading = False + done_uploading.save() @verbs.route('/squash///', methods=['GET']) @@ -59,8 +70,9 @@ def get_squashed_tag(namespace, repository, tag): permission = ReadRepositoryPermission(namespace, repository) if permission.can() or model.repository_is_public(namespace, repository): # Lookup the requested tag. - tag_image = model.get_tag_image(namespace, repository, tag) - if not tag_image: + try: + tag_image = model.get_tag_image(namespace, repository, tag) + except model.DataModelException: abort(404) # Lookup the tag's image and storage. 
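The storage writer in endpoints/verbs.py above only marks the derived storage as uploaded when no exception surfaced, relying on an exception handler registered with the queue file to delete the partial result. A stripped-down sketch of that callback contract (a hypothetical stand-in, not Quay's util.queuefile implementation):

class RecordingQueueFile(object):
  # Minimal stand-in: drains a chunk iterator and reports failures
  # to registered handlers, mirroring the add_exception_handler usage.
  def __init__(self, chunk_iter):
    self._chunk_iter = chunk_iter
    self._handlers = []
    self.raised_exception = False

  def add_exception_handler(self, handler):
    self._handlers.append(handler)

  def read_all(self):
    try:
      return ''.join(self._chunk_iter)
    except Exception as ex:
      self.raised_exception = True
      for handler in self._handlers:
        handler(ex)  # e.g. delete the partially written derived storage
      return None

def failing_chunks():
  yield 'layer-bytes-'
  raise IOError('source stream died')

qf = RecordingQueueFile(failing_chunks())
qf.add_exception_handler(lambda ex: None)  # cleanup hook would go here
qf.read_all()
assert qf.raised_exception  # caller then skips the uploading=False save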
@@ -68,6 +80,7 @@ def get_squashed_tag(namespace, repository, tag): if not repo_image: abort(404) + store = Storage(app) derived = model.find_or_create_derived_storage(repo_image.storage, 'squash', store.preferred_locations[0]) if not derived.uploading: @@ -96,8 +109,14 @@ def get_squashed_tag(namespace, repository, tag): # Create a queue process to generate the data. The queue files will read from the process # and send the results to the client and storage. + def _cleanup(): + # Close any existing DB connection once the process has exited. + database.close_db_filter(None) + args = (namespace, repository, tag, synthetic_image_id, image_json, full_image_list) - queue_process = QueueProcess(_open_stream, 8 * 1024, 10 * 1024 * 1024, args) # 8K/10M chunk/max + queue_process = QueueProcess(_open_stream, + 8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max + args, finished=_cleanup) client_queue_file = QueueFile(queue_process.create_queue(), 'client') storage_queue_file = QueueFile(queue_process.create_queue(), 'storage') @@ -107,7 +126,7 @@ def get_squashed_tag(namespace, repository, tag): # Start the storage saving. storage_args = (derived.uuid, derived.locations, storage_queue_file) - QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args) + QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup) # Return the client's data. return send_file(client_queue_file) diff --git a/events/build_failure.html b/events/build_failure.html new file mode 100644 index 000000000..cb93ebff4 --- /dev/null +++ b/events/build_failure.html @@ -0,0 +1,2 @@ +Build failed for repository +{{ event_data.repository | repository_reference }} ({{ event_data.build_id }}): {{ event_data.error_message }} \ No newline at end of file diff --git a/events/build_queued.html b/events/build_queued.html new file mode 100644 index 000000000..a4ecf8e41 --- /dev/null +++ b/events/build_queued.html @@ -0,0 +1,9 @@ +{% if event_data.is_manual and notification_data.performer_data.entity_name %} +{{ notification_data.performer_data.entity_name | user_reference }} queued a +build +{% elif event_data.trigger_kind %} +Build queued via a {{ event_data.trigger_kind }} trigger +{% else %} +Build queued +{% endif %} + for repository {{ event_data.repository | repository_reference }} ({{ event_data.build_id }}) diff --git a/events/build_start.html b/events/build_start.html new file mode 100644 index 000000000..81a0c7fd2 --- /dev/null +++ b/events/build_start.html @@ -0,0 +1,2 @@ +Build started for repository +{{ event_data.repository | repository_reference }} ({{ event_data.build_id }}) diff --git a/events/build_success.html b/events/build_success.html new file mode 100644 index 000000000..aee961326 --- /dev/null +++ b/events/build_success.html @@ -0,0 +1,2 @@ +Build completed for repository +{{ event_data.repository | repository_reference }} ({{ event_data.build_id }}) \ No newline at end of file diff --git a/events/repo_push.html b/events/repo_push.html new file mode 100644 index 000000000..0c531d909 --- /dev/null +++ b/events/repo_push.html @@ -0,0 +1,12 @@ +{% if notification_data.performer_data.entity_name %} +{{ notification_data.performer_data.entity_name | user_reference }} pushed +{% else %} +Push of +{% endif %} + +{% if event_data.updated_tags %} + {{ 'tags' | icon_image }} + {% for tag in event_data.updated_tags %}{%if loop.index > 1 %}, {% endif %}{{ (event_data.repository, tag) | repository_tag_reference }}{% endfor %} in +{% endif %} + + repository {{ event_data.repository | 
repository_reference }} \ No newline at end of file diff --git a/initdb.py b/initdb.py index 87208a8d6..5d5c5fde7 100644 --- a/initdb.py +++ b/initdb.py @@ -8,6 +8,7 @@ from datetime import datetime, timedelta from email.utils import formatdate from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite) +from uuid import UUID from data.database import * from data import model @@ -20,18 +21,6 @@ logger = logging.getLogger(__name__) SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i for i in range(1, 10)] -IMAGE_UUIDS = ['ab5160d1-8fb4-4022-a135-3c4de7f6ed97', - '4259533e-868d-4db3-9a78-fc24ffc03a2b', - 'c2c6dc6e-24d1-4f15-a616-81c41e3e3629', - '8ec59952-8f5a-4fa0-897e-57c3337e1914', - '08a8ab1f-4aaa-4337-88ab-5b5c71a8d492', - '4a71f3db-cbb1-4c3b-858f-1be032b3e875', - 'd40d531a-c70c-47f9-bf5b-2a4381db2d60', - '6fe6cebb-52b2-4036-892e-b86d6487a56b', - 'e969ff76-e87d-4ea3-8cb3-0db9b5bcb8d9', - '2e3b616b-301f-437c-98ab-37352f444a60', - ] - SAMPLE_CMDS = [["/bin/bash"], ["/bin/sh", "-c", "echo \"PasswordAuthentication no\" >> /etc/ssh/sshd_config"], @@ -57,6 +46,13 @@ def __gen_image_id(repo, image_num): return h.hexdigest() + h.hexdigest() +def __gen_image_uuid(repo, image_num): + str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num) + + h = hashlib.md5(str_to_hash) + return UUID(bytes=h.digest()) + + global_image_num = [0] def __create_subtree(repo, structure, creator_username, parent): num_nodes, subtrees, last_node_tags = structure @@ -71,7 +67,7 @@ def __create_subtree(repo, structure, creator_username, parent): new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us') new_image_locations = new_image.storage.locations - new_image.storage.uuid = IMAGE_UUIDS[image_num % len(IMAGE_UUIDS)] + new_image.storage.uuid = __gen_image_uuid(repo, image_num) new_image.storage.uploading = False new_image.storage.checksum = checksum new_image.storage.save() diff --git a/registry.py b/registry.py new file mode 100644 index 000000000..2a356e1ec --- /dev/null +++ b/registry.py @@ -0,0 +1,13 @@ +import logging +import logging.config + +from app import app as application + +from endpoints.index import index +from endpoints.tags import tags +from endpoints.registry import registry + + +application.register_blueprint(index, url_prefix='/v1') +application.register_blueprint(tags, url_prefix='/v1') +application.register_blueprint(registry, url_prefix='/v1') diff --git a/requirements-nover.txt b/requirements-nover.txt index 262e0594d..e5c81cfd3 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -18,7 +18,7 @@ paramiko xhtml2pdf redis hiredis -docker-py +git+https://github.com/devtable/docker-py.git@emptydirs pygithub flask-restful jsonschema diff --git a/requirements.txt b/requirements.txt index e454e6846..be726ece7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,58 +1,57 @@ APScheduler==3.0.0 Flask==0.10.1 Flask-Login==0.2.11 -Flask-Mail==0.9.0 +Flask-Mail==0.9.1 Flask-Principal==0.4.0 Flask-RESTful==0.2.12 Jinja2==2.7.3 LogentriesLogger==0.2.1 Mako==1.0.0 MarkupSafe==0.23 -Pillow==2.5.1 -PyGithub==1.25.0 +Pillow==2.6.0 +PyGithub==1.25.1 PyMySQL==0.6.2 -PyPDF2==1.22 +PyPDF2==1.23 PyYAML==3.11 SQLAlchemy==0.9.7 Werkzeug==0.9.6 -alembic==0.6.5 git+https://github.com/DevTable/aniso8601-fake.git git+https://github.com/DevTable/anunidecode.git -argparse==1.2.1 +alembic==0.6.7 +backports.ssl-match-hostname==3.4.0.2 beautifulsoup4==4.3.2 blinker==1.3 -boto==2.32.0 -coverage==3.7.1 
-docker-py==0.4.0 +boto==2.32.1 +git+https://github.com/devtable/docker-py.git@emptydirs ecdsa==0.11 -futures==2.1.6 +futures==2.2.0 gevent==1.0.1 -greenlet==0.4.2 +gipc==0.4.0 +greenlet==0.4.4 gunicorn==18.0 -hiredis==0.1.4 +hiredis==0.1.5 html5lib==0.999 itsdangerous==0.24 -jsonschema==2.3.0 +jsonschema==2.4.0 marisa-trie==0.6 -mixpanel-py==3.1.3 -mock==1.0.1 git+https://github.com/NateFerrero/oauth2lib.git -paramiko==1.14.0 -peewee==2.2.5 +mixpanel-py==3.2.0 +paramiko==1.15.1 +peewee==2.3.3 +psycopg2==2.5.4 py-bcrypt==0.4 pycrypto==2.6.1 python-dateutil==2.2 -python-ldap==2.4.15 +python-ldap==2.4.17 python-magic==0.4.6 -pytz==2014.4 -psycopg2==2.5.3 +pytz==2014.7 raven==5.0.0 -redis==2.10.1 +redis==2.10.3 reportlab==2.7 -requests==2.3.0 -six==1.7.3 +requests==2.4.3 +six==1.8.0 stripe==1.19.0 tzlocal==1.1.1 -websocket-client==0.11.0 +websocket-client==0.18.0 wsgiref==0.1.2 xhtml2pdf==0.0.6 diff --git a/static/css/quay.css b/static/css/quay.css index 08d55c970..58e53af60 100644 --- a/static/css/quay.css +++ b/static/css/quay.css @@ -3974,7 +3974,7 @@ pre.command:before { color: #00b0ed; } -.contact-options .option-phone .fa-circle { +.contact-options .option-tel .fa-circle { color: #1dd924; } @@ -3982,10 +3982,14 @@ pre.command:before { color: #e52f00; } -.contact-options .option-email .fa-circle { +.contact-options .option-mailto .fa-circle { color: #1b72f1; } +.contact-options .option-url .fa-circle { + color: #F1A51B; +} + .about-us .row { margin-bottom: 30px; } @@ -4105,6 +4109,27 @@ pre.command:before { border-bottom-left-radius: 0px; } +.trigger-setup-github-element .branch-reference.not-match { + color: #ccc !important; +} + +.trigger-setup-github-element .branch-reference.not-match a { + color: #ccc !important; + text-decoration: line-through; +} + +.trigger-setup-github-element .branch-filter { + white-space: nowrap; +} + +.trigger-setup-github-element .branch-filter span { + display: inline-block; +} + +.trigger-setup-github-element .selected-info { + margin-bottom: 20px; +} + .trigger-setup-github-element .github-org-icon { width: 20px; margin-right: 8px; @@ -4120,6 +4145,45 @@ pre.command:before { padding-left: 6px; } +.trigger-setup-github-element .matching-branches { + margin: 0px; + padding: 0px; + margin-left: 10px; + display: inline-block; +} + +.trigger-setup-github-element .matching-branches li:before { + content: "\f126"; + font-family: FontAwesome; +} + +.trigger-setup-github-element .matching-branches li { + list-style: none; + display: inline-block; + margin-left: 10px; +} + +.setup-trigger-directive-element .dockerfile-found-content { + margin-left: 32px; +} + +.setup-trigger-directive-element .dockerfile-found-content:before { + content: "\f071"; + font-family: FontAwesome; + color: rgb(255, 194, 0); + position: absolute; + top: 0px; + left: 0px; + font-size: 20px; +} + +.setup-trigger-directive-element .dockerfile-found { + position: relative; + margin-bottom: 16px; + padding-bottom: 16px; + border-bottom: 1px solid #eee; +} + .slideinout { -webkit-transition:0.5s all; transition:0.5s linear all; @@ -4127,7 +4191,7 @@ pre.command:before { position: relative; - height: 75px; + height: 32px; opacity: 1; } diff --git a/static/directives/setup-trigger-dialog.html b/static/directives/setup-trigger-dialog.html index dd384c808..a44893c68 100644 --- a/static/directives/setup-trigger-dialog.html +++ b/static/directives/setup-trigger-dialog.html @@ -8,102 +8,110 @@ -
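One behavioral note on the branch filtering added in endpoints/trigger.py earlier in this diff: matches_branch emulates a full match by requiring the matched span to cover the entire branch name, since Python's re.match only anchors at the start of the string. A small self-contained check of those semantics, with illustrative patterns:

import re

def matches_branch(branch_name, regex):
  # Mirrors the trigger code: a prefix match alone must not count.
  if not regex:
    return False
  m = regex.match(branch_name)
  if not m:
    return False
  return len(m.group(0)) == len(branch_name)

regex = re.compile('master|prod')
assert matches_branch('master', regex)
assert matches_branch('prod', regex)
assert not matches_branch('master-backup', regex)  # prefix only
assert not matches_branch('feature/prod', regex)   # no match at start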