diff --git a/app.py b/app.py
index 2a99a2446..aa663418c 100644
--- a/app.py
+++ b/app.py
@@ -11,6 +11,7 @@ import features
from storage import Storage
from data.userfiles import Userfiles
from util.analytics import Analytics
+from util.exceptionlog import Sentry
from data.billing import Billing
@@ -44,3 +45,4 @@ storage = Storage(app)
userfiles = Userfiles(app)
analytics = Analytics(app)
billing = Billing(app)
+sentry = Sentry(app)
diff --git a/application.py b/application.py
index afc503379..e7cb11c76 100644
--- a/application.py
+++ b/application.py
@@ -1,9 +1,10 @@
import logging
+import logging.config
+import uuid
from app import app as application
-
-# Initialize logging
-application.config['LOGGING_CONFIG']()
+from flask import request, Request
+from util.names import urn_generator
from data.model import db as model_db
@@ -21,6 +22,7 @@ from endpoints.callbacks import callback
logger = logging.getLogger(__name__)
+profile = logging.getLogger('application.profiler')
application.register_blueprint(web)
application.register_blueprint(callback, url_prefix='/oauth2')
@@ -31,6 +33,29 @@ application.register_blueprint(api_bp, url_prefix='/api')
application.register_blueprint(webhooks, url_prefix='/webhooks')
application.register_blueprint(realtime, url_prefix='/realtime')
+class RequestWithId(Request):
+ request_gen = staticmethod(urn_generator(['request']))
+
+ def __init__(self, *args, **kwargs):
+ super(RequestWithId, self).__init__(*args, **kwargs)
+ self.request_id = self.request_gen()
+
+@application.before_request
+def _request_start():
+ profile.debug('Starting request: %s', request.path)
+
+
+@application.after_request
+def _request_end(r):
+ profile.debug('Ending request: %s', request.path)
+ return r
+
+class InjectingFilter(logging.Filter):
+ def filter(self, record):
+ record.msg = '[%s] %s' % (request.request_id, record.msg)
+ return True
+
+profile.addFilter(InjectingFilter())
def close_db(exc):
db = model_db
@@ -39,6 +64,8 @@ def close_db(exc):
db.close()
application.teardown_request(close_db)
+application.request_class = RequestWithId
if __name__ == '__main__':
+ logging.config.fileConfig('conf/logging_local.conf', disable_existing_loggers=False)
application.run(port=5000, debug=True, threaded=True, host='0.0.0.0')
diff --git a/binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb b/binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb
deleted file mode 100644
index b69f98a44..000000000
Binary files a/binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb and /dev/null differ
diff --git a/binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb b/binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb
deleted file mode 100644
index c9e530479..000000000
Binary files a/binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb and /dev/null differ
diff --git a/binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb b/binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb
deleted file mode 100644
index cf0dce064..000000000
Binary files a/binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb and /dev/null differ
diff --git a/binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb b/binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb
deleted file mode 100644
index bdee3f6af..000000000
Binary files a/binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb and /dev/null differ
diff --git a/binary_dependencies/builder/lxc-docker-0.9.0-tutum2_0.9.0-tutum2-20140327210604-4c49268-dirty_amd64.deb b/binary_dependencies/builder/lxc-docker-0.9.0-tutum2_0.9.0-tutum2-20140327210604-4c49268-dirty_amd64.deb
deleted file mode 100644
index 318bf9758..000000000
Binary files a/binary_dependencies/builder/lxc-docker-0.9.0-tutum2_0.9.0-tutum2-20140327210604-4c49268-dirty_amd64.deb and /dev/null differ
diff --git a/binary_dependencies/builder/lxc-docker-0.9.0_0.9.0-20140501212101-72572f0-dirty_amd64.deb b/binary_dependencies/builder/lxc-docker-0.9.0_0.9.0-20140501212101-72572f0-dirty_amd64.deb
new file mode 100644
index 000000000..2242c23ce
Binary files /dev/null and b/binary_dependencies/builder/lxc-docker-0.9.0_0.9.0-20140501212101-72572f0-dirty_amd64.deb differ
diff --git a/conf/gunicorn_config.py b/conf/gunicorn_config.py
index a74f95786..b86250125 100644
--- a/conf/gunicorn_config.py
+++ b/conf/gunicorn_config.py
@@ -3,7 +3,5 @@ workers = 8
worker_class = 'gevent'
timeout = 2000
pidfile = '/tmp/gunicorn.pid'
-errorlog = '/mnt/logs/application.log'
-loglevel = 'debug'
-logger_class = 'util.glogger.LogstashLogger'
+logconfig = 'conf/logging.conf'
pythonpath = '.'
\ No newline at end of file
diff --git a/conf/gunicorn_local.py b/conf/gunicorn_local.py
index 2a145fd98..9f93eb008 100644
--- a/conf/gunicorn_local.py
+++ b/conf/gunicorn_local.py
@@ -3,7 +3,5 @@ workers = 2
worker_class = 'gevent'
timeout = 2000
daemon = False
-errorlog = '-'
-loglevel = 'debug'
-logger_class = 'util.glogger.LogstashLogger'
+logconfig = 'conf/logging_local.conf'
pythonpath = '.'
\ No newline at end of file
diff --git a/conf/logging.conf b/conf/logging.conf
new file mode 100644
index 000000000..2061a2375
--- /dev/null
+++ b/conf/logging.conf
@@ -0,0 +1,39 @@
+[loggers]
+keys=root, gunicorn.error, gunicorn.access, application.profiler
+
+[handlers]
+keys=error_file
+
+[formatters]
+keys=generic
+
+[logger_application.profiler]
+level=DEBUG
+handlers=error_file
+propagate=0
+qualname=application.profiler
+
+[logger_root]
+level=DEBUG
+handlers=error_file
+
+[logger_gunicorn.error]
+level=INFO
+handlers=error_file
+propagate=1
+qualname=gunicorn.error
+
+[logger_gunicorn.access]
+level=INFO
+handlers=error_file
+propagate=0
+qualname=gunicorn.access
+
+[handler_error_file]
+class=logging.FileHandler
+formatter=generic
+args=('/mnt/logs/application.log',)
+
+[formatter_generic]
+format=%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s
+class=logging.Formatter
diff --git a/conf/logging_local.conf b/conf/logging_local.conf
new file mode 100644
index 000000000..4023e7743
--- /dev/null
+++ b/conf/logging_local.conf
@@ -0,0 +1,39 @@
+[loggers]
+keys=root, gunicorn.error, gunicorn.access, application.profiler
+
+[handlers]
+keys=console
+
+[formatters]
+keys=generic
+
+[logger_application.profiler]
+level=DEBUG
+handlers=console
+propagate=0
+qualname=application.profiler
+
+[logger_root]
+level=DEBUG
+handlers=console
+
+[logger_gunicorn.error]
+level=INFO
+handlers=console
+propagate=1
+qualname=gunicorn.error
+
+[logger_gunicorn.access]
+level=INFO
+handlers=console
+propagate=0
+qualname=gunicorn.access
+
+[handler_console]
+class=StreamHandler
+formatter=generic
+args=(sys.stdout, )
+
+[formatter_generic]
+format=%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s
+class=logging.Formatter
diff --git a/config.py b/config.py
index 2d8f659df..d5fc126cb 100644
--- a/config.py
+++ b/config.py
@@ -1,5 +1,3 @@
-import logging
-import logstash_formatter
import requests
import os.path
@@ -16,23 +14,11 @@ def build_requests_session():
return sess
-def logs_init_builder(level=logging.DEBUG,
- formatter=logstash_formatter.LogstashFormatter()):
- @staticmethod
- def init_logs():
- handler = logging.StreamHandler()
- root_logger = logging.getLogger('')
- root_logger.setLevel(level)
- handler.setFormatter(formatter)
- root_logger.addHandler(handler)
-
- return init_logs
-
# The set of configuration key names that will be accessible in the client. Since these
# values are set to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list.
CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID',
'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY',
- 'ENTERPRISE_LOGO_URL']
+ 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN']
def getFrontendVisibleConfig(config_dict):
@@ -53,7 +39,7 @@ class DefaultConfig(object):
JSONIFY_PRETTYPRINT_REGULAR = False
SESSION_COOKIE_SECURE = False
- LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
+ LOGGING_LEVEL = 'DEBUG'
SEND_FILE_MAX_AGE_DEFAULT = 0
POPULATE_DB_TEST_DATA = True
PREFERRED_URL_SCHEME = 'http'
@@ -102,6 +88,11 @@ class DefaultConfig(object):
# Analytics
ANALYTICS_TYPE = "FakeAnalytics"
+ # Exception logging
+ EXCEPTION_LOG_TYPE = 'FakeSentry'
+ SENTRY_DSN = None
+ SENTRY_PUBLIC_DSN = None
+
# Github Config
GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token'
GITHUB_USER_URL = 'https://api.github.com/user'
diff --git a/data/billing.py b/data/billing.py
index 69f1d7c04..8872ad87f 100644
--- a/data/billing.py
+++ b/data/billing.py
@@ -133,6 +133,8 @@ class FakeStripe(object):
'plan': FAKE_PLAN,
'current_period_start': timegm(datetime.now().utctimetuple()),
'current_period_end': timegm((datetime.now() + timedelta(days=30)).utctimetuple()),
+ 'trial_start': timegm(datetime.now().utctimetuple()),
+ 'trial_end': timegm((datetime.now() + timedelta(days=30)).utctimetuple()),
})
FAKE_CARD = AttrDict({
diff --git a/data/database.py b/data/database.py
index c4a96c0d0..eaf2f0ff0 100644
--- a/data/database.py
+++ b/data/database.py
@@ -220,7 +220,8 @@ class ImageStorage(BaseModel):
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
- image_size = BigIntegerField(null=True)
+ image_size = BigIntegerField(null=True)
+ uploading = BooleanField(default=True, null=True)
class Image(BaseModel):
diff --git a/data/model/legacy.py b/data/model/legacy.py
index 296b6b1f5..1c207cb79 100644
--- a/data/model/legacy.py
+++ b/data/model/legacy.py
@@ -817,7 +817,7 @@ def get_repository(namespace_name, repository_name):
def get_repo_image(namespace_name, repository_name, image_id):
query = (Image
- .select()
+ .select(Image, ImageStorage)
.join(Repository)
.switch(Image)
.join(ImageStorage, JOIN_LEFT_OUTER)
diff --git a/data/queue.py b/data/queue.py
index af1df3045..61a03a631 100644
--- a/data/queue.py
+++ b/data/queue.py
@@ -7,6 +7,9 @@ from app import app
transaction_factory = app.config['DB_TRANSACTION_FACTORY']
+MINIMUM_EXTENSION = timedelta(seconds=20)
+
+
class WorkQueue(object):
def __init__(self, queue_name, canonical_name_match_list=None):
self.queue_name = queue_name
@@ -80,17 +83,24 @@ class WorkQueue(object):
completed_item.delete_instance()
@staticmethod
- def incomplete(incomplete_item, retry_after=300):
+ def incomplete(incomplete_item, retry_after=300, restore_retry=False):
retry_date = datetime.now() + timedelta(seconds=retry_after)
incomplete_item.available_after = retry_date
incomplete_item.available = True
+
+ if restore_retry:
+ incomplete_item.retries_remaining += 1
+
incomplete_item.save()
@staticmethod
def extend_processing(queue_item, seconds_from_now):
new_expiration = datetime.now() + timedelta(seconds=seconds_from_now)
- queue_item.processing_expires = new_expiration
- queue_item.save()
+
+ # Only actually write the new expiration to the db if it moves the expiration some minimum
+ if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION:
+ queue_item.processing_expires = new_expiration
+ queue_item.save()
image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'])
diff --git a/endpoints/common.py b/endpoints/common.py
index 1a81648aa..e9bd7b7c6 100644
--- a/endpoints/common.py
+++ b/endpoints/common.py
@@ -155,6 +155,7 @@ def render_page_template(name, **kwargs):
feature_set=json.dumps(features.get_features()),
config_set=json.dumps(getFrontendVisibleConfig(app.config)),
mixpanel_key=app.config.get('MIXPANEL_KEY', ''),
+ sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
is_debug=str(app.config.get('DEBUGGING', False)).lower(),
show_chat=features.OLARK_CHAT,
cache_buster=cache_buster,
diff --git a/endpoints/index.py b/endpoints/index.py
index 8271dda73..6ebec2d6c 100644
--- a/endpoints/index.py
+++ b/endpoints/index.py
@@ -21,6 +21,7 @@ from util.http import abort
logger = logging.getLogger(__name__)
+profile = logging.getLogger('application.profiler')
index = Blueprint('index', __name__)
@@ -112,9 +113,15 @@ def create_user():
else:
# New user case
+ profile.debug('Creating user')
new_user = model.create_user(username, password, user_data['email'])
+
+ profile.debug('Creating email code for user')
code = model.create_confirm_email_code(new_user)
+
+ profile.debug('Sending email code to user')
send_confirmation_email(new_user.username, new_user.email, code.code)
+
return make_response('Created', 201)
@@ -149,12 +156,12 @@ def update_user(username):
update_request = request.get_json()
if 'password' in update_request:
- logger.debug('Updating user password.')
+ profile.debug('Updating user password')
model.change_password(get_authenticated_user(),
update_request['password'])
if 'email' in update_request:
- logger.debug('Updating user email')
+ profile.debug('Updating user email')
model.update_email(get_authenticated_user(), update_request['email'])
return jsonify({
@@ -170,9 +177,13 @@ def update_user(username):
@parse_repository_name
@generate_headers(role='write')
def create_repository(namespace, repository):
+ profile.debug('Parsing image descriptions')
image_descriptions = json.loads(request.data)
+
+ profile.debug('Looking up repository')
repo = model.get_repository(namespace, repository)
+ profile.debug('Repository looked up')
if not repo and get_authenticated_user() is None:
logger.debug('Attempt to create new repository without user auth.')
abort(401,
@@ -196,11 +207,11 @@ def create_repository(namespace, repository):
issue='no-create-permission',
namespace=namespace)
- logger.debug('Creaing repository with owner: %s' %
- get_authenticated_user().username)
+  profile.debug('Creating repository with owner: %s', get_authenticated_user().username)
repo = model.create_repository(namespace, repository,
get_authenticated_user())
+ profile.debug('Determining added images')
added_images = OrderedDict([(desc['id'], desc)
for desc in image_descriptions])
new_repo_images = dict(added_images)
@@ -209,12 +220,15 @@ def create_repository(namespace, repository):
if existing.docker_image_id in new_repo_images:
added_images.pop(existing.docker_image_id)
+ profile.debug('Creating/Linking necessary images')
username = get_authenticated_user() and get_authenticated_user().username
translations = {}
for image_description in added_images.values():
model.find_create_or_link_image(image_description['id'], repo, username,
translations)
+
+ profile.debug('Created images')
response = make_response('Created', 201)
extra_params = {
@@ -268,21 +282,23 @@ def update_images(namespace, repository):
permission = ModifyRepositoryPermission(namespace, repository)
if permission.can():
+ profile.debug('Looking up repository')
repo = model.get_repository(namespace, repository)
if not repo:
# Make sure the repo actually exists.
abort(404, message='Unknown repository', issue='unknown-repo')
+ profile.debug('Parsing image data')
image_with_checksums = json.loads(request.data)
updated_tags = {}
for image in image_with_checksums:
- logger.debug('Setting checksum for image id: %s to %s' %
- (image['id'], image['checksum']))
+ profile.debug('Setting checksum for image id: %s to %s', image['id'], image['checksum'])
updated_tags[image['Tag']] = image['id']
model.set_image_checksum(image['id'], repo, image['checksum'])
if get_authenticated_user():
+ profile.debug('Publishing push event')
username = get_authenticated_user().username
# Mark that the user has pushed the repo.
@@ -295,15 +311,18 @@ def update_images(namespace, repository):
event = app.config['USER_EVENTS'].get_event(username)
event.publish_event_data('docker-cli', user_data)
+ profile.debug('GCing repository')
num_removed = model.garbage_collect_repository(namespace, repository)
# Generate a job for each webhook that has been added to this repo
+ profile.debug('Adding webhooks for repository')
+
webhooks = model.list_webhooks(namespace, repository)
for webhook in webhooks:
webhook_data = json.loads(webhook.parameters)
repo_string = '%s/%s' % (namespace, repository)
- logger.debug('Creating webhook for repository \'%s\' for url \'%s\'' %
- (repo_string, webhook_data['url']))
+ profile.debug('Creating webhook for repository \'%s\' for url \'%s\'',
+ repo_string, webhook_data['url'])
webhook_data['payload'] = {
'repository': repo_string,
'namespace': namespace,
@@ -330,14 +349,17 @@ def get_repository_images(namespace, repository):
permission = ReadRepositoryPermission(namespace, repository)
# TODO invalidate token?
+ profile.debug('Looking up public status of repository')
is_public = model.repository_is_public(namespace, repository)
if permission.can() or is_public:
# We can't rely on permissions to tell us if a repo exists anymore
+ profile.debug('Looking up repository')
repo = model.get_repository(namespace, repository)
if not repo:
abort(404, message='Unknown repository', issue='unknown-repo')
all_images = []
+ profile.debug('Retrieving repository images')
for image in model.get_repository_images(namespace, repository):
new_image_view = {
'id': image.docker_image_id,
@@ -345,6 +367,7 @@ def get_repository_images(namespace, repository):
}
all_images.append(new_image_view)
+ profile.debug('Building repository image response')
resp = make_response(json.dumps(all_images), 200)
resp.mimetype = 'application/json'
@@ -353,6 +376,7 @@ def get_repository_images(namespace, repository):
'namespace': namespace,
}
+ profile.debug('Logging the pull to Mixpanel and the log system')
if get_validated_oauth_token():
oauth_token = get_validated_oauth_token()
metadata['oauth_token_id'] = oauth_token.id
@@ -408,4 +432,5 @@ def get_search():
def ping():
response = make_response('true', 200)
response.headers['X-Docker-Registry-Version'] = '0.6.0'
+ response.headers['X-Docker-Registry-Standalone'] = '0'
return response
diff --git a/endpoints/registry.py b/endpoints/registry.py
index 9d981ba08..d701fd140 100644
--- a/endpoints/registry.py
+++ b/endpoints/registry.py
@@ -21,7 +21,7 @@ from data import model
registry = Blueprint('registry', __name__)
logger = logging.getLogger(__name__)
-
+profile = logging.getLogger('application.profiler')
class SocketReader(object):
def __init__(self, fp):
@@ -40,16 +40,35 @@ class SocketReader(object):
return buf
+def image_is_uploading(namespace, repository, image_id, repo_image):
+ if repo_image and repo_image.storage and repo_image.storage.uploading is not None:
+ return repo_image.storage.uploading
+
+ logger.warning('Setting legacy upload flag')
+ uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+ mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
+ return store.exists(mark_path)
+
+
+def mark_upload_complete(namespace, repository, image_id, repo_image):
+ if repo_image and repo_image.storage and repo_image.storage.uploading is not None:
+ repo_image.storage.uploading = False
+ repo_image.storage.save()
+ else:
+ logger.warning('Removing legacy upload flag')
+ uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+ mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
+ if store.exists(mark_path):
+ store.remove(mark_path)
+
+
def require_completion(f):
"""This make sure that the image push correctly finished."""
@wraps(f)
def wrapper(namespace, repository, *args, **kwargs):
image_id = kwargs['image_id']
repo_image = model.get_repo_image(namespace, repository, image_id)
- uuid = repo_image and repo_image.storage and repo_image.storage.uuid
-
- if store.exists(store.image_mark_path(namespace, repository, image_id,
- uuid)):
+ if image_is_uploading(namespace, repository, image_id, repo_image):
abort(400, 'Image %(image_id)s is being uploaded, retry later',
issue='upload-in-progress', image_id=kwargs['image_id'])
@@ -88,17 +107,28 @@ def set_cache_headers(f):
@set_cache_headers
def get_image_layer(namespace, repository, image_id, headers):
permission = ReadRepositoryPermission(namespace, repository)
+
+ profile.debug('Checking repo permissions')
if permission.can() or model.repository_is_public(namespace, repository):
+ profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id)
+
uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+ profile.debug('Looking up the layer path')
path = store.image_layer_path(namespace, repository, image_id, uuid)
+
+ profile.debug('Looking up the direct download URL')
direct_download_url = store.get_direct_download_url(path)
+
if direct_download_url:
+ profile.debug('Returning direct download URL')
return redirect(direct_download_url)
try:
+ profile.debug('Streaming layer data')
return Response(store.stream_read(path), headers=headers)
except IOError:
+ profile.debug('Image not found')
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
image_id=image_id)
@@ -109,25 +139,32 @@ def get_image_layer(namespace, repository, image_id, headers):
@process_auth
@extract_namespace_repo_from_session
def put_image_layer(namespace, repository, image_id):
+ profile.debug('Checking repo permissions')
permission = ModifyRepositoryPermission(namespace, repository)
if not permission.can():
abort(403)
+ profile.debug('Retrieving image')
repo_image = model.get_repo_image(namespace, repository, image_id)
+
uuid = repo_image and repo_image.storage and repo_image.storage.uuid
try:
+ profile.debug('Retrieving image data')
json_data = store.get_content(store.image_json_path(namespace, repository,
image_id, uuid))
except IOError:
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
image_id=image_id)
+ profile.debug('Retrieving image path info')
layer_path = store.image_layer_path(namespace, repository, image_id, uuid)
- mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
- if store.exists(layer_path) and not store.exists(mark_path):
+ if (store.exists(layer_path) and not
+ image_is_uploading(namespace, repository, image_id, repo_image)):
abort(409, 'Image already exists', issue='image-exists', image_id=image_id)
+ profile.debug('Storing layer data')
+
input_stream = request.stream
if request.headers.get('transfer-encoding') == 'chunked':
# Careful, might work only with WSGI servers supporting chunked
@@ -174,11 +211,11 @@ def put_image_layer(namespace, repository, image_id):
issue='checksum-mismatch', image_id=image_id)
# Checksum is ok, we remove the marker
- store.remove(mark_path)
+ mark_upload_complete(namespace, repository, image_id, repo_image)
# The layer is ready for download, send a job to the work queue to
# process it.
- logger.debug('Queing diffs job for image: %s' % image_id)
+ profile.debug('Adding layer to diff queue')
image_diff_queue.put([namespace, repository, image_id], json.dumps({
'namespace': namespace,
'repository': repository,
@@ -192,6 +229,7 @@ def put_image_layer(namespace, repository, image_id):
@process_auth
@extract_namespace_repo_from_session
def put_image_checksum(namespace, repository, image_id):
+ profile.debug('Checking repo permissions')
permission = ModifyRepositoryPermission(namespace, repository)
if not permission.can():
abort(403)
@@ -204,17 +242,22 @@ def put_image_checksum(namespace, repository, image_id):
abort(400, 'Checksum not found in Cookie for image %(imaage_id)s',
issue='missing-checksum-cookie', image_id=image_id)
+ profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id)
+
uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
+ profile.debug('Looking up repo layer data')
if not store.exists(store.image_json_path(namespace, repository, image_id,
uuid)):
abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
- mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
- if not store.exists(mark_path):
+ profile.debug('Marking image path')
+ if not image_is_uploading(namespace, repository, image_id, repo_image):
abort(409, 'Cannot set checksum for image %(image_id)s',
issue='image-write-error', image_id=image_id)
+ profile.debug('Storing image checksum')
err = store_checksum(namespace, repository, image_id, uuid, checksum)
if err:
abort(400, err)
@@ -227,11 +270,11 @@ def put_image_checksum(namespace, repository, image_id):
issue='checksum-mismatch', image_id=image_id)
# Checksum is ok, we remove the marker
- store.remove(mark_path)
+ mark_upload_complete(namespace, repository, image_id, repo_image)
# The layer is ready for download, send a job to the work queue to
# process it.
- logger.debug('Queing diffs job for image: %s' % image_id)
+ profile.debug('Adding layer to diff queue')
image_diff_queue.put([namespace, repository, image_id], json.dumps({
'namespace': namespace,
'repository': repository,
@@ -247,27 +290,31 @@ def put_image_checksum(namespace, repository, image_id):
@require_completion
@set_cache_headers
def get_image_json(namespace, repository, image_id, headers):
+ profile.debug('Checking repo permissions')
permission = ReadRepositoryPermission(namespace, repository)
if not permission.can() and not model.repository_is_public(namespace,
repository):
abort(403)
+ profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id)
uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+ profile.debug('Looking up repo layer data')
try:
data = store.get_content(store.image_json_path(namespace, repository,
image_id, uuid))
except IOError:
flask_abort(404)
+ profile.debug('Looking up repo layer size')
try:
- size = store.get_size(store.image_layer_path(namespace, repository,
- image_id, uuid))
+ size = repo_image.image_size or repo_image.storage.image_size
headers['X-Docker-Size'] = str(size)
except OSError:
pass
+ profile.debug('Retrieving checksum')
checksum_path = store.image_checksum_path(namespace, repository, image_id,
uuid)
if store.exists(checksum_path):
@@ -284,14 +331,17 @@ def get_image_json(namespace, repository, image_id, headers):
@require_completion
@set_cache_headers
def get_image_ancestry(namespace, repository, image_id, headers):
+ profile.debug('Checking repo permissions')
permission = ReadRepositoryPermission(namespace, repository)
if not permission.can() and not model.repository_is_public(namespace,
repository):
abort(403)
+ profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id)
uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+ profile.debug('Looking up image data')
try:
data = store.get_content(store.image_ancestry_path(namespace, repository,
image_id, uuid))
@@ -299,8 +349,11 @@ def get_image_ancestry(namespace, repository, image_id, headers):
abort(404, 'Image %(image_id)s not found', issue='unknown-image',
image_id=image_id)
+ profile.debug('Converting to <-> from JSON')
response = make_response(json.dumps(json.loads(data)), 200)
response.headers.extend(headers)
+
+ profile.debug('Done')
return response
@@ -335,10 +388,12 @@ def store_checksum(namespace, repository, image_id, uuid, checksum):
@process_auth
@extract_namespace_repo_from_session
def put_image_json(namespace, repository, image_id):
+ profile.debug('Checking repo permissions')
permission = ModifyRepositoryPermission(namespace, repository)
if not permission.can():
abort(403)
+ profile.debug('Parsing image JSON')
try:
data = json.loads(request.data)
except json.JSONDecodeError:
@@ -351,6 +406,7 @@ def put_image_json(namespace, repository, image_id):
abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
issue='invalid-request', image_id=image_id)
+ profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id)
uuid = repo_image and repo_image.storage and repo_image.storage.uuid
@@ -358,12 +414,14 @@ def put_image_json(namespace, repository, image_id):
checksum = request.headers.get('X-Docker-Checksum')
if checksum:
# Storing the checksum is optional at this stage
+ profile.debug('Storing image checksum')
err = store_checksum(namespace, repository, image_id, uuid, checksum)
if err:
abort(400, err, issue='write-error')
else:
# We cleanup any old checksum in case it's a retry after a fail
+ profile.debug('Cleanup old checksum')
store.remove(store.image_checksum_path(namespace, repository, image_id,
uuid))
if image_id != data['id']:
@@ -374,19 +432,27 @@ def put_image_json(namespace, repository, image_id):
parent_image = None
if parent_id:
+ profile.debug('Looking up parent image')
parent_image = model.get_repo_image(namespace, repository, parent_id)
+
parent_uuid = (parent_image and parent_image.storage and
parent_image.storage.uuid)
+ if parent_id:
+ profile.debug('Looking up parent image data')
+
if (parent_id and not
store.exists(store.image_json_path(namespace, repository, parent_id,
parent_uuid))):
abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
issue='invalid-request', image_id=image_id, parent_id=parent_id)
+ profile.debug('Looking up image storage paths')
json_path = store.image_json_path(namespace, repository, image_id, uuid)
- mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
- if store.exists(json_path) and not store.exists(mark_path):
+
+ profile.debug('Checking if image already exists')
+ if (store.exists(json_path) and not
+ image_is_uploading(namespace, repository, image_id, repo_image)):
abort(409, 'Image already exists', issue='image-exists', image_id=image_id)
# If we reach that point, it means that this is a new image or a retry
@@ -394,13 +460,20 @@ def put_image_json(namespace, repository, image_id):
# save the metadata
command_list = data.get('container_config', {}).get('Cmd', None)
command = json.dumps(command_list) if command_list else None
+
+ profile.debug('Setting image metadata')
model.set_image_metadata(image_id, namespace, repository,
data.get('created'), data.get('comment'), command,
parent_image)
- store.put_content(mark_path, 'true')
+
+ profile.debug('Putting json path')
store.put_content(json_path, request.data)
+
+ profile.debug('Generating image ancestry')
generate_ancestry(namespace, repository, image_id, uuid, parent_id,
parent_uuid)
+
+ profile.debug('Done')
return make_response('true', 200)
diff --git a/endpoints/trigger.py b/endpoints/trigger.py
index 5d9cecf68..8114278d4 100644
--- a/endpoints/trigger.py
+++ b/endpoints/trigger.py
@@ -20,6 +20,10 @@ TARBALL_MIME = 'application/gzip'
CHUNK_SIZE = 512 * 1024
+def should_skip_commit(message):
+ return '[skip build]' in message or '[build skip]' in message
+
+
class BuildArchiveException(Exception):
pass
@@ -35,6 +39,9 @@ class TriggerDeactivationException(Exception):
class ValidationRequestException(Exception):
pass
+class SkipRequestException(Exception):
+ pass
+
class EmptyRepositoryException(Exception):
pass
@@ -308,13 +315,20 @@ class GithubBuildTrigger(BuildTrigger):
def handle_trigger_request(self, request, auth_token, config):
payload = request.get_json()
-
+ if not payload:
+ raise SkipRequestException()
+
if 'zen' in payload:
raise ValidationRequestException()
logger.debug('Payload %s', payload)
ref = payload['ref']
commit_sha = payload['head_commit']['id']
+ commit_message = payload['head_commit'].get('message', '')
+
+ if should_skip_commit(commit_message):
+ raise SkipRequestException()
+
short_sha = GithubBuildTrigger.get_display_name(commit_sha)
gh_client = self._get_client(auth_token)
diff --git a/endpoints/webhooks.py b/endpoints/webhooks.py
index d8fa1e7e4..b31ad343d 100644
--- a/endpoints/webhooks.py
+++ b/endpoints/webhooks.py
@@ -11,7 +11,7 @@ from util.invoice import renderInvoiceToHtml
from util.email import send_invoice_email, send_subscription_change, send_payment_failed
from util.names import parse_repository_name
from util.http import abort
-from endpoints.trigger import BuildTrigger, ValidationRequestException
+from endpoints.trigger import BuildTrigger, ValidationRequestException, SkipRequestException
from endpoints.common import start_build
@@ -30,7 +30,7 @@ def stripe_webhook():
event_type = request_data['type'] if 'type' in request_data else None
if event_type == 'charge.succeeded':
- invoice_id = ['data']['object']['invoice']
+ invoice_id = request_data['data']['object']['invoice']
if user and user.invoice_email:
# Lookup the invoice.
@@ -94,6 +94,10 @@ def build_trigger_webhook(namespace, repository, trigger_uuid):
# This was just a validation request, we don't need to build anything
return make_response('Okay')
+ except SkipRequestException:
+ # The build was requested to be skipped
+ return make_response('Okay')
+
pull_robot_name = model.get_pull_robot_name(trigger)
repo = model.get_repository(namespace, repository)
start_build(repo, dockerfile_id, tags, name, subdir, False, trigger,
diff --git a/initdb.py b/initdb.py
index cf330aabe..2570b7ca9 100644
--- a/initdb.py
+++ b/initdb.py
@@ -489,7 +489,8 @@ def populate_database():
'service': trigger.service.name})
if __name__ == '__main__':
- app.config['LOGGING_CONFIG']()
+ log_level = getattr(logging, app.config['LOGGING_LEVEL'])
+ logging.basicConfig(level=log_level)
initialize_database()
if app.config.get('POPULATE_DB_TEST_DATA', False):
diff --git a/requirements-nover.txt b/requirements-nover.txt
index 2d0d91c34..cc370da9d 100644
--- a/requirements-nover.txt
+++ b/requirements-nover.txt
@@ -18,7 +18,6 @@ python-daemon
paramiko
python-digitalocean
xhtml2pdf
-logstash_formatter
redis
hiredis
git+https://github.com/DevTable/docker-py.git
@@ -30,3 +29,6 @@ git+https://github.com/NateFerrero/oauth2lib.git
alembic
sqlalchemy
python-magic
+reportlab==2.7
+blinker
+raven
diff --git a/requirements.txt b/requirements.txt
index 45afa158b..7951f5dd8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,10 +6,11 @@ Flask-Principal==0.4.0
Flask-RESTful==0.2.12
Jinja2==2.7.2
Mako==0.9.1
-MarkupSafe==0.19
+MarkupSafe==0.21
Pillow==2.4.0
PyGithub==1.24.1
-PyMySQL==0.6.1
+PyMySQL==0.6.2
+PyPDF2==1.21
SQLAlchemy==0.9.4
Werkzeug==0.9.4
alembic==0.6.4
@@ -20,30 +21,28 @@ blinker==1.3
boto==2.27.0
git+https://github.com/DevTable/docker-py.git
ecdsa==0.11
-gevent==1.0
+gevent==1.0.1
greenlet==0.4.2
gunicorn==18.0
-hiredis==0.1.2
-html5lib==1.0b3
+hiredis==0.1.3
+html5lib==0.999
itsdangerous==0.24
jsonschema==2.3.0
lockfile==0.9.1
-logstash-formatter==0.5.8
loremipsum==1.0.2
marisa-trie==0.6
mixpanel-py==3.1.2
-mock==1.0.1
git+https://github.com/NateFerrero/oauth2lib.git
paramiko==1.13.0
-peewee==2.2.2
+peewee==2.2.3
py-bcrypt==0.4
-pyPdf==1.13
pycrypto==2.6.1
python-daemon==1.6
python-dateutil==2.2
python-digitalocean==0.7
python-magic==0.4.6
pytz==2014.2
+raven==4.2.1
redis==2.9.1
reportlab==2.7
requests==2.2.1
@@ -51,4 +50,4 @@ six==1.6.1
stripe==1.14.0
websocket-client==0.11.0
wsgiref==0.1.2
-xhtml2pdf==0.0.5
+xhtml2pdf==0.0.6
diff --git a/static/css/quay.css b/static/css/quay.css
index 1f9e2ff55..630d7be63 100644
--- a/static/css/quay.css
+++ b/static/css/quay.css
@@ -698,6 +698,10 @@ i.toggle-icon:hover {
background-color: #ddd;
}
+.phase-icon.pulling {
+ background-color: #cab442;
+}
+
.phase-icon.building {
background-color: #f0ad4e;
}
diff --git a/static/js/app.js b/static/js/app.js
index fbd5a399a..d46b536f3 100644
--- a/static/js/app.js
+++ b/static/js/app.js
@@ -235,6 +235,26 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
};
dataFileService.tryAsTarGz_ = function(buf, success, failure) {
+ var gunzip = new Zlib.Gunzip(buf);
+ var plain = null;
+
+ try {
+ plain = gunzip.decompress();
+ } catch (e) {
+ failure();
+ return;
+ }
+
+ dataFileService.arrayToString(plain, function(result) {
+ if (result) {
+ dataFileService.tryAsTarGzWithStringData_(result, success, failure);
+ } else {
+ failure();
+ }
+ });
+ };
+
+ dataFileService.tryAsTarGzWithStringData_ = function(strData, success, failure) {
var collapsePath = function(originalPath) {
// Tar files can contain entries of the form './', so we need to collapse
// those paths down.
@@ -248,12 +268,9 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
return parts.join('/');
};
- var gunzip = new Zlib.Gunzip(buf);
- var plain = gunzip.decompress();
-
var handler = new MultiFile();
handler.files = [];
- handler.processTarChunks(dataFileService.arrayToString(plain), 0);
+ handler.processTarChunks(strData, 0);
if (!handler.files.length) {
failure();
return;
@@ -288,8 +305,19 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
reader.readAsText(blob);
};
- dataFileService.arrayToString = function(buf) {
- return String.fromCharCode.apply(null, new Uint16Array(buf));
+ dataFileService.arrayToString = function(buf, callback) {
+ var bb = new Blob([buf], {type: 'application/octet-binary'});
+ var f = new FileReader();
+ f.onload = function(e) {
+ callback(e.target.result);
+ };
+ f.onerror = function(e) {
+ callback(null);
+ };
+ f.onabort = function(e) {
+ callback(null);
+ };
+ f.readAsText(bb);
};
dataFileService.readDataArrayAsPossibleArchive = function(buf, success, failure) {
@@ -394,7 +422,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
builderService.getDescription = function(name, config) {
switch (name) {
case 'github':
- var source = $sanitize(UtilService.textToSafeHtml(config['build_source']));
+ var source = UtilService.textToSafeHtml(config['build_source']);
var desc = ' Push to Github Repository ';
desc += '' + source + '';
desc += '
Dockerfile folder: //' + UtilService.textToSafeHtml(config['subdir']);
@@ -778,7 +806,18 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
olark('api.chat.updateVisitorStatus', {snippet: 'username: ' + userResponse.username});
}
+ if (window.Raven !== undefined) {
+ Raven.setUser({
+ email: userResponse.email,
+ id: userResponse.username
+ });
+ }
+
CookieService.putPermanent('quay.loggedin', 'true');
+ } else {
+ if (window.Raven !== undefined) {
+ Raven.setUser();
+ }
}
if (opt_callback) {
@@ -1059,7 +1098,9 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
planDict[data.plans[i].stripeId] = data.plans[i];
}
plans = data.plans;
- callback(plans);
+ if (plans) {
+ callback(plans);
+ }
}, function() { callback([]); });
};
@@ -1155,7 +1196,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
planService.getCardInfo(orgname, function(cardInfo) {
if (plan.price > 0 && (previousSubscribeFailure || !cardInfo.last4)) {
- var title = cardInfo.last4 ? 'Subscribe' : 'Start Free trial ';
+ var title = cardInfo.last4 ? 'Subscribe' : 'Start Trial ({{amount}} plan)';
planService.showSubscribeDialog($scope, orgname, planId, callbacks, title);
return;
}
@@ -1374,6 +1415,17 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
}]);
}
+ if (window.__config && window.__config.SENTRY_PUBLIC_DSN) {
+ quayApp.config(function($provide) {
+ $provide.decorator("$exceptionHandler", function($delegate) {
+ return function(ex, cause) {
+ $delegate(ex, cause);
+ Raven.captureException(ex, {extra: {cause: cause}});
+ };
+ });
+ });
+ }
+
function buildConditionalLinker($animate, name, evaluator) {
// Based off of a solution found here: http://stackoverflow.com/questions/20325480/angularjs-whats-the-best-practice-to-add-ngif-to-a-directive-programmatically
@@ -1552,7 +1604,7 @@ quayApp.directive('entityReference', function () {
'entity': '=entity',
'namespace': '=namespace'
},
- controller: function($scope, $element, UserService, $sanitize) {
+ controller: function($scope, $element, UserService, UtilService) {
$scope.getIsAdmin = function(namespace) {
return UserService.isNamespaceAdmin(namespace);
};
@@ -1570,10 +1622,10 @@ quayApp.directive('entityReference', function () {
var org = UserService.getOrganization(namespace);
if (!org) {
// This robot is owned by the user.
- return '/user/?tab=robots&showRobot=' + $sanitize(name);
+ return '/user/?tab=robots&showRobot=' + UtilService.textToSafeHtml(name);
}
- return '/organization/' + org['name'] + '/admin?tab=robots&showRobot=' + $sanitize(name);
+ return '/organization/' + org['name'] + '/admin?tab=robots&showRobot=' + UtilService.textToSafeHtml(name);
};
$scope.getPrefix = function(name) {
@@ -4100,7 +4152,7 @@ quayApp.directive('dockerfileCommand', function () {
scope: {
'command': '=command'
},
- controller: function($scope, $element, $sanitize, Config) {
+ controller: function($scope, $element, UtilService, Config) {
var registryHandlers = {
'quay.io': function(pieces) {
var rnamespace = pieces[pieces.length - 2];
@@ -4137,11 +4189,11 @@ quayApp.directive('dockerfileCommand', function () {
$scope.getCommandTitleHtml = function(title) {
var space = title.indexOf(' ');
if (space <= 0) {
- return $sanitize(title);
+ return UtilService.textToSafeHtml(title);
}
var kind = $scope.getCommandKind(title);
- var sanitized = $sanitize(title.substring(space + 1));
+ var sanitized = UtilService.textToSafeHtml(title.substring(space + 1));
var handler = kindHandlers[kind || ''];
if (handler) {
@@ -4166,7 +4218,7 @@ quayApp.directive('dockerfileView', function () {
scope: {
'contents': '=contents'
},
- controller: function($scope, $element, $sanitize) {
+ controller: function($scope, $element, UtilService) {
$scope.$watch('contents', function(contents) {
$scope.lines = [];
@@ -4181,7 +4233,7 @@ quayApp.directive('dockerfileView', function () {
}
var lineInfo = {
- 'text': $sanitize(line),
+ 'text': UtilService.textToSafeHtml(line),
'kind': kind
};
$scope.lines.push(lineInfo);
@@ -4232,6 +4284,9 @@ quayApp.directive('buildMessage', function () {
case 'waiting':
return 'Waiting for available build worker';
+
+ case 'pulling':
+ return 'Pulling base image';
case 'building':
return 'Building image from Dockerfile';
@@ -4265,10 +4320,14 @@ quayApp.directive('buildProgress', function () {
controller: function($scope, $element) {
$scope.getPercentage = function(buildInfo) {
switch (buildInfo.phase) {
+ case 'pulling':
+ return buildInfo.status.pull_completion * 100;
+ break;
+
case 'building':
return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100;
break;
-
+
case 'pushing':
return buildInfo.status.push_completion * 100;
break;
@@ -4810,6 +4869,9 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
$rootScope.$on('$routeChangeSuccess', function (event, current, previous) {
$rootScope.pageClass = '';
+ $rootScope.current = current.$$route;
+
+ if (!current.$$route) { return; }
if (current.$$route.title) {
$rootScope.title = current.$$route.title;
@@ -4826,7 +4888,6 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
}
$rootScope.fixFooter = !!current.$$route.fixFooter;
- $rootScope.current = current.$$route;
});
$rootScope.$on('$viewContentLoaded', function(event, current) {
diff --git a/static/js/controllers.js b/static/js/controllers.js
index 8161c7a30..e989e4843 100644
--- a/static/js/controllers.js
+++ b/static/js/controllers.js
@@ -782,33 +782,34 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
}
// Create the new tree.
- $scope.tree = new ImageHistoryTree(namespace, name, resp.images,
+ var tree = new ImageHistoryTree(namespace, name, resp.images,
getFirstTextLine, $scope.getTimeSince, ImageMetadataService.getEscapedFormattedCommand);
- $scope.tree.draw('image-history-container');
+ $scope.tree = tree.draw('image-history-container');
+ if ($scope.tree) {
+ // If we already have a tag, use it
+ if ($scope.currentTag) {
+ $scope.tree.setTag($scope.currentTag.name);
+ }
- // If we already have a tag, use it
- if ($scope.currentTag) {
- $scope.tree.setTag($scope.currentTag.name);
+ // Listen for changes to the selected tag and image in the tree.
+ $($scope.tree).bind('tagChanged', function(e) {
+ $scope.$apply(function() { $scope.setTag(e.tag, true); });
+ });
+
+ $($scope.tree).bind('imageChanged', function(e) {
+ $scope.$apply(function() { $scope.setImage(e.image.id, true); });
+ });
+
+ $($scope.tree).bind('showTagMenu', function(e) {
+ $scope.$apply(function() { $scope.showTagMenu(e.tag, e.clientX, e.clientY); });
+ });
+
+ $($scope.tree).bind('hideTagMenu', function(e) {
+ $scope.$apply(function() { $scope.hideTagMenu(); });
+ });
}
- // Listen for changes to the selected tag and image in the tree.
- $($scope.tree).bind('tagChanged', function(e) {
- $scope.$apply(function() { $scope.setTag(e.tag, true); });
- });
-
- $($scope.tree).bind('imageChanged', function(e) {
- $scope.$apply(function() { $scope.setImage(e.image.id, true); });
- });
-
- $($scope.tree).bind('showTagMenu', function(e) {
- $scope.$apply(function() { $scope.showTagMenu(e.tag, e.clientX, e.clientY); });
- });
-
- $($scope.tree).bind('hideTagMenu', function(e) {
- $scope.$apply(function() { $scope.hideTagMenu(); });
- });
-
if ($routeParams.image) {
$scope.setImage($routeParams.image);
}
@@ -892,7 +893,7 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou
if (dockerfile && dockerfile.canRead) {
DataFileService.blobToString(dockerfile.toBlob(), function(result) {
$scope.$apply(function() {
- $scope.dockerFilePath = dockerfilePath;
+ $scope.dockerFilePath = dockerfilePath || 'Dockerfile';
$scope.dockerFileContents = result;
});
});
@@ -902,8 +903,11 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou
};
var notarchive = function() {
- $scope.dockerFileContents = DataFileService.arrayToString(uint8array);
- $scope.loaded = true;
+ DataFileService.arrayToString(uint8array, function(r) {
+ $scope.dockerFilePath = 'Dockerfile';
+ $scope.dockerFileContents = r;
+ $scope.loaded = true;
+ });
};
DataFileService.readDataArrayAsPossibleArchive(uint8array, archiveread, notarchive);
@@ -2386,10 +2390,10 @@ function NewOrgCtrl($scope, $routeParams, $timeout, $location, UserService, Plan
// Load the list of plans.
PlanService.getPlans(function(plans) {
$scope.plans = plans;
- $scope.currentPlan = null;
+ $scope.holder.currentPlan = null;
if (requested) {
PlanService.getPlan(requested, function(plan) {
- $scope.currentPlan = plan;
+ $scope.holder.currentPlan = plan;
});
}
});
@@ -2410,7 +2414,7 @@ function NewOrgCtrl($scope, $routeParams, $timeout, $location, UserService, Plan
};
$scope.setPlan = function(plan) {
- $scope.currentPlan = plan;
+ $scope.holder.currentPlan = plan;
};
$scope.createNewOrg = function() {
@@ -2438,7 +2442,7 @@ function NewOrgCtrl($scope, $routeParams, $timeout, $location, UserService, Plan
};
// If the selected plan is free, simply move to the org page.
- if (!Features.BILLING || $scope.currentPlan.price == 0) {
+ if (!Features.BILLING || $scope.holder.currentPlan.price == 0) {
showOrg();
return;
}
@@ -2452,7 +2456,7 @@ function NewOrgCtrl($scope, $routeParams, $timeout, $location, UserService, Plan
'failure': showOrg
};
- PlanService.changePlan($scope, org.name, $scope.currentPlan.stripeId, callbacks);
+ PlanService.changePlan($scope, org.name, $scope.holder.currentPlan.stripeId, callbacks);
}, function(result) {
$scope.creating = false;
$scope.createError = result.data.message || result.data;
diff --git a/static/js/graphing.js b/static/js/graphing.js
index f74175015..df1405503 100644
--- a/static/js/graphing.js
+++ b/static/js/graphing.js
@@ -186,6 +186,11 @@ ImageHistoryTree.prototype.draw = function(container) {
// Save the container.
this.container_ = container;
+ if (!$('#' + container)[0]) {
+ this.container_ = null;
+ return;
+ }
+
// Create the tree and all its components.
var tree = d3.layout.tree()
.separation(function() { return 2; });
@@ -193,11 +198,10 @@ ImageHistoryTree.prototype.draw = function(container) {
var diagonal = d3.svg.diagonal()
.projection(function(d) { return [d.x, d.y]; });
- var rootSvg = d3.select("#" + container).append("svg:svg")
+ var rootSvg = d3.select("#" + container).append("svg:svg")
.attr("class", "image-tree");
var vis = rootSvg.append("svg:g");
-
var formatComment = this.formatComment_;
var formatTime = this.formatTime_;
var formatCommand = this.formatCommand_;
@@ -262,6 +266,8 @@ ImageHistoryTree.prototype.draw = function(container) {
this.setTag_(this.currentTag_);
this.setupOverscroll_();
+
+ return this;
};
@@ -1129,7 +1135,12 @@ FileTreeBase.prototype.update_ = function(source) {
};
// Update the height of the container and the SVG.
- document.getElementById(this.container_).style.height = this.getContainerHeight_() + 'px';
+ var containerElm = document.getElementById(this.container_);
+ if (!containerElm) {
+ return;
+ }
+
+ containerElm.style.height = this.getContainerHeight_() + 'px';
svg.attr('height', this.getContainerHeight_());
// Compute the flattened node list.
@@ -1691,7 +1702,12 @@ LogUsageChart.prototype.handleStateChange_ = function(e) {
*/
LogUsageChart.prototype.draw = function(container, logData, startDate, endDate) {
// Reset the container's contents.
- document.getElementById(container).innerHTML = '';
+ var containerElm = document.getElementById(container);
+ if (!containerElm) {
+ return;
+ }
+
+ containerElm.innerHTML = '';
// Returns a date offset from the given date by "days" Days.
var offsetDate = function(d, days) {
diff --git a/static/lib/FileSaver.js b/static/lib/FileSaver.js
index 378a9dcc4..b0b89994c 100644
--- a/static/lib/FileSaver.js
+++ b/static/lib/FileSaver.js
@@ -230,3 +230,4 @@ var saveAs = saveAs
// with an attribute `content` that corresponds to the window
if (typeof module !== 'undefined') module.exports = saveAs;
+window.saveAs = saveAs;
\ No newline at end of file
diff --git a/static/partials/image-view.html b/static/partials/image-view.html
index 2dca39a0d..0091cded0 100644
--- a/static/partials/image-view.html
+++ b/static/partials/image-view.html
@@ -66,7 +66,7 @@
- {{folder}}/{{getFilename(change.file)}}
+ {{folder}}/{{getFilename(change.file)}}
diff --git a/static/partials/view-repo.html b/static/partials/view-repo.html
index d8a4c3112..b5d039974 100644
--- a/static/partials/view-repo.html
+++ b/static/partials/view-repo.html
@@ -329,7 +329,7 @@