Unify the logging infrastructure and turn the prod logging level to INFO in preparation for picking up a new cloud logger.
parent 3abb5bf0a3
commit 0f3d87466e
9 changed files with 105 additions and 116 deletions
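The change collapses the separate 'application.profiler' logger into ordinary per-module loggers, so a single root-logger configuration decides what gets emitted. A minimal sketch of the resulting pattern (the helper function and the basicConfig call are illustrative stand-ins, not code from this commit):

    import logging

    # One logger per module replaces the shared 'application.profiler' logger;
    # whether debug output appears is decided by the logging configuration
    # loaded at startup, not by which logger a call site happened to pick.
    logger = logging.getLogger(__name__)

    def lookup_repository(namespace, repository):
        # Hypothetical helper, included only to show the unified call style.
        logger.debug('Looking up repository %s/%s', namespace, repository)

    if __name__ == '__main__':
        # Stand-in for conf/logging.conf / conf/logging_debug.conf: the level
        # set here controls whether the debug line above is emitted.
        logging.basicConfig(level=logging.DEBUG,
                            format='%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
        lookup_repository('someuser', 'somerepo')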
app.py (12 changes)

@@ -3,7 +3,7 @@ import os
 import json
 import yaml
-from flask import Flask as BaseFlask, Config as BaseConfig, request, Request
+from flask import Flask as BaseFlask, Config as BaseConfig, request, Request, _request_ctx_stack
 from flask.ext.principal import Principal
 from flask.ext.login import LoginManager, UserMixin
 from flask.ext.mail import Mail

@@ -65,7 +65,6 @@ LICENSE_FILENAME = 'conf/stack/license.enc'

 app = Flask(__name__)
 logger = logging.getLogger(__name__)
-profile = logging.getLogger('profile')


 if 'TEST' in os.environ:

@@ -101,21 +100,24 @@ class RequestWithId(Request):

 @app.before_request
 def _request_start():
-  profile.debug('Starting request: %s', request.path)
+  logger.debug('Starting request: %s', request.path)


 @app.after_request
 def _request_end(r):
-  profile.debug('Ending request: %s', request.path)
+  logger.debug('Ending request: %s', request.path)
   return r


 class InjectingFilter(logging.Filter):
   def filter(self, record):
     if _request_ctx_stack.top is not None:
       record.msg = '[%s] %s' % (request.request_id, record.msg)
     return True

-profile.addFilter(InjectingFilter())
+# Add the request id filter to all handlers of the root logger
+for handler in logging.getLogger().handlers:
+  handler.addFilter(InjectingFilter())

 app.request_class = RequestWithId
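Attaching InjectingFilter to every handler of the root logger, rather than to one dedicated logger, prefixes every record that reaches those handlers with the request id, no matter which module logger produced it. A self-contained sketch of that pattern, using a fixed stand-in for request.request_id since Flask's request context is not available outside a request:

    import logging

    REQUEST_ID = 'req-1234'  # stand-in for request.request_id

    class InjectingFilter(logging.Filter):
        def filter(self, record):
            # Prefix the record with the current request id; returning True
            # keeps the record rather than filtering it out.
            record.msg = '[%s] %s' % (REQUEST_ID, record.msg)
            return True

    # basicConfig installs a StreamHandler on the root logger.
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(message)s')

    # Filters attached to a handler apply to every record it emits, regardless
    # of which logger the record came from.
    for handler in logging.getLogger().handlers:
        handler.addFilter(InjectingFilter())

    logging.getLogger('endpoints.registry').debug('Looking up repo image')
    logging.getLogger('endpoints.index').debug('Creating user')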

@@ -11,5 +11,5 @@ import registry


 if __name__ == '__main__':
-  logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
+  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
   application.run(port=5000, debug=True, threaded=True, host='0.0.0.0')
@@ -3,5 +3,5 @@ workers = 2
 worker_class = 'gevent'
 timeout = 2000
 daemon = False
-logconfig = 'conf/logging.conf'
+logconfig = 'conf/logging_debug.conf'
 pythonpath = '.'
@@ -1,5 +1,5 @@
 [loggers]
-keys=root, gunicorn.error, gunicorn.access, application.profiler, boto, werkzeug
+keys=root

 [handlers]
 keys=console

@@ -7,39 +7,9 @@ keys=console
 [formatters]
 keys=generic

-[logger_application.profiler]
-level=DEBUG
-handlers=console
-propagate=0
-qualname=application.profiler
-
 [logger_root]
 level=DEBUG
 handlers=console

-[logger_boto]
-level=INFO
-handlers=console
-propagate=0
-qualname=boto
-
-[logger_werkzeug]
-level=DEBUG
-handlers=console
-propagate=0
-qualname=werkzeug
-
-[logger_gunicorn.error]
-level=INFO
-handlers=console
-propagate=1
-qualname=gunicorn.error
-
-[logger_gunicorn.access]
-level=INFO
-handlers=console
-propagate=0
-qualname=gunicorn.access
-
 [handler_console]
 class=StreamHandler
conf/logging_debug.conf (new file, 21 lines)

@@ -0,0 +1,21 @@
+[loggers]
+keys=root
+
+[handlers]
+keys=console
+
+[formatters]
+keys=generic
+
+[logger_root]
+level=DEBUG
+handlers=console
+
+[handler_console]
+class=StreamHandler
+formatter=generic
+args=(sys.stdout, )
+
+[formatter_generic]
+format=%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s
+class=logging.Formatter
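The new debug config keeps the root logger at DEBUG with a single console handler, and the callers above load it with disable_existing_loggers=False. That flag matters because the per-module loggers are created at import time, before any config file is read; a short sketch (the module name is illustrative):

    import logging
    import logging.config

    # Module-level loggers like this one typically exist before the config
    # file is loaded.
    logger = logging.getLogger('endpoints.registry')

    # disable_existing_loggers=False, as in the diff above, leaves those
    # already-created loggers enabled; the default of True would disable them.
    logging.config.fileConfig('conf/logging_debug.conf',
                              disable_existing_loggers=False)

    logger.debug('Looking up repo image')  # emitted, since logger_root is DEBUG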

@@ -29,7 +29,6 @@ from endpoints.notificationhelper import spawn_notification
 import features

 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')

 route_data = None


@@ -253,7 +252,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
                  metadata=metadata, repository=repository)

   # Add notifications for the build queue.
-  profile.debug('Adding notifications for repository')
+  logger.debug('Adding notifications for repository')
   event_data = {
     'build_id': build_request.uuid,
     'build_name': build_name,

@@ -23,7 +23,6 @@ from endpoints.notificationhelper import spawn_notification
 import features

 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')

 index = Blueprint('index', __name__)


@@ -120,7 +119,7 @@ def create_user():

   else:
     # New user case
-    profile.debug('Creating user')
+    logger.debug('Creating user')
     new_user = None

     try:

@@ -128,10 +127,10 @@ def create_user():
     except model.TooManyUsersException as ex:
       abort(402, 'Seat limit has been reached for this license', issue='seat-limit')

-    profile.debug('Creating email code for user')
+    logger.debug('Creating email code for user')
     code = model.create_confirm_email_code(new_user)

-    profile.debug('Sending email code to user')
+    logger.debug('Sending email code to user')
     send_confirmation_email(new_user.username, new_user.email, code.code)

   return make_response('Created', 201)

@@ -168,12 +167,12 @@ def update_user(username):
   update_request = request.get_json()

   if 'password' in update_request:
-    profile.debug('Updating user password')
+    logger.debug('Updating user password')
     model.change_password(get_authenticated_user(),
                           update_request['password'])

   if 'email' in update_request:
-    profile.debug('Updating user email')
+    logger.debug('Updating user email')
     model.update_email(get_authenticated_user(), update_request['email'])

   return jsonify({

@@ -189,13 +188,13 @@ def update_user(username):
 @parse_repository_name
 @generate_headers(role='write')
 def create_repository(namespace, repository):
-  profile.debug('Parsing image descriptions')
+  logger.debug('Parsing image descriptions')
   image_descriptions = json.loads(request.data.decode('utf8'))

-  profile.debug('Looking up repository')
+  logger.debug('Looking up repository')
   repo = model.get_repository(namespace, repository)

-  profile.debug('Repository looked up')
+  logger.debug('Repository looked up')
   if not repo and get_authenticated_user() is None:
     logger.debug('Attempt to create new repository without user auth.')
     abort(401,

@@ -219,11 +218,11 @@ def create_repository(namespace, repository):
           issue='no-create-permission',
           namespace=namespace)

-  profile.debug('Creaing repository with owner: %s', get_authenticated_user().username)
+  logger.debug('Creaing repository with owner: %s', get_authenticated_user().username)
   repo = model.create_repository(namespace, repository,
                                  get_authenticated_user())

-  profile.debug('Determining already added images')
+  logger.debug('Determining already added images')
   added_images = OrderedDict([(desc['id'], desc) for desc in image_descriptions])
   new_repo_images = dict(added_images)


@@ -239,7 +238,7 @@ def create_repository(namespace, repository):
   for existing in existing_images:
     added_images.pop(existing.docker_image_id)

-  profile.debug('Creating/Linking necessary images')
+  logger.debug('Creating/Linking necessary images')
   username = get_authenticated_user() and get_authenticated_user().username
   translations = {}
   for image_description in added_images.values():

@@ -247,7 +246,7 @@ def create_repository(namespace, repository):
                                translations, storage.preferred_locations[0])


-  profile.debug('Created images')
+  logger.debug('Created images')
   track_and_log('push_repo', repo)
   return make_response('Created', 201)


@@ -260,14 +259,14 @@ def update_images(namespace, repository):
   permission = ModifyRepositoryPermission(namespace, repository)

   if permission.can():
-    profile.debug('Looking up repository')
+    logger.debug('Looking up repository')
     repo = model.get_repository(namespace, repository)
     if not repo:
       # Make sure the repo actually exists.
       abort(404, message='Unknown repository', issue='unknown-repo')

     if get_authenticated_user():
-      profile.debug('Publishing push event')
+      logger.debug('Publishing push event')
       username = get_authenticated_user().username

       # Mark that the user has pushed the repo.

@@ -280,11 +279,11 @@ def update_images(namespace, repository):
       event = userevents.get_event(username)
       event.publish_event_data('docker-cli', user_data)

-    profile.debug('GCing repository')
+    logger.debug('GCing repository')
     num_removed = model.garbage_collect_repository(namespace, repository)

     # Generate a job for each notification that has been added to this repo
-    profile.debug('Adding notifications for repository')
+    logger.debug('Adding notifications for repository')

     updated_tags = session.get('pushed_tags', {})
     event_data = {

@@ -307,13 +306,13 @@ def get_repository_images(namespace, repository):
   # TODO invalidate token?
   if permission.can() or model.repository_is_public(namespace, repository):
     # We can't rely on permissions to tell us if a repo exists anymore
-    profile.debug('Looking up repository')
+    logger.debug('Looking up repository')
     repo = model.get_repository(namespace, repository)
     if not repo:
       abort(404, message='Unknown repository', issue='unknown-repo')

     all_images = []
-    profile.debug('Retrieving repository images')
+    logger.debug('Retrieving repository images')
     for image in model.get_repository_images(namespace, repository):
       new_image_view = {
         'id': image.docker_image_id,

@@ -321,7 +320,7 @@ def get_repository_images(namespace, repository):
       }
       all_images.append(new_image_view)

-    profile.debug('Building repository image response')
+    logger.debug('Building repository image response')
     resp = make_response(json.dumps(all_images), 200)
     resp.mimetype = 'application/json'

@@ -20,7 +20,6 @@ from util import gzipstream
 registry = Blueprint('registry', __name__)

 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')

 class SocketReader(object):
   def __init__(self, fp):

@@ -100,12 +99,12 @@ def set_cache_headers(f):
 def head_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)

-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   if permission.can() or model.repository_is_public(namespace, repository):
-    profile.debug('Looking up repo image')
+    logger.debug('Looking up repo image')
     repo_image = model.get_repo_image_extended(namespace, repository, image_id)
     if not repo_image:
-      profile.debug('Image not found')
+      logger.debug('Image not found')
       abort(404, 'Image %(image_id)s not found', issue='unknown-image',
             image_id=image_id)


@@ -114,7 +113,7 @@ def head_image_layer(namespace, repository, image_id, headers):
     # Add the Accept-Ranges header if the storage engine supports resumable
     # downloads.
     if store.get_supports_resumable_downloads(repo_image.storage.locations):
-      profile.debug('Storage supports resumable downloads')
+      logger.debug('Storage supports resumable downloads')
       extra_headers['Accept-Ranges'] = 'bytes'

     resp = make_response('')

@@ -133,35 +132,35 @@ def head_image_layer(namespace, repository, image_id, headers):
 def get_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)

-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   if permission.can() or model.repository_is_public(namespace, repository):
-    profile.debug('Looking up repo image')
+    logger.debug('Looking up repo image')
     repo_image = model.get_repo_image_extended(namespace, repository, image_id)
     if not repo_image:
-      profile.debug('Image not found')
+      logger.debug('Image not found')
       abort(404, 'Image %(image_id)s not found', issue='unknown-image',
             image_id=image_id)

-    profile.debug('Looking up the layer path')
+    logger.debug('Looking up the layer path')
     try:
       path = store.image_layer_path(repo_image.storage.uuid)

-      profile.debug('Looking up the direct download URL')
+      logger.debug('Looking up the direct download URL')
       direct_download_url = store.get_direct_download_url(repo_image.storage.locations, path)

       if direct_download_url:
-        profile.debug('Returning direct download URL')
+        logger.debug('Returning direct download URL')
         resp = redirect(direct_download_url)
         return resp

-      profile.debug('Streaming layer data')
+      logger.debug('Streaming layer data')

       # Close the database handle here for this process before we send the long download.
       database.close_db_filter(None)

       return Response(store.stream_read(repo_image.storage.locations, path), headers=headers)
     except (IOError, AttributeError):
-      profile.exception('Image layer data not found')
+      logger.exception('Image layer data not found')
       abort(404, 'Image %(image_id)s not found', issue='unknown-image',
             image_id=image_id)

@@ -172,30 +171,30 @@ def get_image_layer(namespace, repository, image_id, headers):
 @process_auth
 @extract_namespace_repo_from_session
 def put_image_layer(namespace, repository, image_id):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ModifyRepositoryPermission(namespace, repository)
   if not permission.can():
     abort(403)

-  profile.debug('Retrieving image')
+  logger.debug('Retrieving image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   try:
-    profile.debug('Retrieving image data')
+    logger.debug('Retrieving image data')
     uuid = repo_image.storage.uuid
     json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
   except (IOError, AttributeError):
-    profile.exception('Exception when retrieving image data')
+    logger.exception('Exception when retrieving image data')
     abort(404, 'Image %(image_id)s not found', issue='unknown-image',
           image_id=image_id)

-  profile.debug('Retrieving image path info')
+  logger.debug('Retrieving image path info')
   layer_path = store.image_layer_path(uuid)

   if (store.exists(repo_image.storage.locations, layer_path) and not
       image_is_uploading(repo_image)):
     exact_abort(409, 'Image already exists')

-  profile.debug('Storing layer data')
+  logger.debug('Storing layer data')

   input_stream = request.stream
   if request.headers.get('transfer-encoding') == 'chunked':

@@ -262,7 +261,7 @@ def put_image_layer(namespace, repository, image_id):

   # The layer is ready for download, send a job to the work queue to
   # process it.
-  profile.debug('Adding layer to diff queue')
+  logger.debug('Adding layer to diff queue')
   repo = model.get_repository(namespace, repository)
   image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
     'namespace_user_id': repo.namespace_user.id,

@@ -277,7 +276,7 @@ def put_image_layer(namespace, repository, image_id):
 @process_auth
 @extract_namespace_repo_from_session
 def put_image_checksum(namespace, repository, image_id):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ModifyRepositoryPermission(namespace, repository)
   if not permission.can():
     abort(403)

@@ -303,23 +302,23 @@ def put_image_checksum(namespace, repository, image_id):
     abort(400, 'Checksum not found in Cookie for image %(image_id)s',
           issue='missing-checksum-cookie', image_id=image_id)

-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   if not repo_image or not repo_image.storage:
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

   uuid = repo_image.storage.uuid

-  profile.debug('Looking up repo layer data')
+  logger.debug('Looking up repo layer data')
   if not store.exists(repo_image.storage.locations, store.image_json_path(uuid)):
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

-  profile.debug('Marking image path')
+  logger.debug('Marking image path')
   if not image_is_uploading(repo_image):
     abort(409, 'Cannot set checksum for image %(image_id)s',
           issue='image-write-error', image_id=image_id)

-  profile.debug('Storing image checksum')
+  logger.debug('Storing image checksum')
   err = store_checksum(repo_image.storage, checksum)
   if err:
     abort(400, err)

@@ -336,7 +335,7 @@ def put_image_checksum(namespace, repository, image_id):

   # The layer is ready for download, send a job to the work queue to
   # process it.
-  profile.debug('Adding layer to diff queue')
+  logger.debug('Adding layer to diff queue')
   repo = model.get_repository(namespace, repository)
   image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
     'namespace_user_id': repo.namespace_user.id,

@@ -353,23 +352,23 @@ def put_image_checksum(namespace, repository, image_id):
 @require_completion
 @set_cache_headers
 def get_image_json(namespace, repository, image_id, headers):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ReadRepositoryPermission(namespace, repository)
   if not permission.can() and not model.repository_is_public(namespace,
                                                              repository):
     abort(403)

-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)

-  profile.debug('Looking up repo layer data')
+  logger.debug('Looking up repo layer data')
   try:
     uuid = repo_image.storage.uuid
     data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
   except (IOError, AttributeError):
     flask_abort(404)

-  profile.debug('Looking up repo layer size')
+  logger.debug('Looking up repo layer size')
   size = repo_image.storage.image_size
   headers['X-Docker-Size'] = str(size)


@@ -384,16 +383,16 @@ def get_image_json(namespace, repository, image_id, headers):
 @require_completion
 @set_cache_headers
 def get_image_ancestry(namespace, repository, image_id, headers):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ReadRepositoryPermission(namespace, repository)
   if not permission.can() and not model.repository_is_public(namespace,
                                                              repository):
     abort(403)

-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)

-  profile.debug('Looking up image data')
+  logger.debug('Looking up image data')
   try:
     uuid = repo_image.storage.uuid
     data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))

@@ -401,11 +400,11 @@ def get_image_ancestry(namespace, repository, image_id, headers):
     abort(404, 'Image %(image_id)s not found', issue='unknown-image',
           image_id=image_id)

-  profile.debug('Converting to <-> from JSON')
+  logger.debug('Converting to <-> from JSON')
   response = make_response(json.dumps(json.loads(data)), 200)
   response.headers.extend(headers)

-  profile.debug('Done')
+  logger.debug('Done')
   return response

@@ -435,12 +434,12 @@ def store_checksum(image_storage, checksum):
 @process_auth
 @extract_namespace_repo_from_session
 def put_image_json(namespace, repository, image_id):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ModifyRepositoryPermission(namespace, repository)
   if not permission.can():
     abort(403)

-  profile.debug('Parsing image JSON')
+  logger.debug('Parsing image JSON')
   try:
     data = json.loads(request.data.decode('utf8'))
   except ValueError:

@@ -454,10 +453,10 @@ def put_image_json(namespace, repository, image_id):
     abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
           issue='invalid-request', image_id=image_id)

-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   if not repo_image:
-    profile.debug('Image not found')
+    logger.debug('Image not found')
     abort(404, 'Image %(image_id)s not found', issue='unknown-image',
           image_id=image_id)

@@ -471,24 +470,24 @@ def put_image_json(namespace, repository, image_id):

   parent_image = None
   if parent_id:
-    profile.debug('Looking up parent image')
+    logger.debug('Looking up parent image')
     parent_image = model.get_repo_image_extended(namespace, repository, parent_id)

   parent_uuid = parent_image and parent_image.storage.uuid
   parent_locations = parent_image and parent_image.storage.locations

   if parent_id:
-    profile.debug('Looking up parent image data')
+    logger.debug('Looking up parent image data')

   if (parent_id and not
       store.exists(parent_locations, store.image_json_path(parent_uuid))):
     abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
           issue='invalid-request', image_id=image_id, parent_id=parent_id)

-  profile.debug('Looking up image storage paths')
+  logger.debug('Looking up image storage paths')
   json_path = store.image_json_path(uuid)

-  profile.debug('Checking if image already exists')
+  logger.debug('Checking if image already exists')
   if (store.exists(repo_image.storage.locations, json_path) and not
       image_is_uploading(repo_image)):
     exact_abort(409, 'Image already exists')

@@ -501,24 +500,24 @@ def put_image_json(namespace, repository, image_id):
   command_list = data.get('container_config', {}).get('Cmd', None)
   command = json.dumps(command_list) if command_list else None

-  profile.debug('Setting image metadata')
+  logger.debug('Setting image metadata')
   model.set_image_metadata(image_id, namespace, repository,
                            data.get('created'), data.get('comment'), command,
                            parent_image)

-  profile.debug('Putting json path')
+  logger.debug('Putting json path')
   store.put_content(repo_image.storage.locations, json_path, request.data)

-  profile.debug('Generating image ancestry')
+  logger.debug('Generating image ancestry')

   try:
     generate_ancestry(image_id, uuid, repo_image.storage.locations, parent_id, parent_uuid,
                       parent_locations)
   except IOError as ioe:
-    profile.debug('Error when generating ancestry: %s' % ioe.message)
+    logger.debug('Error when generating ancestry: %s' % ioe.message)
     abort(404)

-  profile.debug('Done')
+  logger.debug('Done')
   return make_response('true', 200)

@@ -6,7 +6,6 @@ from flask import request
 from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token

 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')

 def track_and_log(event_name, repo, **kwargs):
   repository = repo.name

@@ -23,7 +22,7 @@ def track_and_log(event_name, repo, **kwargs):
   authenticated_user = get_authenticated_user()
   authenticated_token = get_validated_token() if not authenticated_user else None

-  profile.debug('Logging the %s to Mixpanel and the log system', event_name)
+  logger.debug('Logging the %s to Mixpanel and the log system', event_name)
   if authenticated_oauth_token:
     metadata['oauth_token_id'] = authenticated_oauth_token.id
     metadata['oauth_token_application_id'] = authenticated_oauth_token.application.client_id

@@ -45,9 +44,9 @@ def track_and_log(event_name, repo, **kwargs):
   }

   # Publish the user event (if applicable)
-  profile.debug('Checking publishing %s to the user events system', event_name)
+  logger.debug('Checking publishing %s to the user events system', event_name)
   if authenticated_user:
-    profile.debug('Publishing %s to the user events system', event_name)
+    logger.debug('Publishing %s to the user events system', event_name)
     user_event_data = {
       'action': event_name,
       'repository': repository,

@@ -58,14 +57,14 @@ def track_and_log(event_name, repo, **kwargs):
     event.publish_event_data('docker-cli', user_event_data)

   # Save the action to mixpanel.
-  profile.debug('Logging the %s to Mixpanel', event_name)
+  logger.debug('Logging the %s to Mixpanel', event_name)
   analytics.track(analytics_id, event_name, extra_params)

   # Log the action to the database.
-  profile.debug('Logging the %s to logs system', event_name)
+  logger.debug('Logging the %s to logs system', event_name)
   model.log_action(event_name, namespace,
                    performer=authenticated_user,
                    ip=request.remote_addr, metadata=metadata,
                    repository=repo)

-  profile.debug('Track and log of %s complete', event_name)
+  logger.debug('Track and log of %s complete', event_name)