From 3abb5bf0a3bc3226a1d7607ab7c639210019cacc Mon Sep 17 00:00:00 2001 From: Jimmy Zelinskie Date: Tue, 10 Feb 2015 15:48:27 -0500 Subject: [PATCH 1/2] nginx: set proxy_buffer_size to 6MB Because tags are included in our sessions, pushes containing many tags will make our headers larger than the buffer nginx uses to send to the client and then nginx is unable to validate the headers. --- conf/proxy-server-base.conf | 1 + conf/server-base.conf | 1 + 2 files changed, 2 insertions(+) diff --git a/conf/proxy-server-base.conf b/conf/proxy-server-base.conf index fb2f3f962..5bee725cf 100644 --- a/conf/proxy-server-base.conf +++ b/conf/proxy-server-base.conf @@ -13,6 +13,7 @@ proxy_set_header X-Forwarded-For $proxy_protocol_addr; proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header Host $http_host; proxy_redirect off; +proxy_buffer_size 6m; proxy_set_header Transfer-Encoding $http_transfer_encoding; diff --git a/conf/server-base.conf b/conf/server-base.conf index d5b211c52..6ac4dfb28 100644 --- a/conf/server-base.conf +++ b/conf/server-base.conf @@ -16,6 +16,7 @@ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header Host $http_host; proxy_redirect off; +proxy_buffer_size 6m; proxy_set_header Transfer-Encoding $http_transfer_encoding; From 0f3d87466e6b7b1732d5ff820241db7882f15983 Mon Sep 17 00:00:00 2001 From: Jake Moshenko Date: Wed, 11 Feb 2015 14:15:18 -0500 Subject: [PATCH 2/2] Unify the logging infrastructure and turn the prod logging level to INFO in preparation for picking up a new cloud logger. --- app.py | 14 +++--- application.py | 2 +- conf/gunicorn_local.py | 2 +- conf/logging.conf | 32 +------------- conf/logging_debug.conf | 21 +++++++++ endpoints/common.py | 3 +- endpoints/index.py | 39 ++++++++--------- endpoints/registry.py | 95 ++++++++++++++++++++-------------------- endpoints/trackhelper.py | 13 +++--- 9 files changed, 105 insertions(+), 116 deletions(-) create mode 100644 conf/logging_debug.conf diff --git a/app.py b/app.py index ae7f95600..a94b8d061 100644 --- a/app.py +++ b/app.py @@ -3,7 +3,7 @@ import os import json import yaml -from flask import Flask as BaseFlask, Config as BaseConfig, request, Request +from flask import Flask as BaseFlask, Config as BaseConfig, request, Request, _request_ctx_stack from flask.ext.principal import Principal from flask.ext.login import LoginManager, UserMixin from flask.ext.mail import Mail @@ -65,7 +65,6 @@ LICENSE_FILENAME = 'conf/stack/license.enc' app = Flask(__name__) logger = logging.getLogger(__name__) -profile = logging.getLogger('profile') if 'TEST' in os.environ: @@ -101,21 +100,24 @@ class RequestWithId(Request): @app.before_request def _request_start(): - profile.debug('Starting request: %s', request.path) + logger.debug('Starting request: %s', request.path) @app.after_request def _request_end(r): - profile.debug('Ending request: %s', request.path) + logger.debug('Ending request: %s', request.path) return r class InjectingFilter(logging.Filter): def filter(self, record): - record.msg = '[%s] %s' % (request.request_id, record.msg) + if _request_ctx_stack.top is not None: + record.msg = '[%s] %s' % (request.request_id, record.msg) return True -profile.addFilter(InjectingFilter()) +# Add the request id filter to all handlers of the root logger +for handler in logging.getLogger().handlers: + handler.addFilter(InjectingFilter()) app.request_class = RequestWithId diff --git a/application.py b/application.py index a9bd0df6e..235a80b16 100644 --- 
a/application.py +++ b/application.py @@ -11,5 +11,5 @@ import registry if __name__ == '__main__': - logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False) + logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False) application.run(port=5000, debug=True, threaded=True, host='0.0.0.0') diff --git a/conf/gunicorn_local.py b/conf/gunicorn_local.py index aa16e63ec..ce93304b0 100644 --- a/conf/gunicorn_local.py +++ b/conf/gunicorn_local.py @@ -3,5 +3,5 @@ workers = 2 worker_class = 'gevent' timeout = 2000 daemon = False -logconfig = 'conf/logging.conf' +logconfig = 'conf/logging_debug.conf' pythonpath = '.' diff --git a/conf/logging.conf b/conf/logging.conf index d009f08ee..317803a24 100644 --- a/conf/logging.conf +++ b/conf/logging.conf @@ -1,5 +1,5 @@ [loggers] -keys=root, gunicorn.error, gunicorn.access, application.profiler, boto, werkzeug +keys=root [handlers] keys=console @@ -7,39 +7,9 @@ keys=console [formatters] keys=generic -[logger_application.profiler] -level=DEBUG -handlers=console -propagate=0 -qualname=application.profiler - [logger_root] -level=DEBUG -handlers=console - -[logger_boto] level=INFO handlers=console -propagate=0 -qualname=boto - -[logger_werkzeug] -level=DEBUG -handlers=console -propagate=0 -qualname=werkzeug - -[logger_gunicorn.error] -level=INFO -handlers=console -propagate=1 -qualname=gunicorn.error - -[logger_gunicorn.access] -level=INFO -handlers=console -propagate=0 -qualname=gunicorn.access [handler_console] class=StreamHandler diff --git a/conf/logging_debug.conf b/conf/logging_debug.conf new file mode 100644 index 000000000..01a3c8fbb --- /dev/null +++ b/conf/logging_debug.conf @@ -0,0 +1,21 @@ +[loggers] +keys=root + +[handlers] +keys=console + +[formatters] +keys=generic + +[logger_root] +level=DEBUG +handlers=console + +[handler_console] +class=StreamHandler +formatter=generic +args=(sys.stdout, ) + +[formatter_generic] +format=%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s +class=logging.Formatter diff --git a/endpoints/common.py b/endpoints/common.py index 3534ee072..9301329e0 100644 --- a/endpoints/common.py +++ b/endpoints/common.py @@ -29,7 +29,6 @@ from endpoints.notificationhelper import spawn_notification import features logger = logging.getLogger(__name__) -profile = logging.getLogger('application.profiler') route_data = None @@ -253,7 +252,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual, metadata=metadata, repository=repository) # Add notifications for the build queue. 
- profile.debug('Adding notifications for repository') + logger.debug('Adding notifications for repository') event_data = { 'build_id': build_request.uuid, 'build_name': build_name, diff --git a/endpoints/index.py b/endpoints/index.py index de45f2fde..660ab94aa 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -23,7 +23,6 @@ from endpoints.notificationhelper import spawn_notification import features logger = logging.getLogger(__name__) -profile = logging.getLogger('application.profiler') index = Blueprint('index', __name__) @@ -120,7 +119,7 @@ def create_user(): else: # New user case - profile.debug('Creating user') + logger.debug('Creating user') new_user = None try: @@ -128,10 +127,10 @@ def create_user(): except model.TooManyUsersException as ex: abort(402, 'Seat limit has been reached for this license', issue='seat-limit') - profile.debug('Creating email code for user') + logger.debug('Creating email code for user') code = model.create_confirm_email_code(new_user) - profile.debug('Sending email code to user') + logger.debug('Sending email code to user') send_confirmation_email(new_user.username, new_user.email, code.code) return make_response('Created', 201) @@ -168,12 +167,12 @@ def update_user(username): update_request = request.get_json() if 'password' in update_request: - profile.debug('Updating user password') + logger.debug('Updating user password') model.change_password(get_authenticated_user(), update_request['password']) if 'email' in update_request: - profile.debug('Updating user email') + logger.debug('Updating user email') model.update_email(get_authenticated_user(), update_request['email']) return jsonify({ @@ -189,13 +188,13 @@ def update_user(username): @parse_repository_name @generate_headers(role='write') def create_repository(namespace, repository): - profile.debug('Parsing image descriptions') + logger.debug('Parsing image descriptions') image_descriptions = json.loads(request.data.decode('utf8')) - profile.debug('Looking up repository') + logger.debug('Looking up repository') repo = model.get_repository(namespace, repository) - profile.debug('Repository looked up') + logger.debug('Repository looked up') if not repo and get_authenticated_user() is None: logger.debug('Attempt to create new repository without user auth.') abort(401, @@ -219,11 +218,11 @@ def create_repository(namespace, repository): issue='no-create-permission', namespace=namespace) - profile.debug('Creaing repository with owner: %s', get_authenticated_user().username) + logger.debug('Creaing repository with owner: %s', get_authenticated_user().username) repo = model.create_repository(namespace, repository, get_authenticated_user()) - profile.debug('Determining already added images') + logger.debug('Determining already added images') added_images = OrderedDict([(desc['id'], desc) for desc in image_descriptions]) new_repo_images = dict(added_images) @@ -239,7 +238,7 @@ def create_repository(namespace, repository): for existing in existing_images: added_images.pop(existing.docker_image_id) - profile.debug('Creating/Linking necessary images') + logger.debug('Creating/Linking necessary images') username = get_authenticated_user() and get_authenticated_user().username translations = {} for image_description in added_images.values(): @@ -247,7 +246,7 @@ def create_repository(namespace, repository): translations, storage.preferred_locations[0]) - profile.debug('Created images') + logger.debug('Created images') track_and_log('push_repo', repo) return make_response('Created', 201) @@ -260,14 +259,14 
@@ def update_images(namespace, repository): permission = ModifyRepositoryPermission(namespace, repository) if permission.can(): - profile.debug('Looking up repository') + logger.debug('Looking up repository') repo = model.get_repository(namespace, repository) if not repo: # Make sure the repo actually exists. abort(404, message='Unknown repository', issue='unknown-repo') if get_authenticated_user(): - profile.debug('Publishing push event') + logger.debug('Publishing push event') username = get_authenticated_user().username # Mark that the user has pushed the repo. @@ -280,11 +279,11 @@ def update_images(namespace, repository): event = userevents.get_event(username) event.publish_event_data('docker-cli', user_data) - profile.debug('GCing repository') + logger.debug('GCing repository') num_removed = model.garbage_collect_repository(namespace, repository) # Generate a job for each notification that has been added to this repo - profile.debug('Adding notifications for repository') + logger.debug('Adding notifications for repository') updated_tags = session.get('pushed_tags', {}) event_data = { @@ -307,13 +306,13 @@ def get_repository_images(namespace, repository): # TODO invalidate token? if permission.can() or model.repository_is_public(namespace, repository): # We can't rely on permissions to tell us if a repo exists anymore - profile.debug('Looking up repository') + logger.debug('Looking up repository') repo = model.get_repository(namespace, repository) if not repo: abort(404, message='Unknown repository', issue='unknown-repo') all_images = [] - profile.debug('Retrieving repository images') + logger.debug('Retrieving repository images') for image in model.get_repository_images(namespace, repository): new_image_view = { 'id': image.docker_image_id, @@ -321,7 +320,7 @@ def get_repository_images(namespace, repository): } all_images.append(new_image_view) - profile.debug('Building repository image response') + logger.debug('Building repository image response') resp = make_response(json.dumps(all_images), 200) resp.mimetype = 'application/json' diff --git a/endpoints/registry.py b/endpoints/registry.py index b4b03334f..5178f3a83 100644 --- a/endpoints/registry.py +++ b/endpoints/registry.py @@ -20,7 +20,6 @@ from util import gzipstream registry = Blueprint('registry', __name__) logger = logging.getLogger(__name__) -profile = logging.getLogger('application.profiler') class SocketReader(object): def __init__(self, fp): @@ -100,12 +99,12 @@ def set_cache_headers(f): def head_image_layer(namespace, repository, image_id, headers): permission = ReadRepositoryPermission(namespace, repository) - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') if permission.can() or model.repository_is_public(namespace, repository): - profile.debug('Looking up repo image') + logger.debug('Looking up repo image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) if not repo_image: - profile.debug('Image not found') + logger.debug('Image not found') abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id) @@ -114,7 +113,7 @@ def head_image_layer(namespace, repository, image_id, headers): # Add the Accept-Ranges header if the storage engine supports resumable # downloads. 
if store.get_supports_resumable_downloads(repo_image.storage.locations): - profile.debug('Storage supports resumable downloads') + logger.debug('Storage supports resumable downloads') extra_headers['Accept-Ranges'] = 'bytes' resp = make_response('') @@ -133,35 +132,35 @@ def head_image_layer(namespace, repository, image_id, headers): def get_image_layer(namespace, repository, image_id, headers): permission = ReadRepositoryPermission(namespace, repository) - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') if permission.can() or model.repository_is_public(namespace, repository): - profile.debug('Looking up repo image') + logger.debug('Looking up repo image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) if not repo_image: - profile.debug('Image not found') + logger.debug('Image not found') abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id) - profile.debug('Looking up the layer path') + logger.debug('Looking up the layer path') try: path = store.image_layer_path(repo_image.storage.uuid) - profile.debug('Looking up the direct download URL') + logger.debug('Looking up the direct download URL') direct_download_url = store.get_direct_download_url(repo_image.storage.locations, path) if direct_download_url: - profile.debug('Returning direct download URL') + logger.debug('Returning direct download URL') resp = redirect(direct_download_url) return resp - profile.debug('Streaming layer data') + logger.debug('Streaming layer data') # Close the database handle here for this process before we send the long download. database.close_db_filter(None) return Response(store.stream_read(repo_image.storage.locations, path), headers=headers) except (IOError, AttributeError): - profile.exception('Image layer data not found') + logger.exception('Image layer data not found') abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id) @@ -172,30 +171,30 @@ def get_image_layer(namespace, repository, image_id, headers): @process_auth @extract_namespace_repo_from_session def put_image_layer(namespace, repository, image_id): - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') permission = ModifyRepositoryPermission(namespace, repository) if not permission.can(): abort(403) - profile.debug('Retrieving image') + logger.debug('Retrieving image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) try: - profile.debug('Retrieving image data') + logger.debug('Retrieving image data') uuid = repo_image.storage.uuid json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid)) except (IOError, AttributeError): - profile.exception('Exception when retrieving image data') + logger.exception('Exception when retrieving image data') abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id) - profile.debug('Retrieving image path info') + logger.debug('Retrieving image path info') layer_path = store.image_layer_path(uuid) if (store.exists(repo_image.storage.locations, layer_path) and not image_is_uploading(repo_image)): exact_abort(409, 'Image already exists') - profile.debug('Storing layer data') + logger.debug('Storing layer data') input_stream = request.stream if request.headers.get('transfer-encoding') == 'chunked': @@ -262,7 +261,7 @@ def put_image_layer(namespace, repository, image_id): # The layer is ready for download, send a job to the work queue to # process it. 
- profile.debug('Adding layer to diff queue') + logger.debug('Adding layer to diff queue') repo = model.get_repository(namespace, repository) image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({ 'namespace_user_id': repo.namespace_user.id, @@ -277,7 +276,7 @@ def put_image_layer(namespace, repository, image_id): @process_auth @extract_namespace_repo_from_session def put_image_checksum(namespace, repository, image_id): - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') permission = ModifyRepositoryPermission(namespace, repository) if not permission.can(): abort(403) @@ -303,23 +302,23 @@ def put_image_checksum(namespace, repository, image_id): abort(400, 'Checksum not found in Cookie for image %(image_id)s', issue='missing-checksum-cookie', image_id=image_id) - profile.debug('Looking up repo image') + logger.debug('Looking up repo image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) if not repo_image or not repo_image.storage: abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id) uuid = repo_image.storage.uuid - profile.debug('Looking up repo layer data') + logger.debug('Looking up repo layer data') if not store.exists(repo_image.storage.locations, store.image_json_path(uuid)): abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id) - profile.debug('Marking image path') + logger.debug('Marking image path') if not image_is_uploading(repo_image): abort(409, 'Cannot set checksum for image %(image_id)s', issue='image-write-error', image_id=image_id) - profile.debug('Storing image checksum') + logger.debug('Storing image checksum') err = store_checksum(repo_image.storage, checksum) if err: abort(400, err) @@ -336,7 +335,7 @@ def put_image_checksum(namespace, repository, image_id): # The layer is ready for download, send a job to the work queue to # process it. 
- profile.debug('Adding layer to diff queue') + logger.debug('Adding layer to diff queue') repo = model.get_repository(namespace, repository) image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({ 'namespace_user_id': repo.namespace_user.id, @@ -353,23 +352,23 @@ def put_image_checksum(namespace, repository, image_id): @require_completion @set_cache_headers def get_image_json(namespace, repository, image_id, headers): - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') permission = ReadRepositoryPermission(namespace, repository) if not permission.can() and not model.repository_is_public(namespace, repository): abort(403) - profile.debug('Looking up repo image') + logger.debug('Looking up repo image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) - profile.debug('Looking up repo layer data') + logger.debug('Looking up repo layer data') try: uuid = repo_image.storage.uuid data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid)) except (IOError, AttributeError): flask_abort(404) - profile.debug('Looking up repo layer size') + logger.debug('Looking up repo layer size') size = repo_image.storage.image_size headers['X-Docker-Size'] = str(size) @@ -384,16 +383,16 @@ def get_image_json(namespace, repository, image_id, headers): @require_completion @set_cache_headers def get_image_ancestry(namespace, repository, image_id, headers): - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') permission = ReadRepositoryPermission(namespace, repository) if not permission.can() and not model.repository_is_public(namespace, repository): abort(403) - profile.debug('Looking up repo image') + logger.debug('Looking up repo image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) - profile.debug('Looking up image data') + logger.debug('Looking up image data') try: uuid = repo_image.storage.uuid data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid)) @@ -401,11 +400,11 @@ def get_image_ancestry(namespace, repository, image_id, headers): abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id) - profile.debug('Converting to <-> from JSON') + logger.debug('Converting to <-> from JSON') response = make_response(json.dumps(json.loads(data)), 200) response.headers.extend(headers) - profile.debug('Done') + logger.debug('Done') return response @@ -435,12 +434,12 @@ def store_checksum(image_storage, checksum): @process_auth @extract_namespace_repo_from_session def put_image_json(namespace, repository, image_id): - profile.debug('Checking repo permissions') + logger.debug('Checking repo permissions') permission = ModifyRepositoryPermission(namespace, repository) if not permission.can(): abort(403) - profile.debug('Parsing image JSON') + logger.debug('Parsing image JSON') try: data = json.loads(request.data.decode('utf8')) except ValueError: @@ -454,10 +453,10 @@ def put_image_json(namespace, repository, image_id): abort(400, 'Missing key `id` in JSON for image: %(image_id)s', issue='invalid-request', image_id=image_id) - profile.debug('Looking up repo image') + logger.debug('Looking up repo image') repo_image = model.get_repo_image_extended(namespace, repository, image_id) if not repo_image: - profile.debug('Image not found') + logger.debug('Image not found') abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id) @@ -471,24 +470,24 @@ def 
put_image_json(namespace, repository, image_id): parent_image = None if parent_id: - profile.debug('Looking up parent image') + logger.debug('Looking up parent image') parent_image = model.get_repo_image_extended(namespace, repository, parent_id) parent_uuid = parent_image and parent_image.storage.uuid parent_locations = parent_image and parent_image.storage.locations if parent_id: - profile.debug('Looking up parent image data') + logger.debug('Looking up parent image data') if (parent_id and not store.exists(parent_locations, store.image_json_path(parent_uuid))): abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s', issue='invalid-request', image_id=image_id, parent_id=parent_id) - profile.debug('Looking up image storage paths') + logger.debug('Looking up image storage paths') json_path = store.image_json_path(uuid) - profile.debug('Checking if image already exists') + logger.debug('Checking if image already exists') if (store.exists(repo_image.storage.locations, json_path) and not image_is_uploading(repo_image)): exact_abort(409, 'Image already exists') @@ -501,24 +500,24 @@ def put_image_json(namespace, repository, image_id): command_list = data.get('container_config', {}).get('Cmd', None) command = json.dumps(command_list) if command_list else None - profile.debug('Setting image metadata') + logger.debug('Setting image metadata') model.set_image_metadata(image_id, namespace, repository, data.get('created'), data.get('comment'), command, parent_image) - profile.debug('Putting json path') + logger.debug('Putting json path') store.put_content(repo_image.storage.locations, json_path, request.data) - profile.debug('Generating image ancestry') + logger.debug('Generating image ancestry') try: generate_ancestry(image_id, uuid, repo_image.storage.locations, parent_id, parent_uuid, parent_locations) except IOError as ioe: - profile.debug('Error when generating ancestry: %s' % ioe.message) + logger.debug('Error when generating ancestry: %s' % ioe.message) abort(404) - profile.debug('Done') + logger.debug('Done') return make_response('true', 200) diff --git a/endpoints/trackhelper.py b/endpoints/trackhelper.py index 83b9d4270..fb99a2c2d 100644 --- a/endpoints/trackhelper.py +++ b/endpoints/trackhelper.py @@ -6,7 +6,6 @@ from flask import request from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token logger = logging.getLogger(__name__) -profile = logging.getLogger('application.profiler') def track_and_log(event_name, repo, **kwargs): repository = repo.name @@ -23,7 +22,7 @@ def track_and_log(event_name, repo, **kwargs): authenticated_user = get_authenticated_user() authenticated_token = get_validated_token() if not authenticated_user else None - profile.debug('Logging the %s to Mixpanel and the log system', event_name) + logger.debug('Logging the %s to Mixpanel and the log system', event_name) if authenticated_oauth_token: metadata['oauth_token_id'] = authenticated_oauth_token.id metadata['oauth_token_application_id'] = authenticated_oauth_token.application.client_id @@ -45,9 +44,9 @@ def track_and_log(event_name, repo, **kwargs): } # Publish the user event (if applicable) - profile.debug('Checking publishing %s to the user events system', event_name) + logger.debug('Checking publishing %s to the user events system', event_name) if authenticated_user: - profile.debug('Publishing %s to the user events system', event_name) + logger.debug('Publishing %s to the user events system', event_name) user_event_data = { 'action': 
event_name, 'repository': repository, @@ -58,14 +57,14 @@ def track_and_log(event_name, repo, **kwargs): event.publish_event_data('docker-cli', user_event_data) # Save the action to mixpanel. - profile.debug('Logging the %s to Mixpanel', event_name) + logger.debug('Logging the %s to Mixpanel', event_name) analytics.track(analytics_id, event_name, extra_params) # Log the action to the database. - profile.debug('Logging the %s to logs system', event_name) + logger.debug('Logging the %s to logs system', event_name) model.log_action(event_name, namespace, performer=authenticated_user, ip=request.remote_addr, metadata=metadata, repository=repo) - profile.debug('Track and log of %s complete', event_name) + logger.debug('Track and log of %s complete', event_name)
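
A note on the request-id filter introduced in app.py: attaching InjectingFilter to the handlers of the root logger, rather than to the single 'profile' logger as before, means every record that reaches those handlers carries the request id no matter which module logger emitted it, since handler-level filters see all records a handler processes while logger-level filters only apply to records logged directly against that logger. A minimal sketch of the same pattern, assuming a request_id attribute supplied by a custom Request class (the way RequestWithId provides it) and using flask.has_request_context() in place of the _request_ctx_stack check:

    import logging

    from flask import Flask, request, has_request_context

    app = Flask(__name__)


    class RequestIdFilter(logging.Filter):
        """Prefix each record with the current request id when inside a request."""
        def filter(self, record):
            if has_request_context():
                # request_id is assumed to come from a custom Request class,
                # as RequestWithId does in app.py.
                record.msg = '[%s] %s' % (getattr(request, 'request_id', '-'), record.msg)
            return True  # annotate only, never drop the record


    logging.basicConfig(level=logging.DEBUG)

    # Handler-level filters apply to every record the handler writes,
    # regardless of which module logger produced it.
    for handler in logging.getLogger().handlers:
        handler.addFilter(RequestIdFilter())

    logger = logging.getLogger(__name__)
    logger.debug('prefixed with a request id only inside a request context')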
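
The conf/logging.conf versus conf/logging_debug.conf split keeps production at INFO (selected through gunicorn's logconfig setting) and local runs at DEBUG (application.py and conf/gunicorn_local.py), and it works because every module calls logging.getLogger(__name__) and lets records propagate to the root logger. Passing disable_existing_loggers=False to fileConfig is what keeps those module loggers alive, since they are usually created at import time, before the config file is read. A rough sketch of the selection, with the environment-variable switch being an assumed convention rather than anything in these patches:

    import logging
    import logging.config
    import os

    # Hypothetical switch; the real deployments pick the file through gunicorn's
    # logconfig setting or application.py, not an environment variable.
    config_file = ('conf/logging_debug.conf'
                   if os.environ.get('DEBUGLOG') == 'true'
                   else 'conf/logging.conf')

    # disable_existing_loggers=False keeps the loggers created at import time
    # (logging.getLogger(__name__) in each module) enabled and propagating to
    # the root logger that the file configures.
    logging.config.fileConfig(config_file, disable_existing_loggers=False)

    logging.getLogger('endpoints.index').debug('dropped under logging.conf, emitted under logging_debug.conf')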
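
For the cloud logger mentioned in the second commit message: a handler only picks up the request-id filter automatically if it is already attached to the root logger when app.py runs its addFilter loop (for instance, by being declared in the logging config file gunicorn loads). A handler registered later needs the filter copied over explicitly, roughly like this, with CloudLogHandler as a purely hypothetical stand-in:

    import logging


    class CloudLogHandler(logging.Handler):
        """Stand-in for whichever cloud logging handler ends up being adopted."""
        def emit(self, record):
            # self.format(record) would be shipped to the logging service here.
            pass


    root = logging.getLogger()
    cloud_handler = CloudLogHandler()

    # Copy the filters already installed on the existing root handlers so the
    # request-id prefix shows up in the cloud logs as well.
    for existing in root.handlers:
        for record_filter in existing.filters:
            cloud_handler.addFilter(record_filter)

    root.addHandler(cloud_handler)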