From c1398c6d2b3e1c857b58eced50d1de79bd3562db Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 29 Oct 2014 15:42:44 -0400 Subject: [PATCH] - Add a log entry for repo verb handling and make the container usage calculation take it into account - Move all the repo push/pull/verb logging into a central track_and_log method - Readd images accidentally deleted in the last CL - Make the uncompressed size migration script better handle exceptions --- config.py | 3 + data/migrations/migration.sh | 4 +- ...4abf14783d_add_log_entry_kind_for_verbs.py | 28 +++++++ ...9f_add_log_kind_for_regenerating_robot_.py | 1 - data/model/legacy.py | 5 +- endpoints/api/superuser.py | 2 +- endpoints/common.py | 42 +++++++++- endpoints/index.py | 78 +------------------ endpoints/verbs.py | 8 +- initdb.py | 2 + static/js/app.js | 20 ++++- static/partials/super-user.html | 16 ++++ .../diffs.json | 5 ++ .../diffs.json | 7 ++ .../diffs.json | 8 ++ .../diffs.json | 45 +++++++++++ test/test_api_security.py | 20 ++++- util/uncompressedsize.py | 7 ++ 18 files changed, 216 insertions(+), 85 deletions(-) create mode 100644 data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py create mode 100644 test/data/registry/us/sharedimages/1d42f7d2-614d-a212-286f-72376617d2d9/diffs.json create mode 100644 test/data/registry/us/sharedimages/6a787f8c-3cc9-b656-b7b0-988c54878741/diffs.json create mode 100644 test/data/registry/us/sharedimages/7d9e2b70-712b-6c47-c2f9-7a5af0987bc4/diffs.json create mode 100644 test/data/registry/us/sharedimages/934caf90-6a23-4a62-9d0a-5004c3dcb2e7/diffs.json diff --git a/config.py b/config.py index 5a8b24b42..2482f31cf 100644 --- a/config.py +++ b/config.py @@ -186,3 +186,6 @@ class DefaultConfig(object): # Build logs archive LOG_ARCHIVE_LOCATION = 'local_us' LOG_ARCHIVE_PATH = 'logarchive/' + + # For enterprise: + MAXIMUM_CONTAINER_USAGE = 20 diff --git a/data/migrations/migration.sh b/data/migrations/migration.sh index 73100b1a8..5deeba3dd 100755 --- 
a/data/migrations/migration.sh +++ b/data/migrations/migration.sh @@ -5,8 +5,8 @@ up_mysql() { docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql # Sleep for 5s to get MySQL get started. - echo 'Sleeping for 5...' - sleep 5 + echo 'Sleeping for 10...' + sleep 10 # Add the database to mysql. docker run --rm --link mysql:mysql mysql sh -c 'echo "create database genschema" | mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -ppassword' diff --git a/data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py b/data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py new file mode 100644 index 000000000..6765a6aef --- /dev/null +++ b/data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py @@ -0,0 +1,28 @@ +"""Add log entry kind for verbs + +Revision ID: 204abf14783d +Revises: 2430f55c41d5 +Create Date: 2014-10-29 15:38:06.100915 + +""" + +# revision identifiers, used by Alembic. +revision = '204abf14783d' +down_revision = '2430f55c41d5' + +from alembic import op +import sqlalchemy as sa + +def upgrade(tables): + op.bulk_insert(tables.logentrykind, + [ + {'id': 46, 'name':'repo_verb'}, + ]) + + +def downgrade(tables): + op.execute( + (tables.logentrykind.delete() + .where(tables.logentrykind.c.name == op.inline_literal('repo_verb'))) + + ) \ No newline at end of file diff --git a/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py b/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py index f676bf972..983528b06 100644 --- a/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py +++ b/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py @@ -12,7 +12,6 @@ down_revision = '82297d834ad' from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import mysql def upgrade(tables): op.bulk_insert(tables.logentrykind, diff --git a/data/model/legacy.py b/data/model/legacy.py index 
403a7c4a5..91d1d2643 100644 --- a/data/model/legacy.py +++ b/data/model/legacy.py @@ -2226,7 +2226,10 @@ def confirm_team_invite(code, user): def get_repository_usage(): repo_pull = LogEntryKind.get(name = 'pull_repo') - return (LogEntry.select().where(LogEntry.kind == repo_pull, ~(LogEntry.repository >> None)) + repo_verb = LogEntryKind.get(name = 'repo_verb') + return (LogEntry.select() + .where((LogEntry.kind == repo_pull) | (LogEntry.kind == repo_verb)) + .where(~(LogEntry.repository >> None)) .group_by(LogEntry.ip) .group_by(LogEntry.repository) .count()) diff --git a/endpoints/api/superuser.py b/endpoints/api/superuser.py index 24472b1b8..3467add8b 100644 --- a/endpoints/api/superuser.py +++ b/endpoints/api/superuser.py @@ -64,7 +64,7 @@ class UsageInformation(ApiResource): if SuperUserPermission().can(): return { 'usage': model.get_repository_usage(), - 'allowed': 0 + 'allowed': app.config.get('MAXIMUM_CONTAINER_USAGE', 20) } abort(403) diff --git a/endpoints/common.py b/endpoints/common.py index bd844992d..a7f31b204 100644 --- a/endpoints/common.py +++ b/endpoints/common.py @@ -11,9 +11,10 @@ from random import SystemRandom from data import model from data.database import db -from app import app, login_manager, dockerfile_build_queue, notification_queue +from app import analytics, app, login_manager, dockerfile_build_queue, notification_queue from auth.permissions import QuayDeferredPermissionUser from auth import scopes +from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token from endpoints.api.discovery import swagger_route_data from werkzeug.routing import BaseConverter from functools import wraps @@ -275,3 +276,42 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual, pathargs=['build', build_request.uuid]) return build_request + +def track_and_log(event_name, repo, **kwargs): + repository = repo.name + namespace = repo.namespace_user.username + metadata = { + 'repo': repository, + 
'namespace': namespace, + } + metadata.update(kwargs) + + analytics_id = 'anonymous' + + profile.debug('Logging the %s to Mixpanel and the log system', event_name) + if get_validated_oauth_token(): + oauth_token = get_validated_oauth_token() + metadata['oauth_token_id'] = oauth_token.id + metadata['oauth_token_application_id'] = oauth_token.application.client_id + metadata['oauth_token_application'] = oauth_token.application.name + analytics_id = 'oauth:' + oauth_token.id + elif get_authenticated_user(): + metadata['username'] = get_authenticated_user().username + analytics_id = get_authenticated_user().username + elif get_validated_token(): + metadata['token'] = get_validated_token().friendly_name + metadata['token_code'] = get_validated_token().code + analytics_id = 'token:' + get_validated_token().code + else: + metadata['public'] = True + analytics_id = 'anonymous' + + extra_params = { + 'repository': '%s/%s' % (namespace, repository), + } + + analytics.track(analytics_id, event_name, extra_params) + model.log_action(event_name, namespace, + performer=get_authenticated_user(), + ip=request.remote_addr, metadata=metadata, + repository=repo) \ No newline at end of file diff --git a/endpoints/index.py b/endpoints/index.py index 1fa5010cb..a0df57375 100644 --- a/endpoints/index.py +++ b/endpoints/index.py @@ -8,7 +8,7 @@ from collections import OrderedDict from data import model from data.model import oauth -from app import analytics, app, authentication, userevents, storage +from app import app, authentication, userevents, storage from auth.auth import process_auth from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token from util.names import parse_repository_name @@ -17,6 +17,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission, ReadRepositoryPermission, CreateRepositoryPermission) from util.http import abort +from endpoints.common import track_and_log from endpoints.notificationhelper 
import spawn_notification import features @@ -241,47 +242,7 @@ def create_repository(namespace, repository): profile.debug('Created images') response = make_response('Created', 201) - - extra_params = { - 'repository': '%s/%s' % (namespace, repository), - } - - metadata = { - 'repo': repository, - 'namespace': namespace - } - - if get_validated_oauth_token(): - analytics.track(username, 'push_repo', extra_params) - - oauth_token = get_validated_oauth_token() - metadata['oauth_token_id'] = oauth_token.id - metadata['oauth_token_application_id'] = oauth_token.application.client_id - metadata['oauth_token_application'] = oauth_token.application.name - elif get_authenticated_user(): - username = get_authenticated_user().username - - analytics.track(username, 'push_repo', extra_params) - metadata['username'] = username - - # Mark that the user has started pushing the repo. - user_data = { - 'action': 'push_repo', - 'repository': repository, - 'namespace': namespace - } - - event = userevents.get_event(username) - event.publish_event_data('docker-cli', user_data) - - elif get_validated_token(): - analytics.track(get_validated_token().code, 'push_repo', extra_params) - metadata['token'] = get_validated_token().friendly_name - metadata['token_code'] = get_validated_token().code - - model.log_action('push_repo', namespace, performer=get_authenticated_user(), - ip=request.remote_addr, metadata=metadata, repository=repo) - + track_and_log('push_repo', repo) return response @@ -360,38 +321,7 @@ def get_repository_images(namespace, repository): resp = make_response(json.dumps(all_images), 200) resp.mimetype = 'application/json' - metadata = { - 'repo': repository, - 'namespace': namespace, - } - - profile.debug('Logging the pull to Mixpanel and the log system') - if get_validated_oauth_token(): - oauth_token = get_validated_oauth_token() - metadata['oauth_token_id'] = oauth_token.id - metadata['oauth_token_application_id'] = oauth_token.application.client_id - 
metadata['oauth_token_application'] = oauth_token.application.name - elif get_authenticated_user(): - metadata['username'] = get_authenticated_user().username - elif get_validated_token(): - metadata['token'] = get_validated_token().friendly_name - metadata['token_code'] = get_validated_token().code - else: - metadata['public'] = True - - pull_username = 'anonymous' - if get_authenticated_user(): - pull_username = get_authenticated_user().username - - extra_params = { - 'repository': '%s/%s' % (namespace, repository), - } - - analytics.track(pull_username, 'pull_repo', extra_params) - model.log_action('pull_repo', namespace, - performer=get_authenticated_user(), - ip=request.remote_addr, metadata=metadata, - repository=repo) + track_and_log('pull_repo', repo) return resp abort(403) diff --git a/endpoints/verbs.py b/endpoints/verbs.py index 581da0a17..e3d5da37d 100644 --- a/endpoints/verbs.py +++ b/endpoints/verbs.py @@ -2,13 +2,15 @@ import logging import json import hashlib -from flask import redirect, Blueprint, abort, send_file +from flask import redirect, Blueprint, abort, send_file, request from app import app from auth.auth import process_auth +from auth.auth_context import get_authenticated_user from auth.permissions import ReadRepositoryPermission from data import model from data import database +from endpoints.common import track_and_log from storage import Storage from util.queuefile import QueueFile @@ -16,7 +18,6 @@ from util.queueprocess import QueueProcess from util.gzipwrap import GzipWrap from util.dockerloadformat import build_docker_load_stream - verbs = Blueprint('verbs', __name__) logger = logging.getLogger(__name__) @@ -80,6 +81,9 @@ def get_squashed_tag(namespace, repository, tag): if not repo_image: abort(404) + # Log the action. 
+ track_and_log('repo_verb', repo_image.repository, tag=tag, verb='squash') + store = Storage(app) derived = model.find_or_create_derived_storage(repo_image.storage, 'squash', store.preferred_locations[0]) diff --git a/initdb.py b/initdb.py index 5d5c5fde7..20acf92b7 100644 --- a/initdb.py +++ b/initdb.py @@ -240,6 +240,8 @@ def initialize_database(): LogEntryKind.create(name='regenerate_robot_token') + LogEntryKind.create(name='repo_verb') + ImageStorageLocation.create(name='local_eu') ImageStorageLocation.create(name='local_us') diff --git a/static/js/app.js b/static/js/app.js index d76fcc549..0dbc98957 100644 --- a/static/js/app.js +++ b/static/js/app.js @@ -1537,7 +1537,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading if (metadata.updated_tags && Object.getOwnPropertyNames(metadata.updated_tags).length) { return 'Repository {repository} has been pushed with the following tags updated: {updated_tags}'; } else { - return 'Repository {repository} has been pushed'; + return 'Repository {repository} has been pushed'; } }, 'page': function(metadata) { @@ -3136,6 +3136,22 @@ quayApp.directive('logsView', function () { 'delete_robot': 'Delete Robot Account: {robot}', 'create_repo': 'Create Repository: {repo}', 'push_repo': 'Push to repository: {repo}', + 'repo_verb': function(metadata) { + var prefix = ''; + if (metadata.verb == 'squash') { + prefix = 'Pull of squashed tag {tag}' + } + + if (metadata.token) { + prefix += ' via token {token}'; + } else if (metadata.username) { + prefix += ' by {username}'; + } else { + prefix += ' by {_ip}'; + } + + return prefix; + }, 'pull_repo': function(metadata) { if (metadata.token) { return 'Pull repository {repo} via token {token}'; @@ -3266,6 +3282,7 @@ quayApp.directive('logsView', function () { 'delete_robot': 'Delete Robot Account', 'create_repo': 'Create Repository', 'push_repo': 'Push to repository', + 'repo_verb': 'Pull Repo Verb', 'pull_repo': 'Pull repository', 'delete_repo': 
'Delete repository', 'change_repo_permission': 'Change repository permission', @@ -3357,7 +3374,6 @@ quayApp.directive('logsView', function () { $scope.logsPath = '/api/v1/' + url; if (!$scope.chart) { - window.console.log('creating chart'); $scope.chart = new LogUsageChart(logKinds); $($scope.chart).bind('filteringChanged', function(e) { $scope.$apply(function() { $scope.kindsAllowed = e.allowed; }); diff --git a/static/partials/super-user.html b/static/partials/super-user.html index 0340bffc3..163d5a567 100644 --- a/static/partials/super-user.html +++ b/static/partials/super-user.html @@ -35,6 +35,22 @@
+ + +
+ You have deployed more containers than your plan allows. Please + upgrade your subscription by contacting CoreOS Sales. +
+ +
+ You are at your current plan's number of allowed containers. It might be time to think about + upgrading your subscription by contacting CoreOS Sales. +
+ +
+ You are nearing the number of allowed deployed containers. It might be time to think about + upgrading your subscription by contacting CoreOS Sales. +
diff --git a/test/data/registry/us/sharedimages/1d42f7d2-614d-a212-286f-72376617d2d9/diffs.json b/test/data/registry/us/sharedimages/1d42f7d2-614d-a212-286f-72376617d2d9/diffs.json new file mode 100644 index 000000000..c42e20f73 --- /dev/null +++ b/test/data/registry/us/sharedimages/1d42f7d2-614d-a212-286f-72376617d2d9/diffs.json @@ -0,0 +1,5 @@ +{ + "removed": [], + "added": [], + "changed": [] +} \ No newline at end of file diff --git a/test/data/registry/us/sharedimages/6a787f8c-3cc9-b656-b7b0-988c54878741/diffs.json b/test/data/registry/us/sharedimages/6a787f8c-3cc9-b656-b7b0-988c54878741/diffs.json new file mode 100644 index 000000000..b1df890a5 --- /dev/null +++ b/test/data/registry/us/sharedimages/6a787f8c-3cc9-b656-b7b0-988c54878741/diffs.json @@ -0,0 +1,7 @@ +{ + "removed": [], + "added": [ + "/elasticsearch-0.90.5.tar.gz" + ], + "changed": [] +} \ No newline at end of file diff --git a/test/data/registry/us/sharedimages/7d9e2b70-712b-6c47-c2f9-7a5af0987bc4/diffs.json b/test/data/registry/us/sharedimages/7d9e2b70-712b-6c47-c2f9-7a5af0987bc4/diffs.json new file mode 100644 index 000000000..d77baf59a --- /dev/null +++ b/test/data/registry/us/sharedimages/7d9e2b70-712b-6c47-c2f9-7a5af0987bc4/diffs.json @@ -0,0 +1,8 @@ +{ + "removed": [], + "added": [ + "/root/.bash_history", + "/usr/sbin/policy-rc.d" + ], + "changed": [] +} \ No newline at end of file diff --git a/test/data/registry/us/sharedimages/934caf90-6a23-4a62-9d0a-5004c3dcb2e7/diffs.json b/test/data/registry/us/sharedimages/934caf90-6a23-4a62-9d0a-5004c3dcb2e7/diffs.json new file mode 100644 index 000000000..23b050546 --- /dev/null +++ b/test/data/registry/us/sharedimages/934caf90-6a23-4a62-9d0a-5004c3dcb2e7/diffs.json @@ -0,0 +1,45 @@ +{ + "removed": [], + "added": [ + "/opt/elasticsearch-0.90.5/LICENSE.txt", + "/opt/elasticsearch-0.90.5/NOTICE.txt", + "/opt/elasticsearch-0.90.5/README.textile", + "/opt/elasticsearch-0.90.5/bin/elasticsearch", + "/opt/elasticsearch-0.90.5/bin/elasticsearch.in.sh", + 
"/opt/elasticsearch-0.90.5/bin/plugin", + "/opt/elasticsearch-0.90.5/config/elasticsearch.yml", + "/opt/elasticsearch-0.90.5/config/logging.yml", + "/opt/elasticsearch-0.90.5/lib/elasticsearch-0.90.5.jar", + "/opt/elasticsearch-0.90.5/lib/jna-3.3.0.jar", + "/opt/elasticsearch-0.90.5/lib/jts-1.12.jar", + "/opt/elasticsearch-0.90.5/lib/log4j-1.2.17.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-analyzers-common-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-codecs-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-core-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-grouping-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-highlighter-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-join-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-memory-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-misc-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-queries-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-queryparser-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-sandbox-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-spatial-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/lucene-suggest-4.4.0.jar", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-freebsd-6.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-linux.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-solaris.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-ia64-linux.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc-solaris.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc64-solaris.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal-macosx.dylib", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal64-macosx.dylib", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-5.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-6.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-linux.so", + "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-solaris.so", + 
"/opt/elasticsearch-0.90.5/lib/sigar/sigar-1.6.4.jar", + "/opt/elasticsearch-0.90.5/lib/spatial4j-0.3.jar" + ], + "changed": [] +} \ No newline at end of file diff --git a/test/test_api_security.py b/test/test_api_security.py index 07e5f74d7..5d4674f9e 100644 --- a/test/test_api_security.py +++ b/test/test_api_security.py @@ -43,7 +43,7 @@ from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPe RepositoryTeamPermissionList, RepositoryUserPermissionList) from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement, - SuperUserSendRecoveryEmail) + SuperUserSendRecoveryEmail, UsageInformation) try: @@ -3636,6 +3636,24 @@ class TestTeamMemberInvite(ApiTestCase): self._run_test('DELETE', 400, 'devtable', None) +class TestUsageInformation(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(UsageInformation) + + def test_get_anonymous(self): + self._run_test('GET', 401, None, None) + + def test_get_freshuser(self): + self._run_test('GET', 403, 'freshuser', None) + + def test_get_reader(self): + self._run_test('GET', 403, 'reader', None) + + def test_get_devtable(self): + self._run_test('GET', 200, 'devtable', None) + + class TestSuperUserList(ApiTestCase): def setUp(self): ApiTestCase.setUp(self) diff --git a/util/uncompressedsize.py b/util/uncompressedsize.py index 7cfa86e2a..bb12812b2 100644 --- a/util/uncompressedsize.py +++ b/util/uncompressedsize.py @@ -25,6 +25,8 @@ def backfill_sizes_from_data(): ch.setFormatter(formatter) logger.addHandler(ch) + encountered = set() + while True: # Load the record from the DB. batch_ids = list(ImageStorage @@ -33,12 +35,15 @@ def backfill_sizes_from_data(): ImageStorage.uploading == False) .limit(100) .order_by(db_random_func())) + + batch_ids = set(batch_ids) - encountered if len(batch_ids) == 0: # We're done! 
return for record in batch_ids: uuid = record.uuid + encountered.add(uuid) try: with_locs = model.get_storage_by_uuid(uuid) @@ -76,6 +81,8 @@ def backfill_sizes_from_data(): except model.InvalidImageException: logger.warning('Storage with uuid no longer exists: %s', uuid) + except IOError: + logger.warning('IOError on %s', uuid) except MemoryError: logger.warning('MemoryError on %s', uuid)