- Add a log entry for repo verb handling and make the container usage calculation take it into account
- Move all the repo push/pull/verb logging into a central track_and_log method
- Re-add images accidentally deleted in the last CL
- Make the uncompressed size migration script better handle exceptions
parent: c65031eea5
commit: c1398c6d2b
18 changed files with 216 additions and 85 deletions
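
The heart of the change is the new track_and_log helper in endpoints/common.py, which replaces the near-duplicate Mixpanel/log-system blocks in the push, pull, and verb endpoints. A sketch of the three call sites as they appear in the hunks below (repo is the repository model row each endpoint has already loaded):

    from endpoints.common import track_and_log

    track_and_log('push_repo', repo)                          # index: repository push
    track_and_log('pull_repo', repo)                          # index: repository pull
    track_and_log('repo_verb', repo, tag=tag, verb='squash')  # verbs: squashed-tag pull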
@@ -186,3 +186,6 @@ class DefaultConfig(object):
   # Build logs archive
   LOG_ARCHIVE_LOCATION = 'local_us'
   LOG_ARCHIVE_PATH = 'logarchive/'
+
+  # For enterprise:
+  MAXIMUM_CONTAINER_USAGE = 20
@@ -5,8 +5,8 @@ up_mysql() {
   docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
 
   # Sleep for 5s to get MySQL get started.
-  echo 'Sleeping for 5...'
-  sleep 5
+  echo 'Sleeping for 10...'
+  sleep 10
 
   # Add the database to mysql.
   docker run --rm --link mysql:mysql mysql sh -c 'echo "create database genschema" | mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -ppassword'
@@ -0,0 +1,28 @@
+"""Add log entry kind for verbs
+
+Revision ID: 204abf14783d
+Revises: 2430f55c41d5
+Create Date: 2014-10-29 15:38:06.100915
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '204abf14783d'
+down_revision = '2430f55c41d5'
+
+from alembic import op
+import sqlalchemy as sa
+
+def upgrade(tables):
+  op.bulk_insert(tables.logentrykind,
+                 [
+                   {'id': 46, 'name':'repo_verb'},
+                 ])
+
+
+def downgrade(tables):
+  op.execute(
+    (tables.logentrykind.delete()
+     .where(tables.logentrykind.c.name == op.inline_literal('repo_verb')))
+
+  )
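
Assuming a stock Alembic setup around these tables-aware migrations, alembic upgrade head applies the bulk insert and alembic downgrade -1 re-runs the name-based delete; the hard-coded id 46 presumably continues the id sequence used by earlier logentrykind inserts.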
@@ -12,7 +12,6 @@ down_revision = '82297d834ad'
 
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import mysql
 
 def upgrade(tables):
   op.bulk_insert(tables.logentrykind,
@@ -2226,7 +2226,10 @@ def confirm_team_invite(code, user):
 
 def get_repository_usage():
   repo_pull = LogEntryKind.get(name = 'pull_repo')
-  return (LogEntry.select().where(LogEntry.kind == repo_pull, ~(LogEntry.repository >> None))
+  repo_verb = LogEntryKind.get(name = 'repo_verb')
+  return (LogEntry.select()
+          .where((LogEntry.kind == repo_pull) | (LogEntry.kind == repo_verb))
+          .where(~(LogEntry.repository >> None))
           .group_by(LogEntry.ip)
           .group_by(LogEntry.repository)
           .count())
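
Two details are easy to miss in the rewritten query: peewee ANDs together multiple expressions passed to one .where() call, so expressing pull-or-verb requires the explicit | operator (with each comparison parenthesized, since | binds tighter than ==), and the chained second .where() ANDs the not-null repository filter back on. The grouped count then effectively tallies distinct (ip, repository) pairs that pulled or ran a verb, which is the figure the usage endpoint below reports.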
@@ -64,7 +64,7 @@ class UsageInformation(ApiResource):
     if SuperUserPermission().can():
       return {
         'usage': model.get_repository_usage(),
-        'allowed': 0
+        'allowed': app.config.get('MAXIMUM_CONTAINER_USAGE', 20)
       }
 
     abort(403)
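
The config lookup falls back to 20 when MAXIMUM_CONTAINER_USAGE is absent, matching the DefaultConfig value added above, so existing deployments report a sane limit instead of the old hard-coded 0.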
@@ -11,9 +11,10 @@ from random import SystemRandom
 
 from data import model
 from data.database import db
-from app import app, login_manager, dockerfile_build_queue, notification_queue
+from app import analytics, app, login_manager, dockerfile_build_queue, notification_queue
 from auth.permissions import QuayDeferredPermissionUser
 from auth import scopes
+from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
 from endpoints.api.discovery import swagger_route_data
 from werkzeug.routing import BaseConverter
 from functools import wraps
@ -275,3 +276,42 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
|
||||||
pathargs=['build', build_request.uuid])
|
pathargs=['build', build_request.uuid])
|
||||||
return build_request
|
return build_request
|
||||||
|
|
||||||
|
|
||||||
|
def track_and_log(event_name, repo, **kwargs):
|
||||||
|
repository = repo.name
|
||||||
|
namespace = repo.namespace_user.username
|
||||||
|
metadata = {
|
||||||
|
'repo': repository,
|
||||||
|
'namespace': namespace,
|
||||||
|
}
|
||||||
|
metadata.update(kwargs)
|
||||||
|
|
||||||
|
analytics_id = 'anonymous'
|
||||||
|
|
||||||
|
profile.debug('Logging the %s to Mixpanel and the log system', event_name)
|
||||||
|
if get_validated_oauth_token():
|
||||||
|
oauth_token = get_validated_oauth_token()
|
||||||
|
metadata['oauth_token_id'] = oauth_token.id
|
||||||
|
metadata['oauth_token_application_id'] = oauth_token.application.client_id
|
||||||
|
metadata['oauth_token_application'] = oauth_token.application.name
|
||||||
|
analytics_id = 'oauth:' + oauth_token.id
|
||||||
|
elif get_authenticated_user():
|
||||||
|
metadata['username'] = get_authenticated_user().username
|
||||||
|
analytics_id = get_authenticated_user().username
|
||||||
|
elif get_validated_token():
|
||||||
|
metadata['token'] = get_validated_token().friendly_name
|
||||||
|
metadata['token_code'] = get_validated_token().code
|
||||||
|
analytics_id = 'token:' + get_validated_token().code
|
||||||
|
else:
|
||||||
|
metadata['public'] = True
|
||||||
|
analytics_id = 'anonymous'
|
||||||
|
|
||||||
|
extra_params = {
|
||||||
|
'repository': '%s/%s' % (namespace, repository),
|
||||||
|
}
|
||||||
|
|
||||||
|
analytics.track(analytics_id, event_name, extra_params)
|
||||||
|
model.log_action(event_name, namespace,
|
||||||
|
performer=get_authenticated_user(),
|
||||||
|
ip=request.remote_addr, metadata=metadata,
|
||||||
|
repository=repo)
|
|
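
Because arbitrary keyword arguments are folded into the log metadata via metadata.update(kwargs), callers can attach event-specific details without touching the helper. A hypothetical example under an anonymous request, with repository names invented for illustration:

    # repo.namespace_user.username == 'devtable', repo.name == 'complex' (made up)
    track_and_log('repo_verb', repo, tag='latest', verb='squash')
    # metadata -> {'repo': 'complex', 'namespace': 'devtable',
    #              'tag': 'latest', 'verb': 'squash', 'public': True}
    # analytics id -> 'anonymous'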
@@ -8,7 +8,7 @@ from collections import OrderedDict
 
 from data import model
 from data.model import oauth
-from app import analytics, app, authentication, userevents, storage
+from app import app, authentication, userevents, storage
 from auth.auth import process_auth
 from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
 from util.names import parse_repository_name
@@ -17,6 +17,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
                               ReadRepositoryPermission, CreateRepositoryPermission)
 
 from util.http import abort
+from endpoints.common import track_and_log
 from endpoints.notificationhelper import spawn_notification
 
 import features
@@ -241,47 +242,7 @@ def create_repository(namespace, repository):
 
   profile.debug('Created images')
   response = make_response('Created', 201)
-  extra_params = {
-    'repository': '%s/%s' % (namespace, repository),
-  }
-
-  metadata = {
-    'repo': repository,
-    'namespace': namespace
-  }
-
-  if get_validated_oauth_token():
-    analytics.track(username, 'push_repo', extra_params)
-
-    oauth_token = get_validated_oauth_token()
-    metadata['oauth_token_id'] = oauth_token.id
-    metadata['oauth_token_application_id'] = oauth_token.application.client_id
-    metadata['oauth_token_application'] = oauth_token.application.name
-  elif get_authenticated_user():
-    username = get_authenticated_user().username
-
-    analytics.track(username, 'push_repo', extra_params)
-    metadata['username'] = username
-
-    # Mark that the user has started pushing the repo.
-    user_data = {
-      'action': 'push_repo',
-      'repository': repository,
-      'namespace': namespace
-    }
-
-    event = userevents.get_event(username)
-    event.publish_event_data('docker-cli', user_data)
-
-  elif get_validated_token():
-    analytics.track(get_validated_token().code, 'push_repo', extra_params)
-    metadata['token'] = get_validated_token().friendly_name
-    metadata['token_code'] = get_validated_token().code
-
-  model.log_action('push_repo', namespace, performer=get_authenticated_user(),
-                   ip=request.remote_addr, metadata=metadata, repository=repo)
-
+  track_and_log('push_repo', repo)
 
   return response
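
Worth noting: the deleted block appears to read username in the OAuth branch before anything assigns it (only the elif get_authenticated_user() branch sets it), a latent bug that the helper sidesteps by computing an explicit analytics_id for every identity type. The userevents 'docker-cli' push notification, on the other hand, is not carried over into track_and_log, at least in these hunks.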
@@ -360,38 +321,7 @@ def get_repository_images(namespace, repository):
     resp = make_response(json.dumps(all_images), 200)
     resp.mimetype = 'application/json'
 
-    metadata = {
-      'repo': repository,
-      'namespace': namespace,
-    }
-
-    profile.debug('Logging the pull to Mixpanel and the log system')
-    if get_validated_oauth_token():
-      oauth_token = get_validated_oauth_token()
-      metadata['oauth_token_id'] = oauth_token.id
-      metadata['oauth_token_application_id'] = oauth_token.application.client_id
-      metadata['oauth_token_application'] = oauth_token.application.name
-    elif get_authenticated_user():
-      metadata['username'] = get_authenticated_user().username
-    elif get_validated_token():
-      metadata['token'] = get_validated_token().friendly_name
-      metadata['token_code'] = get_validated_token().code
-    else:
-      metadata['public'] = True
-
-    pull_username = 'anonymous'
-    if get_authenticated_user():
-      pull_username = get_authenticated_user().username
-
-    extra_params = {
-      'repository': '%s/%s' % (namespace, repository),
-    }
-
-    analytics.track(pull_username, 'pull_repo', extra_params)
-    model.log_action('pull_repo', namespace,
-                     performer=get_authenticated_user(),
-                     ip=request.remote_addr, metadata=metadata,
-                     repository=repo)
+    track_and_log('pull_repo', repo)
     return resp
 
   abort(403)
@@ -2,13 +2,15 @@ import logging
 import json
 import hashlib
 
-from flask import redirect, Blueprint, abort, send_file
+from flask import redirect, Blueprint, abort, send_file, request
 
 from app import app
 from auth.auth import process_auth
+from auth.auth_context import get_authenticated_user
 from auth.permissions import ReadRepositoryPermission
 from data import model
 from data import database
+from endpoints.common import track_and_log
 from storage import Storage
 
 from util.queuefile import QueueFile
@@ -16,7 +18,6 @@ from util.queueprocess import QueueProcess
 from util.gzipwrap import GzipWrap
 from util.dockerloadformat import build_docker_load_stream
 
-
 verbs = Blueprint('verbs', __name__)
 logger = logging.getLogger(__name__)
 
@@ -80,6 +81,9 @@ def get_squashed_tag(namespace, repository, tag):
   if not repo_image:
     abort(404)
 
+  # Log the action.
+  track_and_log('repo_verb', repo_image.repository, tag=tag, verb='squash')
+
   store = Storage(app)
   derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
                                                  store.preferred_locations[0])
@@ -240,6 +240,8 @@ def initialize_database():
 
   LogEntryKind.create(name='regenerate_robot_token')
 
+  LogEntryKind.create(name='repo_verb')
+
   ImageStorageLocation.create(name='local_eu')
   ImageStorageLocation.create(name='local_us')
@@ -1537,7 +1537,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoadingBarProvider) {
           if (metadata.updated_tags && Object.getOwnPropertyNames(metadata.updated_tags).length) {
             return 'Repository {repository} has been pushed with the following tags updated: {updated_tags}';
           } else {
-            return 'Repository {repository} has been pushed';
+            return 'Repository {repository} fhas been pushed';
           }
         },
         'page': function(metadata) {
@@ -3136,6 +3136,22 @@ quayApp.directive('logsView', function () {
           'delete_robot': 'Delete Robot Account: {robot}',
           'create_repo': 'Create Repository: {repo}',
           'push_repo': 'Push to repository: {repo}',
+          'repo_verb': function(metadata) {
+            var prefix = '';
+            if (metadata.verb == 'squash') {
+              prefix = 'Pull of squashed tag {tag}'
+            }
+
+            if (metadata.token) {
+              prefix += ' via token {token}';
+            } else if (metadata.username) {
+              prefix += ' by {username}';
+            } else {
+              prefix += ' by {_ip}';
+            }
+
+            return prefix;
+          },
           'pull_repo': function(metadata) {
             if (metadata.token) {
               return 'Pull repository {repo} via token {token}';
@@ -3266,6 +3282,7 @@ quayApp.directive('logsView', function () {
           'delete_robot': 'Delete Robot Account',
           'create_repo': 'Create Repository',
           'push_repo': 'Push to repository',
+          'repo_verb': 'Pull Repo Verb',
           'pull_repo': 'Pull repository',
           'delete_repo': 'Delete repository',
           'change_repo_permission': 'Change repository permission',
@@ -3357,7 +3374,6 @@ quayApp.directive('logsView', function () {
         $scope.logsPath = '/api/v1/' + url;
 
         if (!$scope.chart) {
-          window.console.log('creating chart');
           $scope.chart = new LogUsageChart(logKinds);
           $($scope.chart).bind('filteringChanged', function(e) {
             $scope.$apply(function() { $scope.kindsAllowed = e.allowed; });
@@ -35,6 +35,22 @@
       <div class="quay-spinner" ng-show="systemUsage == null"></div>
       <div class="usage-chart" total="systemUsage.allowed" limit="systemUsageLimit"
            current="systemUsage.usage" usage-title="Container Usage"></div>
+
+      <!-- Alerts -->
+      <div class="alert alert-danger" ng-show="systemUsageLimit == 'over' && systemUsage">
+        You have deployed more containers than your plan allows. Please
+        upgrade your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
+      </div>
+
+      <div class="alert alert-warning" ng-show="systemUsageLimit == 'at' && systemUsage">
+        You are at your current plan's number of allowed containers. It might be time to think about
+        upgrading your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
+      </div>
+
+      <div class="alert alert-success" ng-show="systemUsageLimit == 'near' && systemUsage">
+        You are nearing the number of allowed deployed containers. It might be time to think about
+        upgrading your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
+      </div>
     </div>
 
     <!-- Create user tab -->
@@ -0,0 +1,5 @@
+{
+  "removed": [],
+  "added": [],
+  "changed": []
+}
@@ -0,0 +1,7 @@
+{
+  "removed": [],
+  "added": [
+    "/elasticsearch-0.90.5.tar.gz"
+  ],
+  "changed": []
+}
@@ -0,0 +1,8 @@
+{
+  "removed": [],
+  "added": [
+    "/root/.bash_history",
+    "/usr/sbin/policy-rc.d"
+  ],
+  "changed": []
+}
@@ -0,0 +1,45 @@
+{
+  "removed": [],
+  "added": [
+    "/opt/elasticsearch-0.90.5/LICENSE.txt",
+    "/opt/elasticsearch-0.90.5/NOTICE.txt",
+    "/opt/elasticsearch-0.90.5/README.textile",
+    "/opt/elasticsearch-0.90.5/bin/elasticsearch",
+    "/opt/elasticsearch-0.90.5/bin/elasticsearch.in.sh",
+    "/opt/elasticsearch-0.90.5/bin/plugin",
+    "/opt/elasticsearch-0.90.5/config/elasticsearch.yml",
+    "/opt/elasticsearch-0.90.5/config/logging.yml",
+    "/opt/elasticsearch-0.90.5/lib/elasticsearch-0.90.5.jar",
+    "/opt/elasticsearch-0.90.5/lib/jna-3.3.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/jts-1.12.jar",
+    "/opt/elasticsearch-0.90.5/lib/log4j-1.2.17.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-analyzers-common-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-codecs-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-core-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-grouping-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-highlighter-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-join-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-memory-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-misc-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-queries-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-queryparser-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-sandbox-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-spatial-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/lucene-suggest-4.4.0.jar",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-freebsd-6.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-linux.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-solaris.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-ia64-linux.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc-solaris.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc64-solaris.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal-macosx.dylib",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal64-macosx.dylib",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-5.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-6.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-linux.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-solaris.so",
+    "/opt/elasticsearch-0.90.5/lib/sigar/sigar-1.6.4.jar",
+    "/opt/elasticsearch-0.90.5/lib/spatial4j-0.3.jar"
+  ],
+  "changed": []
+}
@@ -43,7 +43,7 @@ from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
                                       RepositoryTeamPermissionList, RepositoryUserPermissionList)
 
 from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement,
-                                     SuperUserSendRecoveryEmail)
+                                     SuperUserSendRecoveryEmail, UsageInformation)
 
 
 try:
@@ -3636,6 +3636,24 @@ class TestTeamMemberInvite(ApiTestCase):
     self._run_test('DELETE', 400, 'devtable', None)
 
 
+class TestUsageInformation(ApiTestCase):
+  def setUp(self):
+    ApiTestCase.setUp(self)
+    self._set_url(UsageInformation)
+
+  def test_get_anonymous(self):
+    self._run_test('GET', 401, None, None)
+
+  def test_get_freshuser(self):
+    self._run_test('GET', 403, 'freshuser', None)
+
+  def test_get_reader(self):
+    self._run_test('GET', 403, 'reader', None)
+
+  def test_get_devtable(self):
+    self._run_test('GET', 200, 'devtable', None)
+
+
 class TestSuperUserList(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)
@@ -25,6 +25,8 @@ def backfill_sizes_from_data():
   ch.setFormatter(formatter)
   logger.addHandler(ch)
 
+  encountered = set()
+
   while True:
     # Load the record from the DB.
     batch_ids = list(ImageStorage
|
@ -33,12 +35,15 @@ def backfill_sizes_from_data():
|
||||||
ImageStorage.uploading == False)
|
ImageStorage.uploading == False)
|
||||||
.limit(100)
|
.limit(100)
|
||||||
.order_by(db_random_func()))
|
.order_by(db_random_func()))
|
||||||
|
|
||||||
|
batch_ids = set(batch_ids) - encountered
|
||||||
if len(batch_ids) == 0:
|
if len(batch_ids) == 0:
|
||||||
# We're done!
|
# We're done!
|
||||||
return
|
return
|
||||||
|
|
||||||
for record in batch_ids:
|
for record in batch_ids:
|
||||||
uuid = record.uuid
|
uuid = record.uuid
|
||||||
|
encountered.add(uuid)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with_locs = model.get_storage_by_uuid(uuid)
|
with_locs = model.get_storage_by_uuid(uuid)
|
||||||
|
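
The encountered set is what actually stops the loop from spinning: batches are drawn in random order, so a record that keeps failing would otherwise be re-selected forever once only broken records remain. A minimal standalone sketch of the pattern, with load_batch and process_record as hypothetical stand-ins for the script's query and size computation:

    import logging

    logger = logging.getLogger(__name__)

    def backfill(load_batch, process_record):
      encountered = set()
      while True:
        # Skip anything already attempted, whether it succeeded or failed.
        batch = [r for r in load_batch() if r.uuid not in encountered]
        if not batch:
          break

        for record in batch:
          encountered.add(record.uuid)
          try:
            process_record(record)
          except (IOError, MemoryError) as ex:
            # Log and move on; the uuid stays in `encountered`,
            # so the record is never retried.
            logger.warning('%s on %s', type(ex).__name__, record.uuid)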
@@ -76,6 +81,8 @@ def backfill_sizes_from_data():
 
      except model.InvalidImageException:
        logger.warning('Storage with uuid no longer exists: %s', uuid)
+      except IOError:
+        logger.warning('IOError on %s', uuid)
      except MemoryError:
        logger.warning('MemoryError on %s', uuid)