Change account-less logs to use a user and not null

This allows us to skip the migration.

parent 5cb6ba4d12
commit 4f63a50a17

6 changed files with 39 additions and 21 deletions
@@ -308,3 +308,6 @@ class DefaultConfig(object):
   # How long to wait before GCing an expired service key.
   EXPIRED_SERVICE_KEY_TTL_SEC = 60 * 60 * 24 * 7  # One week
 
+  # The ID of the user account in the database to be used for service audit logs. If none, the
+  # lowest user in the database will be used.
+  SERVICE_LOG_ACCOUNT_ID = None
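A deployment can pin the audit-log account instead of relying on the lowest-user-id fallback introduced later in this diff. A minimal sketch, assuming a subclass-style config override and a hypothetical user id:

# Hypothetical values: pin the service audit-log account for a deployment
# rather than falling back to the lowest user id in the database.
class ProdConfig(DefaultConfig):
  SERVICE_LOG_ACCOUNT_ID = 7  # id of a dedicated internal 'service' user row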
@@ -698,7 +698,7 @@ _LogEntryAccountProxy = Proxy()
 
 class LogEntry(BaseModel):
   kind = ForeignKeyField(LogEntryKind, index=True)
-  account = ForeignKeyField(_LogEntryAccountProxy, index=True, null=True, related_name='account')
+  account = ForeignKeyField(_LogEntryAccountProxy, index=True, related_name='account')
   performer = QuayUserField(allows_robots=True, index=True, null=True,
                             related_name='performer', robot_null_delete=True)
   repository = ForeignKeyField(Repository, index=True, null=True)
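With null=True dropped, account becomes a NOT NULL foreign key: every log entry must reference a user row. A sketch of the new failure mode, assuming a stock peewee setup where the database (not peewee) enforces the constraint:

# Hypothetical illustration: saving a LogEntry without an account now
# violates the NOT NULL constraint on account_id.
entry = LogEntry(kind=kind, metadata_json='{}', datetime=datetime.now())
entry.save()  # raises IntegrityError: account_id may not be NULL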
@@ -65,8 +65,6 @@ def upgrade(tables):
   op.add_column(u'notification', sa.Column('lookup_path', sa.String(length=255), nullable=True))
   op.create_index('notification_lookup_path', 'notification', ['lookup_path'], unique=False)
 
-  op.drop_constraint(u'fk_logentry_user_account_id', 'logentry', type_='foreignkey')
-  op.alter_column('logentry', 'account_id', existing_type=sa.Integer(), nullable=True)
 
 
 def downgrade(tables):
@@ -80,6 +78,3 @@ def downgrade(tables):
   op.drop_column(u'notification', 'lookup_path')
   op.drop_table('servicekey')
   op.drop_table('servicekeyapproval')
-
-  op.alter_column('logentry', 'account_id', existing_type=sa.Integer(), nullable=False)
-  op.create_foreign_key(u'fk_logentry_user_account_id', 'logentry', 'user', ['account_id'], ['id'])
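Because account is now always populated at write time, neither direction of the schema migration has to touch logentry.account_id: the upgrade no longer drops the foreign key and relaxes the column to NULL, and the downgrade no longer tightens it back. This is the migration the commit message says we get to skip.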
@@ -6,9 +6,10 @@ from datetime import datetime, timedelta, date
 from cachetools import lru_cache
 
 from data.database import LogEntry, LogEntryKind, User, db
 from data.model import config
 
 # TODO: Find a way to get logs without slowing down pagination significantly.
-def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None):
+def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None,
+                ignore=None):
   joined = (LogEntry
             .select(*selections)
             .switch(LogEntry)
@@ -23,6 +24,11 @@ def _logs_query(selections, start_time, end_time, performer=None, repository=Non
   if namespace:
     joined = joined.join(User).where(User.username == namespace)
 
+  if ignore:
+    kind_map = get_log_entry_kinds()
+    ignore_ids = [kind_map[kind_name] for kind_name in ignore]
+    joined = joined.where(~(LogEntry.kind << ignore_ids))
+
   return joined
 
 
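In peewee, `expr << list` compiles to SQL IN and `~` negates it, so the new clause excludes the ignored kinds in the database rather than in Python. A minimal sketch of the effect, with hypothetical kind IDs:

# Hypothetical IDs for the ignored kinds; the resulting WHERE clause is:
#   logentry.kind_id NOT IN (1, 2, 3)
ignore_ids = [1, 2, 3]
query = LogEntry.select().where(~(LogEntry.kind << ignore_ids))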
@@ -31,22 +37,25 @@ def get_log_entry_kinds():
   kind_map = {}
   for kind in LogEntryKind.select():
     kind_map[kind.id] = kind.name
     kind_map[kind.name] = kind.id
 
   return kind_map
 
 
-def get_aggregated_logs(start_time, end_time, performer=None, repository=None, namespace=None):
+def get_aggregated_logs(start_time, end_time, performer=None, repository=None, namespace=None,
+                        ignore=None):
   date = db.extract_date('day', LogEntry.datetime)
   selections = [LogEntry.kind, date.alias('day'), fn.Count(LogEntry.id).alias('count')]
-  query = _logs_query(selections, start_time, end_time, performer, repository, namespace)
+  query = _logs_query(selections, start_time, end_time, performer, repository, namespace, ignore)
   return query.group_by(date, LogEntry.kind)
 
 
-def get_logs_query(start_time, end_time, performer=None, repository=None, namespace=None):
+def get_logs_query(start_time, end_time, performer=None, repository=None, namespace=None,
+                   ignore=None):
   Performer = User.alias()
   selections = [LogEntry, Performer]
 
-  query = _logs_query(selections, start_time, end_time, performer, repository, namespace)
+  query = _logs_query(selections, start_time, end_time, performer, repository, namespace, ignore)
   query = (query.switch(LogEntry)
            .join(Performer, JOIN_LEFT_OUTER,
                  on=(LogEntry.performer == Performer.id).alias('performer')))
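Note that get_log_entry_kinds returns a single dict mapping both directions, so one cached lookup serves both the ignore filter (name to id) and the API views (id to name). A usage sketch:

kind_map = get_log_entry_kinds()
create_kind_id = kind_map['service_key_create']  # name -> database id
kind_name = kind_map[create_kind_id]             # id -> name (round-trips)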
@@ -76,6 +85,10 @@ def log_action(kind_name, user_or_organization_name, performer=None, repository=
   account = None
   if user_or_organization_name is not None:
     account = User.get(User.username == user_or_organization_name).id
+  else:
+    account = config.app_config.get('SERVICE_LOG_ACCOUNT_ID')
+    if account is None:
+      account = User.select(fn.Min(User.id)).tuples().get()[0]
 
   kind = LogEntryKind.get(LogEntryKind.name == kind_name)
   metadata_json = json.dumps(metadata, default=_json_serialize)
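This is the heart of the commit: an account-less (service-level) log entry now resolves to a real user id instead of NULL. A standalone restatement of the fallback order, using only the calls that appear in the hunk above (the helper name is hypothetical, for readability):

def _resolve_log_account(user_or_organization_name):
  # 1. the named user or organization, if one was given
  if user_or_organization_name is not None:
    return User.get(User.username == user_or_organization_name).id
  # 2. the configured service-log account, if set
  account = config.app_config.get('SERVICE_LOG_ACCOUNT_ID')
  if account is None:
    # 3. otherwise, the lowest user id in the database
    account = User.select(fn.Min(User.id)).tuples().get()[0]
  return account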
@@ -16,6 +16,8 @@ from auth import scopes
 from app import avatar
 
 LOGS_PER_PAGE = 20
+SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
+                               'service_key_modify', 'service_key_extend', 'service_key_rotate'])
 
 def log_view(log, kinds):
   view = {
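Every read endpoint below passes this set as ignore, so service-key bookkeeping entries stay out of the user, organization, and repository audit views even though they are now attributed to a real user. A sketch (the namespace value is hypothetical):

# The kind names in the set are resolved to ids inside _logs_query and
# excluded from the results.
get_logs(start_time, end_time, namespace='myorg',
         ignore=SERVICE_LEVEL_LOG_KINDS)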
@@ -79,11 +81,12 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
 
 
 def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
-             page_token=None):
+             page_token=None, ignore=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
   kinds = model.log.get_log_entry_kinds()
   logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
-                                        repository=repository, namespace=namespace)
+                                        repository=repository, namespace=namespace,
+                                        ignore=ignore)
 
   logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
                                                    page_token=page_token, limit=LOGS_PER_PAGE)
@@ -95,12 +98,14 @@ def get_logs(start_time, end_time, performer_name=None, repository=None, namespa
   }, next_page_token
 
 
-def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None):
+def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
+                       ignore=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
 
   kinds = model.log.get_log_entry_kinds()
   aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
-                                                  repository=repository, namespace=namespace)
+                                                  repository=repository, namespace=namespace,
+                                                  ignore=ignore)
 
   return {
     'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs]
@@ -126,7 +131,8 @@ class RepositoryLogs(RepositoryParamResource):
 
     start_time = parsed_args['starttime']
     end_time = parsed_args['endtime']
-    return get_logs(start_time, end_time, repository=repo, page_token=page_token)
+    return get_logs(start_time, end_time, repository=repo, page_token=page_token,
+                    ignore=SERVICE_LEVEL_LOG_KINDS)
 
 
 @resource('/v1/user/logs')
@@ -147,7 +153,7 @@ class UserLogs(ApiResource):
 
     user = get_authenticated_user()
     return get_logs(start_time, end_time, performer_name=performer_name, namespace=user.username,
-                    page_token=page_token)
+                    page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
 
 
 @resource('/v1/organization/<orgname>/logs')
@@ -172,7 +178,7 @@ class OrgLogs(ApiResource):
     end_time = parsed_args['endtime']
 
       return get_logs(start_time, end_time, namespace=orgname, performer_name=performer_name,
-                      page_token=page_token)
+                      page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
 
     raise Unauthorized()
 
@@ -194,7 +200,8 @@ class RepositoryAggregateLogs(RepositoryParamResource):
 
     start_time = parsed_args['starttime']
    end_time = parsed_args['endtime']
-    return get_aggregate_logs(start_time, end_time, repository=repo)
+    return get_aggregate_logs(start_time, end_time, repository=repo,
+                              ignore=SERVICE_LEVEL_LOG_KINDS)
 
 
 @resource('/v1/user/aggregatelogs')
@@ -237,6 +244,6 @@ class OrgAggregateLogs(ApiResource):
     end_time = parsed_args['endtime']
 
       return get_aggregate_logs(start_time, end_time, namespace=orgname,
-                                performer_name=performer_name)
+                                performer_name=performer_name, ignore=SERVICE_LEVEL_LOG_KINDS)
 
     raise Unauthorized()
Binary file not shown.