Fix logs view and API

- We needed to use an engine-agnostic way to extract the day of a log entry
- Joining with the LogEntryKind table has *horrible* performance in MySQL, so do the kind lookup ourselves
- Limit to 50 logs per page
parent d480a204f5
commit d34afde954

3 changed files with 39 additions and 14 deletions
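Before the diff itself, it helps to see the pattern behind the second bullet in one place: kind names move out of SQL and into a memoized id-to-name map built once per process. A condensed sketch of that pattern, using the models from data.database (the dict comprehension compresses the loop that appears in the diff below):

  from cachetools import lru_cache

  from data.database import LogEntryKind


  @lru_cache(maxsize=1)
  def get_log_entry_kinds():
    # The kinds table is tiny and effectively static, so a single cached
    # SELECT replaces a JOIN on every logs request.
    return {kind.id: kind.name for kind in LogEntryKind.select()}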
@@ -2,15 +2,14 @@ import json
 from peewee import JOIN_LEFT_OUTER, SQL, fn
 from datetime import datetime, timedelta, date
+from cachetools import lru_cache
 
-from data.database import LogEntry, LogEntryKind, User
+from data.database import LogEntry, LogEntryKind, User, db
 
 
 def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None):
   joined = (LogEntry
             .select(*selections)
             .switch(LogEntry)
-            .join(LogEntryKind)
-            .switch(LogEntry)
             .where(LogEntry.datetime >= start_time, LogEntry.datetime < end_time))
 
   if repository:
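peewee builds joins against a movable join context, and .switch(LogEntry) points that context back at LogEntry; with the LogEntryKind join gone, the trailing .switch has nothing to reset and is deleted alongside it. A minimal before/after sketch of the query shape (filters elided, time window hardcoded for illustration):

  from datetime import datetime, timedelta

  from data.database import LogEntry, LogEntryKind

  start_time = datetime.now() - timedelta(days=7)

  # Before: kind names were pulled in SQL via the JOIN (horrible on MySQL here).
  slow = (LogEntry
          .select(LogEntry, LogEntryKind)
          .switch(LogEntry)
          .join(LogEntryKind)
          .switch(LogEntry)
          .where(LogEntry.datetime >= start_time))

  # After: no JOIN; kind ids are mapped to names in Python afterwards.
  fast = (LogEntry
          .select(LogEntry)
          .switch(LogEntry)
          .where(LogEntry.datetime >= start_time))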
@@ -25,17 +24,27 @@ def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None):
   return joined
 
 
+@lru_cache(maxsize=1)
+def get_log_entry_kinds():
+  kind_map = {}
+  for kind in LogEntryKind.select():
+    kind_map[kind.id] = kind.name
+
+  return kind_map
+
+
 def get_aggregated_logs(start_time, end_time, performer=None, repository=None, namespace=None):
-  selections = [LogEntryKind, fn.date(LogEntry.datetime, '%d'), fn.Count(LogEntry.id).alias('count')]
+  date = db.extract_date('day', LogEntry.datetime)
+  selections = [LogEntry.kind, LogEntry.datetime, fn.Count(LogEntry.id).alias('count')]
   query = _logs_query(selections, start_time, end_time, performer, repository, namespace)
-  return query.group_by(fn.date(LogEntry.datetime, '%d'), LogEntryKind)
+  return query.group_by(date, LogEntry.kind)
 
 
 def list_logs(start_time, end_time, performer=None, repository=None, namespace=None, page=None,
               count=None):
   Performer = User.alias()
-  selections = [LogEntry, LogEntryKind, Performer]
+  selections = [LogEntry, Performer]
 
   query = _logs_query(selections, start_time, end_time, performer, repository, namespace)
   query = (query.switch(LogEntry)
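Two things are worth noting in the hunk above. First, fn.date(LogEntry.datetime, '%d') leans on SQLite's strftime-style date() function and has no portable equivalent, which is why the grouping now goes through db.extract_date('day', ...), peewee's hook for letting the active database adapter emit engine-appropriate SQL (on MySQL presumably something like EXTRACT(DAY FROM ...); the exact rendering is an assumption here). Second, the local name date shadows the date imported from datetime inside this function, harmless here but easy to trip over. A small sketch of consuming the grouped rows together with the cached kind map:

  from datetime import datetime, timedelta

  from data import model

  end_time = datetime.now()
  start_time = end_time - timedelta(days=7)

  kinds = model.log.get_log_entry_kinds()
  for row in model.log.get_aggregated_logs(start_time, end_time):
    # Each row carries the raw kind_id, a representative datetime, and the
    # aliased count; the kind name comes from the cached map, not from SQL.
    print('%s: %s events on day %s' % (kinds[row.kind_id], row.count, row.datetime))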
@@ -14,11 +14,11 @@ from data import model
 from auth import scopes
 from app import avatar
 
-LOGS_PER_PAGE = 500
+LOGS_PER_PAGE = 50
 
-def log_view(log):
+def log_view(log, kinds):
   view = {
-    'kind': log.kind.name,
+    'kind': kinds[log.kind_id],
     'metadata': json.loads(log.metadata_json),
     'ip': log.ip,
     'datetime': format_date(log.datetime),
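The old 'kind': log.kind.name only worked because the query joined LogEntryKind; with the join gone, touching log.kind would make peewee lazily issue one extra SELECT per row, turning a 50-row page into 51 queries. Reading the already-fetched FK column and resolving it through the map keeps it at one. The idiom in isolation (kinds being the map returned by get_log_entry_kinds()):

  def kind_name(log, kinds):
    # log.kind.name would lazily SELECT the kind row for every log entry;
    # log.kind_id is already in memory, so this is a plain dict lookup.
    return kinds[log.kind_id]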
@@ -34,9 +34,9 @@ def log_view(log):
   return view
 
-def aggregated_log_view(log):
+def aggregated_log_view(log, kinds):
   view = {
-    'kind': log.kind.name,
+    'kind': kinds[log.kind_id],
     'count': log.count,
     'datetime': format_date(log.datetime)
   }
@@ -73,13 +73,14 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
 def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, page=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
   page = page if page else 1
+  kinds = model.log.get_log_entry_kinds()
   logs = model.log.list_logs(start_time, end_time, performer=performer, repository=repository,
                              namespace=namespace, page=page, count=LOGS_PER_PAGE + 1)
 
   return {
     'start_time': format_date(start_time),
     'end_time': format_date(end_time),
-    'logs': [log_view(log) for log in logs[0:LOGS_PER_PAGE]],
+    'logs': [log_view(log, kinds) for log in logs[0:LOGS_PER_PAGE]],
     'page': page,
     'has_additional': len(logs) > LOGS_PER_PAGE,
   }
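LOGS_PER_PAGE + 1 is the standard look-ahead pagination trick: fetch one row more than a page, render at most a page, and let the surplus row answer whether a next page exists without a separate COUNT(*). A generic sketch, where fetch is a hypothetical callable standing in for model.log.list_logs:

  LOGS_PER_PAGE = 50

  def paginate(fetch, page=1):
    rows = fetch(page=page, count=LOGS_PER_PAGE + 1)  # one look-ahead row
    return {
      'rows': rows[:LOGS_PER_PAGE],                 # the extra row is never shown
      'page': page,
      'has_additional': len(rows) > LOGS_PER_PAGE,  # surplus row => next page
    }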
@@ -87,11 +88,12 @@ def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, page=None):
 def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
 
+  kinds = model.log.get_log_entry_kinds()
   aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                   repository=repository, namespace=namespace)
 
   return {
-    'aggregated': [aggregated_log_view(log) for log in aggregated_logs]
+    'aggregated': [aggregated_log_view(log, kinds) for log in aggregated_logs]
   }
@@ -35,7 +35,7 @@ from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Sign
 from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList
 from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList
-from endpoints.api.logs import UserLogs, OrgLogs
+from endpoints.api.logs import UserLogs, OrgLogs, OrgAggregateLogs, UserAggregateLogs
 from endpoints.api.billing import (UserCard, UserPlan, ListPlans, OrganizationCard,
                                    OrganizationPlan)
 from endpoints.api.discovery import DiscoveryResource
@@ -2569,6 +2569,20 @@ class TestLogs(ApiTestCase):
     assert 'start_time' in json
     assert 'end_time' in json
 
+  def test_user_aggregate_logs(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    json = self.getJsonResponse(UserAggregateLogs)
+    assert 'aggregated' in json
+
+
+  def test_org_logs(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    json = self.getJsonResponse(OrgAggregateLogs, params=dict(orgname=ORGANIZATION))
+    assert 'aggregated' in json
+
+
   def test_performer(self):
     self.login(ADMIN_ACCESS_USER)