Implement a worker for batch exporting of usage logs
This will allow customers to request their usage logs for a repository or an entire namespace, and lets us export those logs in a way that does not overload the database, with each step of the export timed.
This commit is contained in:
parent
b8d2e1be9c
commit
8a212728a3
18 changed files with 768 additions and 15 deletions
|
@ -4,7 +4,7 @@ from data import model, database
|
|||
from endpoints.api.logs_models_interface import LogEntryDataInterface, LogEntryPage, LogEntry, AggregatedLogEntry
|
||||
|
||||
|
||||
def _create_log(log):
|
||||
def create_log(log):
|
||||
account_organization = None
|
||||
account_username = None
|
||||
account_email = None
|
||||
|
@ -57,7 +57,7 @@ class PreOCIModel(LogEntryDataInterface):
|
|||
logs, next_page_token = model.modelutil.paginate(logs_query, m,
|
||||
descending=True, page_token=page_token,
|
||||
limit=20)
|
||||
return LogEntryPage([_create_log(log) for log in logs], next_page_token)
|
||||
return LogEntryPage([create_log(log) for log in logs], next_page_token)
|
||||
|
||||
return get_logs(database.LogEntry)
|
||||
|
||||
|
|
Reference in a new issue