Interface out all action log data model operations
This will allow us to reimplement the logs data model against a non-database system in the near future
This commit is contained in:
parent
a156c91962
commit
b773a18ed8
26 changed files with 714 additions and 902 deletions
|
@ -4,7 +4,7 @@ import os
|
|||
import string
|
||||
import socket
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
from random import SystemRandom
|
||||
|
||||
from flask import request, make_response, jsonify
|
||||
|
@ -16,6 +16,7 @@ from auth import scopes
|
|||
from auth.auth_context import get_authenticated_user
|
||||
from auth.permissions import SuperUserPermission
|
||||
from data.database import ServiceKeyApprovalType
|
||||
from data.logs_model import logs_model
|
||||
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
|
||||
internal_only, require_scope, show_if, parse_args,
|
||||
query_param, require_fresh_login, path_param, verify_not_prod,
|
||||
|
@ -25,36 +26,13 @@ from endpoints.api.build import get_logs_or_log_url
|
|||
from endpoints.api.superuser_models_pre_oci import (pre_oci_model, ServiceKeyDoesNotExist,
|
||||
ServiceKeyAlreadyApproved,
|
||||
InvalidRepositoryBuildException)
|
||||
from endpoints.api.logs_models_pre_oci import pre_oci_model as log_model
|
||||
from endpoints.api.logs import _validate_logs_arguments
|
||||
from util.useremails import send_confirmation_email, send_recovery_email
|
||||
from _init import ROOT_DIR
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _validate_logs_arguments(start_time, end_time):
|
||||
if start_time:
|
||||
try:
|
||||
start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
|
||||
except ValueError:
|
||||
start_time = None
|
||||
|
||||
if not start_time:
|
||||
start_time = datetime.today() - timedelta(7) # One week
|
||||
|
||||
if end_time:
|
||||
try:
|
||||
end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
|
||||
end_time = end_time + timedelta(days=1)
|
||||
except ValueError:
|
||||
end_time = None
|
||||
|
||||
if not end_time:
|
||||
end_time = datetime.today()
|
||||
|
||||
return start_time, end_time
|
||||
|
||||
|
||||
def get_immediate_subdirectories(directory):
  """Return the names (not full paths) of the directories directly under *directory*.

  Regular files and anything nested deeper than one level are excluded;
  order follows os.listdir.
  """
  subdirs = []
  for entry in os.listdir(directory):
    if os.path.isdir(os.path.join(directory, entry)):
      subdirs.append(entry)
  return subdirs
|
||||
|
||||
|
@ -134,10 +112,9 @@ class SuperUserAggregateLogs(ApiResource):
|
|||
if SuperUserPermission().can():
|
||||
(start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
|
||||
parsed_args['endtime'])
|
||||
aggregated_logs = log_model.get_aggregated_logs(start_time, end_time)
|
||||
kinds = log_model.get_log_entry_kinds()
|
||||
aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time)
|
||||
return {
|
||||
'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
|
||||
'aggregated': [log.to_dict() for log in aggregated_logs]
|
||||
}
|
||||
|
||||
raise Unauthorized()
|
||||
|
@ -166,13 +143,12 @@ class SuperUserLogs(ApiResource):
|
|||
end_time = parsed_args['endtime']
|
||||
|
||||
(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
|
||||
log_page = log_model.get_logs_query(start_time, end_time, page_token=page_token)
|
||||
kinds = log_model.get_log_entry_kinds()
|
||||
log_entry_page = logs_model.lookup_logs(start_time, end_time, page_token=page_token)
|
||||
return {
|
||||
'start_time': format_date(start_time),
|
||||
'end_time': format_date(end_time),
|
||||
'logs': [log.to_dict(kinds, include_namespace=True) for log in log_page.logs],
|
||||
}, log_page.next_page_token
|
||||
'logs': [log.to_dict(avatar, include_namespace=True) for log in log_entry_page.logs],
|
||||
}, log_entry_page.next_page_token
|
||||
|
||||
raise Unauthorized()
|
||||
|
||||
|
|
Reference in a new issue