Merge pull request #3323 from quay/joseph.schorr/QUAY-1282/log-interfacing

Interface out all action log data model operations
Joseph Schorr 2019-01-28 15:09:25 -05:00 committed by GitHub
commit 9f09d68ad8
26 changed files with 714 additions and 902 deletions


@@ -18,6 +18,8 @@ from auth import scopes
from auth.auth_context import (get_authenticated_context, get_authenticated_user,
get_validated_oauth_token)
from auth.decorators import process_oauth
from data.logs_model import logs_model
from data import model as data_model
from endpoints.csrf import csrf_protect
from endpoints.exception import (Unauthorized, InvalidRequest, InvalidResponse,
FreshLoginRequired, NotFound)
@@ -365,7 +367,7 @@ def request_error(exception=None, **kwargs):
def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
if not metadata:
metadata = {}
oauth_token = get_validated_oauth_token()
if oauth_token:
metadata['oauth_token_id'] = oauth_token.id
@@ -373,11 +375,15 @@ def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
metadata['oauth_token_application'] = oauth_token.application.name
performer = get_authenticated_user()
if repo:
repo_name = repo.name
model.log_action(kind, user_or_orgname, repo_name, performer, request.remote_addr, metadata)
if repo_name is not None:
repo = data_model.repository.get_repository(user_or_orgname, repo_name)
logs_model.log_action(kind, user_or_orgname,
repository=repo,
performer=performer,
ip=request.remote_addr,
metadata=metadata)
def define_json_response(schema_name):
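The effect of this hunk is that endpoint code no longer calls model.log directly; all writes go through the logs_model facade. A minimal, self-contained sketch of the pattern (illustrative names only, not Quay's actual classes):

from abc import ABCMeta, abstractmethod
from six import add_metaclass


@add_metaclass(ABCMeta)
class ActionLogsDataInterface(object):
  """ Interface for all action-log data model operations. """

  @abstractmethod
  def log_action(self, kind_name, namespace_name, performer=None, repository=None,
                 ip=None, metadata=None):
    """ Logs a single action under the given namespace. """


class InMemoryLogsModel(ActionLogsDataInterface):
  """ Toy backend; the real facade binds a database-backed model. """

  def __init__(self):
    self.entries = []

  def log_action(self, kind_name, namespace_name, performer=None, repository=None,
                 ip=None, metadata=None):
    self.entries.append(dict(kind=kind_name, namespace=namespace_name,
                             performer=performer, repository=repository,
                             ip=ip, metadata=metadata or {}))


logs_model = InMemoryLogsModel()
logs_model.log_action('push_repo', 'devtable', ip='127.0.0.1')
assert logs_model.entries[0]['kind'] == 'push_repo'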


@@ -1,18 +1,19 @@
from __init__models_interface import InitDataInterface
from data import model
from data.logs_model import logs_model
class PreOCIModel(InitDataInterface):
def is_app_repository(self, namespace_name, repository_name):
return model.repository.get_repository(namespace_name, repository_name, kind_filter='application') is not None
return model.repository.get_repository(namespace_name, repository_name,
kind_filter='application') is not None
def repository_is_public(self, namespace_name, repository_name):
return model.repository.repository_is_public(namespace_name, repository_name)
def log_action(self, kind, namespace_name, repository_name, performer, ip, metadata):
repository = model.repository.get_repository(namespace_name, repository_name)
model.log.log_action(kind, namespace_name, performer=performer, ip=ip, metadata=metadata, repository=repository)
logs_model.log_action(kind, namespace_name, performer=performer, ip=ip, metadata=metadata,
repository=repository)
pre_oci_model = PreOCIModel()


@@ -1,81 +1,70 @@
""" Access usage logs for organizations or repositories. """
import json
import uuid
from datetime import datetime, timedelta
from flask import request
import features
from app import export_action_logs_queue
from app import app, export_action_logs_queue, avatar
from auth.permissions import AdministerOrganizationPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
from data.logs_model import logs_model
from data.registry_model import registry_model
from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
RepositoryParamResource, require_repo_admin, related_user_resource,
format_date, require_user_admin, path_param, require_scope, page_support,
validate_json_request, InvalidRequest, show_if)
from data import model as data_model
from endpoints.api.logs_models_pre_oci import pre_oci_model as model
from endpoints.exception import Unauthorized, NotFound
LOGS_PER_PAGE = 20
SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
'service_key_modify', 'service_key_extend', 'service_key_rotate'])
def _parse_datetime(dt_string):
if not dt_string:
return None
try:
return datetime.strptime(dt_string + ' UTC', '%m/%d/%Y %Z')
except ValueError:
return None
def _validate_logs_arguments(start_time, end_time):
if start_time:
try:
start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
except ValueError:
start_time = None
if not start_time:
start_time = datetime.today() - timedelta(7) # One week
if end_time:
try:
end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
end_time = end_time + timedelta(days=1)
except ValueError:
end_time = None
if not end_time:
end_time = datetime.today()
start_time = _parse_datetime(start_time) or (datetime.today() - timedelta(days=1))
end_time = _parse_datetime(end_time) or datetime.today()
end_time = end_time + timedelta(days=1)
return start_time, end_time
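A runnable restatement of the consolidated parsing rules above (not part of the diff): a missing or malformed bound falls back to a default, and the end bound is widened by one day so the final calendar day is included.

from datetime import datetime, timedelta

def _parse_datetime(dt_string):
  if not dt_string:
    return None
  try:
    return datetime.strptime(dt_string + ' UTC', '%m/%d/%Y %Z')
  except ValueError:
    return None

assert _parse_datetime('01/28/2019') == datetime(2019, 1, 28)
assert _parse_datetime('not-a-date') is None

start_time = _parse_datetime(None) or (datetime.today() - timedelta(days=1))
end_time = (_parse_datetime(None) or datetime.today()) + timedelta(days=1)
assert start_time < end_time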
def get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
page_token=None, ignore=None):
def _get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
page_token=None, filter_kinds=None):
(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
kinds = model.get_log_entry_kinds()
log_entry_page = model.get_logs_query(start_time, end_time, performer_name, repository_name,
namespace_name, ignore, page_token)
log_entry_page = logs_model.lookup_logs(start_time, end_time, performer_name, repository_name,
namespace_name, filter_kinds, page_token,
app.config['ACTION_LOG_MAX_PAGE'])
include_namespace = namespace_name is None and repository_name is None
return {
'start_time': format_date(start_time),
'end_time': format_date(end_time),
'logs': [log.to_dict(kinds, include_namespace) for log in log_entry_page.logs],
'logs': [log.to_dict(avatar, include_namespace) for log in log_entry_page.logs],
}, log_entry_page.next_page_token
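A sketch of the page-token contract implied above (assumes the _get_logs helper in this file; not part of the diff): callers feed next_page_token back in until it comes back as None.

def iter_all_logs(start_time, end_time, **filters):
  token = None
  while True:
    body, token = _get_logs(start_time, end_time, page_token=token, **filters)
    for log in body['logs']:
      yield log
    if token is None:
      return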
def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
ignore=None):
def _get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
filter_kinds=None):
(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
kinds = model.get_log_entry_kinds()
aggregated_logs = model.get_aggregated_logs(start_time, end_time, performer_name=performer_name,
repository_name=repository, namespace_name=namespace,
ignore=ignore)
aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time,
performer_name=performer_name,
repository_name=repository,
namespace_name=namespace,
filter_kinds=filter_kinds)
return {
'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
'aggregated': [log.to_dict() for log in aggregated_logs]
}
@@ -87,18 +76,20 @@ class RepositoryLogs(RepositoryParamResource):
@require_repo_admin
@nickname('listRepoLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. The time should be formatted "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time to which to get logs. The time should be formatted "%m/%d/%Y" in UTC.', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@page_support()
def get(self, namespace, repository, page_token, parsed_args):
""" List the logs for the specified repository. """
if model.repo_exists(namespace, repository) is False:
if registry_model.lookup_repository(namespace, repository) is None:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
return get_logs(start_time, end_time, repository_name=repository, page_token=page_token,
namespace_name=namespace)
return _get_logs(start_time, end_time,
repository_name=repository,
page_token=page_token,
namespace_name=namespace)
@resource('/v1/user/logs')
@@ -108,8 +99,8 @@ class UserLogs(ApiResource):
@require_user_admin
@nickname('listUserLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
@page_support()
def get(self, parsed_args, page_token):
@@ -119,9 +110,11 @@ class UserLogs(ApiResource):
end_time = parsed_args['endtime']
user = get_authenticated_user()
return get_logs(start_time, end_time, performer_name=performer_name,
namespace_name=user.username, page_token=page_token,
ignore=SERVICE_LEVEL_LOG_KINDS)
return _get_logs(start_time, end_time,
performer_name=performer_name,
namespace_name=user.username,
page_token=page_token,
filter_kinds=SERVICE_LEVEL_LOG_KINDS)
@resource('/v1/organization/<orgname>/logs')
@@ -132,8 +125,8 @@ class OrgLogs(ApiResource):
@nickname('listOrgLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
@page_support()
@require_scope(scopes.ORG_ADMIN)
@@ -145,8 +138,10 @@ class OrgLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
return get_logs(start_time, end_time, namespace_name=orgname, performer_name=performer_name,
page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
return _get_logs(start_time, end_time,
namespace_name=orgname,
performer_name=performer_name,
page_token=page_token)
raise Unauthorized()
@@ -160,16 +155,18 @@ class RepositoryAggregateLogs(RepositoryParamResource):
@require_repo_admin
@nickname('getAggregateRepoLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
def get(self, namespace, repository, parsed_args):
""" Returns the aggregated logs for the specified repository. """
if model.repo_exists(namespace, repository) is False:
if registry_model.lookup_repository(namespace, repository) is None:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
return get_aggregate_logs(start_time, end_time, repository=repository, namespace=namespace)
return _get_aggregate_logs(start_time, end_time,
repository=repository,
namespace=namespace)
@resource('/v1/user/aggregatelogs')
@@ -180,8 +177,8 @@ class UserAggregateLogs(ApiResource):
@require_user_admin
@nickname('getAggregateUserLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
def get(self, parsed_args):
""" Returns the aggregated logs for the current user. """
@@ -190,8 +187,10 @@ class UserAggregateLogs(ApiResource):
end_time = parsed_args['endtime']
user = get_authenticated_user()
return get_aggregate_logs(start_time, end_time, performer_name=performer_name,
namespace=user.username, ignore=SERVICE_LEVEL_LOG_KINDS)
return _get_aggregate_logs(start_time, end_time,
performer_name=performer_name,
namespace=user.username,
filter_kinds=SERVICE_LEVEL_LOG_KINDS)
@resource('/v1/organization/<orgname>/aggregatelogs')
@@ -203,8 +202,8 @@ class OrgAggregateLogs(ApiResource):
@nickname('getAggregateOrgLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
@require_scope(scopes.ORG_ADMIN)
def get(self, orgname, parsed_args):
@@ -215,46 +214,13 @@ class OrgAggregateLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
return get_aggregate_logs(start_time, end_time, namespace=orgname,
performer_name=performer_name, ignore=SERVICE_LEVEL_LOG_KINDS)
return _get_aggregate_logs(start_time, end_time,
namespace=orgname,
performer_name=performer_name)
raise Unauthorized()
def queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
export_id = str(uuid.uuid4())
namespace = data_model.user.get_namespace_user(namespace_name)
if namespace is None:
raise InvalidRequest('Unknown namespace')
repository = None
if repository_name is not None:
repository = data_model.repository.get_repository(namespace_name, repository_name)
if repository is None:
raise InvalidRequest('Unknown repository')
callback_url = options.get('callback_url')
if callback_url:
if not callback_url.startswith('https://') and not callback_url.startswith('http://'):
raise InvalidRequest('Invalid callback URL')
export_action_logs_queue.put([namespace_name], json.dumps({
'export_id': export_id,
'repository_id': repository.id if repository else None,
'namespace_id': namespace.id,
'namespace_name': namespace.username,
'repository_name': repository.name if repository else None,
'start_time': start_time,
'end_time': end_time,
'callback_url': callback_url,
'callback_email': options.get('callback_email'),
}), retries_remaining=3)
return {
'export_id': export_id,
}
EXPORT_LOGS_SCHEMA = {
'type': 'object',
'description': 'Configuration for an export logs operation',
@@ -271,6 +237,27 @@ EXPORT_LOGS_SCHEMA = {
}
def _queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
callback_url = options.get('callback_url')
if callback_url:
if not callback_url.startswith('https://') and not callback_url.startswith('http://'):
raise InvalidRequest('Invalid callback URL')
callback_email = options.get('callback_email')
if callback_email:
if callback_email.find('@') < 0:
raise InvalidRequest('Invalid callback e-mail')
(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
export_id = logs_model.queue_logs_export(start_time, end_time, export_action_logs_queue,
namespace_name, repository_name, callback_url,
callback_email)
if export_id is None:
raise InvalidRequest('Invalid export request')
return export_id
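The callback checks above, restated as a standalone function for clarity (ValueError stands in for the API layer's InvalidRequest):

def validate_export_callbacks(options):
  callback_url = options.get('callback_url')
  if callback_url and not (callback_url.startswith('https://') or
                           callback_url.startswith('http://')):
    raise ValueError('Invalid callback URL')

  callback_email = options.get('callback_email')
  if callback_email and '@' not in callback_email:
    raise ValueError('Invalid callback e-mail')

  return callback_url, callback_email

assert validate_export_callbacks({'callback_url': 'https://example.com/hook'}) == \
    ('https://example.com/hook', None)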
@resource('/v1/repository/<apirepopath:repository>/exportlogs')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class ExportRepositoryLogs(RepositoryParamResource):
@@ -282,18 +269,21 @@ class ExportRepositoryLogs(RepositoryParamResource):
@require_repo_admin
@nickname('exportRepoLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@validate_json_request('ExportLogs')
def post(self, namespace, repository, parsed_args):
""" Queues an export of the logs for the specified repository. """
if model.repo_exists(namespace, repository) is False:
if registry_model.lookup_repository(namespace, repository) is None:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
return queue_logs_export(start_time, end_time, request.get_json(), namespace,
repository_name=repository)
export_id = _queue_logs_export(start_time, end_time, request.get_json(), namespace,
repository_name=repository)
return {
'export_id': export_id,
}
@resource('/v1/user/exportlogs')
@@ -306,8 +296,8 @@ class ExportUserLogs(ApiResource):
@require_user_admin
@nickname('exportUserLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@validate_json_request('ExportLogs')
def post(self, parsed_args):
""" Returns the aggregated logs for the current user. """
@@ -315,7 +305,10 @@ class ExportUserLogs(ApiResource):
end_time = parsed_args['endtime']
user = get_authenticated_user()
return queue_logs_export(start_time, end_time, request.get_json(), user.username)
export_id = _queue_logs_export(start_time, end_time, request.get_json(), user.username)
return {
'export_id': export_id,
}
@resource('/v1/organization/<orgname>/exportlogs')
@@ -330,8 +323,8 @@ class ExportOrgLogs(ApiResource):
@nickname('exportOrgLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
@query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@require_scope(scopes.ORG_ADMIN)
@validate_json_request('ExportLogs')
def post(self, orgname, parsed_args):
@@ -341,6 +334,9 @@ class ExportOrgLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
return queue_logs_export(start_time, end_time, request.get_json(), orgname)
export_id = _queue_logs_export(start_time, end_time, request.get_json(), orgname)
return {
'export_id': export_id,
}
raise Unauthorized()


@@ -1,141 +0,0 @@
import json
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from datetime import datetime
from dateutil.relativedelta import relativedelta
from six import add_metaclass
from tzlocal import get_localzone
from app import avatar
from endpoints.api import format_date
from util.morecollections import AttrDict
class LogEntry(
namedtuple('LogEntry', [
'metadata_json', 'ip', 'datetime', 'performer_email', 'performer_username', 'performer_robot',
'account_organization', 'account_username', 'account_email', 'account_robot', 'kind_id'
])):
"""
LogEntry represents a single log entry.
:type metadata_json: string
:type ip: string
:type datetime: string
:type performer_email: string
:type performer_username: string
:type performer_robot: boolean
:type account_organization: boolean
:type account_username: string
:type account_email: string
:type account_robot: boolean
:type kind_id: int
"""
def to_dict(self, kinds, include_namespace):
view = {
'kind': kinds[self.kind_id],
'metadata': json.loads(self.metadata_json),
'ip': self.ip,
'datetime': format_date(self.datetime),
}
if self.performer_username:
performer = AttrDict({'username': self.performer_username, 'email': self.performer_email})
performer.robot = None
if self.performer_robot:
performer.robot = self.performer_robot
view['performer'] = {
'kind': 'user',
'name': self.performer_username,
'is_robot': self.performer_robot,
'avatar': avatar.get_data_for_user(performer),
}
if include_namespace:
if self.account_username:
account = AttrDict({'username': self.account_username, 'email': self.account_email})
if self.account_organization:
view['namespace'] = {
'kind': 'org',
'name': self.account_username,
'avatar': avatar.get_data_for_org(account),
}
else:
account.robot = None
if self.account_robot:
account.robot = self.account_robot
view['namespace'] = {
'kind': 'user',
'name': self.account_username,
'avatar': avatar.get_data_for_user(account),
}
return view
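For reference, the view produced by to_dict above has roughly this shape (every value here is invented; the avatar payload depends on the avatar backend):

example_view = {
  'kind': 'push_repo',
  'metadata': {'repo': 'example'},
  'ip': '192.0.2.1',
  'datetime': 'Mon, 28 Jan 2019 20:09:25 -0000',
  'performer': {
    'kind': 'user',
    'name': 'devtable',
    'is_robot': False,
    'avatar': {'name': 'devtable', 'kind': 'user'},
  },
  'namespace': {
    'kind': 'org',
    'name': 'buynlarge',
    'avatar': {'name': 'buynlarge', 'kind': 'org'},
  },
}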
class LogEntryPage(
namedtuple('LogEntryPage', ['logs', 'next_page_token'])):
"""
LogEntryPage represents a single page of logs.
:type logs: [LogEntry]
:type next_page_token: {any -> any}
"""
class AggregatedLogEntry(
namedtuple('AggregatedLogEntry', ['count', 'kind_id', 'day'])):
"""
AggregatedLogEntry represents an aggregated view of logs.
:type count: int
:type kind_id: int
:type day: string
"""
def to_dict(self, kinds, start_time):
synthetic_date = datetime(start_time.year, start_time.month, int(self.day), tzinfo=get_localzone())
if synthetic_date.day < start_time.day:
synthetic_date = synthetic_date + relativedelta(months=1)
view = {
'kind': kinds[self.kind_id],
'count': self.count,
'datetime': format_date(synthetic_date),
}
return view
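A runnable restatement of the synthetic-date logic above (dropping the tzlocal detail): aggregated rows keep only a day-of-month, so a full date is rebuilt from the query's start_time, rolling into the next month when the day precedes the start day.

from datetime import datetime
from dateutil.relativedelta import relativedelta

def synthesize_date(start_time, day):
  synthetic = datetime(start_time.year, start_time.month, int(day))
  if synthetic.day < start_time.day:
    synthetic += relativedelta(months=1)
  return synthetic

assert synthesize_date(datetime(2019, 1, 20), '5') == datetime(2019, 2, 5)
assert synthesize_date(datetime(2019, 1, 20), '25') == datetime(2019, 1, 25)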
@add_metaclass(ABCMeta)
class LogEntryDataInterface(object):
"""
Interface that represents all data store interactions required by a Log.
"""
@abstractmethod
def get_logs_query(self, start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
ignore=None, page_token=None):
"""
Returns a LogEntryPage.
"""
@abstractmethod
def get_log_entry_kinds(self):
"""
Returns a map of LogEntryKind id -> name and name -> id
"""
@abstractmethod
def repo_exists(self, namespace_name, repository_name):
"""
Returns whether or not a repo exists.
"""
@abstractmethod
def get_aggregated_logs(self, start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
ignore=None):
"""
Returns a list of aggregated logs
"""


@@ -1,122 +0,0 @@
import itertools
from app import app
from data import model, database
from endpoints.api.logs_models_interface import LogEntryDataInterface, LogEntryPage, LogEntry, AggregatedLogEntry
def create_log(log):
account_organization = None
account_username = None
account_email = None
account_robot = None
try:
account_organization = log.account.organization
account_username = log.account.username
account_email = log.account.email
account_robot = log.account.robot
except AttributeError:
pass
performer_robot = None
performer_username = None
performer_email = None
try:
performer_robot = log.performer.robot
performer_username = log.performer.username
performer_email = log.performer.email
except AttributeError:
pass
return LogEntry(log.metadata_json, log.ip, log.datetime, performer_email, performer_username,
performer_robot, account_organization, account_username,
account_email, account_robot, log.kind_id)
class PreOCIModel(LogEntryDataInterface):
"""
PreOCIModel implements the action-log data model using the database schema
from before it was changed to support the OCI specification.
"""
def get_logs_query(self, start_time, end_time, performer_name=None, repository_name=None,
namespace_name=None, ignore=None, page_token=None):
repo = None
if repository_name and namespace_name:
repo = model.repository.get_repository(namespace_name, repository_name)
performer = None
if performer_name:
performer = model.user.get_user(performer_name)
# TODO(LogMigrate): Remove the branch once we're back on a single table.
def get_logs(m):
logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
repository=repo, namespace=namespace_name,
ignore=ignore, model=m)
logs, next_page_token = model.modelutil.paginate(logs_query, m,
descending=True, page_token=page_token,
limit=20,
max_page=app.config['ACTION_LOG_MAX_PAGE'])
return LogEntryPage([create_log(log) for log in logs], next_page_token)
# First check the LogEntry3 table for the most recent logs, unless we've been expressly told
# to look inside the other tables.
TOKEN_TABLE_ID = 'tti'
tables = [database.LogEntry3, database.LogEntry2, database.LogEntry]
table_index = 0
table_specified = page_token is not None and page_token.get(TOKEN_TABLE_ID) is not None
if table_specified:
table_index = page_token.get(TOKEN_TABLE_ID)
page_result = get_logs(tables[table_index])
if page_result.next_page_token is None and table_index < len(tables) - 1:
page_result = page_result._replace(next_page_token={TOKEN_TABLE_ID: table_index + 1})
return page_result
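A self-contained sketch of the fallback scheme above: LogEntry3 is scanned first, and when a table is exhausted the page token records the index of the next table to scan (toy fetch_page; the real code pages through Peewee queries).

TOKEN_TABLE_ID = 'tti'

def paginate_tables(tables, fetch_page, page_token=None):
  table_index = (page_token or {}).get(TOKEN_TABLE_ID, 0)
  logs, next_token = fetch_page(tables[table_index], page_token)
  if next_token is None and table_index < len(tables) - 1:
    next_token = {TOKEN_TABLE_ID: table_index + 1}
  return logs, next_token

tables = [['entry-from-LogEntry3'], ['entry-from-LogEntry2'], []]
fetch_page = lambda table, token: (table, None)  # one page per table
logs, token = paginate_tables(tables, fetch_page)
assert logs == ['entry-from-LogEntry3'] and token == {TOKEN_TABLE_ID: 1}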
def get_log_entry_kinds(self):
return model.log.get_log_entry_kinds()
def repo_exists(self, namespace_name, repository_name):
repo = model.repository.get_repository(namespace_name, repository_name)
if repo is None:
return False
return True
def get_aggregated_logs(self, start_time, end_time, performer_name=None, repository_name=None,
namespace_name=None, ignore=None):
repo = None
if repository_name and namespace_name:
repo = model.repository.get_repository(namespace_name, repository_name)
performer = None
if performer_name:
performer = model.user.get_user(performer_name)
# TODO(LogMigrate): Remove the branch once we're back on a single table.
aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
repository=repo, namespace=namespace_name,
ignore=ignore, model=database.LogEntry)
aggregated_logs_2 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
repository=repo, namespace=namespace_name,
ignore=ignore, model=database.LogEntry2)
aggregated_logs_3 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
repository=repo, namespace=namespace_name,
ignore=ignore, model=database.LogEntry3)
entries = {}
for log in itertools.chain(aggregated_logs, aggregated_logs_2, aggregated_logs_3):
key = '%s-%s' % (log.kind_id, log.day)
if key in entries:
entries[key] = AggregatedLogEntry(log.count + entries[key].count, log.kind_id, log.day)
else:
entries[key] = AggregatedLogEntry(log.count, log.kind_id, log.day)
return entries.values()
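A runnable restatement of the merge above: per-(kind, day) counts from the three tables are summed into a single aggregate, matching the 6 + 12 + 3 = 21 case exercised by the test at the bottom of this commit.

import itertools
from collections import namedtuple

AggregatedLogEntry = namedtuple('AggregatedLogEntry', ['count', 'kind_id', 'day'])

def merge_counts(*per_table_results):
  entries = {}
  for log in itertools.chain(*per_table_results):
    key = '%s-%s' % (log.kind_id, log.day)
    if key in entries:
      entries[key] = AggregatedLogEntry(entries[key].count + log.count, log.kind_id, log.day)
    else:
      entries[key] = AggregatedLogEntry(log.count, log.kind_id, log.day)
  return list(entries.values())

merged = merge_counts([AggregatedLogEntry(6, 4, '1')],
                      [AggregatedLogEntry(12, 4, '1')],
                      [AggregatedLogEntry(3, 4, '1')])
assert merged == [AggregatedLogEntry(21, 4, '1')]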
pre_oci_model = PreOCIModel()


@@ -4,7 +4,7 @@ import os
import string
import socket
from datetime import datetime, timedelta
from datetime import datetime
from random import SystemRandom
from flask import request, make_response, jsonify
@@ -16,6 +16,7 @@ from auth import scopes
from auth.auth_context import get_authenticated_user
from auth.permissions import SuperUserPermission
from data.database import ServiceKeyApprovalType
from data.logs_model import logs_model
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
internal_only, require_scope, show_if, parse_args,
query_param, require_fresh_login, path_param, verify_not_prod,
@@ -25,36 +26,13 @@ from endpoints.api.build import get_logs_or_log_url
from endpoints.api.superuser_models_pre_oci import (pre_oci_model, ServiceKeyDoesNotExist,
ServiceKeyAlreadyApproved,
InvalidRepositoryBuildException)
from endpoints.api.logs_models_pre_oci import pre_oci_model as log_model
from endpoints.api.logs import _validate_logs_arguments
from util.useremails import send_confirmation_email, send_recovery_email
from _init import ROOT_DIR
logger = logging.getLogger(__name__)
def _validate_logs_arguments(start_time, end_time):
if start_time:
try:
start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
except ValueError:
start_time = None
if not start_time:
start_time = datetime.today() - timedelta(7) # One week
if end_time:
try:
end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
end_time = end_time + timedelta(days=1)
except ValueError:
end_time = None
if not end_time:
end_time = datetime.today()
return start_time, end_time
def get_immediate_subdirectories(directory):
return [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))]
@@ -134,10 +112,9 @@ class SuperUserAggregateLogs(ApiResource):
if SuperUserPermission().can():
(start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
parsed_args['endtime'])
aggregated_logs = log_model.get_aggregated_logs(start_time, end_time)
kinds = log_model.get_log_entry_kinds()
aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time)
return {
'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
'aggregated': [log.to_dict() for log in aggregated_logs]
}
raise Unauthorized()
@@ -166,13 +143,12 @@ class SuperUserLogs(ApiResource):
end_time = parsed_args['endtime']
(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
log_page = log_model.get_logs_query(start_time, end_time, page_token=page_token)
kinds = log_model.get_log_entry_kinds()
log_entry_page = logs_model.lookup_logs(start_time, end_time, page_token=page_token)
return {
'start_time': format_date(start_time),
'end_time': format_date(end_time),
'logs': [log.to_dict(kinds, include_namespace=True) for log in log_page.logs],
}, log_page.next_page_token
'logs': [log.to_dict(avatar, include_namespace=True) for log in log_entry_page.logs],
}, log_entry_page.next_page_token
raise Unauthorized()


@@ -208,120 +208,12 @@ class Organization(namedtuple('Organization', ['username', 'email'])):
}
class LogEntry(
namedtuple('LogEntry', [
'metadata_json', 'ip', 'datetime', 'performer_email', 'performer_username', 'performer_robot',
'account_organization', 'account_username', 'account_email', 'account_robot', 'kind',
])):
"""
LogEntry represents a single log entry.
:type metadata_json: string
:type ip: string
:type datetime: string
:type performer_email: string
:type performer_username: string
:type performer_robot: boolean
:type account_organization: boolean
:type account_username: string
:type account_email: string
:type account_robot: boolean
:type kind: string
"""
def to_dict(self):
view = {
'kind': self.kind,
'metadata': json.loads(self.metadata_json),
'ip': self.ip,
'datetime': format_date(self.datetime),
}
if self.performer_username:
performer = AttrDict({'username': self.performer_username, 'email': self.performer_email})
performer.robot = None
if self.performer_robot:
performer.robot = self.performer_robot
view['performer'] = {
'kind': 'user',
'name': self.performer_username,
'is_robot': self.performer_robot,
'avatar': avatar.get_data_for_user(performer),
}
if self.account_username:
account = AttrDict({'username': self.account_username, 'email': self.account_email})
if self.account_organization:
view['namespace'] = {
'kind': 'org',
'name': self.account_username,
'avatar': avatar.get_data_for_org(account),
}
else:
account.robot = None
if self.account_robot:
account.robot = self.account_robot
view['namespace'] = {
'kind': 'user',
'name': self.account_username,
'avatar': avatar.get_data_for_user(account),
}
return view
class LogEntryPage(
namedtuple('LogEntryPage', ['logs', 'next_page_token'])):
"""
LogEntryPage represents a single page of logs.
:type logs: [LogEntry]
:type next_page_token: {any -> any}
"""
class AggregatedLogEntry(
namedtuple('AggregatedLogEntry', ['count', 'kind_id', 'day', 'start_time'])):
"""
AggregatedLogEntry represents an aggregated view of logs.
:type count: int
:type kind_id: int
:type day: string
:type start_time: Date
"""
def to_dict(self):
synthetic_date = datetime(self.start_time.year, self.start_time.month, int(self.day), tzinfo=get_localzone())
if synthetic_date.day < self.start_time.day:
synthetic_date = synthetic_date + relativedelta(months=1)
kinds = model.log.get_log_entry_kinds()
view = {
'kind': kinds[self.kind_id],
'count': self.count,
'datetime': format_date(synthetic_date),
}
return view
@add_metaclass(ABCMeta)
class SuperuserDataInterface(object):
"""
Interface that represents all data store interactions required by a superuser api.
"""
@abstractmethod
def get_logs_query(self, start_time, end_time, page_token=None):
"""
Returns a LogEntryPage.
"""
@abstractmethod
def get_aggregated_logs(self, start_time, end_time):
"""
Returns a list of AggregatedLogEntry
"""
@abstractmethod
def get_organizations(self):
"""


@@ -7,37 +7,8 @@ from auth.permissions import ReadRepositoryPermission, ModifyRepositoryPermissio
from data import model, database
from endpoints.api.build import get_job_config, _get_build_status
from endpoints.api.superuser_models_interface import BuildTrigger
from endpoints.api.superuser_models_interface import SuperuserDataInterface, LogEntryPage, LogEntry, Organization, User, \
ServiceKey, Approval, RepositoryBuild, AggregatedLogEntry
def _create_log(log, log_kind):
account_organization = None
account_username = None
account_email = None
account_robot = None
try:
account_organization = log.account.organization
account_username = log.account.username
account_email = log.account.email
account_robot = log.account.robot
except AttributeError:
pass
performer_robot = None
performer_username = None
performer_email = None
try:
performer_robot = log.performer.robot
performer_username = log.performer.username
performer_email = log.performer.email
except AttributeError:
pass
return LogEntry(log.metadata_json, log.ip, log.datetime, performer_email, performer_username,
performer_robot, account_organization, account_username,
account_email, account_robot, log_kind[log.kind_id])
from endpoints.api.superuser_models_interface import SuperuserDataInterface, Organization, User, \
ServiceKey, Approval, RepositoryBuild
def _create_user(user):
@@ -205,16 +176,5 @@ class PreOCIModel(SuperuserDataInterface):
def get_organizations(self):
return [Organization(org.username, org.email) for org in model.organization.get_organizations()]
def get_aggregated_logs(self, start_time, end_time):
aggregated_logs = model.log.get_aggregated_logs(start_time, end_time)
return [AggregatedLogEntry(log.count, log.kind_id, log.day, start_time) for log in aggregated_logs]
def get_logs_query(self, start_time, end_time, page_token=None):
logs_query = model.log.get_logs_query(start_time, end_time)
logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
page_token=page_token, limit=20)
kinds = model.log.get_log_entry_kinds()
return LogEntryPage([_create_log(log, kinds) for log in logs], next_page_token)
pre_oci_model = PreOCIModel()


@@ -1,131 +0,0 @@
import pytest
from mock import Mock
from data import model, database
from endpoints.api.logs_models_interface import LogEntry, LogEntryPage, AggregatedLogEntry
from endpoints.api.logs_models_pre_oci import pre_oci_model
from util.morecollections import AttrDict
def test_get_logs_query(monkeypatch):
get_repository_mock = Mock()
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
get_user_mock = Mock()
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
get_logs_query_mock = Mock()
monkeypatch.setattr(model.log, 'get_logs_query', get_logs_query_mock)
paginate_mock = Mock()
paginate_mock.return_value = ([], {})
monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
assert pre_oci_model.get_logs_query('start_time', 'end_time', 'performer_name', 'repository_name', 'namespace_name',
set(), None) == LogEntryPage([], {})
def test_get_logs_query_returns_list_log_entries(monkeypatch):
get_repository_mock = Mock()
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
get_user_mock = Mock()
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
get_logs_query_mock = Mock()
monkeypatch.setattr(model.log, 'get_logs_query', get_logs_query_mock)
paginate_mock = Mock()
paginate_mock.return_value = ([AttrDict({'kind': 1, 'datetime': 'datetime', 'ip': 'ip', 'metadata_json': '{}',
'account': AttrDict(
{'username': 'account_username', 'email': 'account_email', 'robot': False,
'organization': False}),
'performer': AttrDict(
{'email': 'performer_email', 'username': 'performer_username',
'robot': False}), 'kind_id': 1})], {'key': 'value'})
monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
assert pre_oci_model.get_logs_query('start_time', 'end_time', 'performer_username', 'repository_name',
'namespace_name',
set(), {'start_id': 1}) == LogEntryPage([
LogEntry('{}', 'ip', 'datetime', 'performer_email', 'performer_username', False,
False, 'account_username', 'account_email', False, 1)], {'key': 'value'})
@pytest.mark.skip('Turned off until we move back to a single LogEntry table')
def test_get_logs_query_calls_get_repository(monkeypatch):
repo_mock = Mock()
performer_mock = Mock()
query_mock = Mock()
get_repository_mock = Mock()
get_repository_mock.return_value = repo_mock
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
get_user_mock = Mock()
get_user_mock.return_value = performer_mock
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
get_logs_query_mock = Mock()
get_logs_query_mock.return_value = query_mock
monkeypatch.setattr(model.log, 'get_logs_query', get_logs_query_mock)
paginate_mock = Mock()
page_token = {}
paginate_mock.return_value = ([], page_token)
monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
ignore = set()
pre_oci_model.get_logs_query('start_time', 'end_time', 'performer_username', 'repository_name', 'namespace_name',
ignore, page_token)
get_repository_mock.assert_called_once_with('namespace_name', 'repository_name')
get_user_mock.assert_called_once_with('performer_username')
get_logs_query_mock.assert_called_once_with('start_time', 'end_time', performer=performer_mock, repository=repo_mock,
namespace='namespace_name', ignore=ignore)
paginate_mock.assert_called_once_with(query_mock, database.LogEntry, descending=True,
page_token=page_token, limit=20)
def test_get_log_entry_kinds(monkeypatch):
get_log_entry_kinds_mock = Mock()
monkeypatch.setattr(model.log, 'get_log_entry_kinds', get_log_entry_kinds_mock)
pre_oci_model.get_log_entry_kinds()
get_log_entry_kinds_mock.assert_called_once_with()
def test_does_repo_exist_returns_false(monkeypatch):
get_repository_mock = Mock()
get_repository_mock.return_value = None
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
assert pre_oci_model.repo_exists('namespace_name', 'repository_name') is False
def test_does_repo_exist_returns_true(monkeypatch):
get_repository_mock = Mock()
get_repository_mock.return_value = True
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
assert pre_oci_model.repo_exists('namespace_name', 'repository_name') is True
def test_get_aggregated_logs(monkeypatch):
get_aggregated_logs_mock = Mock()
get_aggregated_logs_mock.side_effect = [[AttrDict({'day': '1', 'kind_id': 4, 'count': 6})],
[AttrDict({'day': '1', 'kind_id': 4, 'count': 12})],
[AttrDict({'day': '1', 'kind_id': 4, 'count': 3})]]
monkeypatch.setattr(model.log, 'get_aggregated_logs', get_aggregated_logs_mock)
repo_mock = Mock()
get_repository_mock = Mock()
get_repository_mock.return_value = repo_mock
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
performer_mock = Mock()
get_user_mock = Mock()
get_user_mock.return_value = performer_mock
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
actual = pre_oci_model.get_aggregated_logs('start_time', 'end_time', 'performer_name', 'repository_name',
'namespace_name', set())
assert actual == [AggregatedLogEntry(21, 4, '1')]