Implement logs model using Elasticsearch
- Implement logs model using Elasticsearch, with tests
- Implement transition model using both the Elasticsearch and database models
- Add LOGS_MODEL configuration to choose which to use

Co-authored-by: Sida Chen <sidchen@redhat.com>
Co-authored-by: Kenny Lee Sin Cheong <kenny.lee@redhat.com>
parent 40c0352dd1
commit 035541c6f2

20 changed files with 1282 additions and 38 deletions
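The commit message describes three selectable backends, but only the table-backed side appears in the hunks below. As orientation, here is a minimal, hypothetical sketch of how such a LOGS_MODEL switch could be wired; the key names, the placeholder classes, and the configure() entry point are assumptions, not code from this commit:

# Hypothetical sketch of LOGS_MODEL selection; all names are illustrative.

class TableLogsModel(object):
  """Stand-in for the single-table model shown in the diff below."""

class ElasticsearchLogsModel(object):
  """Stand-in for the Elasticsearch-backed model this commit adds."""

class TransitionLogsModel(object):
  """Stand-in for the model that uses both backends during migration."""

_LOGS_MODELS = {
  'database': TableLogsModel,
  'elasticsearch': ElasticsearchLogsModel,
  'transition': TransitionLogsModel,
}

def configure(app_config):
  # Default to the single-table database model when LOGS_MODEL is unset.
  return _LOGS_MODELS[app_config.get('LOGS_MODEL', 'database')]()

# Example: logs_model = configure({'LOGS_MODEL': 'elasticsearch'})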
@@ -13,6 +13,7 @@ from data import model
 from data.database import LogEntry, LogEntry2, LogEntry3
 from data.logs_model.interface import ActionLogsDataInterface, LogsIterationTimeout
 from data.logs_model.datatypes import Log, AggregatedLogCount, LogEntriesPage, _format_date
+from data.logs_model.shared import SharedModel
 
 logger = logging.getLogger(__name__)
 
@@ -24,7 +25,7 @@ EXPECTED_ITERATION_LOG_COUNT = 1000
 LOG_MODELS = [LogEntry3, LogEntry2, LogEntry]
 
 
-class TableLogsModel(ActionLogsDataInterface):
+class TableLogsModel(SharedModel, ActionLogsDataInterface):
   """
   TableLogsModel implements the data model for the logs API backed by a single table
   in the database.
@@ -121,34 +122,6 @@ class TableLogsModel(ActionLogsDataInterface):
     model.log.log_action(kind_name, namespace_name, performer=performer, repository=repository,
                          ip=ip, metadata=metadata or {}, timestamp=timestamp)
 
-  def queue_logs_export(self, start_datetime, end_datetime, export_action_logs_queue,
-                        namespace_name=None, repository_name=None, callback_url=None,
-                        callback_email=None, filter_kinds=None):
-    export_id = str(uuid.uuid4())
-    namespace = model.user.get_namespace_user(namespace_name)
-    if namespace is None:
-      return None
-
-    repository = None
-    if repository_name is not None:
-      repository = model.repository.get_repository(namespace_name, repository_name)
-      if repository is None:
-        return None
-
-    export_action_logs_queue.put([namespace_name], json.dumps({
-      'export_id': export_id,
-      'repository_id': repository.id if repository else None,
-      'namespace_id': namespace.id,
-      'namespace_name': namespace.username,
-      'repository_name': repository.name if repository else None,
-      'start_time': _format_date(start_datetime),
-      'end_time': _format_date(end_datetime),
-      'callback_url': callback_url,
-      'callback_email': callback_email,
-    }), retries_remaining=3)
-
-    return export_id
-
   def yield_logs_for_export(self, start_datetime, end_datetime, repository_id=None,
                             namespace_id=None, max_query_time=None):
     # Using an adjusting scale, start downloading log rows in batches, starting at
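The 28 lines removed above are not lost: together with the new import and base class in the earlier hunks, they indicate that queue_logs_export now lives on SharedModel, where both the table-backed and Elasticsearch-backed models can inherit it. Below is a plausible reconstruction of data/logs_model/shared.py from the removed body; the module itself is not shown in this excerpt, so treat the imports and exact layout as assumptions:

# data/logs_model/shared.py -- reconstructed sketch, not the file from the commit.
import json
import uuid

from data import model
from data.logs_model.datatypes import _format_date


class SharedModel(object):
  def queue_logs_export(self, start_datetime, end_datetime, export_action_logs_queue,
                        namespace_name=None, repository_name=None, callback_url=None,
                        callback_email=None, filter_kinds=None):
    # Resolve the namespace (and optional repository) before queueing the export.
    export_id = str(uuid.uuid4())
    namespace = model.user.get_namespace_user(namespace_name)
    if namespace is None:
      return None

    repository = None
    if repository_name is not None:
      repository = model.repository.get_repository(namespace_name, repository_name)
      if repository is None:
        return None

    # Enqueue the export job with a few retries; a worker performs the actual export.
    export_action_logs_queue.put([namespace_name], json.dumps({
      'export_id': export_id,
      'repository_id': repository.id if repository else None,
      'namespace_id': namespace.id,
      'namespace_name': namespace.username,
      'repository_name': repository.name if repository else None,
      'start_time': _format_date(start_datetime),
      'end_time': _format_date(end_datetime),
      'callback_url': callback_url,
      'callback_email': callback_email,
    }), retries_remaining=3)

    return export_id

Listing SharedModel before ActionLogsDataInterface in the new class declaration keeps the shared concrete method ahead of the interface in the method resolution order.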