Remove all references to LogEntry2 from the code
The migration that actually drops the table will go in after this code has been pushed to production.
parent be2cece7b0
commit 6a94eba1a2
8 changed files with 14 additions and 72 deletions
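Note: the table drop itself ships separately, as the message above says. A minimal sketch of what that follow-up migration could look like, assuming Alembic and a table named 'logentry2' (both assumptions; the actual migration is not part of this commit):

    # Hypothetical follow-up migration; the table name 'logentry2' is assumed.
    from alembic import op

    def upgrade():
        # Safe only once no deployed code references LogEntry2.
        op.drop_table('logentry2')

    def downgrade():
        # Recreating the dropped table is intentionally out of scope here.
        raise NotImplementedError('logentry2 is not recreated on downgrade')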
@@ -1018,27 +1018,6 @@ class LogEntry(BaseModel):
     )
 
 
-class LogEntry2(BaseModel):
-  """ TEMP FOR QUAY.IO ONLY. DO NOT RELEASE INTO QUAY ENTERPRISE. """
-  kind = ForeignKeyField(LogEntryKind)
-  account = IntegerField(index=True, db_column='account_id')
-  performer = IntegerField(index=True, null=True, db_column='performer_id')
-  repository = IntegerField(index=True, null=True, db_column='repository_id')
-  datetime = DateTimeField(default=datetime.now, index=True)
-  ip = CharField(null=True)
-  metadata_json = TextField(default='{}')
-
-  class Meta:
-    database = db
-    read_slaves = (read_slave,)
-    indexes = (
-      (('account', 'datetime'), False),
-      (('performer', 'datetime'), False),
-      (('repository', 'datetime'), False),
-      (('repository', 'datetime', 'kind'), False),
-    )
-
-
 class RepositoryActionCount(BaseModel):
   repository = ForeignKeyField(Repository)
   count = IntegerField()
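Note: in the removed model above (and in the surviving LogEntry), each entry in Meta.indexes is a (fields, unique) pair, so the False means a plain non-unique composite index. A minimal self-contained sketch of the same convention, assuming peewee and an in-memory SQLite database:

    from peewee import Model, IntegerField, DateTimeField, SqliteDatabase

    db = SqliteDatabase(':memory:')

    class Example(Model):
      account = IntegerField(index=True)
      datetime = DateTimeField(index=True)

      class Meta:
        database = db
        # (fields, unique): False requests a non-unique composite index.
        indexes = ((('account', 'datetime'), False),)

    db.create_tables([Example])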
@@ -18,7 +18,6 @@ ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING = ['pull_repo']
 def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None,
                 ignore=None, model=LogEntry):
   """ Returns a query for selecting logs from the table, with various options and filters. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   joined = (model.select(*selections).switch(model)
             .where(model.datetime >= start_time, model.datetime < end_time))
 
@@ -61,7 +60,6 @@ def _get_log_entry_kind(name):
 def get_aggregated_logs(start_time, end_time, performer=None, repository=None, namespace=None,
                         ignore=None, model=LogEntry):
   """ Returns the count of logs, by kind and day, for the logs matching the given filters. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   date = db.extract_date('day', model.datetime)
   selections = [model.kind, date.alias('day'), fn.Count(model.id).alias('count')]
   query = _logs_query(selections, start_time, end_time, performer, repository, namespace, ignore,
@@ -72,7 +70,6 @@ def get_aggregated_logs(start_time, end_time, performer=None, repository=None, n
 def get_logs_query(start_time, end_time, performer=None, repository=None, namespace=None,
                    ignore=None, model=LogEntry):
   """ Returns the logs matching the given filters. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   Performer = User.alias()
   Account = User.alias()
   selections = [model, Performer]
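Note: the model=LogEntry keyword default is what let these helpers be pointed at either table; with LogEntry2 gone, every call site can simply rely on the default. Hypothetical call sites (the argument values here are placeholders, not from this commit):

    # Both calls implicitly use model=LogEntry now.
    query = get_logs_query(start_time, end_time, repository=repo)
    aggregated = get_aggregated_logs(start_time, end_time, namespace=namespace_name)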
@@ -143,7 +140,6 @@ def log_action(kind_name, user_or_organization_name, performer=None, repository=
 
 def get_stale_logs_start_id(model):
   """ Gets the oldest log entry. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   try:
     return (model.select(model.id).order_by(model.id).limit(1).tuples())[0][0]
   except IndexError:
@@ -152,7 +148,6 @@ def get_stale_logs_start_id(model):
 
 def get_stale_logs_cutoff_id(cutoff_date, model):
   """ Gets the most recent ID created before the cutoff_date. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   try:
     return (model.select(fn.Max(model.id)).where(model.datetime <= cutoff_date)
             .tuples())[0][0]
@@ -162,13 +157,11 @@ def get_stale_logs_cutoff_id(cutoff_date, model):
 
 def get_stale_logs(start_id, end_id, model):
   """ Returns all the logs with IDs between start_id and end_id inclusively. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   return model.select().where((model.id >= start_id), (model.id <= end_id))
 
 
 def delete_stale_logs(start_id, end_id, model):
   """ Deletes all the logs with IDs between start_id and end_id. """
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
   model.delete().where((model.id >= start_id), (model.id <= end_id)).execute()
 
 
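Note: the four helpers above are meant to be composed by a caller that batches over the ID range. A hedged sketch of such a loop; the archive callable, the batch size, and the None-when-empty behavior of the lookup helpers are assumptions, not part of this module:

    def rotate_stale_logs(model, cutoff_date, archive, batch_size=1000):
      start_id = get_stale_logs_start_id(model)
      cutoff_id = get_stale_logs_cutoff_id(cutoff_date, model)
      if start_id is None or cutoff_id is None:
        return  # nothing stale to rotate
      while start_id <= cutoff_id:
        end_id = min(start_id + batch_size - 1, cutoff_id)
        archive(list(get_stale_logs(start_id, end_id, model)))
        delete_stale_logs(start_id, end_id, model)
        start_id = end_id + 1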
@@ -4,7 +4,7 @@ from collections import namedtuple
 from peewee import IntegrityError
 
 from datetime import date, timedelta, datetime
-from data.database import (Repository, LogEntry, LogEntry2, RepositoryActionCount,
+from data.database import (Repository, LogEntry, RepositoryActionCount,
                            RepositorySearchScore, db_random_func, fn)
 
 logger = logging.getLogger(__name__)
@@ -52,16 +52,12 @@ def count_repository_actions(to_count):
   today = date.today()
   yesterday = today - timedelta(days=1)
 
-  # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
-  def lookup_action_count(model):
-    return (model
-            .select()
-            .where(model.repository == to_count,
-                   model.datetime >= yesterday,
-                   model.datetime < today)
-            .count())
-
-  actions = lookup_action_count(LogEntry) + lookup_action_count(LogEntry2)
+  actions = (LogEntry
+             .select()
+             .where(LogEntry.repository == to_count,
+                    LogEntry.datetime >= yesterday,
+                    LogEntry.datetime < today)
+             .count())
   try:
     RepositoryActionCount.create(repository=to_count, date=yesterday, count=actions)
     return True
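Note: with the lookup_action_count() wrapper gone, the count above is a single query against LogEntry. A sketch of how one might inspect the SQL peewee generates for it (peewee's .sql() returns the query string plus parameters; names come from the hunk above):

    query = (LogEntry
             .select()
             .where(LogEntry.repository == to_count,
                    LogEntry.datetime >= yesterday,
                    LogEntry.datetime < today))
    print(query.sql())    # roughly: SELECT ... FROM logentry WHERE repository_id = ? AND ...
    actions = query.count()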
@@ -48,8 +48,7 @@ class PreOCIModel(LogEntryDataInterface):
     performer = None
     if performer_name:
       performer = model.user.get_user(performer_name)
 
-    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
     def get_logs(m):
       logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
                                             repository=repo, namespace=namespace_name,
@@ -60,21 +59,7 @@ class PreOCIModel(LogEntryDataInterface):
                                             limit=20)
       return LogEntryPage([_create_log(log) for log in logs], next_page_token)
 
-    # First check the LogEntry table for the most recent logs, unless we've been expressly told
-    # to look inside the "second" table.
-    TOKEN_TABLE_KEY2 = 'ttk2'
-    is_temp_table = page_token is not None and page_token.get(TOKEN_TABLE_KEY2) == 1
-    if is_temp_table:
-      page_result = get_logs(database.LogEntry2)
-    else:
-      page_result = get_logs(database.LogEntry)
-
-    if page_result.next_page_token is None and not is_temp_table:
-      page_result = page_result._replace(next_page_token={TOKEN_TABLE_KEY2: 1})
-    elif is_temp_table and page_result.next_page_token is not None:
-      page_result.next_page_token[TOKEN_TABLE_KEY2] = 1
-
-    return page_result
+    return get_logs(database.LogEntry)
 
   def get_log_entry_kinds(self):
     return model.log.get_log_entry_kinds()
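Note: for the record, the deleted block implemented cross-table pagination: LogEntry pages were served first, and a 'ttk2' marker stamped into next_page_token flipped subsequent requests over to LogEntry2. The core of that dispatch, reduced to a self-contained sketch:

    TOKEN_TABLE_KEY2 = 'ttk2'

    def table_for(page_token):
      # A token stamped with {'ttk2': 1} meant "continue in the second table".
      if page_token is not None and page_token.get(TOKEN_TABLE_KEY2) == 1:
        return 'LogEntry2'
      return 'LogEntry'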
@@ -95,16 +80,12 @@ class PreOCIModel(LogEntryDataInterface):
     if performer_name:
       performer = model.user.get_user(performer_name)
 
-    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
     aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                     repository=repo, namespace=namespace_name,
                                                     ignore=ignore, model=database.LogEntry)
-    aggregated_logs_2 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
-                                                      repository=repo, namespace=namespace_name,
-                                                      ignore=ignore, model=database.LogEntry2)
 
     entries = {}
-    for log in itertools.chain(aggregated_logs, aggregated_logs_2):
+    for log in aggregated_logs:
       key = '%s-%s' % (log.kind_id, log.day)
       if key in entries:
         entries[key] = AggregatedLogEntry(log.count + entries[key].count, log.kind_id, log.day)
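Note: with a single source, the dedup-by-(kind, day) branch should no longer fire in practice, since the grouped query yields at most one row per kind and day; keeping the merge loop is harmless and leaves the code ready for future sources.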
@@ -135,7 +135,6 @@ class SuperUserAggregateLogs(ApiResource):
       (start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
                                                         parsed_args['endtime'])
 
-      # TODO(LogMigrate): Change to a unified log lookup util lib once we're back on LogEntry only.
       aggregated_logs = log_model.get_aggregated_logs(start_time, end_time)
       kinds = log_model.get_log_entry_kinds()
       return {
@@ -166,9 +165,8 @@ class SuperUserLogs(ApiResource):
     if SuperUserPermission().can():
       start_time = parsed_args['starttime']
       end_time = parsed_args['endtime']
-      (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
 
-      # TODO(LogMigrate): Change to a unified log lookup util lib once we're back on LogEntry only.
+      (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
       log_page = log_model.get_logs_query(start_time, end_time, page_token=page_token)
       kinds = log_model.get_log_entry_kinds()
       return {
@@ -110,8 +110,7 @@ def test_does_repo_exist_returns_true(monkeypatch):
 
 def test_get_aggregated_logs(monkeypatch):
   get_aggregated_logs_mock = Mock()
-  get_aggregated_logs_mock.side_effect = [[AttrDict({'day': '1', 'kind_id': 4, 'count': 6})],
-                                          [AttrDict({'day': '1', 'kind_id': 4, 'count': 12})]]
+  get_aggregated_logs_mock.side_effect = [[AttrDict({'day': '1', 'kind_id': 4, 'count': 6})]]
   monkeypatch.setattr(model.log, 'get_aggregated_logs', get_aggregated_logs_mock)
 
   repo_mock = Mock()
@@ -127,4 +126,4 @@ def test_get_aggregated_logs(monkeypatch):
   actual = pre_oci_model.get_aggregated_logs('start_time', 'end_time', 'performer_name', 'repository_name',
                                              'namespace_name', set())
 
-  assert actual == [AggregatedLogEntry(18, 4, '1')]
+  assert actual == [AggregatedLogEntry(6, 4, '1')]
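Note: the expected aggregate drops from 18 to 6 because the mock previously returned two result sets (6 from LogEntry plus 12 from LogEntry2, summed by the merge loop); with only one table queried, just the single list with count 6 is consumed.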
@@ -920,8 +920,7 @@ def populate_database(minimal=False, with_storage=False):
     model.repositoryactioncount.update_repository_score(to_count)
 
 
-WHITELISTED_EMPTY_MODELS = ['DeletedNamespace', 'LogEntry2', 'ManifestChild',
-                            'NamespaceGeoRestriction']
+WHITELISTED_EMPTY_MODELS = ['DeletedNamespace', 'ManifestChild', 'NamespaceGeoRestriction']
 
 def find_models_missing_data():
   # As a sanity check we are going to make sure that all db tables have some data, unless explicitly
@@ -8,7 +8,7 @@ from tempfile import SpooledTemporaryFile
 
 import features
 from app import app, storage
-from data.database import UseThenDisconnect, LogEntry, LogEntry2
+from data.database import UseThenDisconnect, LogEntry
 from data.model.log import (get_stale_logs, get_stale_logs_start_id,
                             get_stale_logs_cutoff_id, delete_stale_logs)
 from data.userfiles import DelegateUserfiles
@@ -36,10 +36,7 @@ class LogRotateWorker(Worker):
     self.add_operation(self._archive_logs, WORKER_FREQUENCY)
 
   def _archive_logs(self):
-    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
-    models = [LogEntry, LogEntry2]
-    for model in models:
-      self._archive_logs_for_model(model)
+    self._archive_logs_for_model(LogEntry)
 
   def _archive_logs_for_model(self, model):
     logger.debug('Attempting to rotate log entries')