Merge pull request #3082 from quay/migrate-logs-caching

Add some additional caching and logs to the robot cleanup migration
commit 0c3b6d5b1a
Author: josephschorr
Date: 2018-05-15 15:35:59 -04:00 (committed by GitHub)

@@ -1,22 +1,39 @@
 import logging
 
 from data.database import User
 from util.names import parse_robot_username
 
 logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
 
 
 def cleanup_old_robots(page_size=50):
   """ Deletes any robots that live under namespaces that no longer exist. """
   # Collect the robot accounts to delete.
   page_number = 1
   to_delete = []
+  encountered_namespaces = {}
+
   while True:
     found_bots = False
     for robot in list(User.select().where(User.robot == True).paginate(page_number, page_size)):
       found_bots = True
+      logger.info("Checking robot %s (page %s)", robot.username, page_number)
       namespace, _ = parse_robot_username(robot.username)
-      try:
-        User.get(username=namespace)
-      except User.DoesNotExist:
-        # Save the robot account for deletion.
-        to_delete.append(robot)
+      if namespace in encountered_namespaces:
+        if not encountered_namespaces[namespace]:
+          logger.info('Marking %s to be deleted', robot.username)
+          to_delete.append(robot)
+      else:
+        try:
+          User.get(username=namespace)
+          encountered_namespaces[namespace] = True
+        except User.DoesNotExist:
+          # Save the robot account for deletion.
+          logger.info('Marking %s to be deleted', robot.username)
+          to_delete.append(robot)
+          encountered_namespaces[namespace] = False
 
     if not found_bots:
       break
@@ -24,5 +41,7 @@ def cleanup_old_robots(page_size=50):
     page_number = page_number + 1
 
   # Cleanup any robot accounts whose corresponding namespace doesn't exist.
-  for robot in to_delete:
+  logger.info('Found %s robots to delete', len(to_delete))
+  for index, robot in enumerate(to_delete):
+    logger.info('Deleting robot %s of %s (%s)', index, len(to_delete), robot.username)
     robot.delete_instance(recursive=True, delete_nullable=True)
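
The substantive change is memoizing the namespace-existence check: previously every robot triggered a User.get(username=namespace) query, even when many robots shared the same namespace; now each namespace is looked up at most once and the cached result is reused. A minimal standalone sketch of that pattern follows, with hypothetical names (find_orphaned_robots and namespace_exists are illustrations, not part of the commit; namespace_exists stands in for the real User.get lookup):

# Sketch of the namespace-existence caching introduced by this commit.
# Hypothetical helper, not part of the quay codebase.
def find_orphaned_robots(robots, namespace_exists):
  """Return the robots whose owning namespace no longer exists.

  robots           -- iterable of (namespace, robot_name) pairs
  namespace_exists -- callable performing the expensive existence check
                      (stands in for User.get(username=namespace))
  """
  cache = {}  # namespace -> bool: does the namespace still exist?
  orphaned = []
  for namespace, robot_name in robots:
    if namespace not in cache:
      # Only the first robot under a given namespace pays for the lookup.
      cache[namespace] = namespace_exists(namespace)
    if not cache[namespace]:
      orphaned.append((namespace, robot_name))
  return orphaned


if __name__ == '__main__':
  live_namespaces = {'acme'}
  robots = [('acme', 'builder'), ('ghost', 'builder'), ('ghost', 'deployer')]
  # Both 'ghost' robots are flagged, but 'ghost' is looked up only once.
  print(find_orphaned_robots(robots, lambda ns: ns in live_namespaces))

The added logger.info calls serve the same operational goal: for a migration that can run for a long time on a large installation, per-page and per-deletion progress lines make it possible to tell a slow run from a hung one.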