Add a bunch of logging to the data model caching mechanism

Should help us debug any potential issues
Joseph Schorr 2017-12-14 14:30:59 -05:00
parent 60bc655695
commit 9e16596854
2 changed files with 15 additions and 1 deletion


@@ -1,3 +1,5 @@
+import logging
+
 from datetime import datetime
 from abc import ABCMeta, abstractmethod
@@ -6,6 +8,8 @@ from six import add_metaclass
 from util.expiresdict import ExpiresDict
 from util.timedeltastring import convert_to_timedelta
 
+logger = logging.getLogger(__name__)
+
 def is_not_none(value):
   return value is not None
@@ -36,13 +40,23 @@ class InMemoryDataModelCache(DataModelCache):
   def retrieve(self, cache_key, loader, should_cache=is_not_none):
     not_found = [None]
+    logger.debug('Checking cache for key %s', cache_key.key)
     result = self.cache.get(cache_key.key, default_value=not_found)
     if result != not_found:
+      logger.debug('Found result in cache for key %s: %s', cache_key.key, result)
       return result
 
+    logger.debug('Found no result in cache for key %s; calling loader', cache_key.key)
     result = loader()
+    logger.debug('Got loaded result for key %s: %s', cache_key.key, result)
     if should_cache(result):
+      logger.debug('Caching loaded result for key %s with expiration %s: %s', cache_key.key,
+                   cache_key.expiration, result)
       expires = convert_to_timedelta(cache_key.expiration) + datetime.now()
       self.cache.set(cache_key.key, result, expires=expires)
+      logger.debug('Cached loaded result for key %s with expiration %s: %s', cache_key.key,
+                   cache_key.expiration, result)
+    else:
+      logger.debug('Not caching loaded result for key %s: %s', cache_key.key, result)
 
     return result
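
For context, a minimal sketch (not part of the commit) of the debug output this change produces when the cache is exercised. The import path, the no-arg constructor, and the CacheKey stand-in are assumptions; all retrieve() actually needs is an object with key and expiration attributes, where expiration is a duration string understood by convert_to_timedelta (e.g. '60s').

import logging
from collections import namedtuple

from data.cache import InMemoryDataModelCache  # assumed import path

logging.basicConfig(level=logging.DEBUG)

# Hypothetical stand-in for the project's cache key type.
CacheKey = namedtuple('CacheKey', ['key', 'expiration'])

model_cache = InMemoryDataModelCache()  # assumed no-arg constructor
blob_key = CacheKey(key='blob__sha256:abcd', expiration='60s')

# First call: logs 'Checking cache ...', then 'Found no result in cache ...',
# runs the loader, and caches the value for 60 seconds.
model_cache.retrieve(blob_key, lambda: {'digest': 'sha256:abcd'})

# Second call within the expiration window: logs 'Found result in cache ...'
# and returns the cached value without invoking the loader.
model_cache.retrieve(blob_key, lambda: {'digest': 'sha256:abcd'})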


@@ -43,7 +43,7 @@ def _get_repository_blob(namespace_name, repo_name, digest):
     return model.get_blob_by_digest(namespace_name, repo_name, digest)
 
   blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, digest)
-  return model_cache.retrieve(blob_cache_key, load_blob)
+  return model_cache.retrieve(blob_cache_key, load_blob)
 
 
 @v2_bp.route(BLOB_DIGEST_ROUTE, methods=['HEAD'])
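
A companion sketch for the should_cache branch, continuing with the same assumed names as above: under the default is_not_none predicate, a loader that returns None triggers the 'Not caching loaded result' message and nothing is stored, so the next retrieve() runs the loader again.

# Continuing the sketch above: None results are skipped by the default
# should_cache=is_not_none predicate.
missing_key = CacheKey(key='blob__sha256:missing', expiration='60s')

model_cache.retrieve(missing_key, lambda: None)
# Logs: 'Not caching loaded result for key blob__sha256:missing: None'

model_cache.retrieve(missing_key, lambda: None)
# Misses again and re-runs the loader, since None was never stored.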