Merge pull request #2944 from coreos-inc/joseph.schorr/QS-91/v2-caching
V2 registry blob caching
This commit is contained in:
commit
024c183f67
14 changed files with 256 additions and 32 deletions
62
data/cache/__init__.py
vendored
Normal file
62
data/cache/__init__.py
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
import logging
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from six import add_metaclass
|
||||
|
||||
from util.expiresdict import ExpiresDict
|
||||
from util.timedeltastring import convert_to_timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def is_not_none(value):
  """ Default `should_cache` predicate: cache every value except None. """
  return not (value is None)
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class DataModelCache(object):
  """ Interface for caches that store and return tuple data model objects. """

  @abstractmethod
  def retrieve(self, cache_key, loader, should_cache=is_not_none):
    """ Looks up `cache_key` in the cache and returns the value found, if any.

    On a miss, `loader` is invoked to produce the result and (subject to
    `should_cache`) to populate the cache.
    """
|
||||
|
||||
|
||||
class NoopDataModelCache(DataModelCache):
  """ Data model cache that performs no caching whatsoever. """

  def retrieve(self, cache_key, loader, should_cache=is_not_none):
    # Nothing is ever cached; every retrieval just runs the loader.
    loaded = loader()
    return loaded
|
||||
|
||||
|
||||
class InMemoryDataModelCache(DataModelCache):
  """ Implementation of the data model cache backed by an in-memory dictionary. """
  def __init__(self):
    # ExpiresDict handles per-entry expiration; the rebuilder yields an empty
    # dict since there is no backing store to repopulate from.
    self.cache = ExpiresDict(rebuilder=lambda: {})

  def retrieve(self, cache_key, loader, should_cache=is_not_none):
    """ Returns the cached value for `cache_key` if present; otherwise calls
        `loader`, caches its result when `should_cache(result)` is true, and
        returns it.
    """
    # Fresh list instance acts as a unique miss sentinel; checked by identity
    # below so a cached value that happens to *equal* [None] is still a hit.
    not_found = [None]

    logger.debug('Checking cache for key %s', cache_key.key)
    result = self.cache.get(cache_key.key, default_value=not_found)
    if result is not not_found:
      logger.debug('Found result in cache for key %s: %s', cache_key.key, result)
      return result

    logger.debug('Found no result in cache for key %s; calling loader', cache_key.key)
    result = loader()
    logger.debug('Got loaded result for key %s: %s', cache_key.key, result)
    if should_cache(result):
      # NOTE: args ordered to match the placeholders (key, expiration, result).
      logger.debug('Caching loaded result for key %s with expiration %s: %s', cache_key.key,
                   cache_key.expiration, result)
      expires = convert_to_timedelta(cache_key.expiration) + datetime.now()
      self.cache.set(cache_key.key, result, expires=expires)
      logger.debug('Cached loaded result for key %s with expiration %s: %s', cache_key.key,
                   cache_key.expiration, result)
    else:
      logger.debug('Not caching loaded result for key %s: %s', cache_key.key, result)

    return result
|
8
data/cache/cache_key.py
vendored
Normal file
8
data/cache/cache_key.py
vendored
Normal file
|
@ -0,0 +1,8 @@
|
|||
from collections import namedtuple
|
||||
|
||||
class CacheKey(namedtuple('CacheKey', ['key', 'expiration'])):
  """ Immutable key into the data model cache: the cache key string plus an
      expiration duration string (e.g. '60s').
  """
|
||||
|
||||
def for_repository_blob(namespace_name, repo_name, digest):
  """ Returns the cache key under which a repository blob's data is cached. """
  blob_key = 'repository_blob:%s:%s:%s' % (namespace_name, repo_name, digest)
  return CacheKey(blob_key, '60s')
|
16
data/cache/test/test_cache.py
vendored
Normal file
16
data/cache/test/test_cache.py
vendored
Normal file
|
@ -0,0 +1,16 @@
|
|||
import pytest
|
||||
|
||||
from data.cache import InMemoryDataModelCache, NoopDataModelCache
|
||||
from data.cache.cache_key import CacheKey
|
||||
|
||||
@pytest.mark.parametrize('cache_type', [
  (NoopDataModelCache),
  (InMemoryDataModelCache),
])
def test_caching(cache_type):
  # Retrieve twice (cold then potentially warm) and verify the loaded value
  # comes back both times for either implementation.
  cache = cache_type()
  key = CacheKey('foo', '60m')
  for _ in range(2):
    assert cache.retrieve(key, lambda: 1234) == 1234
|
|
@ -248,12 +248,19 @@ def get_storage_by_uuid(storage_uuid):
|
|||
|
||||
def get_layer_path(storage_record):
  """ Returns the path in the storage engine to the layer data referenced by the storage row. """
  # Delegate to the shared helper so the legacy-v1 vs. CAS-path decision lives
  # in one place; the inlined duplicate branch and the unreachable trailing
  # `return store.blob_path(...)` (dead code after an unconditional return)
  # are removed.
  return get_layer_path_for_storage(storage_record.uuid, storage_record.cas_path,
                                    storage_record.content_checksum)
|
||||
|
||||
def get_layer_path_for_storage(storage_uuid, cas_path, content_checksum):
  """ Returns the path in the storage engine to the layer data referenced by the storage
      information. """
  store = config.store
  if cas_path:
    # Content-addressable storage: path is derived from the checksum alone.
    return store.blob_path(content_checksum)

  logger.debug('Serving layer from legacy v1 path for storage %s', storage_uuid)
  return store.v1_image_layer_path(storage_uuid)
|
||||
|
||||
|
||||
def lookup_repo_storages_by_content_checksum(repo, checksums):
|
||||
|
|
|
@ -210,4 +210,4 @@ def test_change_tag_expiration(expiration_offset, expected_offset, initialized_d
|
|||
start_date = datetime.utcfromtimestamp(footag_updated.lifetime_start_ts)
|
||||
end_date = datetime.utcfromtimestamp(footag_updated.lifetime_end_ts)
|
||||
expected_end_date = start_date + convert_to_timedelta(expected_offset)
|
||||
assert end_date == expected_end_date
|
||||
assert (expected_end_date - end_date).total_seconds() < 5 # variance in test
|
||||
|
|
Reference in a new issue