Fix serialization of blobs to/from the cache
Also makes sure the test checks that serialization round-trips correctly
This commit is contained in:
parent
0bc1a06f4c
commit
24b77bbc10
3 changed files with 12 additions and 6 deletions
4
data/cache/impl.py
vendored
4
data/cache/impl.py
vendored
|
@ -48,7 +48,7 @@ class InMemoryDataModelCache(DataModelCache):
|
|||
result = self.cache.get(cache_key.key, default_value=not_found)
|
||||
if result != not_found:
|
||||
logger.debug('Found result in cache for key %s: %s', cache_key.key, result)
|
||||
return result
|
||||
return json.loads(result)
|
||||
|
||||
logger.debug('Found no result in cache for key %s; calling loader', cache_key.key)
|
||||
result = loader()
|
||||
|
@ -57,7 +57,7 @@ class InMemoryDataModelCache(DataModelCache):
|
|||
logger.debug('Caching loaded result for key %s with expiration %s: %s', cache_key.key,
|
||||
result, cache_key.expiration)
|
||||
expires = convert_to_timedelta(cache_key.expiration) + datetime.now()
|
||||
self.cache.set(cache_key.key, result, expires=expires)
|
||||
self.cache.set(cache_key.key, json.dumps(result), expires=expires)
|
||||
logger.debug('Cached loaded result for key %s with expiration %s: %s', cache_key.key,
|
||||
result, cache_key.expiration)
|
||||
else:
|
||||
|
|
|
@ -16,6 +16,7 @@ from endpoints.decorators import anon_protect, parse_repository_name
|
|||
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
|
||||
from endpoints.v2.errors import (
|
||||
BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge)
|
||||
from endpoints.v2.models_interface import Blob
|
||||
from endpoints.v2.models_pre_oci import data_model as model
|
||||
from util.cache import cache_control
|
||||
from util.registry.filelike import wrap_with_handler, StreamSlice
|
||||
|
@ -40,11 +41,16 @@ def _get_repository_blob(namespace_name, repo_name, digest):
|
|||
Automatically handles caching.
|
||||
"""
|
||||
def load_blob():
|
||||
return model.get_blob_by_digest(namespace_name, repo_name, digest)
|
||||
blob = model.get_blob_by_digest(namespace_name, repo_name, digest)
|
||||
if blob is None:
|
||||
return None
|
||||
|
||||
return blob._asdict()
|
||||
|
||||
blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, digest)
|
||||
return model_cache.retrieve(blob_cache_key, load_blob)
|
||||
|
||||
blob_dict = model_cache.retrieve(blob_cache_key, load_blob)
|
||||
return Blob(**blob_dict) if blob_dict is not None else None
|
||||
|
||||
|
||||
@v2_bp.route(BLOB_DIGEST_ROUTE, methods=['HEAD'])
|
||||
@parse_repository_name()
|
||||
|
|
|
@ -227,7 +227,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
|
|||
uuid=blob_record.uuid,
|
||||
digest=digest,
|
||||
size=blob_record.image_size,
|
||||
locations=blob_record.locations,
|
||||
locations=list(blob_record.locations),
|
||||
cas_path=blob_record.cas_path
|
||||
)
|
||||
except model.BlobDoesNotExist:
|
||||
|
|
Reference in a new issue