Fix serialization of blobs to/from the cache

Also makes sure the test checks that serialization to and from the cache round-trips correctly
This commit is contained in:
Joseph Schorr 2018-03-02 14:22:55 -05:00
parent 0bc1a06f4c
commit 24b77bbc10
3 changed files with 12 additions and 6 deletions

View file

@ -16,6 +16,7 @@ from endpoints.decorators import anon_protect, parse_repository_name
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from endpoints.v2.errors import (
BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge)
from endpoints.v2.models_interface import Blob
from endpoints.v2.models_pre_oci import data_model as model
from util.cache import cache_control
from util.registry.filelike import wrap_with_handler, StreamSlice
@ -40,11 +41,16 @@ def _get_repository_blob(namespace_name, repo_name, digest):
Automatically handles caching.
"""
def load_blob():
return model.get_blob_by_digest(namespace_name, repo_name, digest)
blob = model.get_blob_by_digest(namespace_name, repo_name, digest)
if blob is None:
return None
return blob._asdict()
blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, digest)
return model_cache.retrieve(blob_cache_key, load_blob)
blob_dict = model_cache.retrieve(blob_cache_key, load_blob)
return Blob(**blob_dict) if blob_dict is not None else None
@v2_bp.route(BLOB_DIGEST_ROUTE, methods=['HEAD'])
@parse_repository_name()