Fix gc by using the v1/v2 storage location helper everywhere
parent 44d06b0c2e
commit 52125bbfed

4 changed files with 11 additions and 22 deletions
@@ -34,7 +34,7 @@ def garbage_collect_storage(storage_id_whitelist):
     return
 
 def placements_query_to_paths_set(placements_query):
-  return {(placement.location.name, config.store.image_path(placement.storage.uuid))
+  return {(placement.location.name, get_layer_path(placement.storage))
           for placement in placements_query}
 
 def orphaned_storage_query(select_base_query, candidates, group_by):
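This first hunk is the gc fix named in the title: the paths set used to be built from config.store.image_path(...), which always yields the legacy per-uuid directory, so rows whose data lives at a CAS blob path were compared against a location that never matches. Going through get_layer_path makes the collector compare the same paths the read side uses. A minimal, runnable sketch of the mismatch; the _SketchStore class, the 'sharedimages' prefix, and the blob layout below are illustrative stand-ins, not the real storage engine:

# Hypothetical store mirroring the StoragePaths helpers changed further down in
# this commit; the 'sharedimages' prefix and blob layout are illustrative only.
class _SketchStore(object):
  shared_images = 'sharedimages'

  def image_path(self, storage_uuid):
    return '{0}/{1}/'.format(self.shared_images, storage_uuid)

  def blob_path(self, digest_str):
    return 'blobs/{0}'.format(digest_str)

store = _SketchStore()

# A CAS-backed row's data actually lives at its blob path...
print(store.blob_path('sha256:abc123'))   # blobs/sha256:abc123
# ...but the old gc comparison used the legacy per-uuid directory instead:
print(store.image_path('some-uuid'))      # sharedimages/some-uuid/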
@@ -213,6 +213,7 @@ def get_layer_path(storage_record):
   """ Returns the path in the storage engine to the layer data referenced by the storage row. """
   store = config.store
   if not storage_record.cas_path:
+    logger.debug('Serving layer from legacy v1 path')
     return store.v1_image_layer_path(storage_record.uuid)
 
   return store.blob_path(storage_record.content_checksum)
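After this hunk, get_layer_path is the single place that decides between the legacy v1 layout and the CAS layout. The sketch below shows that dispatch end to end; it swaps the module's config.store for a stub and uses made-up record/store classes, so only the branch logic mirrors the real helper:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

class _StubStore(object):
  """ Hypothetical stand-in for config.store; only the two methods the helper calls. """
  def v1_image_layer_path(self, storage_uuid):
    return 'v1/{0}/layer'.format(storage_uuid)

  def blob_path(self, digest_str):
    return 'cas/{0}'.format(digest_str)

class _StubRecord(object):
  def __init__(self, uuid, cas_path, content_checksum=None):
    self.uuid = uuid
    self.cas_path = cas_path
    self.content_checksum = content_checksum

def get_layer_path(storage_record, store=_StubStore()):
  """ Same dispatch as the helper above, with config.store replaced by a stub parameter. """
  if not storage_record.cas_path:
    logger.debug('Serving layer from legacy v1 path')
    return store.v1_image_layer_path(storage_record.uuid)

  return store.blob_path(storage_record.content_checksum)

print(get_layer_path(_StubRecord('0123', cas_path=False)))                        # v1/0123/layer
print(get_layer_path(_StubRecord('4567', cas_path=True, content_checksum='d1')))  # cas/d1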
@@ -152,12 +152,8 @@ def get_image_layer(namespace, repository, image_id, headers):
                        image_id=image_id)
 
   try:
-    path = store.blob_path(repo_image.storage.content_checksum)
-    if not repo_image.storage.cas_path:
-      path = store.v1_image_layer_path(repo_image.storage.uuid)
-      logger.info('Serving legacy v1 image from path: %s', path)
-
-    logger.debug('Looking up the direct download URL')
+    path = model.storage.get_layer_path(repo_image.storage)
+    logger.debug('Looking up the direct download URL for path: %s', path)
     direct_download_url = store.get_direct_download_url(repo_image.storage.locations, path)
 
     if direct_download_url:
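This hunk and the two call-site hunks that follow collapse to the same shape: resolve the path once through the shared helper, then hand it to the storage engine together with the row's locations. A hypothetical sketch of that shared shape, not the real endpoint code (the argument names are placeholders and get_layer_path is passed in to keep the snippet self-contained):

def _resolve_layer(storage_engine, record, get_layer_path):
  """ Hypothetical sketch of the post-commit call-site pattern. """
  # Resolve the layer's path once, via the shared v1/v2 helper.
  path = get_layer_path(record)

  # Ask the storage engine for a direct download URL for whichever locations
  # hold the data; callers fall back to streaming the path when this is None.
  return path, storage_engine.get_direct_download_url(record.locations, path)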
@@ -528,11 +524,7 @@ def process_image_changes(namespace, repository, image_id):
     parent_trie.frombytes(parent_trie_bytes)
 
   # Read in the file entries from the layer tar file
-  layer_path = store.blob_path(repo_image.storage.content_checksum)
-  if not repo_image.storage.cas_path:
-    logger.info('Processing diffs for newly stored v1 image at %s', layer_path)
-    layer_path = store.v1_image_layer_path(uuid)
-
+  layer_path = model.storage.get_layer_path(repo_image.storage)
   with store.stream_read_file(image.storage.locations, layer_path) as layer_tar_stream:
     removed_files = set()
     layer_files = changes.files_and_dirs_from_tar(layer_tar_stream,
@@ -75,12 +75,8 @@ def check_blob_exists(namespace, repo_name, digest):
 def download_blob(namespace, repo_name, digest):
   found, headers = _base_blob_fetch(namespace, repo_name, digest)
 
-  path = storage.blob_path(digest)
-  if not found.cas_path:
-    logger.info('Generating legacy v1 path for image: %s', digest)
-    path = storage.v1_image_layer_path(found.uuid)
-
-  logger.debug('Looking up the direct download URL')
+  path = model.storage.get_layer_path(found)
+  logger.debug('Looking up the direct download URL for path: %s', path)
   direct_download_url = storage.get_direct_download_url(found.locations, path)
 
   if direct_download_url:
@@ -18,22 +18,22 @@ class StoragePaths(object):
 
     return tmpf, fn
 
-  def image_path(self, storage_uuid):
+  def _image_path(self, storage_uuid):
     return '{0}/{1}/'.format(self.shared_images, storage_uuid)
 
   def v1_image_layer_path(self, storage_uuid):
-    base_path = self.image_path(storage_uuid)
+    base_path = self._image_path(storage_uuid)
     return '{0}layer'.format(base_path)
 
   def blob_path(self, digest_str):
     return content_path(digest_str)
 
   def image_file_trie_path(self, storage_uuid):
-    base_path = self.image_path(storage_uuid)
+    base_path = self._image_path(storage_uuid)
     return '{0}files.trie'.format(base_path)
 
   def image_file_diffs_path(self, storage_uuid):
-    base_path = self.image_path(storage_uuid)
+    base_path = self._image_path(storage_uuid)
     return '{0}diffs.json'.format(base_path)
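Renaming image_path to _image_path keeps the legacy per-uuid directory as an internal detail of StoragePaths: callers go through get_layer_path or the specific *_path helpers instead of composing the directory themselves. A standalone sketch of how the derived paths compose; the 'sharedimages' value is made up for illustration, since the real class gets self.shared_images from elsewhere:

class _PathsSketch(object):
  """ Trimmed, hypothetical copy of the StoragePaths helpers above,
      just to show what the derived paths look like. """
  shared_images = 'sharedimages'

  def _image_path(self, storage_uuid):
    return '{0}/{1}/'.format(self.shared_images, storage_uuid)

  def v1_image_layer_path(self, storage_uuid):
    return '{0}layer'.format(self._image_path(storage_uuid))

  def image_file_trie_path(self, storage_uuid):
    return '{0}files.trie'.format(self._image_path(storage_uuid))

  def image_file_diffs_path(self, storage_uuid):
    return '{0}diffs.json'.format(self._image_path(storage_uuid))

paths = _PathsSketch()
print(paths.v1_image_layer_path('abcd'))    # sharedimages/abcd/layer
print(paths.image_file_trie_path('abcd'))   # sharedimages/abcd/files.trie
print(paths.image_file_diffs_path('abcd'))  # sharedimages/abcd/diffs.json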