Fixes for the uncompressedsize backfill script.

parent 45208983bf
commit d0b93146de

1 changed file with 19 additions and 15 deletions
@@ -1,4 +1,3 @@
-import json
 import logging
 import zlib
 
@@ -11,13 +10,17 @@ from util.gzipstream import ZLIB_GZIP_WINDOW
 
 logger = logging.getLogger(__name__)
 
+
+CHUNK_SIZE = 512 * 1024 * 1024
+
+
 def backfill_sizes_from_data():
   while True:
     # Load the record from the DB.
     try:
       record = (ImageStorage
                 .select(ImageStorage.uuid)
-                .where(ImageStorage.uncompressed_size == None, ImageStorage.uploading == False)
+                .where(ImageStorage.uncompressed_size >> None, ImageStorage.uploading == False)
                 .get())
     except ImageStorage.DoesNotExist:
       # We're done!
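The `.where()` change is the substantive fix in this hunk. In the peewee release this script was written against, `field == None` rendered as a literal `= NULL` comparison, which is never true under SQL's three-valued logic, so the old query never found a row to backfill; the `>>` operator (equivalently `.is_null()`) renders as `IS NULL`. A minimal sketch against an in-memory SQLite database, with a toy Storage model standing in for the real ImageStorage:

    # Toy model for illustration only; the real ImageStorage lives in data.database.
    from peewee import BooleanField, IntegerField, Model, SqliteDatabase

    db = SqliteDatabase(':memory:')

    class Storage(Model):
        uncompressed_size = IntegerField(null=True)
        uploading = BooleanField(default=False)

        class Meta:
            database = db

    db.create_tables([Storage])
    Storage.create(uncompressed_size=None, uploading=False)

    # `>> None` compiles to `uncompressed_size IS NULL`, the correct NULL test.
    backlog = Storage.select().where(Storage.uncompressed_size >> None,
                                     Storage.uploading == False)
    print(backlog.count())  # 1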
@@ -25,13 +28,14 @@ def backfill_sizes_from_data():
 
     uuid = record.uuid
 
+    with_locations = model.get_storage_by_uuid(uuid)
+
     # Read the layer from backing storage and calculate the uncompressed size.
     logger.debug('Loading data: %s (%s bytes)', uuid, with_locations.image_size)
     decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
-    stream = store.read_stream(with_locations.locations, store.image_layer_path(uuid))
 
     uncompressed_size = 0
-    CHUNK_SIZE = 512 * 1024 * 1024
-    while True:
-      current_data = stream.read(CHUNK_SIZE)
-      if len(current_data) == 0:
+    with store.stream_read_file(with_locations.locations, store.image_layer_path(uuid)) as stream:
+      while True:
+        current_data = stream.read(CHUNK_SIZE)
+        if len(current_data) == 0:
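Two fixes land in this hunk. First, the `with_locations` record is actually loaded before use; previously the script referenced it without defining it. Second, the raw `store.read_stream(...)` handle is replaced by `store.stream_read_file(...)` used as a context manager, so the backing-storage handle is released even if decompression raises, and the 512 MiB `CHUNK_SIZE` moves to module level. The loop itself is the usual bounded-memory way to measure a gzip stream; here is a self-contained sketch using only the standard library, where `32 + zlib.MAX_WBITS` (accept a gzip or zlib header) is assumed to be what `util.gzipstream.ZLIB_GZIP_WINDOW` wraps:

    import zlib

    CHUNK_SIZE = 512 * 1024 * 1024  # same 512 MiB read bound as the script

    def uncompressed_size_of(path, chunk_size=CHUNK_SIZE):
        # 32 + MAX_WBITS: autodetect a gzip/zlib header (assumed equivalent
        # to util.gzipstream.ZLIB_GZIP_WINDOW).
        decompressor = zlib.decompressobj(32 + zlib.MAX_WBITS)
        total = 0
        with open(path, 'rb') as stream:  # closed even if decompress() raises
            while True:
                current_data = stream.read(chunk_size)
                if len(current_data) == 0:
                    break
                total += len(decompressor.decompress(current_data))
        return total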
@@ -45,13 +49,13 @@ def backfill_sizes_from_data():
     with app.config['DB_TRANSACTION_FACTORY'](db):
       try:
         current_record = model.get_storage_by_uuid(uuid)
-      except:
-        # Record no longer exists.
+      except model.InvalidImageException:
+        logger.warning('Storage with uuid no longer exists: %s', uuid)
         continue
 
       if not current_record.uploading and current_record.uncompressed_size == None:
         current_record.uncompressed_size = uncompressed_size
-        #current_record.save()
+        current_record.save()
 
 
 if __name__ == "__main__":
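The last hunk tightens the write path in three ways: the bare `except:`, which would also have swallowed programming errors, is narrowed to `model.InvalidImageException`; the skipped record is now logged instead of silently dropped; and the accidentally commented-out `current_record.save()` is restored. The save matters doubly, because the outer loop selects on `uncompressed_size IS NULL`: without persisting the size, the same row would be picked up again on every iteration and the script would never terminate. Reusing the toy Storage model from the first sketch above, the check-then-save idiom looks like this, with peewee's `atomic()` standing in for quay's `DB_TRANSACTION_FACTORY` wrapper:

    record = Storage.create(uncompressed_size=None, uploading=False)
    computed_size = 12345

    with db.atomic():
        # Re-fetch inside the transaction and re-check the precondition so a
        # concurrent writer that already filled in the size is not clobbered.
        current = Storage.get(Storage.id == record.id)
        if not current.uploading and current.uncompressed_size is None:
            current.uncompressed_size = computed_size
            current.save()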