Change the backfill script to use a spooled temp file

Joseph Schorr 2014-09-27 14:55:24 -04:00
parent 21d8c41320
commit 0a93b39c54


@@ -6,7 +6,7 @@ from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db
 from gzip import GzipFile
-from StringIO import StringIO
+from tempfile import SpooledTemporaryFile
 
 logger = logging.getLogger(__name__)
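
For context, a minimal sketch (not part of the commit) of why the import swap matters: StringIO always holds the whole payload in memory, while SpooledTemporaryFile buffers in memory only up to max_size and then transparently rolls over to a real temporary file on disk. The 512 KB threshold mirrors the CHUNK_SIZE used in the hunk below; the _rolled attribute is a CPython implementation detail, used here only to make the rollover visible.

from tempfile import SpooledTemporaryFile

CHUNK_SIZE = 512 * 1024  # same 512 KB threshold the script passes as max_size

with SpooledTemporaryFile(CHUNK_SIZE) as buf:
  buf.write(b'x' * CHUNK_SIZE)              # fits within the in-memory buffer
  assert not buf._rolled                    # still held in memory
  buf.write(b'x')                           # one byte past the threshold...
  assert buf._rolled                        # ...now backed by a temp file on disk
  buf.seek(0)
  assert len(buf.read()) == CHUNK_SIZE + 1  # contents read back transparently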
@@ -67,22 +67,19 @@ def backfill_sizes_from_data():
       with_locations = model.get_storage_by_uuid(uuid)
       layer_size = -2
 
-      # Read the layer from backing storage.
-      logger.debug('Reading entry: %s (%s bytes)', uuid, with_locations.image_size)
+      # Read the layer from backing storage and calculate the uncompressed size.
       try:
-        layer_data = store.get_content(with_locations.locations, store.image_layer_path(uuid))
-      except Exception as ex:
-        logger.debug('Could not read entry: %s. Reason: %s', uuid, ex)
-        continue
+        logger.debug('Loading data: %s (%s bytes)', uuid, with_locations.image_size)
+        CHUNK_SIZE = 512 * 1024
+        with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
+          layer_data = store.get_content(with_locations.locations, store.image_layer_path(uuid))
+          tarball.write(layer_data)
+          tarball.seek(0)
 
-      # Calculate the uncompressed size.
-      try:
-        layer_stream = StringIO(layer_data)
-        with GzipFile(fileobj=layer_stream, mode='rb') as gzip_file:
-          gzip_file.read()
-          layer_size = gzip_file.size
+          with GzipFile(fileobj=tarball, mode='rb') as gzip_file:
+            gzip_file.read()
+            layer_size = gzip_file.size
 
-        layer_stream.close()
       except Exception as ex:
         logger.debug('Could not gunzip entry: %s. Reason: %s', uuid, ex)
         continue
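
The new loop body can also be read as a standalone helper. This is a minimal sketch under the commit's assumptions (the helper name uncompressed_size is hypothetical; layer_data is the compressed layer blob already fetched from storage): buffer the blob in a SpooledTemporaryFile, stream it through GzipFile, and read GzipFile.size, which the gzip module sets to the total uncompressed byte count once the stream has been read to the end.

from gzip import GzipFile
from tempfile import SpooledTemporaryFile

CHUNK_SIZE = 512 * 1024  # spill to a disk-backed temp file past 512 KB

def uncompressed_size(layer_data):
  # Returns the uncompressed size of a gzipped blob, or -1 if it cannot be gunzipped.
  with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
    tarball.write(layer_data)
    tarball.seek(0)
    try:
      with GzipFile(fileobj=tarball, mode='rb') as gzip_file:
        gzip_file.read()        # decompress to the end so gzip_file.size is final
        return gzip_file.size
    except Exception:
      return -1

In the backfill loop this value is what ends up in layer_size; the -1 return here stands in for the script's layer_size = -2 sentinel and its continue on a gunzip failure.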