Fixes for the uncompressed size backfill script.
This commit is contained in:
parent
ec484e3efc
commit
98d8e0fe37
2 changed files with 55 additions and 15 deletions
|
@ -83,6 +83,10 @@ class InvalidBuildTriggerException(DataModelException):
|
|||
pass
|
||||
|
||||
|
||||
class InvalidImageException(DataModelException):
  """Raised when a referenced image or image storage cannot be found."""
|
||||
|
||||
|
||||
class TooManyUsersException(DataModelException):
  """Raised when a user-count limit would be exceeded."""
|
||||
|
||||
|
@ -1180,6 +1184,23 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
|
|||
return new_image
|
||||
|
||||
|
||||
def get_storage_by_uuid(storage_uuid):
  """Look up an ImageStorage row by its uuid, with placement locations attached.

  Args:
    storage_uuid: uuid string identifying the ImageStorage row.

  Returns:
    The ImageStorage model instance, with an extra ``locations`` attribute
    set to the set of location names where the storage is placed.

  Raises:
    InvalidImageException: if no placement exists for the given uuid.
  """
  placements = list(ImageStoragePlacement
                    .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
                    .join(ImageStorageLocation)
                    .switch(ImageStoragePlacement)
                    .join(ImageStorage)
                    .where(ImageStorage.uuid == storage_uuid))

  if not placements:
    # Bug fix: the message was previously raised as ('... %s', storage_uuid),
    # which stores the uuid as a second exception arg and never interpolates
    # the placeholder. Format the message explicitly instead.
    raise InvalidImageException('No storage found with uuid: %s' % storage_uuid)

  found = placements[0].storage
  found.locations = {placement.location.name for placement in placements}

  return found
|
||||
|
||||
|
||||
def set_image_size(docker_image_id, namespace_name, repository_name,
|
||||
image_size):
|
||||
try:
|
||||
|
|
|
@ -1,31 +1,50 @@
|
|||
from data import model
|
||||
from data.database import ImageStorage
|
||||
from app import app, storage as store
|
||||
from data.database import db
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def backfill_sizes():
  """Backfill ``ImageStorage.uncompressed_size`` for rows that lack it.

  Re-runs a LIMIT-100 query inside a DB transaction until a pass selects no
  rows. For each row, the size is read from the image's JSON blob in blob
  storage; rows whose JSON has neither 'Size' nor 'size' are written with a
  -1 sentinel so they are not re-selected on the next pass.

  NOTE(review): the scraped diff fused the old single-pass loop (which used
  the root ``logging`` module and skipped missing sizes, never terminating on
  them) with this batched version; this is the corrected, batched form.
  """
  query = (ImageStorage
           .select()
           .where(ImageStorage.uncompressed_size == None, ImageStorage.uploading == False)
           .limit(100))

  total = 0
  missing = 0
  batch_processed = 1  # primed non-zero so the loop body runs at least once

  while batch_processed > 0:
    batch_processed = 0
    with app.config['DB_TRANSACTION_FACTORY'](db):
      # clone() re-executes the LIMITed query fresh on every pass.
      for image_storage in query.clone():
        total += 1
        batch_processed += 1
        if (total - 1) % 100 == 0:
          logger.debug('Storing entry: %s', total)

        # Lookup the JSON for the image.
        uuid = image_storage.uuid
        with_locations = model.get_storage_by_uuid(uuid)

        json_data = store.get_content(with_locations.locations, store.image_json_path(uuid))
        size = json_data.get('Size', json_data.get('size', -1))

        if size == -1:
          missing += 1
          logger.debug('Missing entry %s (%s/%s)', uuid, missing, total)

        # Persist the size (or the -1 sentinel): either way the row no longer
        # matches ``uncompressed_size == None`` and the loop can terminate.
        image_storage.uncompressed_size = size
        image_storage.save()
|
||||
|
||||
|
||||
if __name__ == "__main__":
  logging.basicConfig(level=logging.DEBUG)
  # Silence boto's very chatty debug output while the backfill runs.
  logging.getLogger('boto').setLevel(logging.CRITICAL)

  # Bug fix: the scraped diff left backfill_sizes() duplicated (old and new
  # lines of an indentation-only change); run it exactly once.
  backfill_sizes()
|
||||
|
|
Reference in a new issue