Add uncompressed size field to the image storage and add a backfill script (which is not yet automatically called)
parent f6d3238611
commit 86dfca2e3e
5 changed files with 59 additions and 1 deletion
@@ -265,6 +265,7 @@ class ImageStorage(BaseModel):
   comment = TextField(null=True)
   command = TextField(null=True)
   image_size = BigIntegerField(null=True)
+  uncompressed_size = BigIntegerField(null=True)
   uploading = BooleanField(default=True, null=True)
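The new column is nullable, so rows created before the backfill simply read back as None. A minimal illustrative read through the Peewee model (this helper is a sketch for clarity, not part of the commit):

  # Illustrative sketch only -- not part of this commit.
  from data.database import ImageStorage

  def get_uncompressed_size(storage_uuid):
    # Returns the recorded uncompressed size in bytes, or None if the
    # row predates the backfill (the column is nullable).
    storage = ImageStorage.get(ImageStorage.uuid == storage_uuid)
    return storage.uncompressed_size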
@@ -0,0 +1,25 @@
+"""add the uncompressed size to image storage
+
+Revision ID: 6f2ecf5afcf
+Revises: 34fd69f63809
+Create Date: 2014-09-22 14:39:13.470566
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '6f2ecf5afcf'
+down_revision = '34fd69f63809'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('imagestorage', sa.Column('uncompressed_size', sa.BigInteger(), nullable=True))
+  ### end Alembic commands ###
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('imagestorage', 'uncompressed_size')
+  ### end Alembic commands ###
@@ -452,7 +452,8 @@ def put_image_json(namespace, repository, image_id):
   set_uploading_flag(repo_image, True)
 
   # We cleanup any old checksum in case it's a retry after a fail
-  profile.debug('Cleanup old checksum')
+  profile.debug('Cleanup old checksum and save size')
+  repo_image.storage.uncompressed_size = data.get('Size')
   repo_image.storage.checksum = None
   repo_image.storage.save()
 
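For context, data here is the image JSON document uploaded by the Docker client, and this commit treats its top-level 'Size' key as the uncompressed size. An abbreviated, illustrative payload (values are made up, not from this commit):

  # Abbreviated sketch of a Docker image JSON payload; values are illustrative.
  data = {
    'id': '9e89cc6f',        # image identifier (shortened here)
    'parent': '27cf7842',    # parent image identifier, if any
    'Size': 94857283,        # size in bytes; may be absent
  }

  # data.get('Size') returns None when the key is missing, which is fine
  # because the new uncompressed_size column is nullable.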
Binary file not shown.
31  tools/uncompressedsize.py  Normal file
@@ -0,0 +1,31 @@
+from data import model
+from data.database import ImageStorage
+from app import app, storage as store
+
+import logging
+
+def backfill_sizes():
+  count = ImageStorage.select().where(ImageStorage.uncompressed_size == None).count()
+  counter = 0
+  for image_storage in ImageStorage.select().where(ImageStorage.uncompressed_size == None):
+    logging.debug("Backfilling uncompressed size: %s of %s" % (counter, count))
+
+    # Lookup the JSON for the image.
+    uuid = image_storage.uuid
+    with_locations = model.get_storage_by_uuid(uuid)
+
+    json_data = store.get_content(with_locations.locations, store.image_json_path(uuid))
+    size = json_data.get('Size', None)
+    if size is None:
+      continue
+
+    image_storage.uncompressed_size = size
+    image_storage.save()
+    counter += 1
+
+
+if __name__ == "__main__":
+  logging.basicConfig(level=logging.DEBUG)
+  logging.getLogger('boto').setLevel(logging.CRITICAL)
+
+  backfill_sizes()
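Two notes on the backfill script, since it is not wired into any automatic process: it would presumably be run by hand (roughly python tools/uncompressedsize.py from the repository root, with the app configuration available), and store.get_content looks like it returns the raw image JSON document as a string rather than a parsed dict, in which case the 'Size' lookup would need an explicit json.loads first. A minimal sketch of that assumption:

  # Sketch, assuming store.get_content returns the raw JSON document as a string.
  import json

  raw_json = store.get_content(with_locations.locations, store.image_json_path(uuid))
  size = json.loads(raw_json).get('Size', None)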