Change blob upload ints into bigints

This commit is contained in:
Joseph Schorr 2017-02-27 12:58:13 -05:00
parent 3502d9f61c
commit 76de324ca8
2 changed files with 31 additions and 2 deletions

View file

@@ -1004,12 +1004,12 @@ class RepositoryAuthorizedEmail(BaseModel):
class BlobUpload(BaseModel):
    """Tracks the state of an in-progress, resumable blob (layer) upload.

    Byte counters are BigIntegerFields so uploads larger than 2^31-1 bytes
    (~2GB) — e.g. layers > 8GB — can be recorded without overflow.
    """
    repository = ForeignKeyField(Repository)
    uuid = CharField(index=True, unique=True)
    # Total bytes received so far for this upload.
    byte_count = BigIntegerField(default=0)
    # Serialized resumable-hash state so hashing can continue across chunks.
    sha_state = ResumableSHA256Field(null=True, default=resumablehashlib.sha256)
    location = ForeignKeyField(ImageStorageLocation)
    # Engine-specific metadata for the storage backend (opaque here).
    storage_metadata = JSONField(null=True, default={})
    chunk_count = IntegerField(default=0)
    # Unknown (NULL) until/unless the uncompressed size has been computed.
    uncompressed_byte_count = BigIntegerField(null=True)
    created = DateTimeField(default=datetime.now, index=True)
    piece_sha_state = ResumableSHA1Field(null=True)
    piece_hashes = Base64BinaryField(null=True)

View file

@@ -0,0 +1,29 @@
"""Change BlobUpload fields to BigIntegers to allow layers > 8GB
Revision ID: b8ae68ad3e52
Revises: e2894a3a3c19
Create Date: 2017-02-27 11:26:49.182349
"""
# revision identifiers, used by Alembic.
revision = 'b8ae68ad3e52'
down_revision = 'e2894a3a3c19'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
    """Widen BlobUpload byte counters from Integer to BigInteger.

    Stale upload rows are purged first: that is a reasonable cleanup on its
    own, and shrinking the table makes the ALTER COLUMN calls much faster.
    """
    bind = op.get_bind()
    bind.execute("Delete from blobupload where created <= '2017-02-01'", ())

    # Both counters get the same Integer -> BigInteger widening.
    for column_name in ('byte_count', 'uncompressed_byte_count'):
        op.alter_column('blobupload', column_name,
                        existing_type=sa.Integer(), type_=sa.BigInteger())
def downgrade(tables):
    """Revert BlobUpload byte counters from BigInteger back to Integer."""
    # Mirror of upgrade(): narrow both counters back to plain Integer.
    for column_name in ('byte_count', 'uncompressed_byte_count'):
        op.alter_column('blobupload', column_name,
                        existing_type=sa.BigInteger(), type_=sa.Integer())