Have blob uploads be checked against configurable max layer size
parent 239b6d7cf8
commit ff7f78e990
2 changed files with 2 additions and 12 deletions
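Only the schema migration hunk is shown below; the second changed file (not visible here) presumably carries the size check named in the commit title. As a rough, hypothetical sketch of how a configurable maximum layer size could be enforced at blob-upload time, with the config key MAXIMUM_LAYER_SIZE, the LayerTooLargeError exception, and the check_blob_upload_size helper all being illustrative names rather than code from this commit:

# Hypothetical illustration of a configurable layer-size check; none of these
# names come from the commit itself.
class LayerTooLargeError(Exception):
  """ Raised when an uploaded blob exceeds the configured maximum layer size. """


def check_blob_upload_size(uploaded_byte_count, config):
  # Pull the limit from configuration; fall back to 20 GiB when unset.
  maximum_layer_size = config.get('MAXIMUM_LAYER_SIZE', 20 * 1024 ** 3)
  if uploaded_byte_count > maximum_layer_size:
    raise LayerTooLargeError('Upload of %s bytes exceeds the %s byte layer limit' %
                             (uploaded_byte_count, maximum_layer_size))


# Example: a 9 GiB layer passes against a 10 GiB limit; a larger one would raise.
check_blob_upload_size(9 * 1024 ** 3, {'MAXIMUM_LAYER_SIZE': 10 * 1024 ** 3})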
@@ -1,25 +1,20 @@
 """Change BlobUpload fields to BigIntegers to allow layers > 8GB

 Revision ID: b8ae68ad3e52
-Revises: e2894a3a3c19
+Revises: 7a525c68eb13
 Create Date: 2017-02-27 11:26:49.182349

 """

 # revision identifiers, used by Alembic.
 revision = 'b8ae68ad3e52'
-down_revision = 'e2894a3a3c19'
+down_revision = '7a525c68eb13'

 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql

 def upgrade(tables):
   # Delete old entries in the blobupload table. This is generally a good idea for cleanup and
   # also makes the alter column calls much faster.
   conn = op.get_bind()
   conn.execute("Delete from blobupload where created <= '2017-02-01'", ())

   op.alter_column('blobupload', 'byte_count', existing_type=sa.Integer(), type_=sa.BigInteger())
   op.alter_column('blobupload', 'uncompressed_byte_count', existing_type=sa.Integer(), type_=sa.BigInteger())
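The hunk above only includes upgrade(). For reference, a minimal sketch of a matching reverse migration, assuming the same table and column names; this illustrates the op.alter_column reversal and is not the file's actual downgrade():

from alembic import op
import sqlalchemy as sa


def downgrade(tables):
  # Reverse of the upgrade: narrow the counters back to 32-bit integers.
  # This can fail (or truncate, depending on the database) if any existing row
  # already stores a value above 2**31 - 1.
  op.alter_column('blobupload', 'byte_count',
                  existing_type=sa.BigInteger(), type_=sa.Integer())
  op.alter_column('blobupload', 'uncompressed_byte_count',
                  existing_type=sa.BigInteger(), type_=sa.Integer())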