Have blob uploads be checked against configurable max layer size
parent dd35677712
commit dd7f254f96
4 changed files with 32 additions and 1 deletion
@@ -4,6 +4,7 @@ import time
 
 from flask import url_for, request, redirect, Response, abort as flask_abort
 
+import bitmath
 import resumablehashlib
 
 from app import storage, app, get_app_url, metric_queue
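The new bitmath dependency is what lets a human-readable limit be compared against raw byte counts. A minimal standalone sketch of that pattern, outside the diff (the '20G' value is illustrative, not taken from this commit):

import bitmath

# parse_string_unsafe() accepts lenient human-readable sizes ('20G', '500 MiB')
# and treats bare numbers as bytes.
max_layer_size = bitmath.parse_string_unsafe('20G')

# Raw byte counts can be wrapped in bitmath.Byte and compared directly against
# the parsed limit; .bytes exposes the plain numeric value.
uploaded = bitmath.Byte(21 * 10 ** 9)
if uploaded > max_layer_size:
  print('%s exceeds %s (%d bytes)' % (uploaded, max_layer_size, max_layer_size.bytes))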
@@ -14,7 +15,7 @@ from digest import digest_tools
 from endpoints.common import parse_repository_name
 from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
 from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported,
-                                 NameUnknown)
+                                 NameUnknown, LayerTooLarge)
 from endpoints.decorators import anon_protect
 from util.cache import cache_control
 from util.registry.filelike import wrap_with_handler, StreamSlice
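LayerTooLarge itself is not shown in this hunk; it is presumably added to endpoints/v2/errors.py in one of the other changed files. A hypothetical, self-contained sketch of such an error, assuming it follows the other v2 registry errors in carrying a detail payload and an HTTP status (the 413 status and message text are assumptions, not taken from this commit):

# Hypothetical sketch only -- not the class from this commit.
class LayerTooLarge(Exception):
  def __init__(self, detail=None):
    super(LayerTooLarge, self).__init__('Uploaded blob exceeds the configured maximum layer size')
    self.detail = detail or {}
    self.http_status_code = 413  # Request Entity Too Large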
@@ -346,6 +347,8 @@ def _upload_chunk(blob_upload, range_header):
 
   Returns a BlobUpload object or None if there was a failure.
   """
+  max_layer_size = bitmath.parse_string_unsafe(app.config['MAXIMUM_LAYER_SIZE'])
+
   # Get the offset and length of the current chunk.
   start_offset, length = _start_offset_and_length(range_header)
   if blob_upload is None or None in {start_offset, length}:
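The limit is read from app.config['MAXIMUM_LAYER_SIZE'] and parsed once per upload. A hedged example of what such a configuration entry could look like (the key comes from the diff; the '20G' value is only illustrative):

# Any size string bitmath.parse_string_unsafe() understands; bare numbers are bytes.
MAXIMUM_LAYER_SIZE = '20G'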
@@ -356,6 +359,16 @@ def _upload_chunk(blob_upload, range_header):
     logger.error('start_offset provided to _upload_chunk greater than blob.upload.byte_count')
     return None
 
+  # Check if we should raise 413 before accepting the data.
+  uploaded = bitmath.Byte(length + start_offset)
+  if length > -1 and uploaded > max_layer_size:
+    detail = {
+      'reason': '%s is greater than maximum allowed size %s' % (uploaded, max_layer_size),
+      'max_allowed': max_layer_size.bytes,
+      'uploaded': uploaded.bytes,
+    }
+    raise LayerTooLarge(detail=detail)
+
   location_set = {blob_upload.location_name}
 
   upload_error = None
@@ -435,6 +448,16 @@ def _upload_chunk(blob_upload, range_header):
   blob_upload.byte_count += length_written
   blob_upload.chunk_count += 1
 
+  # Ensure we have not gone beyond the max layer size.
+  upload_size = bitmath.Byte(blob_upload.byte_count)
+  if upload_size > max_layer_size:
+    detail = {
+      'reason': '%s is greater than maximum allowed size %s' % (upload_size, max_layer_size),
+      'max_allowed': max_layer_size.bytes,
+      'uploaded': upload_size.bytes,
+    }
+    raise LayerTooLarge(detail=detail)
+
   return blob_upload
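The second check runs after the chunk has been written, against the updated byte_count, so it also catches uploads whose chunk lengths were never declared up front. A small sketch of that cumulative accounting (chunk sizes and the '20G' limit are illustrative):

import bitmath

max_layer_size = bitmath.parse_string_unsafe('20G')

# Simulate chunks of undeclared length: the running byte count after each
# write is the only reliable measure of the blob's size.
byte_count = 0
for chunk_bytes in (8 * 10 ** 9, 8 * 10 ** 9, 8 * 10 ** 9):
  byte_count += chunk_bytes
  if bitmath.Byte(byte_count) > max_layer_size:
    print('reject after %d bytes (limit %d bytes)' % (byte_count, max_layer_size.bytes))
    break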