From ee0eb80c8f2d5e201e38dc041bb41b5d2c819d62 Mon Sep 17 00:00:00 2001
From: Joseph Schorr
Date: Mon, 30 Nov 2015 15:45:45 -0500
Subject: [PATCH] Fix blob content types

Fixes #990
---
 endpoints/v2/blob.py   | 6 +++++-
 storage/basestorage.py | 2 +-
 storage/cloud.py       | 4 ++--
 storage/fakestorage.py | 2 +-
 storage/local.py       | 2 +-
 storage/swift.py       | 8 ++++----
 6 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/endpoints/v2/blob.py b/endpoints/v2/blob.py
index 215732c7c..e5632c6d2 100644
--- a/endpoints/v2/blob.py
+++ b/endpoints/v2/blob.py
@@ -21,6 +21,7 @@ logger = logging.getLogger(__name__)
 BASE_BLOB_ROUTE = '/<repopath:repository>/blobs/<regex("{0}"):digest>'
 BLOB_DIGEST_ROUTE = BASE_BLOB_ROUTE.format(digest_tools.DIGEST_PATTERN)
 RANGE_HEADER_REGEX = re.compile(r'^bytes=([0-9]+)-([0-9]+)$')
+BLOB_CONTENT_TYPE = 'application/octet-stream'
 
 
 class _InvalidRangeHeader(Exception):
@@ -60,6 +61,7 @@ def check_blob_exists(namespace, repo_name, digest):
   response = make_response('')
   response.headers.extend(headers)
   response.headers['Content-Length'] = found.image_size
+  response.headers['Content-Type'] = BLOB_CONTENT_TYPE
   return response
 
 
@@ -87,6 +89,7 @@ def download_blob(namespace, repo_name, digest):
     database.close_db_filter(None)
 
   headers['Content-Length'] = found.image_size
+  headers['Content-Type'] = BLOB_CONTENT_TYPE
   return Response(storage.stream_read(found.locations, path), headers=headers)
 
 
@@ -200,7 +203,8 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
   try:
     length_written, new_metadata = storage.stream_upload_chunk({found.location.name}, upload_uuid,
                                                                start_offset, length, input_fp,
-                                                               found.storage_metadata)
+                                                               found.storage_metadata,
+                                                               content_type=BLOB_CONTENT_TYPE)
   except InvalidChunkException:
     _range_not_satisfiable(found.byte_count)
 
diff --git a/storage/basestorage.py b/storage/basestorage.py
index f8ff49919..28e49c8fa 100644
--- a/storage/basestorage.py
+++ b/storage/basestorage.py
@@ -122,7 +122,7 @@ class BaseStorageV2(BaseStorage):
     """
     raise NotImplementedError
 
-  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata):
+  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata, content_type=None):
     """ Upload the specified amount of data from the given file pointer to the chunked destination
         specified, starting at the given offset. Returns the number of bytes uploaded, and a new
         version of the storage_metadata. Raises InvalidChunkException if the offset or length can
diff --git a/storage/cloud.py b/storage/cloud.py
index 81c602471..2db756d7f 100644
--- a/storage/cloud.py
+++ b/storage/cloud.py
@@ -294,13 +294,13 @@ class _CloudStorage(BaseStorageV2):
 
     return random_uuid, metadata
 
-  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata):
+  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata, content_type=None):
     self._initialize_cloud_conn()
 
     # We are going to upload each chunk to a separate key
     chunk_path = self._rel_upload_path(str(uuid4()))
     bytes_written = self._stream_write_internal(chunk_path, in_fp, cancel_on_error=False,
-                                                size=length)
+                                                size=length, content_type=content_type)
 
     new_metadata = copy.deepcopy(storage_metadata)
 
diff --git a/storage/fakestorage.py b/storage/fakestorage.py
index 950819b92..e9f6305ea 100644
--- a/storage/fakestorage.py
+++ b/storage/fakestorage.py
@@ -54,7 +54,7 @@ class FakeStorage(BaseStorageV2):
     _FAKE_STORAGE_MAP[new_uuid].seek(0)
     return new_uuid, {}
 
-  def stream_upload_chunk(self, uuid, offset, length, in_fp, _):
+  def stream_upload_chunk(self, uuid, offset, length, in_fp, _, content_type=None):
     upload_storage = _FAKE_STORAGE_MAP[uuid]
     upload_storage.seek(offset)
     return self.stream_write_to_fp(in_fp, upload_storage, length), {}
diff --git a/storage/local.py b/storage/local.py
index 4c5873c92..7a4c0d10d 100644
--- a/storage/local.py
+++ b/storage/local.py
@@ -105,7 +105,7 @@ class LocalStorage(BaseStorageV2):
 
     return new_uuid, {}
 
-  def stream_upload_chunk(self, uuid, offset, length, in_fp, _):
+  def stream_upload_chunk(self, uuid, offset, length, in_fp, _, content_type=None):
     with open(self._init_path(self._rel_upload_path(uuid)), 'r+b') as upload_storage:
       upload_storage.seek(offset)
       return self.stream_write_to_fp(in_fp, upload_storage, length), {}
diff --git a/storage/swift.py b/storage/swift.py
index 8270b2060..71631a608 100644
--- a/storage/swift.py
+++ b/storage/swift.py
@@ -268,7 +268,7 @@ class SwiftStorage(BaseStorage):
 
     return random_uuid, metadata
 
-  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata):
+  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata, content_type=None):
     if length == 0:
       return 0, storage_metadata
 
@@ -277,7 +277,7 @@ class SwiftStorage(BaseStorage):
     total_bytes_written = 0
     while True:
       bytes_written, storage_metadata = self._stream_upload_segment(uuid, offset, length, in_fp,
-                                                                    storage_metadata)
+                                                                    storage_metadata, content_type)
 
       if length != filelike.READ_UNTIL_END:
         length = length - bytes_written
@@ -287,7 +287,7 @@ class SwiftStorage(BaseStorage):
       if bytes_written == 0 or length <= 0:
         return total_bytes_written, storage_metadata
 
-  def _stream_upload_segment(self, uuid, offset, length, in_fp, storage_metadata):
+  def _stream_upload_segment(self, uuid, offset, length, in_fp, storage_metadata, content_type):
     updated_metadata = copy.deepcopy(storage_metadata)
     segment_count = len(updated_metadata[_SEGMENTS_KEY])
     segment_path = '%s/%s/%s' % (_SEGMENT_DIRECTORY, uuid, segment_count)
@@ -302,7 +302,7 @@ class SwiftStorage(BaseStorage):
     limiting_fp = filelike.LimitingStream(in_fp, length)
 
     # Write the segment to Swift.
-    self.stream_write(segment_path, limiting_fp, content_type)
+    self.stream_write(segment_path, limiting_fp, content_type)
 
     # We are only going to track keys to which data was confirmed written.
     bytes_written = limiting_fp.tell()
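
A quick sanity check, separate from the patch itself: the header half of the change is observable over HTTP. The sketch below assumes the requests library and a reachable registry that already holds a blob; REGISTRY, REPO, and DIGEST are hypothetical placeholders, not values taken from this patch.

  # Hypothetical smoke test: blob HEAD and GET should report octet-stream.
  import requests

  REGISTRY = 'http://localhost:5000'  # hypothetical registry endpoint
  REPO = 'devtable/simple'            # hypothetical repository
  DIGEST = 'sha256:' + '0' * 64       # hypothetical blob digest

  url = '{0}/v2/{1}/blobs/{2}'.format(REGISTRY, REPO, DIGEST)

  # Both the existence check (HEAD) and the download (GET) gained the
  # Content-Type header in this patch, so both should now report it.
  for method in (requests.head, requests.get):
      resp = method(url)
      assert resp.headers.get('Content-Type') == 'application/octet-stream'

The storage-side plumbing exists mainly for Swift, which writes each chunk as a separate segment object and therefore needs the content type at every stream_write call; the cloud backend forwards it to _stream_write_internal, while the local and fake backends accept the new keyword but ignore it.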