From c9e16487817126b04d9b64085ed46a7e9ed35632 Mon Sep 17 00:00:00 2001
From: Jake Moshenko <jake@devtable.com>
Date: Tue, 9 Sep 2014 18:30:14 -0400
Subject: [PATCH] Fix small bugs in the streaming handler for use with magic
 and radosgw.

---
 config.py         |  8 ++++----
 data/userfiles.py |  6 +++++-
 storage/cloud.py  | 20 +++++++++++++-------
 storage/local.py  |  4 +++-
 4 files changed, 25 insertions(+), 13 deletions(-)

diff --git a/config.py b/config.py
index f797cb36a..ffcf7f79e 100644
--- a/config.py
+++ b/config.py
@@ -89,10 +89,6 @@ class DefaultConfig(object):
   # Stripe config
   BILLING_TYPE = 'FakeStripe'
 
-  # Userfiles
-  USERFILES_TYPE = 'LocalUserfiles'
-  USERFILES_PATH = 'test/data/registry/userfiles'
-
   # Analytics
   ANALYTICS_TYPE = 'FakeAnalytics'
 
@@ -172,3 +168,7 @@ class DefaultConfig(object):
   }
 
   DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
+
+  # Userfiles
+  USERFILES_LOCATION = 'local_us'
+  USERFILES_PATH = 'userfiles/'
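
This replaces the dedicated LocalUserfiles backend with a location/prefix
pair resolved against the existing distributed storage engines. A minimal,
hypothetical sketch of how such settings might be consumed (Config and
userfile_path below are illustrative, not Quay's actual helpers):

    import posixpath

    class Config(object):
        # Mirrors the new defaults above: userfiles live under a prefix
        # inside an already-configured distributed storage location.
        DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
        USERFILES_LOCATION = 'local_us'
        USERFILES_PATH = 'userfiles/'

    def userfile_path(config, file_id):
        # e.g. 'userfiles/<uuid>', stored via the 'local_us' engine
        return posixpath.join(config.USERFILES_PATH, file_id)

    print(userfile_path(Config, 'abc123'))  # -> userfiles/abc123
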
diff --git a/data/userfiles.py b/data/userfiles.py
index c3113802f..7ee7726e4 100644
--- a/data/userfiles.py
+++ b/data/userfiles.py
@@ -5,6 +5,8 @@ import magic
 from uuid import uuid4
 from flask import url_for, request, send_file, make_response, abort
 from flask.views import View
+from io import BufferedReader
+
 
 logger = logging.getLogger(__name__)
 
@@ -22,7 +24,9 @@ class UserfilesHandlers(View):
     path = self._files.get_file_id_path(file_id)
     try:
       file_stream = self._storage.stream_read_file(self._locations, path)
-      return send_file(file_stream)
+      buffered = BufferedReader(file_stream)
+      file_header_bytes = buffered.peek(1024)
+      return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
     except IOError:
       abort(404)
 
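
The download handler now wraps the storage stream in io.BufferedReader so it
can peek at the first bytes for MIME detection with python-magic without
consuming the body that send_file will stream. A standalone sketch of the
same peek-then-serve pattern (the file path is only an example, and the
module-level magic.from_buffer(..., mime=True) call stands in for the
handler's Magic instance):

    import io
    import magic

    def sniff_and_stream(raw_stream):
        # peek() returns buffered bytes without advancing the stream, so the
        # full body can still be read afterwards.
        buffered = io.BufferedReader(raw_stream)
        header = buffered.peek(1024)
        return magic.from_buffer(header, mime=True), buffered

    # buffering=0 yields a raw FileIO object, standing in for the storage
    # engine's stream_read_file() result.
    mime, stream = sniff_and_stream(io.open('setup.py', mode='rb', buffering=0))
    print(mime)                # e.g. 'text/x-python'
    print(len(stream.read()))  # the peeked bytes are not lost
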
diff --git a/storage/cloud.py b/storage/cloud.py
index 28325e187..0d2028e1b 100644
--- a/storage/cloud.py
+++ b/storage/cloud.py
@@ -7,23 +7,19 @@ import boto.gs.connection
 import boto.s3.key
 import boto.gs.key
 
+from io import UnsupportedOperation, BufferedIOBase
+
 from storage.basestorage import BaseStorage
 
 
 logger = logging.getLogger(__name__)
 
 
-class StreamReadKeyAsFile(object):
+class StreamReadKeyAsFile(BufferedIOBase):
   def __init__(self, key):
     self._key = key
     self._finished = False
 
-  def __enter__(self):
-    return self
-
-  def __exit__(self, type, value, tb):
-    self._key.close(fast=True)
-
   def read(self, amt=None):
     if self._finished:
       return None
@@ -33,6 +29,16 @@ class StreamReadKeyAsFile(object):
       self._finished = True
     return resp
 
+  def readable(self):
+    return True
+
+  @property
+  def closed(self):
+    return self._key.closed
+
+  def close(self):
+    self._key.close(fast=True)
+
 
 class _CloudStorage(BaseStorage):
   def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path,
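
Subclassing io.BufferedIOBase gives the boto key wrapper the file-like
interface that io.BufferedReader and send_file expect, so only read(),
readable(), closed and close() have to be provided. A minimal sketch of the
same adapter idea, with a hypothetical ChunkSource standing in for a boto
key (unlike the code above, the sketch returns b'' at EOF, the usual io
convention):

    from io import BufferedIOBase, BufferedReader

    class ReadOnlyAdapter(BufferedIOBase):
        """Adapts any object exposing read([n]) to the io interface."""
        def __init__(self, source):
            self._source = source
            self._done = False

        def readable(self):
            return True

        def read(self, amt=None):
            if self._done:
                return b''  # EOF convention for io streams
            data = self._source.read(amt)
            if not data:
                self._done = True
                return b''
            return data

    class ChunkSource(object):
        """Stand-in for something like a boto Key: only read() is available."""
        def __init__(self, payload):
            self._buf = payload

        def read(self, amt=None):
            amt = len(self._buf) if amt is None else amt
            chunk, self._buf = self._buf[:amt], self._buf[amt:]
            return chunk

    reader = BufferedReader(ReadOnlyAdapter(ChunkSource(b'x' * 5000)))
    print(reader.peek(16)[:16])  # peeking works once readable()/read() exist
    print(len(reader.read()))    # 5000 -- the peek did not consume anything
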
diff --git a/storage/local.py b/storage/local.py
index a800645a8..987431e33 100644
--- a/storage/local.py
+++ b/storage/local.py
@@ -1,5 +1,7 @@
 import os
 import shutil
+import hashlib
+import io
 
 from storage.basestorage import BaseStorage
 
@@ -39,7 +41,7 @@ class LocalStorage(BaseStorage):
 
   def stream_read_file(self, path):
     path = self._init_path(path)
-    return open(path, mode='rb')
+    return io.open(path, mode='rb')
 
   def stream_write(self, path, fp, content_type=None):
     # Size is mandatory
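
Switching from the built-in open() to io.open() matters on Python 2: the
built-in returns a plain file object without peek(), while io.open(..., 'rb')
returns an io.BufferedReader, which is what the userfiles handler's
peek-based MIME detection expects. A quick illustration (the path is only an
example):

    import io

    with io.open('setup.py', mode='rb') as fp:
        print(type(fp))         # <type '_io.BufferedReader'> on Python 2
        header = fp.peek(1024)  # look at the head of the file...
        body = fp.read()        # ...without consuming it
        print(body.startswith(header[:64]))  # True
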