import boto
import os
import logging
import hashlib
import magic

from boto.s3.key import Key
from uuid import uuid4
from flask import url_for, request, send_file, make_response, abort
from flask.views import View


logger = logging.getLogger(__name__)


class FakeUserfiles(object):
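  """ Stub userfiles implementation that returns canned values, useful for tests. """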
  def prepare_for_drop(self, mime_type):
    return ('http://fake/url', str(uuid4()))

  def store_file(self, file_like_obj, content_type):
    raise NotImplementedError()

  def get_file_url(self, file_id, expires_in=300):
    return 'http://fake/url'

  def get_file_checksum(self, file_id):
    return 'abcdefg'


class S3FileWriteException(Exception):
  pass


class S3Userfiles(object):
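  """ Userfiles implementation backed by an S3 bucket, accessed through boto.
      The S3 connection is established lazily on first use. """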
  def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
    self._initialized = False
    self._bucket_name = bucket_name
    self._access_key = s3_access_key
    self._secret_key = s3_secret_key
    self._prefix = path
    self._s3_conn = None
    self._bucket = None

  def _initialize_s3(self):
    if not self._initialized:
      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
      self._initialized = True

  def prepare_for_drop(self, mime_type):
    """ Returns a signed URL to upload a file to our bucket. """
    self._initialize_s3()
    logger.debug('Requested upload url with content type: %s', mime_type)
    file_id = str(uuid4())
    full_key = os.path.join(self._prefix, file_id)
    k = Key(self._bucket, full_key)
    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
                         encrypt_key=True)
    return (url, file_id)
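
  # A caller can then upload directly to S3 by PUTing the file bytes to the
  # returned URL with a matching Content-Type header; a sketch using the
  # `requests` library (an assumption, not a dependency of this module):
  #   requests.put(url, data=file_bytes, headers={'Content-Type': mime_type})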

  def store_file(self, file_like_obj, content_type):
    self._initialize_s3()
    file_id = str(uuid4())
    full_key = os.path.join(self._prefix, file_id)
    k = Key(self._bucket, full_key)
    logger.debug('Setting s3 content type to: %s', content_type)
    k.set_metadata('Content-Type', content_type)
    bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
                                             rewind=True)

    if bytes_written == 0:
      raise S3FileWriteException('Unable to write file to S3')

    return file_id

  def get_file_url(self, file_id, expires_in=300, mime_type=None):
    self._initialize_s3()
    full_key = os.path.join(self._prefix, file_id)
    k = Key(self._bucket, full_key)
    headers = None
    if mime_type:
      headers = {'Content-Type': mime_type}

    return k.generate_url(expires_in, headers=headers)

  def get_file_checksum(self, file_id):
    self._initialize_s3()
    full_key = os.path.join(self._prefix, file_id)
    k = self._bucket.lookup(full_key)
    if k is None:
      raise IOError('No such key: %s' % full_key)
    # S3 returns the etag wrapped in quotes; strip them before truncating.
    return k.etag[1:-1][:7]


class UserfilesHandlers(View):
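  """ Flask view that serves downloads (GET) and accepts uploads (PUT) for
      files managed by LocalUserfiles. """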
  methods = ['GET', 'PUT']

  def __init__(self, local_userfiles):
    self._userfiles = local_userfiles
    self._magic = magic.Magic(mime=True)

  def get(self, file_id):
    path = self._userfiles.file_path(file_id)
    if not os.path.exists(path):
      abort(404)

    logger.debug('Sending path: %s', path)
    return send_file(path, mimetype=self._magic.from_file(path))

  def put(self, file_id):
    input_stream = request.stream
    if request.headers.get('transfer-encoding') == 'chunked':
      # Careful: reading a chunked body this way only works under WSGI servers
      # that support chunked transfer encoding (e.g. gunicorn).
      input_stream = request.environ['wsgi.input']

    self._userfiles.store_stream(input_stream, file_id)

    return make_response('Okay')

  def dispatch_request(self, file_id):
    if request.method == 'GET':
      return self.get(file_id)
    elif request.method == 'PUT':
      return self.put(file_id)


class LocalUserfiles(object):
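  """ Userfiles implementation backed by the local filesystem; uploads and
      downloads are served over HTTP by UserfilesHandlers. """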
  def __init__(self, app, path):
    self._root_path = path
    self._buffer_size = 64 * 1024  # 64 KB
    self._app = app

  def _build_url_adapter(self):
    return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
                                  script_name=self._app.config['APPLICATION_ROOT'] or '/',
                                  url_scheme=self._app.config['PREFERRED_URL_SCHEME'])

  def prepare_for_drop(self, mime_type):
    file_id = str(uuid4())
    with self._app.app_context() as ctx:
      ctx.url_adapter = self._build_url_adapter()
      return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)

  def file_path(self, file_id):
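    # Reject ids that could escape the root path via path traversal.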
    if '..' in file_id or file_id.startswith('/'):
      raise RuntimeError('Invalid Filename')
    return os.path.join(self._root_path, file_id)

  def store_stream(self, stream, file_id):
    path = self.file_path(file_id)
    dirname = os.path.dirname(path)
    if not os.path.exists(dirname):
      os.makedirs(dirname)

    with open(path, 'wb') as to_write:
      while True:
        try:
          buf = stream.read(self._buffer_size)
          if not buf:
            break
          to_write.write(buf)
        except IOError:
          break

  def store_file(self, file_like_obj, content_type):
    file_id = str(uuid4())

    # Rewind the file to match what S3 does
    file_like_obj.seek(0, os.SEEK_SET)

    self.store_stream(file_like_obj, file_id)
    return file_id

  def get_file_url(self, file_id, expires_in=300):
    with self._app.app_context() as ctx:
      ctx.url_adapter = self._build_url_adapter()
      return url_for('userfiles_handlers', file_id=file_id, _external=True)

  def get_file_checksum(self, file_id):
    path = self.file_path(file_id)
    sha_hash = hashlib.sha256()
    with open(path, 'rb') as to_hash:
      while True:
        buf = to_hash.read(self._buffer_size)
        if not buf:
          break
        sha_hash.update(buf)
    return sha_hash.hexdigest()[:7]


class Userfiles(object):
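  """ Flask extension that selects a storage backend from the app config and
      proxies attribute access to it. """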
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
    path = app.config.get('USERFILES_PATH', '')

    if storage_type == 'LocalUserfiles':
      userfiles = LocalUserfiles(app, path)
      app.add_url_rule('/userfiles/<file_id>',
                       view_func=UserfilesHandlers.as_view('userfiles_handlers',
                                                           local_userfiles=userfiles))

    elif storage_type == 'S3Userfiles':
      access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
      secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
      bucket = app.config.get('USERFILES_S3_BUCKET', '')
      userfiles = S3Userfiles(path, access_key, secret_key, bucket)

    elif storage_type == 'FakeUserfiles':
      userfiles = FakeUserfiles()

    else:
      raise RuntimeError('Unknown userfiles type: %s' % storage_type)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['userfiles'] = userfiles
    return userfiles

  def __getattr__(self, name):
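    # Proxy attribute access to the configured backend; missing attributes
    # resolve to None rather than raising AttributeError.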
    return getattr(self.state, name, None)
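

# Example wiring (a sketch: the config keys match those read in init_app;
# the values themselves are illustrative assumptions):
#
#   app.config['USERFILES_TYPE'] = 'LocalUserfiles'
#   app.config['USERFILES_PATH'] = '/var/lib/userfiles'
#
#   userfiles = Userfiles(app)
#   url, file_id = userfiles.prepare_for_drop('application/octet-stream')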