Merge remote-tracking branch 'origin/pullredirect'

root 2013-12-04 00:39:55 +00:00
commit 8f5494fbfd
4 changed files with 50 additions and 14 deletions

View file

@@ -4,11 +4,6 @@ import logging
 from boto.s3.key import Key
 from uuid import uuid4
 
-import hmac
-import time
-import urllib
-import base64
-import sha
 
 
 logger = logging.getLogger(__name__)
@@ -20,15 +15,23 @@ class S3FileWriteException(Exception):
 
 class UserRequestFiles(object):
   def __init__(self, s3_access_key, s3_secret_key, bucket_name):
-    self._s3_conn = boto.connect_s3(s3_access_key, s3_secret_key)
+    self._initialized = False
     self._bucket_name = bucket_name
-    self._bucket = self._s3_conn.get_bucket(bucket_name)
     self._access_key = s3_access_key
     self._secret_key = s3_secret_key
     self._prefix = 'userfiles'
+    self._s3_conn = None
+    self._bucket = None
+
+  def _initialize_s3(self):
+    if not self._initialized:
+      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
+      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
+      self._initialized = True
 
   def prepare_for_drop(self, mime_type):
     """ Returns a signed URL to upload a file to our bucket. """
+    self._initialize_s3()
     logger.debug('Requested upload url with content type: %s' % mime_type)
     file_id = str(uuid4())
     full_key = os.path.join(self._prefix, file_id)
@@ -38,6 +41,7 @@ class UserRequestFiles(object):
     return (url, file_id)
 
   def store_file(self, flask_file):
+    self._initialize_s3()
     file_id = str(uuid4())
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
@@ -51,6 +55,7 @@ class UserRequestFiles(object):
     return file_id
 
   def get_file_url(self, file_id, expires_in=300):
+    self._initialize_s3()
    full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
     return k.generate_url(expires_in)
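A minimal usage sketch of the lazily initialized class above (the credentials, bucket name, and MIME type are placeholders, and requests is just one way a client might perform the upload):

  files = UserRequestFiles('MY_ACCESS_KEY', 'MY_SECRET_KEY', 'my-bucket')
  # Constructing the object performs no network I/O; the first S3 call
  # happens inside prepare_for_drop() via _initialize_s3().
  url, file_id = files.prepare_for_drop('application/octet-stream')
  # The client then PUTs the bytes straight to S3, e.g.:
  #   requests.put(url, data=file_bytes,
  #                headers={'Content-Type': 'application/octet-stream'})

This appears to be the point of the change: processes can import and construct these objects without S3 connectivity, paying the connection cost only on first use.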

View file

@@ -1,7 +1,7 @@
 import logging
 import json
 
-from flask import make_response, request, session, Response, abort
+from flask import make_response, request, session, Response, abort, redirect
 from functools import wraps
 from datetime import datetime
 from time import time
@@ -80,9 +80,12 @@ def set_cache_headers(f):
 def get_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)
   if permission.can() or model.repository_is_public(namespace, repository):
+    path = store.image_layer_path(namespace, repository, image_id)
+    direct_download_url = store.get_direct_download_url(path)
+    if direct_download_url:
+      return redirect(direct_download_url)
     try:
-      return Response(store.stream_read(store.image_layer_path(
-        namespace, repository, image_id)), headers=headers)
+      return Response(store.stream_read(path), headers=headers)
     except IOError:
       abort(404) # 'Image not found', 404)
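A sketch of the resulting download flow (the route and hostnames are illustrative, not from this commit): when the storage backend can sign URLs, the endpoint now answers

  GET <layer endpoint>
  -> 302 FOUND
     Location: https://<bucket>.s3.amazonaws.com/<path>?Signature=...&Expires=...

and the client fetches the layer bytes from S3 directly. When get_direct_download_url() returns None (the base-class default, below), the original streaming path through the application server is used unchanged.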

View file

@@ -65,6 +65,9 @@ class Storage(object):
     return '{0}/{1}/{2}/{3}/diffs.json'.format(self.images, namespace,
                                                repository, image_id)
 
+  def get_direct_download_url(self, path, expires_in=60):
+    return None
+
   def get_content(self, path):
     raise NotImplementedError
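Returning None here is the fallback signal: callers such as get_image_layer() above treat it as "no direct URL available" and stream the content themselves. A hypothetical subclass illustrating the override (the name and URL scheme are invented for illustration, not part of this commit):

  class CDNStorage(Storage):
    def get_direct_download_url(self, path, expires_in=60):
      # Hypothetical CDN endpoint that honors an expiry query parameter.
      return 'https://cdn.example.com/%s?expires=%d' % (path, expires_in)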

View file

@@ -5,7 +5,7 @@ import logging
 import boto.s3.connection
 import boto.s3.key
 
-from basestorage import Storage
+from storage.basestorage import Storage
 
 
 logger = logging.getLogger(__name__)
@@ -35,10 +35,20 @@ class StreamReadKeyAsFile(object):
 
 class S3Storage(Storage):
   def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
-    self._s3_conn = \
-      boto.s3.connection.S3Connection(s3_access_key, s3_secret_key)
-    self._s3_bucket = self._s3_conn.get_bucket(s3_bucket)
+    self._initialized = False
+    self._bucket = s3_bucket
+    self._access_key = s3_access_key
+    self._secret_key = s3_secret_key
     self._root_path = storage_path
+    self._s3_conn = None
+    self._s3_bucket = None
+
+  def _initialize_s3(self):
+    if not self._initialized:
+      self._s3_conn = boto.s3.connection.S3Connection(self._access_key,
+                                                      self._secret_key)
+      self._s3_bucket = self._s3_conn.get_bucket(self._bucket)
+      self._initialized = True
 
   def _debug_key(self, key):
     """Used for debugging only."""
@@ -59,6 +69,7 @@ class S3Storage(Storage):
     return path
 
   def get_content(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -66,12 +77,20 @@
     return key.get_contents_as_string()
 
   def put_content(self, path, content):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     key.set_contents_from_string(content, encrypt_key=True)
     return path
 
+  def get_direct_download_url(self, path, expires_in=60):
+    self._initialize_s3()
+    path = self._init_path(path)
+    k = boto.s3.key.Key(self._s3_bucket, path)
+    return k.generate_url(expires_in)
+
   def stream_read(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -83,6 +102,7 @@ class S3Storage(Storage):
       yield buf
 
   def stream_read_file(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -91,6 +111,7 @@
 
   def stream_write(self, path, fp):
     # Minimum size of upload part size on S3 is 5MB
+    self._initialize_s3()
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
@@ -111,6 +132,7 @@
     mp.complete_upload()
 
   def list_directory(self, path=None):
+    self._initialize_s3()
     path = self._init_path(path)
     if not path.endswith('/'):
       path += '/'
@@ -131,11 +153,13 @@
       raise OSError('No such directory: \'{0}\''.format(path))
 
   def exists(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     return key.exists()
 
   def remove(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if key.exists():
@@ -149,6 +173,7 @@
     key.delete()
 
   def get_size(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     # Lookup does a HEAD HTTP Request on the object
     key = self._s3_bucket.lookup(path)
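For reference, a standalone sketch of what the new get_direct_download_url() does with boto 2 (the credentials, bucket, and key names are placeholders):

  import boto.s3.connection
  import boto.s3.key

  conn = boto.s3.connection.S3Connection('MY_ACCESS_KEY', 'MY_SECRET_KEY')
  bucket = conn.get_bucket('my-bucket')
  key = boto.s3.key.Key(bucket, 'some/layer/path')

  # Query-string-authenticated GET URL, valid for 60 seconds:
  url = key.generate_url(60)
  # e.g. https://my-bucket.s3.amazonaws.com/some/layer/path
  #        ?Signature=...&Expires=...&AWSAccessKeyId=...

Because the URL embeds its own signature and expiry, the registry can hand it to a client without exposing the bucket credentials.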