diff --git a/data/userfiles.py b/data/userfiles.py
index 6330ec207..86ddb62a2 100644
--- a/data/userfiles.py
+++ b/data/userfiles.py
@@ -4,11 +4,6 @@ import logging
 from boto.s3.key import Key
 from uuid import uuid4
 
-import hmac
-import time
-import urllib
-import base64
-import sha
 
 logger = logging.getLogger(__name__)
 
@@ -20,15 +15,23 @@ class S3FileWriteException(Exception):
 
 class UserRequestFiles(object):
   def __init__(self, s3_access_key, s3_secret_key, bucket_name):
-    self._s3_conn = boto.connect_s3(s3_access_key, s3_secret_key)
+    self._initialized = False
     self._bucket_name = bucket_name
-    self._bucket = self._s3_conn.get_bucket(bucket_name)
     self._access_key = s3_access_key
     self._secret_key = s3_secret_key
     self._prefix = 'userfiles'
+    self._s3_conn = None
+    self._bucket = None
+
+  def _initialize_s3(self):
+    if not self._initialized:
+      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
+      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
+      self._initialized = True
 
   def prepare_for_drop(self, mime_type):
     """ Returns a signed URL to upload a file to our bucket. """
+    self._initialize_s3()
     logger.debug('Requested upload url with content type: %s' % mime_type)
     file_id = str(uuid4())
     full_key = os.path.join(self._prefix, file_id)
@@ -38,6 +41,7 @@ class UserRequestFiles(object):
     return (url, file_id)
 
   def store_file(self, flask_file):
+    self._initialize_s3()
     file_id = str(uuid4())
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
@@ -51,6 +55,7 @@ class UserRequestFiles(object):
     return file_id
 
   def get_file_url(self, file_id, expires_in=300):
+    self._initialize_s3()
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
     return k.generate_url(expires_in)
diff --git a/endpoints/registry.py b/endpoints/registry.py
index d4fadce53..e6b34493e 100644
--- a/endpoints/registry.py
+++ b/endpoints/registry.py
@@ -1,7 +1,7 @@
 import logging
 import json
 
-from flask import make_response, request, session, Response, abort
+from flask import make_response, request, session, Response, abort, redirect
 from functools import wraps
 from datetime import datetime
 from time import time
@@ -80,9 +80,12 @@ def set_cache_headers(f):
 def get_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)
   if permission.can() or model.repository_is_public(namespace, repository):
+    path = store.image_layer_path(namespace, repository, image_id)
+    direct_download_url = store.get_direct_download_url(path)
+    if direct_download_url:
+      return redirect(direct_download_url)
     try:
-      return Response(store.stream_read(store.image_layer_path(
-          namespace, repository, image_id)), headers=headers)
+      return Response(store.stream_read(path), headers=headers)
     except IOError:
       abort(404)  # 'Image not found', 404)
 
diff --git a/storage/basestorage.py b/storage/basestorage.py
index f38b39790..94d1af659 100644
--- a/storage/basestorage.py
+++ b/storage/basestorage.py
@@ -65,6 +65,9 @@ class Storage(object):
     return '{0}/{1}/{2}/{3}/diffs.json'.format(self.images, namespace,
                                                repository, image_id)
 
+  def get_direct_download_url(self, path, expires_in=60):
+    return None
+
   def get_content(self, path):
     raise NotImplementedError
 
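The change to `UserRequestFiles` above (and to `S3Storage` below) is plain lazy initialization: the constructor only records credentials, and the first method that actually touches S3 opens the connection, so objects can be constructed without network I/O or valid AWS credentials. A minimal, runnable sketch of the pattern; `LazyClient` and `FakeConnection` are illustrative stand-ins, not part of this codebase:

```python
class FakeConnection(object):
  """Stand-in for boto's S3Connection so this sketch runs without AWS."""
  def __init__(self, access_key, secret_key):
    print('connected as %s' % access_key)


class LazyClient(object):
  def __init__(self, access_key, secret_key):
    # The constructor does no network I/O; it only records credentials.
    self._initialized = False
    self._access_key = access_key
    self._secret_key = secret_key
    self._conn = None

  def _initialize(self):
    # Connect on first use; every later call is a cheap no-op.
    if not self._initialized:
      self._conn = FakeConnection(self._access_key, self._secret_key)
      self._initialized = True

  def get_content(self, path):
    self._initialize()
    return 'contents of %s' % path


client = LazyClient('access', 'secret')     # no connection made yet
print(client.get_content('userfiles/abc'))  # connects here, exactly once
```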
diff --git a/storage/s3.py b/storage/s3.py
index 10e7bb50d..486c08313 100644
--- a/storage/s3.py
+++ b/storage/s3.py
@@ -5,7 +5,7 @@ import logging
 import boto.s3.connection
 import boto.s3.key
 
-from basestorage import Storage
+from storage.basestorage import Storage
 
 logger = logging.getLogger(__name__)
 
@@ -35,10 +35,20 @@ class StreamReadKeyAsFile(object):
 
 class S3Storage(Storage):
   def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
-    self._s3_conn = \
-        boto.s3.connection.S3Connection(s3_access_key, s3_secret_key)
-    self._s3_bucket = self._s3_conn.get_bucket(s3_bucket)
+    self._initialized = False
+    self._bucket = s3_bucket
+    self._access_key = s3_access_key
+    self._secret_key = s3_secret_key
     self._root_path = storage_path
+    self._s3_conn = None
+    self._s3_bucket = None
+
+  def _initialize_s3(self):
+    if not self._initialized:
+      self._s3_conn = boto.s3.connection.S3Connection(self._access_key,
+                                                      self._secret_key)
+      self._s3_bucket = self._s3_conn.get_bucket(self._bucket)
+      self._initialized = True
 
   def _debug_key(self, key):
     """Used for debugging only."""
@@ -59,6 +69,7 @@ class S3Storage(Storage):
     return path
 
   def get_content(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -66,12 +77,20 @@ class S3Storage(Storage):
     return key.get_contents_as_string()
 
   def put_content(self, path, content):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     key.set_contents_from_string(content, encrypt_key=True)
     return path
 
+  def get_direct_download_url(self, path, expires_in=60):
+    self._initialize_s3()
+    path = self._init_path(path)
+    k = boto.s3.key.Key(self._s3_bucket, path)
+    return k.generate_url(expires_in)
+
   def stream_read(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -83,6 +102,7 @@ class S3Storage(Storage):
       yield buf
 
   def stream_read_file(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -91,6 +111,7 @@ class S3Storage(Storage):
 
   def stream_write(self, path, fp):
     # Minimum size of upload part size on S3 is 5MB
+    self._initialize_s3()
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
@@ -111,6 +132,7 @@ class S3Storage(Storage):
     mp.complete_upload()
 
   def list_directory(self, path=None):
+    self._initialize_s3()
     path = self._init_path(path)
     if not path.endswith('/'):
       path += '/'
@@ -131,11 +153,13 @@ class S3Storage(Storage):
       raise OSError('No such directory: \'{0}\''.format(path))
 
   def exists(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     return key.exists()
 
   def remove(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if key.exists():
@@ -149,6 +173,7 @@ class S3Storage(Storage):
       key.delete()
 
   def get_size(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     # Lookup does a HEAD HTTP Request on the object
     key = self._s3_bucket.lookup(path)
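The other half of the change is the direct-download contract: `Storage.get_direct_download_url` returns `None` by default, `S3Storage` overrides it with a pre-signed URL from boto's `Key.generate_url`, and `get_image_layer` redirects when a URL comes back instead of streaming the layer through the app. A minimal sketch of that contract; `BaseStore`, `SignedUrlStore`, and the URL format are hypothetical stand-ins:

```python
class BaseStore(object):
  def get_direct_download_url(self, path, expires_in=60):
    return None  # default: this backend cannot serve clients directly

  def stream_read(self, path):
    yield 'bytes of %s' % path


class SignedUrlStore(BaseStore):
  def get_direct_download_url(self, path, expires_in=60):
    # Real backend: boto.s3.key.Key(bucket, path).generate_url(expires_in)
    return 'https://bucket.s3.amazonaws.com/%s?Expires=%d' % (path, expires_in)


def serve(store, path):
  url = store.get_direct_download_url(path)
  if url is not None:
    return 'redirect -> %s' % url           # Flask: return redirect(url)
  return ''.join(store.stream_read(path))   # Flask: Response(stream_read(...))


print(serve(BaseStore(), 'images/abc/layer'))       # streams through the app
print(serve(SignedUrlStore(), 'images/abc/layer'))  # redirects to the bucket
```

Defaulting to `None` keeps the old streaming path untouched for backends without signed-URL support, while the S3 backend offloads large layer downloads to the bucket itself.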