Merge remote-tracking branch 'origin/master' into waltermitty

Conflicts:
	app.py
	data/userfiles.py
Author: Jake Moshenko
Date:   2014-09-11 11:18:28 -04:00
Commit: 2455c17f96
23 changed files with 232 additions and 241 deletions


@@ -1,10 +1,10 @@
-FROM phusion/baseimage:0.9.11
+FROM phusion/baseimage:0.9.13
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
 # Install the dependencies.
-RUN apt-get update # 21AUG2014
+RUN apt-get update # 10SEP2014
 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
 RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev


@@ -1,10 +1,10 @@
-FROM phusion/baseimage:0.9.11
+FROM phusion/baseimage:0.9.13
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
 # Install the dependencies.
-RUN apt-get update # 21AUG2014
+RUN apt-get update # 10SEP2014
 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
 RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev

app.py

@@ -89,8 +89,8 @@ Principal(app, use_sessions=False)
 login_manager = LoginManager(app)
 mail = Mail(app)
 storage = Storage(app)
-userfiles = Userfiles(app)
-log_archive = LogArchive(app)
+userfiles = Userfiles(app, storage)
+log_archive = LogArchive(app, storage)
 analytics = Analytics(app)
 billing = Billing(app)
 sentry = Sentry(app)


@@ -83,20 +83,12 @@ class DefaultConfig(object):
   BUILDLOGS_REDIS_HOSTNAME = 'logs.quay.io'
   BUILDLOGS_OPTIONS = []
 
-  # Build logs archive
-  LOG_ARCHIVE_TYPE = 'LocalArchivedLogs'
-  LOG_ARCHIVE_PATH = 'test/data/registry/logarchive'
-
   # Real-time user events
   USER_EVENTS_REDIS_HOSTNAME = 'logs.quay.io'
 
   # Stripe config
   BILLING_TYPE = 'FakeStripe'
 
-  # Userfiles
-  USERFILES_TYPE = 'LocalUserfiles'
-  USERFILES_PATH = 'test/data/registry/userfiles'
-
   # Analytics
   ANALYTICS_TYPE = 'FakeAnalytics'
@@ -176,3 +168,11 @@ class DefaultConfig(object):
   }
 
   DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
+
+  # Userfiles
+  USERFILES_LOCATION = 'local_us'
+  USERFILES_PATH = 'userfiles/'
+
+  # Build logs archive
+  LOG_ARCHIVE_LOCATION = 'local_us'
+  LOG_ARCHIVE_PATH = 'logarchive/'
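
For illustration, a minimal sketch (hypothetical standalone snippet; names taken from the settings above) of how the new location-based keys are meant to resolve: the *_LOCATION value names one of the configured distributed-storage locations (here 'local_us'), and the *_PATH value becomes a prefix joined with each generated file id.

    import os
    from uuid import uuid4

    # Hypothetical values mirroring the DefaultConfig additions above.
    USERFILES_LOCATION = 'local_us'   # which distributed-storage location to use
    USERFILES_PATH = 'userfiles/'     # prefix inside that location

    file_id = str(uuid4())
    # DelegateUserfiles.get_file_id_path (introduced below) joins prefix and id,
    # so a userfile lands at 'userfiles/<uuid>' within the 'local_us' engine.
    blob_path = os.path.join(USERFILES_PATH, file_id)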


@@ -1,39 +1,31 @@
-from data.userfiles import LocalUserfiles, UserfilesHandlers, S3Userfiles, FakeUserfiles
+from data.userfiles import DelegateUserfiles, UserfilesHandlers
 
 
 class LogArchive(object):
-  def __init__(self, app=None):
+  def __init__(self, app=None, distributed_storage=None):
     self.app = app
     if app is not None:
-      self.state = self.init_app(app)
+      self.state = self.init_app(app, distributed_storage)
     else:
       self.state = None
 
-  def init_app(self, app):
-    storage_type = app.config.get('LOG_ARCHIVE_TYPE', 'LocalArchivedLogs')
-    path = app.config.get('LOG_ARCHIVE_PATH', '')
-
-    if storage_type == 'LocalArchivedLogs':
-      archive = LocalUserfiles(app, path)
-      app.add_url_rule('/archivedlogs/<file_id>',
-                       view_func=UserfilesHandlers.as_view('log_archive_handlers',
-                                                           local_userfiles=archive))
-
-    elif storage_type == 'S3ArchivedLogs':
-      access_key = app.config.get('LOG_ARCHIVE_AWS_ACCESS_KEY', '')
-      secret_key = app.config.get('LOG_ARCHIVE_AWS_SECRET_KEY', '')
-      bucket = app.config.get('LOG_ARCHIVE_S3_BUCKET', '')
-      archive = S3Userfiles(path, access_key, secret_key, bucket)
-
-    elif storage_type == 'FakeArchivedLogs':
-      archive = FakeUserfiles()
-
-    else:
-      raise RuntimeError('Unknown log archive type: %s' % storage_type)
+  def init_app(self, app, distributed_storage):
+    location = app.config.get('LOG_ARCHIVE_LOCATION')
+    path = app.config.get('LOG_ARCHIVE_PATH', None)
+    handler_name = 'logarchive_handlers'
+
+    log_archive = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
+
+    app.add_url_rule('/logarchive/<file_id>',
+                     view_func=UserfilesHandlers.as_view(handler_name,
+                                                         distributed_storage=distributed_storage,
+                                                         location=location,
+                                                         files=log_archive))
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
-    app.extensions['log_archive'] = archive
-    return archive
+    app.extensions['log_archive'] = log_archive
+    return log_archive
 
   def __getattr__(self, name):
     return getattr(self.state, name, None)


@@ -1,113 +1,35 @@
-import boto
 import os
 import logging
-import hashlib
 import magic
 
-from boto.s3.key import Key
 from uuid import uuid4
 from flask import url_for, request, send_file, make_response, abort
 from flask.views import View
+from _pyio import BufferedReader
 
 
 logger = logging.getLogger(__name__)
 
 
-class FakeUserfiles(object):
-  def prepare_for_drop(self, mime_type):
-    return ('http://fake/url', uuid4())
-
-  def store_file(self, file_like_obj, content_type):
-    raise NotImplementedError()
-
-  def get_file_url(self, file_id, expires_in=300):
-    return ('http://fake/url')
-
-  def get_file_checksum(self, file_id):
-    return 'abcdefg'
-
-
-class S3FileWriteException(Exception):
-  pass
-
-
-class S3Userfiles(object):
-  def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
-    self._initialized = False
-    self._bucket_name = bucket_name
-    self._access_key = s3_access_key
-    self._secret_key = s3_secret_key
-    self._prefix = path
-    self._s3_conn = None
-    self._bucket = None
-
-  def _initialize_s3(self):
-    if not self._initialized:
-      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
-      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
-      self._initialized = True
-
-  def prepare_for_drop(self, mime_type):
-    """ Returns a signed URL to upload a file to our bucket. """
-    self._initialize_s3()
-    logger.debug('Requested upload url with content type: %s' % mime_type)
-    file_id = str(uuid4())
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
-                         encrypt_key=True)
-    return (url, file_id)
-
-  def store_file(self, file_like_obj, content_type, file_id=None):
-    self._initialize_s3()
-
-    if file_id is None:
-      file_id = str(uuid4())
-
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    logger.debug('Setting s3 content type to: %s' % content_type)
-    k.set_metadata('Content-Type', content_type)
-    bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
-                                             rewind=True)
-
-    if bytes_written == 0:
-      raise S3FileWriteException('Unable to write file to S3')
-
-    return file_id
-
-  def get_file_url(self, file_id, expires_in=300, mime_type=None):
-    self._initialize_s3()
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    headers = None
-    if mime_type:
-      headers = {'Content-Type': mime_type}
-
-    return k.generate_url(expires_in, headers=headers)
-
-  def get_file_checksum(self, file_id):
-    self._initialize_s3()
-    full_key = os.path.join(self._prefix, file_id)
-    k = self._bucket.lookup(full_key)
-    return k.etag[1:-1][:7]
-
-
 class UserfilesHandlers(View):
   methods = ['GET', 'PUT']
 
-  def __init__(self, local_userfiles):
-    self._userfiles = local_userfiles
+  def __init__(self, distributed_storage, location, files):
+    self._storage = distributed_storage
+    self._files = files
+    self._locations = {location}
     self._magic = magic.Magic(mime=True)
 
   def get(self, file_id):
-    path = self._userfiles.file_path(file_id)
-    if not os.path.exists(path):
+    path = self._files.get_file_id_path(file_id)
+    try:
+      file_stream = self._storage.stream_read_file(self._locations, path)
+      buffered = BufferedReader(file_stream)
+      file_header_bytes = buffered.peek(1024)
+      return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
+    except IOError:
       abort(404)
 
-    logger.debug('Sending path: %s' % path)
-    return send_file(path, mimetype=self._magic.from_file(path))
-
   def put(self, file_id):
     input_stream = request.stream
     if request.headers.get('transfer-encoding') == 'chunked':
@@ -115,7 +37,10 @@ class UserfilesHandlers(View):
       # encoding (Gunicorn)
       input_stream = request.environ['wsgi.input']
 
-    self._userfiles.store_stream(input_stream, file_id)
+    c_type = request.headers.get('Content-Type', None)
+
+    path = self._files.get_file_id_path(file_id)
+    self._storage.stream_write(self._locations, path, input_stream, c_type)
 
     return make_response('Okay')
@@ -126,100 +51,81 @@ class UserfilesHandlers(View):
     return self.put(file_id)
 
 
-class LocalUserfiles(object):
-  def __init__(self, app, path):
-    self._root_path = path
-    self._buffer_size = 64 * 1024 # 64 KB
+class DelegateUserfiles(object):
+  def __init__(self, app, distributed_storage, location, path, handler_name):
     self._app = app
+    self._storage = distributed_storage
+    self._locations = {location}
+    self._prefix = path
+    self._handler_name = handler_name
 
   def _build_url_adapter(self):
     return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
                                   script_name=self._app.config['APPLICATION_ROOT'] or '/',
                                   url_scheme=self._app.config['PREFERRED_URL_SCHEME'])
 
-  def prepare_for_drop(self, mime_type):
+  def get_file_id_path(self, file_id):
+    return os.path.join(self._prefix, file_id)
+
+  def prepare_for_drop(self, mime_type, requires_cors=True):
+    """ Returns a signed URL to upload a file to our bucket. """
+    logger.debug('Requested upload url with content type: %s' % mime_type)
     file_id = str(uuid4())
-    with self._app.app_context() as ctx:
-      ctx.url_adapter = self._build_url_adapter()
-      return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
+    path = self.get_file_id_path(file_id)
+    url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors)
 
-  def file_path(self, file_id):
-    if '..' in file_id or file_id.startswith('/'):
-      raise RuntimeError('Invalid Filename')
-    return os.path.join(self._root_path, file_id)
-
-  def store_stream(self, stream, file_id):
-    path = self.file_path(file_id)
-    dirname = os.path.dirname(path)
-    if not os.path.exists(dirname):
-      os.makedirs(dirname)
-
-    with open(path, 'w') as to_write:
-      while True:
-        try:
-          buf = stream.read(self._buffer_size)
-          if not buf:
-            break
-          to_write.write(buf)
-        except IOError:
-          break
+    if url is None:
+      with self._app.app_context() as ctx:
+        ctx.url_adapter = self._build_url_adapter()
+        return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
+
+    return (url, file_id)
 
   def store_file(self, file_like_obj, content_type, file_id=None):
     if file_id is None:
       file_id = str(uuid4())
 
-    # Rewind the file to match what s3 does
-    file_like_obj.seek(0, os.SEEK_SET)
-
-    self.store_stream(file_like_obj, file_id)
+    path = self.get_file_id_path(file_id)
+    self._storage.stream_write(self._locations, path, file_like_obj, content_type)
     return file_id
 
-  def get_file_url(self, file_id, expires_in=300):
-    with self._app.app_context() as ctx:
-      ctx.url_adapter = self._build_url_adapter()
-      return url_for('userfiles_handlers', file_id=file_id, _external=True)
+  def get_file_url(self, file_id, expires_in=300, requires_cors=False):
+    path = self.get_file_id_path(file_id)
+    url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors)
+
+    if url is None:
+      with self._app.app_context() as ctx:
+        ctx.url_adapter = self._build_url_adapter()
+        return url_for(self._handler_name, file_id=file_id, _external=True)
+
+    return url
 
   def get_file_checksum(self, file_id):
-    path = self.file_path(file_id)
-    sha_hash = hashlib.sha256()
-    with open(path, 'r') as to_hash:
-      while True:
-        buf = to_hash.read(self._buffer_size)
-        if not buf:
-          break
-        sha_hash.update(buf)
-    return sha_hash.hexdigest()[:7]
+    path = self.get_file_id_path(file_id)
+    return self._storage.get_checksum(self._locations, path)
 
 
 class Userfiles(object):
-  def __init__(self, app=None):
+  def __init__(self, app=None, distributed_storage=None):
     self.app = app
     if app is not None:
-      self.state = self.init_app(app)
+      self.state = self.init_app(app, distributed_storage)
     else:
       self.state = None
 
-  def init_app(self, app):
-    storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
-    path = app.config.get('USERFILES_PATH', '')
-
-    if storage_type == 'LocalUserfiles':
-      userfiles = LocalUserfiles(app, path)
-      app.add_url_rule('/userfiles/<file_id>',
-                       view_func=UserfilesHandlers.as_view('userfiles_handlers',
-                                                           local_userfiles=userfiles))
-
-    elif storage_type == 'S3Userfiles':
-      access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
-      secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
-      bucket = app.config.get('USERFILES_S3_BUCKET', '')
-      userfiles = S3Userfiles(path, access_key, secret_key, bucket)
-
-    elif storage_type == 'FakeUserfiles':
-      userfiles = FakeUserfiles()
-
-    else:
-      raise RuntimeError('Unknown userfiles type: %s' % storage_type)
+  def init_app(self, app, distributed_storage):
+    location = app.config.get('USERFILES_LOCATION')
+    path = app.config.get('USERFILES_PATH', None)
+    handler_name = 'userfiles_handlers'
+
+    userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
+
+    app.add_url_rule('/userfiles/<file_id>',
+                     view_func=UserfilesHandlers.as_view(handler_name,
+                                                         distributed_storage=distributed_storage,
+                                                         location=location,
+                                                         files=userfiles))
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
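
For illustration, a rough usage sketch (assuming the Flask app and Storage objects wired up in app.py above; the calls mirror the endpoints changed later in this commit) of how callers interact with the refactored userfiles layer:

    from app import app, storage
    from data.userfiles import Userfiles

    user_files = Userfiles(app, storage)   # as in app.py above

    def example(tarball):
      # Writes stream through the distributed storage engine under USERFILES_PATH.
      # ('application/x-tar' stands in for the trigger code's TARBALL_MIME constant.)
      dockerfile_id = user_files.store_file(tarball, 'application/x-tar')

      # Browser-facing callers ask for a CORS-capable URL; an engine that cannot
      # provide one makes DelegateUserfiles fall back to the proxied
      # /userfiles/<file_id> route served by UserfilesHandlers.
      archive_url = user_files.get_file_url(dockerfile_id, requires_cors=True)

      # The build worker fetches server-side, so CORS is not required.
      resource_url = user_files.get_file_url(dockerfile_id, requires_cors=False)
      return archive_url, resource_url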


@@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False):
   }
 
   if can_write:
-    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key)
+    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
 
   return resp
@@ -257,7 +257,7 @@ class FileDropResource(ApiResource):
   def post(self):
     """ Request a URL to which a file may be uploaded. """
     mime_type = request.get_json()['mimeType']
-    (url, file_id) = user_files.prepare_for_drop(mime_type)
+    (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True)
     return {
       'url': url,
       'file_id': str(file_id),


@@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers):
   extra_headers = {}
 
-  # Add the Accept-Ranges header if the storage engine supports resumeable
+  # Add the Accept-Ranges header if the storage engine supports resumable
   # downloads.
-  if store.get_supports_resumeable_downloads(repo_image.storage.locations):
-    profile.debug('Storage supports resumeable downloads')
+  if store.get_supports_resumable_downloads(repo_image.storage.locations):
+    profile.debug('Storage supports resumable downloads')
     extra_headers['Accept-Ranges'] = 'bytes'
 
   resp = make_response('')


@@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger):
     with tarfile.open(fileobj=tarball) as archive:
       tarball_subdir = archive.getnames()[0]
 
+    # Seek to position 0 to make boto multipart happy
+    tarball.seek(0)
+
     dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)
 
     logger.debug('Successfully prepared job')


@@ -21,8 +21,7 @@
 #quay-logo {
-  width: 80px;
-  margin-right: 30px;
+  width: 100px;
 }
 
 #padding-container {


@@ -2,7 +2,7 @@
 <div class="current-item">
   <div class="dropdown-select-icon-transclude"></div>
   <input type="text" class="lookahead-input form-control" placeholder="{{ placeholder }}"
-         ng-readonly="!lookaheadItems || !lookaheadItems.length"></input>
+         ng-readonly="!allowCustomInput"></input>
 </div>
 <div class="dropdown">
   <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">


@@ -4,7 +4,7 @@
     &equiv;
   </button>
   <a class="navbar-brand" href="/" target="{{ appLinkTarget() }}">
-    <img id="quay-logo" src="/static/img/black-horizontal.svg">
+    <img id="quay-logo" src="/static/img/quay-logo.png">
   </a>
 </div>


@@ -29,7 +29,8 @@
   <div class="slideinout" ng-show="currentRepo">
     <div style="margin-top: 10px">Dockerfile Location:</div>
     <div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation"
-         lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)">
+         lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)"
+         allow-custom-input="true">
       <!-- Icons -->
       <i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i>
       <i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i>


@@ -1280,7 +1280,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
           'name': 'token',
           'type': 'string',
           'title': 'Token',
-          'help_url': 'https://{subdomain}.slack.com/services/new/outgoing-webhook'
+          'help_url': 'https://{subdomain}.slack.com/services/new/incoming-webhook'
         }
       ]
     }
@@ -4406,6 +4406,9 @@ quayApp.directive('dropdownSelect', function ($compile) {
       'selectedItem': '=selectedItem',
      'placeholder': '=placeholder',
       'lookaheadItems': '=lookaheadItems',
+      'allowCustomInput': '@allowCustomInput',
       'handleItemSelected': '&handleItemSelected',
       'handleInput': '&handleInput',


@@ -1647,14 +1647,17 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
     if ($scope.cuser.logins) {
       for (var i = 0; i < $scope.cuser.logins.length; i++) {
-        if ($scope.cuser.logins[i].service == 'github') {
+        var login = $scope.cuser.logins[i];
+        login.metadata = login.metadata || {};
+
+        if (login.service == 'github') {
           $scope.hasGithubLogin = true;
-          $scope.githubLogin = $scope.cuser.logins[i].metadata['service_username'];
+          $scope.githubLogin = login.metadata['service_username'];
         }
 
-        if ($scope.cuser.logins[i].service == 'google') {
+        if (login.service == 'google') {
           $scope.hasGoogleLogin = true;
-          $scope.googleLogin = $scope.cuser.logins[i].metadata['service_username'];
+          $scope.googleLogin = login.metadata['service_username'];
         }
       }
     }


@@ -1,5 +1,5 @@
 from storage.local import LocalStorage
-from storage.cloud import S3Storage, GoogleCloudStorage
+from storage.cloud import S3Storage, GoogleCloudStorage, RadosGWStorage
 from storage.fakestorage import FakeStorage
 from storage.distributedstorage import DistributedStorage
@@ -8,6 +8,7 @@ STORAGE_DRIVER_CLASSES = {
   'LocalStorage': LocalStorage,
   'S3Storage': S3Storage,
   'GoogleCloudStorage': GoogleCloudStorage,
+  'RadosGWStorage': RadosGWStorage,
 }


@@ -54,10 +54,13 @@ class BaseStorage(StoragePaths):
   # Set the IO buffer to 64kB
   buffer_size = 64 * 1024
 
-  def get_direct_download_url(self, path, expires_in=60):
+  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
     return None
 
-  def get_supports_resumeable_downloads(self):
+  def get_direct_upload_url(self, path, mime_type, requires_cors=True):
+    return None
+
+  def get_supports_resumable_downloads(self):
     return False
 
   def get_content(self, path):
@@ -72,7 +75,7 @@ class BaseStorage(StoragePaths):
   def stream_read_file(self, path):
     raise NotImplementedError
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     raise NotImplementedError
 
   def list_directory(self, path=None):
@@ -83,3 +86,6 @@ class BaseStorage(StoragePaths):
 
   def remove(self, path):
     raise NotImplementedError
+
+  def get_checksum(self, path):
+    raise NotImplementedError


@@ -7,36 +7,39 @@ import boto.gs.connection
 import boto.s3.key
 import boto.gs.key
 
+from io import BufferedIOBase
+
 from storage.basestorage import BaseStorage
 
 
 logger = logging.getLogger(__name__)
 
 
-class StreamReadKeyAsFile(object):
+class StreamReadKeyAsFile(BufferedIOBase):
   def __init__(self, key):
     self._key = key
-    self._finished = False
-
-  def __enter__(self):
-    return self
-
-  def __exit__(self, type, value, tb):
-    self._key.close(fast=True)
 
   def read(self, amt=None):
-    if self._finished:
+    if self.closed:
       return None
 
     resp = self._key.read(amt)
-    if not resp:
-      self._finished = True
     return resp
 
+  def readable(self):
+    return True
+
+  @property
+  def closed(self):
+    return self._key.closed
+
+  def close(self):
+    self._key.close(fast=True)
+
 
 class _CloudStorage(BaseStorage):
-  def __init__(self, connection_class, key_class, upload_params, storage_path, access_key,
-               secret_key, bucket_name):
+  def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path,
+               access_key, secret_key, bucket_name):
     self._initialized = False
     self._bucket_name = bucket_name
     self._access_key = access_key
@@ -45,12 +48,14 @@ class _CloudStorage(BaseStorage):
     self._connection_class = connection_class
     self._key_class = key_class
     self._upload_params = upload_params
+    self._connect_kwargs = connect_kwargs
     self._cloud_conn = None
     self._cloud_bucket = None
 
   def _initialize_cloud_conn(self):
     if not self._initialized:
-      self._cloud_conn = self._connection_class(self._access_key, self._secret_key)
+      self._cloud_conn = self._connection_class(self._access_key, self._secret_key,
+                                                **self._connect_kwargs)
       self._cloud_bucket = self._cloud_conn.get_bucket(self._bucket_name)
       self._initialized = True
 
@@ -87,15 +92,22 @@ class _CloudStorage(BaseStorage):
     key.set_contents_from_string(content, **self._upload_params)
     return path
 
-  def get_supports_resumeable_downloads(self):
+  def get_supports_resumable_downloads(self):
     return True
 
-  def get_direct_download_url(self, path, expires_in=60):
+  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
     self._initialize_cloud_conn()
     path = self._init_path(path)
     k = self._key_class(self._cloud_bucket, path)
     return k.generate_url(expires_in)
 
+  def get_direct_upload_url(self, path, mime_type, requires_cors=True):
+    self._initialize_cloud_conn()
+    path = self._init_path(path)
+    key = self._key_class(self._cloud_bucket, path)
+    url = key.generate_url(300, 'PUT', headers={'Content-Type': mime_type}, encrypt_key=True)
+    return url
+
   def stream_read(self, path):
     self._initialize_cloud_conn()
     path = self._init_path(path)
@@ -116,14 +128,20 @@ class _CloudStorage(BaseStorage):
       raise IOError('No such key: \'{0}\''.format(path))
     return StreamReadKeyAsFile(key)
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
     path = self._init_path(path)
-    mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params)
+
+    metadata = {}
+    if content_type is not None:
+      metadata['Content-Type'] = content_type
+
+    mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
+                                                      **self._upload_params)
     num_part = 1
     while True:
       try:
@@ -179,25 +197,67 @@ class _CloudStorage(BaseStorage):
     for key in self._cloud_bucket.list(prefix=path):
       key.delete()
 
+  def get_checksum(self, path):
+    self._initialize_cloud_conn()
+    path = self._init_path(path)
+    key = self._key_class(self._cloud_bucket, path)
+    k = self._cloud_bucket.lookup(key)
+    return k.etag[1:-1][:7]
+
 
 class S3Storage(_CloudStorage):
   def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
     upload_params = {
       'encrypt_key': True,
     }
+    connect_kwargs = {}
     super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
-                                    upload_params, storage_path, s3_access_key, s3_secret_key,
-                                    s3_bucket)
+                                    connect_kwargs, upload_params, storage_path, s3_access_key,
+                                    s3_secret_key, s3_bucket)
 
 
 class GoogleCloudStorage(_CloudStorage):
   def __init__(self, storage_path, access_key, secret_key, bucket_name):
-    super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key, {},
-                                             storage_path, access_key, secret_key, bucket_name)
+    upload_params = {}
+    connect_kwargs = {}
+    super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key,
+                                             connect_kwargs, upload_params, storage_path,
+                                             access_key, secret_key, bucket_name)
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     path = self._init_path(path)
     key = self._key_class(self._cloud_bucket, path)
+
+    if content_type is not None:
+      key.set_metadata('Content-Type', content_type)
+
     key.set_contents_from_stream(fp)
+
+
+class RadosGWStorage(_CloudStorage):
+  def __init__(self, hostname, is_secure, storage_path, access_key, secret_key, bucket_name):
+    upload_params = {}
+    connect_kwargs = {
+      'host': hostname,
+      'is_secure': is_secure,
+      'calling_format': boto.s3.connection.OrdinaryCallingFormat(),
+    }
+    super(RadosGWStorage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
+                                         connect_kwargs, upload_params, storage_path, access_key,
+                                         secret_key, bucket_name)
+
+  # TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
+  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
+    if requires_cors:
+      return None
+
+    return super(RadosGWStorage, self).get_direct_download_url(path, expires_in, requires_cors)
+
+  # TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
+  def get_direct_upload_url(self, path, mime_type, requires_cors=True):
+    if requires_cors:
+      return None
+
+    return super(RadosGWStorage, self).get_direct_upload_url(path, mime_type, requires_cors)
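
For illustration, a minimal sketch (hypothetical endpoint, credentials, and bucket) of instantiating the new RadosGW driver directly; in the application it is normally selected by name through STORAGE_DRIVER_CLASSES:

    from storage.cloud import RadosGWStorage

    # Hypothetical Ceph radosgw endpoint and credentials.
    store = RadosGWStorage(hostname='radosgw.example.com',
                           is_secure=True,
                           storage_path='/registry',
                           access_key='ACCESSKEY',
                           secret_key='SECRETKEY',
                           bucket_name='quay-storage')

    # Until radosgw supports CORS, callers that require a CORS-capable URL get
    # None back and fall back to proxying through the app (see the TODOs above).
    assert store.get_direct_download_url('userfiles/some-id', requires_cors=True) is None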


@@ -31,6 +31,7 @@ class DistributedStorage(StoragePaths):
     self.preferred_locations = list(preferred_locations)
 
   get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url)
+  get_direct_upload_url = _location_aware(BaseStorage.get_direct_upload_url)
   get_content = _location_aware(BaseStorage.get_content)
   put_content = _location_aware(BaseStorage.put_content)
   stream_read = _location_aware(BaseStorage.stream_read)
@@ -39,4 +40,5 @@ class DistributedStorage(StoragePaths):
   list_directory = _location_aware(BaseStorage.list_directory)
   exists = _location_aware(BaseStorage.exists)
   remove = _location_aware(BaseStorage.remove)
-  get_supports_resumeable_downloads = _location_aware(BaseStorage.get_supports_resumeable_downloads)
+  get_checksum = _location_aware(BaseStorage.get_checksum)
+  get_supports_resumable_downloads = _location_aware(BaseStorage.get_supports_resumable_downloads)


@@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
   def stream_read(self, path):
     yield ''
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     pass
 
   def remove(self, path):
@@ -22,3 +22,6 @@ class FakeStorage(BaseStorage):
 
   def exists(self, path):
     return False
+
+  def get_checksum(self, path):
+    return 'abcdefg'


@@ -1,6 +1,7 @@
 import os
 import shutil
+import hashlib
+import io
 
 from storage.basestorage import BaseStorage
@@ -40,9 +41,9 @@ class LocalStorage(BaseStorage):
 
   def stream_read_file(self, path):
     path = self._init_path(path)
-    return open(path, mode='rb')
+    return io.open(path, mode='rb')
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Size is mandatory
     path = self._init_path(path, create=True)
     with open(path, mode='wb') as f:
@@ -80,3 +81,14 @@ class LocalStorage(BaseStorage):
         os.remove(path)
       except OSError:
         pass
+
+  def get_checksum(self, path):
+    path = self._init_path(path)
+    sha_hash = hashlib.sha256()
+    with open(path, 'r') as to_hash:
+      while True:
+        buf = to_hash.read(self.buffer_size)
+        if not buf:
+          break
+        sha_hash.update(buf)
+    return sha_hash.hexdigest()[:7]


@@ -30,7 +30,7 @@ class TestConfig(DefaultConfig):
   BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs')
   BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef', False]
 
-  USERFILES_TYPE = 'FakeUserfiles'
+  USERFILES_LOCATION = 'local_us'
 
   FEATURE_SUPER_USERS = True
   FEATURE_BILLING = True


@@ -497,7 +497,7 @@ class DockerfileBuildWorker(Worker):
     job_config = json.loads(repository_build.job_config)
 
-    resource_url = user_files.get_file_url(repository_build.resource_key)
+    resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
     tag_names = job_config['docker_tags']
     build_subdir = job_config['build_subdir']
     repo = job_config['repository']