diff --git a/Dockerfile.buildworker b/Dockerfile.buildworker
index 04efe38f0..159c7867c 100644
--- a/Dockerfile.buildworker
+++ b/Dockerfile.buildworker
@@ -1,10 +1,10 @@
-FROM phusion/baseimage:0.9.11
+FROM phusion/baseimage:0.9.13
 
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
 
 # Install the dependencies.
-RUN apt-get update # 21AUG2014
+RUN apt-get update # 10SEP2014
 
 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
 RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev
diff --git a/Dockerfile.web b/Dockerfile.web
index e1d253632..b24694b42 100644
--- a/Dockerfile.web
+++ b/Dockerfile.web
@@ -1,10 +1,10 @@
-FROM phusion/baseimage:0.9.11
+FROM phusion/baseimage:0.9.13
 
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
 
 # Install the dependencies.
-RUN apt-get update # 21AUG2014
+RUN apt-get update # 10SEP2014
 
 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
 RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev
diff --git a/app.py b/app.py
index e03480782..8f0a57d62 100644
--- a/app.py
+++ b/app.py
@@ -89,8 +89,8 @@ Principal(app, use_sessions=False)
 login_manager = LoginManager(app)
 mail = Mail(app)
 storage = Storage(app)
-userfiles = Userfiles(app)
-log_archive = LogArchive(app)
+userfiles = Userfiles(app, storage)
+log_archive = LogArchive(app, storage)
 analytics = Analytics(app)
 billing = Billing(app)
 sentry = Sentry(app)
diff --git a/config.py b/config.py
index 1355aa2a9..f810007e8 100644
--- a/config.py
+++ b/config.py
@@ -83,20 +83,12 @@ class DefaultConfig(object):
   BUILDLOGS_REDIS_HOSTNAME = 'logs.quay.io'
   BUILDLOGS_OPTIONS = []
 
-  # Build logs archive
-  LOG_ARCHIVE_TYPE = 'LocalArchivedLogs'
-  LOG_ARCHIVE_PATH = 'test/data/registry/logarchive'
-
   # Real-time user events
   USER_EVENTS_REDIS_HOSTNAME = 'logs.quay.io'
 
   # Stripe config
   BILLING_TYPE = 'FakeStripe'
 
-  # Userfiles
-  USERFILES_TYPE = 'LocalUserfiles'
-  USERFILES_PATH = 'test/data/registry/userfiles'
-
   # Analytics
   ANALYTICS_TYPE = 'FakeAnalytics'
 
@@ -176,3 +168,11 @@ class DefaultConfig(object):
   }
 
   DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
+
+  # Userfiles
+  USERFILES_LOCATION = 'local_us'
+  USERFILES_PATH = 'userfiles/'
+
+  # Build logs archive
+  LOG_ARCHIVE_LOCATION = 'local_us'
+  LOG_ARCHIVE_PATH = 'logarchive/'
diff --git a/data/archivedlogs.py b/data/archivedlogs.py
index 0801b8815..bde3e9151 100644
--- a/data/archivedlogs.py
+++ b/data/archivedlogs.py
@@ -1,39 +1,31 @@
-from data.userfiles import LocalUserfiles, UserfilesHandlers, S3Userfiles, FakeUserfiles
+from data.userfiles import DelegateUserfiles, UserfilesHandlers
 
 
 class LogArchive(object):
-  def __init__(self, app=None):
+  def __init__(self, app=None, distributed_storage=None):
     self.app = app
     if app is not None:
-      self.state = self.init_app(app)
+      self.state = self.init_app(app, distributed_storage)
     else:
       self.state = None
 
-  def init_app(self, app):
-    storage_type = app.config.get('LOG_ARCHIVE_TYPE', 'LocalArchivedLogs')
-    path = app.config.get('LOG_ARCHIVE_PATH', '')
+  def init_app(self, app, distributed_storage):
+    location = app.config.get('LOG_ARCHIVE_LOCATION')
+    path = app.config.get('LOG_ARCHIVE_PATH', None)
 
-    if storage_type == 'LocalArchivedLogs':
-      archive = LocalUserfiles(app, path)
-      app.add_url_rule('/archivedlogs/<file_id>',
-                       view_func=UserfilesHandlers.as_view('log_archive_handlers',
-                                                           local_userfiles=archive))
+    handler_name = 'logarchive_handlers'
 
-    elif storage_type == 'S3ArchivedLogs':
-      access_key = app.config.get('LOG_ARCHIVE_AWS_ACCESS_KEY', '')
-      secret_key = app.config.get('LOG_ARCHIVE_AWS_SECRET_KEY', '')
-      bucket = app.config.get('LOG_ARCHIVE_S3_BUCKET', '')
-      archive = S3Userfiles(path, access_key, secret_key, bucket)
+    log_archive = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
 
-    elif storage_type == 'FakeArchivedLogs':
-      archive = FakeUserfiles()
-
-    else:
-      raise RuntimeError('Unknown log archive type: %s' % storage_type)
+    app.add_url_rule('/logarchive/<file_id>',
+                     view_func=UserfilesHandlers.as_view(handler_name,
+                                                         distributed_storage=distributed_storage,
+                                                         location=location,
+                                                         files=log_archive))
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
-    app.extensions['log_archive'] = archive
-    return archive
+    app.extensions['log_archive'] = log_archive
+    return log_archive
 
   def __getattr__(self, name):
     return getattr(self.state, name, None)
diff --git a/data/userfiles.py b/data/userfiles.py
index c12553493..8e7227e01 100644
--- a/data/userfiles.py
+++ b/data/userfiles.py
@@ -1,113 +1,35 @@
-import boto
 import os
 import logging
-import hashlib
 import magic
 
-from boto.s3.key import Key
 from uuid import uuid4
 from flask import url_for, request, send_file, make_response, abort
 from flask.views import View
+from _pyio import BufferedReader
 
 
 logger = logging.getLogger(__name__)
 
 
-class FakeUserfiles(object):
-  def prepare_for_drop(self, mime_type):
-    return ('http://fake/url', uuid4())
-
-  def store_file(self, file_like_obj, content_type):
-    raise NotImplementedError()
-
-  def get_file_url(self, file_id, expires_in=300):
-    return ('http://fake/url')
-
-  def get_file_checksum(self, file_id):
-    return 'abcdefg'
-
-
-class S3FileWriteException(Exception):
-  pass
-
-
-class S3Userfiles(object):
-  def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
-    self._initialized = False
-    self._bucket_name = bucket_name
-    self._access_key = s3_access_key
-    self._secret_key = s3_secret_key
-    self._prefix = path
-    self._s3_conn = None
-    self._bucket = None
-
-  def _initialize_s3(self):
-    if not self._initialized:
-      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
-      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
-      self._initialized = True
-
-  def prepare_for_drop(self, mime_type):
-    """ Returns a signed URL to upload a file to our bucket. """
-    self._initialize_s3()
-    logger.debug('Requested upload url with content type: %s' % mime_type)
-    file_id = str(uuid4())
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
-                         encrypt_key=True)
-    return (url, file_id)
-
-  def store_file(self, file_like_obj, content_type, file_id=None):
-    self._initialize_s3()
-
-    if file_id is None:
-      file_id = str(uuid4())
-
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    logger.debug('Setting s3 content type to: %s' % content_type)
-    k.set_metadata('Content-Type', content_type)
-    bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
-                                             rewind=True)
-
-    if bytes_written == 0:
-      raise S3FileWriteException('Unable to write file to S3')
-
-    return file_id
-
-  def get_file_url(self, file_id, expires_in=300, mime_type=None):
-    self._initialize_s3()
-    full_key = os.path.join(self._prefix, file_id)
-    k = Key(self._bucket, full_key)
-    headers = None
-    if mime_type:
-      headers={'Content-Type': mime_type}
-
-    return k.generate_url(expires_in, headers=headers)
-
-  def get_file_checksum(self, file_id):
-    self._initialize_s3()
-    full_key = os.path.join(self._prefix, file_id)
-    k = self._bucket.lookup(full_key)
-    return k.etag[1:-1][:7]
-
-
 class UserfilesHandlers(View):
   methods = ['GET', 'PUT']
 
-  def __init__(self, local_userfiles):
-    self._userfiles = local_userfiles
+  def __init__(self, distributed_storage, location, files):
+    self._storage = distributed_storage
+    self._files = files
+    self._locations = {location}
     self._magic = magic.Magic(mime=True)
 
   def get(self, file_id):
-    path = self._userfiles.file_path(file_id)
-    if not os.path.exists(path):
+    path = self._files.get_file_id_path(file_id)
+    try:
+      file_stream = self._storage.stream_read_file(self._locations, path)
+      buffered = BufferedReader(file_stream)
+      file_header_bytes = buffered.peek(1024)
+      return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
+    except IOError:
       abort(404)
 
-    logger.debug('Sending path: %s' % path)
-    return send_file(path, mimetype=self._magic.from_file(path))
-
   def put(self, file_id):
     input_stream = request.stream
     if request.headers.get('transfer-encoding') == 'chunked':
@@ -115,7 +37,10 @@ class UserfilesHandlers(View):
       # encoding (Gunicorn)
       input_stream = request.environ['wsgi.input']
 
-    self._userfiles.store_stream(input_stream, file_id)
+    c_type = request.headers.get('Content-Type', None)
+
+    path = self._files.get_file_id_path(file_id)
+    self._storage.stream_write(self._locations, path, input_stream, c_type)
 
     return make_response('Okay')
 
@@ -126,100 +51,81 @@ class UserfilesHandlers(View):
       return self.put(file_id)
 
 
-class LocalUserfiles(object):
-  def __init__(self, app, path):
-    self._root_path = path
-    self._buffer_size = 64 * 1024  # 64 KB
+class DelegateUserfiles(object):
+  def __init__(self, app, distributed_storage, location, path, handler_name):
     self._app = app
+    self._storage = distributed_storage
+    self._locations = {location}
+    self._prefix = path
+    self._handler_name = handler_name
 
   def _build_url_adapter(self):
     return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
                                   script_name=self._app.config['APPLICATION_ROOT'] or '/',
                                   url_scheme=self._app.config['PREFERRED_URL_SCHEME'])
 
-  def prepare_for_drop(self, mime_type):
+  def get_file_id_path(self, file_id):
+    return os.path.join(self._prefix, file_id)
+
+  def prepare_for_drop(self, mime_type, requires_cors=True):
+    """ Returns a signed URL to upload a file to our bucket. """
+    logger.debug('Requested upload url with content type: %s' % mime_type)
     file_id = str(uuid4())
-    with self._app.app_context() as ctx:
-      ctx.url_adapter = self._build_url_adapter()
-      return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
+    path = self.get_file_id_path(file_id)
+    url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors)
 
-  def file_path(self, file_id):
-    if '..' in file_id or file_id.startswith('/'):
-      raise RuntimeError('Invalid Filename')
-    return os.path.join(self._root_path, file_id)
+    if url is None:
+      with self._app.app_context() as ctx:
+        ctx.url_adapter = self._build_url_adapter()
+        return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
 
-  def store_stream(self, stream, file_id):
-    path = self.file_path(file_id)
-    dirname = os.path.dirname(path)
-    if not os.path.exists(dirname):
-      os.makedirs(dirname)
-
-    with open(path, 'w') as to_write:
-      while True:
-        try:
-          buf = stream.read(self._buffer_size)
-          if not buf:
-            break
-          to_write.write(buf)
-        except IOError:
-          break
+    return (url, file_id)
 
   def store_file(self, file_like_obj, content_type, file_id=None):
     if file_id is None:
       file_id = str(uuid4())
 
-    # Rewind the file to match what s3 does
-    file_like_obj.seek(0, os.SEEK_SET)
-
-    self.store_stream(file_like_obj, file_id)
+    path = self.get_file_id_path(file_id)
+    self._storage.stream_write(self._locations, path, file_like_obj, content_type)
     return file_id
 
-  def get_file_url(self, file_id, expires_in=300):
-    with self._app.app_context() as ctx:
-      ctx.url_adapter = self._build_url_adapter()
-      return url_for('userfiles_handlers', file_id=file_id, _external=True)
+  def get_file_url(self, file_id, expires_in=300, requires_cors=False):
+    path = self.get_file_id_path(file_id)
+    url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors)
+
+    if url is None:
+      with self._app.app_context() as ctx:
+        ctx.url_adapter = self._build_url_adapter()
+        return url_for(self._handler_name, file_id=file_id, _external=True)
+
+    return url
 
   def get_file_checksum(self, file_id):
-    path = self.file_path(file_id)
-    sha_hash = hashlib.sha256()
-    with open(path, 'r') as to_hash:
-      while True:
-        buf = to_hash.read(self._buffer_size)
-        if not buf:
-          break
-        sha_hash.update(buf)
-    return sha_hash.hexdigest()[:7]
+    path = self.get_file_id_path(file_id)
+    return self._storage.get_checksum(self._locations, path)
 
 
 class Userfiles(object):
-  def __init__(self, app=None):
+  def __init__(self, app=None, distributed_storage=None):
     self.app = app
     if app is not None:
-      self.state = self.init_app(app)
+      self.state = self.init_app(app, distributed_storage)
     else:
       self.state = None
 
-  def init_app(self, app):
-    storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
-    path = app.config.get('USERFILES_PATH', '')
+  def init_app(self, app, distributed_storage):
+    location = app.config.get('USERFILES_LOCATION')
+    path = app.config.get('USERFILES_PATH', None)
 
-    if storage_type == 'LocalUserfiles':
-      userfiles = LocalUserfiles(app, path)
-      app.add_url_rule('/userfiles/<file_id>',
-                       view_func=UserfilesHandlers.as_view('userfiles_handlers',
-                                                           local_userfiles=userfiles))
+    handler_name = 'userfiles_handlers'
 
-    elif storage_type == 'S3Userfiles':
-      access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
-      secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
-      bucket = app.config.get('USERFILES_S3_BUCKET', '')
-      userfiles = S3Userfiles(path, access_key, secret_key, bucket)
+    userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
 
-    elif storage_type == 'FakeUserfiles':
-      userfiles = FakeUserfiles()
-
-    else:
-      raise RuntimeError('Unknown userfiles type: %s' % storage_type)
+    app.add_url_rule('/userfiles/<file_id>',
+                     view_func=UserfilesHandlers.as_view(handler_name,
+                                                         distributed_storage=distributed_storage,
+                                                         location=location,
+                                                         files=userfiles))
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
diff --git a/endpoints/api/build.py b/endpoints/api/build.py
index 21d554069..74677fadb 100644
--- a/endpoints/api/build.py
+++ b/endpoints/api/build.py
@@ -80,7 +80,7 @@ def build_status_view(build_obj, can_write=False):
   }
 
   if can_write:
-    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key)
+    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
 
   return resp
 
@@ -257,7 +257,7 @@ class FileDropResource(ApiResource):
   def post(self):
     """ Request a URL to which a file may be uploaded. """
     mime_type = request.get_json()['mimeType']
-    (url, file_id) = user_files.prepare_for_drop(mime_type)
+    (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True)
     return {
       'url': url,
       'file_id': str(file_id),
diff --git a/endpoints/registry.py b/endpoints/registry.py
index 72633939e..94719905a 100644
--- a/endpoints/registry.py
+++ b/endpoints/registry.py
@@ -110,10 +110,10 @@ def head_image_layer(namespace, repository, image_id, headers):
 
     extra_headers = {}
 
-    # Add the Accept-Ranges header if the storage engine supports resumeable
+    # Add the Accept-Ranges header if the storage engine supports resumable
     # downloads.
-    if store.get_supports_resumeable_downloads(repo_image.storage.locations):
-      profile.debug('Storage supports resumeable downloads')
+    if store.get_supports_resumable_downloads(repo_image.storage.locations):
+      profile.debug('Storage supports resumable downloads')
       extra_headers['Accept-Ranges'] = 'bytes'
 
     resp = make_response('')
diff --git a/endpoints/trigger.py b/endpoints/trigger.py
index ab7aa9065..ae0b4b2b7 100644
--- a/endpoints/trigger.py
+++ b/endpoints/trigger.py
@@ -291,6 +291,9 @@ class GithubBuildTrigger(BuildTrigger):
     with tarfile.open(fileobj=tarball) as archive:
       tarball_subdir = archive.getnames()[0]
 
+    # Seek to position 0 to make boto multipart happy
+    tarball.seek(0)
+
     dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)
 
     logger.debug('Successfully prepared job')
diff --git a/static/css/quay.css b/static/css/quay.css
index 84d89811a..38d03893f 100644
--- a/static/css/quay.css
+++ b/static/css/quay.css
@@ -21,8 +21,7 @@
 
 #quay-logo {
-  width: 80px;
-  margin-right: 30px;
+  width: 100px;
 }
 
 #padding-container {
diff --git a/static/directives/dropdown-select.html b/static/directives/dropdown-select.html
index c1157e3d0..69404e161 100644
--- a/static/directives/dropdown-select.html
+++ b/static/directives/dropdown-select.html
@@ -2,7 +2,7 @@
+           ng-readonly="!allowCustomInput">
diff --git a/static/directives/trigger-setup-github.html b/static/directives/trigger-setup-github.html
index 9b0e194ab..48ac359f9 100644
--- a/static/directives/trigger-setup-github.html
+++ b/static/directives/trigger-setup-github.html
@@ -29,7 +29,8 @@
Dockerfile Location: