First attempt at making config loadable through string config overrides in an env variable.

jakedt 2014-04-03 17:31:46 -04:00
parent b95d3ec329
commit e87ffa20cf
21 changed files with 367 additions and 397 deletions
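
The mechanism this commit introduces is small enough to sketch standalone: a JSON object in the QUAY_CONFIG environment variable is parsed and laid over the defaults loaded from DefaultConfig. A minimal sketch, assuming only the QUAY_CONFIG name and the loading order from the app.py diff below; the trimmed DefaultConfig here is illustrative:

import json
import os

from flask import Flask


class DefaultConfig(object):
    # Trimmed to two keys for illustration; the real class is in config.py below.
    PREFERRED_URL_SCHEME = 'http'
    SERVER_NAME = 'localhost:5000'


app = Flask(__name__)
app.config.from_object(DefaultConfig())

# e.g. QUAY_CONFIG='{"PREFERRED_URL_SCHEME": "https", "SERVER_NAME": "quay.io"}'
overrides = json.loads(os.environ.get('QUAY_CONFIG', '{}'))
app.config.update(overrides)  # a dict has no uppercase attributes, so update() rather than from_object()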

app.py

@@ -1,39 +1,32 @@
import logging
import os
import stripe
import json
from flask import Flask
from flask.ext.principal import Principal
from flask.ext.login import LoginManager
from flask.ext.mail import Mail
from config import (ProductionConfig, DebugConfig, LocalHostedConfig,
TestConfig, StagingConfig)
from util import analytics
from storage import Storage
from config import DefaultConfig
from data.userfiles import Userfiles
app = Flask(__name__)
logger = logging.getLogger(__name__)
logger.debug('Loading default config.')
app.config.from_object(DefaultConfig())
stack = os.environ.get('STACK', '').strip().lower()
if stack.startswith('prod'):
logger.info('Running with production config.')
config = ProductionConfig()
elif stack.startswith('staging'):
logger.info('Running with staging config on production data.')
config = StagingConfig()
elif stack.startswith('localhosted'):
logger.info('Running with debug config on production data.')
config = LocalHostedConfig()
elif stack.startswith('test'):
logger.info('Running with test config on ephemeral data.')
config = TestConfig()
else:
logger.info('Running with debug config.')
config = DebugConfig()
if 'QUAY_CONFIG_FILE' in os.environ:
config_filename = os.environ['QUAY_CONFIG_FILE']
logger.debug('Applying config file: %s', config_filename)
app.config.from_pyfile(config_filename)
app.config.from_object(config)
overrides = json.loads(os.environ.get('QUAY_CONFIG', '{}'))
logger.debug('Applying %s config overrides.', len(overrides))
app.config.update(overrides)
Principal(app, use_sessions=False)
@@ -43,6 +36,12 @@ login_manager.init_app(app)
mail = Mail()
mail.init_app(app)
storage = Storage()
storage.init_app(app)
userfiles = Userfiles()
userfiles.init_app(app)
stripe.api_key = app.config.get('STRIPE_SECRET_KEY', None)
mixpanel = app.config['ANALYTICS'].init_app(app)
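
File-based overrides follow the same order: DefaultConfig first, then the pyfile named by QUAY_CONFIG_FILE, then the QUAY_CONFIG JSON. A hypothetical override file for from_pyfile(), with keys mirroring DefaultConfig and illustrative values:

# quay_config_example.py -- hypothetical file pointed to by QUAY_CONFIG_FILE
# from_pyfile() only picks up uppercase names.
PREFERRED_URL_SCHEME = 'https'
SERVER_NAME = 'quay.io'
STORAGE_TYPE = 'S3Storage'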

config.py

@@ -4,182 +4,21 @@ import requests
import os.path
from peewee import MySQLDatabase, SqliteDatabase
from storage.s3 import S3Storage
from storage.local import LocalStorage
from data.userfiles import UserRequestFiles
from data.buildlogs import BuildLogs
from data.userevent import UserEventBuilder
from util import analytics
from test.teststorage import FakeStorage, FakeUserfiles
from test.teststorage import FakeUserfiles
from test import analytics as fake_analytics
from test.testlogs import TestBuildLogs
class FlaskConfig(object):
SECRET_KEY = '1cb18882-6d12-440d-a4cc-b7430fb5f884'
JSONIFY_PRETTYPRINT_REGULAR = False
class FlaskProdConfig(FlaskConfig):
SESSION_COOKIE_SECURE = True
class MailConfig(object):
MAIL_SERVER = 'email-smtp.us-east-1.amazonaws.com'
MAIL_USE_TLS = True
MAIL_PORT = 587
MAIL_USERNAME = 'AKIAIXV5SDGCPVMU3N4Q'
MAIL_PASSWORD = 'AhmX/vWE91uQ2RtcEKTkfNrzZehEjPNXOXeOXgQNfLao'
DEFAULT_MAIL_SENDER = 'support@quay.io'
MAIL_FAIL_SILENTLY = False
TESTING = False
class RealTransactions(object):
@staticmethod
def create_transaction(db):
return db.transaction()
DB_TRANSACTION_FACTORY = create_transaction
class SQLiteDB(RealTransactions):
DB_NAME = 'test/data/test.db'
DB_CONNECTION_ARGS = {
'threadlocals': True,
'autorollback': True,
}
DB_DRIVER = SqliteDatabase
class FakeTransaction(object):
def __enter__(self):
return self
def __exit__(self, exc_type, value, traceback):
pass
class EphemeralDB(object):
DB_NAME = ':memory:'
DB_CONNECTION_ARGS = {}
DB_DRIVER = SqliteDatabase
@staticmethod
def create_transaction(db):
return FakeTransaction()
DB_TRANSACTION_FACTORY = create_transaction
class RDSMySQL(RealTransactions):
DB_NAME = 'quay'
DB_CONNECTION_ARGS = {
'host': 'fluxmonkeylogin.cb0vumcygprn.us-east-1.rds.amazonaws.com',
'user': 'fluxmonkey',
'passwd': '8eifM#uoZ85xqC^',
'threadlocals': True,
'autorollback': True,
}
DB_DRIVER = MySQLDatabase
class AWSCredentials(object):
AWS_ACCESS_KEY = 'AKIAJWZWUIS24TWSMWRA'
AWS_SECRET_KEY = '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'
REGISTRY_S3_BUCKET = 'quay-registry'
class S3Storage(AWSCredentials):
STORAGE = S3Storage('', AWSCredentials.AWS_ACCESS_KEY,
AWSCredentials.AWS_SECRET_KEY,
AWSCredentials.REGISTRY_S3_BUCKET)
class LocalStorage(object):
STORAGE = LocalStorage('test/data/registry')
class FakeStorage(object):
STORAGE = FakeStorage()
class FakeUserfiles(object):
USERFILES = FakeUserfiles()
class S3Userfiles(AWSCredentials):
USERFILES = UserRequestFiles(AWSCredentials.AWS_ACCESS_KEY,
AWSCredentials.AWS_SECRET_KEY,
AWSCredentials.REGISTRY_S3_BUCKET)
class RedisBuildLogs(object):
BUILDLOGS = BuildLogs('logs.quay.io')
class UserEventConfig(object):
USER_EVENTS = UserEventBuilder('logs.quay.io')
class TestBuildLogs(object):
BUILDLOGS = TestBuildLogs('logs.quay.io', 'devtable', 'building',
'deadbeef-dead-beef-dead-beefdeadbeef')
class StripeTestConfig(object):
STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y'
STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh'
class StripeLiveConfig(object):
STRIPE_SECRET_KEY = 'sk_live_TRuTHYwTvmrLeU3ib7Z9hpqE'
STRIPE_PUBLISHABLE_KEY = 'pk_live_P5wLU0vGdHnZGyKnXlFG4oiu'
class FakeAnalytics(object):
ANALYTICS = fake_analytics
class MixpanelTestConfig(object):
ANALYTICS = analytics
MIXPANEL_KEY = '38014a0f27e7bdc3ff8cc7cc29c869f9'
class MixpanelProdConfig(MixpanelTestConfig):
MIXPANEL_KEY = '50ff2b2569faa3a51c8f5724922ffb7e'
class GitHubTestConfig(object):
GITHUB_CLIENT_ID = 'cfbc4aca88e5c1b40679'
GITHUB_CLIENT_SECRET = '7d1cc21e17e10cd8168410e2cd1e4561cb854ff9'
GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token'
GITHUB_USER_URL = 'https://api.github.com/user'
GITHUB_USER_EMAILS = GITHUB_USER_URL + '/emails'
class GitHubStagingConfig(GitHubTestConfig):
GITHUB_CLIENT_ID = '4886304accbc444f0471'
GITHUB_CLIENT_SECRET = '27d8a5d99af02dda821eb10883bcb2e785e70a62'
class GitHubProdConfig(GitHubTestConfig):
GITHUB_CLIENT_ID = '5a8c08b06c48d89d4d1e'
GITHUB_CLIENT_SECRET = 'f89d8bb28ea3bd4e1c68808500d185a816be53b1'
class DigitalOceanConfig(object):
DO_CLIENT_ID = 'LJ44y2wwYj1MD0BRxS6qHA'
DO_CLIENT_SECRET = 'b9357a6f6ff45a33bb03f6dbbad135f9'
DO_SSH_KEY_ID = '46986'
DO_SSH_PRIVATE_KEY_FILENAME = 'certs/digital_ocean'
DO_ALLOWED_REGIONS = {1, 4}
DO_DOCKER_IMAGE = 1341147
class BuildNodeConfig(object):
BUILD_NODE_PULL_TOKEN = 'F02O2E86CQLKZUQ0O81J8XDHQ6F0N1V36L9JTOEEK6GKKMT1GI8PTJQT4OU88Y6G'
def build_requests_session():
sess = requests.Session()
adapter = requests.adapters.HTTPAdapter(pool_connections=100,
pool_maxsize=100)
sess.mount('http://', adapter)
sess.mount('https://', adapter)
return sess
def logs_init_builder(level=logging.DEBUG,
@@ -195,79 +34,105 @@ def logs_init_builder(level=logging.DEBUG,
return init_logs
def build_requests_session():
sess = requests.Session()
adapter = requests.adapters.HTTPAdapter(pool_connections=100,
pool_maxsize=100)
sess.mount('http://', adapter)
sess.mount('https://', adapter)
return sess
class DefaultConfig(object):
# Flask config
SECRET_KEY = 'a36c9d7d-25a9-4d3f-a586-3d2f8dc40a83'
JSONIFY_PRETTYPRINT_REGULAR = False
SESSION_COOKIE_SECURE = False
LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
SEND_FILE_MAX_AGE_DEFAULT = 0
POPULATE_DB_TEST_DATA = True
PREFERRED_URL_SCHEME = 'http'
SERVER_NAME = 'localhost:5000'
class LargePoolHttpClient(object):
# Mail config
MAIL_SERVER = ''
MAIL_USE_TLS = True
MAIL_PORT = 587
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
DEFAULT_MAIL_SENDER = ''
MAIL_FAIL_SILENTLY = False
TESTING = True
# DB config
DB_NAME = 'test/data/test.db'
DB_CONNECTION_ARGS = {
'threadlocals': True,
'autorollback': True,
}
DB_DRIVER = SqliteDatabase
@staticmethod
def create_transaction(db):
return db.transaction()
DB_TRANSACTION_FACTORY = create_transaction
# Data storage
STORAGE_TYPE = 'LocalStorage'
STORAGE_PATH = 'test/data/registry'
# Build logs
BUILDLOGS = BuildLogs('logs.quay.io') # Change me
# Real-time user events
USER_EVENTS = UserEventBuilder('logs.quay.io')
# Stripe config
STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y' # remove me
STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh' # remove me
# Userfiles
USERFILES_TYPE = 'LocalUserfiles'
USERFILES_PATH = 'test/data/userfiles'
# Analytics
ANALYTICS = fake_analytics
# Github Config
GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token'
GITHUB_USER_URL = 'https://api.github.com/user'
GITHUB_USER_EMAILS = GITHUB_USER_URL + '/emails'
GITHUB_CLIENT_ID = 'cfbc4aca88e5c1b40679' # remove
GITHUB_CLIENT_SECRET = '7d1cc21e17e10cd8168410e2cd1e4561cb854ff9' # remove me
# Requests based HTTP client with a large request pool
HTTPCLIENT = build_requests_session()
class StatusTagConfig(object):
# Status tag config
STATUS_TAGS = {}
for tag_name in ['building', 'failed', 'none', 'ready']:
tag_path = os.path.join('buildstatus', tag_name + '.svg')
with open(tag_path) as tag_svg:
STATUS_TAGS[tag_name] = tag_svg.read()
class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
FakeAnalytics, StripeTestConfig, RedisBuildLogs,
UserEventConfig, LargePoolHttpClient, StatusTagConfig):
LOGGING_CONFIG = logs_init_builder(logging.WARN)
POPULATE_DB_TEST_DATA = True
class FakeTransaction(object):
def __enter__(self):
return self
def __exit__(self, exc_type, value, traceback):
pass
class TestConfig(DefaultConfig):
TESTING = True
URL_SCHEME = 'http'
URL_HOST = 'localhost:5000'
DB_NAME = ':memory:'
DB_CONNECTION_ARGS = {}
class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
UserEventConfig, TestBuildLogs, LargePoolHttpClient,
StatusTagConfig):
LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
SEND_FILE_MAX_AGE_DEFAULT = 0
POPULATE_DB_TEST_DATA = True
URL_SCHEME = 'http'
URL_HOST = 'ci.devtable.com:5000'
@staticmethod
def create_transaction(db):
return FakeTransaction()
DB_TRANSACTION_FACTORY = create_transaction
class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelTestConfig,
GitHubProdConfig, DigitalOceanConfig,
BuildNodeConfig, S3Userfiles, RedisBuildLogs,
UserEventConfig, LargePoolHttpClient,
StatusTagConfig):
LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
SEND_FILE_MAX_AGE_DEFAULT = 0
URL_SCHEME = 'http'
URL_HOST = 'ci.devtable.com:5000'
STORAGE_TYPE = 'FakeStorage'
BUILDLOGS = TestBuildLogs('logs.quay.io', 'devtable', 'building',
'deadbeef-dead-beef-dead-beefdeadbeef')
class StagingConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelProdConfig,
GitHubStagingConfig, DigitalOceanConfig, BuildNodeConfig,
S3Userfiles, RedisBuildLogs, UserEventConfig,
LargePoolHttpClient, StatusTagConfig):
LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
SEND_FILE_MAX_AGE_DEFAULT = 0
URL_SCHEME = 'https'
URL_HOST = 'staging.quay.io'
class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelProdConfig,
GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig,
S3Userfiles, RedisBuildLogs, UserEventConfig,
LargePoolHttpClient, StatusTagConfig):
LOGGING_CONFIG = logs_init_builder()
SEND_FILE_MAX_AGE_DEFAULT = 0
URL_SCHEME = 'https'
URL_HOST = 'quay.io'
USERFILES_TYPE = 'FakeUserfiles'
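
The mixin soup above collapses into a single flat DefaultConfig; deployment-specific values now come either from subclasses like TestConfig or from the env-var overrides. A hypothetical subclass for a hosted deployment, illustrative only (this commit deletes the old ProductionConfig rather than replacing it in kind):

from config import DefaultConfig


class HostedConfig(DefaultConfig):  # hypothetical name, not part of this commit
    PREFERRED_URL_SCHEME = 'https'
    SERVER_NAME = 'quay.io'
    SESSION_COOKIE_SECURE = True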


@@ -4,14 +4,14 @@ import datetime
import dateutil.parser
import json
from data.database import *
from util.validation import *
from util.names import format_robot_username
from app import storage as store
logger = logging.getLogger(__name__)
store = app.config['STORAGE']
transaction_factory = app.config['DB_TRANSACTION_FACTORY']
class DataModelException(Exception):
@@ -1485,7 +1485,7 @@ def get_pull_credentials(robotname):
return {
'username': robot.username,
'password': login_info.service_ident,
'registry': '%s://%s/v1/' % (app.config['URL_SCHEME'], app.config['URL_HOST']),
'registry': '%s://%s/v1/' % (app.config['PREFERRED_URL_SCHEME'], app.config['SERVER_NAME']),
}


@@ -1,25 +1,41 @@
import boto
import os
import logging
import hashlib
from boto.s3.key import Key
from uuid import uuid4
from flask import url_for
logger = logging.getLogger(__name__)
class FakeUserfiles(object):
def prepare_for_drop(self, mime_type):
return ('http://fake/url', uuid4())
def store_file(self, file_like_obj, content_type):
raise NotImplementedError()
def get_file_url(self, file_id, expires_in=300):
return ('http://fake/url')
def get_file_checksum(self, file_id):
return 'abcdefg'
class S3FileWriteException(Exception):
pass
class UserRequestFiles(object):
def __init__(self, s3_access_key, s3_secret_key, bucket_name):
class S3Userfiles(object):
def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
self._initialized = False
self._bucket_name = bucket_name
self._access_key = s3_access_key
self._secret_key = s3_secret_key
self._prefix = 'userfiles'
self._prefix = path
self._s3_conn = None
self._bucket = None
@@ -70,3 +86,87 @@ class UserRequestFiles(object):
full_key = os.path.join(self._prefix, file_id)
k = self._bucket.lookup(full_key)
return k.etag[1:-1][:7]
def upload_userfile_endpoint(file_id):
raise NotImplementedError()
def download_userfile_endpoint(file_id):
raise NotImplementedError()
class LocalUserfiles(object):
def __init__(self, path):
self._root_path = path
self._buffer_size = 64 * 1024 # 64 KB
def prepare_for_drop(self, mime_type):
file_id = str(uuid4())
return (url_for('upload_userfile_endpoint', file_id=file_id), file_id)
def store_file(self, file_like_obj, content_type):
file_id = str(uuid4())
path = os.path.join(self._root_path, file_id)
with open(path, 'w') as to_write:
while True:
try:
buf = file_like_obj.read(self._buffer_size)
if not buf:
break
to_write.write(buf)
except IOError:
break
return file_id
def get_file_url(self, file_id, expires_in=300):
return url_for('download_userfile_endpoint', file_id=file_id)
def get_file_checksum(self, file_id):
path = os.path.join(self._root_path, file_id)
sha_hash = hashlib.sha256()
with open(path, 'r') as to_hash:
while True:
buf = to_hash.read(self._buffer_size)
if not buf:
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
class Userfiles(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
else:
self.state = None
def init_app(self, app):
storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
path = app.config.get('USERFILES_PATH', '')
if storage_type == 'LocalUserfiles':
app.add_url_rule('/userfiles/<file_id>', 'upload_userfile_endpoint',
upload_userfile_endpoint, methods=['PUT'])
app.add_url_rule('/userfiles/<file_id>', 'download_userfile_endpoint',
download_userfile_endpoint, methods=['GET'])
userfiles = LocalUserfiles(path)
elif storage_type == 'S3Userfiles':
access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
bucket = app.config.get('USERFILES_S3_BUCKET', '')
userfiles = S3Userfiles(path, access_key, secret_key, bucket)
else:
userfiles = FakeUserfiles()
# register extension with app
app.extensions = getattr(app, 'extensions', {})
app.extensions['userfiles'] = userfiles
return userfiles
def __getattr__(self, name):
return getattr(self.state, name, None)
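
The new Userfiles wrapper is a standard Flask extension: construct once, bind with init_app(), dispatch on USERFILES_TYPE at bind time, and proxy attribute access to the selected backend. A usage sketch, assuming the USERFILES_PATH directory already exists:

from flask import Flask
from data.userfiles import Userfiles

app = Flask(__name__)
app.config['USERFILES_TYPE'] = 'LocalUserfiles'  # 'S3Userfiles' reads the USERFILES_AWS_* keys instead
app.config['USERFILES_PATH'] = 'test/data/userfiles'

userfiles = Userfiles()
userfiles.init_app(app)  # also registers the PUT/GET /userfiles/<file_id> rules for the local backend

with open('Dockerfile', 'rb') as f:
    file_id = userfiles.store_file(f, 'text/plain')  # proxied to LocalUserfiles via __getattr__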


@@ -3,7 +3,7 @@ import json
from flask import request
from app import app
from app import app, userfiles as user_files
from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
require_repo_read, require_repo_write, validate_json_request,
ApiResource, internal_only, format_date, api, Unauthorized, NotFound)
@@ -17,7 +17,6 @@ from util.names import parse_robot_username
logger = logging.getLogger(__name__)
user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']


@@ -23,8 +23,8 @@ TYPE_CONVERTER = {
int: 'integer',
}
URL_SCHEME = app.config['URL_SCHEME']
URL_HOST = app.config['URL_HOST']
PREFERRED_URL_SCHEME = app.config['PREFERRED_URL_SCHEME']
SERVER_NAME = app.config['SERVER_NAME']
def fully_qualified_name(method_view_class):
@@ -143,7 +143,7 @@ def swagger_route_data(include_internal=False, compact=False):
swagger_data = {
'apiVersion': 'v1',
'swaggerVersion': '1.2',
'basePath': '%s://%s' % (URL_SCHEME, URL_HOST),
'basePath': '%s://%s' % (PREFERRED_URL_SCHEME, SERVER_NAME),
'resourcePath': '/',
'info': {
'title': 'Quay.io API',
@@ -160,7 +160,7 @@ def swagger_route_data(include_internal=False, compact=False):
"implicit": {
"tokenName": "access_token",
"loginEndpoint": {
"url": "%s://%s/oauth/authorize" % (URL_SCHEME, URL_HOST),
"url": "%s://%s/oauth/authorize" % (PREFERRED_URL_SCHEME, SERVER_NAME),
},
},
},


@@ -2,16 +2,13 @@ import json
from collections import defaultdict
from app import app
from app import storage as store
from endpoints.api import (resource, nickname, require_repo_read, RepositoryParamResource,
format_date, NotFound)
from data import model
from util.cache import cache_control_flask_restful
store = app.config['STORAGE']
def image_view(image):
extended_props = image
if image.storage and image.storage.id:


@@ -204,9 +204,8 @@ class BuildTriggerActivate(RepositoryParamResource):
trigger.repository.name)
path = url_for('webhooks.build_trigger_webhook',
repository=repository_path, trigger_uuid=trigger.uuid)
authed_url = _prepare_webhook_url(app.config['URL_SCHEME'], '$token',
token.code, app.config['URL_HOST'],
path)
authed_url = _prepare_webhook_url(app.config['PREFERRED_URL_SCHEME'], '$token', token.code,
app.config['SERVER_NAME'], path)
final_config = handler.activate(trigger.uuid, authed_url,
trigger.auth_token, new_config_dict)


@@ -9,7 +9,7 @@ from time import time
from data.queue import image_diff_queue
from app import app
from app import storage as store
from auth.auth import process_auth, extract_namespace_repo_from_session
from util import checksums, changes
from util.http import abort
@@ -17,9 +17,9 @@ from auth.permissions import (ReadRepositoryPermission,
ModifyRepositoryPermission)
from data import model
registry = Blueprint('registry', __name__)
store = app.config['STORAGE']
logger = logging.getLogger(__name__)


@@ -6,10 +6,9 @@ import tarfile
from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile
from app import app
from app import app, userfiles as user_files
user_files = app.config['USERFILES']
client = app.config['HTTPCLIENT']


@@ -10,11 +10,10 @@ from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
from data.database import *
from data import model
from data.model import oauth
from app import app
from app import app, storage as store
logger = logging.getLogger(__name__)
store = app.config['STORAGE']
SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
for i in range(1, 10)]


@@ -0,0 +1,36 @@
from storage.local import LocalStorage
from storage.s3 import S3Storage
from storage.fakestorage import FakeStorage
class Storage(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
else:
self.state = None
def init_app(self, app):
storage_type = app.config.get('STORAGE_TYPE', 'LocalStorage')
path = app.config.get('STORAGE_PATH', '')
if storage_type == 'LocalStorage':
storage = LocalStorage(path)
elif storage_type == 'S3Storage':
access_key = app.config.get('STORAGE_AWS_ACCESS_KEY', '')
secret_key = app.config.get('STORAGE_AWS_SECRET_KEY', '')
bucket = app.config.get('STORAGE_S3_BUCKET', '')
storage = S3Storage(path, access_key, secret_key, bucket)
else:
storage = FakeStorage()
# register extension with app
app.extensions = getattr(app, 'extensions', {})
app.extensions['storage'] = storage
return storage
def __getattr__(self, name):
return getattr(self.state, name, None)
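
Storage uses the identical init_app dispatch, keyed off STORAGE_TYPE. A sketch using the local backend so it runs without credentials; the S3 keys are noted in comments:

from flask import Flask
from storage import Storage

app = Flask(__name__)
# 'S3Storage' would additionally read STORAGE_AWS_ACCESS_KEY,
# STORAGE_AWS_SECRET_KEY and STORAGE_S3_BUCKET.
app.config['STORAGE_TYPE'] = 'LocalStorage'
app.config['STORAGE_PATH'] = 'test/data/registry'

storage = Storage()
storage.init_app(app)
storage.put_content('images/sample/json', '{}')  # LocalStorage creates parent directories on write
print(storage.get_content('images/sample/json'))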


@@ -1,7 +1,7 @@
import tempfile
class Storage(object):
class BaseStorage(object):
"""Storage is organized as follow:
$ROOT/images/<image_id>/json

storage/fakestorage.py (new file)

@@ -0,0 +1,24 @@
from storage.basestorage import BaseStorage
class FakeStorage(BaseStorage):
def _init_path(self, path=None, create=False):
return path
def get_content(self, path):
raise IOError('Fake files are fake!')
def put_content(self, path, content):
return path
def stream_read(self, path):
yield ''
def stream_write(self, path, fp):
pass
def remove(self, path):
pass
def exists(self, path):
return False


@@ -2,85 +2,85 @@
import os
import shutil
from basestorage import Storage
from storage.basestorage import BaseStorage
class LocalStorage(Storage):
class LocalStorage(BaseStorage):
def __init__(self, storage_path):
self._root_path = storage_path
def __init__(self, storage_path):
self._root_path = storage_path
def _init_path(self, path=None, create=False):
path = os.path.join(self._root_path, path) if path else self._root_path
if create is True:
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
return path
def _init_path(self, path=None, create=False):
path = os.path.join(self._root_path, path) if path else self._root_path
if create is True:
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
return path
def get_content(self, path):
path = self._init_path(path)
with open(path, mode='r') as f:
return f.read()
def get_content(self, path):
path = self._init_path(path)
with open(path, mode='r') as f:
return f.read()
def put_content(self, path, content):
path = self._init_path(path, create=True)
with open(path, mode='w') as f:
f.write(content)
return path
def put_content(self, path, content):
path = self._init_path(path, create=True)
with open(path, mode='w') as f:
f.write(content)
return path
def stream_read(self, path):
path = self._init_path(path)
with open(path, mode='rb') as f:
while True:
buf = f.read(self.buffer_size)
if not buf:
break
yield buf
def stream_read(self, path):
path = self._init_path(path)
with open(path, mode='rb') as f:
while True:
buf = f.read(self.buffer_size)
if not buf:
break
yield buf
def stream_read_file(self, path):
path = self._init_path(path)
return open(path, mode='rb')
def stream_read_file(self, path):
path = self._init_path(path)
return open(path, mode='rb')
def stream_write(self, path, fp):
# Size is mandatory
path = self._init_path(path, create=True)
with open(path, mode='wb') as f:
while True:
try:
buf = fp.read(self.buffer_size)
if not buf:
break
f.write(buf)
except IOError:
break
def list_directory(self, path=None):
path = self._init_path(path)
prefix = path[len(self._root_path) + 1:] + '/'
exists = False
for d in os.listdir(path):
exists = True
yield prefix + d
if exists is False:
# Raises OSError even when the directory is empty
# (to be consistent with S3)
raise OSError('No such directory: \'{0}\''.format(path))
def exists(self, path):
path = self._init_path(path)
return os.path.exists(path)
def remove(self, path):
path = self._init_path(path)
if os.path.isdir(path):
shutil.rmtree(path)
return
try:
os.remove(path)
except OSError:
pass
def get_size(self, path):
path = self._init_path(path)
return os.path.getsize(path)
def stream_write(self, path, fp):
# Size is mandatory
path = self._init_path(path, create=True)
with open(path, mode='wb') as f:
while True:
try:
buf = fp.read(self.buffer_size)
if not buf:
break
f.write(buf)
except IOError:
break
def list_directory(self, path=None):
path = self._init_path(path)
prefix = path[len(self._root_path) + 1:] + '/'
exists = False
for d in os.listdir(path):
exists = True
yield prefix + d
if exists is False:
# Raises OSError even when the directory is empty
# (to be consistent with S3)
raise OSError('No such directory: \'{0}\''.format(path))
def exists(self, path):
path = self._init_path(path)
return os.path.exists(path)
def remove(self, path):
path = self._init_path(path)
if os.path.isdir(path):
shutil.rmtree(path)
return
try:
os.remove(path)
except OSError:
pass
def get_size(self, path):
path = self._init_path(path)
return os.path.getsize(path)


@@ -5,7 +5,7 @@ import logging
import boto.s3.connection
import boto.s3.key
from storage.basestorage import Storage
from storage.basestorage import BaseStorage
logger = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ class StreamReadKeyAsFile(object):
return resp
class S3Storage(Storage):
class S3Storage(BaseStorage):
def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
self._initialized = False


@@ -1,40 +0,0 @@
from uuid import uuid4
from storage.basestorage import Storage
class FakeStorage(Storage):
def _init_path(self, path=None, create=False):
return path
def get_content(self, path):
raise IOError('Fake files are fake!')
def put_content(self, path, content):
return path
def stream_read(self, path):
yield ''
def stream_write(self, path, fp):
pass
def remove(self, path):
pass
def exists(self, path):
return False
class FakeUserfiles(object):
def prepare_for_drop(self, mime_type):
return ('http://fake/url', uuid4())
def store_file(self, file_like_obj, content_type):
raise NotImplementedError()
def get_file_url(self, file_id, expires_in=300):
return ('http://fake/url')
def get_file_checksum(self, file_id):
return 'abcdefg'


@@ -3,13 +3,12 @@ import json
from data.database import Image, ImageStorage, Repository
from data import model
from app import app
from app import app, storage as store
import boto.s3.connection
import boto.s3.key
store = app.config['STORAGE']
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)


@@ -1,9 +1,6 @@
from data.database import Image, RepositoryTag, Repository
from app import app
store = app.config['STORAGE']
from app import storage as store
tag_query = (RepositoryTag


@@ -1,13 +1,11 @@
from data.database import Image
from app import app
from app import app, storage as store
live_image_id_set = set()
for image in Image.select():
live_image_id_set.add(image.docker_image_id)
store = app.config['STORAGE']
storage_image_id_set = set()
for customer in store.list_directory('images/'):
for repo in store.list_directory(customer):


@@ -18,7 +18,7 @@ from threading import Event
from data.queue import dockerfile_build_queue
from data import model
from workers.worker import Worker
from app import app
from app import app, userfiles as user_files
from util.safetar import safe_extractall
@@ -30,7 +30,6 @@ formatter = logging.Formatter(FORMAT)
logger = logging.getLogger(__name__)
user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']
TIMEOUT_PERIOD_MINUTES = 20