Merge branch 'master' into better-emails

Joseph Schorr 2014-09-18 13:20:32 -04:00
commit b212dbb2ab
65 changed files with 2314 additions and 407 deletions

data/archivedlogs.py (new file, 56 lines)

@@ -0,0 +1,56 @@
import logging
from gzip import GzipFile
from flask import send_file, abort
from cStringIO import StringIO
from data.userfiles import DelegateUserfiles, UserfilesHandlers
JSON_MIMETYPE = 'application/json'
logger = logging.getLogger(__name__)
class LogArchiveHandlers(UserfilesHandlers):
def get(self, file_id):
path = self._files.get_file_id_path(file_id)
try:
with self._storage.stream_read_file(self._locations, path) as gzip_stream:
with GzipFile(fileobj=gzip_stream) as unzipped:
unzipped_buffer = StringIO(unzipped.read())
return send_file(unzipped_buffer, mimetype=JSON_MIMETYPE)
except IOError:
abort(404)
class LogArchive(object):
def __init__(self, app=None, distributed_storage=None):
self.app = app
if app is not None:
self.state = self.init_app(app, distributed_storage)
else:
self.state = None
def init_app(self, app, distributed_storage):
location = app.config.get('LOG_ARCHIVE_LOCATION')
path = app.config.get('LOG_ARCHIVE_PATH', None)
handler_name = 'logarchive_handlers'
log_archive = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
app.add_url_rule('/logarchive/<file_id>',
view_func=LogArchiveHandlers.as_view(handler_name,
distributed_storage=distributed_storage,
location=location,
files=log_archive))
# register extension with app
app.extensions = getattr(app, 'extensions', {})
app.extensions['log_archive'] = log_archive
return log_archive
def __getattr__(self, name):
return getattr(self.state, name, None)
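Editor's note: the handler's read path is just a gzip round-trip over a file-like stream. A standalone sketch of the same pattern (Python 2, matching the cStringIO import above; the sample entries are illustrative):

# Standalone sketch of the gzip round-trip performed by LogArchiveHandlers.get.
import json
from gzip import GzipFile
from cStringIO import StringIO

entries = [{'message': 'Step 1/3 complete', 'type': 'phase'}]

# Archiving side: store the entries as gzipped JSON.
compressed = StringIO()
with GzipFile(fileobj=compressed, mode='wb') as gz:
  gz.write(json.dumps(entries))
compressed.seek(0)

# Serving side: wrap the storage stream in GzipFile and send the plain JSON.
with GzipFile(fileobj=compressed) as unzipped:
  assert json.loads(unzipped.read()) == entries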


@@ -3,7 +3,7 @@ import stripe
from datetime import datetime, timedelta
from calendar import timegm
-from util.collections import AttrDict
+from util.morecollections import AttrDict
PLANS = [
# Deprecated Plans


@@ -2,6 +2,11 @@ import redis
import json
from util.dynamic import import_class
from datetime import timedelta
ONE_DAY = timedelta(days=1)
class BuildStatusRetrievalError(Exception):
pass
@@ -25,7 +30,7 @@ class RedisBuildLogs(object):
"""
return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))
-  def append_log_message(self, build_id, log_message, log_type=None):
+  def append_log_message(self, build_id, log_message, log_type=None, log_data=None):
"""
Wraps the message in an envelope and pushes it to the end of the log entry
list and returns the index at which it was inserted.
@@ -37,6 +42,9 @@ class RedisBuildLogs(object):
if log_type:
log_obj['type'] = log_type
if log_data:
log_obj['data'] = log_data
return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1
def get_log_entries(self, build_id, start_index):
@@ -51,6 +59,13 @@ class RedisBuildLogs(object):
except redis.ConnectionError:
raise BuildStatusRetrievalError('Cannot retrieve build logs')
def expire_log_entries(self, build_id):
"""
Sets the log entries to expire in 1 day.
"""
self._redis.expire(self._logs_key(build_id), ONE_DAY)
@staticmethod
def _status_key(build_id):
return 'builds/%s/status' % build_id
@@ -106,4 +121,4 @@ class BuildLogs(object):
return buildlogs
def __getattr__(self, name):
-    return getattr(self.state, name, None)
\ No newline at end of file
+    return getattr(self.state, name, None)
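Editor's note: the envelope written by append_log_message now carries up to three fields. An illustrative instance (values are made up, and the 'message' key is assumed from the wrapping described in the docstring above):

log_obj = {
  'message': 'Pulling base image',  # the wrapped log_message
  'type': 'phase',                  # present only when log_type is passed
  'data': {'elapsed': 4.2},         # present only when log_data is passed
}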


@@ -8,7 +8,7 @@ from peewee import *
from data.read_slave import ReadSlaveModel
from sqlalchemy.engine.url import make_url
from urlparse import urlparse
from util.names import urn_generator
logger = logging.getLogger(__name__)
@@ -21,8 +21,24 @@ SCHEME_DRIVERS = {
'postgresql+psycopg2': PostgresqlDatabase,
}
SCHEME_RANDOM_FUNCTION = {
'mysql': fn.Rand,
'mysql+pymysql': fn.Rand,
'sqlite': fn.Random,
'postgresql': fn.Random,
'postgresql+psycopg2': fn.Random,
}
class CallableProxy(Proxy):
def __call__(self, *args, **kwargs):
if self.obj is None:
raise AttributeError('Cannot use uninitialized Proxy.')
return self.obj(*args, **kwargs)
db = Proxy()
read_slave = Proxy()
db_random_func = CallableProxy()
def _db_from_url(url, db_kwargs):
parsed_url = make_url(url)
@@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs):
return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
def configure(config_object):
db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
write_db_uri = config_object['DB_URI']
db.initialize(_db_from_url(write_db_uri, db_kwargs))
parsed_write_uri = make_url(write_db_uri)
db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
if read_slave_uri is not None:
read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))
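Editor's note: CallableProxy makes the dialect-specific random function invokable once configure() has run. A hedged sketch of a call site (the query itself is illustrative, not from this diff):

# After configure(), db_random_func() resolves to fn.Rand() on MySQL and
# fn.Random() on SQLite/Postgres, keeping random ordering dialect-portable.
random_repo = (Repository
               .select()
               .order_by(db_random_func())
               .limit(1))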
@@ -112,6 +132,15 @@ class TeamMember(BaseModel):
)
class TeamMemberInvite(BaseModel):
# Note: Either user OR email will be filled in, but not both.
user = ForeignKeyField(User, index=True, null=True)
email = CharField(null=True)
team = ForeignKeyField(Team, index=True)
inviter = ForeignKeyField(User, related_name='inviter')
invite_token = CharField(default=urn_generator(['teaminvite']))
class LoginService(BaseModel):
name = CharField(unique=True, index=True)
@@ -289,6 +318,16 @@ class RepositoryTag(BaseModel):
)
class BUILD_PHASE(object):
""" Build phases enum """
ERROR = 'error'
UNPACKING = 'unpacking'
PULLING = 'pulling'
BUILDING = 'building'
PUSHING = 'pushing'
COMPLETE = 'complete'
class RepositoryBuild(BaseModel):
uuid = CharField(default=uuid_generator, index=True)
repository = ForeignKeyField(Repository, index=True)
@@ -300,6 +339,7 @@ class RepositoryBuild(BaseModel):
display_name = CharField()
trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot')
logs_archived = BooleanField(default=False)
class QueueItem(BaseModel):
@@ -410,4 +450,4 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind,
Notification, ImageStorageLocation, ImageStoragePlacement,
ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
-              RepositoryAuthorizedEmail]
+              RepositoryAuthorizedEmail, TeamMemberInvite]
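Editor's note: a hedged sketch of how the new BUILD_PHASE enum and logs_archived flag are meant to be used together (the worker code shown is hypothetical, not part of this diff):

# Record progress through the enum phases during a build:
build.phase = BUILD_PHASE.PUSHING
build.save()

# Once the build is terminal and its Redis logs have been archived:
build.logs_archived = True
build.save()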


@@ -8,7 +8,7 @@ from peewee import SqliteDatabase
from data.database import all_models, db
from app import app
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from util.collections import AttrDict
+from util.morecollections import AttrDict
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.


@@ -0,0 +1,26 @@
"""Add support for build log migration.
Revision ID: 34fd69f63809
Revises: 4a0c94399f38
Create Date: 2014-09-12 11:50:09.217777
"""
# revision identifiers, used by Alembic.
revision = '34fd69f63809'
down_revision = '4a0c94399f38'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('repositorybuild', sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('repositorybuild', 'logs_archived')
### end Alembic commands ###


@@ -0,0 +1,81 @@
"""Email invites for joining a team.
Revision ID: 51d04d0e7e6f
Revises: 34fd69f63809
Create Date: 2014-09-15 23:51:35.478232
"""
# revision identifiers, used by Alembic.
revision = '51d04d0e7e6f'
down_revision = '34fd69f63809'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('teammemberinvite',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.Column('inviter_id', sa.Integer(), nullable=False),
sa.Column('invite_token', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False)
op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False)
op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False)
### end Alembic commands ###
# Manually add the new logentrykind types
op.bulk_insert(tables.logentrykind,
[
{'id':42, 'name':'org_invite_team_member'},
{'id':43, 'name':'org_team_member_invite_accepted'},
{'id':44, 'name':'org_team_member_invite_declined'},
{'id':45, 'name':'org_delete_team_member_invite'},
])
op.bulk_insert(tables.notificationkind,
[
{'id':10, 'name':'org_team_invite'},
])
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member')))
)
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted')))
)
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined')))
)
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite')))
)
op.execute(
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
)
op.drop_index('teammemberinvite_user_id', table_name='teammemberinvite')
op.drop_index('teammemberinvite_team_id', table_name='teammemberinvite')
op.drop_index('teammemberinvite_inviter_id', table_name='teammemberinvite')
op.drop_table('teammemberinvite')
### end Alembic commands ###


@@ -12,6 +12,7 @@ from util.backoff import exponential_backoff
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
logger = logging.getLogger(__name__)
@@ -48,6 +49,9 @@ class InvalidRobotException(DataModelException):
class InvalidTeamException(DataModelException):
pass
class InvalidTeamMemberException(DataModelException):
pass
class InvalidPasswordException(DataModelException):
pass
@@ -73,6 +77,10 @@ class TooManyUsersException(DataModelException):
pass
class UserAlreadyInTeam(DataModelException):
pass
class TooManyLoginAttemptsException(Exception):
def __init__(self, message, retry_after):
super(TooManyLoginAttemptsException, self).__init__(message)
@@ -332,12 +340,42 @@ def remove_team(org_name, team_name, removed_by_username):
team.delete_instance(recursive=True, delete_nullable=True)
def add_or_invite_to_team(inviter, team, user=None, email=None):
# If the user is a member of the organization, then we simply add the
# user directly to the team. Otherwise, an invite is created for the user/email.
# We return None if the user was directly added and the invite object if the user was invited.
requires_invite = True
if user:
orgname = team.organization.username
# If the user is part of the organization (or a robot), then no invite is required.
if user.robot:
requires_invite = False
if not user.username.startswith(orgname + '+'):
raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
'as it is not a member of the organization')
else:
Org = User.alias()
found = User.select(User.username)
found = found.where(User.username == user.username).join(TeamMember).join(Team)
found = found.join(Org, on=(Org.username == orgname)).limit(1)
requires_invite = not any(found)
# If we have a valid user and no invite is required, simply add the user to the team.
if user and not requires_invite:
add_user_to_team(user, team)
return None
email_address = email if not user else None
return TeamMemberInvite.create(user=user, email=email_address, team=team, inviter=inviter)
def add_user_to_team(user, team):
try:
return TeamMember.create(user=user, team=team)
except Exception:
-    raise DataModelException('Unable to add user \'%s\' to team: \'%s\'' %
-                             (user.username, team.name))
+    raise UserAlreadyInTeam('User \'%s\' is already a member of team \'%s\'' %
+                            (user.username, team.name))
def remove_user_from_team(org_name, team_name, username, removed_by_username):
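Editor's note: a hedged usage sketch of the new helper; team, inviter, and member are assumed to be already-loaded model instances:

# Inviting by email always produces a TeamMemberInvite carrying the token
# that will be embedded in the invitation email.
invite = add_or_invite_to_team(inviter, team, email='new-user@example.com')

# Passing an existing org member adds them directly and returns None;
# a user outside the organization gets an invite bound to their account.
maybe_invite = add_or_invite_to_team(inviter, team, user=member)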
@@ -512,6 +550,13 @@ def get_user(username):
return None
def get_user_or_org(username):
try:
return User.get(User.username == username, User.robot == False)
except User.DoesNotExist:
return None
def get_user_or_org_by_customer_id(customer_id):
try:
return User.get(User.stripe_id == customer_id)
@@ -635,6 +680,10 @@ def get_organization_team_members(teamid):
query = joined.where(Team.id == teamid)
return query
def get_organization_team_member_invites(teamid):
joined = TeamMemberInvite.select().join(Team).join(User)
query = joined.where(Team.id == teamid)
return query
def get_organization_member_set(orgname):
Org = User.alias()
@@ -1824,6 +1873,32 @@ def delete_notifications_by_kind(target, kind_name):
Notification.delete().where(Notification.target == target,
Notification.kind == kind_ref).execute()
def delete_matching_notifications(target, kind_name, **kwargs):
kind_ref = NotificationKind.get(name=kind_name)
# Load all notifications for the user with the given kind.
notifications = Notification.select().where(
Notification.target == target,
Notification.kind == kind_ref)
# For each, match the metadata to the specified values.
for notification in notifications:
matches = True
try:
metadata = json.loads(notification.metadata_json)
except:
continue
for (key, value) in kwargs.iteritems():
if not key in metadata or metadata[key] != value:
matches = False
break
if not matches:
continue
notification.delete_instance()
def get_active_users():
return User.select().where(User.organization == False, User.robot == False)
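Editor's note: an illustrative call to the new helper, e.g. clearing the in-app invite notification once an invite is resolved (the 'code' metadata key is an assumption, not established by this diff):

# Remove only the notification whose metadata matches this invite's token.
delete_matching_notifications(user, 'org_team_invite', code=invite.invite_token)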
@@ -1831,6 +1906,16 @@ def get_active_users():
def get_active_user_count():
return get_active_users().count()
def detach_external_login(user, service_name):
try:
service = LoginService.get(name = service_name)
except LoginService.DoesNotExist:
return
FederatedLogin.delete().where(FederatedLogin.user == user,
FederatedLogin.service == service).execute()
def delete_user(user):
user.delete_instance(recursive=True, delete_nullable=True)
@@ -1879,3 +1964,72 @@ def confirm_email_authorization_for_repo(code):
found.save()
return found
def delete_team_email_invite(team, email):
found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
found.delete_instance()
def delete_team_user_invite(team, user):
try:
found = TeamMemberInvite.get(TeamMemberInvite.user == user, TeamMemberInvite.team == team)
except TeamMemberInvite.DoesNotExist:
return False
found.delete_instance()
return True
def lookup_team_invites(user):
return TeamMemberInvite.select().where(TeamMemberInvite.user == user)
def lookup_team_invite(code, user=None):
# Lookup the invite code.
try:
found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code)
except TeamMemberInvite.DoesNotExist:
raise DataModelException('Invalid confirmation code.')
if user and found.user != user:
raise DataModelException('Invalid confirmation code.')
return found
def delete_team_invite(code, user=None):
found = lookup_team_invite(code, user)
team = found.team
inviter = found.inviter
found.delete_instance()
return (team, inviter)
def confirm_team_invite(code, user):
found = lookup_team_invite(code)
# If the invite is for a specific user, we have to confirm that here.
if found.user is not None and found.user != user:
message = """This invite is intended for user "%s".
Please login to that account and try again.""" % found.user.username
raise DataModelException(message)
# Add the user to the team.
try:
add_user_to_team(user, found.team)
except UserAlreadyInTeam:
# Ignore.
pass
# Delete the invite and return the team.
team = found.team
inviter = found.inviter
found.delete_instance()
return (team, inviter)
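Editor's note: a hedged sketch of the accept/decline flow an endpoint might drive with these helpers; code and user are assumed to come from the request context:

# Accept: validates the token, enforces the addressed user, joins the team.
try:
  team, inviter = confirm_team_invite(code, user)
except DataModelException:
  pass  # invalid code, or an invite addressed to a different account

# Decline or revoke: drops the invite without joining the team.
team, inviter = delete_team_invite(code, user=user)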
def archivable_buildlogs_query():
presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
return (RepositoryBuild.select()
.where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
(RepositoryBuild.phase == BUILD_PHASE.ERROR) |
(RepositoryBuild.started < presumed_dead_date), RepositoryBuild.logs_archived == False))
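Editor's note: a minimal sketch of the background pass this query is built for (the archiving worker itself is outside this diff):

# Hypothetical archiving loop over terminal or presumed-dead builds.
for build in archivable_buildlogs_query():
  # ...stream the Redis log entries into LogArchive as gzipped JSON...
  build.logs_archived = True
  build.save()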


@@ -46,7 +46,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
def validate_redirect_uri(self, client_id, redirect_uri):
try:
app = OAuthApplication.get(client_id=client_id)
-      if app.redirect_uri and redirect_uri.startswith(app.redirect_uri):
+      if app.redirect_uri and redirect_uri and redirect_uri.startswith(app.redirect_uri):
return True
return False
except OAuthApplication.DoesNotExist:
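Editor's note: the added redirect_uri check matters because the value can legitimately be absent; without it the comparison raises instead of returning False. A small illustration (values are made up):

redirect_uri = None
registered = 'https://example.com/callback'
# Before the fix: redirect_uri.startswith(...) raises AttributeError on None.
# With the guard, a missing redirect_uri simply fails validation:
valid = bool(registered and redirect_uri and redirect_uri.startswith(registered))
assert valid is False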


@@ -1,110 +1,35 @@
import boto
import os
import logging
import hashlib
import magic
from boto.s3.key import Key
from uuid import uuid4
from flask import url_for, request, send_file, make_response, abort
from flask.views import View
from _pyio import BufferedReader
logger = logging.getLogger(__name__)
class FakeUserfiles(object):
def prepare_for_drop(self, mime_type):
return ('http://fake/url', uuid4())
def store_file(self, file_like_obj, content_type):
raise NotImplementedError()
def get_file_url(self, file_id, expires_in=300):
return ('http://fake/url')
def get_file_checksum(self, file_id):
return 'abcdefg'
class S3FileWriteException(Exception):
pass
class S3Userfiles(object):
def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
self._initialized = False
self._bucket_name = bucket_name
self._access_key = s3_access_key
self._secret_key = s3_secret_key
self._prefix = path
self._s3_conn = None
self._bucket = None
def _initialize_s3(self):
if not self._initialized:
self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
self._bucket = self._s3_conn.get_bucket(self._bucket_name)
self._initialized = True
def prepare_for_drop(self, mime_type):
""" Returns a signed URL to upload a file to our bucket. """
self._initialize_s3()
logger.debug('Requested upload url with content type: %s' % mime_type)
file_id = str(uuid4())
full_key = os.path.join(self._prefix, file_id)
k = Key(self._bucket, full_key)
url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
encrypt_key=True)
return (url, file_id)
def store_file(self, file_like_obj, content_type):
self._initialize_s3()
file_id = str(uuid4())
full_key = os.path.join(self._prefix, file_id)
k = Key(self._bucket, full_key)
logger.debug('Setting s3 content type to: %s' % content_type)
k.set_metadata('Content-Type', content_type)
bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
rewind=True)
if bytes_written == 0:
raise S3FileWriteException('Unable to write file to S3')
return file_id
def get_file_url(self, file_id, expires_in=300, mime_type=None):
self._initialize_s3()
full_key = os.path.join(self._prefix, file_id)
k = Key(self._bucket, full_key)
headers = None
if mime_type:
headers={'Content-Type': mime_type}
return k.generate_url(expires_in, headers=headers)
def get_file_checksum(self, file_id):
self._initialize_s3()
full_key = os.path.join(self._prefix, file_id)
k = self._bucket.lookup(full_key)
return k.etag[1:-1][:7]
class UserfilesHandlers(View):
methods = ['GET', 'PUT']
-  def __init__(self, local_userfiles):
-    self._userfiles = local_userfiles
+  def __init__(self, distributed_storage, location, files):
+    self._storage = distributed_storage
+    self._files = files
+    self._locations = {location}
self._magic = magic.Magic(mime=True)
def get(self, file_id):
-    path = self._userfiles.file_path(file_id)
-    if not os.path.exists(path):
+    path = self._files.get_file_id_path(file_id)
+    try:
+      file_stream = self._storage.stream_read_file(self._locations, path)
+      buffered = BufferedReader(file_stream)
+      file_header_bytes = buffered.peek(1024)
+      return send_file(buffered, mimetype=self._magic.from_buffer(file_header_bytes))
+    except IOError:
      abort(404)
-    logger.debug('Sending path: %s' % path)
-    return send_file(path, mimetype=self._magic.from_file(path))
def put(self, file_id):
input_stream = request.stream
if request.headers.get('transfer-encoding') == 'chunked':
@@ -112,7 +37,10 @@ class UserfilesHandlers(View):
# encoding (Gunicorn)
input_stream = request.environ['wsgi.input']
-    self._userfiles.store_stream(input_stream, file_id)
+    c_type = request.headers.get('Content-Type', None)
+    path = self._files.get_file_id_path(file_id)
+    self._storage.stream_write(self._locations, path, input_stream, c_type)
return make_response('Okay')
@@ -123,99 +51,82 @@ class UserfilesHandlers(View):
return self.put(file_id)
class LocalUserfiles(object):
def __init__(self, app, path):
self._root_path = path
self._buffer_size = 64 * 1024 # 64 KB
class DelegateUserfiles(object):
def __init__(self, app, distributed_storage, location, path, handler_name):
self._app = app
self._storage = distributed_storage
self._locations = {location}
self._prefix = path
self._handler_name = handler_name
def _build_url_adapter(self):
return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
script_name=self._app.config['APPLICATION_ROOT'] or '/',
url_scheme=self._app.config['PREFERRED_URL_SCHEME'])
def prepare_for_drop(self, mime_type):
def get_file_id_path(self, file_id):
return os.path.join(self._prefix, file_id)
def prepare_for_drop(self, mime_type, requires_cors=True):
""" Returns a signed URL to upload a file to our bucket. """
logger.debug('Requested upload url with content type: %s' % mime_type)
file_id = str(uuid4())
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
path = self.get_file_id_path(file_id)
url = self._storage.get_direct_upload_url(self._locations, path, mime_type, requires_cors)
def file_path(self, file_id):
if '..' in file_id or file_id.startswith('/'):
raise RuntimeError('Invalid Filename')
return os.path.join(self._root_path, file_id)
if url is None:
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return (url_for(self._handler_name, file_id=file_id, _external=True), file_id)
def store_stream(self, stream, file_id):
path = self.file_path(file_id)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
return (url, file_id)
with open(path, 'w') as to_write:
while True:
try:
buf = stream.read(self._buffer_size)
if not buf:
break
to_write.write(buf)
except IOError:
break
def store_file(self, file_like_obj, content_type, content_encoding=None, file_id=None):
if file_id is None:
file_id = str(uuid4())
def store_file(self, file_like_obj, content_type):
file_id = str(uuid4())
# Rewind the file to match what s3 does
file_like_obj.seek(0, os.SEEK_SET)
self.store_stream(file_like_obj, file_id)
path = self.get_file_id_path(file_id)
self._storage.stream_write(self._locations, path, file_like_obj, content_type,
content_encoding)
return file_id
def get_file_url(self, file_id, expires_in=300):
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return url_for('userfiles_handlers', file_id=file_id, _external=True)
def get_file_url(self, file_id, expires_in=300, requires_cors=False):
path = self.get_file_id_path(file_id)
url = self._storage.get_direct_download_url(self._locations, path, expires_in, requires_cors)
if url is None:
with self._app.app_context() as ctx:
ctx.url_adapter = self._build_url_adapter()
return url_for(self._handler_name, file_id=file_id, _external=True)
return url
def get_file_checksum(self, file_id):
path = self.file_path(file_id)
sha_hash = hashlib.sha256()
with open(path, 'r') as to_hash:
while True:
buf = to_hash.read(self._buffer_size)
if not buf:
break
sha_hash.update(buf)
return sha_hash.hexdigest()[:7]
path = self.get_file_id_path(file_id)
return self._storage.get_checksum(self._locations, path)
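Editor's note: both get_file_url and prepare_for_drop above follow the same shape, asking the storage engine for a signed direct URL and falling back to the Flask handler route when none is available. A condensed sketch of that pattern (names mirror the attributes above; argument lists abbreviated):

# Condensed fallback pattern used by DelegateUserfiles.
url = storage.get_direct_download_url(locations, path, expires_in, requires_cors)
if url is None:
  # The storage engine (e.g. local storage) cannot sign URLs; route the
  # request through the registered UserfilesHandlers view instead.
  url = url_for(handler_name, file_id=file_id, _external=True)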
class Userfiles(object):
def __init__(self, app=None):
def __init__(self, app=None, distributed_storage=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
self.state = self.init_app(app, distributed_storage)
else:
self.state = None
def init_app(self, app):
storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
path = app.config.get('USERFILES_PATH', '')
def init_app(self, app, distributed_storage):
location = app.config.get('USERFILES_LOCATION')
path = app.config.get('USERFILES_PATH', None)
if storage_type == 'LocalUserfiles':
userfiles = LocalUserfiles(app, path)
app.add_url_rule('/userfiles/<file_id>',
view_func=UserfilesHandlers.as_view('userfiles_handlers',
local_userfiles=userfiles))
handler_name = 'userfiles_handlers'
elif storage_type == 'S3Userfiles':
access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
bucket = app.config.get('USERFILES_S3_BUCKET', '')
userfiles = S3Userfiles(path, access_key, secret_key, bucket)
userfiles = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
elif storage_type == 'FakeUserfiles':
userfiles = FakeUserfiles()
else:
raise RuntimeError('Unknown userfiles type: %s' % storage_type)
app.add_url_rule('/userfiles/<file_id>',
view_func=UserfilesHandlers.as_view(handler_name,
distributed_storage=distributed_storage,
location=location,
files=userfiles))
# register extension with app
app.extensions = getattr(app, 'extensions', {})