Archived logs commit 1. Squash me.
This commit is contained in:
parent
54fbb2a4c0
commit
451e034ca1
9 changed files with 402 additions and 22 deletions
39
data/archivedlogs.py
Normal file
39
data/archivedlogs.py
Normal file
|
@@ -0,0 +1,39 @@
|
|||
from data.userfiles import LocalUserfiles, UserfilesHandlers, S3Userfiles, FakeUserfiles
|
||||
|
||||
class LogArchive(object):
  """Flask extension exposing the configured archived-logs storage backend.

  The backend is one of the userfiles-style storage classes, selected by the
  app's LOG_ARCHIVE_TYPE setting. Attribute access on this object is proxied
  to the selected backend once initialized.
  """

  def __init__(self, app=None):
    self.app = app
    # Standard Flask extension factory pattern: build the backend now if an
    # app was supplied, otherwise wait for an explicit init_app() call.
    self.state = self.init_app(app) if app is not None else None

  def _build_archive(self, app, storage_type, path):
    # Construct the storage backend matching the configured archive type.
    if storage_type == 'LocalArchivedLogs':
      backend = LocalUserfiles(app, path)
      # Local storage needs an HTTP endpoint so archived logs can be served.
      app.add_url_rule('/archivedlogs/<file_id>',
                       view_func=UserfilesHandlers.as_view('log_archive_handlers',
                                                           local_userfiles=backend))
      return backend

    if storage_type == 'S3ArchivedLogs':
      access_key = app.config.get('LOG_ARCHIVE_AWS_ACCESS_KEY', '')
      secret_key = app.config.get('LOG_ARCHIVE_AWS_SECRET_KEY', '')
      bucket = app.config.get('LOG_ARCHIVE_S3_BUCKET', '')
      return S3Userfiles(path, access_key, secret_key, bucket)

    if storage_type == 'FakeArchivedLogs':
      return FakeUserfiles()

    raise RuntimeError('Unknown log archive type: %s' % storage_type)

  def init_app(self, app):
    """Read the archive configuration from *app*, build the storage backend,
    register it under app.extensions['log_archive'], and return it.

    Raises RuntimeError for an unrecognized LOG_ARCHIVE_TYPE value.
    """
    storage_type = app.config.get('LOG_ARCHIVE_TYPE', 'LocalArchivedLogs')
    path = app.config.get('LOG_ARCHIVE_PATH', '')
    archive = self._build_archive(app, storage_type, path)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['log_archive'] = archive
    return archive

  def __getattr__(self, name):
    # Proxy unknown attribute lookups to the backend; yields None both when
    # the extension is uninitialized and when the backend lacks the attribute.
    return getattr(self.state, name, None)
|
Reference in a new issue