Archived logs commit 1. Squash me.
parent 54fbb2a4c0
commit 451e034ca1
9 changed files with 402 additions and 22 deletions
data/archivedlogs.py (new file, 39 additions)
@@ -0,0 +1,39 @@
+from data.userfiles import LocalUserfiles, UserfilesHandlers, S3Userfiles, FakeUserfiles
+
+class LogArchive(object):
+  def __init__(self, app=None):
+    self.app = app
+    if app is not None:
+      self.state = self.init_app(app)
+    else:
+      self.state = None
+
+  def init_app(self, app):
+    storage_type = app.config.get('LOG_ARCHIVE_TYPE', 'LocalArchivedLogs')
+    path = app.config.get('LOG_ARCHIVE_PATH', '')
+
+    if storage_type == 'LocalArchivedLogs':
+      archive = LocalUserfiles(app, path)
+      app.add_url_rule('/archivedlogs/<file_id>',
+                       view_func=UserfilesHandlers.as_view('log_archive_handlers',
+                                                           local_userfiles=archive))
+
+    elif storage_type == 'S3ArchivedLogs':
+      access_key = app.config.get('LOG_ARCHIVE_AWS_ACCESS_KEY', '')
+      secret_key = app.config.get('LOG_ARCHIVE_AWS_SECRET_KEY', '')
+      bucket = app.config.get('LOG_ARCHIVE_S3_BUCKET', '')
+      archive = S3Userfiles(path, access_key, secret_key, bucket)
+
+    elif storage_type == 'FakeArchivedLogs':
+      archive = FakeUserfiles()
+
+    else:
+      raise RuntimeError('Unknown log archive type: %s' % storage_type)
+
+    # register extension with app
+    app.extensions = getattr(app, 'extensions', {})
+    app.extensions['log_archive'] = archive
+    return archive
+
+  def __getattr__(self, name):
+    return getattr(self.state, name, None)
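The new file follows Flask's two-phase extension pattern: LogArchive can be constructed with or without an app, init_app picks a backend from LOG_ARCHIVE_TYPE, and __getattr__ proxies attribute access to that backend. A minimal consumption sketch, assuming a Flask app object and illustrative config values (none of these names are part of the commit itself):

from flask import Flask
from data.archivedlogs import LogArchive

app = Flask(__name__)
app.config['LOG_ARCHIVE_TYPE'] = 'LocalArchivedLogs'  # or 'S3ArchivedLogs' / 'FakeArchivedLogs'
app.config['LOG_ARCHIVE_PATH'] = '/data/archivedlogs'  # illustrative path

log_archive = LogArchive(app)  # binds immediately; LogArchive() + init_app(app) also works

# Attribute access is proxied to the chosen backend via __getattr__,
# so backend methods such as store_file can be called directly, e.g.:
# log_archive.store_file(log_stream, 'application/json')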
@@ -289,6 +289,16 @@ class RepositoryTag(BaseModel):
   )
 
 
+class BUILD_PHASE(object):
+  """ Build phases enum """
+  ERROR = 'error'
+  UNPACKING = 'unpacking'
+  PULLING = 'pulling'
+  BUILDING = 'building'
+  PUSHING = 'pushing'
+  COMPLETE = 'complete'
+
+
 class RepositoryBuild(BaseModel):
   uuid = CharField(default=uuid_generator, index=True)
   repository = ForeignKeyField(Repository, index=True)
@@ -300,6 +310,7 @@ class RepositoryBuild(BaseModel):
   display_name = CharField()
   trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
   pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot')
+  logs_archived = BooleanField(default=False)
 
 
 class QueueItem(BaseModel):
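The BUILD_PHASE constants and the logs_archived column are the schema half of the feature: a build becomes archivable once it reaches a terminal phase and its logs have not yet been moved. A hedged sketch of how worker code might use the enum instead of bare strings (mark_phase and build are assumptions, not part of this diff):

from data.database import BUILD_PHASE

def mark_phase(build, phase):
  # phase is one of the BUILD_PHASE constants, e.g. BUILD_PHASE.COMPLETE
  build.phase = phase
  build.save()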
@@ -12,6 +12,7 @@ from util.backoff import exponential_backoff
 
 
 EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
+PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
 
 
 logger = logging.getLogger(__name__)
@@ -1877,3 +1878,11 @@ def confirm_email_authorization_for_repo(code):
   found.save()
 
   return found
+
+
+def archivable_buildlogs_query():
+  presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
+  return (RepositoryBuild.select()
+          .where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
+                 (RepositoryBuild.phase == BUILD_PHASE.ERROR) |
+                 (RepositoryBuild.started < presumed_dead_date), RepositoryBuild.logs_archived == False))
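archivable_buildlogs_query selects builds whose logs can be moved out of the live store: anything in a terminal phase (COMPLETE or ERROR), or anything started more than PRESUMED_DEAD_BUILD_AGE (15 days) ago, that still has logs_archived == False. A sketch of the archiving pass this appears intended to feed; the worker function, the get_log_entries call, and the JSON payload are assumptions, not part of this commit:

import json
from StringIO import StringIO  # Python 2 file-like wrapper, matching the codebase

from data import model

def archive_build_logs(build_logs, log_archive):
  for build in model.archivable_buildlogs_query():
    # get_log_entries is an assumed log-store API; any serializable dump works
    entries = build_logs.get_log_entries(build.uuid, 0)
    log_archive.store_file(StringIO(json.dumps({'logs': entries})),
                           'application/json', file_id=build.uuid)
    build.logs_archived = True
    build.save()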
@@ -58,9 +58,12 @@ class S3Userfiles(object):
                                                  encrypt_key=True)
     return (url, file_id)
 
-  def store_file(self, file_like_obj, content_type):
+  def store_file(self, file_like_obj, content_type, file_id=None):
     self._initialize_s3()
-    file_id = str(uuid4())
+
+    if file_id is None:
+      file_id = str(uuid4())
+
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
     logger.debug('Setting s3 content type to: %s' % content_type)
@@ -161,8 +164,9 @@ class LocalUserfiles(object):
       except IOError:
         break
 
-  def store_file(self, file_like_obj, content_type):
-    file_id = str(uuid4())
+  def store_file(self, file_like_obj, content_type, file_id=None):
+    if file_id is None:
+      file_id = str(uuid4())
+
     # Rewind the file to match what s3 does
     file_like_obj.seek(0, os.SEEK_SET)
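Both backends now accept an optional file_id, so a caller can store under a deterministic key (for example, a build's UUID) and later serve it back from /archivedlogs/<file_id>; omitting it keeps the old random-uuid4 behavior. A usage sketch, assuming userfiles is a LocalUserfiles or S3Userfiles instance and build is a RepositoryBuild row:

from StringIO import StringIO  # Python 2 file-like wrapper

# old behavior: key is a fresh uuid4
userfiles.store_file(StringIO('log data'), 'text/plain')

# new behavior: caller-chosen, re-derivable key
userfiles.store_file(StringIO('log data'), 'text/plain', file_id=build.uuid)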