refactor(archivedlogs): move archivelog handler to endpoints

EvB 2017-04-24 15:59:10 -04:00
parent 0d04fd8bd2
commit 5e995fae20
4 changed files with 54 additions and 22 deletions


@@ -12,19 +12,6 @@ JSON_MIMETYPE = 'application/json'
 
 logger = logging.getLogger(__name__)
 
-
-class LogArchiveHandlers(UserfilesHandlers):
-  methods = ['GET']
-
-  def get(self, file_id):
-    path = self._files.get_file_id_path(file_id)
-    try:
-      data_stream = self._storage.stream_read_file(self._locations, path)
-      return send_file(GzipInputStream(data_stream), mimetype=JSON_MIMETYPE)
-    except IOError:
-      logger.exception('Could not read archived logs')
-      abort(404)
-
 class LogArchive(object):
   def __init__(self, app=None, distributed_storage=None):
     self.app = app
@@ -41,13 +28,6 @@ class LogArchive(object):
     log_archive = DelegateUserfiles(app, distributed_storage, location, path,
                                     handler_name=handler_name)
 
-    app.add_url_rule('/logarchive/<file_id>',
-                     view_func=LogArchiveHandlers.as_view(handler_name,
-                                                          distributed_storage=distributed_storage,
-                                                          location=location,
-                                                          files=log_archive))
-
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
     app.extensions['log_archive'] = log_archive

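With the pluggable view and its add_url_rule registration gone, the LogArchive extension only constructs the DelegateUserfiles instance and publishes it on app.extensions['log_archive']; request handling moves to a plain endpoint function in endpoints below, where the session-login and permission decorators can be applied.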

@@ -37,7 +37,7 @@ from util.invoice import renderInvoiceToPdf
 from util.saas.useranalytics import build_error_callback
 from util.systemlogs import build_logs_archive
 from util.useremails import send_email_changed
+from util.registry.gzipinputstream import GzipInputStream
 
 PGP_KEY_MIMETYPE = 'application/pgp-keys'
@@ -336,6 +336,29 @@ def buildlogs(build_uuid):
   return response
 
+
+@web.route('/logarchive/<file_id>', methods=['GET'])
+@route_show_if(features.BUILD_SUPPORT)
+@require_session_login
+def logarchive(file_id):
+  JSON_MIMETYPE = 'application/json'
+  try:
+    found_build = model.build.get_repository_build(file_id)
+  except model.InvalidRepositoryBuildException as ex:
+    logger.exception(ex, extra={'build_uuid': file_id})
+    abort(403)
+
+  repo = found_build.repository
+  if not ModifyRepositoryPermission(repo.namespace_user.username, repo.name).can():
+    abort(403)
+
+  try:
+    path = log_archive.get_file_id_path(file_id)
+    data_stream = log_archive._storage.stream_read_file(log_archive._locations, path)
+    return send_file(GzipInputStream(data_stream), mimetype=JSON_MIMETYPE)
+  except IOError:
+    logger.exception('Could not read archived logs')
+    abort(403)
 
 @web.route('/receipt', methods=['GET'])
 @route_show_if(features.BILLING)
 @require_session_login

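A quick manual check of the new route; this is a hypothetical client call, and the host, session cookie, and build UUID below are placeholders rather than values from the commit:

import requests

# Placeholder host and session cookie; @require_session_login means an
# unauthenticated request gets a 401 before any storage access happens.
resp = requests.get(
    'http://localhost:5000/logarchive/deadpork-dead-pork-dead-porkdeadpork',
    cookies={'session': '<session-cookie>'})

# 200: send_file wraps the storage stream in GzipInputStream, so the body
# arrives decompressed, served as application/json.
# 403: unknown build UUID, missing modify permission, or an IOError on read.
print(resp.status_code)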

@@ -602,7 +602,7 @@ def populate_database(minimal=False, with_storage=False):
 
   building = __generate_repository(with_storage, new_user_1, 'building',
                                    'Empty repository which is building.',
-                                   False, [], (0, [], None))
+                                   False, [(new_user_2, 'write'), (reader, 'read')], (0, [], None))
 
   new_token = model.token.create_access_token(building, 'write', 'build-worker')

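Granting new_user_2 write and reader read on the 'building' repository gives the endpoint tests distinct permission levels to exercise: reader can see the repository but lacks the modify permission that web.logarchive checks, which is what test_logarchive_unauthorized below relies on for its 403.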

@@ -4,7 +4,10 @@ import json as py_json
 import time
 import unittest
 import base64
+import zlib
 
+from mock import patch
+from io import BytesIO
 from urllib import urlencode
 from urlparse import urlparse, urlunparse, parse_qs
 from datetime import datetime, timedelta
@@ -27,6 +30,7 @@ from endpoints.web import web as web_bp
 from endpoints.webhooks import webhooks as webhooks_bp
 from initdb import setup_database_for_testing, finished_database_for_testing
 from test.helpers import assert_action_logged
+from util.registry.gzipinputstream import WINDOW_BUFFER_SIZE
 
 try:
   app.register_blueprint(web_bp, url_prefix='')
@@ -133,6 +137,31 @@ class EndpointTestCase(unittest.TestCase):
                             headers={"Content-Type": "application/json"})
     self.assertEquals(rv.status_code, 200)
 
+
+class BuildLogsTestCase(EndpointTestCase):
+  build_uuid = 'deadpork-dead-pork-dead-porkdeadpork'
+
+  def test_logarchive_invalid_build_uuid(self):
+    self.login('public', 'password')
+    self.getResponse('web.logarchive', file_id='bad_build_uuid', expected_code=403)
+
+  def test_logarchive_not_logged_in(self):
+    self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=401)
+
+  def test_logarchive_unauthorized(self):
+    self.login('reader', 'password')
+    self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=403)
+
+  def test_logarchive_file_not_found(self):
+    self.login('public', 'password')
+    self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=403)
+
+  def test_logarchive_successful(self):
+    self.login('public', 'password')
+    data = b"my_file_stream"
+    # flush() is required to finalize the stream; compress() alone may
+    # buffer all of its output internally.
+    compressor = zlib.compressobj(-1, zlib.DEFLATED, WINDOW_BUFFER_SIZE)
+    mock_file = BytesIO(compressor.compress(data) + compressor.flush())
+    with patch('endpoints.web.log_archive._storage.stream_read_file', return_value=mock_file):
+      self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=200)
+
+
 class WebhookEndpointTestCase(EndpointTestCase):
   def test_invalid_build_trigger_webhook(self):
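The mocked storage stream must be framed the way GzipInputStream expects. A minimal round-trip sketch of that framing, assuming WINDOW_BUFFER_SIZE is the gzip-compatible wbits value (16 + zlib.MAX_WBITS):

import zlib

# Assumed to match util.registry.gzipinputstream; 16 + MAX_WBITS selects
# the gzip container rather than a raw deflate stream.
WINDOW_BUFFER_SIZE = 16 + zlib.MAX_WBITS

data = b"my_file_stream"
compressor = zlib.compressobj(-1, zlib.DEFLATED, WINDOW_BUFFER_SIZE)
blob = compressor.compress(data) + compressor.flush()  # flush() finalizes the stream

# Decompressing with the same wbits recovers the original bytes.
assert zlib.decompress(blob, WINDOW_BUFFER_SIZE) == data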