From 5e995fae202398700e5465de0eb518e220392304 Mon Sep 17 00:00:00 2001
From: EvB
Date: Mon, 24 Apr 2017 15:59:10 -0400
Subject: [PATCH] refactor(archivedlogs): move archivelog handler to endpoints

---
 data/archivedlogs.py   | 20 --------------------
 endpoints/web.py       | 25 ++++++++++++++++++++++++-
 initdb.py              |  2 +-
 test/test_endpoints.py | 29 +++++++++++++++++++++++++++++
 4 files changed, 54 insertions(+), 22 deletions(-)

diff --git a/data/archivedlogs.py b/data/archivedlogs.py
index 869be946a..ec54cb9b3 100644
--- a/data/archivedlogs.py
+++ b/data/archivedlogs.py
@@ -12,19 +12,6 @@ JSON_MIMETYPE = 'application/json'
 
 logger = logging.getLogger(__name__)
 
-class LogArchiveHandlers(UserfilesHandlers):
-  methods = ['GET']
-
-  def get(self, file_id):
-    path = self._files.get_file_id_path(file_id)
-    try:
-      data_stream = self._storage.stream_read_file(self._locations, path)
-      return send_file(GzipInputStream(data_stream), mimetype=JSON_MIMETYPE)
-    except IOError:
-      logger.exception('Could not read archived logs')
-      abort(404)
-
-
 class LogArchive(object):
   def __init__(self, app=None, distributed_storage=None):
     self.app = app
@@ -41,13 +28,6 @@ class LogArchive(object):
 
     log_archive = DelegateUserfiles(app, distributed_storage, location, path,
                                     handler_name=handler_name)
-
-    app.add_url_rule('/logarchive/<file_id>',
-                     view_func=LogArchiveHandlers.as_view(handler_name,
-                                                          distributed_storage=distributed_storage,
-                                                          location=location,
-                                                          files=log_archive))
-
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
     app.extensions['log_archive'] = log_archive
diff --git a/endpoints/web.py b/endpoints/web.py
index d0b67f48e..c73e5dad9 100644
--- a/endpoints/web.py
+++ b/endpoints/web.py
@@ -37,7 +37,7 @@ from util.invoice import renderInvoiceToPdf
 from util.saas.useranalytics import build_error_callback
 from util.systemlogs import build_logs_archive
 from util.useremails import send_email_changed
-
+from util.registry.gzipinputstream import GzipInputStream
 
 
 PGP_KEY_MIMETYPE = 
'application/pgp-keys'
@@ -336,6 +336,29 @@ def buildlogs(build_uuid):
   return response
 
 
+@web.route('/logarchive/<file_id>', methods=['GET'])
+@route_show_if(features.BUILD_SUPPORT)
+@require_session_login
+def logarchive(file_id):
+  JSON_MIMETYPE = 'application/json'
+  try:
+    found_build = model.build.get_repository_build(file_id)
+  except model.InvalidRepositoryBuildException as ex:
+    logger.exception(ex, extra={'build_uuid': file_id})
+    abort(403)
+
+  repo = found_build.repository
+  if not ModifyRepositoryPermission(repo.namespace_user.username, repo.name).can():
+    abort(403)
+
+  try:
+    path = log_archive.get_file_id_path(file_id)
+    data_stream = log_archive._storage.stream_read_file(log_archive._locations, path)
+    return send_file(GzipInputStream(data_stream), mimetype=JSON_MIMETYPE)
+  except IOError:
+    logger.exception('Could not read archived logs')
+    abort(403)
+
 @web.route('/receipt', methods=['GET'])
 @route_show_if(features.BILLING)
 @require_session_login
diff --git a/initdb.py b/initdb.py
index 28093dcfd..c4aa51a8c 100644
--- a/initdb.py
+++ b/initdb.py
@@ -602,7 +602,7 @@ def populate_database(minimal=False, with_storage=False):
 
   building = __generate_repository(with_storage, new_user_1, 'building',
                                    'Empty repository which is building.',
-                                   False, [], (0, [], None))
+                                   False, [(new_user_2, 'write'), (reader, 'read')], (0, [], None))
 
   new_token = model.token.create_access_token(building, 'write', 'build-worker')
 
diff --git a/test/test_endpoints.py b/test/test_endpoints.py
index 98fc40545..5ffdf638d 100644
--- a/test/test_endpoints.py
+++ b/test/test_endpoints.py
@@ -4,7 +4,10 @@ import json as py_json
 import time
 import unittest
 import base64
+import zlib
+from mock import patch
+from io import BytesIO
 
 from urllib import urlencode
 from urlparse import urlparse, urlunparse, parse_qs
 from datetime import datetime, timedelta
@@ -27,6 +30,7 @@ from endpoints.web import web as web_bp
 from endpoints.webhooks import webhooks as webhooks_bp
 from initdb import
setup_database_for_testing, finished_database_for_testing
 from test.helpers import assert_action_logged
+from util.registry.gzipinputstream import WINDOW_BUFFER_SIZE
 
 try:
   app.register_blueprint(web_bp, url_prefix='')
@@ -133,6 +137,32 @@ class EndpointTestCase(unittest.TestCase):
                              headers={"Content-Type": "application/json"})
     self.assertEquals(rv.status_code, 200)
 
+class BuildLogsTestCase(EndpointTestCase):
+  build_uuid = 'deadpork-dead-pork-dead-porkdeadpork'
+
+  def test_logarchive_invalid_build_uuid(self):
+    self.login('public', 'password')
+    self.getResponse('web.logarchive', file_id='bad_build_uuid', expected_code=403)
+
+  def test_logarchive_not_logged_in(self):
+    self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=401)
+
+  def test_logarchive_unauthorized(self):
+    self.login('reader', 'password')
+    self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=403)
+
+  def test_logarchive_file_not_found(self):
+    self.login('public', 'password')
+    self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=403)
+
+  def test_logarchive_successful(self):
+    self.login('public', 'password')
+    data = b"my_file_stream"
+    compressor = zlib.compressobj(-1, zlib.DEFLATED, WINDOW_BUFFER_SIZE)
+    mock_file = BytesIO(compressor.compress(data) + compressor.flush())
+    with patch('endpoints.web.log_archive._storage.stream_read_file', return_value=mock_file):
+      self.getResponse('web.logarchive', file_id=self.build_uuid, expected_code=200)
+
 class WebhookEndpointTestCase(EndpointTestCase):
 
   def test_invalid_build_trigger_webhook(self):