Add worker to update ipresolver data files every few hours

This commit is contained in:
Joseph Schorr 2017-09-27 17:14:04 -04:00
parent 52927de7f6
commit 05b4a7d457
7 changed files with 130 additions and 77 deletions

View file

@ -1,13 +1,14 @@
import pytest
from httmock import urlmatch, HTTMock
from contextlib import contextmanager
from mock import patch
from moto import mock_s3
import boto
from app import config_provider
from storage import CloudFrontedS3Storage, StorageContext
from util.ipresolver import IPResolver
from util.ipresolver.test.test_ipresolver import http_client, test_aws_ip, aws_ip_range_handler
from util.ipresolver.test.test_ipresolver import test_aws_ip, aws_ip_range_data
from test.fixtures import *
_TEST_CONTENT = os.urandom(1024)
@ -20,37 +21,34 @@ _TEST_PATH = 'some/cool/path'
def ipranges_populated(request):
    """Expose this fixture's parametrization value: whether the AWS IP
    range data should be treated as populated for the current test run."""
    populated_flag = request.param
    return populated_flag
# Fixture (pre-change version, REMOVED by this commit): builds an IPResolver
# whose HTTP traffic is intercepted by HTTMock using the aws_ip_range_handler,
# and, when `ipranges_populated` is true, eagerly syncs the AWS IP range data
# (asserting the sync succeeded) before handing the resolver to the test.
# NOTE(review): the diff rendering stripped all indentation; the `if` and
# `return` presumably sit inside the HTTMock context manager so the mocked
# handler is active during `_update_aws_ip_range()` — confirm against the
# original file before relying on this.
@pytest.fixture()
def ipresolver(http_client, aws_ip_range_handler, ipranges_populated, app):
with HTTMock(aws_ip_range_handler):
ipresolver = IPResolver(app, client=http_client)
if ipranges_populated:
assert ipresolver._update_aws_ip_range()
return ipresolver
@pytest.fixture()
def storage_context(ipresolver, app):
    """Build a StorageContext for the 'nyc' location wired to the test
    IP resolver; the metric-queue and chunk-cleanup slots are left unset."""
    context = StorageContext('nyc', None, None, config_provider, ipresolver)
    return context
# Old version of test_direct_download (REMOVED by this commit; its trailing
# if/else branch appears further down in the diff, interleaved with the new
# test's body). It exercised the HTTMock-backed `storage_context` fixture:
# create a bucket in moto's mocked S3, store content through the CloudFronted
# engine, and check that a request from a known AWS IP gets a direct S3 URL.
# NOTE(review): indentation was stripped by the diff rendering; the body lines
# below all belong inside the function.
@mock_s3
def test_direct_download(storage_context, test_aws_ip, ipranges_populated, app):
# Create a test bucket and put some test content.
boto.connect_s3().create_bucket(_TEST_BUCKET)
engine = CloudFrontedS3Storage(storage_context, 'cloudfrontdomain', 'keyid', 'test/data/test.pem', 'some/path',
_TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)
engine.put_content(_TEST_PATH, _TEST_CONTENT)
assert engine.exists(_TEST_PATH)
# Request a direct download URL for a request from a known AWS IP, and ensure we are returned an S3 URL.
assert 's3.amazonaws.com' in engine.get_direct_download_url(_TEST_PATH, test_aws_ip)
# New (post-commit) version of test_direct_download: instead of routing the
# IPResolver through a mocked HTTP client, it constructs a plain IPResolver
# and patches its private `_get_aws_ip_ranges` hook to return either the
# populated fixture data or an empty range set, driven by `ipranges_populated`.
def test_direct_download(test_aws_ip, aws_ip_range_data, ipranges_populated, app):
ipresolver = IPResolver(app)
# NOTE(review): the next seven lines (the `if ipranges_populated:` branch
# referencing `engine`, which is not yet defined here) are remnants of the
# REMOVED old test's tail, interleaved by the diff rendering — they are not
# part of this function's new body.
if ipranges_populated:
# Request a direct download URL for a request from a non-AWS IP, and ensure we are returned a CloudFront URL.
assert 'cloudfrontdomain' in engine.get_direct_download_url(_TEST_PATH, '1.2.3.4')
else:
# Request a direct download URL for a request from a non-AWS IP, but since IP Ranges isn't populated, we still
# get back an S3 URL.
assert 's3.amazonaws.com' in engine.get_direct_download_url(_TEST_PATH, '1.2.3.4')
# Stand-in range payload used when the "unpopulated" parametrization runs:
# a sync token but zero prefixes, so no IP is classified as AWS.
empty_range_data = {
'syncToken': 123456789,
'prefixes': [],
}
# Patch the resolver's range source for the duration of the whole scenario;
# everything below presumably sits inside this `with` block (indentation was
# stripped by the diff rendering — confirm against the original file).
with patch.object(ipresolver, '_get_aws_ip_ranges', lambda: aws_ip_range_data if ipranges_populated else empty_range_data):
context = StorageContext('nyc', None, None, config_provider, ipresolver)
# Create a test bucket and put some test content.
boto.connect_s3().create_bucket(_TEST_BUCKET)
engine = CloudFrontedS3Storage(context, 'cloudfrontdomain', 'keyid', 'test/data/test.pem', 'some/path',
_TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)
engine.put_content(_TEST_PATH, _TEST_CONTENT)
assert engine.exists(_TEST_PATH)
# Request a direct download URL for a request from a known AWS IP, and ensure we are returned an S3 URL.
assert 's3.amazonaws.com' in engine.get_direct_download_url(_TEST_PATH, test_aws_ip)
if ipranges_populated:
# Request a direct download URL for a request from a non-AWS IP, and ensure we are returned a CloudFront URL.
assert 'cloudfrontdomain' in engine.get_direct_download_url(_TEST_PATH, '1.2.3.4')
else:
# Request a direct download URL for a request from a non-AWS IP, but since IP Ranges isn't populated, we still
# get back an S3 URL.
assert 's3.amazonaws.com' in engine.get_direct_download_url(_TEST_PATH, '1.2.3.4')