mirror of
https://github.com/jart/cosmopolitan.git
synced 2025-05-22 21:32:31 +00:00
Begin incorporating Python unit tests into build
We now build a separate APE binary for each test so they can run in parallel. We've got 148 tests running fast and stable so far.
This commit is contained in:
parent
51904e2687
commit
b5f743cdc3
121 changed files with 4995 additions and 4767 deletions
74
third_party/python/Lib/test/test_robotparser.py
vendored
74
third_party/python/Lib/test/test_robotparser.py
vendored
|
@ -285,79 +285,5 @@ class RobotHandler(BaseHTTPRequestHandler):
|
|||
def log_message(self, format, *args):
    """Silence BaseHTTPRequestHandler's per-request stderr logging."""
|
||||
|
||||
|
||||
@unittest.skipUnless(threading, 'threading required for this test')
class PasswordProtectedSiteTestCase(unittest.TestCase):
    """A robots.txt that cannot be fetched (403) must disallow everything."""

    def setUp(self):
        # Serve RobotHandler on an ephemeral loopback port; the handler
        # rejects every request, simulating a password-protected site.
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        server_thread = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # A short poll interval lets serve_forever() notice shutdown()
            # quickly; requests come fast enough that the extra wakeups
            # are not a concern.
            kwargs={'poll_interval': 0.01})
        server_thread.daemon = True  # In case this function raises.
        server_thread.start()
        self.t = server_thread

    def tearDown(self):
        # Stop the serve-forever loop, wait for the worker thread to
        # finish, then release the listening socket.
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        port_str = str(self.server.server_address[1])
        url = 'http://' + support.HOST + ':' + port_str
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        # An unfetchable robots.txt is treated as "disallow all".
        self.assertFalse(parser.can_fetch("*", robots_url))
|
||||
|
||||
|
||||
class NetworkTestCase(unittest.TestCase):
|
||||
|
||||
base_url = 'http://www.pythontest.net/'
|
||||
robots_txt = '{}elsewhere/robots.txt'.format(base_url)
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
support.requires('network')
|
||||
with support.transient_internet(cls.base_url):
|
||||
cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt)
|
||||
cls.parser.read()
|
||||
|
||||
def url(self, path):
|
||||
return '{}{}{}'.format(
|
||||
self.base_url, path, '/' if not os.path.splitext(path)[1] else ''
|
||||
)
|
||||
|
||||
def test_basic(self):
|
||||
self.assertFalse(self.parser.disallow_all)
|
||||
self.assertFalse(self.parser.allow_all)
|
||||
self.assertGreater(self.parser.mtime(), 0)
|
||||
self.assertFalse(self.parser.crawl_delay('*'))
|
||||
self.assertFalse(self.parser.request_rate('*'))
|
||||
|
||||
def test_can_fetch(self):
|
||||
self.assertTrue(self.parser.can_fetch('*', self.url('elsewhere')))
|
||||
self.assertFalse(self.parser.can_fetch('Nutch', self.base_url))
|
||||
self.assertFalse(self.parser.can_fetch('Nutch', self.url('brian')))
|
||||
self.assertFalse(self.parser.can_fetch('Nutch', self.url('webstats')))
|
||||
self.assertFalse(self.parser.can_fetch('*', self.url('webstats')))
|
||||
self.assertTrue(self.parser.can_fetch('*', self.base_url))
|
||||
|
||||
def test_read_404(self):
|
||||
parser = urllib.robotparser.RobotFileParser(self.url('i-robot.txt'))
|
||||
parser.read()
|
||||
self.assertTrue(parser.allow_all)
|
||||
self.assertFalse(parser.disallow_all)
|
||||
self.assertEqual(parser.mtime(), 0)
|
||||
self.assertIsNone(parser.crawl_delay('*'))
|
||||
self.assertIsNone(parser.request_rate('*'))
|
||||
|
||||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue