Undiamond Python headers

This change gets the Python codebase into a state where it conforms to
the conventions of this codebase. It's now possible to include Python
headers without worrying about ordering. Python has traditionally
solved that problem by "diamonding" everything in Python.h, but that's
problematic, since it means any change to any Python header invalidates
all the build artifacts. It also breaks tooling: it's hard to explain
to Emacs, when I press C-c C-h to add an include line, that it
shouldn't add the header that actually defines the symbol and should
instead follow the nonstandard Python convention.
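
For illustration, here's roughly what the convention looks like now
(the include paths follow the third_party/python/Include/ layout that
appears in the diff below; the particular headers and the toy function
are just an example, not code taken from this change):

    /* Include only the headers that declare what's actually used;
       the order no longer matters. */
    #include "third_party/python/Include/object.h"
    #include "third_party/python/Include/longobject.h"
    #include "third_party/python/Include/pyerrors.h"

    PyObject *MakeAnswer(void) {
      PyObject *x = PyLong_FromLong(42); /* declared in longobject.h */
      if (!x) return PyErr_NoMemory();   /* declared in pyerrors.h   */
      return x;
    }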

Progress has been made on letting Python load source code from the zip
executable structure via the standard C library APIs. System calls now
recognize the alternative zip!FILENAME URIs as equivalent to
zip:FILENAME, since Python uses the colon as its path delimiter.
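
For example (a sketch only; the stored path below is hypothetical), a
file inside the executable's zip store should be reachable through the
ordinary stdio interface:

    /* Hypothetical example: read a Python source file out of the zip
       store with plain fopen(). The zip!... spelling exists for
       Python's benefit, since sys.path entries get split on the colon
       that zip:... contains. */
    #include <stdio.h>

    int main(void) {
      char buf[512];
      size_t got;
      FILE *f = fopen("zip!Lib/os.py", "rb"); /* same as zip:Lib/os.py */
      if (f) {
        got = fread(buf, 1, sizeof(buf), f);
        fwrite(buf, 1, got, stdout);
        fclose(f);
      }
      return 0;
    }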

Some progress has been made on embedding the license notice terms into
the Python object code. This is easier said than done, since Python has
an extremely complicated ownership story.
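
One mechanism being looked at (a sketch of the idea, not necessarily
the final approach) is planting the notice text in the object files
themselves, e.g. with a GNU .ident directive so it lands in the
.comment section of anything that links the code:

    /* Sketch: a file-scope .ident embeds the string in the object
       file's .comment section, so the notice travels with the
       compiled code. The wording below is a placeholder, not Python's
       actual license text. */
    asm(".ident \"Python 3.6, Copyright 2001-2021 "
        "Python Software Foundation. All rights reserved.\"");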

- Some termios APIs have been added
- Implement rewinddir() dirstream API
- GetCpuCount() API added to Cosmopolitan Libc
- More bugs in Cosmopolitan Libc have been fixed
- zipobj.com now has flags for mangling the path
- Fixed bug a priori with sendfile() on certain BSDs
- Polyfill F_DUPFD and F_DUPFD_CLOEXEC across platforms
- FIOCLEX / FIONCLEX now polyfilled for fast O_CLOEXEC changes (both
  of these polyfills are sketched below, after this list)
- APE now supports a hybrid solution to no-self-modify for builds
- Many BSD-only magnums added, e.g. O_SEARCH, O_SHLOCK, SF_NODISKIO
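
Here's a small sketch of the two descriptor polyfills mentioned above,
using only the portable interfaces (error handling kept minimal; the
descriptor numbers are arbitrary):

    /* Duplicate a descriptor with close-on-exec already set, then
       toggle close-on-exec on the original without an F_GETFD/F_SETFD
       round trip. */
    #include <fcntl.h>
    #include <sys/ioctl.h>

    int DemoCloexec(int fd) {
      int newfd = fcntl(fd, F_DUPFD_CLOEXEC, 10); /* dup to slot >= 10 */
      if (newfd == -1) return -1;
      if (ioctl(fd, FIOCLEX) == -1) return -1;  /* set close-on-exec   */
      if (ioctl(fd, FIONCLEX) == -1) return -1; /* clear it again      */
      return newfd;
    }
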
Justine Tunney 2021-08-12 00:42:14 -07:00
commit b420ed8248 (parent 20bb8db9f8)
762 changed files with 18410 additions and 53772 deletions

View file

@@ -15,9 +15,9 @@ __all__ = [
# like "-arch" or "-isdkroot", that may need customization for
# the user environment
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
# configuration variables that may contain compiler calls
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')

View file

@@ -10,14 +10,13 @@ build_time_vars = {'ABIFLAGS': 'm',
'BASEMODLIBS': '',
'BINDIR': '/bin',
'BINLIBDEST': '/lib/python3.6',
'BLAKE_SSE': "'BLAKE_USE_SSE=1'",
'BLDLIBRARY': 'libpython3.6m.a',
'BLDSHARED': 'gcc -shared -static -nostdlib -nostdinc -fno-pie '
'-mno-red-zone -include '
'/cpython36/libcosmo/cosmopolitan.h',
'BUILDEXE': '.com.dbg',
'BUILDPYTHON': 'python.com.dbg',
'BUILD_GNU_TYPE': 'x86_64-pc-linux-gnu',
'BUILD_GNU_TYPE': 'x86_64-pc-cosmo-gnu',
'BYTESTR_DEPS': '\\',
'CC': 'gcc',
'CCSHARED': '-fPIC',
@@ -126,7 +125,7 @@ build_time_vars = {'ABIFLAGS': 'm',
'EXTRAMACHDEPPATH': '',
'EXTRATESTOPTS': '',
'EXTRA_CFLAGS': '',
'EXT_SUFFIX': '.cpython-36m-x86_64-linux-gnu.so',
'EXT_SUFFIX': '.cpython-36m-x86_64-cosmo-gnu.so',
'FILEMODE': 644,
'FLOCK_NEEDS_LIBBSD': 0,
'GETPGRP_HAVE_ARG': 0,
@@ -207,8 +206,8 @@ build_time_vars = {'ABIFLAGS': 'm',
'HAVE_DUP3': 1,
'HAVE_DYNAMIC_LOADING': 1,
'HAVE_ENDIAN_H': 0,
'HAVE_EPOLL': 1,
'HAVE_EPOLL_CREATE1': 1,
'HAVE_EPOLL': 0,
'HAVE_EPOLL_CREATE1': 0,
'HAVE_ERF': 1,
'HAVE_ERFC': 1,
'HAVE_ERRNO_H': 1,
@@ -309,13 +308,13 @@ build_time_vars = {'ABIFLAGS': 'm',
'HAVE_LIBUTIL_H': 0,
'HAVE_LINK': 1,
'HAVE_LINKAT': 1,
'HAVE_LINUX_CAN_BCM_H': 0,
'HAVE_LINUX_CAN_H': 0,
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 0,
'HAVE_LINUX_CAN_RAW_H': 0,
'HAVE_LINUX_NETLINK_H': 0,
'HAVE_LINUX_RANDOM_H': 0,
'HAVE_LINUX_TIPC_H': 0,
'HAVE_COSMO_CAN_BCM_H': 0,
'HAVE_COSMO_CAN_H': 0,
'HAVE_COSMO_CAN_RAW_FD_FRAMES': 0,
'HAVE_COSMO_CAN_RAW_H': 0,
'HAVE_COSMO_NETLINK_H': 0,
'HAVE_COSMO_RANDOM_H': 0,
'HAVE_COSMO_TIPC_H': 0,
'HAVE_LOCKF': 0,
'HAVE_LOG1P': 1,
'HAVE_LOG2': 1,
@@ -347,7 +346,6 @@ build_time_vars = {'ABIFLAGS': 'm',
'HAVE_PIPE2': 1,
'HAVE_PLOCK': 0,
'HAVE_POLL': 1,
'HAVE_POLL_H': 1,
'HAVE_POSIX_FADVISE': 1,
'HAVE_POSIX_FALLOCATE': 0,
'HAVE_PREAD': 1,
@@ -522,9 +520,9 @@ build_time_vars = {'ABIFLAGS': 'm',
'HAVE_WMEMCMP': 1,
'HAVE_WORKING_TZSET': 1,
'HAVE_WRITEV': 1,
'HAVE_ZLIB_COPY': 0,
'HAVE_ZLIB_COPY': 1,
'HAVE__GETPTY': 0,
'HOST_GNU_TYPE': 'x86_64-pc-linux-gnu',
'HOST_GNU_TYPE': 'x86_64-pc-cosmo-gnu',
'INCLDIRSTOMAKE': '/include /include /include/python3.6m /include/python3.6m',
'INCLUDEDIR': '/include',
'INCLUDEPY': '/include/python3.6m',
@@ -556,7 +554,7 @@ build_time_vars = {'ABIFLAGS': 'm',
'LIBOBJDIR': 'Python/',
'LIBOBJS': '',
'LIBPC': '/lib/pkgconfig',
'LIBPL': '/lib/python3.6/config-3.6m-x86_64-linux-gnu',
'LIBPL': '/lib/python3.6/config-3.6m-x86_64-cosmo-gnu',
'LIBRARY': 'libpython3.6m.a',
'LIBRARY_OBJS': '\\',
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
@@ -577,7 +575,7 @@ build_time_vars = {'ABIFLAGS': 'm',
'LN': 'ln',
'LOCALMODLIBS': '',
'LOG1P_DROPS_ZERO_SIGN': 0,
'MACHDEP': 'linux',
'MACHDEP': 'cosmo',
'MACHDEPPATH': ':',
'MACHDEP_OBJS': '',
'MACHDESTLIB': '/lib/python3.6',
@@ -595,7 +593,7 @@ build_time_vars = {'ABIFLAGS': 'm',
'_tracemalloc _symtable array cmath math _struct _weakref '
'_testcapi _random _elementtree _pickle _datetime _bisect _heapq '
'unicodedata fcntl grp select mmap _csv _socket resource '
'_posixsubprocess _md5 _sha1 _sha256 _sha512 _sha3 _blake2 syslog '
'_posixsubprocess _md5 _sha1 _sha256 _sha512 _sha3 syslog '
'binascii parser fpectl zlib pyexpat _multibytecodec _codecs_cn '
'_codecs_hk _codecs_iso2022 _codecs_jp _codecs_kr _codecs_tw '
'_json _lsprof _opcode',
@@ -626,8 +624,7 @@ build_time_vars = {'ABIFLAGS': 'm',
'Modules/resource.o Modules/_posixsubprocess.o '
'Modules/md5module.o Modules/sha1module.o '
'Modules/sha256module.o Modules/sha512module.o '
'Modules/sha3module.o Modules/blake2module.o '
'Modules/blake2b_impl.o Modules/blake2s_impl.o '
'Modules/_sha3.o '
'Modules/syslogmodule.o Modules/binascii.o '
'Modules/parsermodule.o Modules/fpectlmodule.o '
'Modules/zlibmodule.o Modules/xmlparse.o Modules/xmlrole.o '
@@ -638,8 +635,8 @@ build_time_vars = {'ABIFLAGS': 'm',
'Modules/_json.o Modules/_lsprof.o Modules/rotatingtree.o '
'Modules/_opcode.o',
'MODULE_OBJS': '\\',
'MULTIARCH': 'x86_64-linux-gnu',
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
'MULTIARCH': 'x86_64-cosmo-gnu',
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-cosmo-gnu\\"',
'MVWDELCH_IS_EXPRESSION': 1,
'NO_AS_NEEDED': '-Wl,--no-as-needed',
'OBJECT_OBJS': '\\',
@@ -762,7 +759,7 @@ build_time_vars = {'ABIFLAGS': 'm',
'SIZEOF_VOID_P': 8,
'SIZEOF_WCHAR_T': 4,
'SIZEOF__BOOL': 1,
'SOABI': 'cpython-36m-x86_64-linux-gnu',
'SOABI': 'cpython-36m-x86_64-cosmo',
'SRCDIRS': 'Parser Objects Python Modules Programs',
'SRC_GDB_HOOKS': './Tools/gdb/libpython.py',
'STDC_HEADERS': 1,

View file

@@ -11,7 +11,7 @@ new(name, data=b'', **kwargs) - returns a new hash object implementing the
Named constructor functions are also available, these are faster
than using new(name):
md5(), sha1(), sha224(), sha256(), sha384(), sha512(), blake2b(), blake2s(),
md5(), sha1(), sha224(), sha256(), sha384(), sha512(),
sha3_224, sha3_256, sha3_384, sha3_512, shake_128, and shake_256.
More algorithms may be available on your platform but the above are guaranteed
@@ -56,7 +56,6 @@ More condensed:
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
'blake2b', 'blake2s',
'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
'shake_128', 'shake_256')
@@ -90,10 +89,6 @@ def __get_builtin_constructor(name):
import _sha512
cache['SHA384'] = cache['sha384'] = _sha512.sha384
cache['SHA512'] = cache['sha512'] = _sha512.sha512
elif name in ('blake2b', 'blake2s'):
import _blake2
cache['blake2b'] = _blake2.blake2b
cache['blake2s'] = _blake2.blake2s
elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
'shake_128', 'shake_256'}:
import _sha3
@@ -114,9 +109,6 @@ def __get_builtin_constructor(name):
def __get_openssl_constructor(name):
if name in {'blake2b', 'blake2s'}:
# Prefer our blake2 implementation.
return __get_builtin_constructor(name)
try:
f = getattr(_hashlib, 'openssl_' + name)
# Allow the C module to raise ValueError. The function will be
@@ -140,12 +132,6 @@ def __hash_new(name, data=b'', **kwargs):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be a bytes-like object).
"""
if name in {'blake2b', 'blake2s'}:
# Prefer our blake2 implementation.
# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s.
# It does neither support keyed blake2 nor advanced features like
# salt, personal, tree hashing or SSE.
return __get_builtin_constructor(name)(data, **kwargs)
try:
return _hashlib.new(name, data)
except ValueError:

View file

@@ -86,8 +86,8 @@ class Converter(grammar.Grammar):
The file looks as follows. The first two lines are always this:
#include "pgenheaders.h"
#include "grammar.h"
#include "third_party/python/Include/pgenheaders.h"
#include "third_party/python/Include/grammar.h"
After that come four blocks:

View file

@@ -235,13 +235,6 @@ def clear_caches():
else:
doctest.master = None
try:
ctypes = sys.modules['ctypes']
except KeyError:
pass
else:
ctypes._reset_cache()
try:
typing = sys.modules['typing']
except KeyError:

View file

@@ -30,13 +30,6 @@ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
try:
import _blake2
except ImportError:
_blake2 = None
requires_blake2 = unittest.skipUnless(_blake2, 'requires _blake2')
try:
import _sha3
except ImportError:
@@ -76,7 +69,6 @@ class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
'sha384', 'SHA384', 'sha512', 'SHA512',
'blake2b', 'blake2s',
'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
'shake_128', 'shake_256')
@@ -99,10 +91,6 @@ class HashLibTestCase(unittest.TestCase):
for algorithm in self.supported_hash_names:
algorithms.add(algorithm.lower())
_blake2 = self._conditional_import_module('_blake2')
if _blake2:
algorithms.update({'blake2b', 'blake2s'})
self.constructors_to_test = {}
for algorithm in algorithms:
self.constructors_to_test[algorithm] = set()
@@ -146,10 +134,6 @@ class HashLibTestCase(unittest.TestCase):
if _sha512:
add_builtin_constructor('sha384')
add_builtin_constructor('sha512')
if _blake2:
add_builtin_constructor('blake2s')
add_builtin_constructor('blake2b')
_sha3 = self._conditional_import_module('_sha3')
if _sha3:
add_builtin_constructor('sha3_224')
@@ -325,11 +309,6 @@ class HashLibTestCase(unittest.TestCase):
self.check_no_unicode('sha384')
self.check_no_unicode('sha512')
@requires_blake2
def test_no_unicode_blake2(self):
self.check_no_unicode('blake2b')
self.check_no_unicode('blake2s')
@requires_sha3
def test_no_unicode_sha3(self):
self.check_no_unicode('sha3_224')
@@ -393,11 +372,6 @@ class HashLibTestCase(unittest.TestCase):
self.check_sha3('shake_128', 256, 1344, b'\x1f')
self.check_sha3('shake_256', 512, 1088, b'\x1f')
@requires_blake2
def test_blocksize_name_blake2(self):
self.check_blocksize_name('blake2b', 128, 64)
self.check_blocksize_name('blake2s', 64, 32)
def test_case_md5_0(self):
self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
@@ -526,195 +500,6 @@ class HashLibTestCase(unittest.TestCase):
"e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
"de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
def check_blake2(self, constructor, salt_size, person_size, key_size,
digest_size, max_offset):
self.assertEqual(constructor.SALT_SIZE, salt_size)
for i in range(salt_size + 1):
constructor(salt=b'a' * i)
salt = b'a' * (salt_size + 1)
self.assertRaises(ValueError, constructor, salt=salt)
self.assertEqual(constructor.PERSON_SIZE, person_size)
for i in range(person_size+1):
constructor(person=b'a' * i)
person = b'a' * (person_size + 1)
self.assertRaises(ValueError, constructor, person=person)
self.assertEqual(constructor.MAX_DIGEST_SIZE, digest_size)
for i in range(1, digest_size + 1):
constructor(digest_size=i)
self.assertRaises(ValueError, constructor, digest_size=-1)
self.assertRaises(ValueError, constructor, digest_size=0)
self.assertRaises(ValueError, constructor, digest_size=digest_size+1)
self.assertEqual(constructor.MAX_KEY_SIZE, key_size)
for i in range(key_size+1):
constructor(key=b'a' * i)
key = b'a' * (key_size + 1)
self.assertRaises(ValueError, constructor, key=key)
self.assertEqual(constructor().hexdigest(),
constructor(key=b'').hexdigest())
for i in range(0, 256):
constructor(fanout=i)
self.assertRaises(ValueError, constructor, fanout=-1)
self.assertRaises(ValueError, constructor, fanout=256)
for i in range(1, 256):
constructor(depth=i)
self.assertRaises(ValueError, constructor, depth=-1)
self.assertRaises(ValueError, constructor, depth=0)
self.assertRaises(ValueError, constructor, depth=256)
for i in range(0, 256):
constructor(node_depth=i)
self.assertRaises(ValueError, constructor, node_depth=-1)
self.assertRaises(ValueError, constructor, node_depth=256)
for i in range(0, digest_size + 1):
constructor(inner_size=i)
self.assertRaises(ValueError, constructor, inner_size=-1)
self.assertRaises(ValueError, constructor, inner_size=digest_size+1)
constructor(leaf_size=0)
constructor(leaf_size=(1<<32)-1)
self.assertRaises(OverflowError, constructor, leaf_size=-1)
self.assertRaises(OverflowError, constructor, leaf_size=1<<32)
constructor(node_offset=0)
constructor(node_offset=max_offset)
self.assertRaises(OverflowError, constructor, node_offset=-1)
self.assertRaises(OverflowError, constructor, node_offset=max_offset+1)
self.assertRaises(TypeError, constructor, data=b'')
self.assertRaises(TypeError, constructor, string=b'')
self.assertRaises(TypeError, constructor, '')
constructor(
b'',
key=b'',
salt=b'',
person=b'',
digest_size=17,
fanout=1,
depth=1,
leaf_size=256,
node_offset=512,
node_depth=1,
inner_size=7,
last_node=True
)
def blake2_rfc7693(self, constructor, md_len, in_len):
def selftest_seq(length, seed):
mask = (1<<32)-1
a = (0xDEAD4BAD * seed) & mask
b = 1
out = bytearray(length)
for i in range(length):
t = (a + b) & mask
a, b = b, t
out[i] = (t >> 24) & 0xFF
return out
outer = constructor(digest_size=32)
for outlen in md_len:
for inlen in in_len:
indata = selftest_seq(inlen, inlen)
key = selftest_seq(outlen, outlen)
unkeyed = constructor(indata, digest_size=outlen)
outer.update(unkeyed.digest())
keyed = constructor(indata, key=key, digest_size=outlen)
outer.update(keyed.digest())
return outer.hexdigest()
@requires_blake2
def test_blake2b(self):
self.check_blake2(hashlib.blake2b, 16, 16, 64, 64, (1<<64)-1)
b2b_md_len = [20, 32, 48, 64]
b2b_in_len = [0, 3, 128, 129, 255, 1024]
self.assertEqual(
self.blake2_rfc7693(hashlib.blake2b, b2b_md_len, b2b_in_len),
"c23a7800d98123bd10f506c61e29da5603d763b8bbad2e737f5e765a7bccd475")
@requires_blake2
def test_case_blake2b_0(self):
self.check('blake2b', b"",
"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419"+
"d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce")
@requires_blake2
def test_case_blake2b_1(self):
self.check('blake2b', b"abc",
"ba80a53f981c4d0d6a2797b69f12f6e94c212f14685ac4b74b12bb6fdbffa2d1"+
"7d87c5392aab792dc252d5de4533cc9518d38aa8dbf1925ab92386edd4009923")
@requires_blake2
def test_case_blake2b_all_parameters(self):
# This checks that all the parameters work in general, and also that
# parameter byte order doesn't get confused on big endian platforms.
self.check('blake2b', b"foo",
"920568b0c5873b2f0ab67bedb6cf1b2b",
digest_size=16,
key=b"bar",
salt=b"baz",
person=b"bing",
fanout=2,
depth=3,
leaf_size=4,
node_offset=5,
node_depth=6,
inner_size=7,
last_node=True)
@requires_blake2
def test_blake2b_vectors(self):
for msg, key, md in read_vectors('blake2b'):
key = bytes.fromhex(key)
self.check('blake2b', msg, md, key=key)
@requires_blake2
def test_blake2s(self):
self.check_blake2(hashlib.blake2s, 8, 8, 32, 32, (1<<48)-1)
b2s_md_len = [16, 20, 28, 32]
b2s_in_len = [0, 3, 64, 65, 255, 1024]
self.assertEqual(
self.blake2_rfc7693(hashlib.blake2s, b2s_md_len, b2s_in_len),
"6a411f08ce25adcdfb02aba641451cec53c598b24f4fc787fbdc88797f4c1dfe")
@requires_blake2
def test_case_blake2s_0(self):
self.check('blake2s', b"",
"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9")
@requires_blake2
def test_case_blake2s_1(self):
self.check('blake2s', b"abc",
"508c5e8c327c14e2e1a72ba34eeb452f37458b209ed63a294d999b4c86675982")
@requires_blake2
def test_case_blake2s_all_parameters(self):
# This checks that all the parameters work in general, and also that
# parameter byte order doesn't get confused on big endian platforms.
self.check('blake2s', b"foo",
"bf2a8f7fe3c555012a6f8046e646bc75",
digest_size=16,
key=b"bar",
salt=b"baz",
person=b"bing",
fanout=2,
depth=3,
leaf_size=4,
node_offset=5,
node_depth=6,
inner_size=7,
last_node=True)
@requires_blake2
def test_blake2s_vectors(self):
for msg, key, md in read_vectors('blake2s'):
key = bytes.fromhex(key)
self.check('blake2s', msg, md, key=key)
@requires_sha3
def test_case_sha3_224_0(self):
self.check('sha3_224', b"",

View file

@@ -177,93 +177,6 @@ class ThreadTests(BaseTestCase):
self.assertRegex(repr(threading._active[tid]), '_DummyThread')
del threading._active[tid]
# PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently)
# exposed at the Python level. This test relies on ctypes to get at it.
def test_PyThreadState_SetAsyncExc(self):
ctypes = import_module("ctypes")
set_async_exc = ctypes.pythonapi.PyThreadState_SetAsyncExc
class AsyncExc(Exception):
pass
exception = ctypes.py_object(AsyncExc)
# First check it works when setting the exception from the same thread.
tid = threading.get_ident()
try:
result = set_async_exc(ctypes.c_long(tid), exception)
# The exception is async, so we might have to keep the VM busy until
# it notices.
while True:
pass
except AsyncExc:
pass
else:
# This code is unreachable but it reflects the intent. If we wanted
# to be smarter the above loop wouldn't be infinite.
self.fail("AsyncExc not raised")
try:
self.assertEqual(result, 1) # one thread state modified
except UnboundLocalError:
# The exception was raised too quickly for us to get the result.
pass
# `worker_started` is set by the thread when it's inside a try/except
# block waiting to catch the asynchronously set AsyncExc exception.
# `worker_saw_exception` is set by the thread upon catching that
# exception.
worker_started = threading.Event()
worker_saw_exception = threading.Event()
class Worker(threading.Thread):
def run(self):
self.id = threading.get_ident()
self.finished = False
try:
while True:
worker_started.set()
time.sleep(0.1)
except AsyncExc:
self.finished = True
worker_saw_exception.set()
t = Worker()
t.daemon = True # so if this fails, we don't hang Python at shutdown
t.start()
if verbose:
print(" started worker thread")
# Try a thread id that doesn't make sense.
if verbose:
print(" trying nonsensical thread id")
result = set_async_exc(ctypes.c_long(-1), exception)
self.assertEqual(result, 0) # no thread states modified
# Now raise an exception in the worker thread.
if verbose:
print(" waiting for worker thread to get started")
ret = worker_started.wait()
self.assertTrue(ret)
if verbose:
print(" verifying worker hasn't exited")
self.assertFalse(t.finished)
if verbose:
print(" attempting to raise asynch exception in worker")
result = set_async_exc(ctypes.c_long(t.id), exception)
self.assertEqual(result, 1) # one thread state modified
if verbose:
print(" waiting for worker to say it caught the exception")
worker_saw_exception.wait(timeout=10)
self.assertTrue(t.finished)
if verbose:
print(" all OK -- joining worker")
if t.finished:
t.join()
# else the thread is still running, and we have no way to kill it
def test_limbo_cleanup(self):
# Issue 7481: Failure to start thread should cleanup the limbo map.
def fail_new_thread(*args):