cosmopolitan/third_party/python/Include/pystate.h
Justine Tunney b420ed8248 Undiamond Python headers
This change gets the Python codebase into a state where it conforms to
the conventions of this codebase. It's now possible to include headers
from Python, without worrying about ordering. Python has traditionally
solved that problem by "diamonding" everything in Python.h, but that's
problematic since it means any change to any Python header invalidates
all the build artifacts. Lastly, it makes tooling not work, since it's
hard to explain to Emacs that, when I press C-c C-h to add an include
line, it shouldn't add the header that actually defines the symbol but
should instead follow the nonstandard Python convention.

Progress has been made on letting Python load source code from the zip
executable structure via the standard C library APIs. System calls now
recognize zip!FILENAME alternative URIs as equivalent to zip:FILENAME,
since Python uses the colon as its delimiter.
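
For example, a minimal sketch of what this enables, assuming a
hypothetical foo.py stored in the executable's zip structure and the
usual open() declaration in scope:

    int fd = open("zip!foo.py", O_RDONLY);  /* same file as "zip:foo.py" */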

Some progress has been made on embedding the notice license terms into
the Python object code. This is easier said than done since Python has
an extremely complicated ownership story.

- Some termios APIs have been added
- Implement rewinddir() dirstream API
- GetCpuCount() API added to Cosmopolitan Libc
- More bugs in Cosmopolitan Libc have been fixed
- zipobj.com now has flags for mangling the path
- Fixed bug a priori with sendfile() on certain BSDs
- Polyfill F_DUPFD and F_DUPFD_CLOEXEC across platforms
- FIOCLEX / FIONCLEX now polyfilled for fast O_CLOEXEC changes (see sketch below)
- APE now supports a hybrid solution to no-self-modify for builds
- Many BSD-only magnums added, e.g. O_SEARCH, O_SHLOCK, SF_NODISKIO
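
A minimal sketch of the descriptor polyfills above, assuming fd is a
valid open descriptor and the usual fcntl()/ioctl() declarations are in
scope:

    int dupfd = fcntl(fd, F_DUPFD_CLOEXEC, 0); /* dup with close-on-exec set */
    ioctl(fd, FIOCLEX);                        /* set O_CLOEXEC on fd */
    ioctl(fd, FIONCLEX);                       /* clear O_CLOEXEC again */
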
2021-08-12 14:07:40 -07:00

#ifndef Py_PYSTATE_H
#define Py_PYSTATE_H
#include "third_party/python/Include/moduleobject.h"
#include "third_party/python/Include/object.h"
#include "third_party/python/Include/pyatomic.h"
COSMOPOLITAN_C_START_
/* clang-format off */
/* This limitation is for performance and simplicity. If needed it can be
   removed (with effort). */
#define MAX_CO_EXTRA_USERS 255
/* State shared between threads */
struct _ts; /* Forward */
struct _is; /* Forward */
struct _frame; /* Forward declaration for PyFrameObject. */
#ifdef Py_LIMITED_API
typedef struct _is PyInterpreterState;
#else
typedef PyObject* (*_PyFrameEvalFunction)(struct _frame *, int);
typedef struct _is {
    struct _is *next;
    struct _ts *tstate_head;
    PyObject *modules;
    PyObject *modules_by_index;
    PyObject *sysdict;
    PyObject *builtins;
    PyObject *importlib;
    PyObject *codec_search_path;
    PyObject *codec_search_cache;
    PyObject *codec_error_registry;
    int codecs_initialized;
    int fscodec_initialized;
#ifdef HAVE_DLOPEN
    int dlopenflags;
#endif
    PyObject *builtins_copy;
    PyObject *import_func;
    /* Initialized to PyEval_EvalFrameDefault(). */
    _PyFrameEvalFunction eval_frame;
} PyInterpreterState;
#endif
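/* Illustrative sketch (not part of this header): the eval_frame hook above
 * lets an extension install a custom frame evaluator per PEP 523. A
 * hypothetical my_eval() that merely delegates to the default would be:
 *
 *     PyObject *my_eval(struct _frame *f, int throwflag) {
 *         return _PyEval_EvalFrameDefault(f, throwflag);
 *     }
 *     PyThreadState_Get()->interp->eval_frame = my_eval;
 */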
typedef struct _co_extra_state {
    struct _co_extra_state *next;
    PyInterpreterState* interp;
    Py_ssize_t co_extra_user_count;
    freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS];
} __PyCodeExtraState;
/* This is temporary for backwards compat in 3.6 and will be removed in 3.7 */
__PyCodeExtraState* __PyCodeExtraState_Get(void);
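/* Illustrative sketch (the co_extra APIs live in code.h and ceval.h, not
 * here): a tool can reserve one of the MAX_CO_EXTRA_USERS slots and
 * attach per-code-object data, e.g.:
 *
 *     Py_ssize_t index = _PyEval_RequestCodeExtraIndex(free);
 *     _PyCode_SetExtra(code, index, data);    // code is a PyObject *
 *     void *out;
 *     _PyCode_GetExtra(code, index, &out);
 */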
/* State unique per thread */
#ifndef Py_LIMITED_API
/* Py_tracefunc return -1 when raising an exception, or 0 for success. */
typedef int (*Py_tracefunc)(PyObject *, struct _frame *, int, PyObject *);
/* The following values are used for 'what' for tracefunc functions: */
#define PyTrace_CALL 0
#define PyTrace_EXCEPTION 1
#define PyTrace_LINE 2
#define PyTrace_RETURN 3
#define PyTrace_C_CALL 4
#define PyTrace_C_EXCEPTION 5
#define PyTrace_C_RETURN 6
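/* Illustrative sketch (hypothetical callback, not part of this header) of
 * a Py_tracefunc reacting to the 'what' values above; it would be
 * installed with PyEval_SetTrace(my_trace, obj) from ceval.h:
 *
 *     static int my_trace(PyObject *obj, struct _frame *frame,
 *                         int what, PyObject *arg) {
 *         if (what == PyTrace_LINE) {
 *             // inspect frame here
 *         }
 *         return 0;   // -1 only when an exception has been raised
 *     }
 */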
#endif
#ifdef Py_LIMITED_API
typedef struct _ts PyThreadState;
#else
typedef struct _ts {
    /* See Python/ceval.c for comments explaining most fields */
    struct _ts *prev;
    struct _ts *next;
    PyInterpreterState *interp;
    struct _frame *frame;
    int recursion_depth;
    char overflowed; /* The stack has overflowed. Allow 50 more calls
                        to handle the runtime error. */
    char recursion_critical; /* The current calls must not cause
                                a stack overflow. */
    /* 'tracing' keeps track of the execution depth when tracing/profiling.
       This is to prevent the actual trace/profile code from being recorded in
       the trace/profile. */
    int tracing;
    int use_tracing;
    Py_tracefunc c_profilefunc;
    Py_tracefunc c_tracefunc;
    PyObject *c_profileobj;
    PyObject *c_traceobj;
    PyObject *curexc_type;
    PyObject *curexc_value;
    PyObject *curexc_traceback;
    PyObject *exc_type;
    PyObject *exc_value;
    PyObject *exc_traceback;
    PyObject *dict; /* Stores per-thread state */
    int gilstate_counter;
    PyObject *async_exc; /* Asynchronous exception to raise */
    long thread_id; /* Thread id where this tstate was created */
    int trash_delete_nesting;
    PyObject *trash_delete_later;
    /* Called when a thread state is deleted normally, but not when it
     * is destroyed after fork().
     * Pain: to prevent rare but fatal shutdown errors (issue 18808),
     * Thread.join() must wait for the join'ed thread's tstate to be unlinked
     * from the tstate chain. That happens at the end of a thread's life,
     * in pystate.c.
     * The obvious way doesn't quite work: create a lock which the tstate
     * unlinking code releases, and have Thread.join() wait to acquire that
     * lock. The problem is that we _are_ at the end of the thread's life:
     * if the thread holds the last reference to the lock, decref'ing the
     * lock will delete the lock, and that may trigger arbitrary Python code
     * if there's a weakref, with a callback, to the lock. But by this time
     * _PyThreadState_Current is already NULL, so only the simplest of C code
     * can be allowed to run (in particular it must not be possible to
     * release the GIL).
     * So instead of holding the lock directly, the tstate holds a weakref to
     * the lock: that's the value of on_delete_data below. Decref'ing a
     * weakref is harmless.
     * on_delete points to _threadmodule.c's static release_sentinel() function.
     * After the tstate is unlinked, release_sentinel is called with the
     * weakref-to-lock (on_delete_data) argument, and release_sentinel releases
     * the indirectly held lock.
     */
    void (*on_delete)(void *);
    void *on_delete_data;
    PyObject *coroutine_wrapper;
    int in_coroutine_wrapper;
    /* Now used from PyInterpreterState, kept here for ABI
       compatibility with PyThreadState */
    Py_ssize_t _preserve_36_ABI_1;
    freefunc _preserve_36_ABI_2[MAX_CO_EXTRA_USERS];
    PyObject *async_gen_firstiter;
    PyObject *async_gen_finalizer;
    /* XXX signal handlers should also be here */
} PyThreadState;
#endif
PyInterpreterState * PyInterpreterState_New(void);
void PyInterpreterState_Clear(PyInterpreterState *);
void PyInterpreterState_Delete(PyInterpreterState *);
#ifndef Py_LIMITED_API
int _PyState_AddModule(PyObject*, struct PyModuleDef*);
#endif /* !Py_LIMITED_API */
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000
/* New in 3.3 */
int PyState_AddModule(PyObject*, struct PyModuleDef*);
int PyState_RemoveModule(struct PyModuleDef*);
#endif
PyObject* PyState_FindModule(struct PyModuleDef*);
#ifndef Py_LIMITED_API
void _PyState_ClearModules(void);
#endif
PyThreadState * PyThreadState_New(PyInterpreterState *);
#ifndef Py_LIMITED_API
PyThreadState * _PyThreadState_Prealloc(PyInterpreterState *);
void _PyThreadState_Init(PyThreadState *);
#endif /* !Py_LIMITED_API */
void PyThreadState_Clear(PyThreadState *);
void PyThreadState_Delete(PyThreadState *);
#ifndef Py_LIMITED_API
void _PyThreadState_DeleteExcept(PyThreadState *tstate);
#endif /* !Py_LIMITED_API */
#ifdef WITH_THREAD
void PyThreadState_DeleteCurrent(void);
#ifndef Py_LIMITED_API
void _PyGILState_Reinit(void);
#endif /* !Py_LIMITED_API */
#endif
/* Return the current thread state. The global interpreter lock must be held.
* When the current thread state is NULL, this issues a fatal error (so that
* the caller needn't check for NULL). */
PyThreadState * PyThreadState_Get(void);
#ifndef Py_LIMITED_API
/* Similar to PyThreadState_Get(), but don't issue a fatal error
* if it is NULL. */
PyThreadState * _PyThreadState_UncheckedGet(void);
#endif /* !Py_LIMITED_API */
PyThreadState * PyThreadState_Swap(PyThreadState *);
PyObject * PyThreadState_GetDict(void);
int PyThreadState_SetAsyncExc(long, PyObject *);
/* Variable and macro for in-line access to current thread state */
/* Assuming the current thread holds the GIL, this is the
PyThreadState for the current thread. */
#ifdef Py_BUILD_CORE
extern _Py_atomic_address _PyThreadState_Current;
# define PyThreadState_GET() \
    ((PyThreadState*)_Py_atomic_load_relaxed(&_PyThreadState_Current))
#else
# define PyThreadState_GET() PyThreadState_Get()
#endif
typedef
enum {PyGILState_LOCKED, PyGILState_UNLOCKED}
PyGILState_STATE;
#ifdef WITH_THREAD
/* Ensure that the current thread is ready to call the Python
C API, regardless of the current state of Python, or of its
thread lock. This may be called as many times as desired
by a thread so long as each call is matched with a call to
PyGILState_Release(). In general, other thread-state APIs may
be used between _Ensure() and _Release() calls, so long as the
thread-state is restored to its previous state before the Release().
For example, normal use of the Py_BEGIN_ALLOW_THREADS/
Py_END_ALLOW_THREADS macros are acceptable.
The return value is an opaque "handle" to the thread state when
PyGILState_Ensure() was called, and must be passed to
PyGILState_Release() to ensure Python is left in the same state. Even
though recursive calls are allowed, these handles can *not* be shared -
each unique call to PyGILState_Ensure must save the handle for its
call to PyGILState_Release.
When the function returns, the current thread will hold the GIL.
Failure is a fatal error.
*/
PyGILState_STATE PyGILState_Ensure(void);
/* Release any resources previously acquired. After this call, Python's
state will be the same as it was prior to the corresponding
PyGILState_Ensure() call (but generally this state will be unknown to
the caller, hence the use of the GILState API.)
Every call to PyGILState_Ensure must be matched by a call to
PyGILState_Release on the same thread.
*/
void PyGILState_Release(PyGILState_STATE);
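/* Illustrative sketch of the Ensure/Release pattern described above, e.g.
 * for a thread that was not created by Python:
 *
 *     PyGILState_STATE gstate = PyGILState_Ensure();
 *     PyRun_SimpleString("x = 1");    // any Python C API calls go here
 *     PyGILState_Release(gstate);
 */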
/* Helper/diagnostic function - get the current thread state for
this thread. May return NULL if no GILState API has been used
on the current thread. Note that the main thread always has such a
thread-state, even if no auto-thread-state call has been made
on the main thread.
*/
PyThreadState * PyGILState_GetThisThreadState(void);
#ifndef Py_LIMITED_API
/* Issue #26558: Flag to disable PyGILState_Check().
If set to non-zero, PyGILState_Check() always return 1. */
extern int _PyGILState_check_enabled;
/* Helper/diagnostic function - return 1 if the current thread
currently holds the GIL, 0 otherwise.
The function returns 1 if _PyGILState_check_enabled is non-zero. */
int PyGILState_Check(void);
/* Unsafe function to get the single PyInterpreterState used by this process'
GILState implementation.
Return NULL before _PyGILState_Init() is called and after _PyGILState_Fini()
is called. */
PyInterpreterState * _PyGILState_GetInterpreterStateUnsafe(void);
#endif
#endif /* #ifdef WITH_THREAD */
/* The implementation of sys._current_frames() Returns a dict mapping
thread id to that thread's current frame.
*/
#ifndef Py_LIMITED_API
PyObject * _PyThread_CurrentFrames(void);
#endif
/* Routines for advanced debuggers, requested by David Beazley.
Don't use unless you know what you are doing! */
#ifndef Py_LIMITED_API
PyInterpreterState * PyInterpreterState_Head(void);
PyInterpreterState * PyInterpreterState_Next(PyInterpreterState *);
PyThreadState * PyInterpreterState_ThreadHead(PyInterpreterState *);
PyThreadState * PyThreadState_Next(PyThreadState *);
typedef struct _frame *(*PyThreadFrameGetter)(PyThreadState *self_);
#endif
/* hook for PyEval_GetFrame(), requested for Psyco */
#ifndef Py_LIMITED_API
extern PyThreadFrameGetter _PyThreadState_GetFrame;
#endif
COSMOPOLITAN_C_END_
#endif /* !Py_PYSTATE_H */