cosmopolitan/third_party/quickjs/gc.c
Justine Tunney ae5d06dc53 Unbloat build config
- 10.5% reduction of o//depend dependency graph
- 8.8% reduction in latency of make command
- Fix issue with temporary file cleanup

There's a new -w option in compile.com that turns off the recent
Landlock output path workaround for "good commands" which do not
unlink() the output file like GNU tooling does.

Our new GNU Make unveil sandboxing appears to have zero overhead
in the grand scheme of things. Full builds are pretty fast, since
the only thing that's actually slowing us down is probably libcxx.

    make -j16 MODE=rel
    RL: took 85,732,063µs wall time
    RL: ballooned to 323,612kb in size
    RL: needed 828,560,521µs cpu (11% kernel)
    RL: caused 39,080,670 page faults (99% memcpy)
    RL: 350,073 context switches (72% consensual)
    RL: performed 0 reads and 11,494,960 write i/o operations

pledge() and unveil() no longer consider ENOSYS to be an error.
These functions have also been added to Python's cosmo module.

This change also removes some WIN32 APIs and System Five magnums
that we aren't using and that it's doubtful anyone else would use
either.
2022-08-10 04:43:09 -07:00

/*
 * QuickJS Javascript Engine
 *
 * Copyright (c) 2017-2021 Fabrice Bellard
 * Copyright (c) 2017-2021 Charlie Gordon
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "libc/assert.h"
#include "libc/runtime/runtime.h"
#include "third_party/quickjs/internal.h"
asm(".ident\t\"\\n\\n\
QuickJS (MIT License)\\n\
Copyright (c) 2017-2021 Fabrice Bellard\\n\
Copyright (c) 2017-2021 Charlie Gordon\"");
asm(".include \"libc/disclaimer.inc\"");
/* clang-format off */
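
/*
 * Decides whether a collection should run before `size` more bytes are
 * allocated (the object allocation paths elsewhere in QuickJS are
 * presumably the callers). With FORCE_GC_AT_MALLOC defined every call
 * collects, which helps when debugging GC issues; otherwise a pass runs
 * once the tracked heap would exceed rt->malloc_gc_threshold, and the
 * threshold is then reset to 1.5x the size of the surviving heap.
 */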
void js_trigger_gc(JSRuntime *rt, size_t size)
{
    BOOL force_gc;
#ifdef FORCE_GC_AT_MALLOC
    force_gc = TRUE;
#else
    force_gc = ((rt->malloc_state.malloc_size + size) >
                rt->malloc_gc_threshold);
#endif
    if (force_gc) {
#ifdef DUMP_GC
        printf("GC: size=%" PRIu64 "\n",
               (uint64_t)rt->malloc_state.malloc_size);
#endif
        JS_RunGC(rt);
        rt->malloc_gc_threshold = rt->malloc_state.malloc_size +
            (rt->malloc_state.malloc_size >> 1);
    }
}
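
/*
 * Passes the GC header of `val` to mark_func when the value is one of
 * the two heap kinds that can appear in reference cycles: ordinary
 * objects and function bytecode. Other reference counted values
 * (strings, symbols, big numbers) hold no further JSValues, and
 * modules are traversed separately via js_mark_module_def(), so they
 * are all skipped here.
 */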
void JS_MarkValue(JSRuntime *rt, JSValueConst val, JS_MarkFunc *mark_func)
{
    if (JS_VALUE_HAS_REF_COUNT(val)) {
        switch(JS_VALUE_GET_TAG(val)) {
        case JS_TAG_OBJECT:
        case JS_TAG_FUNCTION_BYTECODE:
            mark_func(rt, JS_VALUE_GET_PTR(val));
            break;
        default:
            break;
        }
    }
}
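
/*
 * Marks the GC objects referenced by a module definition: the var refs
 * backing its local exports, plus the module namespace, module
 * function, eval exception and meta object values.
 */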
static void js_mark_module_def(JSRuntime *rt, JSModuleDef *m,
                               JS_MarkFunc *mark_func)
{
    int i;
    for(i = 0; i < m->export_entries_count; i++) {
        JSExportEntry *me = &m->export_entries[i];
        if (me->export_type == JS_EXPORT_TYPE_LOCAL &&
            me->u.local.var_ref) {
            mark_func(rt, &me->u.local.var_ref->header);
        }
    }
    JS_MarkValue(rt, m->module_ns, mark_func);
    JS_MarkValue(rt, m->func_obj, mark_func);
    JS_MarkValue(rt, m->eval_exception, mark_func);
    JS_MarkValue(rt, m->meta_obj, mark_func);
}
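
/*
 * Marks everything a JSContext (realm) keeps alive: the values
 * referenced by its loaded modules (module definitions are not GC
 * objects themselves), the global objects, the per-class prototypes,
 * the cached constructors, and the array shape.
 */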
static void JS_MarkContext(JSRuntime *rt, JSContext *ctx, JS_MarkFunc *mark_func)
{
    int i;
    struct list_head *el;
    /* modules are not seen by the GC, so we directly mark the objects
       referenced by each module */
    list_for_each(el, &ctx->loaded_modules) {
        JSModuleDef *m = list_entry(el, JSModuleDef, link);
        js_mark_module_def(rt, m, mark_func);
    }
    JS_MarkValue(rt, ctx->global_obj, mark_func);
    JS_MarkValue(rt, ctx->global_var_obj, mark_func);
    JS_MarkValue(rt, ctx->throw_type_error, mark_func);
    JS_MarkValue(rt, ctx->eval_obj, mark_func);
    JS_MarkValue(rt, ctx->array_proto_values, mark_func);
    for(i = 0; i < JS_NATIVE_ERROR_COUNT; i++) {
        JS_MarkValue(rt, ctx->native_error_proto[i], mark_func);
    }
    for(i = 0; i < rt->class_count; i++) {
        JS_MarkValue(rt, ctx->class_proto[i], mark_func);
    }
    JS_MarkValue(rt, ctx->iterator_proto, mark_func);
    JS_MarkValue(rt, ctx->async_iterator_proto, mark_func);
    JS_MarkValue(rt, ctx->promise_ctor, mark_func);
    JS_MarkValue(rt, ctx->array_ctor, mark_func);
    JS_MarkValue(rt, ctx->regexp_ctor, mark_func);
    JS_MarkValue(rt, ctx->function_ctor, mark_func);
    JS_MarkValue(rt, ctx->function_proto, mark_func);
    if (ctx->array_shape)
        mark_func(rt, &ctx->array_shape->header);
}
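
/*
 * Invokes mark_func on every GC object directly referenced by `gp`,
 * dispatching on the GC object type. All of the collection passes
 * below reuse this one traversal routine; what a pass does is
 * determined entirely by the mark_func it supplies.
 */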
static void mark_children(JSRuntime *rt, JSGCObjectHeader *gp,
                          JS_MarkFunc *mark_func)
{
    switch(gp->gc_obj_type) {
    case JS_GC_OBJ_TYPE_JS_OBJECT:
        {
            JSObject *p = (JSObject *)gp;
            JSShapeProperty *prs;
            JSShape *sh;
            int i;
            sh = p->shape;
            mark_func(rt, &sh->header);
            /* mark all the fields */
            prs = get_shape_prop(sh);
            for(i = 0; i < sh->prop_count; i++) {
                JSProperty *pr = &p->prop[i];
                if (prs->atom != JS_ATOM_NULL) {
                    if (prs->flags & JS_PROP_TMASK) {
                        if ((prs->flags & JS_PROP_TMASK) == JS_PROP_GETSET) {
                            if (pr->u.getset.getter)
                                mark_func(rt, &pr->u.getset.getter->header);
                            if (pr->u.getset.setter)
                                mark_func(rt, &pr->u.getset.setter->header);
                        } else if ((prs->flags & JS_PROP_TMASK) == JS_PROP_VARREF) {
                            if (pr->u.var_ref->is_detached) {
                                /* Note: the tag does not matter
                                   provided it is a GC object */
                                mark_func(rt, &pr->u.var_ref->header);
                            }
                        } else if ((prs->flags & JS_PROP_TMASK) == JS_PROP_AUTOINIT) {
                            js_autoinit_mark(rt, pr, mark_func);
                        }
                    } else {
                        JS_MarkValue(rt, pr->u.value, mark_func);
                    }
                }
                prs++;
            }
            if (p->class_id != JS_CLASS_OBJECT) {
                JSClassGCMark *gc_mark;
                gc_mark = rt->class_array[p->class_id].gc_mark;
                if (gc_mark)
                    gc_mark(rt, JS_MKPTR(JS_TAG_OBJECT, p), mark_func);
            }
        }
        break;
    case JS_GC_OBJ_TYPE_FUNCTION_BYTECODE:
        /* the template objects can be part of a cycle */
        {
            JSFunctionBytecode *b = (JSFunctionBytecode *)gp;
            int i;
            for(i = 0; i < b->cpool_count; i++) {
                JS_MarkValue(rt, b->cpool[i], mark_func);
            }
            if (b->realm)
                mark_func(rt, &b->realm->header);
        }
        break;
    case JS_GC_OBJ_TYPE_VAR_REF:
        {
            JSVarRef *var_ref = (JSVarRef *)gp;
            /* only detached variable references are taken into account */
            assert(var_ref->is_detached);
            JS_MarkValue(rt, *var_ref->pvalue, mark_func);
        }
        break;
    case JS_GC_OBJ_TYPE_ASYNC_FUNCTION:
        {
            JSAsyncFunctionData *s = (JSAsyncFunctionData *)gp;
            if (s->is_active)
                async_func_mark(rt, &s->func_state, mark_func);
            JS_MarkValue(rt, s->resolving_funcs[0], mark_func);
            JS_MarkValue(rt, s->resolving_funcs[1], mark_func);
        }
        break;
    case JS_GC_OBJ_TYPE_SHAPE:
        {
            JSShape *sh = (JSShape *)gp;
            if (sh->proto != NULL) {
                mark_func(rt, &sh->proto->header);
            }
        }
        break;
    case JS_GC_OBJ_TYPE_JS_CONTEXT:
        {
            JSContext *ctx = (JSContext *)gp;
            JS_MarkContext(rt, ctx, mark_func);
        }
        break;
    default:
        abort();
    }
}
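
/*
 * Phase 1 of JS_RunGC() (trial deletion): the ref_count of every GC
 * object's children is decremented, so afterwards ref_count only
 * counts references coming from outside the GC object graph (e.g.
 * values held on the C stack). Objects whose count drops to zero are
 * moved to tmp_obj_list as candidate garbage, and every object ends
 * up with mark = 1.
 */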
static void gc_decref_child(JSRuntime *rt, JSGCObjectHeader *p)
{
    assert(p->ref_count > 0);
    p->ref_count--;
    if (p->ref_count == 0 && p->mark == 1) {
        list_del(&p->link);
        list_add_tail(&p->link, &rt->tmp_obj_list);
    }
}

static void gc_decref(JSRuntime *rt)
{
    struct list_head *el, *el1;
    JSGCObjectHeader *p;

    init_list_head(&rt->tmp_obj_list);
    /* decrement the refcount of all the children of all the GC
       objects and move the GC objects with zero refcount to
       tmp_obj_list */
    list_for_each_safe(el, el1, &rt->gc_obj_list) {
        p = list_entry(el, JSGCObjectHeader, link);
        assert(p->mark == 0);
        mark_children(rt, p, gc_decref_child);
        p->mark = 1;
        if (p->ref_count == 0) {
            list_del(&p->link);
            list_add_tail(&p->link, &rt->tmp_obj_list);
        }
    }
}
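
/*
 * Phase 2 of JS_RunGC() (scan): objects that kept a non zero ref_count
 * after trial deletion are reachable from outside, so the references
 * they hold are added back with gc_scan_incref_child(), which also
 * rescues any of their children that had been moved to tmp_obj_list.
 * The reference counts inside whatever remains in tmp_obj_list are
 * then restored with gc_scan_incref_child2() so those dead cycles can
 * be freed consistently in phase 3.
 */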
static void gc_scan_incref_child(JSRuntime *rt, JSGCObjectHeader *p)
{
    p->ref_count++;
    if (p->ref_count == 1) {
        /* ref_count was 0: remove from tmp_obj_list and add at the
           end of gc_obj_list */
        list_del(&p->link);
        list_add_tail(&p->link, &rt->gc_obj_list);
        p->mark = 0; /* reset the mark for the next GC call */
    }
}

static void gc_scan_incref_child2(JSRuntime *rt, JSGCObjectHeader *p)
{
    p->ref_count++;
}

static void gc_scan(JSRuntime *rt)
{
    struct list_head *el;
    JSGCObjectHeader *p;

    /* keep the objects with a refcount > 0 and their children. */
    list_for_each(el, &rt->gc_obj_list) {
        p = list_entry(el, JSGCObjectHeader, link);
        assert(p->ref_count > 0);
        p->mark = 0; /* reset the mark for the next GC call */
        mark_children(rt, p, gc_scan_incref_child);
    }
    /* restore the refcount of the objects to be deleted. */
    list_for_each(el, &rt->tmp_obj_list) {
        p = list_entry(el, JSGCObjectHeader, link);
        mark_children(rt, p, gc_scan_incref_child2);
    }
}
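
/*
 * Tears down a JSObject: frees its properties and shape, clears any
 * weak references, runs the class finalizer, then releases the memory.
 * While cycles are being removed, the release is deferred to
 * gc_zero_ref_count_list if the zombie object is still referenced.
 */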
static void free_object(JSRuntime *rt, JSObject *p)
{
    int i;
    JSClassFinalizer *finalizer;
    JSShape *sh;
    JSShapeProperty *pr;

    p->free_mark = 1; /* used to tell the object is invalid when
                         freeing cycles */
    /* free all the fields */
    sh = p->shape;
    pr = get_shape_prop(sh);
    for(i = 0; i < sh->prop_count; i++) {
        free_property(rt, &p->prop[i], pr->flags);
        pr++;
    }
    js_free_rt(rt, p->prop);
    /* as an optimization we destroy the shape immediately without
       putting it in gc_zero_ref_count_list */
    js_free_shape(rt, sh);
    /* fail safe */
    p->shape = NULL;
    p->prop = NULL;
    if (UNLIKELY(p->first_weak_ref)) {
        reset_weak_ref(rt, p);
    }
    finalizer = rt->class_array[p->class_id].finalizer;
    if (finalizer)
        (*finalizer)(rt, JS_MKPTR(JS_TAG_OBJECT, p));
    /* fail safe */
    p->class_id = 0;
    p->u.opaque = NULL;
    p->u.func.var_refs = NULL;
    p->u.func.home_object = NULL;
    remove_gc_object(&p->header);
    if (rt->gc_phase == JS_GC_PHASE_REMOVE_CYCLES && p->header.ref_count != 0) {
        list_add_tail(&p->header.link, &rt->gc_zero_ref_count_list);
    } else {
        js_free_rt(rt, p);
    }
}

static void free_gc_object(JSRuntime *rt, JSGCObjectHeader *gp)
{
    switch(gp->gc_obj_type) {
    case JS_GC_OBJ_TYPE_JS_OBJECT:
        free_object(rt, (JSObject *)gp);
        break;
    case JS_GC_OBJ_TYPE_FUNCTION_BYTECODE:
        free_function_bytecode(rt, (JSFunctionBytecode *)gp);
        break;
    default:
        abort();
    }
}
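
/*
 * Drains gc_zero_ref_count_list, freeing every GC object whose
 * ref_count has reached zero. Freeing one object may enqueue more
 * (children whose counts drop to zero in turn), hence the loop until
 * the list is empty. Setting JS_GC_PHASE_DECREF keeps
 * __JS_FreeValueRT() from re-entering this function while it runs.
 */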
void free_zero_refcount(JSRuntime *rt)
{
    struct list_head *el;
    JSGCObjectHeader *p;

    rt->gc_phase = JS_GC_PHASE_DECREF;
    for(;;) {
        el = rt->gc_zero_ref_count_list.next;
        if (el == &rt->gc_zero_ref_count_list)
            break;
        p = list_entry(el, JSGCObjectHeader, link);
        assert(p->ref_count == 0);
        free_gc_object(rt, p);
    }
    rt->gc_phase = JS_GC_PHASE_NONE;
}
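
/*
 * Phase 3 of JS_RunGC(): frees everything left in tmp_obj_list. Only
 * JS objects and function bytecode are freed explicitly; the other GC
 * object kinds are parked on gc_zero_ref_count_list and go away on
 * their own once the objects referencing them have been torn down.
 * Object memory whose release had to be deferred is freed in the
 * final loop.
 */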
static void gc_free_cycles(JSRuntime *rt)
{
    struct list_head *el, *el1;
    JSGCObjectHeader *p;
#ifdef DUMP_GC_FREE
    BOOL header_done = FALSE;
#endif

    rt->gc_phase = JS_GC_PHASE_REMOVE_CYCLES;
    for(;;) {
        el = rt->tmp_obj_list.next;
        if (el == &rt->tmp_obj_list)
            break;
        p = list_entry(el, JSGCObjectHeader, link);
        /* Only need to free the GC object associated with JS
           values. The rest will be automatically removed because they
           must be referenced by them. */
        switch(p->gc_obj_type) {
        case JS_GC_OBJ_TYPE_JS_OBJECT:
        case JS_GC_OBJ_TYPE_FUNCTION_BYTECODE:
#ifdef DUMP_GC_FREE
            if (!header_done) {
                printf("Freeing cycles:\n");
                JS_DumpObjectHeader(rt);
                header_done = TRUE;
            }
            JS_DumpGCObject(rt, p);
#endif
            free_gc_object(rt, p);
            break;
        default:
            list_del(&p->link);
            list_add_tail(&p->link, &rt->gc_zero_ref_count_list);
            break;
        }
    }
    rt->gc_phase = JS_GC_PHASE_NONE;

    list_for_each_safe(el, el1, &rt->gc_zero_ref_count_list) {
        p = list_entry(el, JSGCObjectHeader, link);
        assert(p->gc_obj_type == JS_GC_OBJ_TYPE_JS_OBJECT ||
               p->gc_obj_type == JS_GC_OBJ_TYPE_FUNCTION_BYTECODE);
        js_free_rt(rt, p);
    }
    init_list_head(&rt->gc_zero_ref_count_list);
}
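
/*
 * Cycle collector entry point. QuickJS values are reference counted,
 * so this only has to reclaim groups of objects that keep each other
 * alive through mutual references. It makes three passes over
 * rt->gc_obj_list: gc_decref() subtracts the counts contributed by
 * internal references, gc_scan() restores everything still reachable
 * from outside, and gc_free_cycles() frees whatever is left.
 */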
void JS_RunGC(JSRuntime *rt)
{
    /* decrement the reference of the children of each object. mark =
       1 after this pass. */
    gc_decref(rt);

    /* keep the GC objects with a non zero refcount and their children */
    gc_scan(rt);

    /* free the GC objects in a cycle */
    gc_free_cycles(rt);
}

/* Return false if not an object or if the object has already been
   freed (zombie objects are visible in finalizers when freeing
   cycles). */
BOOL JS_IsLiveObject(JSRuntime *rt, JSValueConst obj)
{
    JSObject *p;
    if (!JS_IsObject(obj))
        return FALSE;
    p = JS_VALUE_GET_OBJ(obj);
    return !p->free_mark;
}

/* called when the ref_count of 'v' reaches zero. */
void __JS_FreeValueRT(JSRuntime *rt, JSValue v)
{
    uint32_t tag = JS_VALUE_GET_TAG(v);

#ifdef DUMP_FREE
    {
        printf("Freeing ");
        if (tag == JS_TAG_OBJECT) {
            JS_DumpObject(rt, JS_VALUE_GET_OBJ(v));
        } else {
            JS_DumpValueShort(rt, v);
            printf("\n");
        }
    }
#endif

    switch(tag) {
    case JS_TAG_STRING:
        {
            JSString *p = JS_VALUE_GET_STRING(v);
            if (p->atom_type) {
                JS_FreeAtomStruct(rt, p);
            } else {
#ifdef DUMP_LEAKS
                list_del(&p->link);
#endif
                js_free_rt(rt, p);
            }
        }
        break;
    case JS_TAG_OBJECT:
    case JS_TAG_FUNCTION_BYTECODE:
        {
            JSGCObjectHeader *p = JS_VALUE_GET_PTR(v);
            if (rt->gc_phase != JS_GC_PHASE_REMOVE_CYCLES) {
                list_del(&p->link);
                list_add(&p->link, &rt->gc_zero_ref_count_list);
                if (rt->gc_phase == JS_GC_PHASE_NONE) {
                    free_zero_refcount(rt);
                }
            }
        }
        break;
    case JS_TAG_MODULE:
        abort(); /* never freed here */
        break;
#ifdef CONFIG_BIGNUM
    case JS_TAG_BIG_INT:
    case JS_TAG_BIG_FLOAT:
        {
            JSBigFloat *bf = JS_VALUE_GET_PTR(v);
            bf_delete(&bf->num);
            js_free_rt(rt, bf);
        }
        break;
    case JS_TAG_BIG_DECIMAL:
        {
            JSBigDecimal *bf = JS_VALUE_GET_PTR(v);
            bfdec_delete(&bf->num);
            js_free_rt(rt, bf);
        }
        break;
#endif
    case JS_TAG_SYMBOL:
        {
            JSAtomStruct *p = JS_VALUE_GET_PTR(v);
            JS_FreeAtomStruct(rt, p);
        }
        break;
    default:
        printf("__JS_FreeValue: unknown tag=%d\n", tag);
        abort();
    }
}

void __JS_FreeValue(JSContext *ctx, JSValue v)
{
    __JS_FreeValueRT(ctx->rt, v);
}