mirror of https://github.com/jart/cosmopolitan.git
Make dlmalloc a little faster
This change also documents the libc arena allocator.
parent fa1e8a3e65
commit a41669dec6

5 changed files with 59 additions and 19 deletions
@@ -126,7 +126,7 @@ static dontinline bool __arena_grow(size_t offset, size_t request) {
   return false;
 }
 
-static void *__arena_alloc(size_t a, size_t n) {
+static inline void *__arena_alloc(size_t a, size_t n) {
   size_t o;
   if (!n) n = 1;
   o = ROUNDUP(__arena.offset[__arena.depth] + sizeof(size_t), a);
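The surrounding context shows the arena is a simple bump allocator: each allocation reserves a size_t header and rounds the current offset up to the requested alignment. A minimal sketch of that step, assuming ROUNDUP is the usual power-of-two round-up macro; the bump() helper below is made up for illustration:

#include <stddef.h>

// Assumed to match cosmopolitan's ROUNDUP for power-of-two alignments.
#define ROUNDUP(x, a) (((x) + (a)-1) & ~((size_t)(a)-1))

// Illustrative bump-allocation step, in the spirit of __arena_alloc():
// reserve a size header, align the payload, advance the arena offset.
static size_t bump(size_t offset, size_t align, size_t need, size_t *payload) {
  size_t o = ROUNDUP(offset + sizeof(size_t), align);  // aligned payload start
  *payload = o;
  return o + need;  // new top-of-arena offset after this allocation
}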
@@ -299,6 +299,25 @@ static void __arena_init(void) {
   atexit(__arena_destroy);
 }
 
+/**
+ * Pushes memory arena.
+ *
+ * This allocator gives a ~3x performance boost over dlmalloc, mostly
+ * because it isn't thread safe and it doesn't do defragmentation.
+ *
+ * Calling this function will push a new arena. It may be called
+ * multiple times from the main thread recursively. The first time it's
+ * called, it hooks all the regular memory allocation functions. Any
+ * allocations that were made previously, outside the arena, will be
+ * passed on to the previous hooks. The basic idea is that, rather than
+ * bothering with free(), you can just call __arena_pop() to bulk free.
+ *
+ * Arena allocations also have a slight size advantage, since 32-bit
+ * pointers are always used. The maximum amount of arena memory is
+ * 805,175,296 bytes.
+ *
+ * @see __arena_pop()
+ */
 void __arena_push(void) {
   if (UNLIKELY(!__arena.once)) {
     __arena_init();
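The new comment spells out the intended workflow: push, allocate freely, pop to bulk free. A minimal usage sketch under those assumptions; the handle_request() function, the header paths, and the allocation size are invented for illustration:

#include "libc/mem/arena.h"  // assumed location of __arena_push()/__arena_pop()
#include "libc/mem/mem.h"    // assumed location of malloc()

// Everything allocated between the push and the pop is released in one
// shot by __arena_pop(); no individual free() calls are needed.
void handle_request(void) {
  __arena_push();            // first call hooks malloc() and friends
  char *buf = malloc(4096);  // served by the arena
  buf[0] = '\0';             // ... use the memory ...
  __arena_pop();             // everything since the push is freed at once
}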
@@ -313,6 +332,15 @@ void __arena_push(void) {
   ++__arena.depth;
 }
 
+/**
+ * Pops memory arena.
+ *
+ * This pops the most recently created arena, freeing all the memory
+ * that was allocated between the push and pop arena calls. If this is
+ * the last arena on the stack, then the old malloc hooks are restored.
+ *
+ * @see __arena_push()
+ */
 void __arena_pop(void) {
   size_t a, b, greed;
   __arena_check();
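The push comment says arenas may be pushed recursively, and the pop comment says the previous hooks come back once the last arena is popped. A small sketch of how nesting would behave under that description, using the same assumed headers as the previous sketch; the function and variable names are illustrative:

// Each __arena_pop() releases only what was allocated since its
// matching __arena_push(); popping the last arena restores the old
// malloc hooks.
void nested(void) {
  __arena_push();
  char *a = malloc(100);  // released by the outer pop
  a[0] = 1;
  __arena_push();
  char *b = malloc(200);  // released by the inner pop
  b[0] = 2;
  __arena_pop();          // b is gone, a is still valid
  __arena_pop();          // a is gone, previous hooks restored
}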
@@ -20,6 +20,7 @@
 #include "libc/fmt/leb128.h"
 #include "libc/intrin/lockcmpxchg.h"
 #include "libc/nexgen32e/crc32.h"
+#include "libc/runtime/internal.h"
 #include "libc/runtime/runtime.h"
 #include "libc/x/x.h"
 #include "third_party/zlib/zlib.h"
@@ -47,16 +48,7 @@ void *xloadzd(bool *o, void **t, const void *p, size_t n, size_t m, size_t c,
   int64_t x, y;
   assert(z == 2 || z == 4);
   b = q = malloc(m);
-  zs.zfree = 0;
-  zs.zalloc = 0;
-  zs.next_in = p;
-  zs.avail_in = n;
-  zs.total_in = n;
-  zs.avail_out = m;
-  zs.total_out = m;
-  zs.next_out = (void *)q;
-  inflateInit2(&zs, -MAX_WBITS);
-  inflate(&zs, Z_NO_FLUSH);
+  __inflate(q, m, p, n);
   r = memalign(z, c * z);
   for (x = i = 0; i < c; ++i) {
     b += unzleb64(b, 10, &y);
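The deleted lines spell out a one-shot raw-deflate decompression against zlib's streaming API; the new code folds that into a single __inflate() call, whose (out, outsize, in, insize) argument order is inferred here from the call site. For comparison, a self-contained sketch of the pattern being replaced, written against the public zlib API; the helper name is made up:

#include <stddef.h>
#include "third_party/zlib/zlib.h"

// Hypothetical stand-in showing what __inflate() replaces here:
// decompress a raw deflate stream (no zlib/gzip header) in one shot.
static int one_shot_inflate(void *out, size_t outsize, const void *in, size_t insize) {
  z_stream zs = {0};               // zalloc/zfree left 0 selects the default allocator
  zs.next_in = (void *)in;
  zs.avail_in = insize;
  zs.next_out = out;
  zs.avail_out = outsize;
  if (inflateInit2(&zs, -MAX_WBITS) != Z_OK) return -1;  // -MAX_WBITS means raw deflate
  int rc = inflate(&zs, Z_FINISH);  // whole input, whole output, one call
  inflateEnd(&zs);
  return rc == Z_STREAM_END ? 0 : -1;
}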