14#define STATIC_BUFFER_SIZE (1024 * 1024)
16static _Thread_local
char static_buffer[STATIC_BUFFER_SIZE] ARENA_ALIGNED(64);
17static _Thread_local
bool static_buffer_in_use =
false;
19static ARENA_INLINE
size_t get_page_size(
void) {
25 return (
size_t)sysconf(_SC_PAGESIZE);
29void arena_init(Arena* restrict a,
void* restrict buf,
size_t size) {
30 memset(a, 0,
sizeof(Arena));
31 a->page_size = get_page_size();
32 a->heap_allocated =
false;
34 a->first_block.base = (
char*)buf;
35 a->first_block.end = (
char*)buf + size;
36 a->first_block.is_static =
true;
37 a->first_block.next = NULL;
39 a->head = &a->first_block;
40 a->current_block = a->head;
41 a->curr = a->head->base;
42 a->end = a->head->end;
43 a->total_committed = size;
46Arena* arena_create(
size_t reserve_size) {
47 Arena* a = (Arena*)aligned_alloc_xp(64,
sizeof(Arena));
50 if (!static_buffer_in_use && reserve_size <= STATIC_BUFFER_SIZE) {
51 arena_init(a, static_buffer, STATIC_BUFFER_SIZE);
52 static_buffer_in_use =
true;
54 size_t initial_size = reserve_size > 0 ? reserve_size : ARENA_MIN_BLOCK_SIZE;
55 initial_size = (initial_size + get_page_size() - 1) & ~(get_page_size() - 1);
57 char* buf = (
char*)aligned_alloc_xp(64, initial_size);
62 arena_init(a, buf, initial_size);
63 a->first_block.is_static =
false;
66 a->heap_allocated =
true;
70void* _arena_alloc_slow(Arena* restrict a,
size_t size,
size_t alignment) {
74 uintptr_t aligned = ((uintptr_t)next->base + alignment - 1) & ~(alignment - 1);
75 if (aligned + size <= (uintptr_t)next->
end) {
76 a->current_block = next;
77 a->curr = (
char*)(aligned + size);
79 return (
void*)aligned;
85 size_t current_size = (size_t)(a->current_block->end - a->current_block->base);
86 size_t needed =
sizeof(
ArenaBlock) + alignment + size;
87 size_t next_size = current_size * 2;
89 if (next_size < needed) next_size = needed;
90 if (next_size < ARENA_MIN_BLOCK_SIZE) next_size = ARENA_MIN_BLOCK_SIZE;
91 next_size = (next_size + a->page_size - 1) & ~(a->page_size - 1);
94 char* ptr = (
char*)aligned_alloc_xp(64, next_size);
95 if (!ptr)
return NULL;
98 block->base = (
char*)(((uintptr_t)(ptr +
sizeof(
ArenaBlock)) + alignment - 1) & ~(alignment - 1));
99 block->
end = ptr + next_size;
101 block->next = a->current_block->next;
103 a->current_block->next = block;
104 a->current_block = block;
105 a->total_committed += next_size;
107 uintptr_t aligned = ((uintptr_t)block->base + alignment - 1) & ~(alignment - 1);
108 a->curr = (
char*)(aligned + size);
111 return (
void*)aligned;
114void arena_destroy(Arena* restrict a) {
120 if (block && !block->
is_static) aligned_free_xp(block->base);
125 block = block ? block->next : NULL;
129 aligned_free_xp(temp);
132 if (a->heap_allocated) aligned_free_xp(a);
/* Aligned memory allocation functions for cross-platform support. */