29#if defined(__cplusplus)
35#define ARENA_INLINE __forceinline
36#define ARENA_PREFETCH(addr) _mm_prefetch((const char*)(addr), _MM_HINT_T0)
37#define ARENA_LIKELY(x) (x)
38#define ARENA_UNLIKELY(x) (x)
39#define ARENA_ALIGNED(n) __declspec(align(n))
41#define ARENA_INLINE inline __attribute__((always_inline))
42#define ARENA_PREFETCH(addr) __builtin_prefetch((addr), 1, 3)
43#define ARENA_LIKELY(x) __builtin_expect(!!(x), 1)
44#define ARENA_UNLIKELY(x) __builtin_expect(!!(x), 0)
45#define ARENA_ALIGNED(n) __attribute__((aligned(n)))
49#define ARENA_DEFAULT_ALIGN 16
52#define ARENA_MIN_BLOCK_SIZE (64 * 1024)
/*
 * Arena allocator state, aligned to a 64-byte cache line.
 * Only `total_committed` is visible in this chunk; the remaining members
 * (block-list head, current_block, curr bump cursor, end pointer — all
 * used by the inline functions below) are declared on lines not shown
 * here.
 */
typedef struct ARENA_ALIGNED(64) Arena {
    size_t total_committed;  /* sum of every block's capacity, in bytes */
/*
 * Initialize `a` over a caller-provided buffer of `size` bytes.
 * NOTE(review): presumably the arena does not take ownership of `buf`
 * (the caller frees it) — confirm against the implementation file.
 */
void arena_init(Arena* restrict a,
                void* restrict buf,
                size_t size);
/*
 * Allocate and initialize a new heap-backed arena.
 * `reserve_size` is the initial capacity request; returns NULL on
 * failure (presumably — confirm in the implementation).  Pair with
 * arena_destroy().
 */
Arena* arena_create(
    size_t reserve_size);
113static ARENA_INLINE
void arena_reset(Arena* restrict a) {
114 if (!a || !a->head)
return;
115 a->current_block = a->head;
116 a->curr = a->head->base;
117 a->end = a->head->end;
/*
 * Release all memory owned by an arena obtained from arena_create().
 * NOTE(review): behavior for arenas set up with arena_init() over a
 * caller buffer is not visible here — confirm before calling.
 */
void arena_destroy(Arena* restrict arena);
128static ARENA_INLINE
size_t arena_committed_size(
const Arena* restrict a) {
return a->total_committed; }
135static ARENA_INLINE
size_t arena_used_size(
const Arena* restrict a) {
136 if (!a || !a->current_block)
return 0;
137 size_t block_capacity = (size_t)(a->current_block->end - a->current_block->base);
138 size_t block_used = (size_t)(a->curr - a->current_block->base);
139 return (a->total_committed - block_capacity) + block_used;
/*
 * Slow path taken when the fast bump in the inline allocators does not
 * fit in the current block; presumably commits/chains a new block —
 * confirm in the implementation file.  Not for direct use.
 * NOTE(review): a file-scope identifier starting with an underscore is
 * reserved to the implementation (C11 7.1.3); consider renaming to
 * e.g. arena_alloc_slow_.
 */
void* _arena_alloc_slow(Arena* restrict arena,
                        size_t size,
                        size_t alignment);
154static ARENA_INLINE
void* arena_alloc_align(Arena* restrict arena,
size_t size,
size_t alignment) {
155 if (size == 0)
return NULL;
157 uintptr_t aligned = ((uintptr_t)arena->curr + alignment - 1) & ~(alignment - 1);
158 uintptr_t next = aligned + size;
160 if (ARENA_LIKELY(next <= (uintptr_t)arena->end)) {
161 arena->curr = (
char*)next;
162 return (
void*)aligned;
165 return _arena_alloc_slow(arena, size, alignment);
172static ARENA_INLINE
void* arena_alloc(Arena* restrict arena,
size_t size) {
173 return arena_alloc_align(arena, size, ARENA_DEFAULT_ALIGN);
181static ARENA_INLINE
void* arena_alloc_unaligned(Arena* restrict arena,
size_t size) {
182 if (size == 0)
return NULL;
184 char* ptr = arena->curr;
185 char* next = ptr + size;
187 if (ARENA_LIKELY(next <= arena->end)) {
192 return _arena_alloc_slow(arena, size, 1);
196#define ARENA_ALLOC(arena, type) ((type*)arena_alloc_align((arena), sizeof(type), _Alignof(type)))
199#define ARENA_ALLOC_ARRAY(arena, type, count) \
200 ((type*)arena_alloc_align((arena), sizeof(type) * (count), _Alignof(type)))
/*
 * Allocate one zero-initialized `type`.  Evaluates to type* (or NULL on
 * OOM).  Implemented with a GNU statement expression `({ ... })` — the
 * wrappers were elided in the extracted text and are restored here;
 * note this extension is unavailable under plain MSVC.
 */
#define ARENA_ALLOC_ZERO(arena, type) \
    ({ \
        type* _ptr = ARENA_ALLOC((arena), type); \
        (_ptr) ? (type*)memset(_ptr, 0, sizeof(type)) : NULL; \
    })

/*
 * Allocate `count` zero-initialized objects of `type`.
 * CAUTION: `count` is evaluated twice — do not pass expressions with
 * side effects.
 */
#define ARENA_ALLOC_ARRAY_ZERO(arena, type, count) \
    ({ \
        type* _ptr = ARENA_ALLOC_ARRAY((arena), type, (count)); \
        (_ptr) ? (type*)memset(_ptr, 0, sizeof(type) * (count)) : NULL; \
    })
225static ARENA_INLINE
bool arena_alloc_batch(Arena* restrict arena,
const size_t* restrict sizes,
size_t count,
226 void** restrict out_ptrs) {
227 if (!arena || !sizes || !out_ptrs || count == 0)
return false;
229 const size_t mask = ARENA_DEFAULT_ALIGN - 1;
231 for (
size_t i = 0; i < count; ++i) {
232 total = (total + mask) & ~mask;
236 char* base = (
char*)arena_alloc(arena, total);
237 if (!base)
return false;
240 for (
size_t i = 0; i < count; ++i) {
241 cur = (
char*)(((uintptr_t)cur + mask) & ~mask);
253static ARENA_INLINE
char* arena_strdup(Arena* restrict arena,
const char* restrict str) {
254 if (!arena || !str)
return NULL;
255 size_t len = strlen(str);
256 char* dup = (
char*)arena_alloc_unaligned(arena, len + 1);
257 if (!dup)
return NULL;
258 memcpy(dup, str, len + 1);
267static ARENA_INLINE
char* arena_strdupn(Arena* restrict arena,
const char* restrict str,
size_t length) {
268 if (!arena || !str)
return NULL;
269 char* dup = (
char*)arena_alloc_unaligned(arena, length + 1);
270 if (!dup)
return NULL;
271 memcpy(dup, str, length);
/* Close the extern "C" linkage block opened at the top of the header. */
#if defined(__cplusplus)
}
#endif