summaryrefslogtreecommitdiff
path: root/source/base/base_arena.c
diff options
context:
space:
mode:
authornasr <nsrddyn@gmail.com>2026-04-13 15:33:05 +0200
committernasr <nsrddyn@gmail.com>2026-04-13 15:36:24 +0200
commit9d09d66a273f68fae7efb71504bf40c664b91983 (patch)
tree41a46c52a01338bf22d5f3ebdf0bb27dc3d33cc1 /source/base/base_arena.c
feature(main): init
feature(main): init feature(main): init feature(main): init
Diffstat (limited to 'source/base/base_arena.c')
-rwxr-xr-xsource/base/base_arena.c138
1 file changed, 138 insertions, 0 deletions
diff --git a/source/base/base_arena.c b/source/base/base_arena.c
new file mode 100755
index 0000000..7e9c44b
--- /dev/null
+++ b/source/base/base_arena.c
@@ -0,0 +1,138 @@
1internal mem_arena *
2arena_create(u64 capacity)
3{
4 mem_arena *arena = (mem_arena *)mmap(
5 /* kernel decides where to throw the arena */
6 NULL,
7 capacity + sizeof(mem_arena),
8 PROT_READ | PROT_WRITE,
9 MAP_SHARED | MAP_ANONYMOUS,
10 -1,
11 0);
12
13 if (arena == MAP_FAILED)
14 {
15 return NULL;
16 }
17
18 arena->capacity = capacity;
19 arena->base_position = (u8 *)arena + sizeof(mem_arena);
20 arena->current_position = 0;
21 arena->previous_position = 0;
22
23 return arena;
24}
25
26internal void
27arena_destroy(mem_arena *arena)
28{
29 if (!arena)
30 {
31 return;
32 }
33 munmap(arena, arena->capacity + sizeof(mem_arena));
34}
35internal void *
36arena_alloc(mem_arena *arena, u64 size, b32 zero)
37{
38 if (!arena)
39 {
40 return NULL;
41 }
42 u64 aligned = Align(arena->current_position, ARENA_ALIGN);
43 u64 new_pos = aligned + size;
44 if (new_pos > arena->capacity)
45 {
46 return NULL;
47 }
48
49 void *out = arena->base_position + aligned;
50
51 arena->previous_position = arena->current_position;
52 arena->current_position = aligned + size;
53
54 if (zero) MemSet(out, size);
55
56 return out;
57}
58
59internal void
60arena_pop(mem_arena *arena, u64 size)
61{
62 size = MIN(size, arena->current_position);
63 arena->current_position -= size;
64}
65
66internal void
67arena_pop_to(mem_arena *arena, u64 pos)
68{
69 u64 size = pos < arena->current_position ? arena->current_position - pos : 0;
70 arena_pop(arena, size);
71}
72
73internal void
74arena_clear(mem_arena *arena)
75{
76 arena->current_position = 0;
77}
78
79internal mem_arena *
80arena_resize_align(mem_arena *arena, void *old_memory, u64 new_size, u64 old_size, umm alignment)
81{
82 u8 *old_mem = (u8 *)old_memory;
83
84 if (!is_pow(alignment))
85 {
86 Align(arena->current_position, alignment);
87 }
88
89 if (old_memory == NULL || old_size == 0)
90 {
91 return (mem_arena *)arena_alloc(arena, new_size, 0);
92 }
93 else if ((old_mem >= arena->base_position && old_mem < arena->base_position + arena->capacity))
94 {
95 if ((arena->base_position + arena->previous_position) == old_memory)
96 {
97 arena->current_position = arena->previous_position + new_size;
98 if (new_size > old_size)
99 {
100 MemSet(&arena->current_position, new_size - old_size);
101 }
102 return (mem_arena *)old_memory;
103 }
104 else
105 {
106 void *new_memory = arena_alloc(arena, new_size, 0);
107 umm copy_size = old_size < new_size ? old_size : new_size;
108 memmove(new_memory, old_mem, copy_size);
109 }
110 }
111 else
112 {
113 verify(0);
114 }
115 return NULL;
116}
117
118internal mem_arena *
119arena_resize(mem_arena *arena, void *old_memory, u64 new_size, u64 old_size)
120{
121 return arena_resize_align(arena, old_memory, new_size, old_size, ARENA_ALIGN);
122}
123
124internal temp_arena
125temp_arena_begin(mem_arena *arena)
126{
127 temp_arena t;
128 t.arena = arena;
129 t.start_position = arena->current_position;
130
131 return t;
132}
133
134internal void
135temp_arena_end(temp_arena temp)
136{
137 temp.arena->current_position = temp.start_position;
138}