Graphviz 13.1.3~dev.20250813.2319
Loading...
Searching...
No Matches
arena.c
Go to the documentation of this file.
1
3
4#include <assert.h>
5#include <stdbool.h>
6#include <stdint.h>
7#include <stdlib.h>
8#include <string.h>
9#include <util/alloc.h>
10#include <util/arena.h>
11#include <util/asan.h>
12#include <util/unused.h>
13
21
23static UNUSED bool is_power_of_2(size_t value) {
24 if (value == 0) {
25 return false;
26 }
27 while ((value & 1) != 1) {
28 value >>= 1;
29 }
30 return value == 1;
31}
32
38static void more_core(arena_t *arena, size_t req_alignment, size_t req_size) {
39 assert(arena != NULL);
40 assert(req_alignment != 0);
41
42 // A default number of bytes to allocate in a chunk. The aim is for the
43 // resulting allocation to be a multiple of the system page size, to encourage
44 // the system to give us entire pages. If this does not work out, it is not
45 // critical.
46 enum { DEFAULT_CHUNK = 16384 - sizeof(arena_chunk_t) };
47
48 size_t chunk_size = DEFAULT_CHUNK;
49 // override the default size if we are allocating something too large
50 if (chunk_size < req_size + req_alignment - 1) {
51 chunk_size = req_size + req_alignment - 1;
52 }
53
54 arena_chunk_t *const more = gv_alloc(sizeof(arena_chunk_t) + chunk_size);
55
56 // mark the newly available space as unused
57 ASAN_POISON((char *)more + sizeof(arena_chunk_t), chunk_size);
58
59 // install the new chunk
60 more->previous = arena->source;
61 arena->source = more;
62 arena->remaining = chunk_size;
63}
64
71static void *alloc(arena_t *arena, size_t alignment, size_t size) {
72 assert(arena != NULL);
73 assert(alignment != 0);
74 assert(is_power_of_2(alignment));
75
76 if (arena->remaining < size) {
77 return NULL;
78 }
79
80 const uintptr_t base = (uintptr_t)arena->source + sizeof(arena_chunk_t);
81 const uintptr_t limit = base + arena->remaining;
82
83 // Allocate from the end of the chunk memory, for simplicity. E.g.:
84 //
85 // actual allocation ┐ ┌ wasted space
86 // ┌───┴───┬─┴─┐
87 // ┌────────┬──────────┬───────┬───┬───────────────┐
88 // chunk: │previous│ <free> … │ │ │ <allocated> … │
89 // └────────┴──────────┴───────┴───┴───────────────┘
90 // ▲ ▲ ▲ ▲
91 // base ┘ start ┘ │ └ limit
92 // └ start + size
93 const uintptr_t start = (limit - size) & ~(alignment - 1);
94
95 if (start < base) {
96 // we had enough bytes, but not enough aligned bytes
97 return NULL;
98 }
99
100 arena->remaining -= limit - start;
101
102 // Only unpoison the narrow allocation, not the full area we are carving off.
103 // Repeating the diagram from above:
104 //
105 // unpoisoning this ┐ ┌ not unpoisoning this
106 // ┌───┴───┬─┴─┐
107 // ┌────────┬──────────┬───────┬───┬───────────────┐
108 // chunk: │previous│ <free> … │ │ │ <allocated> … │
109 // └────────┴──────────┴───────┴───┴───────────────┘
110 void *const ret = (void *)start;
111 ASAN_UNPOISON(ret, size);
112
113 return ret;
114}
115
116void *gv_arena_alloc(arena_t *arena, size_t alignment, size_t size) {
117 assert(arena != NULL);
118
119 if (size == 0) {
120 return NULL;
121 }
122
123 void *ptr = alloc(arena, alignment, size);
124
125 // if we failed, get some more memory and try again
126 if (ptr == NULL) {
127 more_core(arena, alignment, size);
128 ptr = alloc(arena, alignment, size);
129 }
130
131 return ptr;
132}
133
134char *gv_arena_strdup(arena_t *arena, const char *s) {
135 assert(arena != NULL);
136 assert(s != NULL);
137
138 const size_t len = strlen(s);
139 char *const ret = gv_arena_alloc(arena, 1, len + 1);
140 assert(ret != NULL);
141 memcpy(ret, s, len);
142 ret[len] = '\0';
143
144 return ret;
145}
146
147void gv_arena_free(arena_t *arena, void *ptr, size_t size) {
148 assert(arena != NULL);
149
150 if (ptr == NULL) {
151 return;
152 }
153
154 // teach ASan that this region should no longer be accessible
155 ASAN_POISON(ptr, size);
156
157 // we do not actually deallocate the memory, but leave it to be freed when the
158 // arena is eventually reset
159 (void)arena;
160}
161
163 assert(arena != NULL);
164
165 while (arena->source != NULL) {
166 arena_chunk_t *const previous = arena->source->previous;
167 free(arena->source);
168 arena->source = previous;
169 }
170
171 *arena = (arena_t){0};
172}
Memory allocation wrappers that exit on failure.
static void * gv_alloc(size_t size)
Definition alloc.h:47
void gv_arena_reset(arena_t *arena)
Definition arena.c:162
static void more_core(arena_t *arena, size_t req_alignment, size_t req_size)
Definition arena.c:38
static void * alloc(arena_t *arena, size_t alignment, size_t size)
Definition arena.c:71
void * gv_arena_alloc(arena_t *arena, size_t alignment, size_t size)
Definition arena.c:116
char * gv_arena_strdup(arena_t *arena, const char *s)
Definition arena.c:134
static UNUSED bool is_power_of_2(size_t value)
is the value a power of two, i.e. does popcount(value) == 1 hold?
Definition arena.c:23
void gv_arena_free(arena_t *arena, void *ptr, size_t size)
Definition arena.c:147
Region-based memory allocator.
struct arena_chunk arena_chunk_t
Definition arena.h:22
macros for interacting with Address Sanitizer
#define ASAN_POISON(addr, size)
Definition asan.h:15
#define ASAN_UNPOISON(addr, size)
Definition asan.h:22
static double len(glCompPoint p)
Definition glutils.c:136
void free(void *)
node NULL
Definition grammar.y:181
arena_chunk_t * previous
previous chunk that was in use
Definition arena.c:19
arena_chunk_t * source
current chunk being allocated out of
Definition arena.h:32
size_t remaining
number of free bytes remaining in source
Definition arena.h:33
Definition grammar.c:90
abstraction for squashing compiler warnings for unused symbols
#define UNUSED
Definition unused.h:25