arena.c
#ifndef NO_CONFIG // defined by test_arena.c
#include "config.h"
#endif

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <util/alloc.h>
#include <util/arena.h>
#include <util/asan.h>
#include <util/unused.h>

/// A chunk of backing memory. Allocatable space begins immediately after
/// this header.
struct arena_chunk {
  arena_chunk_t *previous; ///< previous chunk that was in use
};

/// popcount(value) == 1?
static UNUSED bool is_power_of_2(size_t value) {
  if (value == 0) {
    return false;
  }
  while ((value & 1) != 1) {
    value >>= 1;
  }
  return value == 1;
}

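// An editorial aside, not part of arena.c: the same predicate is often
// written with a loop-free bit trick. A minimal sketch, under the assumption
// that the hypothetical name is_power_of_2_branchless is free:
static UNUSED bool is_power_of_2_branchless(size_t value) {
  // a power of 2 has exactly one set bit, so clearing the lowest set bit
  // (value & (value - 1)) leaves zero if and only if no other bit was set
  return value != 0 && (value & (value - 1)) == 0;
}
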
/// allocate and install a new chunk large enough for a pending request
///
/// @param arena Arena to extend
/// @param req_alignment Alignment of the pending request
/// @param req_size Number of bytes in the pending request
static void more_core(arena_t *arena, size_t req_alignment, size_t req_size) {
  assert(arena != NULL);
  assert(req_alignment != 0);

  // A default number of bytes to allocate in a chunk. The aim is for the
  // resulting allocation to be a multiple of the system page size, to
  // encourage the system to give us entire pages. If this does not work out,
  // it is not critical.
  enum { DEFAULT_CHUNK = 16384 - sizeof(arena_chunk_t) };

  size_t chunk_size = DEFAULT_CHUNK;
  // override the default size if we are allocating something too large
  if (chunk_size < req_size + req_alignment - 1) {
    chunk_size = req_size + req_alignment - 1;
  }

  arena_chunk_t *const more = gv_alloc(sizeof(arena_chunk_t) + chunk_size);

  // mark the newly available space as unused
  ASAN_POISON((char *)more + sizeof(arena_chunk_t), chunk_size);

  // install the new chunk
  more->previous = arena->source;
  arena->source = more;
  arena->remaining = chunk_size;
}

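// Sizing note (editorial): requesting req_size + req_alignment - 1 bytes
// guarantees the retry in gv_arena_alloc cannot fail, because rounding the
// end of the free region down to an alignment boundary discards at most
// req_alignment - 1 bytes. E.g. for req_size = 100 and req_alignment = 64, a
// 163-byte chunk always contains a 64-byte-aligned 100-byte span.
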
/// attempt an allocation from the current chunk
///
/// @param arena Arena to allocate from
/// @param alignment Required alignment of the result
/// @param size Number of bytes needed
/// @return Start of the allocation, or NULL if the chunk cannot satisfy it
static void *alloc(arena_t *arena, size_t alignment, size_t size) {
  assert(arena != NULL);
  assert(alignment != 0);
  assert(is_power_of_2(alignment));

  if (arena->remaining < size) {
    return NULL;
  }

  const uintptr_t base = (uintptr_t)arena->source + sizeof(arena_chunk_t);
  const uintptr_t limit = base + arena->remaining;

  // Allocate from the end of the chunk memory, for simplicity. E.g.:
  //
  //              actual allocation ┐     ┌ wasted space
  //                            ┌───┴───┬─┴─┐
  //        ┌────────┬──────────┬───────┬───┬───────────────┐
  // chunk: │previous│ <free> … │       │   │ <allocated> … │
  //        └────────┴──────────┴───────┴───┴───────────────┘
  //                  ▲          ▲      ▲   ▲
  //             base ┘    start ┘      │   └ limit
  //                                    └ start + size
  const uintptr_t start = (limit - size) & ~(alignment - 1);

  if (start < base) {
    // we had enough bytes, but not enough aligned bytes
    return NULL;
  }

  arena->remaining -= limit - start;

  // Only unpoison the narrow allocation, not the full area we are carving
  // off. Repeating the diagram from above:
  //
  //               unpoisoning this ┐     ┌ not unpoisoning this
  //                            ┌───┴───┬─┴─┐
  //        ┌────────┬──────────┬───────┬───┬───────────────┐
  // chunk: │previous│ <free> … │       │   │ <allocated> … │
  //        └────────┴──────────┴───────┴───┴───────────────┘
  void *const ret = (void *)start;
  ASAN_UNPOISON(ret, size);

  return ret;
}

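// Worked example (editorial): with limit = 0x1010, size = 5 and
// alignment = 8, start = (0x1010 - 5) & ~(uintptr_t)7 = 0x1008, so the
// caller gets 0x1008 through 0x100C and the 3 bytes 0x100D through 0x100F
// become the "wasted space" in the diagram; remaining shrinks by
// limit - start = 8.
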
void *gv_arena_alloc(arena_t *arena, size_t alignment, size_t size) {
  assert(arena != NULL);

  if (size == 0) {
    return NULL;
  }

  void *ptr = alloc(arena, alignment, size);

  // if we failed, get some more memory and try again
  if (ptr == NULL) {
    more_core(arena, alignment, size);
    ptr = alloc(arena, alignment, size);
  }

  return ptr;
}

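// Illustrative usage (an editorial sketch, not part of arena.c): allocating
// a typed object with its natural alignment. example_point and
// example_new_point are hypothetical names.
struct example_point {
  double x, y;
};

static UNUSED struct example_point *example_new_point(arena_t *arena) {
  struct example_point *const p = gv_arena_alloc(
      arena, _Alignof(struct example_point), sizeof(struct example_point));
  // gv_alloc exits on out-of-memory, so a non-zero-size request returns
  // non-NULL here
  *p = (struct example_point){0};
  return p;
}
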
char *gv_arena_strdup(arena_t *arena, const char *s) {
  assert(arena != NULL);
  assert(s != NULL);

  const size_t len = strlen(s);
  char *const ret = gv_arena_alloc(arena, 1, len + 1);
  assert(ret != NULL);
  memcpy(ret, s, len);
  ret[len] = '\0';

  return ret;
}

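// Editorial note: the assert(ret != NULL) above documents an invariant
// rather than guarding a failure path; gv_arena_alloc only returns NULL for
// zero-byte requests, and len + 1 is always at least 1. Out-of-memory
// terminates the process inside gv_alloc instead.
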
void gv_arena_free(arena_t *arena, void *ptr, size_t size) {
  assert(arena != NULL);

  if (ptr == NULL) {
    return;
  }

  // teach ASan that this region should no longer be accessible
  ASAN_POISON(ptr, size);

  // we do not actually deallocate the memory, but leave it to be freed when
  // the arena is eventually reset
  (void)arena;
}

void gv_arena_reset(arena_t *arena) {
  assert(arena != NULL);

  while (arena->source != NULL) {
    arena_chunk_t *const previous = arena->source->previous;
    free(arena->source);
    arena->source = previous;
  }

  *arena = (arena_t){0};
}
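
// End-to-end sketch (editorial, not part of arena.c): the expected lifecycle
// is zero-initialize, allocate freely, then reset once to release every
// chunk. example_lifecycle is a hypothetical name.
static UNUSED void example_lifecycle(void) {
  arena_t arena = {0}; // matches the post-reset state above; no chunks yet
  char *const s = gv_arena_strdup(&arena, "hello");
  double *const d =
      gv_arena_alloc(&arena, _Alignof(double), 8 * sizeof(double));
  (void)s;
  gv_arena_free(&arena, d, 8 * sizeof(double)); // poisons only; no real free
  gv_arena_reset(&arena); // walks the chunk list, freeing each block
}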