Graphviz 13.1.3~dev.20250829.1031
Loading...
Searching...
No Matches
list.c
Go to the documentation of this file.
1#include <assert.h>
2#include <errno.h>
3#include <stdbool.h>
4#include <stdint.h>
5#include <stdio.h>
6#include <stdlib.h>
7#include <string.h>
8#include <util/alloc.h>
9#include <util/asan.h>
10#include <util/exit.h>
11#include <util/gv_math.h>
12#include <util/list-private.h>
13#include <util/prisize_t.h>
14
15static const void *slot_from_const_list(const list_t_ *list, size_t index,
16 size_t stride) {
17 assert(list != NULL);
18 assert(list->base != NULL || index == 0 || stride == 0);
19
20 const char *const base = list->base;
21 return base + index * stride;
22}
23
24static void *slot_from_list(list_t_ *list, size_t index, size_t stride) {
25 assert(list != NULL);
26 assert(list->base != NULL || index == 0 || stride == 0);
27
28 char *const base = list->base;
29 return base + index * stride;
30}
31
/// compute the read-only address of slot `index` in a raw buffer `base`
static const void *slot_from_const_base(const void *base, size_t index,
                                        size_t stride) {
  assert(base != NULL || index == 0 || stride == 0);

  const char *const origin = base;
  return &origin[index * stride];
}
39
/// compute the writable address of slot `index` in a raw buffer `base`
static void *slot_from_base(void *base, size_t index, size_t stride) {
  assert(base != NULL || index == 0 || stride == 0);

  char *const origin = base;
  return &origin[index * stride];
}
46
/// resolve a slot index to the address of that slot
///
/// Dispatches on the static type of `origin`: either a `list_t_` pointer
/// (whose `base` member is indexed) or a raw buffer pointer. The constness of
/// the returned pointer follows the constness of `origin`.
#define INDEX_TO(origin, index, stride)                                        \
  (_Generic((origin),                                                          \
      const list_t_ *: slot_from_const_list,                                   \
      list_t_ *: slot_from_list,                                               \
      const void *: slot_from_const_base,                                      \
      void *: slot_from_base)((origin), (index), (stride)))
53
54size_t gv_list_append_slot_(list_t_ *list, size_t item_size) {
55 assert(list != NULL);
56
57 // do we need to expand the backing storage?
58 if (list->size == list->capacity) {
59 const size_t c = list->capacity == 0 ? 1 : (list->capacity * 2);
60 gv_list_reserve_(list, c, item_size);
61 }
62
63 assert(list->capacity > 0);
64 assert(list->size < list->capacity);
65
66 // append the new slot
67 const size_t new_slot = (list->head + list->size) % list->capacity;
68 void *const slot = INDEX_TO(list, new_slot, item_size);
69 ASAN_UNPOISON(slot, item_size);
70 ++list->size;
71
72 return new_slot;
73}
74
75size_t gv_list_prepend_slot_(list_t_ *list, size_t item_size) {
76 assert(list != NULL);
77
78 // do we need to expand the backing storage?
79 if (list->size == list->capacity) {
80 const size_t c = list->capacity == 0 ? 1 : (list->capacity * 2);
81 gv_list_reserve_(list, c, item_size);
82 }
83
84 assert(list->capacity > 0);
85 assert(list->size < list->capacity);
86
87 // prepend the new slot
88 list->head = (list->head + (list->capacity - 1)) % list->capacity;
89 void *const slot = INDEX_TO(list, list->head, item_size);
90 ASAN_UNPOISON(slot, item_size);
91 ++list->size;
92
93 return list->head;
94}
95
/// attempt to grow `list`'s backing allocation to hold at least `capacity`
/// items of `item_size` bytes each
///
/// Existing elements are preserved, including re-locating a suffix that
/// wrapped around the end of the old allocation. On failure the list is left
/// unmodified (realloc's original buffer is not overwritten).
///
/// \param list List to operate on
/// \param capacity Requested minimum number of storable items
/// \param item_size Size of each element in bytes
/// \return 0 on success, EOVERFLOW or ENOMEM on failure
static int try_reserve(list_t_ *list, size_t capacity, size_t item_size) {
  assert(list != NULL);

  // if we can already fit enough items, nothing to do
  if (list->capacity >= capacity) {
    return 0;
  }

  // will the arithmetic below overflow?
  assert(capacity > 0);
  if (SIZE_MAX / capacity < item_size) {
    return EOVERFLOW;
  }

  void *const base = realloc(list->base, capacity * item_size);
  if (base == NULL) {
    return ENOMEM;
  }

  // zero the new memory
  {
    void *const new = INDEX_TO(base, list->capacity, item_size);
    const size_t new_bytes = (capacity - list->capacity) * item_size;
    memset(new, 0, new_bytes);

    // poison the new (conceptually unallocated) memory
    ASAN_POISON(new, new_bytes);
  }

  // Do we need to shuffle the prefix upwards? E.g.
  //
  //        ┌───┬───┬───┬───┐
  //   old: │ 3 │ 4 │ 1 │ 2 │
  //        └───┴───┴─┼─┴─┼─┘
  //                  │   └───────────────┐
  //                  └───────────────┐   │
  //                                  ▼   ▼
  //        ┌───┬───┬───┬───┬───┬───┬───┬───┐
  //   new: │ 3 │ 4 │   │   │   │   │ 1 │ 2 │
  //        └───┴───┴───┴───┴───┴───┴───┴───┘
  //          a   b   c   d   e   f   g   h
  if (list->head + list->size > list->capacity) {
    // number of elements in the wrapped suffix ([1, 2] in the example)
    const size_t prefix = list->capacity - list->head;
    const size_t new_head = capacity - prefix;
    // unpoison target range, slots [g, h] in example
    void *const target = INDEX_TO(base, new_head, item_size);
    ASAN_UNPOISON(target, prefix * item_size);
    const void *const src = INDEX_TO(base, list->head, item_size);
    memmove(target, src, prefix * item_size);
    // (re-)poison new gap, slots [c, f] in example
    void *const gap_begin = INDEX_TO(base, list->size - prefix, item_size);
    ASAN_POISON(gap_begin, (list->capacity - list->size) * item_size);
    list->head = new_head;
  }

  list->base = base;
  list->capacity = capacity;
  return 0;
}
155
156bool gv_list_try_reserve_(list_t_ *list, size_t capacity, size_t item_size) {
157 assert(list != NULL);
158 return try_reserve(list, capacity, item_size) == 0;
159}
160
161size_t gv_list_get_(const list_t_ list, size_t index) {
162 assert(index < list.size && "index out of bounds");
163 return (list.head + index) % list.capacity;
164}
165
166size_t gv_list_find_(const list_t_ list, const void *needle, size_t item_size) {
167
168 for (size_t i = 0; i < list.size; ++i) {
169 const size_t slot = gv_list_get_(list, i);
170 const void *candidate = INDEX_TO(&list, slot, item_size);
171 if (memcmp(needle, candidate, item_size) == 0) {
172 return i;
173 }
174 }
175
176 return SIZE_MAX;
177}
178
179void gv_list_remove_(list_t_ *list, size_t index, size_t item_size) {
180 assert(list != NULL);
181 assert(index < list->size);
182
183 // shrink the list
184 for (size_t i = index + 1; i < list->size; ++i) {
185 const size_t dst_slot = gv_list_get_(*list, i - 1);
186 void *const dst = INDEX_TO(list, dst_slot, item_size);
187 const size_t src_slot = gv_list_get_(*list, i);
188 const void *const src = INDEX_TO(list, src_slot, item_size);
189 memcpy(dst, src, item_size);
190 }
191 const size_t truncated_slot = gv_list_get_(*list, list->size - 1);
192 void *truncated = INDEX_TO(list, truncated_slot, item_size);
193 ASAN_POISON(truncated, item_size);
194 --list->size;
195}
196
197void gv_list_clear_(list_t_ *list, size_t item_size) {
198 assert(list != NULL);
199
200 for (size_t i = 0; i < list->size; ++i) {
201 const size_t slot = gv_list_get_(*list, i);
202 void *const to_poison = INDEX_TO(list, slot, item_size);
203 ASAN_POISON(to_poison, item_size);
204 }
205
206 list->size = 0;
207
208 // opportunistically re-sync the list
209 list->head = 0;
210}
211
212void gv_list_reserve_(list_t_ *list, size_t capacity, size_t item_size) {
213 const int err = try_reserve(list, capacity, item_size);
214 if (err != 0) {
215 fprintf(stderr,
216 "failed to reserve %" PRISIZE_T " elements of size %" PRISIZE_T
217 " bytes: %s\n",
218 capacity, item_size, strerror(err));
219 graphviz_exit(EXIT_FAILURE);
220 }
221}
222
223bool gv_list_contains_(const list_t_ list, const void *needle,
224 size_t item_size) {
225 return gv_list_find_(list, needle, item_size) != SIZE_MAX;
226}
227
228list_t_ gv_list_copy_(const list_t_ list, size_t item_size) {
229 list_t_ ret = {.base = gv_calloc(list.capacity, item_size),
230 .capacity = list.capacity};
231
232 // opportunistically create the new list synced
233 for (size_t i = 0; i < list.size; ++i) {
234 const size_t slot = gv_list_get_(list, i);
235 const void *const src = INDEX_TO(&list, slot, item_size);
236 void *const dst = INDEX_TO(&ret, ret.size, item_size);
237 assert(ret.size < ret.capacity);
238 memcpy(dst, src, item_size);
239 ++ret.size;
240 }
241
242 // mark the remainder of the allocated space as inaccessible
243 void *const to_poison = INDEX_TO(&ret, ret.size, item_size);
244 const size_t to_poison_len = (ret.capacity - ret.size) * item_size;
245 ASAN_POISON(to_poison, to_poison_len);
246
247 return ret;
248}
249
251 return list.head + list.size <= list.capacity;
252}
253
/// rotate the list's storage until its first element sits at physical slot 0
///
/// After this, `list->head == 0` and the content can be treated as a plain
/// array of `list->size` elements starting at `list->base`. The rotation is
/// O(head * capacity * item_size) — acceptable because syncing is assumed to
/// be rare (see comment below).
///
/// \param list List to operate on
/// \param item_size Size of each element in bytes
void gv_list_sync_(list_t_ *list, size_t item_size) {
  assert(list != NULL);

  // Allow unrestricted access. The shuffle below accesses both allocated
  // and unallocated elements, so just let it read and write everything.
  ASAN_UNPOISON(list->base, list->capacity * item_size);

  // Shuffle the list 1-1 until it is aligned. This is not efficient, but
  // we assume this is a relatively rare operation.
  while (list->head != 0) {
    // rotate the list leftwards by 1
    assert(list->capacity > 0);
    // shuffle byte-by-byte to avoid dynamic allocation
    for (size_t i = 0; i < item_size; ++i) {
      // save the first byte of the buffer…
      uint8_t lowest;
      memcpy(&lowest, list->base, sizeof(lowest));
      // …slide the rest of the buffer down one byte…
      const size_t remainder = list->capacity * item_size - sizeof(lowest);
      memmove(list->base, (char *)list->base + sizeof(lowest), remainder);
      // …and re-append the saved byte at the end
      memcpy((char *)list->base + remainder, &lowest, sizeof(lowest));
    }
    --list->head;
  }

  /* synchronization should have ensured the list no longer wraps */
  assert(gv_list_is_contiguous_(*list));

  /* re-establish access restrictions */
  void *end = INDEX_TO(list, list->size, item_size);
  ASAN_POISON(end, (list->capacity - list->size) * item_size);
}
284
285void gv_list_sort_(list_t_ *list, int (*cmp)(const void *, const void *),
286 size_t item_size) {
287 assert(list != NULL);
288 assert(cmp != NULL);
289
290 gv_list_sync_(list, item_size);
291
292 if (list->size) {
293 qsort(list->base, list->size, item_size, cmp);
294 }
295}
296
/// swap the contents of two equally sized, non-overlapping objects
static void exchange(void *a, void *b, size_t size) {
  assert(a != NULL);
  assert(b != NULL);

  // swap one byte at a time so no dynamic allocation is needed
  char *p = a;
  char *q = b;
  for (size_t i = 0; i < size; ++i) {
    const char tmp = p[i];
    p[i] = q[i];
    q[i] = tmp;
  }
}
308
309void gv_list_reverse_(list_t_ *list, size_t item_size) {
310 assert(list != NULL);
311
312 // move from the outside inwards, swapping elements
313 for (size_t i = 0; i < list->size / 2; ++i) {
314 const size_t a = gv_list_get_(*list, i);
315 const size_t b = gv_list_get_(*list, list->size - i - 1);
316 void *const x = INDEX_TO(list, a, item_size);
317 void *const y = INDEX_TO(list, b, item_size);
318 exchange(x, y, item_size);
319 }
320}
321
322void gv_list_shrink_to_fit_(list_t_ *list, size_t item_size) {
323 assert(list != NULL);
324
325 gv_list_sync_(list, item_size);
326
327 if (list->capacity > list->size) {
328 list->base = gv_recalloc(list->base, list->capacity, list->size, item_size);
329 list->capacity = list->size;
330 }
331}
332
334 assert(list != NULL);
335 free(list->base);
336 *list = (list_t_){0};
337}
338
339void gv_list_pop_front_(list_t_ *list, void *into, size_t item_size) {
340 assert(list != NULL);
341 assert(list->size > 0);
342 assert(into != NULL);
343
344 // find and pop the first slot
345 const size_t slot = gv_list_get_(*list, 0);
346 void *const to_pop = INDEX_TO(list, slot, item_size);
347 memcpy(into, to_pop, item_size);
348 ASAN_POISON(to_pop, item_size);
349 list->head = (list->head + 1) % list->capacity;
350 --list->size;
351}
352
353void gv_list_pop_back_(list_t_ *list, void *into, size_t item_size) {
354 assert(list != NULL);
355 assert(list->size > 0);
356 assert(into != NULL);
357
358 // find and pop last slot
359 const size_t slot = gv_list_get_(*list, list->size - 1);
360 void *const to_pop = INDEX_TO(list, slot, item_size);
361 memcpy(into, to_pop, item_size);
362 ASAN_POISON(to_pop, item_size);
363 --list->size;
364}
Memory allocation wrappers that exit on failure.
static void * gv_recalloc(void *ptr, size_t old_nmemb, size_t new_nmemb, size_t size)
Definition alloc.h:73
static void * gv_calloc(size_t nmemb, size_t size)
Definition alloc.h:26
macros for interacting with Address Sanitizer
#define ASAN_POISON(addr, size)
Definition asan.h:15
#define ASAN_UNPOISON(addr, size)
Definition asan.h:22
static char * err
Definition delaunay.c:546
static NORETURN void graphviz_exit(int status)
Definition exit.h:23
static int cmp(const void *key, const void *candidate)
void free(void *)
require define api prefix
Definition gmlparse.y:17
#define SIZE_MAX
Definition gmlscan.c:347
node NULL
Definition grammar.y:181
Arithmetic helper functions.
#define SWAP(a, b)
Definition gv_math.h:131
#define exchange(h, i, j, index)
Definition dijkstra.c:45
internal implementation details of list.h
list_t_ gv_list_copy_(const list_t_ list, size_t item_size)
Definition list.c:228
static const void * slot_from_const_list(const list_t_ *list, size_t index, size_t stride)
Definition list.c:15
size_t gv_list_prepend_slot_(list_t_ *list, size_t item_size)
Definition list.c:75
void gv_list_pop_front_(list_t_ *list, void *into, size_t item_size)
Definition list.c:339
static const void * slot_from_const_base(const void *base, size_t index, size_t stride)
Definition list.c:32
#define INDEX_TO(origin, index, stride)
Definition list.c:47
bool gv_list_contains_(const list_t_ list, const void *needle, size_t item_size)
Definition list.c:223
size_t gv_list_append_slot_(list_t_ *list, size_t item_size)
Definition list.c:54
void gv_list_reserve_(list_t_ *list, size_t capacity, size_t item_size)
Definition list.c:212
void gv_list_free_(list_t_ *list)
Definition list.c:333
static void * slot_from_base(void *base, size_t index, size_t stride)
Definition list.c:40
void gv_list_clear_(list_t_ *list, size_t item_size)
Definition list.c:197
bool gv_list_is_contiguous_(const list_t_ list)
Definition list.c:250
void gv_list_shrink_to_fit_(list_t_ *list, size_t item_size)
Definition list.c:322
void gv_list_sort_(list_t_ *list, int(*cmp)(const void *, const void *), size_t item_size)
Definition list.c:285
void gv_list_reverse_(list_t_ *list, size_t item_size)
Definition list.c:309
void gv_list_pop_back_(list_t_ *list, void *into, size_t item_size)
Definition list.c:353
size_t gv_list_find_(const list_t_ list, const void *needle, size_t item_size)
Definition list.c:166
void gv_list_sync_(list_t_ *list, size_t item_size)
Definition list.c:254
bool gv_list_try_reserve_(list_t_ *list, size_t capacity, size_t item_size)
Definition list.c:156
static int try_reserve(list_t_ *list, size_t capacity, size_t item_size)
Definition list.c:96
static void * slot_from_list(list_t_ *list, size_t index, size_t stride)
Definition list.c:24
void gv_list_remove_(list_t_ *list, size_t index, size_t item_size)
Definition list.c:179
size_t gv_list_get_(const list_t_ list, size_t index)
Definition list.c:161
#define PRISIZE_T
Definition prisize_t.h:25
size_t size
size <= capacity
size_t capacity
available storage slots
void * base
(base == NULL && capacity == 0) || (base != NULL && capacity > 0)
size_t head
(capacity == 0 && head == 0) || (capacity > 0 && head < capacity)