list.h
#pragma once

#include <assert.h>
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <util/alloc.h>  /* memory allocation wrappers that exit on failure */
#include <util/asan.h>   /* macros for interacting with Address Sanitizer */
#include <util/exit.h>
#include <util/unused.h> /* abstraction for squashing compiler warnings for unused symbols */

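/* Define a list type and inline operations on it, for elements of the given
 * `type`. All generated identifiers are prefixed with `name`. This is
 * `DEFINE_LIST_WITH_DTOR` with a destructor that does nothing.
 */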
#define DEFINE_LIST(name, type) DEFINE_LIST_WITH_DTOR(name, type, name##_noop_)

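/* As DEFINE_LIST, but `dtor` is invoked on elements the list destroys itself
 * (e.g. in `name`_set, `name`_remove, `name`_clear, and when shrinking via
 * `name`_resize). Elements handed back to the caller, as in `name`_pop_back,
 * are not destroyed.
 */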
#define DEFINE_LIST_WITH_DTOR(name, type, dtor) \
  \
  typedef struct { \
    type *base; /* start of the allocation for backing memory */ \
    /* (base == NULL && capacity == 0) || (base != NULL && capacity > 0) */ \
    size_t head; /* index of the first element */ \
    /* (capacity == 0 && head == 0) || (capacity > 0 && head < capacity) */ \
    size_t size; /* number of elements in the list */ \
    /* size <= capacity */ \
    size_t capacity; /* available storage slots */ \
  } name##_t; \
  \
  /* default “do nothing” destructor */ \
  static inline UNUSED void name##_noop_(type item) { (void)item; } \
  \
  static inline size_t name##_size(const name##_t *list) { \
    assert(list != NULL); \
    return list->size; \
  } \
  \
  static inline UNUSED bool name##_is_empty(const name##_t *list) { \
    assert(list != NULL); \
    return name##_size(list) == 0; \
  } \
  \
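  /* append an item to the end of the list, returning 0 on success or an \
   * errno value (ERANGE, ENOMEM) on allocation failure \
   */ \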
  static inline int name##_try_append(name##_t *list, type item) { \
    assert(list != NULL); \
    \
    /* do we need to expand the backing storage? */ \
    if (list->size == list->capacity) { \
      const size_t c = list->capacity == 0 ? 1 : (list->capacity * 2); \
      \
      /* will the calculation of the new size overflow? */ \
      if (SIZE_MAX / c < sizeof(type)) { \
        return ERANGE; \
      } \
      \
      type *base = (type *)realloc(list->base, c * sizeof(type)); \
      if (base == NULL) { \
        return ENOMEM; \
      } \
      \
      /* zero the new memory */ \
      memset(&base[list->capacity], 0, (c - list->capacity) * sizeof(type)); \
      \
      /* poison the new (conceptually unallocated) memory */ \
      ASAN_POISON(&base[list->capacity], (c - list->capacity) * sizeof(type)); \
      \
      /* Do we need to shuffle the prefix upwards? E.g.   */ \
      /*                                                  */ \
      /*      ┌───┬───┬───┬───┐                           */ \
      /* old: │ 3 │ 4 │ 1 │ 2 │                           */ \
      /*      └───┴───┴─┼─┴─┼─┘                           */ \
      /*                │   └───────────────┐             */ \
      /*                └───────────────┐   │             */ \
      /*                                ▼   ▼             */ \
      /*      ┌───┬───┬───┬───┬───┬───┬───┬───┐           */ \
      /* new: │ 3 │ 4 │   │   │   │   │ 1 │ 2 │           */ \
      /*      └───┴───┴───┴───┴───┴───┴───┴───┘           */ \
      /*        a   b   c   d   e   f   g   h             */ \
      if (list->head + list->size > list->capacity) { \
        const size_t prefix = list->capacity - list->head; \
        const size_t new_head = c - prefix; \
        /* unpoison target range, slots [g, h] in example */ \
        ASAN_UNPOISON(&base[new_head], prefix * sizeof(type)); \
        memmove(&base[new_head], &base[list->head], prefix * sizeof(type)); \
        /* (re-)poison new gap, slots [c, f] in example */ \
        ASAN_POISON(&base[list->size - prefix], \
                    (list->capacity - list->size) * sizeof(type)); \
        list->head = new_head; \
      } \
      \
      list->base = base; \
      list->capacity = c; \
    } \
    \
    const size_t new_slot = (list->head + list->size) % list->capacity; \
    ASAN_UNPOISON(&list->base[new_slot], sizeof(type)); \
    list->base[new_slot] = item; \
    ++list->size; \
    \
    return 0; \
  } \
  \
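  /* append an item to the end of the list, exiting the program on allocation \
   * failure \
   */ \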
  static inline void name##_append(name##_t *list, type item) { \
    int rc = name##_try_append(list, item); \
    if (rc != 0) { \
      fprintf(stderr, "realloc failed: %s\n", strerror(rc)); \
      graphviz_exit(EXIT_FAILURE); \
    } \
  } \
  \
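  /* get a copy of the element stored at a given index */ \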
  static inline type name##_get(const name##_t *list, size_t index) { \
    assert(list != NULL); \
    assert(index < list->size && "index out of bounds"); \
    return list->base[(list->head + index) % list->capacity]; \
  } \
  \
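  /* get a pointer to the element stored at a given index */ \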
  static inline type *name##_at(name##_t *list, size_t index) { \
    assert(list != NULL); \
    assert(index < list->size && "index out of bounds"); \
    return &list->base[(list->head + index) % list->capacity]; \
  } \
  \
  static inline UNUSED type *name##_front(name##_t *list) { \
    assert(list != NULL); \
    assert(!name##_is_empty(list)); \
    return name##_at(list, 0); \
  } \
  \
  static inline UNUSED type *name##_back(name##_t *list) { \
    assert(list != NULL); \
    assert(!name##_is_empty(list)); \
    return name##_at(list, name##_size(list) - 1); \
  } \
  \
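  /* overwrite the element at a given index, destroying the value previously \
   * stored there \
   */ \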
  static inline UNUSED void name##_set(name##_t *list, size_t index, \
                                       type item) { \
    assert(list != NULL); \
    assert(index < name##_size(list) && "index out of bounds"); \
    type *target = name##_at(list, index); \
    dtor(*target); \
    *target = item; \
  } \
  \
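  /* remove the first element that is byte-wise equal to `item`, if any */ \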
  static inline UNUSED void name##_remove(name##_t *list, const type item) { \
    assert(list != NULL); \
    \
    for (size_t i = 0; i < list->size; ++i) { \
      /* is this the element we are looking for? */ \
      type *candidate = name##_at(list, i); \
      if (memcmp(candidate, &item, sizeof(type)) == 0) { \
        \
        /* destroy the element we are about to remove */ \
        dtor(*candidate); \
        \
        /* shrink the list */ \
        for (size_t j = i + 1; j < list->size; ++j) { \
          type *replacement = name##_at(list, j); \
          *candidate = *replacement; \
          candidate = replacement; \
        } \
        ASAN_POISON(name##_at(list, list->size - 1), sizeof(type)); \
        --list->size; \
        return; \
      } \
    } \
  } \
  \
  static inline void name##_clear(name##_t *list) { \
    assert(list != NULL); \
    \
    for (size_t i = 0; i < list->size; ++i) { \
      dtor(name##_get(list, i)); \
      ASAN_POISON(name##_at(list, i), sizeof(type)); \
    } \
    \
    list->size = 0; \
    \
    /* opportunistically re-sync the list */ \
    list->head = 0; \
  } \
  \
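  /* ensure the backing storage has room for at least `capacity` elements */ \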
  static inline UNUSED void name##_reserve(name##_t *list, size_t capacity) { \
    assert(list != NULL); \
    \
    /* if we can already fit enough items, nothing to do */ \
    if (list->capacity >= capacity) { \
      return; \
    } \
    \
    list->base = (type *)gv_recalloc(list->base, list->capacity, capacity, \
                                     sizeof(type)); \
    \
    /* Do we need to shuffle the prefix upwards? E.g.   */ \
    /*                                                  */ \
    /*      ┌───┬───┬───┬───┐                           */ \
    /* old: │ 3 │ 4 │ 1 │ 2 │                           */ \
    /*      └───┴───┴─┼─┴─┼─┘                           */ \
    /*                │   └───────────────┐             */ \
    /*                └───────────────┐   │             */ \
    /*                                ▼   ▼             */ \
    /*      ┌───┬───┬───┬───┬───┬───┬───┬───┐           */ \
    /* new: │ 3 │ 4 │   │   │   │   │ 1 │ 2 │           */ \
    /*      └───┴───┴───┴───┴───┴───┴───┴───┘           */ \
    /*        a   b   c   d   e   f   g   h             */ \
    if (list->head + list->size > list->capacity) { \
      const size_t prefix = list->capacity - list->head; \
      const size_t new_head = capacity - prefix; \
      /* unpoison target range, slots [g, h] in example */ \
      ASAN_UNPOISON(&list->base[new_head], prefix * sizeof(type)); \
      memmove(&list->base[new_head], &list->base[list->head], \
              prefix * sizeof(type)); \
      /* (re-)poison new gap, slots [c, f] in example */ \
      ASAN_POISON(&list->base[list->size - prefix], \
                  (list->capacity - list->size) * sizeof(type)); \
      list->head = new_head; \
    } \
    \
    list->capacity = capacity; \
  } \
  \
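  /* grow or shrink the list to exactly `size` elements, filling any new \
   * slots with `value` \
   */ \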
  static inline UNUSED void name##_resize(name##_t *list, size_t size, \
                                          type value) { \
    assert(list != NULL); \
    \
    if (list->size < size) { \
      /* we are expanding the list */ \
      while (list->size < size) { \
        name##_append(list, value); \
      } \
    } else if (list->size > size) { \
      /* we are shrinking the list */ \
      while (list->size > size) { \
        dtor(name##_get(list, list->size - 1)); \
        ASAN_POISON(name##_at(list, list->size - 1), sizeof(type)); \
        --list->size; \
      } \
    } \
  } \
  \
  static inline UNUSED bool name##_contains( \
      const name##_t *haystack, const type needle, \
      bool (*eq)(const type a, const type b)) { \
    assert(haystack != NULL); \
    assert(eq != NULL); \
    \
    for (size_t i = 0; i < name##_size(haystack); ++i) { \
      if (eq(name##_get(haystack, i), needle)) { \
        return true; \
      } \
    } \
    return false; \
  } \
  \
  static inline UNUSED name##_t name##_copy(const name##_t *source) { \
    assert(source != NULL); \
    \
    name##_t destination = {(type *)gv_calloc(source->capacity, sizeof(type)), \
                            0, 0, source->capacity}; \
    for (size_t i = 0; i < source->size; ++i) { \
      name##_append(&destination, name##_get(source, i)); \
    } \
    return destination; \
  } \
  \
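  /* does the list occupy a single, unwrapped span of the backing memory? */ \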
  static inline UNUSED bool name##_is_contiguous(const name##_t *list) { \
    assert(list != NULL); \
    return list->head + list->size <= list->capacity; \
  } \
  \
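  /* rotate the list so its first element sits at the start of the backing \
   * memory, making the storage contiguous \
   */ \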
  static inline void name##_sync(name##_t *list) { \
    assert(list != NULL); \
    \
    /* Allow unrestricted access. The shuffle below accesses both allocated \
     * and unallocated elements, so just let it read and write everything. \
     */ \
    ASAN_UNPOISON(list->base, list->capacity * sizeof(type)); \
    \
    /* Shuffle the list 1-1 until it is aligned. This is not efficient, but */ \
    /* we assume this is a relatively rare operation. */ \
    while (list->head != 0) { \
      /* rotate the list leftwards by 1 */ \
      assert(list->capacity > 0); \
      type replacement = list->base[0]; \
      for (size_t i = list->capacity - 1; i != SIZE_MAX; --i) { \
        type temp = list->base[i]; \
        list->base[i] = replacement; \
        replacement = temp; \
      } \
      --list->head; \
    } \
    \
    /* synchronization should have ensured the list no longer wraps */ \
    assert(name##_is_contiguous(list)); \
    \
    /* re-establish access restrictions */ \
    ASAN_POISON(&list->base[list->size], \
                (list->capacity - list->size) * sizeof(type)); \
  } \
  \
  static inline UNUSED void name##_sort( \
      name##_t *list, int (*cmp)(const type *a, const type *b)) { \
    assert(list != NULL); \
    assert(cmp != NULL); \
    \
    name##_sync(list); \
    \
    int (*compar)(const void *, const void *) = \
        (int (*)(const void *, const void *))cmp; \
    if (list->size > 0) { \
      qsort(list->base, list->size, sizeof(type), compar); \
    } \
  } \
  \
  static inline UNUSED void name##_reverse(name##_t *list) { \
    assert(list != NULL); \
    \
    for (size_t i = 0; i < name##_size(list) / 2; ++i) { \
      /* swap through pointers rather than name##_set, so `dtor` does not run \
       * on elements that are merely being moved \
       */ \
      type *const x = name##_at(list, i); \
      type *const y = name##_at(list, name##_size(list) - i - 1); \
      const type temp = *x; \
      *x = *y; \
      *y = temp; \
    } \
  } \
  \
  static inline UNUSED void name##_shrink_to_fit(name##_t *list) { \
    assert(list != NULL); \
    \
    name##_sync(list); \
    \
    if (list->capacity > list->size) { \
      list->base = (type *)gv_recalloc(list->base, list->capacity, list->size, \
                                       sizeof(type)); \
      list->capacity = list->size; \
    } \
  } \
  \
  static inline UNUSED void name##_free(name##_t *list) { \
    assert(list != NULL); \
    name##_clear(list); \
    free(list->base); \
    memset(list, 0, sizeof(*list)); \
  } \
  \
  static inline UNUSED void name##_push_back(name##_t *list, type value) { \
    name##_append(list, value); \
  } \
  \
  static inline UNUSED type name##_pop_front(name##_t *list) { \
    assert(list != NULL); \
    assert(list->size > 0); \
    \
    type value = name##_get(list, 0); \
    \
    /* do not call `dtor` because we are transferring ownership of the removed \
     * element to the caller \
     */ \
    ASAN_POISON(name##_at(list, 0), sizeof(type)); \
    list->head = (list->head + 1) % list->capacity; \
    --list->size; \
    \
    return value; \
  } \
  \
  static inline UNUSED type name##_pop_back(name##_t *list) { \
    assert(list != NULL); \
    assert(list->size > 0); \
    \
    type value = name##_get(list, list->size - 1); \
    \
    /* do not call `dtor` because we are transferring ownership of the removed \
     * element to the caller \
     */ \
    ASAN_POISON(name##_at(list, list->size - 1), sizeof(type)); \
    --list->size; \
    \
    return value; \
  } \
  \
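  /* construct a list that takes ownership of `data`, a heap allocation \
   * containing `size` elements \
   */ \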
  static inline UNUSED name##_t name##_attach(type *data, size_t size) { \
    assert(data != NULL || size == 0); \
    name##_t list = {data, 0, size, size}; \
    return list; \
  } \
  \
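  /* surrender the backing memory to the caller, who becomes responsible for \
   * freeing it; the list is left empty \
   */ \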
  static inline UNUSED type *name##_detach(name##_t *list) { \
    assert(list != NULL); \
    name##_sync(list); \
    type *data = list->base; \
    memset(list, 0, sizeof(*list)); \
    return data; \
  }
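
Example usage (a rough sketch, not itself part of the header): it assumes Graphviz's lib directory is on the include path, and the list name `ints` is arbitrary.

#include <stdio.h>
#include <util/list.h>

/* instantiate the list type and its operations for `int` elements */
DEFINE_LIST(ints, int)

int main(void) {
  ints_t xs = {0}; /* a zero-initialized struct is a valid empty list */

  for (int i = 0; i < 5; ++i) {
    ints_append(&xs, i * i); /* exits the program if allocation fails */
  }

  printf("size: %zu\n", ints_size(&xs));
  printf("front: %d, back: %d\n", *ints_front(&xs), *ints_back(&xs));

  /* drain the list from the front */
  while (!ints_is_empty(&xs)) {
    printf("%d ", ints_pop_front(&xs));
  }
  printf("\n");

  ints_free(&xs); /* release the backing memory */
  return 0;
}

For element types that own resources, `DEFINE_LIST_WITH_DTOR(strs, char *, free)` would make `strs_clear`, `strs_remove`, and `strs_free` release each string, while `strs_pop_back` still transfers ownership of the popped element to the caller.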