Graphviz 14.1.4~dev.20260320.0055
Loading...
Searching...
No Matches
list.c
Go to the documentation of this file.
1#ifndef NO_CONFIG // defined by test_list.c
2#include "config.h"
3#endif
4
5#include <assert.h>
6#include <errno.h>
7#include <stdbool.h>
8#include <stdint.h>
9#include <stdio.h>
10#include <stdlib.h>
11#include <string.h>
12#include <util/alloc.h>
13#include <util/asan.h>
14#include <util/exit.h>
15#include <util/gv_math.h>
16#include <util/list-private.h>
17#include <util/prisize_t.h>
18
19static const void *slot_from_const_list(const list_t_ *list, size_t index,
20 size_t stride) {
21 assert(list != NULL);
22 assert(list->base != NULL || index == 0 || stride == 0);
23
24 const char *const base = list->base;
25 return base + index * stride;
26}
27
28static void *slot_from_list(list_t_ *list, size_t index, size_t stride) {
29 assert(list != NULL);
30 assert(list->base != NULL || index == 0 || stride == 0);
31
32 char *const base = list->base;
33 return base + index * stride;
34}
35
// resolve `index` (in `stride`-byte units) to a read-only pointer into a raw
// buffer `base`
static const void *slot_from_const_base(const void *base, size_t index,
                                        size_t stride) {
  assert(base != NULL || index == 0 || stride == 0);

  // index the buffer byte-wise
  const size_t offset = index * stride;
  const char *const start = base;
  return start + offset;
}
43
// resolve `index` (in `stride`-byte units) to a writable pointer into a raw
// buffer `base`
static void *slot_from_base(void *base, size_t index, size_t stride) {
  assert(base != NULL || index == 0 || stride == 0);

  // index the buffer byte-wise
  const size_t offset = index * stride;
  char *const start = base;
  return start + offset;
}
50
// Resolve `index` (in units of `stride` bytes) to a pointer into `origin`'s
// storage. `origin` may be a (const) `list_t_ *`, in which case its `base`
// buffer is indexed, or a raw (const) `void *` buffer. The constness of the
// returned pointer follows the constness of `origin`.
#define INDEX_TO(origin, index, stride) \
  (_Generic((origin), \
      const list_t_ *: slot_from_const_list, \
      list_t_ *: slot_from_list, \
      const void *: slot_from_const_base, \
      void *: slot_from_base)((origin), (index), (stride)))
57
58size_t gv_list_append_slot_(list_t_ *list, size_t item_size) {
59 assert(list != NULL);
60
61 // do we need to expand the backing storage?
62 if (list->size == list->capacity) {
63 const size_t c = list->capacity == 0 ? 1 : (list->capacity * 2);
64 gv_list_reserve_(list, c, item_size);
65 }
66
67 assert(list->capacity > 0);
68 assert(list->size < list->capacity);
69
70 // append the new slot
71 const size_t new_slot = (list->head + list->size) % list->capacity;
72 void *const slot = INDEX_TO(list, new_slot, item_size);
73 ASAN_UNPOISON(slot, item_size);
74 ++list->size;
75
76 return new_slot;
77}
78
79size_t gv_list_prepend_slot_(list_t_ *list, size_t item_size) {
80 assert(list != NULL);
81
82 // do we need to expand the backing storage?
83 if (list->size == list->capacity) {
84 const size_t c = list->capacity == 0 ? 1 : (list->capacity * 2);
85 gv_list_reserve_(list, c, item_size);
86 }
87
88 assert(list->capacity > 0);
89 assert(list->size < list->capacity);
90
91 // prepend the new slot
92 list->head = (list->head + (list->capacity - 1)) % list->capacity;
93 void *const slot = INDEX_TO(list, list->head, item_size);
94 ASAN_UNPOISON(slot, item_size);
95 ++list->size;
96
97 return list->head;
98}
99
// Attempt to expand `list`'s backing storage so it can hold at least
// `capacity` items of `item_size` bytes each. Returns 0 on success or an
// errno-style code (EOVERFLOW, ENOMEM) on failure; on failure `list` is left
// unmodified and still valid.
static int try_reserve(list_t_ *list, size_t capacity, size_t item_size) {
  assert(list != NULL);

  // if we can already fit enough items, nothing to do
  if (list->capacity >= capacity) {
    return 0;
  }

  // will the arithmetic below overflow?
  assert(capacity > 0);
  if (SIZE_MAX / capacity < item_size) {
    return EOVERFLOW;
  }

  // on failure, `realloc` leaves the original allocation intact, so bailing
  // out here neither leaks nor invalidates `list->base`
  void *const base = realloc(list->base, capacity * item_size);
  if (base == NULL && item_size > 0) {
    return ENOMEM;
  }

  // zero the new memory
  {
    void *const new = INDEX_TO(base, list->capacity, item_size);
    const size_t new_bytes = (capacity - list->capacity) * item_size;
    if (new_bytes > 0) { // `new_bytes` can be 0 if `item_size == 0`
      memset(new, 0, new_bytes);
    }

    // poison the new (conceptually unallocated) memory
    ASAN_POISON(new, new_bytes);
  }

  // Do we need to shuffle the prefix upwards? E.g.
  //
  //        ┌───┬───┬───┬───┐
  //   old: │ 3 │ 4 │ 1 │ 2 │
  //        └───┴───┴─┼─┴─┼─┘
  //                  │   └───────────────┐
  //                  └───────────────┐   │
  //                                  ▼   ▼
  //        ┌───┬───┬───┬───┬───┬───┬───┬───┐
  //   new: │ 3 │ 4 │   │   │   │   │ 1 │ 2 │
  //        └───┴───┴───┴───┴───┴───┴───┴───┘
  //          a   b   c   d   e   f   g   h
  if (list->head + list->size > list->capacity) {
    // number of elements between `head` and the end of the old buffer
    const size_t prefix = list->capacity - list->head;
    // the wrapped prefix moves to the very end of the new buffer
    const size_t new_head = capacity - prefix;
    // unpoison target range, slots [g, h] in example
    void *const target = INDEX_TO(base, new_head, item_size);
    ASAN_UNPOISON(target, prefix * item_size);
    const void *const src = INDEX_TO(base, list->head, item_size);
    if (prefix * item_size > 0) {
      // `target` and `src` can be null when `item_size == 0`, and `memmove`
      // would then be Undefined Behavior
      memmove(target, src, prefix * item_size);
    }
    // (re-)poison new gap, slots [c, f] in example
    void *const gap_begin = INDEX_TO(base, list->size - prefix, item_size);
    ASAN_POISON(gap_begin, (list->capacity - list->size) * item_size);
    list->head = new_head;
  }

  // commit the new allocation
  list->base = base;
  list->capacity = capacity;
  return 0;
}
165
166bool gv_list_try_append_(list_t_ *list, const void *item, size_t item_size) {
167 assert(list != NULL);
168 assert(item != NULL);
169
170 // do we need to expand the backing storage?
171 if (list->size == list->capacity) {
172 do {
173 // can we attempt doubling without integer overflow?
174 if (SIZE_MAX / 2 >= list->capacity) {
175 const size_t c = list->capacity == 0 ? 1 : (list->capacity * 2);
176 if (try_reserve(list, c, item_size) == 0) {
177 // success
178 break;
179 }
180 }
181
182 // try a more conservative expansion
183 if (SIZE_MAX - 1 >= list->capacity) {
184 if (try_reserve(list, list->capacity + 1, item_size) == 0) {
185 // success
186 break;
187 }
188 }
189
190 // failed to expand the list
191 return false;
192 } while (0);
193 }
194
195 assert(list->size < list->capacity);
196
197 // we can now append, knowing it will not require backing storage expansion
198 const size_t new_slot = (list->head + list->size) % list->capacity;
199 void *const slot = INDEX_TO(list, new_slot, item_size);
200 ASAN_UNPOISON(slot, item_size);
201 if (item_size > 0) {
202 memcpy(slot, item, item_size);
203 }
204 ++list->size;
205
206 return true;
207}
208
209size_t gv_list_get_(const list_t_ list, size_t index) {
210 assert(index < list.size && "index out of bounds");
211 return (list.head + index) % list.capacity;
212}
213
214size_t gv_list_find_(const list_t_ list, const void *needle, size_t item_size) {
215
216 for (size_t i = 0; i < list.size; ++i) {
217 const size_t slot = gv_list_get_(list, i);
218 const void *candidate = INDEX_TO(&list, slot, item_size);
219 if (item_size == 0 || memcmp(needle, candidate, item_size) == 0) {
220 return i;
221 }
222 }
223
224 return SIZE_MAX;
225}
226
227void gv_list_remove_(list_t_ *list, size_t index, size_t item_size) {
228 assert(list != NULL);
229 assert(index < list->size);
230
231 // shrink the list
232 for (size_t i = index + 1; i < list->size; ++i) {
233 const size_t dst_slot = gv_list_get_(*list, i - 1);
234 void *const dst = INDEX_TO(list, dst_slot, item_size);
235 const size_t src_slot = gv_list_get_(*list, i);
236 const void *const src = INDEX_TO(list, src_slot, item_size);
237 if (item_size > 0) {
238 memcpy(dst, src, item_size);
239 }
240 }
241 const size_t truncated_slot = gv_list_get_(*list, list->size - 1);
242 void *truncated = INDEX_TO(list, truncated_slot, item_size);
243 ASAN_POISON(truncated, item_size);
244 --list->size;
245}
246
247void gv_list_clear_(list_t_ *list, size_t item_size) {
248 assert(list != NULL);
249
250 for (size_t i = 0; i < list->size; ++i) {
251 const size_t slot = gv_list_get_(*list, i);
252 void *const to_poison = INDEX_TO(list, slot, item_size);
253 ASAN_POISON(to_poison, item_size);
254 }
255
256 list->size = 0;
257
258 // opportunistically re-sync the list
259 list->head = 0;
260}
261
262void gv_list_reserve_(list_t_ *list, size_t capacity, size_t item_size) {
263 const int err = try_reserve(list, capacity, item_size);
264 if (err != 0) {
265 fprintf(stderr,
266 "failed to reserve %" PRISIZE_T " elements of size %" PRISIZE_T
267 " bytes: %s\n",
268 capacity, item_size, strerror(err));
269 graphviz_exit(EXIT_FAILURE);
270 }
271}
272
273bool gv_list_contains_(const list_t_ list, const void *needle,
274 size_t item_size) {
275 return gv_list_find_(list, needle, item_size) != SIZE_MAX;
276}
277
278list_t_ gv_list_copy_(const list_t_ list, size_t item_size) {
279 list_t_ ret = {.base = gv_calloc(list.capacity, item_size),
280 .capacity = list.capacity};
281
282 // opportunistically create the new list synced
283 for (size_t i = 0; i < list.size; ++i) {
284 const size_t slot = gv_list_get_(list, i);
285 const void *const src = INDEX_TO(&list, slot, item_size);
286 void *const dst = INDEX_TO(&ret, ret.size, item_size);
287 assert(ret.size < ret.capacity);
288 if (item_size > 0) {
289 memcpy(dst, src, item_size);
290 }
291 ++ret.size;
292 }
293
294 // mark the remainder of the allocated space as inaccessible
295 void *const to_poison = INDEX_TO(&ret, ret.size, item_size);
296 const size_t to_poison_len = (ret.capacity - ret.size) * item_size;
297 ASAN_POISON(to_poison, to_poison_len);
298
299 return ret;
300}
301
303 return list.head + list.size <= list.capacity;
304}
305
// Rotate `list`'s contents so its first element sits at offset 0 of the
// backing buffer (`head == 0`), making the in-use region contiguous.
void gv_list_sync_(list_t_ *list, size_t item_size) {
  assert(list != NULL);

  // Allow unrestricted access. The shuffle below accesses both allocated
  // and unallocated elements, so just let it read and write everything.
  ASAN_UNPOISON(list->base, list->capacity * item_size);

  // Shuffle the list 1-1 until it is aligned. This is not efficient, but
  // we assume this is a relatively rare operation.
  while (list->head != 0) {
    // rotate the list leftwards by 1
    assert(list->capacity > 0);
    // shuffle byte-by-byte to avoid dynamic allocation
    for (size_t i = 0; i < item_size; ++i) {
      // save the first byte of the buffer…
      uint8_t lowest;
      memcpy(&lowest, list->base, sizeof(lowest));
      // …slide everything else down one byte…
      const size_t remainder = list->capacity * item_size - sizeof(lowest);
      memmove(list->base, (char *)list->base + sizeof(lowest), remainder);
      // …and re-append the saved byte at the end; after `item_size`
      // iterations the buffer has been rotated left by one whole element
      memcpy((char *)list->base + remainder, &lowest, sizeof(lowest));
    }
    --list->head;
  }

  /* synchronization should have ensured the list no longer wraps */
  assert(gv_list_is_contiguous_(*list));

  /* re-establish access restrictions */
  void *end = INDEX_TO(list, list->size, item_size);
  ASAN_POISON(end, (list->capacity - list->size) * item_size);
}
336
337void gv_list_sort_(list_t_ *list, int (*cmp)(const void *, const void *),
338 size_t item_size) {
339 assert(list != NULL);
340 assert(cmp != NULL);
341
342 gv_list_sync_(list, item_size);
343
344 if (list->size > 0 && item_size > 0) {
345 qsort(list->base, list->size, item_size, cmp);
346 }
347}
348
// swap the contents of two equally-sized, non-overlapping objects
static void exchange(void *a, void *b, size_t size) {
  assert(a != NULL);
  assert(b != NULL);

  char *const first = a;
  char *const second = b;

  // swap byte-by-byte to avoid any dynamic allocation
  for (size_t i = 0; i < size; ++i) {
    const char tmp = first[i];
    first[i] = second[i];
    second[i] = tmp;
  }
}
360
361void gv_list_reverse_(list_t_ *list, size_t item_size) {
362 assert(list != NULL);
363
364 // move from the outside inwards, swapping elements
365 for (size_t i = 0; i < list->size / 2; ++i) {
366 const size_t a = gv_list_get_(*list, i);
367 const size_t b = gv_list_get_(*list, list->size - i - 1);
368 void *const x = INDEX_TO(list, a, item_size);
369 void *const y = INDEX_TO(list, b, item_size);
370 exchange(x, y, item_size);
371 }
372}
373
374void gv_list_shrink_to_fit_(list_t_ *list, size_t item_size) {
375 assert(list != NULL);
376
377 gv_list_sync_(list, item_size);
378
379 if (list->capacity > list->size) {
380 list->base = gv_recalloc(list->base, list->capacity, list->size, item_size);
381 list->capacity = list->size;
382 }
383}
384
386 assert(list != NULL);
387 free(list->base);
388 *list = (list_t_){0};
389}
390
391void gv_list_pop_front_(list_t_ *list, void *into, size_t item_size) {
392 assert(list != NULL);
393 assert(list->size > 0);
394 assert(into != NULL);
395
396 // find and pop the first slot
397 const size_t slot = gv_list_get_(*list, 0);
398 void *const to_pop = INDEX_TO(list, slot, item_size);
399 if (item_size > 0) {
400 memcpy(into, to_pop, item_size);
401 }
402 ASAN_POISON(to_pop, item_size);
403 list->head = (list->head + 1) % list->capacity;
404 --list->size;
405}
406
407void gv_list_pop_back_(list_t_ *list, void *into, size_t item_size) {
408 assert(list != NULL);
409 assert(list->size > 0);
410 assert(into != NULL);
411
412 // find and pop last slot
413 const size_t slot = gv_list_get_(*list, list->size - 1);
414 void *const to_pop = INDEX_TO(list, slot, item_size);
415 if (item_size > 0) {
416 memcpy(into, to_pop, item_size);
417 }
418 ASAN_POISON(to_pop, item_size);
419 --list->size;
420}
421
422void gv_list_detach_(list_t_ *list, void *datap, size_t *sizep,
423 size_t item_size) {
424 assert(list != NULL);
425 assert(datap != NULL);
426
427 gv_list_sync_(list, item_size);
428 memcpy(datap, &list->base, sizeof(void *));
429 if (sizep != NULL) {
430 *sizep = list->size;
431 }
432
433 *list = (list_t_){0};
434}
Memory allocation wrappers that exit on failure.
static void * gv_recalloc(void *ptr, size_t old_nmemb, size_t new_nmemb, size_t size)
Definition alloc.h:73
static void * gv_calloc(size_t nmemb, size_t size)
Definition alloc.h:26
macros for interacting with Address Sanitizer
#define ASAN_POISON(addr, size)
Definition asan.h:15
#define ASAN_UNPOISON(addr, size)
Definition asan.h:22
static char * err
Definition delaunay.c:532
static NORETURN void graphviz_exit(int status)
Definition exit.h:23
static int cmp(const void *key, const void *candidate)
void free(void *)
require define api prefix
Definition gmlparse.y:17
#define SIZE_MAX
Definition gmlscan.c:347
node NULL
Definition grammar.y:181
Arithmetic helper functions.
#define SWAP(a, b)
Definition gv_math.h:134
internal implementation details of list.h
static void exchange(void *a, void *b, size_t size)
Definition list.c:349
list_t_ gv_list_copy_(const list_t_ list, size_t item_size)
Definition list.c:278
bool gv_list_try_append_(list_t_ *list, const void *item, size_t item_size)
Definition list.c:166
static const void * slot_from_const_list(const list_t_ *list, size_t index, size_t stride)
Definition list.c:19
size_t gv_list_prepend_slot_(list_t_ *list, size_t item_size)
Definition list.c:79
void gv_list_pop_front_(list_t_ *list, void *into, size_t item_size)
Definition list.c:391
static const void * slot_from_const_base(const void *base, size_t index, size_t stride)
Definition list.c:36
#define INDEX_TO(origin, index, stride)
Definition list.c:51
bool gv_list_contains_(const list_t_ list, const void *needle, size_t item_size)
Definition list.c:273
size_t gv_list_append_slot_(list_t_ *list, size_t item_size)
Definition list.c:58
void gv_list_reserve_(list_t_ *list, size_t capacity, size_t item_size)
Definition list.c:262
void gv_list_free_(list_t_ *list)
Definition list.c:385
static void * slot_from_base(void *base, size_t index, size_t stride)
Definition list.c:44
void gv_list_clear_(list_t_ *list, size_t item_size)
Definition list.c:247
void gv_list_detach_(list_t_ *list, void *datap, size_t *sizep, size_t item_size)
Definition list.c:422
bool gv_list_is_contiguous_(const list_t_ list)
Definition list.c:302
void gv_list_shrink_to_fit_(list_t_ *list, size_t item_size)
Definition list.c:374
void gv_list_sort_(list_t_ *list, int(*cmp)(const void *, const void *), size_t item_size)
Definition list.c:337
void gv_list_reverse_(list_t_ *list, size_t item_size)
Definition list.c:361
void gv_list_pop_back_(list_t_ *list, void *into, size_t item_size)
Definition list.c:407
size_t gv_list_find_(const list_t_ list, const void *needle, size_t item_size)
Definition list.c:214
void gv_list_sync_(list_t_ *list, size_t item_size)
Definition list.c:306
static int try_reserve(list_t_ *list, size_t capacity, size_t item_size)
Definition list.c:100
static void * slot_from_list(list_t_ *list, size_t index, size_t stride)
Definition list.c:28
void gv_list_remove_(list_t_ *list, size_t index, size_t item_size)
Definition list.c:227
size_t gv_list_get_(const list_t_ list, size_t index)
Definition list.c:209
#define PRISIZE_T
Definition prisize_t.h:25
Definition utils.c:753
size_t size
size <= capacity
size_t capacity
available storage slots
void * base
(base == NULL && capacity == 0) || (base != NULL && capacity > 0)
size_t head
(capacity == 0 && head == 0) || (capacity > 0 && head < capacity)