18 assert(list->
base !=
NULL || index == 0 || stride == 0);
20 const char *
const base = list->
base;
21 return base + index * stride;
26 assert(list->
base !=
NULL || index == 0 || stride == 0);
28 char *
const base = list->
base;
29 return base + index * stride;
34 assert(base !=
NULL || index == 0 || stride == 0);
36 const char *
const b = base;
37 return b + index * stride;
41 assert(base !=
NULL || index == 0 || stride == 0);
44 return b + index * stride;
47#define INDEX_TO(origin, index, stride) \
49 const list_t_ *: slot_from_const_list, \
50 list_t_ *: slot_from_list, \
51 const void *: slot_from_const_base, \
52 void *: slot_from_base)((origin), (index), (stride)))
68 void *
const slot =
INDEX_TO(list, new_slot, item_size);
89 void *
const slot =
INDEX_TO(list, list->
head, item_size);
105 assert(capacity > 0);
106 if (
SIZE_MAX / capacity < item_size) {
110 void *
const base = realloc(list->
base, capacity * item_size);
118 const size_t new_bytes = (capacity - list->
capacity) * item_size;
119 memset(
new, 0, new_bytes);
139 const size_t new_head = capacity -
prefix;
141 void *
const target =
INDEX_TO(base, new_head, item_size);
143 const void *
const src =
INDEX_TO(base, list->
head, item_size);
144 memmove(target, src,
prefix * item_size);
148 list->
head = new_head;
157 assert(list !=
NULL);
158 return try_reserve(list, capacity, item_size) == 0;
162 assert(index < list.
size &&
"index out of bounds");
168 for (
size_t i = 0; i < list.
size; ++i) {
170 const void *candidate =
INDEX_TO(&list, slot, item_size);
171 if (memcmp(needle, candidate, item_size) == 0) {
180 assert(list !=
NULL);
181 assert(index < list->size);
184 for (
size_t i = index + 1; i < list->
size; ++i) {
186 void *
const dst =
INDEX_TO(list, dst_slot, item_size);
188 const void *
const src =
INDEX_TO(list, src_slot, item_size);
189 memcpy(dst, src, item_size);
192 void *truncated =
INDEX_TO(list, truncated_slot, item_size);
198 assert(list !=
NULL);
200 for (
size_t i = 0; i < list->
size; ++i) {
202 void *
const to_poison =
INDEX_TO(list, slot, item_size);
218 capacity, item_size, strerror(
err));
233 for (
size_t i = 0; i < list.
size; ++i) {
235 const void *
const src =
INDEX_TO(&list, slot, item_size);
238 memcpy(dst, src, item_size);
243 void *
const to_poison =
INDEX_TO(&ret, ret.
size, item_size);
244 const size_t to_poison_len = (ret.
capacity - ret.
size) * item_size;
255 assert(list !=
NULL);
263 while (list->
head != 0) {
267 for (
size_t i = 0; i < item_size; ++i) {
269 memcpy(&lowest, list->
base,
sizeof(lowest));
270 const size_t remainder = list->
capacity * item_size -
sizeof(lowest);
271 memmove(list->
base, (
char *)list->
base +
sizeof(lowest), remainder);
272 memcpy((
char *)list->
base + remainder, &lowest,
sizeof(lowest));
287 assert(list !=
NULL);
297static void exchange(
void *a,
void *b,
size_t size) {
304 for (
size_t i = 0; i < size; ++i) {
310 assert(list !=
NULL);
313 for (
size_t i = 0; i < list->
size / 2; ++i) {
316 void *
const x =
INDEX_TO(list, a, item_size);
317 void *
const y =
INDEX_TO(list, b, item_size);
323 assert(list !=
NULL);
334 assert(list !=
NULL);
340 assert(list !=
NULL);
341 assert(list->
size > 0);
342 assert(into !=
NULL);
346 void *
const to_pop =
INDEX_TO(list, slot, item_size);
347 memcpy(into, to_pop, item_size);
354 assert(list !=
NULL);
355 assert(list->
size > 0);
356 assert(into !=
NULL);
360 void *
const to_pop =
INDEX_TO(list, slot, item_size);
361 memcpy(into, to_pop, item_size);
Memory allocation wrappers that exit on failure.
static void * gv_recalloc(void *ptr, size_t old_nmemb, size_t new_nmemb, size_t size)
static void * gv_calloc(size_t nmemb, size_t size)
Macros for interacting with AddressSanitizer
#define ASAN_POISON(addr, size)
#define ASAN_UNPOISON(addr, size)
static NORETURN void graphviz_exit(int status)
static int cmp(const void *key, const void *candidate)
Requires the API prefix to be defined before inclusion.
Arithmetic helper functions.
#define exchange(h, i, j, index)
internal implementation details of list.h
list_t_ gv_list_copy_(const list_t_ list, size_t item_size)
static const void * slot_from_const_list(const list_t_ *list, size_t index, size_t stride)
size_t gv_list_prepend_slot_(list_t_ *list, size_t item_size)
void gv_list_pop_front_(list_t_ *list, void *into, size_t item_size)
static const void * slot_from_const_base(const void *base, size_t index, size_t stride)
#define INDEX_TO(origin, index, stride)
bool gv_list_contains_(const list_t_ list, const void *needle, size_t item_size)
size_t gv_list_append_slot_(list_t_ *list, size_t item_size)
void gv_list_reserve_(list_t_ *list, size_t capacity, size_t item_size)
void gv_list_free_(list_t_ *list)
static void * slot_from_base(void *base, size_t index, size_t stride)
void gv_list_clear_(list_t_ *list, size_t item_size)
bool gv_list_is_contiguous_(const list_t_ list)
void gv_list_shrink_to_fit_(list_t_ *list, size_t item_size)
void gv_list_sort_(list_t_ *list, int(*cmp)(const void *, const void *), size_t item_size)
void gv_list_reverse_(list_t_ *list, size_t item_size)
void gv_list_pop_back_(list_t_ *list, void *into, size_t item_size)
size_t gv_list_find_(const list_t_ list, const void *needle, size_t item_size)
void gv_list_sync_(list_t_ *list, size_t item_size)
bool gv_list_try_reserve_(list_t_ *list, size_t capacity, size_t item_size)
static int try_reserve(list_t_ *list, size_t capacity, size_t item_size)
static void * slot_from_list(list_t_ *list, size_t index, size_t stride)
void gv_list_remove_(list_t_ *list, size_t index, size_t item_size)
size_t gv_list_get_(const list_t_ list, size_t index)
size_t size
size <= capacity
size_t capacity
available storage slots
void * base
(base == NULL && capacity == 0) || (base != NULL && capacity > 0)
size_t head
(capacity == 0 && head == 0) || (capacity > 0 && head < capacity)