
Searched refs:SIZE_SZ (Results 1 – 9 of 9) sorted by relevance

/glibc-2.36/sysdeps/generic/
malloc-alignment.h:27  #define MALLOC_ALIGNMENT (2 * SIZE_SZ < __alignof__ (long double) \
malloc-alignment.h:28                            ? __alignof__ (long double) : 2 * SIZE_SZ)
malloc-size.h:57       #define SIZE_SZ (sizeof (INTERNAL_SIZE_T))   [macro definition]
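
The two generic definitions above fix the allocator's basic size unit: SIZE_SZ is the width of one chunk header word (sizeof (INTERNAL_SIZE_T), which defaults to size_t), and MALLOC_ALIGNMENT is the larger of 2 * SIZE_SZ and the alignment of long double. A minimal standalone sketch of the same computation, assuming the default INTERNAL_SIZE_T == size_t; the macros below are local mirrors for illustration, not the glibc headers themselves:

#include <stddef.h>
#include <stdio.h>

/* Local mirror of the generic glibc definitions, assuming the default
   INTERNAL_SIZE_T == size_t.  For illustration only.  */
#define INTERNAL_SIZE_T size_t
#define SIZE_SZ (sizeof (INTERNAL_SIZE_T))
#define MALLOC_ALIGNMENT (2 * SIZE_SZ < __alignof__ (long double) \
                          ? __alignof__ (long double) : 2 * SIZE_SZ)

int
main (void)
{
  /* On x86-64 this prints SIZE_SZ = 8 and MALLOC_ALIGNMENT = 16;
     on 32-bit targets, typically 4 and 8 (or 16 where long double
     is 16-byte aligned).  */
  printf ("SIZE_SZ          = %zu\n", SIZE_SZ);
  printf ("MALLOC_ALIGNMENT = %zu\n", (size_t) MALLOC_ALIGNMENT);
  return 0;
}
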
/glibc-2.36/malloc/
malloc.c:313    # define tidx2usize(idx) (((size_t) idx) * MALLOC_ALIGNMENT + MINSIZE - SIZE_SZ)
malloc.c:852    #define DEFAULT_MXFAST (64 * SIZE_SZ / 4)
malloc.c:1295   #define CHUNK_HDR_SZ (2 * SIZE_SZ)
malloc.c:1327   (((req) + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE) ? \
malloc.c:1329   ((req) + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK)
malloc.c:1460   (__MTAG_GRANULE_SIZE > SIZE_SZ && __glibc_unlikely (mtag_enabled) ? \
malloc.c:1462   chunksize (p) - CHUNK_HDR_SZ + (chunk_is_mmapped (p) ? 0 : SIZE_SZ))
malloc.c:1611   (SIZE_SZ == 8 ? largebin_index_64 (sz) \
malloc.c:1742   ((((unsigned int) (sz)) >> (SIZE_SZ == 8 ? 4 : 3)) - 2)
malloc.c:1746   #define MAX_FAST_SIZE (80 * SIZE_SZ / 4)
[all …]
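
Most of the malloc.c hits above are chunk-layout arithmetic built on SIZE_SZ: CHUNK_HDR_SZ is two header words, the fragment at lines 1327 – 1329 is the core of request2size (round a request up to a usable chunk size, never below MINSIZE), and DEFAULT_MXFAST / MAX_FAST_SIZE scale the fastbin limits with the word size. A worked sketch of that arithmetic, hard-coding the usual 64-bit values SIZE_SZ = 8, MALLOC_ALIGNMENT = 16 and MINSIZE = 32 purely for illustration:

#include <stddef.h>
#include <stdio.h>

/* Hard-coded 64-bit layout constants, for illustration only.  */
#define SIZE_SZ            ((size_t) 8)
#define MALLOC_ALIGNMENT   ((size_t) 16)
#define MALLOC_ALIGN_MASK  (MALLOC_ALIGNMENT - 1)
#define MINSIZE            ((size_t) 32)

/* Same shape as the macro at malloc.c:1327-1329: requests too small
   for a minimal chunk become MINSIZE; everything else is the request
   plus one size word, rounded up to MALLOC_ALIGNMENT.  */
static size_t
request2size (size_t req)
{
  return (req + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE)
         ? MINSIZE
         : (req + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK;
}

int
main (void)
{
  /* malloc (24) fits in a 32-byte chunk because the user data may
     overlap the next chunk's prev_size field; malloc (25) needs 48.  */
  printf ("request2size (24) = %zu\n", request2size (24));   /* 32 */
  printf ("request2size (25) = %zu\n", request2size (25));   /* 48 */
  printf ("DEFAULT_MXFAST    = %zu\n", 64 * SIZE_SZ / 4);    /* 128 */
  printf ("MAX_FAST_SIZE     = %zu\n", 80 * SIZE_SZ / 4);    /* 160 */
  return 0;
}
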
arena.c:86      char pad[-3 * SIZE_SZ & MALLOC_ALIGN_MASK];
arena.c:92      + 2 * SIZE_SZ) % MALLOC_ALIGNMENT
arena.c:654     prev_size = prev_heap->size - (MINSIZE - 2 * SIZE_SZ);          [in heap_trim()]
arena.c:661     new_size = chunksize (p) + (MINSIZE - 2 * SIZE_SZ) + misalign;  [in heap_trim()]
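
In arena.c, SIZE_SZ appears in the heap_info padding and in the heap-trimming arithmetic. The expression -3 * SIZE_SZ & MALLOC_ALIGN_MASK at line 86 is the usual modular trick for "padding needed to round 3 * SIZE_SZ up to a MALLOC_ALIGNMENT boundary", and the fragment at line 92 belongs to a compile-time check that (sizeof (heap_info) + 2 * SIZE_SZ) % MALLOC_ALIGNMENT is zero. A quick check of that arithmetic, again with the 64-bit values hard-coded for illustration:

#include <stddef.h>
#include <stdio.h>

/* Hard-coded 64-bit values, for illustration only.  */
#define SIZE_SZ            ((size_t) 8)
#define MALLOC_ALIGNMENT   ((size_t) 16)
#define MALLOC_ALIGN_MASK  (MALLOC_ALIGNMENT - 1)
#define MINSIZE            ((size_t) 32)

int
main (void)
{
  /* arena.c:86 -- padding so that 3 * SIZE_SZ plus the pad is a
     multiple of MALLOC_ALIGNMENT (8 bytes with these values).  */
  size_t pad = -3 * SIZE_SZ & MALLOC_ALIGN_MASK;
  printf ("pad = %zu\n", pad);
  printf ("(3 * SIZE_SZ + pad) %% MALLOC_ALIGNMENT = %zu\n",
          (3 * SIZE_SZ + pad) % MALLOC_ALIGNMENT);   /* 0 */

  /* arena.c:654/661 -- the MINSIZE - 2 * SIZE_SZ correction used by
     heap_trim: 16 bytes with these values.  */
  printf ("MINSIZE - 2 * SIZE_SZ = %zu\n", MINSIZE - 2 * SIZE_SZ);
  return 0;
}
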
malloc-debug.c:249   if (bytes > osize - SIZE_SZ)        [in strong_alias()]
malloc-debug.c:250   bytes = osize - SIZE_SZ;            [in strong_alias()]
malloc-debug.c:412   return chunksize (p) - SIZE_SZ;     [in strong_alias()]
malloc-check.c:297   if (oldsize - SIZE_SZ >= chnb)      [in realloc_check()]
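
The debug and check wrappers above all report a chunk's usable size as chunksize (p) - SIZE_SZ, i.e. everything past the chunk's own size word, while the malloc.c version at lines 1460 – 1462 additionally special-cases mmapped chunks and memory tagging. The effect is observable through malloc_usable_size; the numbers below assume a 64-bit glibc without memory tagging and may differ elsewhere:

#include <malloc.h>
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  /* With the 64-bit request2size arithmetic shown earlier, malloc (24)
     is served from a 32-byte chunk and malloc (25) from a 48-byte one,
     so the reported usable sizes are 24 and 40 (chunksize - SIZE_SZ).  */
  void *p = malloc (24);
  void *q = malloc (25);
  printf ("malloc_usable_size (malloc (24)) = %zu\n", malloc_usable_size (p));
  printf ("malloc_usable_size (malloc (25)) = %zu\n", malloc_usable_size (q));
  free (p);
  free (q);
  return 0;
}
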
/glibc-2.36/ChangeLog.old/
ChangeLog.16:1035   * malloc/malloc.c (MALLOC_ALIGNMENT): Revert to (2 * SIZE_SZ) value.
ChangeLog.16:1044   MALLOC_ALIGNMENT > 2 * SIZE_SZ.
ChangeLog.16:1180   if long double is more aligned than 2 * SIZE_SZ.
ChangeLog.16:8429   don't clear SIZE_SZ bytes more than should be cleared.
ChangeLog.18:18937  * malloc/malloc.c (INTERNAL_SIZE_T, SIZE_SZ, MALLOC_ALIGNMENT)
ChangeLog.18:81434  (sYSMALLOc): Handle MALLOC_ALIGNMENT > 2 * SIZE_SZ. Don't update
ChangeLog.18:81841  (MALLOC_ALIGNMENT): Set it to the greater of 2 * SIZE_SZ and
ChangeLog.23:4024   (SIZE_SZ): Remove.