Searched refs:SIZE_SZ (Results 1 – 9 of 9) sorted by relevance
27 #define MALLOC_ALIGNMENT (2 * SIZE_SZ < __alignof__ (long double) \
28                           ? __alignof__ (long double) : 2 * SIZE_SZ)
57 #define SIZE_SZ (sizeof (INTERNAL_SIZE_T)) macro
313 # define tidx2usize(idx) (((size_t) idx) * MALLOC_ALIGNMENT + MINSIZE - SIZE_SZ)
852 #define DEFAULT_MXFAST (64 * SIZE_SZ / 4)
1295 #define CHUNK_HDR_SZ (2 * SIZE_SZ)
1327 (((req) + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE) ? \
1329 ((req) + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK)
1460 (__MTAG_GRANULE_SIZE > SIZE_SZ && __glibc_unlikely (mtag_enabled) ? \
1462 chunksize (p) - CHUNK_HDR_SZ + (chunk_is_mmapped (p) ? 0 : SIZE_SZ))
1611 (SIZE_SZ == 8 ? largebin_index_64 (sz) \
1742 ((((unsigned int) (sz)) >> (SIZE_SZ == 8 ? 4 : 3)) - 2)
1746 #define MAX_FAST_SIZE (80 * SIZE_SZ / 4)
[all …]
86 char pad[-3 * SIZE_SZ & MALLOC_ALIGN_MASK];
92 + 2 * SIZE_SZ) % MALLOC_ALIGNMENT
654 prev_size = prev_heap->size - (MINSIZE - 2 * SIZE_SZ); in heap_trim()
661 new_size = chunksize (p) + (MINSIZE - 2 * SIZE_SZ) + misalign; in heap_trim()
249 if (bytes > osize - SIZE_SZ) in strong_alias()
250 bytes = osize - SIZE_SZ; in strong_alias()
412 return chunksize (p) - SIZE_SZ; in strong_alias()
297 if (oldsize - SIZE_SZ >= chnb) in realloc_check()
1035 * malloc/malloc.c (MALLOC_ALIGNMENT): Revert to (2 * SIZE_SZ) value.
1044 MALLOC_ALIGNMENT > 2 * SIZE_SZ.
1180 if long double is more aligned than 2 * SIZE_SZ.
8429 don't clear SIZE_SZ bytes more than should be cleared.
18937 * malloc/malloc.c (INTERNAL_SIZE_T, SIZE_SZ, MALLOC_ALIGNMENT)
81434 (sYSMALLOc): Handle MALLOC_ALIGNMENT > 2 * SIZE_SZ. Don't update
81841 (MALLOC_ALIGNMENT): Set it to the greater of 2 * SIZE_SZ and
4024 (SIZE_SZ): Remove.