Searched refs:MINSIZE (Results 1 – 4 of 4) sorted by relevance
  313   # define tidx2usize(idx) (((size_t) idx) * MALLOC_ALIGNMENT + MINSIZE - SIZE_SZ)
  316   # define csize2tidx(x) (((x) - MINSIZE + MALLOC_ALIGNMENT - 1) / MALLOC_ALIGNMENT)
 1312   #define MINSIZE \                                            macro
 1327   (((req) + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE) ? \
 1328   MINSIZE : \
 2095   assert ((unsigned long) (sz) >= MINSIZE);                    in do_check_chunk()
 2131   if ((unsigned long) (sz) >= MINSIZE)                         in do_check_free_chunk()
 2183   assert (chunksize (next) >= MINSIZE);                        in do_check_inuse_chunk()
 2211   assert ((unsigned long) (sz) >= MINSIZE);                    in do_check_remalloced_chunk()
 2216   assert ((long) (sz) - (long) (s + MINSIZE) < 0);             in do_check_remalloced_chunk()
 [all …]
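The matches above cover the MINSIZE definition itself (line 1312), the request-to-chunk-size rounding that clamps every request to at least MINSIZE (lines 1327-1328), and the tcache size/index conversions (lines 313 and 316). The following standalone sketch reproduces that arithmetic for a typical 64-bit build, assuming SIZE_SZ = 8, MALLOC_ALIGNMENT = 16 and therefore MINSIZE = 32; the constants and function names are illustrative, not copied from the searched tree.

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Illustrative 64-bit constants (assumption, not from the search output).  */
    #define SIZE_SZ            sizeof (size_t)        /* 8  */
    #define MALLOC_ALIGNMENT   (2 * SIZE_SZ)          /* 16 */
    #define MALLOC_ALIGN_MASK  (MALLOC_ALIGNMENT - 1)
    #define MINSIZE            (4 * SIZE_SZ)          /* 32: smallest aligned chunk */

    /* Round a user request up to a usable chunk size, never below MINSIZE
       (mirrors the expression matched at lines 1327-1328).  */
    static size_t request2size (size_t req)
    {
      return (req + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE)
             ? MINSIZE
             : (req + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK;
    }

    /* Chunk size <-> tcache bin index, as in the macros at lines 313/316.  */
    static size_t csize2tidx (size_t csize)
    {
      return (csize - MINSIZE + MALLOC_ALIGNMENT - 1) / MALLOC_ALIGNMENT;
    }

    static size_t tidx2usize (size_t idx)
    {
      return idx * MALLOC_ALIGNMENT + MINSIZE - SIZE_SZ;
    }

    int main (void)
    {
      assert (request2size (1) == MINSIZE);   /* a 1-byte request still occupies a 32-byte chunk */
      printf ("bin of a 24-byte request: %zu\n", csize2tidx (request2size (24)));  /* 0 */
      printf ("bin of a 25-byte request: %zu\n", csize2tidx (request2size (25)));  /* 1 */
      printf ("largest usable size in bin 0: %zu bytes\n", tidx2usize (0));        /* 24 */
      return 0;
    }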
  127   sz < MINSIZE || sz & MALLOC_ALIGN_MASK || !inuse (p) ||      in mem2chunk_check()
  182   chunksize (t) >= MINSIZE &&                                  in top_check()
  345   if (alignment < MINSIZE)                                     in memalign_check()
  346   alignment = MINSIZE;                                         in memalign_check()
  357   if (bytes > SIZE_MAX - alignment - MINSIZE)                  in memalign_check()
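These matches come from the debug/check wrappers: mem2chunk_check() rejects any chunk whose size field is below MINSIZE or not MALLOC_ALIGNMENT-aligned, and memalign_check() bumps tiny alignments up to MINSIZE and refuses requests where adding the alignment and MINSIZE would overflow SIZE_MAX (line 357). A hedged sketch of both validations, reusing the illustrative 64-bit constants from the previous snippet:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Same illustrative 64-bit constants as before (assumption).  */
    #define SIZE_SZ            sizeof (size_t)
    #define MALLOC_ALIGNMENT   (2 * SIZE_SZ)
    #define MALLOC_ALIGN_MASK  (MALLOC_ALIGNMENT - 1)
    #define MINSIZE            (4 * SIZE_SZ)

    /* Modelled on the condition matched in mem2chunk_check(): a size below
       MINSIZE, or one that is misaligned, cannot belong to a valid chunk.  */
    static bool plausible_chunk_size (size_t sz)
    {
      return sz >= MINSIZE && (sz & MALLOC_ALIGN_MASK) == 0;
    }

    /* Sketch of the memalign_check() entry checks (lines 345-357): raise the
       alignment to at least MINSIZE, then make sure the later size computation
       cannot wrap around SIZE_MAX.  */
    static bool memalign_args_ok (size_t *alignment, size_t bytes)
    {
      if (*alignment < MINSIZE)
        *alignment = MINSIZE;
      return bytes <= SIZE_MAX - *alignment - MINSIZE;
    }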
  654   prev_size = prev_heap->size - (MINSIZE - 2 * SIZE_SZ);       in heap_trim()
  661   new_size = chunksize (p) + (MINSIZE - 2 * SIZE_SZ) + misalign;  in heap_trim()
  662   assert (new_size > 0 && new_size < (long) (2 * MINSIZE));    in heap_trim()
  666   if (new_size + (max_size - prev_heap->size) < pad + MINSIZE  in heap_trim()
  696   top_area = top_size - MINSIZE - 1;                           in heap_trim()
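The heap_trim() matches show the shrink-side arithmetic: memory is only handed back to the kernel when more than pad + MINSIZE bytes would still remain, so the arena always keeps a usable top chunk. Below is a minimal sketch of that decision for a top chunk, following the top_area = top_size - MINSIZE - 1 pattern at line 696; the function name and parameters are illustrative, and the real code additionally handles heap boundaries and misalignment (lines 654-666).

    #include <stddef.h>

    #define SIZE_SZ  sizeof (size_t)
    #define MINSIZE  (4 * SIZE_SZ)   /* illustrative 64-bit value, as above */

    /* How many bytes of the top chunk could be released to the OS while
       keeping MINSIZE plus the configured trim pad available.  Returns 0
       when trimming is not worthwhile.  pagesize is assumed to be a power
       of two; only whole pages are released.  */
    static size_t trimmable_bytes (size_t top_size, size_t pad, size_t pagesize)
    {
      if (top_size <= MINSIZE)
        return 0;
      size_t top_area = top_size - MINSIZE - 1;    /* cf. line 696 */
      if (top_area <= pad)
        return 0;
      return (top_area - pad) & ~(pagesize - 1);   /* round down to whole pages */
    }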
 14118   (long) (MINSIZE + nb - old_size) is positive.
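This last fragment appears to describe the growth path taken when the existing top chunk is too small for a padded request: the arena has to be extended by MINSIZE + nb - old_size bytes so that, after carving nb out of the enlarged top chunk, at least MINSIZE remains, and since this path is only reached when old_size < nb + MINSIZE, that difference is indeed positive. As a worked example under the 64-bit constants assumed above: with MINSIZE = 32, a padded request of nb = 1040 bytes and a current top chunk of old_size = 48 bytes, the heap must grow by at least 32 + 1040 - 48 = 1024 bytes.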