Lines Matching refs:nb

2414 sysmalloc_mmap (INTERNAL_SIZE_T nb, size_t pagesize, int extra_flags, mstate av)  in sysmalloc_mmap()  argument
2427 size = ALIGN_UP (nb + SIZE_SZ, pagesize); in sysmalloc_mmap()
2429 size = ALIGN_UP (nb + SIZE_SZ + MALLOC_ALIGN_MASK, pagesize); in sysmalloc_mmap()
2432 if ((unsigned long) (size) <= (unsigned long) (nb)) in sysmalloc_mmap()
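
The sysmalloc_mmap references above (lines 2427-2432) size the mapping by padding nb with one header word (plus MALLOC_ALIGN_MASK when the alignment exceeds the header, line 2429), rounding up to a whole page, and treating a result no larger than nb as arithmetic wraparound; sysmalloc_mmap_fallback repeats the same wraparound test at line 2517. A minimal self-contained sketch of that computation, assuming a typical 64-bit build and using simplified stand-ins for the glibc macros:

#include <stddef.h>
#include <stdio.h>

#define SIZE_SZ        sizeof (size_t)                  /* one chunk header word */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t) (a) - 1))

/* Return the page-aligned mapping size for a padded request nb,
   or 0 if the padding/rounding wrapped around (the size <= nb test).  */
static size_t
mmap_size_for_request (size_t nb, size_t pagesize)
{
  size_t size = ALIGN_UP (nb + SIZE_SZ, pagesize);
  if (size <= nb)
    return 0;
  return size;
}

int
main (void)
{
  printf ("%zu\n", mmap_size_for_request (200000, 4096));        /* 200704 */
  printf ("%zu\n", mmap_size_for_request ((size_t) -32, 4096));  /* 0: overflow */
  return 0;
}
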
2502 sysmalloc_mmap_fallback (long int *s, INTERNAL_SIZE_T nb, in sysmalloc_mmap_fallback() argument
2517 if ((unsigned long) (size) <= (unsigned long) (nb)) in sysmalloc_mmap_fallback()
2541 sysmalloc (INTERNAL_SIZE_T nb, mstate av) in sysmalloc() argument
2574 || ((unsigned long) (nb) >= (unsigned long) (mp_.mmap_threshold) in sysmalloc()
2579 if (mp_.hp_pagesize > 0 && nb >= mp_.hp_pagesize) in sysmalloc()
2583 mm = sysmalloc_mmap (nb, mp_.hp_pagesize, mp_.hp_flags, av); in sysmalloc()
2588 mm = sysmalloc_mmap (nb, pagesize, 0, av); in sysmalloc()
2617 assert ((unsigned long) (old_size) < (unsigned long) (nb + MINSIZE)); in sysmalloc()
2628 if ((long) (MINSIZE + nb - old_size) > 0 in sysmalloc()
2629 && grow_heap (old_heap, MINSIZE + nb - old_size) == 0) in sysmalloc()
2635 else if ((heap = new_heap (nb + (MINSIZE + sizeof (*heap)), mp_.top_pad))) in sysmalloc()
2672 char *mm = sysmalloc_mmap (nb, pagesize, 0, av); in sysmalloc()
2681 size = nb + mp_.top_pad + MINSIZE; in sysmalloc()
2741 mbrk = sysmalloc_mmap_fallback (&size, nb, old_size, in sysmalloc()
2746 mbrk = sysmalloc_mmap_fallback (&size, nb, old_size, pagesize, in sysmalloc()
2944 if ((unsigned long) (size) >= (unsigned long) (nb + MINSIZE)) in sysmalloc()
2946 remainder_size = size - nb; in sysmalloc()
2947 remainder = chunk_at_offset (p, nb); in sysmalloc()
2949 set_head (p, nb | PREV_INUSE | (av != &main_arena ? NON_MAIN_ARENA : 0)); in sysmalloc()
2951 check_malloced_chunk (av, p, nb); in sysmalloc()
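
The sysmalloc references end (lines 2944-2951) with the newly obtained memory being split: the first nb bytes become the chunk handed back to the caller and the remainder becomes the new top chunk. A minimal sketch of that split, with simplified stand-ins for malloc_chunk, chunk_at_offset and set_head rather than the real glibc definitions:

#include <stddef.h>

#define PREV_INUSE 0x1
#define MINSIZE    (4 * sizeof (size_t))

struct fake_chunk
{
  size_t prev_size;   /* only meaningful while the previous chunk is free */
  size_t size;        /* chunk size plus low-order flag bits */
};

#define chunk_at_offset(p, s) ((struct fake_chunk *) ((char *) (p) + (s)))
#define set_head(p, s)        ((p)->size = (s))

/* Carve an nb-byte chunk out of a free region of `size` bytes starting at
   `top`; the remainder becomes the new top.  Returns the new top, or NULL
   if the region cannot hold the chunk plus a minimal remainder.  */
static struct fake_chunk *
split_top (struct fake_chunk *top, size_t size, size_t nb)
{
  if (size < nb + MINSIZE)
    return NULL;

  size_t remainder_size = size - nb;
  struct fake_chunk *remainder = chunk_at_offset (top, nb);

  set_head (top, nb | PREV_INUSE);                     /* chunk handed out */
  set_head (remainder, remainder_size | PREV_INUSE);   /* new top chunk */
  return remainder;
}

int
main (void)
{
  static char region[4096] __attribute__ ((aligned (16)));
  struct fake_chunk *top = (struct fake_chunk *) region;
  struct fake_chunk *new_top = split_top (top, sizeof region, 512);
  return new_top == chunk_at_offset (top, 512) ? 0 : 1;
}
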
3396 INTERNAL_SIZE_T nb; /* padded request size */ in __libc_realloc() local
3440 nb = checked_request2size (bytes); in __libc_realloc()
3441 if (nb == 0) in __libc_realloc()
3452 newp = mremap_chunk (oldp, nb); in __libc_realloc()
3465 if (oldsize - SIZE_SZ >= nb) in __libc_realloc()
3480 newp = _int_realloc (ar_ptr, oldp, oldsize, nb); in __libc_realloc()
3489 newp = _int_realloc (ar_ptr, oldp, oldsize, nb); in __libc_realloc()
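
Both __libc_realloc (lines 3440-3441) and _int_malloc (lines 3798-3799) begin by converting the caller's byte count into the padded chunk size nb and treat 0 as an out-of-memory failure. A sketch of the usual padding rule (one header word added, rounded up to the malloc alignment, clamped to MINSIZE, requests above PTRDIFF_MAX rejected), with constants that assume a typical 64-bit configuration:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define SIZE_SZ            sizeof (size_t)
#define MALLOC_ALIGNMENT   (2 * SIZE_SZ)
#define MALLOC_ALIGN_MASK  (MALLOC_ALIGNMENT - 1)
#define MINSIZE            (4 * SIZE_SZ)

/* Pad a user request to a chunk size: one header word plus alignment,
   with a floor of MINSIZE.  Returns 0 for requests too large to serve,
   mirroring the nb == 0 checks in the listing above.  */
static size_t
padded_request_size (size_t bytes)
{
  if (bytes > PTRDIFF_MAX)
    return 0;
  size_t nb = (bytes + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK;
  return nb < MINSIZE ? MINSIZE : nb;
}

int
main (void)
{
  printf ("%zu\n", padded_request_size (0));    /* 32: clamped to MINSIZE */
  printf ("%zu\n", padded_request_size (24));   /* 32 */
  printf ("%zu\n", padded_request_size (40));   /* 48 */
  return 0;
}
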
3767 INTERNAL_SIZE_T nb; /* normalized request size */ in _int_malloc() local
3798 nb = checked_request2size (bytes); in _int_malloc()
3799 if (nb == 0) in _int_malloc()
3809 void *p = sysmalloc (nb, av); in _int_malloc()
3834 if ((unsigned long) (nb) <= (unsigned long) (get_max_fast ())) in _int_malloc()
3836 idx = fastbin_index (nb); in _int_malloc()
3855 check_remalloced_chunk (av, victim, nb); in _int_malloc()
3859 size_t tc_idx = csize2tidx (nb); in _int_malloc()
3897 if (in_smallbin_range (nb)) in _int_malloc()
3899 idx = smallbin_index (nb); in _int_malloc()
3907 set_inuse_bit_at_offset (victim, nb); in _int_malloc()
3913 check_malloced_chunk (av, victim, nb); in _int_malloc()
3917 size_t tc_idx = csize2tidx (nb); in _int_malloc()
3929 set_inuse_bit_at_offset (tc_victim, nb); in _int_malloc()
3959 idx = largebin_index (nb); in _int_malloc()
3979 size_t tc_idx = csize2tidx (nb); in _int_malloc()
3981 tcache_nb = nb; in _int_malloc()
4018 if (in_smallbin_range (nb) && in _int_malloc()
4021 (unsigned long) (size) > (unsigned long) (nb + MINSIZE)) in _int_malloc()
4024 remainder_size = size - nb; in _int_malloc()
4025 remainder = chunk_at_offset (victim, nb); in _int_malloc()
4035 set_head (victim, nb | PREV_INUSE | in _int_malloc()
4040 check_malloced_chunk (av, victim, nb); in _int_malloc()
4054 if (size == nb) in _int_malloc()
4072 check_malloced_chunk (av, victim, nb); in _int_malloc()
4179 if (!in_smallbin_range (nb)) in _int_malloc()
4186 >= (unsigned long) (nb)) in _int_malloc()
4190 (unsigned long) (nb))) in _int_malloc()
4200 remainder_size = size - nb; in _int_malloc()
4213 remainder = chunk_at_offset (victim, nb); in _int_malloc()
4229 set_head (victim, nb | PREV_INUSE | in _int_malloc()
4234 check_malloced_chunk (av, victim, nb); in _int_malloc()
4298 assert ((unsigned long) (size) >= (unsigned long) (nb)); in _int_malloc()
4300 remainder_size = size - nb; in _int_malloc()
4316 remainder = chunk_at_offset (victim, nb); in _int_malloc()
4330 if (in_smallbin_range (nb)) in _int_malloc()
4337 set_head (victim, nb | PREV_INUSE | in _int_malloc()
4342 check_malloced_chunk (av, victim, nb); in _int_malloc()
4371 if ((unsigned long) (size) >= (unsigned long) (nb + MINSIZE)) in _int_malloc()
4373 remainder_size = size - nb; in _int_malloc()
4374 remainder = chunk_at_offset (victim, nb); in _int_malloc()
4376 set_head (victim, nb | PREV_INUSE | in _int_malloc()
4380 check_malloced_chunk (av, victim, nb); in _int_malloc()
4392 if (in_smallbin_range (nb)) in _int_malloc()
4393 idx = smallbin_index (nb); in _int_malloc()
4395 idx = largebin_index (nb); in _int_malloc()
4403 void *p = sysmalloc (nb, av); in _int_malloc()
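
Within _int_malloc, nb selects every bin consulted above: fastbin_index at line 3836, the small/large split at lines 3897-3899 and 3959, and the tcache index csize2tidx at lines 3859, 3917 and 3979. The sketch below reproduces those index formulas for a 64-bit build with 16-byte alignment; the constants are assumptions about the configuration rather than values taken from a build:

#include <stddef.h>
#include <stdio.h>

#define MALLOC_ALIGNMENT  16                                  /* 2 * SIZE_SZ on 64-bit */
#define MINSIZE           32
#define NSMALLBINS        64
#define MIN_LARGE_SIZE    (NSMALLBINS * MALLOC_ALIGNMENT)     /* 1024 */

static unsigned fastbin_index (size_t nb)    { return (unsigned) (nb >> 4) - 2; }
static unsigned smallbin_index (size_t nb)   { return (unsigned) (nb >> 4); }
static size_t   csize2tidx (size_t nb)       { return (nb - MINSIZE + MALLOC_ALIGNMENT - 1) / MALLOC_ALIGNMENT; }
static int      in_smallbin_range (size_t nb) { return nb < MIN_LARGE_SIZE; }

int
main (void)
{
  size_t nb = 48;                                   /* e.g. a 40-byte request, padded */
  printf ("fastbin index:  %u\n", fastbin_index (nb));     /* 1 */
  printf ("smallbin index: %u\n", smallbin_index (nb));    /* 3 */
  printf ("tcache index:   %zu\n", csize2tidx (nb));       /* 1 */
  printf ("smallbin range: %d\n", in_smallbin_range (nb)); /* 1 */
  return 0;
}
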
4813 INTERNAL_SIZE_T nb) in _int_realloc() argument
4840 if ((unsigned long) (oldsize) >= (unsigned long) (nb)) in _int_realloc()
4852 (unsigned long) (nb + MINSIZE)) in _int_realloc()
4854 set_head_size (oldp, nb | (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_realloc()
4855 av->top = chunk_at_offset (oldp, nb); in _int_realloc()
4856 set_head (av->top, (newsize - nb) | PREV_INUSE); in _int_realloc()
4865 (unsigned long) (nb)) in _int_realloc()
4874 newmem = _int_malloc (av, nb - MALLOC_ALIGN_MASK); in _int_realloc()
4905 assert ((unsigned long) (newsize) >= (unsigned long) (nb)); in _int_realloc()
4907 remainder_size = newsize - nb; in _int_realloc()
4916 remainder = chunk_at_offset (newp, nb); in _int_realloc()
4919 set_head_size (newp, nb | (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_realloc()
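
In _int_realloc the chunk is kept as-is when oldsize already covers nb (line 4840), grown in place by absorbing the adjacent top chunk when possible (lines 4852-4856), and otherwise replaced via _int_malloc, copy and free (line 4874) with the oversized result trimmed afterwards (lines 4905-4919). The sketch below isolates the grow-into-top step; the struct, macros and the grow_into_top helper are illustrative stand-ins, not glibc code:

#include <stddef.h>

#define PREV_INUSE 0x1
#define MINSIZE    (4 * sizeof (size_t))

struct fake_chunk { size_t prev_size, size; };
struct fake_arena { struct fake_chunk *top; };

#define chunk_at_offset(p, s)  ((struct fake_chunk *) ((char *) (p) + (s)))
#define chunksize(p)           ((p)->size & ~(size_t) 0x7)
#define set_head(p, s)         ((p)->size = (s))
#define set_head_size(p, s)    ((p)->size = ((p)->size & 0x7) | (s))

/* Try to grow oldp (currently oldsize bytes) in place to nb bytes by
   absorbing space from the adjacent top chunk.  Returns nonzero on success.  */
static int
grow_into_top (struct fake_arena *av, struct fake_chunk *oldp,
               size_t oldsize, size_t nb)
{
  struct fake_chunk *next = chunk_at_offset (oldp, oldsize);
  if (next != av->top)
    return 0;                                   /* not adjacent to top */

  size_t newsize = oldsize + chunksize (av->top);
  if (newsize < nb + MINSIZE)
    return 0;                                   /* top cannot cover it */

  set_head_size (oldp, nb);                     /* chunk keeps its flag bits */
  av->top = chunk_at_offset (oldp, nb);         /* top moves past the chunk */
  set_head (av->top, (newsize - nb) | PREV_INUSE);
  return 1;
}

int
main (void)
{
  static char heap[1024] __attribute__ ((aligned (16)));
  struct fake_chunk *oldp = (struct fake_chunk *) heap;
  struct fake_arena av = { .top = chunk_at_offset (oldp, 64) };
  set_head (oldp, 64 | PREV_INUSE);
  set_head (av.top, (sizeof heap - 64) | PREV_INUSE);
  return grow_into_top (&av, oldp, 64, 128) ? 0 : 1;
}
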
4938 INTERNAL_SIZE_T nb; /* padded request size */ in _int_memalign() local
4951 nb = checked_request2size (bytes); in _int_memalign()
4952 if (nb == 0) in _int_memalign()
4965 m = (char *) (_int_malloc (av, nb + alignment + MINSIZE)); in _int_memalign()
5006 assert (newsize >= nb && in _int_memalign()
5014 if ((unsigned long) (size) > (unsigned long) (nb + MINSIZE)) in _int_memalign()
5016 remainder_size = size - nb; in _int_memalign()
5017 remainder = chunk_at_offset (p, nb); in _int_memalign()
5020 set_head_size (p, nb); in _int_memalign()
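
_int_memalign over-allocates by nb + alignment + MINSIZE (line 4965), picks an aligned chunk position inside the block, and gives the leading and trailing slack back as free chunks (lines 5014-5020). The aligned_within helper below is a hypothetical illustration of how that aligned position can be chosen so the leading gap is either zero or large enough to become its own chunk; it is not a glibc function, and it assumes (as glibc does on this path) that the requested alignment is at least MINSIZE:

#include <stddef.h>
#include <stdint.h>

#define MINSIZE (4 * sizeof (size_t))

/* Return the first alignment-aligned address usable inside an over-sized
   block: either the block start itself, or an aligned address far enough
   in that the leading gap can be split off as a free chunk of at least
   MINSIZE bytes.  `alignment` must be a power of two no smaller than
   MINSIZE, so one extra step of `alignment` is always enough.  */
static char *
aligned_within (char *block, size_t alignment)
{
  uintptr_t addr = (uintptr_t) block;
  if (addr % alignment == 0)
    return block;                   /* already aligned: nothing to trim */

  uintptr_t aligned = (addr + alignment - 1) & ~(uintptr_t) (alignment - 1);
  if (aligned - addr < MINSIZE)
    aligned += alignment;           /* gap too small to become its own chunk */
  return (char *) aligned;
}

int
main (void)
{
  static char block[512];
  char *m = aligned_within (block + 8, 64);
  return ((uintptr_t) m % 64 == 0 || m == block + 8) ? 0 : 1;
}
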