Lines matching refs: size (glibc malloc/malloc.c)
454 tag_region (void *ptr, size_t size) in tag_region() argument
457 return __libc_mtag_tag_region (ptr, size); in tag_region()
462 tag_new_zero_region (void *ptr, size_t size) in tag_new_zero_region() argument
465 return __libc_mtag_tag_zero_region (__libc_mtag_new_tag (ptr), size); in tag_new_zero_region()
466 return memset (ptr, 0, size); in tag_new_zero_region()
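
The two helpers above dispatch on whether memory tagging (e.g. AArch64 MTE) is active: with tagging on, tag_new_zero_region() retags and zeroes through the __libc_mtag_* hooks (line 465); with tagging off, it degenerates to a plain memset (line 466). A minimal standalone sketch of that dispatch, with stub hooks standing in for the real tagging internals:

    #include <stddef.h>
    #include <string.h>

    static int mtag_enabled;   /* assumed runtime flag; illustrative only */

    /* Illustrative stand-ins for __libc_mtag_new_tag and
       __libc_mtag_tag_zero_region; the real ones manipulate MTE tags.  */
    static void *stub_new_tag (void *p) { return p; }
    static void *stub_tag_zero (void *p, size_t n) { return memset (p, 0, n); }

    /* Mirrors the shape of tag_new_zero_region() above: retag and zero
       through the tagging hooks when tagging is on, otherwise just zero.  */
    static void *
    tag_new_zero_region_sketch (void *ptr, size_t size)
    {
      if (mtag_enabled)
        return stub_tag_zero (stub_new_tag (ptr), size);
      return memset (ptr, 0, size);
    }
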
1133 #define MMAP(addr, size, prot, flags) \ argument
1134 __mmap((addr), (size), (prot), (flags)|MAP_ANONYMOUS|MAP_PRIVATE, -1, 0)
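
The MMAP wrapper at lines 1133-1134 pins down the flags every malloc mapping needs: anonymous, private, no backing file descriptor. An equivalent standalone call looks like this (sketch, with error handling added):

    #define _DEFAULT_SOURCE
    #include <stddef.h>
    #include <sys/mman.h>

    /* Same effect as MMAP(0, size, PROT_READ|PROT_WRITE, 0) above:
       an anonymous, private, demand-zero mapping.  */
    static void *
    anon_map (size_t size)
    {
      void *p = mmap (NULL, size, PROT_READ | PROT_WRITE,
                      MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
      return p == MAP_FAILED ? NULL : p;
    }
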
2017 madvise_thp (void *p, INTERNAL_SIZE_T size) in madvise_thp() argument
2022 if (mp_.thp_pagesize == 0 || size < mp_.thp_pagesize) in madvise_thp()
2030 size += PTR_DIFF (p, q); in madvise_thp()
2034 __madvise (p, size, MADV_HUGEPAGE); in madvise_thp()
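
madvise_thp() (lines 2017-2034) only fires when a transparent-hugepage size was detected and the region is at least that large; it then moves the start down to a hugepage boundary, growing the length by the adjustment (the PTR_DIFF at line 2030), before advising the kernel. A standalone sketch, assuming thp_pagesize is a power of two read at startup:

    #define _DEFAULT_SOURCE
    #include <stddef.h>
    #include <stdint.h>
    #include <sys/mman.h>

    static size_t thp_pagesize;          /* 0 when THP is unavailable */

    static void
    madvise_thp_sketch (void *p, size_t size)
    {
      if (thp_pagesize == 0 || size < thp_pagesize)
        return;                          /* not worth a hugepage */

      /* Align the start down to a hugepage boundary and grow the length
         by the amount we moved, as lines 2030-2034 do.  */
      uintptr_t q = (uintptr_t) p & ~((uintptr_t) thp_pagesize - 1);
      size += (uintptr_t) p - q;
      madvise ((void *) q, size, MADV_HUGEPAGE);
    }
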
2262 INTERNAL_SIZE_T size; in do_check_malloc_state() local
2347 size = chunksize (p); in do_check_malloc_state()
2348 total += size; in do_check_malloc_state()
2352 idx = bin_index (size); in do_check_malloc_state()
2358 if (!in_smallbin_range (size)) in do_check_malloc_state()
2381 else if (!in_smallbin_range (size)) in do_check_malloc_state()
2416 long int size; in sysmalloc_mmap() local
2427 size = ALIGN_UP (nb + SIZE_SZ, pagesize); in sysmalloc_mmap()
2429 size = ALIGN_UP (nb + SIZE_SZ + MALLOC_ALIGN_MASK, pagesize); in sysmalloc_mmap()
2432 if ((unsigned long) (size) <= (unsigned long) (nb)) in sysmalloc_mmap()
2435 char *mm = (char *) MMAP (0, size, in sysmalloc_mmap()
2443 madvise_thp (mm, size); in sysmalloc_mmap()
2473 set_head (p, (size - correction) | IS_MMAPPED); in sysmalloc_mmap()
2479 set_head (p, size | IS_MMAPPED); in sysmalloc_mmap()
2487 sum = atomic_exchange_and_add (&mp_.mmapped_mem, size) + size; in sysmalloc_mmap()
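
The rounding and check at lines 2427-2432 form the classic overflow guard: after padding the request with header space and rounding up to a page, the result must still exceed the raw request, or the arithmetic wrapped. A sketch with ALIGN_UP written out:

    #include <stddef.h>

    /* Round x up to a multiple of align (a power of two).  */
    #define ALIGN_UP_SK(x, align) (((x) + (align) - 1) & ~((align) - 1))

    /* Returns 0 and leaves *out untouched if nb is so large that padding
       plus page rounding wrapped around, mirroring line 2432.  */
    static int
    padded_mmap_size (size_t nb, size_t header, size_t pagesize, size_t *out)
    {
      size_t size = ALIGN_UP_SK (nb + header, pagesize);
      if (size <= nb)
        return 0;                        /* overflow: refuse the request */
      *out = size;
      return 1;
    }
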
2506 long int size = *s; in sysmalloc_mmap_fallback() local
2510 size = ALIGN_UP (size + old_size, pagesize); in sysmalloc_mmap_fallback()
2513 if ((unsigned long) (size) < minsize) in sysmalloc_mmap_fallback()
2514 size = minsize; in sysmalloc_mmap_fallback()
2517 if ((unsigned long) (size) <= (unsigned long) (nb)) in sysmalloc_mmap_fallback()
2520 char *mbrk = (char *) (MMAP (0, size, in sysmalloc_mmap_fallback()
2528 madvise_thp (mbrk, size); in sysmalloc_mmap_fallback()
2536 *s = size; in sysmalloc_mmap_fallback()
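
sysmalloc_mmap_fallback() (lines 2506-2536) reuses the same pattern but also clamps the mapping to a minimum size and reports the final size back through its out-parameter (the store into *s at line 2536), so the caller at lines 2741-2746 sees what was actually mapped. A sketch of that shape, with the page rounding written inline:

    #define _DEFAULT_SOURCE
    #include <stddef.h>
    #include <sys/mman.h>

    static void *
    mmap_fallback_sketch (size_t *s, size_t nb, size_t old_size,
                          size_t minsize, size_t pagesize)
    {
      size_t size = (*s + old_size + pagesize - 1) & ~(pagesize - 1);
      if (size < minsize)
        size = minsize;                  /* lines 2513-2514 */
      if (size <= nb)
        return NULL;                     /* arithmetic wrapped (line 2517) */

      void *p = mmap (NULL, size, PROT_READ | PROT_WRITE,
                      MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
      if (p == MAP_FAILED)
        return NULL;
      *s = size;                         /* tell the caller what we got */
      return p;
    }
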
2547 long size; /* arg to first MORECORE or mmap call */ in sysmalloc() local
2627 old_heap_size = old_heap->size; in sysmalloc()
2631 av->system_mem += old_heap->size - old_heap_size; in sysmalloc()
2632 set_head (old_top, (((char *) old_heap + old_heap->size) - (char *) old_top) in sysmalloc()
2640 av->system_mem += heap->size; in sysmalloc()
2643 set_head (top (av), (heap->size - sizeof (*heap)) | PREV_INUSE); in sysmalloc()
2681 size = nb + mp_.top_pad + MINSIZE; in sysmalloc()
2690 size -= old_size; in sysmalloc()
2705 uintptr_t top = ALIGN_UP ((uintptr_t) __curbrk + size, in sysmalloc()
2707 size = top - (uintptr_t) __curbrk; in sysmalloc()
2711 size = ALIGN_UP (size, GLRO(dl_pagesize)); in sysmalloc()
2719 if (size > 0) in sysmalloc()
2721 brk = (char *) (MORECORE (size)); in sysmalloc()
2723 madvise_thp (brk, size); in sysmalloc()
2724 LIBC_PROBE (memory_sbrk_more, 2, brk, size); in sysmalloc()
2741 mbrk = sysmalloc_mmap_fallback (&size, nb, old_size, in sysmalloc()
2746 mbrk = sysmalloc_mmap_fallback (&size, nb, old_size, pagesize, in sysmalloc()
2752 snd_brk = brk + size; in sysmalloc()
2760 av->system_mem += size; in sysmalloc()
2767 set_head (old_top, (size + old_size) | PREV_INUSE); in sysmalloc()
2831 end_misalign = (INTERNAL_SIZE_T) (brk + size + correction); in sysmalloc()
2941 size = chunksize (p); in sysmalloc()
2944 if ((unsigned long) (size) >= (unsigned long) (nb + MINSIZE)) in sysmalloc()
2946 remainder_size = size - nb; in sysmalloc()
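
Lines 2941-2946 end sysmalloc() with the split that recurs throughout this file: carve nb bytes off a larger chunk only when the remainder could stand alone as a chunk of at least MINSIZE. A sketch of just that decision:

    #include <stddef.h>

    /* Illustrative only: the real MINSIZE is derived from the platform's
       chunk layout and alignment.  */
    #define MINSIZE_SK 32

    /* Returns the remainder size if the chunk can be split, 0 if the
       whole chunk must be handed out (compare lines 2944-2946).  */
    static size_t
    split_remainder (size_t size, size_t nb)
    {
      if (size >= nb + MINSIZE_SK)
        return size - nb;
      return 0;
    }
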
3040 INTERNAL_SIZE_T size = chunksize (p); in munmap_chunk() local
3046 size_t total_size = prev_size (p) + size; in munmap_chunk()
3072 INTERNAL_SIZE_T size = chunksize (p); in mremap_chunk() local
3079 size_t total_size = offset + size; in mremap_chunk()
3107 new = atomic_exchange_and_add (&mp_.mmapped_mem, new_size - size - offset) in mremap_chunk()
3108 + new_size - size - offset; in mremap_chunk()
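
The statistics update at lines 3107-3108 is an atomic exchange-and-add: the primitive returns the pre-update value, so the delta is added back once more to obtain the new total. A C11 sketch of the same idiom:

    #include <stdatomic.h>
    #include <stddef.h>

    static _Atomic size_t mmapped_mem_sk;   /* illustrative counter */

    /* atomic_fetch_add returns the value before the addition, so adding
       the delta again yields the post-update total, exactly as lines
       3107-3108 compute `new`.  Unsigned wraparound makes this correct
       for shrinking remaps too.  */
    static size_t
    account_mremap (size_t new_size, size_t old_footprint)
    {
      size_t delta = new_size - old_footprint;
      return atomic_fetch_add (&mmapped_mem_sk, delta) + delta;
    }
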
3772 INTERNAL_SIZE_T size; /* its size */ in _int_malloc() local
3993 size = chunksize (victim); in _int_malloc()
3994 mchunkptr next = chunk_at_offset (victim, size); in _int_malloc()
3996 if (__glibc_unlikely (size <= CHUNK_HDR_SZ) in _int_malloc()
3997 || __glibc_unlikely (size > av->system_mem)) in _int_malloc()
4002 if (__glibc_unlikely ((prev_size (next) & ~(SIZE_BITS)) != size)) in _int_malloc()
4021 (unsigned long) (size) > (unsigned long) (nb + MINSIZE)) in _int_malloc()
4024 remainder_size = size - nb; in _int_malloc()
4054 if (size == nb) in _int_malloc()
4056 set_inuse_bit_at_offset (victim, size); in _int_malloc()
4083 if (in_smallbin_range (size)) in _int_malloc()
4085 victim_index = smallbin_index (size); in _int_malloc()
4091 victim_index = largebin_index (size); in _int_malloc()
4099 size |= PREV_INUSE; in _int_malloc()
4102 if ((unsigned long) (size) in _int_malloc()
4115 while ((unsigned long) size < chunksize_nomask (fwd)) in _int_malloc()
4121 if ((unsigned long) size in _int_malloc()
4189 while (((unsigned long) (size = chunksize (victim)) < in _int_malloc()
4200 remainder_size = size - nb; in _int_malloc()
4206 set_inuse_bit_at_offset (victim, size); in _int_malloc()
4295 size = chunksize (victim); in _int_malloc()
4298 assert ((unsigned long) (size) >= (unsigned long) (nb)); in _int_malloc()
4300 remainder_size = size - nb; in _int_malloc()
4308 set_inuse_bit_at_offset (victim, size); in _int_malloc()
4366 size = chunksize (victim); in _int_malloc()
4368 if (__glibc_unlikely (size > av->system_mem)) in _int_malloc()
4371 if ((unsigned long) (size) >= (unsigned long) (nb + MINSIZE)) in _int_malloc()
4373 remainder_size = size - nb; in _int_malloc()
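
The walk at lines 4115-4121 inserts a chunk into a large bin, which is kept sorted largest-first: skip forward past bigger chunks, then link in front of the first chunk that is not larger. A standalone sketch over a simplified chunk (the real code additionally maintains fd_nextsize/bk_nextsize skip links, omitted here):

    #include <stddef.h>

    struct chunk_sk { size_t size; struct chunk_sk *fd, *bk; };

    /* bin is the head of a circular doubly linked list; when empty,
       bin->fd == bin->bk == bin.  */
    static void
    largebin_insert_sketch (struct chunk_sk *bin, struct chunk_sk *victim)
    {
      struct chunk_sk *fwd = bin->fd;
      while (fwd != bin && victim->size < fwd->size)
        fwd = fwd->fd;                   /* skip larger chunks */

      /* Link victim immediately before fwd.  */
      victim->fd = fwd;
      victim->bk = fwd->bk;
      fwd->bk->fd = victim;
      fwd->bk = victim;
    }
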
4418 INTERNAL_SIZE_T size; /* its size */ in _int_free() local
4427 size = chunksize (p); in _int_free()
4433 if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0) in _int_free()
4438 if (__glibc_unlikely (size < MINSIZE || !aligned_OK (size))) in _int_free()
4445 size_t tc_idx = csize2tidx (size); in _int_free()
4489 if ((unsigned long)(size) <= (unsigned long)(get_max_fast ()) in _int_free()
4496 && (chunk_at_offset(p, size) != av->top) in _int_free()
4500 if (__builtin_expect (chunksize_nomask (chunk_at_offset (p, size)) in _int_free()
4502 || __builtin_expect (chunksize (chunk_at_offset (p, size)) in _int_free()
4512 fail = (chunksize_nomask (chunk_at_offset (p, size)) <= CHUNK_HDR_SZ in _int_free()
4513 || chunksize (chunk_at_offset (p, size)) >= av->system_mem); in _int_free()
4521 free_perturb (chunk2mem(p), size - CHUNK_HDR_SZ); in _int_free()
4524 unsigned int idx = fastbin_index(size); in _int_free()
4574 nextchunk = chunk_at_offset(p, size); in _int_free()
4594 free_perturb (chunk2mem(p), size - CHUNK_HDR_SZ); in _int_free()
4599 size += prevsize; in _int_free()
4613 size += nextsize; in _int_free()
4629 if (!in_smallbin_range(size)) in _int_free()
4637 set_head(p, size | PREV_INUSE); in _int_free()
4638 set_foot(p, size); in _int_free()
4649 size += nextsize; in _int_free()
4650 set_head(p, size | PREV_INUSE); in _int_free()
4668 if ((unsigned long)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD) { in _int_free()
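
The first security check in _int_free() (line 4433) rejects pointers for which p + size would wrap past the top of the address space, by comparing p against -size, the highest address at which a chunk of that size could still fit. A sketch of that test:

    #include <stddef.h>
    #include <stdint.h>

    /* (uintptr_t) -size is UINTPTR_MAX - size + 1, so any p above it
       makes p + size overflow; such a p cannot start a valid chunk.  */
    static int
    chunk_would_wrap (const void *p, size_t size)
    {
      return (uintptr_t) p > (uintptr_t) -size;
    }
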
4721 INTERNAL_SIZE_T size; in malloc_consolidate() local
4758 size = chunksize (p); in malloc_consolidate()
4759 nextchunk = chunk_at_offset(p, size); in malloc_consolidate()
4764 size += prevsize; in malloc_consolidate()
4775 size += nextsize; in malloc_consolidate()
4784 if (!in_smallbin_range (size)) { in malloc_consolidate()
4789 set_head(p, size | PREV_INUSE); in malloc_consolidate()
4792 set_foot(p, size); in malloc_consolidate()
4796 size += nextsize; in malloc_consolidate()
4797 set_head(p, size | PREV_INUSE); in malloc_consolidate()
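
The set_head/set_foot pair (lines 4789-4792) maintains the boundary tags that make the coalescing at lines 4758-4775 possible: a free chunk's size sits at its head and is echoed at its foot, which overlays the prev_size field of the following chunk, so freeing that neighbour can walk backwards and merge. A sketch over a raw size_t view of memory (illustrative layout; glibc's malloc_chunk has more fields):

    #include <stddef.h>

    #define PREV_INUSE_SK ((size_t) 0x1)

    /* Write the boundary tags for a free chunk of `size` bytes starting
       at `chunk`; the foot word lands in the next chunk's prev_size
       slot, mirroring set_head()/set_foot() above.  */
    static void
    set_boundary_tags (void *chunk, size_t size)
    {
      *(size_t *) chunk = size | PREV_INUSE_SK;          /* head word */
      *(size_t *) ((char *) chunk + size) = size;        /* foot word */
    }
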
4947 INTERNAL_SIZE_T size; in _int_memalign() local
5013 size = chunksize (p); in _int_memalign()
5014 if ((unsigned long) (size) > (unsigned long) (nb + MINSIZE)) in _int_memalign()
5016 remainder_size = size - nb; in _int_memalign()
5052 INTERNAL_SIZE_T size = chunksize (p); in mtrim() local
5054 if (size > psm1 + sizeof (struct malloc_chunk)) in mtrim()
5063 assert ((char *) p + size > paligned_mem); in mtrim()
5066 size -= paligned_mem - (char *) p; in mtrim()
5068 if (size > psm1) in mtrim()
5073 memset (paligned_mem, 0x89, size & ~psm1); in mtrim()
5075 __madvise (paligned_mem, size & ~psm1, MADV_DONTNEED); in mtrim()
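
mtrim() (lines 5052-5075) returns the page-aligned interior of large free chunks to the kernel: advance the start to a page boundary, shrink the length accordingly, and MADV_DONTNEED only whole pages (size & ~psm1, where psm1 is pagesize - 1). A sketch under the same precondition as line 5054, i.e. the chunk is comfortably larger than a page:

    #define _DEFAULT_SOURCE
    #include <stddef.h>
    #include <stdint.h>
    #include <sys/mman.h>

    /* The caller must guarantee size exceeds psm1 plus some header slack,
       as line 5054 checks, so the subtraction cannot underflow.  */
    static void
    trim_free_run (char *p, size_t size, size_t psm1)
    {
      char *aligned = (char *) (((uintptr_t) p + psm1) & ~(uintptr_t) psm1);
      size -= (size_t) (aligned - p);     /* drop the unaligned head */
      if (size > psm1)                    /* at least one whole page left */
        madvise (aligned, size & ~psm1, MADV_DONTNEED);
    }
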
5670 __posix_memalign (void **memptr, size_t alignment, size_t size) in __posix_memalign() argument
5686 mem = _mid_memalign (alignment, size, address); in __posix_memalign()
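
__posix_memalign() above backs the standard posix_memalign() entry point, which, unlike malloc, reports failure through its return value and stores into *memptr only on success. Minimal usage:

    #include <stdio.h>
    #include <stdlib.h>

    int
    main (void)
    {
      void *p = NULL;
      /* alignment must be a power of two and a multiple of sizeof(void *) */
      int err = posix_memalign (&p, 64, 1024);
      if (err == 0)
        {
          printf ("64-byte aligned block at %p\n", p);
          free (p);
        }
      return err;
    }
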
5821 heap_size += heap->size; in weak_alias()