
Searched refs:vmi (Results 1 – 25 of 47) sorted by relevance

/linux-6.6.21/mm/
internal.h:1063 static inline void vma_iter_config(struct vma_iterator *vmi, in vma_iter_config() argument
1066 MAS_BUG_ON(&vmi->mas, vmi->mas.node != MAS_START && in vma_iter_config()
1067 (vmi->mas.index > index || vmi->mas.last < index)); in vma_iter_config()
1068 __mas_set_range(&vmi->mas, index, last - 1); in vma_iter_config()
1074 static inline int vma_iter_prealloc(struct vma_iterator *vmi, in vma_iter_prealloc() argument
1077 return mas_preallocate(&vmi->mas, vma, GFP_KERNEL); in vma_iter_prealloc()
1080 static inline void vma_iter_clear(struct vma_iterator *vmi) in vma_iter_clear() argument
1082 mas_store_prealloc(&vmi->mas, NULL); in vma_iter_clear()
1085 static inline int vma_iter_clear_gfp(struct vma_iterator *vmi, in vma_iter_clear_gfp() argument
1088 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
[all …]
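
A minimal sketch of how the three mm-internal helpers above are sequenced when a range is erased from the maple tree, mirroring delete_vma_from_mm() in mm/nommu.c further down this listing. These helpers live in mm/internal.h and are only usable inside mm/, so the helper name and placement below are hypothetical.

/* Hypothetical helper; would sit inside mm/, next to the callers above. */
#include <linux/mm.h>
#include "internal.h"

static void erase_vma_range(struct vm_area_struct *vma)
{
        VMA_ITERATOR(vmi, vma->vm_mm, vma->vm_start);

        /* Constrain the iterator to exactly [vm_start, vm_end). */
        vma_iter_config(&vmi, vma->vm_start, vma->vm_end);

        /* Reserve maple-tree nodes so the store below cannot fail. */
        if (vma_iter_prealloc(&vmi, vma))
                return;         /* -ENOMEM: tree left untouched */

        /* Store NULL over the configured range, consuming the prealloc. */
        vma_iter_clear(&vmi);
}
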
mmap.c:151 static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi, in vma_prev_limit() argument
154 return mas_prev(&vmi->mas, min); in vma_prev_limit()
176 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *brkvma,
186 struct vma_iterator vmi; in SYSCALL_DEFINE1() local
229 vma_iter_init(&vmi, mm, newbrk); in SYSCALL_DEFINE1()
230 brkvma = vma_find(&vmi, oldbrk); in SYSCALL_DEFINE1()
239 if (do_vma_munmap(&vmi, brkvma, newbrk, oldbrk, &uf, true)) in SYSCALL_DEFINE1()
252 vma_iter_init(&vmi, mm, oldbrk); in SYSCALL_DEFINE1()
253 next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap); in SYSCALL_DEFINE1()
257 brkvma = vma_prev_limit(&vmi, mm->start_brk); in SYSCALL_DEFINE1()
[all …]
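
For context on the vma_iter_init()/vma_find() pairing seen in the brk() path above, a hedged sketch follows; the helper name and the limit parameter are illustrative, not from the tree, and the caller is assumed to hold mmap_lock.

#include <linux/mm.h>
#include <linux/mmap_lock.h>

/* Illustrative only: reposition an iterator at addr and return the first
 * VMA intersecting [addr, limit), or NULL if no mapping exists below limit. */
static struct vm_area_struct *find_vma_below(struct vma_iterator *vmi,
                                             struct mm_struct *mm,
                                             unsigned long addr,
                                             unsigned long limit)
{
        mmap_assert_locked(mm);         /* brk() holds the write lock here */
        vma_iter_init(vmi, mm, addr);   /* restart the walk at addr */
        return vma_find(vmi, limit);
}
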
mlock.c:412 static int mlock_fixup(struct vma_iterator *vmi, struct vm_area_struct *vma, in mlock_fixup() argument
429 *prev = vma_merge(vmi, mm, *prev, start, end, newflags, in mlock_fixup()
438 ret = split_vma(vmi, vma, start, 1); in mlock_fixup()
444 ret = split_vma(vmi, vma, end, 0); in mlock_fixup()
482 VMA_ITERATOR(vmi, current->mm, start); in apply_vma_lock_flags()
491 vma = vma_iter_load(&vmi); in apply_vma_lock_flags()
495 prev = vma_prev(&vmi); in apply_vma_lock_flags()
501 for_each_vma_range(vmi, vma, end) { in apply_vma_lock_flags()
514 error = mlock_fixup(&vmi, vma, &prev, nstart, tmp, newflags); in apply_vma_lock_flags()
517 tmp = vma_iter_end(&vmi); in apply_vma_lock_flags()
[all …]
nommu.c:584 VMA_ITERATOR(vmi, vma->vm_mm, vma->vm_start); in delete_vma_from_mm()
586 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in delete_vma_from_mm()
587 if (vma_iter_prealloc(&vmi, vma)) { in delete_vma_from_mm()
595 vma_iter_clear(&vmi); in delete_vma_from_mm()
628 VMA_ITERATOR(vmi, mm, addr); in find_vma()
630 return vma_iter_load(&vmi); in find_vma()
675 VMA_ITERATOR(vmi, mm, addr); in find_vma_exact()
677 vma = vma_iter_load(&vmi); in find_vma_exact()
1029 VMA_ITERATOR(vmi, current->mm, 0); in do_mmap()
1199 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in do_mmap()
[all …]
mprotect.c:575 mprotect_fixup(struct vma_iterator *vmi, struct mmu_gather *tlb, in mprotect_fixup() argument
632 *pprev = vma_merge(vmi, mm, *pprev, start, end, newflags, in mprotect_fixup()
644 error = split_vma(vmi, vma, start, 1); in mprotect_fixup()
650 error = split_vma(vmi, vma, end, 0); in mprotect_fixup()
700 struct vma_iterator vmi; in do_mprotect_pkey() local
732 vma_iter_init(&vmi, current->mm, start); in do_mprotect_pkey()
733 vma = vma_find(&vmi, end); in do_mprotect_pkey()
756 prev = vma_prev(&vmi); in do_mprotect_pkey()
763 for_each_vma_range(vmi, vma, end) { in do_mprotect_pkey()
819 error = mprotect_fixup(&vmi, &tlb, vma, &prev, nstart, tmp, newflags); in do_mprotect_pkey()
[all …]
debug.c:272 void vma_iter_dump_tree(const struct vma_iterator *vmi) in vma_iter_dump_tree() argument
275 mas_dump(&vmi->mas); in vma_iter_dump_tree()
276 mt_dump(vmi->mas.tree, mt_dump_hex); in vma_iter_dump_tree()
mremap.c:600 struct vma_iterator vmi; in move_vma() local
717 vma_iter_init(&vmi, mm, old_addr); in move_vma()
718 if (do_vmi_munmap(&vmi, mm, old_addr, old_len, uf_unmap, false) < 0) { in move_vma()
734 vma = vma_prev(&vmi); in move_vma()
739 vma = vma_next(&vmi); in move_vma()
1004 VMA_ITERATOR(vmi, mm, addr + new_len); in SYSCALL_DEFINE5()
1011 ret = do_vmi_munmap(&vmi, mm, addr + new_len, old_len - new_len, in SYSCALL_DEFINE5()
1039 VMA_ITERATOR(vmi, mm, extension_start); in SYSCALL_DEFINE5()
1056 vma = vma_merge(&vmi, mm, vma, extension_start, in SYSCALL_DEFINE5()
mempolicy.c:388 VMA_ITERATOR(vmi, mm, 0); in mpol_rebind_mm()
391 for_each_vma(vmi, vma) { in mpol_rebind_mm()
814 static int mbind_range(struct vma_iterator *vmi, struct vm_area_struct *vma, in mbind_range() argument
837 merged = vma_merge(vmi, vma->vm_mm, *prev, vmstart, vmend, vma->vm_flags, in mbind_range()
846 err = split_vma(vmi, vma, vmstart, 1); in mbind_range()
852 err = split_vma(vmi, vma, vmend, 0); in mbind_range()
1222 VMA_ITERATOR(vmi, current->mm, start); in new_folio()
1225 for_each_vma(vmi, vma) { in new_folio()
1271 struct vma_iterator vmi; in do_mbind() local
1345 vma_iter_init(&vmi, mm, start); in do_mbind()
[all …]
/linux-6.6.21/arch/xtensa/kernel/
syscall.c:61 struct vma_iterator vmi; in arch_get_unmapped_area() local
83 vma_iter_init(&vmi, current->mm, addr); in arch_get_unmapped_area()
84 for_each_vma(vmi, vmm) { in arch_get_unmapped_area()
/linux-6.6.21/fs/proc/
task_nommu.c:23 VMA_ITERATOR(vmi, mm, 0); in task_mem()
29 for_each_vma(vmi, vma) { in task_mem()
83 VMA_ITERATOR(vmi, mm, 0); in task_vsize()
88 for_each_vma(vmi, vma) in task_vsize()
98 VMA_ITERATOR(vmi, mm, 0); in task_statm()
104 for_each_vma(vmi, vma) { in task_statm()
/linux-6.6.21/fs/
userfaultfd.c:654 VMA_ITERATOR(vmi, mm, 0); in userfaultfd_event_wait_completion()
658 for_each_vma(vmi, vma) { in userfaultfd_event_wait_completion()
899 VMA_ITERATOR(vmi, mm, 0); in userfaultfd_release()
916 for_each_vma(vmi, vma) { in userfaultfd_release()
925 prev = vma_merge(&vmi, mm, prev, vma->vm_start, vma->vm_end, in userfaultfd_release()
1327 struct vma_iterator vmi; in userfaultfd_register() local
1372 vma_iter_init(&vmi, mm, start); in userfaultfd_register()
1373 vma = vma_find(&vmi, end); in userfaultfd_register()
1451 } for_each_vma_range(vmi, cur, end); in userfaultfd_register()
1454 vma_iter_set(&vmi, start); in userfaultfd_register()
[all …]
exec.c:691 VMA_ITERATOR(vmi, mm, new_start); in shift_arg_pages()
701 if (vma != vma_next(&vmi)) in shift_arg_pages()
704 vma_iter_prev_range(&vmi); in shift_arg_pages()
708 if (vma_expand(&vmi, vma, new_start, old_end, vma->vm_pgoff, NULL)) in shift_arg_pages()
721 next = vma_next(&vmi); in shift_arg_pages()
740 vma_prev(&vmi); in shift_arg_pages()
742 return vma_shrink(&vmi, vma, new_start, new_end, vma->vm_pgoff); in shift_arg_pages()
764 struct vma_iterator vmi; in setup_arg_pages() local
819 vma_iter_init(&vmi, mm, vma->vm_start); in setup_arg_pages()
822 ret = mprotect_fixup(&vmi, &tlb, vma, &prev, vma->vm_start, vma->vm_end, in setup_arg_pages()
/linux-6.6.21/include/linux/
mm.h:941 struct vm_area_struct *vma_find(struct vma_iterator *vmi, unsigned long max) in vma_find() argument
943 return mas_find(&vmi->mas, max - 1); in vma_find()
946 static inline struct vm_area_struct *vma_next(struct vma_iterator *vmi) in vma_next() argument
952 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
956 struct vm_area_struct *vma_iter_next_range(struct vma_iterator *vmi) in vma_iter_next_range() argument
958 return mas_next_range(&vmi->mas, ULONG_MAX); in vma_iter_next_range()
962 static inline struct vm_area_struct *vma_prev(struct vma_iterator *vmi) in vma_prev() argument
964 return mas_prev(&vmi->mas, 0); in vma_prev()
968 struct vm_area_struct *vma_iter_prev_range(struct vma_iterator *vmi) in vma_iter_prev_range() argument
970 return mas_prev_range(&vmi->mas, 0); in vma_iter_prev_range()
[all …]
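
The accessors above are normally driven through the iteration macros built on them. A small sketch of the common range walk follows; the helper and the pr_info() line are illustrative, not part of mm.h, and the caller is assumed to be able to take mmap_lock for read.

#include <linux/mm.h>
#include <linux/mmap_lock.h>
#include <linux/printk.h>

/* Illustrative helper: walk only the VMAs intersecting [start, end). */
static void dump_vma_range(struct mm_struct *mm, unsigned long start,
                           unsigned long end)
{
        VMA_ITERATOR(vmi, mm, start);
        struct vm_area_struct *vma;

        mmap_read_lock(mm);
        for_each_vma_range(vmi, vma, end)       /* built on vma_find() above */
                pr_info("vma [%#lx, %#lx) flags %#lx\n",
                        vma->vm_start, vma->vm_end, vma->vm_flags);
        mmap_read_unlock(mm);
}
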
/linux-6.6.21/arch/powerpc/mm/book3s32/
tlb.c:84 VMA_ITERATOR(vmi, mm, 0); in hash__flush_tlb_mm()
92 for_each_vma(vmi, mp) in hash__flush_tlb_mm()
/linux-6.6.21/arch/x86/entry/vdso/
vma.c:115 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
118 for_each_vma(vmi, vma) { in vdso_join_timens()
334 VMA_ITERATOR(vmi, mm, 0); in map_vdso_once()
344 for_each_vma(vmi, vma) { in map_vdso_once()
/linux-6.6.21/arch/loongarch/kernel/
vdso.c:138 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
141 for_each_vma(vmi, vma) { in vdso_join_timens()
/linux-6.6.21/arch/s390/kernel/
vdso.c:57 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
61 for_each_vma(vmi, vma) { in vdso_join_timens()
/linux-6.6.21/arch/riscv/kernel/
vdso.c:112 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
116 for_each_vma(vmi, vma) { in vdso_join_timens()
/linux-6.6.21/arch/powerpc/mm/book3s64/
subpage_prot.c:155 VMA_ITERATOR(vmi, mm, addr); in subpage_mark_vma_nohuge()
161 for_each_vma_range(vmi, vma, addr + len) { in subpage_mark_vma_nohuge()
/linux-6.6.21/arch/um/kernel/
tlb.c:588 VMA_ITERATOR(vmi, mm, 0); in flush_tlb_mm()
590 for_each_vma(vmi, vma) in flush_tlb_mm()
598 VMA_ITERATOR(vmi, mm, 0); in force_flush_all()
601 for_each_vma(vmi, vma) in force_flush_all()
/linux-6.6.21/drivers/misc/cxl/
fault.c:297 VMA_ITERATOR(vmi, mm, 0); in cxl_prefault_vma()
302 for_each_vma(vmi, vma) { in cxl_prefault_vma()
/linux-6.6.21/arch/ia64/kernel/
setup.c:939 pal_vm_info_2_u_t vmi; in cpu_init() local
1045 if (ia64_pal_vm_summary(NULL, &vmi) == 0) { in cpu_init()
1046 max_ctx = (1U << (vmi.pal_vm_info_2_s.rid_size - 3)) - 1; in cpu_init()
1047 setup_ptcg_sem(vmi.pal_vm_info_2_s.max_purges, NPTCG_FROM_PAL); in cpu_init()
/linux-6.6.21/arch/parisc/kernel/
cache.c:712 VMA_ITERATOR(vmi, mm, 0); in mm_total_size()
714 for_each_vma(vmi, vma) { in mm_total_size()
725 VMA_ITERATOR(vmi, mm, 0); in flush_cache_mm()
745 for_each_vma(vmi, vma) in flush_cache_mm()
/linux-6.6.21/arch/powerpc/kernel/
vdso.c:118 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
122 for_each_vma(vmi, vma) { in vdso_join_timens()
/linux-6.6.21/arch/arm64/kernel/
vdso.c:136 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
140 for_each_vma(vmi, vma) { in vdso_join_timens()
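
Most of the arch and driver hits above (the vdso_join_timens() variants, hash__flush_tlb_mm(), cxl_prefault_vma(), and so on) share one idiom: an iterator declared at address 0 plus for_each_vma() to visit every VMA of the mm in order. A self-contained sketch of that idiom follows; the counting helper is hypothetical.

#include <linux/mm.h>
#include <linux/mmap_lock.h>

/* Hypothetical helper: count the VMAs of an address space. */
static unsigned long count_mm_vmas(struct mm_struct *mm)
{
        VMA_ITERATOR(vmi, mm, 0);       /* start the walk at address 0 */
        struct vm_area_struct *vma;
        unsigned long nr = 0;

        mmap_read_lock(mm);
        for_each_vma(vmi, vma)          /* visits every VMA, lowest first */
                nr++;
        mmap_read_unlock(mm);

        return nr;
}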
