Lines Matching refs:vm_mm
141 struct mm_struct *mm = vma->vm_mm; in move_ptes()
177 flush_tlb_batched_pending(vma->vm_mm); in move_ptes()
229 struct mm_struct *mm = vma->vm_mm; in move_normal_pmd()
264 old_ptl = pmd_lock(vma->vm_mm, old_pmd); in move_normal_pmd()
297 struct mm_struct *mm = vma->vm_mm; in move_normal_pud()
313 old_ptl = pud_lock(vma->vm_mm, old_pud); in move_normal_pud()
346 struct mm_struct *mm = vma->vm_mm; in move_huge_pud()
360 old_ptl = pud_lock(vma->vm_mm, old_pud); in move_huge_pud()
501 mmu_notifier_range_init(&range, MMU_NOTIFY_UNMAP, 0, vma, vma->vm_mm, in move_page_tables()
513 old_pud = get_old_pud(vma->vm_mm, old_addr); in move_page_tables()
516 new_pud = alloc_new_pud(vma->vm_mm, vma, new_addr); in move_page_tables()
534 old_pmd = get_old_pmd(vma->vm_mm, old_addr); in move_page_tables()
537 new_pmd = alloc_new_pmd(vma->vm_mm, vma, new_addr); in move_page_tables()
560 if (pte_alloc(new_vma->vm_mm, new_pmd)) in move_page_tables()
578 struct mm_struct *mm = vma->vm_mm; in move_vma()
874 if (find_vma_intersection(vma->vm_mm, vma->vm_end, end)) in vma_expandable()
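All of the hits above share one pattern: the mremap page-table movers pull the owning address space out of the VMA (struct mm_struct *mm = vma->vm_mm) and use that mm for page-table locking, allocation, and TLB maintenance while an entry is relocated from old_addr to new_addr. The following is a minimal sketch of that pattern, loosely modeled on move_normal_pmd(); the function name move_pmd_sketch is made up for illustration, the error/validation checks of the real code are omitted, and the caller is assumed to already hold the locks the real path takes (mmap_lock and the relevant rmap locks). It is not the in-tree implementation.

#include <linux/mm.h>
#include <asm/pgalloc.h>
#include <asm/tlbflush.h>

/*
 * Sketch only: relocate one PMD-level page table from old_pmd to new_pmd.
 * Everything is keyed off the VMA's mm, exactly as the vm_mm references
 * listed above do.
 */
static bool move_pmd_sketch(struct vm_area_struct *vma, unsigned long old_addr,
			    unsigned long new_addr, pmd_t *old_pmd,
			    pmd_t *new_pmd)
{
	struct mm_struct *mm = vma->vm_mm;	/* the address space being rearranged */
	spinlock_t *old_ptl, *new_ptl;
	pmd_t pmd;

	/* Lock the source entry, then the destination (nested) if distinct. */
	old_ptl = pmd_lock(mm, old_pmd);
	new_ptl = pmd_lockptr(mm, new_pmd);
	if (new_ptl != old_ptl)
		spin_lock_nested(new_ptl, SINGLE_DEPTH_NESTING);

	/* Detach the page table from the old slot... */
	pmd = *old_pmd;
	pmd_clear(old_pmd);

	/* ...and hang it under the new slot of the same mm. */
	pmd_populate(mm, new_pmd, pmd_pgtable(pmd));

	/* Stale translations for the old range must not survive the move. */
	flush_tlb_range(vma, old_addr, old_addr + PMD_SIZE);

	if (new_ptl != old_ptl)
		spin_unlock(new_ptl);
	spin_unlock(old_ptl);
	return true;
}

The same mm-from-vma pattern explains the remaining references: get_old_pud()/alloc_new_pud() and get_old_pmd()/alloc_new_pmd() in move_page_tables() walk and extend vma->vm_mm's page tables, pte_alloc() in the fallback path allocates a PTE page for new_vma->vm_mm, and flush_tlb_batched_pending() in move_ptes() drains any deferred TLB flushes for that mm before entries are moved.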