Lines matching refs: walk

24 unsigned long end, struct mm_walk *walk) in walk_pte_range_inner() argument
26 const struct mm_walk_ops *ops = walk->ops; in walk_pte_range_inner()
30 err = ops->pte_entry(pte, addr, addr + PAGE_SIZE, walk); in walk_pte_range_inner()
42 struct mm_walk *walk) in walk_pte_range() argument
48 if (walk->no_vma) { in walk_pte_range()
50 err = walk_pte_range_inner(pte, addr, end, walk); in walk_pte_range()
53 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in walk_pte_range()
54 err = walk_pte_range_inner(pte, addr, end, walk); in walk_pte_range()
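
walk_pte_range() and walk_pte_range_inner() above form the leaf of the walk: the outer function maps the PTE page and, unless walk->no_vma is set, takes the PTE lock (line 53), while the inner loop invokes ops->pte_entry once per entry over addr .. addr + PAGE_SIZE (line 30). A minimal sketch of such a callback, assuming a counter is passed through walk->private; the callback name is hypothetical:

static int count_present_pte(pte_t *pte, unsigned long addr,
			     unsigned long next, struct mm_walk *walk)
{
	unsigned long *nr_present = walk->private;

	/* Runs with the PTE lock held unless walk->no_vma was set. */
	if (pte_present(*pte))
		(*nr_present)++;

	return 0;	/* any non-zero return aborts the walk and is propagated */
}
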
63 unsigned long end, struct mm_walk *walk, int pdshift) in walk_hugepd_range() argument
66 const struct mm_walk_ops *ops = walk->ops; in walk_hugepd_range()
79 spin_lock(&walk->mm->page_table_lock); in walk_hugepd_range()
81 err = ops->pte_entry(pte, addr, addr + page_size, walk); in walk_hugepd_range()
82 spin_unlock(&walk->mm->page_table_lock); in walk_hugepd_range()
94 unsigned long end, struct mm_walk *walk, int pdshift) in walk_hugepd_range() argument
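
walk_hugepd_range() covers architectures that use hugepage directories: it reuses the ->pte_entry callback but steps by the huge page size and wraps each call in mm->page_table_lock (lines 79-82); the second definition at line 94 is the empty stub for architectures without hugepd support. Since the same callback can therefore see entries larger than PAGE_SIZE, the mapping size is best derived from the range the callback is handed. A sketch with a hypothetical name:

static int sized_pte_entry(pte_t *pte, unsigned long addr,
			   unsigned long next, struct mm_walk *walk)
{
	unsigned long *bytes_seen = walk->private;

	/*
	 * PAGE_SIZE when called from walk_pte_range_inner(), or the
	 * hugepd page size when called from walk_hugepd_range().
	 */
	*bytes_seen += next - addr;

	return 0;
}
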
101 struct mm_walk *walk) in walk_pmd_range() argument
105 const struct mm_walk_ops *ops = walk->ops; in walk_pmd_range()
115 err = ops->pte_hole(addr, next, depth, walk); in walk_pmd_range()
121 walk->action = ACTION_SUBTREE; in walk_pmd_range()
128 err = ops->pmd_entry(pmd, addr, next, walk); in walk_pmd_range()
132 if (walk->action == ACTION_AGAIN) in walk_pmd_range()
139 if ((!walk->vma && (pmd_leaf(*pmd) || !pmd_present(*pmd))) || in walk_pmd_range()
140 walk->action == ACTION_CONTINUE || in walk_pmd_range()
144 if (walk->vma) { in walk_pmd_range()
145 split_huge_pmd(walk->vma, pmd, addr); in walk_pmd_range()
151 err = walk_hugepd_range((hugepd_t *)pmd, addr, next, walk, PMD_SHIFT); in walk_pmd_range()
153 err = walk_pte_range(pmd, addr, next, walk); in walk_pmd_range()
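
walk_pmd_range() shows the walk->action protocol: the action is reset to ACTION_SUBTREE before each ->pmd_entry call (line 121); the callback may set ACTION_AGAIN to re-examine the entry (line 132) or ACTION_CONTINUE to skip the descent (line 140), and a transparent huge PMD that is neither handled nor skipped gets split (line 145) before walk_pte_range() runs. walk_pud_range() below follows the same pattern one level up. A sketch of a pmd_entry callback for a VMA-based walk that handles huge PMDs itself and lets the core walker descend through ordinary ones; the name and policy are hypothetical:

static int my_pmd_entry(pmd_t *pmd, unsigned long addr,
			unsigned long next, struct mm_walk *walk)
{
	spinlock_t *ptl;

	ptl = pmd_trans_huge_lock(pmd, walk->vma);
	if (ptl) {
		/* Handle the huge mapping here ... */
		spin_unlock(ptl);
		walk->action = ACTION_CONTINUE;	/* neither split nor descend */
		return 0;
	}

	/* Leave walk->action as ACTION_SUBTREE so the PTE level is walked. */
	return 0;
}
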
162 struct mm_walk *walk) in walk_pud_range() argument
166 const struct mm_walk_ops *ops = walk->ops; in walk_pud_range()
176 err = ops->pte_hole(addr, next, depth, walk); in walk_pud_range()
182 walk->action = ACTION_SUBTREE; in walk_pud_range()
185 err = ops->pud_entry(pud, addr, next, walk); in walk_pud_range()
189 if (walk->action == ACTION_AGAIN) in walk_pud_range()
192 if ((!walk->vma && (pud_leaf(*pud) || !pud_present(*pud))) || in walk_pud_range()
193 walk->action == ACTION_CONTINUE || in walk_pud_range()
197 if (walk->vma) in walk_pud_range()
198 split_huge_pud(walk->vma, pud, addr); in walk_pud_range()
203 err = walk_hugepd_range((hugepd_t *)pud, addr, next, walk, PUD_SHIFT); in walk_pud_range()
205 err = walk_pmd_range(pud, addr, next, walk); in walk_pud_range()
214 struct mm_walk *walk) in walk_p4d_range() argument
218 const struct mm_walk_ops *ops = walk->ops; in walk_p4d_range()
227 err = ops->pte_hole(addr, next, depth, walk); in walk_p4d_range()
233 err = ops->p4d_entry(p4d, addr, next, walk); in walk_p4d_range()
238 err = walk_hugepd_range((hugepd_t *)p4d, addr, next, walk, P4D_SHIFT); in walk_p4d_range()
240 err = walk_pud_range(p4d, addr, next, walk); in walk_p4d_range()
249 struct mm_walk *walk) in walk_pgd_range() argument
253 const struct mm_walk_ops *ops = walk->ops; in walk_pgd_range()
256 if (walk->pgd) in walk_pgd_range()
257 pgd = walk->pgd + pgd_index(addr); in walk_pgd_range()
259 pgd = pgd_offset(walk->mm, addr); in walk_pgd_range()
264 err = ops->pte_hole(addr, next, 0, walk); in walk_pgd_range()
270 err = ops->pgd_entry(pgd, addr, next, walk); in walk_pgd_range()
275 err = walk_hugepd_range((hugepd_t *)pgd, addr, next, walk, PGDIR_SHIFT); in walk_pgd_range()
277 err = walk_p4d_range(pgd, addr, next, walk); in walk_pgd_range()
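
At every level a non-present entry is reported through ->pte_hole rather than silently skipped (lines 115, 176, 227 and 264). The depth argument names the level at which the hole was found, 0 at the PGD level and increasing as the walk descends, while -1 means the level is not meaningful, as for the ranges outside any VMA reported at lines 454 and 459 or the hugetlb case at line 312. A sketch of a hole callback, name hypothetical:

static int report_hole(unsigned long addr, unsigned long next,
		       int depth, struct mm_walk *walk)
{
	pr_debug("unmapped %#lx-%#lx (depth %d)\n", addr, next, depth);
	return 0;
}
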
294 struct mm_walk *walk) in walk_hugetlb_range() argument
296 struct vm_area_struct *vma = walk->vma; in walk_hugetlb_range()
302 const struct mm_walk_ops *ops = walk->ops; in walk_hugetlb_range()
307 pte = huge_pte_offset(walk->mm, addr & hmask, sz); in walk_hugetlb_range()
310 err = ops->hugetlb_entry(pte, hmask, addr, next, walk); in walk_hugetlb_range()
312 err = ops->pte_hole(addr, next, -1, walk); in walk_hugetlb_range()
323 struct mm_walk *walk) in walk_hugetlb_range() argument
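
For hugetlb VMAs the walk never goes below the huge PTE: walk_hugetlb_range() looks each entry up with huge_pte_offset() (line 307) and hands it to ->hugetlb_entry, with hmask masking the address down to the start of the huge page; missing entries go to ->pte_hole with depth -1 (line 312), and the second definition at line 323 is the stub used when hugetlb is not configured. A sketch of a hugetlb_entry callback for a kernel of roughly this vintage; the name is hypothetical and locking of the huge PTE is left out:

static int my_hugetlb_entry(pte_t *pte, unsigned long hmask,
			    unsigned long addr, unsigned long next,
			    struct mm_walk *walk)
{
	pte_t entry = huge_ptep_get(pte);

	if (!pte_present(entry))
		return 0;

	/* One huge page, mapped at the hmask-aligned address. */
	pr_debug("huge mapping covering %#lx (hmask %#lx)\n",
		 addr & hmask, hmask);
	return 0;
}
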
337 struct mm_walk *walk) in walk_page_test() argument
339 struct vm_area_struct *vma = walk->vma; in walk_page_test()
340 const struct mm_walk_ops *ops = walk->ops; in walk_page_test()
343 return ops->test_walk(start, end, walk); in walk_page_test()
356 err = ops->pte_hole(start, end, -1, walk); in walk_page_test()
363 struct mm_walk *walk) in __walk_page_range() argument
366 struct vm_area_struct *vma = walk->vma; in __walk_page_range()
367 const struct mm_walk_ops *ops = walk->ops; in __walk_page_range()
370 err = ops->pre_vma(start, end, walk); in __walk_page_range()
377 err = walk_hugetlb_range(start, end, walk); in __walk_page_range()
379 err = walk_pgd_range(start, end, walk); in __walk_page_range()
382 ops->post_vma(walk); in __walk_page_range()
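
__walk_page_range() brackets each VMA with the optional ->pre_vma and ->post_vma hooks (lines 370 and 382) and dispatches to walk_hugetlb_range() or walk_pgd_range(). Before that, walk_page_test() lets ->test_walk veto a VMA: 0 walks it, a positive value skips it, and a negative value aborts the walk with that error; without a test_walk callback, PFNMAP ranges are skipped and reported as a hole at line 356. A sketch of a test_walk callback whose skip policy is purely illustrative:

static int skip_special_vmas(unsigned long start, unsigned long end,
			     struct mm_walk *walk)
{
	struct vm_area_struct *vma = walk->vma;

	/* 0 = walk this VMA, positive = skip it, negative = abort. */
	if (vma->vm_flags & (VM_PFNMAP | VM_MIXEDMAP))
		return 1;

	return 0;
}
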
434 struct mm_walk walk = { in walk_page_range() local
443 if (!walk.mm) in walk_page_range()
446 mmap_assert_locked(walk.mm); in walk_page_range()
448 vma = find_vma(walk.mm, start); in walk_page_range()
451 walk.vma = NULL; in walk_page_range()
454 err = ops->pte_hole(start, next, -1, &walk); in walk_page_range()
456 walk.vma = NULL; in walk_page_range()
459 err = ops->pte_hole(start, next, -1, &walk); in walk_page_range()
461 walk.vma = vma; in walk_page_range()
465 err = walk_page_test(start, next, &walk); in walk_page_range()
477 err = __walk_page_range(start, next, &walk); in walk_page_range()
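
walk_page_range() is the usual entry point: it asserts that mmap_lock is held (line 446), visits each VMA overlapping [start, end), reports the gaps between VMAs as holes with depth -1 (lines 454 and 459), applies walk_page_test(), and runs __walk_page_range() on each accepted VMA. A minimal sketch of a caller, reusing the hypothetical count_present_pte() callback from the first example; the ops and function names are likewise hypothetical:

static const struct mm_walk_ops count_ops = {
	.pte_entry	= count_present_pte,
};

static unsigned long count_present(struct mm_struct *mm,
				   unsigned long start, unsigned long end)
{
	unsigned long nr_present = 0;

	mmap_read_lock(mm);
	walk_page_range(mm, start, end, &count_ops, &nr_present);
	mmap_read_unlock(mm);

	return nr_present;
}
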
504 struct mm_walk walk = { in walk_page_range_novma() local
512 if (start >= end || !walk.mm) in walk_page_range_novma()
515 mmap_assert_write_locked(walk.mm); in walk_page_range_novma()
517 return walk_pgd_range(start, end, &walk); in walk_page_range_novma()
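
walk_page_range_novma() walks page tables that have no VMAs behind them, e.g. kernel mappings: it sets walk.no_vma, optionally takes an explicit pgd (consumed at lines 256-257), and requires the mmap lock held for write (line 515). A hedged sketch in the style of the kernel page table dumpers, assuming kernel-mapping start/end and a hypothetical ops/state pair supplied by the caller:

static int dump_kernel_range(unsigned long start, unsigned long end,
			     const struct mm_walk_ops *ops, void *state)
{
	int err;

	mmap_write_lock(&init_mm);
	err = walk_page_range_novma(&init_mm, start, end, ops,
				    NULL /* NULL: use init_mm's own pgd */,
				    state);
	mmap_write_unlock(&init_mm);

	return err;
}
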
523 struct mm_walk walk = { in walk_page_vma() local
531 if (!walk.mm) in walk_page_vma()
534 mmap_assert_locked(walk.mm); in walk_page_vma()
536 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
541 return __walk_page_range(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
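
walk_page_vma() is the single-VMA wrapper: it runs walk_page_test() and then __walk_page_range() over exactly vma->vm_start .. vma->vm_end, again under a caller-held mmap lock (line 534). A brief sketch reusing the hypothetical count_ops above:

static int count_vma(struct vm_area_struct *vma, unsigned long *nr_present)
{
	mmap_assert_locked(vma->vm_mm);	/* the caller holds mmap_lock */
	return walk_page_vma(vma, &count_ops, nr_present);
}
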
578 struct mm_walk walk = { in walk_page_mapping() local
603 walk.vma = vma; in walk_page_mapping()
604 walk.mm = vma->vm_mm; in walk_page_mapping()
606 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_mapping()
613 err = __walk_page_range(start_addr, end_addr, &walk); in walk_page_mapping()
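
walk_page_mapping() drives the walk from the file side: for a range of nr pages starting at first_index it visits every VMA in the address_space's i_mmap tree that overlaps the range, setting walk.vma and walk.mm per VMA (lines 603-604) before walk_page_test() and __walk_page_range() run (line 613). A hedged sketch, assuming the mapping's i_mmap lock is what the caller must hold and that the wrapper name is hypothetical:

static int walk_file_range(struct address_space *mapping,
			   pgoff_t first_index, pgoff_t nr,
			   const struct mm_walk_ops *ops, void *private)
{
	int err;

	i_mmap_lock_read(mapping);
	err = walk_page_mapping(mapping, first_index, nr, ops, private);
	i_mmap_unlock_read(mapping);

	return err;
}
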