/linux-5.19.10/mm/
D | userfaultfd.c
      23  struct vm_area_struct *find_dst_vma(struct mm_struct *dst_mm,           in find_dst_vma() argument
      33  dst_vma = find_vma(dst_mm, dst_start);                                   in find_dst_vma()
      58  int mfill_atomic_install_pte(struct mm_struct *dst_mm, pmd_t *dst_pmd,   in mfill_atomic_install_pte() argument
      95  dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);          in mfill_atomic_install_pte()
     130  inc_mm_counter(dst_mm, mm_counter(page));                                in mfill_atomic_install_pte()
     132  set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);                         in mfill_atomic_install_pte()
     142  static int mcopy_atomic_pte(struct mm_struct *dst_mm,                    in mcopy_atomic_pte() argument
     188  if (mem_cgroup_charge(page_folio(page), dst_mm, GFP_KERNEL))             in mcopy_atomic_pte()
     191  ret = mfill_atomic_install_pte(dst_mm, dst_pmd, dst_vma, dst_addr,       in mcopy_atomic_pte()
     202  static int mfill_zeropage_pte(struct mm_struct *dst_mm,                  in mfill_zeropage_pte() argument
     [all …]

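The userfaultfd.c hits above are the kernel side of the UFFDIO_COPY ioctl: mcopy_atomic_pte() allocates and charges a page against dst_mm, and mfill_atomic_install_pte() maps it at dst_addr with set_pte_at(). For orientation, here is a minimal user-space sketch of the request that ends up in these functions, modeled on the userfaultfd(2) man-page flow; error handling is trimmed, the helper names are ours, and on kernels with vm.unprivileged_userfaultfd=0 it needs privilege (or UFFD_USER_MODE_ONLY).

/* Minimal userfaultfd demo: the UFFDIO_COPY ioctl below is what reaches
 * mcopy_atomic()/mcopy_atomic_pte() in mm/userfaultfd.c.
 * Build with -pthread; error handling trimmed for brevity. */
#include <fcntl.h>
#include <linux/userfaultfd.h>
#include <poll.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>

static long page_size;

static void *fault_handler(void *arg)
{
    int uffd = (int)(long)arg;
    struct uffd_msg msg;
    char *src = mmap(NULL, page_size, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

    memset(src, 'A', page_size);            /* contents to inject */

    /* Block until the main thread faults on the registered range. */
    struct pollfd pfd = { .fd = uffd, .events = POLLIN };
    poll(&pfd, 1, -1);
    read(uffd, &msg, sizeof(msg));
    if (msg.event != UFFD_EVENT_PAGEFAULT)
        return NULL;

    /* Resolve the fault: the kernel copies 'src' into the faulting mm. */
    struct uffdio_copy copy = {
        .dst = msg.arg.pagefault.address & ~((unsigned long)page_size - 1),
        .src = (unsigned long)src,
        .len = page_size,
        .mode = 0,
    };
    ioctl(uffd, UFFDIO_COPY, &copy);
    return NULL;
}

int main(void)
{
    page_size = sysconf(_SC_PAGE_SIZE);

    int uffd = syscall(__NR_userfaultfd, O_CLOEXEC | O_NONBLOCK);
    struct uffdio_api api = { .api = UFFD_API, .features = 0 };
    ioctl(uffd, UFFDIO_API, &api);

    /* The region whose missing pages will be filled on demand. */
    char *region = mmap(NULL, page_size, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    struct uffdio_register reg = {
        .range = { .start = (unsigned long)region, .len = page_size },
        .mode = UFFDIO_REGISTER_MODE_MISSING,
    };
    ioctl(uffd, UFFDIO_REGISTER, &reg);

    pthread_t thr;
    pthread_create(&thr, NULL, fault_handler, (void *)(long)uffd);

    /* First touch faults; the handler thread installs the page. */
    printf("first byte: %c\n", region[0]);
    pthread_join(thr, NULL);
    return 0;
}

Running it prints "first byte: A": the read of region[0] blocks in the kernel until the UFFDIO_COPY issued by the second thread installs the page.
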
D | huge_memory.c
    1015  int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,   in copy_huge_pmd() argument
    1029  pgtable = pte_alloc_one(dst_mm);                                         in copy_huge_pmd()
    1033  dst_ptl = pmd_lock(dst_mm, dst_pmd);                                     in copy_huge_pmd()
    1055  add_mm_counter(dst_mm, MM_ANONPAGES, HPAGE_PMD_NR);                      in copy_huge_pmd()
    1056  mm_inc_nr_ptes(dst_mm);                                                  in copy_huge_pmd()
    1057  pgtable_trans_huge_deposit(dst_mm, dst_pmd, pgtable);                    in copy_huge_pmd()
    1060  set_pmd_at(dst_mm, addr, dst_pmd, pmd);                                  in copy_huge_pmd()
    1067  pte_free(dst_mm, pgtable);                                               in copy_huge_pmd()
    1081  mm_get_huge_zero_page(dst_mm);                                           in copy_huge_pmd()
    1092  pte_free(dst_mm, pgtable);                                               in copy_huge_pmd()
     [all …]

D | memory.c
     777  copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,  in copy_nonpresent_pte() argument
     791  if (unlikely(list_empty(&dst_mm->mmlist))) {                             in copy_nonpresent_pte()
     793  if (list_empty(&dst_mm->mmlist))                                         in copy_nonpresent_pte()
     794  list_add(&dst_mm->mmlist,                                                in copy_nonpresent_pte()
     875  set_pte_at(dst_mm, addr, dst_pte, pte);                                  in copy_nonpresent_pte()
     880  set_pte_at(dst_mm, addr, dst_pte, pte);                                  in copy_nonpresent_pte()
    1014  struct mm_struct *dst_mm = dst_vma->vm_mm;                               in copy_pte_range() local
    1028  dst_pte = pte_alloc_map_lock(dst_mm, dst_pmd, addr, &dst_ptl);           in copy_pte_range()
    1056  ret = copy_nonpresent_pte(dst_mm, src_mm,                                in copy_pte_range()
    1101  add_mm_rss_vec(dst_mm, rss);                                             in copy_pte_range()
     [all …]

D | hugetlb.c
    5883  int hugetlb_mcopy_atomic_pte(struct mm_struct *dst_mm,                   in hugetlb_mcopy_atomic_pte() argument
    6003  ptl = huge_pte_lockptr(h, dst_mm, dst_pte);                              in hugetlb_mcopy_atomic_pte()
    6058  set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);                    in hugetlb_mcopy_atomic_pte()
    6062  hugetlb_count_add(pages_per_huge_page(h), dst_mm);                       in hugetlb_mcopy_atomic_pte()

D | shmem.c
    2342  int shmem_mfill_atomic_pte(struct mm_struct *dst_mm,                     in shmem_mfill_atomic_pte() argument
    2417  gfp & GFP_RECLAIM_MASK, dst_mm);                                         in shmem_mfill_atomic_pte()
    2421  ret = mfill_atomic_install_pte(dst_mm, dst_pmd, dst_vma, dst_addr,       in shmem_mfill_atomic_pte()

/linux-5.19.10/include/linux/
D | userfaultfd_k.h
      59  extern int mfill_atomic_install_pte(struct mm_struct *dst_mm, pmd_t *dst_pmd,
      64  extern ssize_t mcopy_atomic(struct mm_struct *dst_mm, unsigned long dst_start,
      67  extern ssize_t mfill_zeropage(struct mm_struct *dst_mm,
      71  extern ssize_t mcopy_continue(struct mm_struct *dst_mm, unsigned long dst_start,
      73  extern int mwriteprotect_range(struct mm_struct *dst_mm,
      76  extern void uffd_wp_range(struct mm_struct *dst_mm, struct vm_area_struct *vma,

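These declarations are the kernel-internal entry points behind the userfaultfd ioctls: mcopy_atomic() serves UFFDIO_COPY, mfill_zeropage() serves UFFDIO_ZEROPAGE, mcopy_continue() serves UFFDIO_CONTINUE, and mwriteprotect_range() serves UFFDIO_WRITEPROTECT. As a minimal sketch of the zero-page path (error handling omitted; on kernels with vm.unprivileged_userfaultfd=0 this needs privilege or UFFD_USER_MODE_ONLY), UFFDIO_ZEROPAGE can pre-populate a registered range without any fault ever being raised, which is the request that mfill_zeropage() handles:

/* Sketch: install zero pages over a registered range via UFFDIO_ZEROPAGE,
 * the ioctl served by mfill_zeropage() above. Error handling omitted. */
#include <fcntl.h>
#include <linux/userfaultfd.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>

int main(void)
{
    long psz = sysconf(_SC_PAGE_SIZE);
    int uffd = syscall(__NR_userfaultfd, O_CLOEXEC);

    struct uffdio_api api = { .api = UFFD_API };
    ioctl(uffd, UFFDIO_API, &api);

    char *region = mmap(NULL, 4 * psz, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    struct uffdio_register reg = {
        .range = { .start = (unsigned long)region, .len = 4 * psz },
        .mode = UFFDIO_REGISTER_MODE_MISSING,
    };
    ioctl(uffd, UFFDIO_REGISTER, &reg);

    /* Atomically install zero pages over the whole range; no fault, and
     * hence no fault-handling thread, is needed for this direction. */
    struct uffdio_zeropage zp = {
        .range = { .start = (unsigned long)region, .len = 4 * psz },
    };
    ioctl(uffd, UFFDIO_ZEROPAGE, &zp);

    printf("region[0] = %d\n", region[0]);   /* 0, and the read never faults */
    return 0;
}
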
D | shmem_fs.h
     144  extern int shmem_mfill_atomic_pte(struct mm_struct *dst_mm, pmd_t *dst_pmd,
     151  #define shmem_mfill_atomic_pte(dst_mm, dst_pmd, dst_vma, dst_addr, \     argument

D | huge_mm.h
      11  int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,
      15  int copy_huge_pud(struct mm_struct *dst_mm, struct mm_struct *src_mm,

D | hugetlb.h
     160  int hugetlb_mcopy_atomic_pte(struct mm_struct *dst_mm, pte_t *dst_pte,
     359  static inline int hugetlb_mcopy_atomic_pte(struct mm_struct *dst_mm,     in hugetlb_mcopy_atomic_pte() argument

/linux-5.19.10/drivers/gpu/drm/amd/amdgpu/
D | amdgpu_ttm.c
     309  struct amdgpu_res_cursor src_mm, dst_mm;                                 in amdgpu_ttm_copy_mem_to_mem() local
     319  amdgpu_res_first(dst->mem, dst->offset, size, &dst_mm);                  in amdgpu_ttm_copy_mem_to_mem()
     327  cur_size = min3(src_mm.size, dst_mm.size, 256ULL << 20);                 in amdgpu_ttm_copy_mem_to_mem()
     335  r = amdgpu_ttm_map_buffer(dst->bo, dst->mem, &dst_mm,                    in amdgpu_ttm_copy_mem_to_mem()
     349  amdgpu_res_next(&dst_mm, cur_size);                                      in amdgpu_ttm_copy_mem_to_mem()

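In amdgpu_ttm_copy_mem_to_mem(), src_mm and dst_mm are amdgpu_res_cursor iterators over possibly fragmented source and destination resources; each iteration copies at most min3(src_mm.size, dst_mm.size, 256ULL << 20) bytes and then advances both cursors with amdgpu_res_next(). The following is a hypothetical user-space analogue of that windowed cursor loop, written only to illustrate the pattern; the cursor struct, WINDOW constant and helper names are made up, and none of this is amdgpu code.

/* Hypothetical analogue of the amdgpu_res_cursor loop: source and
 * destination are lists of fragments, and each step copies at most the
 * smaller of "bytes left in the current source fragment", "bytes left in
 * the current destination fragment" and a fixed window size. */
#include <stdio.h>
#include <string.h>

#define WINDOW 64u               /* stand-in for the 256ULL << 20 cap */

struct frag { char *buf; size_t len; };

struct cursor {
    struct frag *frags;
    size_t idx;                  /* current fragment */
    size_t off;                  /* offset inside current fragment */
};

/* Bytes usable from the current fragment without crossing a boundary
 * (plays the role of the cursor's ".size" in the amdgpu loop). */
static size_t cursor_size(const struct cursor *c)
{
    return c->frags[c->idx].len - c->off;
}

static char *cursor_ptr(const struct cursor *c)
{
    return c->frags[c->idx].buf + c->off;
}

/* Advance by n bytes, moving to the next fragment when one is used up. */
static void cursor_next(struct cursor *c, size_t n)
{
    c->off += n;
    if (c->off == c->frags[c->idx].len) {
        c->idx++;
        c->off = 0;
    }
}

static size_t min3(size_t a, size_t b, size_t c)
{
    size_t m = a < b ? a : b;
    return m < c ? m : c;
}

int main(void)
{
    char s0[100], s1[60], d0[40], d1[120];
    struct frag src_frags[] = { { s0, sizeof(s0) }, { s1, sizeof(s1) } };
    struct frag dst_frags[] = { { d0, sizeof(d0) }, { d1, sizeof(d1) } };
    struct cursor src_cur = { src_frags }, dst_cur = { dst_frags };
    size_t size = sizeof(s0) + sizeof(s1);

    memset(s0, 'x', sizeof(s0));
    memset(s1, 'y', sizeof(s1));

    while (size) {
        /* Mirrors: cur_size = min3(src_mm.size, dst_mm.size, 256ULL << 20); */
        size_t cur_size = min3(cursor_size(&src_cur),
                               cursor_size(&dst_cur), WINDOW);

        memcpy(cursor_ptr(&dst_cur), cursor_ptr(&src_cur), cur_size);

        cursor_next(&src_cur, cur_size);
        cursor_next(&dst_cur, cur_size);
        size -= cur_size;
    }
    printf("copied, d1[119] = %c\n", d1[119]);
    return 0;
}

Per the hits above, the real loop additionally maps each window through amdgpu_ttm_map_buffer() and issues the copy on the GPU rather than with memcpy().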