/linux-6.6.21/drivers/media/platform/nvidia/tegra-vde/ |
D | iommu.c |
  21   struct iova **iovap,   in tegra_vde_iommu_map()
  24   struct iova *iova;   in tegra_vde_iommu_map() local
  30   size = iova_align(&vde->iova, size);   in tegra_vde_iommu_map()
  31   shift = iova_shift(&vde->iova);   in tegra_vde_iommu_map()
  33   iova = alloc_iova(&vde->iova, size >> shift, end >> shift, true);   in tegra_vde_iommu_map()
  34   if (!iova)   in tegra_vde_iommu_map()
  37   addr = iova_dma_addr(&vde->iova, iova);   in tegra_vde_iommu_map()
  42   __free_iova(&vde->iova, iova);   in tegra_vde_iommu_map()
  46   *iovap = iova;   in tegra_vde_iommu_map()
  51   void tegra_vde_iommu_unmap(struct tegra_vde *vde, struct iova *iova)   in tegra_vde_iommu_unmap() argument
  [all …]
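The hits above trace the canonical iova allocator flow: align the size to the domain granule, convert bytes to page frames with the domain shift, allocate a pfn range, and derive the bus address from pfn_lo (with __free_iova() on the error path). A minimal standalone sketch of that arithmetic — the struct layouts and granule value are illustrative stand-ins, not the kernel's definitions:

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for the kernel's iova_domain/iova. */
struct iova_domain { unsigned long granule; };      /* e.g. 4 KiB */
struct iova        { unsigned long pfn_lo, pfn_hi; };

static unsigned long iova_shift(struct iova_domain *d)
{
    return __builtin_ctzl(d->granule);              /* log2 of the granule */
}

static unsigned long iova_align(struct iova_domain *d, unsigned long size)
{
    return (size + d->granule - 1) & ~(d->granule - 1);
}

static uint64_t iova_dma_addr(struct iova_domain *d, struct iova *i)
{
    return (uint64_t)i->pfn_lo << iova_shift(d);
}

int main(void)
{
    struct iova_domain dom = { .granule = 4096 };
    unsigned long size = iova_align(&dom, 6000);     /* -> 8192 */
    unsigned long shift = iova_shift(&dom);          /* -> 12 */

    /* alloc_iova() would hand back a free pfn range; fake one here. */
    struct iova i = { .pfn_lo = 0x1000, .pfn_hi = 0x1000 + (size >> shift) - 1 };

    printf("size=%lu pages=%lu addr=0x%llx\n", size, size >> shift,
           (unsigned long long)iova_dma_addr(&dom, &i));
    return 0;
}

alloc_iova() itself performs an rbtree search for a free range; the sketch fakes its result to keep the pfn arithmetic in view.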
|
D | dmabuf-cache.c |
  28    struct iova *iova;   member
  39    tegra_vde_iommu_unmap(entry->vde, entry->iova);   in tegra_vde_release_entry()
  73    struct iova *iova;   in tegra_vde_dmabuf_cache_map() local
  91    *addrp = iova_dma_addr(&vde->iova, entry->iova);   in tegra_vde_dmabuf_cache_map()
  125   err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size);   in tegra_vde_dmabuf_cache_map()
  129   *addrp = iova_dma_addr(&vde->iova, iova);   in tegra_vde_dmabuf_cache_map()
  132   iova = NULL;   in tegra_vde_dmabuf_cache_map()
  139   entry->iova = iova;   in tegra_vde_dmabuf_cache_map()
|
/linux-6.6.21/drivers/iommu/ |
D | iova.c |
  46    static struct iova *to_iova(struct rb_node *node)   in to_iova()
  48    return rb_entry(node, struct iova, node);   in to_iova()
  86    __cached_rbnode_insert_update(struct iova_domain *iovad, struct iova *new)   in __cached_rbnode_insert_update()
  95    __cached_rbnode_delete_update(struct iova_domain *iovad, struct iova *free)   in __cached_rbnode_delete_update()
  97    struct iova *cached_iova;   in __cached_rbnode_delete_update()
  152   iova_insert_rbtree(struct rb_root *root, struct iova *iova,   in iova_insert_rbtree() argument
  160   struct iova *this = to_iova(*new);   in iova_insert_rbtree()
  164   if (iova->pfn_lo < this->pfn_lo)   in iova_insert_rbtree()
  166   else if (iova->pfn_lo > this->pfn_lo)   in iova_insert_rbtree()
  174   rb_link_node(&iova->node, parent, new);   in iova_insert_rbtree()
  [all …]
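iova.c keeps allocated ranges in an rbtree keyed by pfn_lo: iova_insert_rbtree() descends comparing pfn_lo and links the new node at the leaf, while the cached-node helpers above speed up later allocations. A sketch of the same descent over a plain unbalanced binary tree — purely to show the ordering rule, with the rb_node plumbing omitted:

#include <stdio.h>
#include <stdlib.h>

struct iova {
    unsigned long pfn_lo, pfn_hi;
    struct iova *left, *right;               /* stand-in for rb_node */
};

/* Same ordering rule as iova_insert_rbtree(): key on pfn_lo. */
static void iova_insert(struct iova **root, struct iova *n)
{
    while (*root) {
        struct iova *this = *root;

        if (n->pfn_lo < this->pfn_lo)
            root = &this->left;
        else if (n->pfn_lo > this->pfn_lo)
            root = &this->right;
        else
            return;                          /* duplicate: the kernel WARNs here */
    }
    *root = n;
}

int main(void)
{
    struct iova a = { 0x100, 0x1ff }, b = { 0x400, 0x4ff }, *root = NULL;

    iova_insert(&root, &a);
    iova_insert(&root, &b);
    printf("root pfn_lo=0x%lx right=0x%lx\n", root->pfn_lo, root->right->pfn_lo);
    return 0;
}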
|
D | tegra-gart.c |
  60    #define for_each_gart_pte(gart, iova) \   argument
  61    for (iova = gart->iovmm_base; \
  62    iova < gart->iovmm_end; \
  63    iova += GART_PAGE_SIZE)
  66    unsigned long iova, unsigned long pte)   in gart_set_pte() argument
  68    writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);   in gart_set_pte()
  73    unsigned long iova)   in gart_read_pte() argument
  77    writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);   in gart_read_pte()
  85    unsigned long iova;   in do_gart_setup() local
  87    for_each_gart_pte(gart, iova)   in do_gart_setup()
  [all …]
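tegra-gart programs page table entries indirectly: write the iova to the GART_ENTRY_ADDR register, then read or write the PTE through GART_ENTRY_DATA. A userspace mock of that address/data indirection — the backing array stands in for MMIO, and the register indices are invented for the sketch:

#include <stdint.h>
#include <stdio.h>

#define GART_PAGE_SIZE   4096u
#define GART_ENTRY_ADDR  0        /* illustrative indices, not hardware offsets */
#define GART_ENTRY_DATA  1

struct gart { uint32_t regs[2]; uint32_t pte[16]; uint32_t base; };

/* Mock MMIO: a write to ENTRY_ADDR latches which PTE ENTRY_DATA touches. */
static void gart_write(struct gart *g, int reg, uint32_t val)
{
    g->regs[reg] = val;
    if (reg == GART_ENTRY_DATA)
        g->pte[(g->regs[GART_ENTRY_ADDR] - g->base) / GART_PAGE_SIZE] = val;
}

static uint32_t gart_read_pte(struct gart *g, uint32_t iova)
{
    gart_write(g, GART_ENTRY_ADDR, iova);    /* latch the address first */
    return g->pte[(iova - g->base) / GART_PAGE_SIZE];
}

int main(void)
{
    struct gart g = { .base = 0x58000000 };

    gart_write(&g, GART_ENTRY_ADDR, g.base + GART_PAGE_SIZE);
    gart_write(&g, GART_ENTRY_DATA, 0x80001234);   /* set PTE for page 1 */
    printf("pte=0x%x\n", (unsigned)gart_read_pte(&g, g.base + GART_PAGE_SIZE));
    return 0;
}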
|
D | io-pgtable-arm-v7s.c |
  432   unsigned long iova, phys_addr_t paddr, int prot,   in arm_v7s_init_pte() argument
  448   tblp = ptep - ARM_V7S_LVL_IDX(iova, lvl, cfg);   in arm_v7s_init_pte()
  449   if (WARN_ON(__arm_v7s_unmap(data, NULL, iova + i * sz,   in arm_v7s_init_pte()
  497   static int __arm_v7s_map(struct arm_v7s_io_pgtable *data, unsigned long iova,   in __arm_v7s_map() argument
  506   ptep += ARM_V7S_LVL_IDX(iova, lvl, cfg);   in __arm_v7s_map()
  510   return arm_v7s_init_pte(data, iova, paddr, prot,   in __arm_v7s_map()
  541   return __arm_v7s_map(data, iova, paddr, size, prot, lvl + 1, cptep, gfp);   in __arm_v7s_map()
  544   static int arm_v7s_map_pages(struct io_pgtable_ops *ops, unsigned long iova,   in arm_v7s_map_pages() argument
  551   if (WARN_ON(iova >= (1ULL << data->iop.cfg.ias) ||   in arm_v7s_map_pages()
  560   ret = __arm_v7s_map(data, iova, paddr, pgsize, prot, 1, data->pgd,   in arm_v7s_map_pages()
  [all …]
|
D | sun50i-iommu.c |
  165   static u32 sun50i_iova_get_dte_index(dma_addr_t iova)   in sun50i_iova_get_dte_index() argument
  167   return FIELD_GET(SUN50I_IOVA_DTE_MASK, iova);   in sun50i_iova_get_dte_index()
  170   static u32 sun50i_iova_get_pte_index(dma_addr_t iova)   in sun50i_iova_get_pte_index() argument
  172   return FIELD_GET(SUN50I_IOVA_PTE_MASK, iova);   in sun50i_iova_get_pte_index()
  175   static u32 sun50i_iova_get_page_offset(dma_addr_t iova)   in sun50i_iova_get_page_offset() argument
  177   return FIELD_GET(SUN50I_IOVA_PAGE_MASK, iova);   in sun50i_iova_get_page_offset()
  301   unsigned long iova)   in sun50i_iommu_zap_iova() argument
  306   iommu_write(iommu, IOMMU_TLB_IVLD_ADDR_REG, iova);   in sun50i_iommu_zap_iova()
  318   unsigned long iova)   in sun50i_iommu_zap_ptw_cache() argument
  323   iommu_write(iommu, IOMMU_PC_IVLD_ADDR_REG, iova);   in sun50i_iommu_zap_ptw_cache()
  [all …]
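sun50i-iommu decomposes an iova with FIELD_GET() over fixed masks: directory index, table index, and page offset. The sketch below open-codes FIELD_GET and assumes 12/8/12-bit fields (bits 31:20, 19:12, 11:0), consistent with 4 KiB pages — treat the exact mask values as an assumption and check them against the driver:

#include <stdint.h>
#include <stdio.h>

/* Assumed field layout: bits 31:20 DTE, 19:12 PTE, 11:0 page offset. */
#define IOVA_DTE_MASK   0xfff00000u
#define IOVA_PTE_MASK   0x000ff000u
#define IOVA_PAGE_MASK  0x00000fffu

/* Open-coded FIELD_GET(): mask, then shift the field down to bit 0. */
static uint32_t field_get(uint32_t mask, uint32_t val)
{
    return (val & mask) >> __builtin_ctz(mask);
}

int main(void)
{
    uint32_t iova = 0x12345678;

    printf("dte=%u pte=%u off=%u\n",
           field_get(IOVA_DTE_MASK, iova),     /* 0x123 */
           field_get(IOVA_PTE_MASK, iova),     /* 0x45  */
           field_get(IOVA_PAGE_MASK, iova));   /* 0x678 */
    return 0;
}

The same mask-and-shift decomposition recurs in the rockchip, exynos, and ipu3 entries below, just spelled with explicit shifts instead of FIELD_GET().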
|
D | io-pgtable-dart.c |
  124   unsigned long iova, phys_addr_t paddr,   in dart_init_pte() argument
  173   static int dart_get_table(struct dart_io_pgtable *data, unsigned long iova)   in dart_get_table() argument
  175   return (iova >> (3 * data->bits_per_level + ilog2(sizeof(dart_iopte)))) &   in dart_get_table()
  179   static int dart_get_l1_index(struct dart_io_pgtable *data, unsigned long iova)   in dart_get_l1_index() argument
  182   return (iova >> (2 * data->bits_per_level + ilog2(sizeof(dart_iopte)))) &   in dart_get_l1_index()
  186   static int dart_get_l2_index(struct dart_io_pgtable *data, unsigned long iova)   in dart_get_l2_index() argument
  189   return (iova >> (data->bits_per_level + ilog2(sizeof(dart_iopte)))) &   in dart_get_l2_index()
  193   static dart_iopte *dart_get_l2(struct dart_io_pgtable *data, unsigned long iova)   in dart_get_l2() argument
  196   int tbl = dart_get_table(data, iova);   in dart_get_l2()
  202   ptep += dart_get_l1_index(data, iova);   in dart_get_l2()
  [all …]
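io-pgtable-dart derives each level's index by shifting the iova past all lower levels and masking to bits_per_level bits. A generic restatement of that scheme; the page shift and bits_per_level values below are plausible Apple DART parameters assumed for illustration (the driver computes its shifts from the configuration, folding in ilog2(sizeof(dart_iopte))):

#include <stdint.h>
#include <stdio.h>

/* Assumed parameters: 16 KiB pages, 8-byte PTEs -> 2048 entries per level. */
#define PAGE_SHIFT      14
#define BITS_PER_LEVEL  11

/* Index at level `lvl`, where level 0 sits closest to the page. */
static unsigned int level_index(uint64_t iova, int lvl)
{
    return (iova >> (PAGE_SHIFT + lvl * BITS_PER_LEVEL)) &
           ((1u << BITS_PER_LEVEL) - 1);
}

int main(void)
{
    uint64_t iova = 0x123456789ULL;

    /* Level 0 mirrors dart_get_l2_index(), level 1 dart_get_l1_index(). */
    printf("l2=%u l1=%u off=%llu\n",
           level_index(iova, 0), level_index(iova, 1),
           (unsigned long long)(iova & ((1u << PAGE_SHIFT) - 1)));
    return 0;
}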
|
D | exynos-iommu.c |
  97    #define section_offs(iova) (iova & (SECT_SIZE - 1))   argument
  99    #define lpage_offs(iova) (iova & (LPAGE_SIZE - 1))   argument
  101   #define spage_offs(iova) (iova & (SPAGE_SIZE - 1))   argument
  106   static u32 lv1ent_offset(sysmmu_iova_t iova)   in lv1ent_offset() argument
  108   return iova >> SECT_ORDER;   in lv1ent_offset()
  111   static u32 lv2ent_offset(sysmmu_iova_t iova)   in lv2ent_offset() argument
  113   return (iova >> SPAGE_ORDER) & (NUM_LV2ENTRIES - 1);   in lv2ent_offset()
  177   static sysmmu_pte_t *section_entry(sysmmu_pte_t *pgtable, sysmmu_iova_t iova)   in section_entry() argument
  179   return pgtable + lv1ent_offset(iova);   in section_entry()
  182   static sysmmu_pte_t *page_entry(sysmmu_pte_t *sent, sysmmu_iova_t iova)   in page_entry() argument
  [all …]
|
D | io-pgtable-arm.c |
  255   unsigned long iova, size_t size, size_t pgcount,
  280   unsigned long iova, phys_addr_t paddr,   in arm_lpae_init_pte() argument
  299   tblp = ptep - ARM_LPAE_LVL_IDX(iova, lvl, data);   in arm_lpae_init_pte()
  300   if (__arm_lpae_unmap(data, NULL, iova + i * sz, sz, 1,   in arm_lpae_init_pte()
  343   static int __arm_lpae_map(struct arm_lpae_io_pgtable *data, unsigned long iova,   in __arm_lpae_map() argument
  355   map_idx_start = ARM_LPAE_LVL_IDX(iova, lvl, data);   in __arm_lpae_map()
  362   ret = arm_lpae_init_pte(data, iova, paddr, prot, lvl, num_entries, ptep);   in __arm_lpae_map()
  396   return __arm_lpae_map(data, iova, paddr, size, pgcount, prot, lvl + 1,   in __arm_lpae_map()
  464   static int arm_lpae_map_pages(struct io_pgtable_ops *ops, unsigned long iova,   in arm_lpae_map_pages() argument
  473   long iaext = (s64)iova >> cfg->ias;   in arm_lpae_map_pages()
  [all …]
|
D | tegra-smmu.c |
  156   static unsigned int iova_pd_index(unsigned long iova)   in iova_pd_index() argument
  158   return (iova >> SMMU_PDE_SHIFT) & (SMMU_NUM_PDE - 1);   in iova_pd_index()
  161   static unsigned int iova_pt_index(unsigned long iova)   in iova_pt_index() argument
  163   return (iova >> SMMU_PTE_SHIFT) & (SMMU_NUM_PTE - 1);   in iova_pt_index()
  223   unsigned long iova)   in smmu_flush_tlb_section() argument
  232   value |= SMMU_TLB_FLUSH_ASID_MATCH | SMMU_TLB_FLUSH_VA_SECTION(iova);   in smmu_flush_tlb_section()
  238   unsigned long iova)   in smmu_flush_tlb_group() argument
  247   value |= SMMU_TLB_FLUSH_ASID_MATCH | SMMU_TLB_FLUSH_VA_GROUP(iova);   in smmu_flush_tlb_group()
  531   static void tegra_smmu_set_pde(struct tegra_smmu_as *as, unsigned long iova,   in tegra_smmu_set_pde() argument
  534   unsigned int pd_index = iova_pd_index(iova);   in tegra_smmu_set_pde()
  [all …]
|
D | rockchip-iommu.c |
  320   static u32 rk_iova_dte_index(dma_addr_t iova)   in rk_iova_dte_index() argument
  322   return (u32)(iova & RK_IOVA_DTE_MASK) >> RK_IOVA_DTE_SHIFT;   in rk_iova_dte_index()
  325   static u32 rk_iova_pte_index(dma_addr_t iova)   in rk_iova_pte_index() argument
  327   return (u32)(iova & RK_IOVA_PTE_MASK) >> RK_IOVA_PTE_SHIFT;   in rk_iova_pte_index()
  330   static u32 rk_iova_page_offset(dma_addr_t iova)   in rk_iova_page_offset() argument
  332   return (u32)(iova & RK_IOVA_PAGE_MASK) >> RK_IOVA_PAGE_SHIFT;   in rk_iova_page_offset()
  367   dma_addr_t iova;   in rk_iommu_zap_lines() local
  369   for (iova = iova_start; iova < iova_end; iova += SPAGE_SIZE)   in rk_iommu_zap_lines()
  370   rk_iommu_write(iommu->bases[i], RK_MMU_ZAP_ONE_LINE, iova);   in rk_iommu_zap_lines()
  533   static void log_iova(struct rk_iommu *iommu, int index, dma_addr_t iova)   in log_iova() argument
  [all …]
|
/linux-6.6.21/include/linux/ |
D | iova.h |
  18   struct iova {   struct
  37   struct iova anchor; /* rbtree lookup anchor */
  43   static inline unsigned long iova_size(struct iova *iova)   in iova_size() argument
  45   return iova->pfn_hi - iova->pfn_lo + 1;   in iova_size()
  58   static inline size_t iova_offset(struct iova_domain *iovad, dma_addr_t iova)   in iova_offset() argument
  60   return iova & iova_mask(iovad);   in iova_offset()
  68   static inline dma_addr_t iova_dma_addr(struct iova_domain *iovad, struct iova *iova)   in iova_dma_addr() argument
  70   return (dma_addr_t)iova->pfn_lo << iova_shift(iovad);   in iova_dma_addr()
  73   static inline unsigned long iova_pfn(struct iova_domain *iovad, dma_addr_t iova)   in iova_pfn() argument
  75   return iova >> iova_shift(iovad);   in iova_pfn()
  [all …]
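These inline helpers are all small pfn/byte conversions around the domain granule; note that iova_size() adds 1 because pfn_hi is inclusive. A standalone restatement of the helpers not already sketched under the tegra-vde entry (granule hard-coded for the sketch):

#include <stdint.h>
#include <stdio.h>

struct iova        { unsigned long pfn_lo, pfn_hi; };
struct iova_domain { unsigned long granule; };

static unsigned long iova_shift(struct iova_domain *d) { return __builtin_ctzl(d->granule); }
static unsigned long iova_mask(struct iova_domain *d)  { return d->granule - 1; }

/* pfn_hi is inclusive, hence the +1. */
static unsigned long iova_size(struct iova *i) { return i->pfn_hi - i->pfn_lo + 1; }

/* Byte offset within a granule, and the pfn a bus address falls in. */
static size_t iova_offset(struct iova_domain *d, uint64_t iova) { return iova & iova_mask(d); }
static unsigned long iova_pfn(struct iova_domain *d, uint64_t iova) { return iova >> iova_shift(d); }

int main(void)
{
    struct iova_domain dom = { .granule = 4096 };
    struct iova i = { .pfn_lo = 0x10, .pfn_hi = 0x13 };

    printf("size=%lu pages, off=%zu, pfn=%lu\n",
           iova_size(&i), iova_offset(&dom, 0x1234), iova_pfn(&dom, 0x10000));
    return 0;
}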
|
/linux-6.6.21/drivers/fpga/ |
D | dfl-afu-dma-region.c |
  125   u64 iova, u64 size)   in dma_region_check_iova() argument
  127   if (!size && region->iova != iova)   in dma_region_check_iova()
  130   return (region->iova <= iova) &&   in dma_region_check_iova()
  131   (region->length + region->iova >= iova + size);   in dma_region_check_iova()
  150   (unsigned long long)region->iova);   in afu_dma_region_add()
  161   if (dma_region_check_iova(this, region->iova, region->length))   in afu_dma_region_add()
  164   if (region->iova < this->iova)   in afu_dma_region_add()
  166   else if (region->iova > this->iova)   in afu_dma_region_add()
  191   (unsigned long long)region->iova);   in afu_dma_region_remove()
  213   (unsigned long long)region->iova);   in afu_dma_region_destroy()
  [all …]
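dma_region_check_iova() is a containment test: with a zero size the query must match the region's start exactly; otherwise the whole [iova, iova + size) span must lie inside the region. afu_dma_region_add() then orders regions by iova in an rbtree, as the matches show. A standalone restatement of the check:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct dma_region { uint64_t iova, length; };

/* Same logic as dma_region_check_iova(): zero size means "exact start",
 * otherwise the whole [iova, iova + size) span must fit in the region. */
static bool region_contains(const struct dma_region *r, uint64_t iova, uint64_t size)
{
    if (!size && r->iova != iova)
        return false;
    return r->iova <= iova && r->iova + r->length >= iova + size;
}

int main(void)
{
    struct dma_region r = { .iova = 0x1000, .length = 0x2000 };

    printf("%d %d %d\n",
           region_contains(&r, 0x1000, 0x2000),   /* 1: exact fit      */
           region_contains(&r, 0x1800, 0x1000),   /* 1: interior span  */
           region_contains(&r, 0x2800, 0x1000));  /* 0: runs past end  */
    return 0;
}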
|
/linux-6.6.21/include/trace/events/ |
D | iommu.h |
  81    TP_PROTO(unsigned long iova, phys_addr_t paddr, size_t size),
  83    TP_ARGS(iova, paddr, size),
  86    __field(u64, iova)
  92    __entry->iova = iova;
  98    __entry->iova, __entry->iova + __entry->size, __entry->paddr,
  105   TP_PROTO(unsigned long iova, size_t size, size_t unmapped_size),
  107   TP_ARGS(iova, size, unmapped_size),
  110   __field(u64, iova)
  116   __entry->iova = iova;
  122   __entry->iova, __entry->iova + __entry->size,
  [all …]
|
/linux-6.6.21/drivers/staging/media/ipu3/ |
D | ipu3-dmamap.c |
  102   struct iova *iova;   in imgu_dmamap_alloc() local
  107   iova = alloc_iova(&imgu->iova_domain, size >> shift,   in imgu_dmamap_alloc()
  109   if (!iova)   in imgu_dmamap_alloc()
  117   iovaddr = iova_dma_addr(&imgu->iova_domain, iova);   in imgu_dmamap_alloc()
  133   map->daddr = iova_dma_addr(&imgu->iova_domain, iova);   in imgu_dmamap_alloc()
  142   imgu_mmu_unmap(imgu->mmu, iova_dma_addr(&imgu->iova_domain, iova),   in imgu_dmamap_alloc()
  146   __free_iova(&imgu->iova_domain, iova);   in imgu_dmamap_alloc()
  153   struct iova *iova;   in imgu_dmamap_unmap() local
  155   iova = find_iova(&imgu->iova_domain,   in imgu_dmamap_unmap()
  157   if (WARN_ON(!iova))   in imgu_dmamap_unmap()
  [all …]
|
D | ipu3-mmu.c |
  154   static inline void address_to_pte_idx(unsigned long iova, u32 *l1pt_idx,   in address_to_pte_idx() argument
  157   iova >>= IPU3_PAGE_SHIFT;   in address_to_pte_idx()
  160   *l2pt_idx = iova & IPU3_L2PT_MASK;   in address_to_pte_idx()
  162   iova >>= IPU3_L2PT_SHIFT;   in address_to_pte_idx()
  165   *l1pt_idx = iova & IPU3_L1PT_MASK;   in address_to_pte_idx()
  210   static int __imgu_mmu_map(struct imgu_mmu *mmu, unsigned long iova,   in __imgu_mmu_map() argument
  220   address_to_pte_idx(iova, &l1pt_idx, &l2pt_idx);   in __imgu_mmu_map()
  251   int imgu_mmu_map(struct imgu_mmu_info *info, unsigned long iova,   in imgu_mmu_map() argument
  262   if (!IS_ALIGNED(iova | paddr | size, IPU3_PAGE_SIZE)) {   in imgu_mmu_map()
  264   iova, &paddr, size);   in imgu_mmu_map()
  [all …]
|
/linux-6.6.21/drivers/vfio/ |
D | vfio_iommu_type1.c |
  90    dma_addr_t iova; /* Device address */   member
  129   dma_addr_t iova; /* Device address */   member
  136   dma_addr_t iova;   member
  173   if (start + size <= dma->iova)   in vfio_find_dma()
  175   else if (start >= dma->iova + dma->size)   in vfio_find_dma()
  194   if (start < dma->iova + dma->size) {   in vfio_find_dma_first_node()
  197   if (start >= dma->iova)   in vfio_find_dma_first_node()
  204   if (res && size && dma_res->iova >= start + size)   in vfio_find_dma_first_node()
  218   if (new->iova + new->size <= dma->iova)   in vfio_link_dma()
  268   bitmap_set(dma->bitmap, (vpfn->iova - dma->iova) >> pgshift, 1);   in vfio_dma_populate_bitmap()
  [all …]
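vfio_find_dma() is an interval lookup over the rbtree of vfio_dma ranges: go left when the query ends at or below the node's start, right when it starts at or beyond the node's end, otherwise the two ranges overlap. A sketch over a plain unbalanced tree, with stand-in fields for the vfio structs:

#include <stdint.h>
#include <stdio.h>

struct vfio_dma {
    uint64_t iova, size;
    struct vfio_dma *left, *right;           /* stand-in for rb_node */
};

/* Mirrors vfio_find_dma(): returns any node overlapping [start, start+size). */
static struct vfio_dma *find_dma(struct vfio_dma *node, uint64_t start, uint64_t size)
{
    while (node) {
        if (start + size <= node->iova)
            node = node->left;               /* query entirely below node */
        else if (start >= node->iova + node->size)
            node = node->right;              /* query entirely above node */
        else
            return node;                     /* overlap */
    }
    return NULL;
}

int main(void)
{
    struct vfio_dma lo = { 0x1000, 0x1000 }, hi = { 0x8000, 0x1000 };
    struct vfio_dma root = { 0x4000, 0x2000, &lo, &hi };
    struct vfio_dma *hit = find_dma(&root, 0x4800, 0x100);

    printf("hit iova=0x%llx\n", hit ? (unsigned long long)hit->iova : 0ULL);
    return 0;
}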
|
D | iova_bitmap.c |
  36    unsigned long iova;   member
  112   unsigned long iova;   member
  128   unsigned long iova)   in iova_bitmap_offset_to_index()
  132   return iova / (BITS_PER_TYPE(*bitmap->bitmap) * pgsize);   in iova_bitmap_offset_to_index()
  153   return bitmap->iova + iova_bitmap_index_to_offset(bitmap, skip);   in iova_bitmap_mapped_iova()
  198   mapped->iova = iova_bitmap_mapped_iova(bitmap);   in iova_bitmap_get()
  238   struct iova_bitmap *iova_bitmap_alloc(unsigned long iova, size_t length,   in iova_bitmap_alloc() argument
  254   bitmap->iova = iova;   in iova_bitmap_alloc()
  256   mapped->iova = iova;   in iova_bitmap_alloc()
  316   unsigned long max_iova = bitmap->iova + bitmap->length - 1;   in iova_bitmap_mapped_length()
  [all …]
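iova_bitmap maps an iova-relative byte offset to a bitmap word index by dividing by the span one word covers — BITS_PER_TYPE(word) pages of pgsize bytes — and index_to_offset() inverts that. A restatement assuming u64 words and 4 KiB pages:

#include <stdint.h>
#include <stdio.h>

#define BITS_PER_U64  64

/* One u64 word of the bitmap tracks 64 pages of pgsize bytes. */
static unsigned long bitmap_index(uint64_t offset, unsigned long pgsize)
{
    return offset / (BITS_PER_U64 * pgsize);
}

static uint64_t index_to_offset(unsigned long index, unsigned long pgsize)
{
    return (uint64_t)index * BITS_PER_U64 * pgsize;
}

int main(void)
{
    unsigned long pgsize = 4096;

    /* 1 MiB into the range -> 256 pages -> word 4 of the bitmap. */
    printf("index=%lu back=0x%llx\n",
           bitmap_index(1 << 20, pgsize),
           (unsigned long long)index_to_offset(4, pgsize));
    return 0;
}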
|
/linux-6.6.21/drivers/vdpa/vdpa_user/ |
D | iova_domain.c |
  104   u64 iova, u64 size, u64 paddr)   in vduse_domain_map_bounce_page() argument
  107   u64 last = iova + size - 1;   in vduse_domain_map_bounce_page()
  109   while (iova <= last) {   in vduse_domain_map_bounce_page()
  110   map = &domain->bounce_maps[iova >> PAGE_SHIFT];   in vduse_domain_map_bounce_page()
  118   iova += PAGE_SIZE;   in vduse_domain_map_bounce_page()
  124   u64 iova, u64 size)   in vduse_domain_unmap_bounce_page() argument
  127   u64 last = iova + size - 1;   in vduse_domain_unmap_bounce_page()
  129   while (iova <= last) {   in vduse_domain_unmap_bounce_page()
  130   map = &domain->bounce_maps[iova >> PAGE_SHIFT];   in vduse_domain_unmap_bounce_page()
  132   iova += PAGE_SIZE;   in vduse_domain_unmap_bounce_page()
  [all …]
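Both vduse bounce-page helpers walk the span one page at a time, indexing bounce_maps with iova >> PAGE_SHIFT; the inclusive `last` bound avoids comparing against iova + size, which could wrap at the top of the address space. A sketch of the loop shape:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1u << PAGE_SHIFT)

/* Visit every page slot covered by [iova, iova + size); `last` is
 * inclusive, mirroring vduse_domain_map_bounce_page(). */
static void for_each_bounce_page(uint64_t iova, uint64_t size)
{
    uint64_t last = iova + size - 1;

    while (iova <= last) {
        printf("slot %llu\n", (unsigned long long)(iova >> PAGE_SHIFT));
        iova += PAGE_SIZE;
    }
}

int main(void)
{
    for_each_bounce_page(0x3000, 2 * PAGE_SIZE + 1);  /* touches slots 3, 4, 5 */
    return 0;
}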
|
/linux-6.6.21/drivers/infiniband/sw/rxe/ |
D | rxe_mr.c |
  27    int mr_check_range(struct rxe_mr *mr, u64 iova, size_t length)   in mr_check_range() argument
  35    if (iova < mr->ibmr.iova ||   in mr_check_range()
  36    iova + length > mr->ibmr.iova + mr->ibmr.length) {   in mr_check_range()
  75    static unsigned long rxe_mr_iova_to_index(struct rxe_mr *mr, u64 iova)   in rxe_mr_iova_to_index() argument
  77    return (iova >> mr->page_shift) - (mr->ibmr.iova >> mr->page_shift);   in rxe_mr_iova_to_index()
  80    static unsigned long rxe_mr_iova_to_page_offset(struct rxe_mr *mr, u64 iova)   in rxe_mr_iova_to_page_offset() argument
  82    return iova & (mr_page_size(mr) - 1);   in rxe_mr_iova_to_page_offset()
  129   int rxe_mr_init_user(struct rxe_dev *rxe, u64 start, u64 length, u64 iova,   in rxe_mr_init_user() argument
  240   mr->page_offset = mr->ibmr.iova & (page_size - 1);   in rxe_map_mr_sg()
  245   static int rxe_mr_copy_xarray(struct rxe_mr *mr, u64 iova, void *addr,   in rxe_mr_copy_xarray() argument
  [all …]
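rxe_mr turns an iova into an index into the MR's page array by subtracting page numbers relative to the MR's own starting iova, and masks out the in-page offset; mr_check_range() bounds the access first. A restatement with stand-in fields for the rxe structs:

#include <stdint.h>
#include <stdio.h>

struct mr { uint64_t iova, length; unsigned int page_shift; };

/* Mirrors rxe_mr_iova_to_index(): page number relative to the MR start. */
static unsigned long mr_index(const struct mr *m, uint64_t iova)
{
    return (iova >> m->page_shift) - (m->iova >> m->page_shift);
}

static unsigned long mr_page_offset(const struct mr *m, uint64_t iova)
{
    return iova & ((1ull << m->page_shift) - 1);
}

/* Mirrors mr_check_range(): the access must sit inside [iova, iova+length). */
static int mr_check_range(const struct mr *m, uint64_t iova, uint64_t len)
{
    return !(iova < m->iova || iova + len > m->iova + m->length);
}

int main(void)
{
    struct mr m = { .iova = 0x10000, .length = 0x8000, .page_shift = 12 };
    uint64_t a = 0x12345;

    printf("ok=%d index=%lu off=%lu\n",
           mr_check_range(&m, a, 8), mr_index(&m, a), mr_page_offset(&m, a));
    return 0;
}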
|
/linux-6.6.21/drivers/gpu/drm/msm/ |
D | msm_iommu.c |
  37    unsigned long iova, phys_addr_t paddr,   in calc_pgsize() argument
  43    unsigned long addr_merge = paddr | iova;   in calc_pgsize()
  73    if ((iova ^ paddr) & (pgsize_next - 1))   in calc_pgsize()
  91    static int msm_iommu_pagetable_unmap(struct msm_mmu *mmu, u64 iova,   in msm_iommu_pagetable_unmap() argument
  100   pgsize = calc_pgsize(pagetable, iova, iova, size, &count);   in msm_iommu_pagetable_unmap()
  102   unmapped = ops->unmap_pages(ops, iova, pgsize, count, NULL);   in msm_iommu_pagetable_unmap()
  106   iova += unmapped;   in msm_iommu_pagetable_unmap()
  115   static int msm_iommu_pagetable_map(struct msm_mmu *mmu, u64 iova,   in msm_iommu_pagetable_map() argument
  121   u64 addr = iova;   in msm_iommu_pagetable_map()
  145   msm_iommu_pagetable_unmap(mmu, iova, addr - iova);   in msm_iommu_pagetable_map()
  [all …]
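calc_pgsize() picks the largest page size that fits the remaining length and that both addresses are aligned to; OR-ing paddr and iova into addr_merge means one trailing-zero count captures their common alignment. A simplified reconstruction — the kernel version also counts consecutive entries and peeks at pgsize_next, and the bitmap value below is a typical SMMU set assumed for illustration:

#include <stdint.h>
#include <stdio.h>

/* Highest set bit; callers guarantee the argument is non-zero. */
static int fls64(uint64_t v) { return 63 - __builtin_clzll(v); }

/* Largest supported page size that fits `size` and matches the common
 * alignment of iova and paddr. In practice a 4 KiB page always qualifies,
 * so `pgsizes` never ends up empty. */
static uint64_t calc_pgsize(uint64_t pgsize_bitmap, uint64_t iova,
                            uint64_t paddr, uint64_t size)
{
    uint64_t addr_merge = paddr | iova;
    uint64_t pgsizes = pgsize_bitmap & ((2ull << fls64(size)) - 1); /* <= size */

    if (addr_merge)                        /* also require alignment */
        pgsizes &= (2ull << __builtin_ctzll(addr_merge)) - 1;

    return 1ull << fls64(pgsizes);
}

int main(void)
{
    /* 4K | 2M | 1G — a typical SMMU pgsize bitmap (assumed). */
    uint64_t bitmap = (1ull << 12) | (1ull << 21) | (1ull << 30);

    printf("0x%llx\n", (unsigned long long)
           calc_pgsize(bitmap, 0x40200000, 0x80200000, 4u << 20)); /* 2M */
    printf("0x%llx\n", (unsigned long long)
           calc_pgsize(bitmap, 0x40201000, 0x80200000, 4u << 20)); /* 4K */
    return 0;
}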
|
/linux-6.6.21/drivers/iommu/amd/ |
D | io_pgtable_v2.c |
  135   static u64 *v2_alloc_pte(int nid, u64 *pgd, unsigned long iova,   in v2_alloc_pte() argument
  143   pte = &pgd[PM_LEVEL_INDEX(level, iova)];   in v2_alloc_pte()
  144   iova = PAGE_SIZE_ALIGN(iova, PAGE_SIZE);   in v2_alloc_pte()
  175   pte = &pte[PM_LEVEL_INDEX(level, iova)];   in v2_alloc_pte()
  199   unsigned long iova, unsigned long *page_size)   in fetch_pte() argument
  205   pte = &pgtable->pgd[PM_LEVEL_INDEX(level, iova)];   in fetch_pte()
  216   pte = &pte[PM_LEVEL_INDEX(level - 1, iova)];   in fetch_pte()
  236   static int iommu_v2_map_pages(struct io_pgtable_ops *ops, unsigned long iova,   in iommu_v2_map_pages() argument
  245   unsigned long o_iova = iova;   in iommu_v2_map_pages()
  260   iova, map_size, gfp, &updated);   in iommu_v2_map_pages()
  [all …]
|
/linux-6.6.21/drivers/iommu/iommufd/ |
D | selftest.c |
  48    u64 *iova)   in iommufd_test_syz_conv_iova() argument
  54    struct syz_layout *syz = (void *)iova;   in iommufd_test_syz_conv_iova()
  73    unsigned int ioas_id, u64 *iova, u32 *flags)   in iommufd_test_syz_conv_iova_id() argument
  84    *iova = iommufd_test_syz_conv_iova(&ioas->iopt, iova);   in iommufd_test_syz_conv_iova_id()
  179   unsigned long iova, phys_addr_t paddr,   in mock_domain_map_pages() argument
  186   unsigned long start_iova = iova;   in mock_domain_map_pages()
  195   WARN_ON(iova % MOCK_IO_PAGE_SIZE);   in mock_domain_map_pages()
  205   old = xa_store(&mock->pfns, iova / MOCK_IO_PAGE_SIZE,   in mock_domain_map_pages()
  210   for (; start_iova != iova;   in mock_domain_map_pages()
  218   iova += MOCK_IO_PAGE_SIZE;   in mock_domain_map_pages()
  [all …]
|
/linux-6.6.21/tools/testing/selftests/iommu/ |
D | iommufd_fail_nth.c |
  234   __u64 iova;   in TEST_FAIL_NTH() local
  270   if (_test_ioctl_ioas_map(self->fd, ioas_id, buffer, BUFFER_SIZE, &iova,   in TEST_FAIL_NTH()
  282   .src_iova = iova,   in TEST_FAIL_NTH()
  290   if (_test_ioctl_ioas_unmap(self->fd, ioas_id, iova, BUFFER_SIZE,   in TEST_FAIL_NTH()
  294   _test_ioctl_ioas_unmap(self->fd, ioas_id, iova, BUFFER_SIZE, NULL);   in TEST_FAIL_NTH()
  304   __u64 iova;   in TEST_FAIL_NTH() local
  321   if (_test_ioctl_ioas_map(self->fd, ioas_id, buffer, 262144, &iova,   in TEST_FAIL_NTH()
  341   __u64 iova;   in TEST_FAIL_NTH() local
  362   if (_test_ioctl_ioas_map(self->fd, ioas_id, buffer, 262144, &iova,   in TEST_FAIL_NTH()
  386   __u64 iova;   in TEST_FAIL_NTH() local
  [all …]
|
/linux-6.6.21/drivers/gpu/drm/etnaviv/ |
D | etnaviv_mmu.c |
  17    unsigned long iova, size_t size)   in etnaviv_context_unmap() argument
  22    if (!IS_ALIGNED(iova | size, pgsize)) {   in etnaviv_context_unmap()
  24    iova, size, pgsize);   in etnaviv_context_unmap()
  29    unmapped_page = context->global->ops->unmap(context, iova,   in etnaviv_context_unmap()
  34    iova += unmapped_page;   in etnaviv_context_unmap()
  40    unsigned long iova, phys_addr_t paddr,   in etnaviv_context_map() argument
  43    unsigned long orig_iova = iova;   in etnaviv_context_map()
  48    if (!IS_ALIGNED(iova | paddr | size, pgsize)) {   in etnaviv_context_map()
  50    iova, &paddr, size, pgsize);   in etnaviv_context_map()
  55    ret = context->global->ops->map(context, iova, paddr, pgsize,   in etnaviv_context_map()
  [all …]
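etnaviv_context_map()/unmap() reject misaligned requests up front; OR-ing iova, paddr, and size lets a single IS_ALIGNED() test cover every operand, since a low bit set in any of them survives the OR. A one-liner restatement:

#include <stdint.h>
#include <stdio.h>

#define IS_ALIGNED(x, a)  (((x) & ((a) - 1)) == 0)

int main(void)
{
    uint64_t iova = 0x1000, paddr = 0x2000, size = 0x3000, pgsize = 0x1000;

    /* One test covers all three operands: any low bit set anywhere fails. */
    printf("%d\n", IS_ALIGNED(iova | paddr | size, pgsize));   /* 1 */
    printf("%d\n", IS_ALIGNED(iova | 0x2800 | size, pgsize));  /* 0 */
    return 0;
}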
|