Lines Matching refs:v2_context
All hits are in the etnaviv DRM driver's MMUv2 page table code (drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c in the Linux kernel); each hit shows the source line number, the matched code fragment, and the enclosing function.
49 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_free() local
55 if (v2_context->stlb_cpu[i]) in etnaviv_iommuv2_free()
57 v2_context->stlb_cpu[i], in etnaviv_iommuv2_free()
58 v2_context->stlb_dma[i]); in etnaviv_iommuv2_free()
61 dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu, in etnaviv_iommuv2_free()
62 v2_context->mtlb_dma); in etnaviv_iommuv2_free()
64 clear_bit(v2_context->id, context->global->v2.pta_alloc); in etnaviv_iommuv2_free()
66 vfree(v2_context); in etnaviv_iommuv2_free()
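
The hits at source lines 49-66 all belong to the context teardown path. A sketch of the full function, with the lines between the hits filled in from the upstream etnaviv driver (details may differ between kernel versions):

    static void etnaviv_iommuv2_free(struct etnaviv_iommu_context *context)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
            int i;

            drm_mm_takedown(&context->mm);

            /* Free every second-level (STLB) page that was ever populated. */
            for (i = 0; i < MMUv2_MAX_STLB_ENTRIES; i++) {
                    if (v2_context->stlb_cpu[i])
                            dma_free_wc(context->global->dev, SZ_4K,
                                        v2_context->stlb_cpu[i],
                                        v2_context->stlb_dma[i]);
            }

            /* Free the master TLB page and return the PTA slot to the pool. */
            dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu,
                        v2_context->mtlb_dma);
            clear_bit(v2_context->id, context->global->v2.pta_alloc);

            vfree(v2_context);
    }
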
69 etnaviv_iommuv2_ensure_stlb(struct etnaviv_iommuv2_context *v2_context, in etnaviv_iommuv2_ensure_stlb() argument
72 if (v2_context->stlb_cpu[stlb]) in etnaviv_iommuv2_ensure_stlb()
75 v2_context->stlb_cpu[stlb] = in etnaviv_iommuv2_ensure_stlb()
76 dma_alloc_wc(v2_context->base.global->dev, SZ_4K, in etnaviv_iommuv2_ensure_stlb()
77 &v2_context->stlb_dma[stlb], in etnaviv_iommuv2_ensure_stlb()
80 if (!v2_context->stlb_cpu[stlb]) in etnaviv_iommuv2_ensure_stlb()
83 memset32(v2_context->stlb_cpu[stlb], MMUv2_PTE_EXCEPTION, in etnaviv_iommuv2_ensure_stlb()
86 v2_context->mtlb_cpu[stlb] = in etnaviv_iommuv2_ensure_stlb()
87 v2_context->stlb_dma[stlb] | MMUv2_PTE_PRESENT; in etnaviv_iommuv2_ensure_stlb()
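
Source lines 69-87 are the lazy allocator for second-level tables: a 4 KiB STLB page is only allocated the first time a mapping touches its 4 MiB region. Reconstructed from the hits, with the elided lines filled in from upstream:

    static int
    etnaviv_iommuv2_ensure_stlb(struct etnaviv_iommuv2_context *v2_context,
                                int stlb)
    {
            if (v2_context->stlb_cpu[stlb])
                    return 0;

            v2_context->stlb_cpu[stlb] =
                            dma_alloc_wc(v2_context->base.global->dev, SZ_4K,
                                         &v2_context->stlb_dma[stlb],
                                         GFP_KERNEL);
            if (!v2_context->stlb_cpu[stlb])
                    return -ENOMEM;

            /* All 1024 entries fault until they are explicitly mapped. */
            memset32(v2_context->stlb_cpu[stlb], MMUv2_PTE_EXCEPTION,
                     SZ_4K / sizeof(u32));

            /* Hook the new STLB into the master TLB entry for this region. */
            v2_context->mtlb_cpu[stlb] =
                            v2_context->stlb_dma[stlb] | MMUv2_PTE_PRESENT;

            return 0;
    }
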
96 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_map() local
112 ret = etnaviv_iommuv2_ensure_stlb(v2_context, mtlb_entry); in etnaviv_iommuv2_map()
116 v2_context->stlb_cpu[mtlb_entry][stlb_entry] = entry; in etnaviv_iommuv2_map()
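
The map path (source lines 96-116) splits the IOVA into a master-TLB index and an STLB index, makes sure the STLB exists, then writes the PTE. A sketch under the same caveat; the mask/shift constants and the 36-bit physical address handling are taken from upstream:

    static int etnaviv_iommuv2_map(struct etnaviv_iommu_context *context,
                                   unsigned long iova, phys_addr_t paddr,
                                   size_t size, int prot)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
            int mtlb_entry, stlb_entry, ret;
            u32 entry = lower_32_bits(paddr) | MMUv2_PTE_PRESENT;

            if (size != SZ_4K)
                    return -EINVAL;

            if (IS_ENABLED(CONFIG_PHYS_ADDR_T_64BIT))
                    entry |= (upper_32_bits(paddr) & 0xff) << 4;

            if (prot & ETNAVIV_PROT_WRITE)
                    entry |= MMUv2_PTE_WRITEABLE;

            /* Bits 31:22 select the MTLB entry, bits 21:12 the STLB entry. */
            mtlb_entry = (iova & MMUv2_MTLB_MASK) >> MMUv2_MTLB_SHIFT;
            stlb_entry = (iova & MMUv2_STLB_MASK) >> MMUv2_STLB_SHIFT;

            ret = etnaviv_iommuv2_ensure_stlb(v2_context, mtlb_entry);
            if (ret)
                    return ret;

            v2_context->stlb_cpu[mtlb_entry][stlb_entry] = entry;

            return 0;
    }
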
140 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_dump_size() local
145 if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT) in etnaviv_iommuv2_dump_size()
153 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_dump() local
156 memcpy(buf, v2_context->mtlb_cpu, SZ_4K); in etnaviv_iommuv2_dump()
159 if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT) { in etnaviv_iommuv2_dump()
160 memcpy(buf, v2_context->stlb_cpu[i], SZ_4K); in etnaviv_iommuv2_dump()
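
The two dump helpers (source lines 140-160) walk the same structure: the dump is one 4 KiB MTLB page plus one 4 KiB page per present STLB, so dump_size() and dump() must agree on which tables count as present. Sketch from the hits:

    static size_t etnaviv_iommuv2_dump_size(struct etnaviv_iommu_context *context)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
            size_t dump_size = SZ_4K; /* the MTLB page itself */
            int i;

            for (i = 0; i < MMUv2_MAX_STLB_ENTRIES; i++)
                    if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT)
                            dump_size += SZ_4K;

            return dump_size;
    }

    static void etnaviv_iommuv2_dump(struct etnaviv_iommu_context *context,
                                     void *buf)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
            int i;

            memcpy(buf, v2_context->mtlb_cpu, SZ_4K);
            buf += SZ_4K;
            for (i = 0; i < MMUv2_MAX_STLB_ENTRIES; i++)
                    if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT) {
                            memcpy(buf, v2_context->stlb_cpu[i], SZ_4K);
                            buf += SZ_4K;
                    }
    }
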
168 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_restore_nonsec() local
180 (u32)v2_context->mtlb_dma, in etnaviv_iommuv2_restore_nonsec()
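
In the non-secure restore path (source lines 168-180) the MTLB DMA address is handed to the GPU through the command stream rather than a plain register write. A condensed sketch; the register names, helper calls, and enable sequence are recalled from upstream and may vary by kernel version:

    static void etnaviv_iommuv2_restore_nonsec(struct etnaviv_gpu *gpu,
            struct etnaviv_iommu_context *context)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
            u16 prefetch;

            /* If the MMU is already enabled the state is still there. */
            if (gpu_read(gpu, VIVS_MMUv2_CONTROL) & VIVS_MMUv2_CONTROL_ENABLE)
                    return;

            /* Point the MMU at this context's master TLB via the FE. */
            prefetch = etnaviv_buffer_config_mmuv2(gpu,
                                    (u32)v2_context->mtlb_dma,
                                    (u32)context->global->bad_page_dma);
            etnaviv_gpu_start_fe(gpu, (u32)etnaviv_cmdbuf_get_pa(&gpu->buffer),
                                 prefetch);
            etnaviv_gpu_wait_idle(gpu, 100);

            gpu_write(gpu, VIVS_MMUv2_CONTROL, VIVS_MMUv2_CONTROL_ENABLE);
    }
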
192 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_restore_sec() local
219 context->global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma | in etnaviv_iommuv2_restore_sec()
223 prefetch = etnaviv_buffer_config_pta(gpu, v2_context->id); in etnaviv_iommuv2_restore_sec()
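
The secure restore path (source lines 192-223) does not program the MTLB address directly; it publishes it in this context's slot of the global page table array (PTA) and asks the front end to load that slot. Condensed sketch, with the PTA base and safe-address register setup elided:

    static void etnaviv_iommuv2_restore_sec(struct etnaviv_gpu *gpu,
            struct etnaviv_iommu_context *context)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
            u16 prefetch;

            /* If the MMU is already enabled the state is still there. */
            if (gpu_read(gpu, VIVS_MMUv2_SEC_CONTROL) &
                VIVS_MMUv2_SEC_CONTROL_ENABLE)
                    return;

            /* ... program PTA base and safe/bad page addresses here ... */

            /* Publish this context's MTLB in its PTA slot. */
            context->global->v2.pta_cpu[v2_context->id] =
                            v2_context->mtlb_dma |
                            VIVS_MMUv2_CONFIGURATION_MODE_MODE4_K;

            /* Have the FE load the PTA entry, then enable the MMU. */
            prefetch = etnaviv_buffer_config_pta(gpu, v2_context->id);
            etnaviv_gpu_start_fe(gpu, (u32)etnaviv_cmdbuf_get_pa(&gpu->buffer),
                                 prefetch);
            etnaviv_gpu_wait_idle(gpu, 100);

            gpu_write(gpu, VIVS_MMUv2_SEC_CONTROL,
                      VIVS_MMUv2_SEC_CONTROL_ENABLE);
    }
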
233 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_get_mtlb_addr() local
235 return v2_context->mtlb_dma; in etnaviv_iommuv2_get_mtlb_addr()
240 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_get_pta_id() local
242 return v2_context->id; in etnaviv_iommuv2_get_pta_id()
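
The two accessors (source lines 233-242) are trivial: they expose the MTLB DMA address and the PTA slot id so the restore paths above can be driven from common code. Reconstructed:

    static u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);

            return v2_context->mtlb_dma;
    }

    static unsigned int etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context)
    {
            struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);

            return v2_context->id;
    }
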
272 struct etnaviv_iommuv2_context *v2_context; in etnaviv_iommuv2_context_alloc() local
275 v2_context = vzalloc(sizeof(*v2_context)); in etnaviv_iommuv2_context_alloc()
276 if (!v2_context) in etnaviv_iommuv2_context_alloc()
280 v2_context->id = find_first_zero_bit(global->v2.pta_alloc, in etnaviv_iommuv2_context_alloc()
282 if (v2_context->id < ETNAVIV_PTA_ENTRIES) { in etnaviv_iommuv2_context_alloc()
283 set_bit(v2_context->id, global->v2.pta_alloc); in etnaviv_iommuv2_context_alloc()
290 v2_context->mtlb_cpu = dma_alloc_wc(global->dev, SZ_4K, in etnaviv_iommuv2_context_alloc()
291 &v2_context->mtlb_dma, GFP_KERNEL); in etnaviv_iommuv2_context_alloc()
292 if (!v2_context->mtlb_cpu) in etnaviv_iommuv2_context_alloc()
295 memset32(v2_context->mtlb_cpu, MMUv2_PTE_EXCEPTION, in etnaviv_iommuv2_context_alloc()
298 global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma; in etnaviv_iommuv2_context_alloc()
300 context = &v2_context->base; in etnaviv_iommuv2_context_alloc()
310 clear_bit(v2_context->id, global->v2.pta_alloc); in etnaviv_iommuv2_context_alloc()
312 vfree(v2_context); in etnaviv_iommuv2_context_alloc()
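
Finally, the constructor (source lines 272-312) is the mirror image of etnaviv_iommuv2_free(): reserve a PTA slot under the global lock, allocate the MTLB page, mark every entry as faulting, and unwind in reverse order on failure. Sketch with the elided lines filled in from upstream (the base-context initialization is abbreviated):

    static struct etnaviv_iommu_context *
    etnaviv_iommuv2_context_alloc(struct etnaviv_iommu_global *global)
    {
            struct etnaviv_iommuv2_context *v2_context;
            struct etnaviv_iommu_context *context;

            v2_context = vzalloc(sizeof(*v2_context));
            if (!v2_context)
                    return NULL;

            /* Reserve a slot in the global page table array. */
            mutex_lock(&global->lock);
            v2_context->id = find_first_zero_bit(global->v2.pta_alloc,
                                                 ETNAVIV_PTA_ENTRIES);
            if (v2_context->id < ETNAVIV_PTA_ENTRIES) {
                    set_bit(v2_context->id, global->v2.pta_alloc);
            } else {
                    mutex_unlock(&global->lock);
                    goto out_free;
            }
            mutex_unlock(&global->lock);

            v2_context->mtlb_cpu = dma_alloc_wc(global->dev, SZ_4K,
                                                &v2_context->mtlb_dma,
                                                GFP_KERNEL);
            if (!v2_context->mtlb_cpu)
                    goto out_free_id;

            /* Every MTLB entry faults until an STLB is hooked in. */
            memset32(v2_context->mtlb_cpu, MMUv2_PTE_EXCEPTION,
                     MMUv2_MAX_STLB_ENTRIES);

            global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma;

            context = &v2_context->base;
            context->global = global;
            /* ... refcount, lock, mapping list and drm_mm init elided ... */

            return context;

    out_free_id:
            clear_bit(v2_context->id, global->v2.pta_alloc);
    out_free:
            vfree(v2_context);
            return NULL;
    }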