/linux-6.1.9/drivers/gpu/drm/etnaviv/

etnaviv_buffer.c
    123  &gpu->mmu_context->cmdbuf_mapping) +                               in etnaviv_buffer_dump()
    158  &gpu->mmu_context->cmdbuf_mapping) +                               in etnaviv_buffer_reserve()
    173  etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping)   in etnaviv_buffer_init()
    325  etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping)   in etnaviv_sync_point_queue()
    340  struct etnaviv_iommu_context *mmu_context, unsigned int event,     in etnaviv_buffer_queue() (argument)
    348  bool switch_mmu_context = gpu->mmu_context != mmu_context;         in etnaviv_buffer_queue()
    349  unsigned int new_flush_seq = READ_ONCE(gpu->mmu_context->flush_seq);  in etnaviv_buffer_queue()
    360  &gpu->mmu_context->cmdbuf_mapping);                                in etnaviv_buffer_queue()
    376  if (gpu->mmu_context->global->version == ETNAVIV_IOMMU_V1)         in etnaviv_buffer_queue()
    398  struct etnaviv_iommu_context *old_context = gpu->mmu_context;      in etnaviv_buffer_queue()
    [all …]
etnaviv_dump.c
    128  mutex_lock(&gpu->mmu_context->lock);                        in etnaviv_core_dump()
    130  mmu_size = etnaviv_iommu_dump_size(gpu->mmu_context);       in etnaviv_core_dump()
    160  mutex_unlock(&gpu->mmu_context->lock);                      in etnaviv_core_dump()
    172  etnaviv_core_dump_mmu(&iter, gpu->mmu_context, mmu_size);   in etnaviv_core_dump()
    176  &gpu->mmu_context->cmdbuf_mapping));                        in etnaviv_core_dump()
    181  &gpu->mmu_context->cmdbuf_mapping));                        in etnaviv_core_dump()
    183  mutex_unlock(&gpu->mmu_context->lock);                      in etnaviv_core_dump()
etnaviv_iommu_v2.c
    175  if (gpu->mmu_context)                                       in etnaviv_iommuv2_restore_nonsec()
    176  etnaviv_iommu_context_put(gpu->mmu_context);                in etnaviv_iommuv2_restore_nonsec()
    177  gpu->mmu_context = etnaviv_iommu_context_get(context);      in etnaviv_iommuv2_restore_nonsec()
    199  if (gpu->mmu_context)                                       in etnaviv_iommuv2_restore_sec()
    200  etnaviv_iommu_context_put(gpu->mmu_context);                in etnaviv_iommuv2_restore_sec()
    201  gpu->mmu_context = etnaviv_iommu_context_get(context);      in etnaviv_iommuv2_restore_sec()
etnaviv_drv.c
    133  struct etnaviv_iommu_context *mmu_context;                  in etnaviv_mmu_show() (local)
    143  mmu_context = gpu->mmu_context;                             in etnaviv_mmu_show()
    144  if (mmu_context)                                            in etnaviv_mmu_show()
    145  etnaviv_iommu_context_get(mmu_context);                     in etnaviv_mmu_show()
    148  if (!mmu_context)                                           in etnaviv_mmu_show()
    151  mutex_lock(&mmu_context->lock);                             in etnaviv_mmu_show()
    152  drm_mm_print(&mmu_context->mm, &p);                         in etnaviv_mmu_show()
    153  mutex_unlock(&mmu_context->lock);                           in etnaviv_mmu_show()
    155  etnaviv_iommu_context_put(mmu_context);                     in etnaviv_mmu_show()
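Taken together, the etnaviv_mmu_show() hits outline the debugfs pattern for inspecting the active context: take a reference on gpu->mmu_context, bail out if there is none, dump the drm_mm allocator state under the context lock, then drop the reference. Below is a minimal sketch reconstructed only from the quoted lines; the locking around reading gpu->mmu_context, the drm_printer setup, and the function name itself are assumptions, not the driver's actual code.

    static void etnaviv_mmu_show_sketch(struct etnaviv_gpu *gpu,
                                        struct drm_printer *p)
    {
            struct etnaviv_iommu_context *mmu_context;

            /* Pin the currently active context so it stays alive
             * while it is being printed. */
            mmu_context = gpu->mmu_context;
            if (mmu_context)
                    etnaviv_iommu_context_get(mmu_context);

            if (!mmu_context)
                    return;

            /* Dump the address-space allocator under the context lock. */
            mutex_lock(&mmu_context->lock);
            drm_mm_print(&mmu_context->mm, p);
            mutex_unlock(&mmu_context->lock);

            etnaviv_iommu_context_put(mmu_context);
    }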
etnaviv_iommu.c
    95   if (gpu->mmu_context)                                       in etnaviv_iommuv1_restore()
    96   etnaviv_iommu_context_put(gpu->mmu_context);                in etnaviv_iommuv1_restore()
    97   gpu->mmu_context = etnaviv_iommu_context_get(context);      in etnaviv_iommuv1_restore()
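The etnaviv_iommu.c and etnaviv_iommu_v2.c hits show the same reference-swap idiom in all three restore paths (v1, v2 non-secure, v2 secure): the GPU drops its reference on the previously active context before pinning the context being restored. A minimal sketch of that idiom follows; the surrounding hardware-specific MMU programming is elided, the function name is hypothetical, and only the three quoted statements come from the hits.

    static void etnaviv_iommu_restore_sketch(struct etnaviv_gpu *gpu,
                                             struct etnaviv_iommu_context *context)
    {
            /* ... hardware-specific MMU register programming elided ... */

            /* Release the reference held on the previously active context,
             * if any, then take a reference on the new one and make it
             * the GPU's current context. */
            if (gpu->mmu_context)
                    etnaviv_iommu_context_put(gpu->mmu_context);
            gpu->mmu_context = etnaviv_iommu_context_get(context);
    }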
etnaviv_gem.c
    241  struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context,  in etnaviv_gem_mapping_get() (argument)
    250  mapping = etnaviv_gem_get_vram_mapping(etnaviv_obj, mmu_context);        in etnaviv_gem_mapping_get()
    259  mutex_lock(&mmu_context->lock);                                          in etnaviv_gem_mapping_get()
    260  if (mapping->context == mmu_context)                                     in etnaviv_gem_mapping_get()
    269  mutex_unlock(&mmu_context->lock);                                        in etnaviv_gem_mapping_get()
    304  ret = etnaviv_iommu_map_gem(mmu_context, etnaviv_obj,                    in etnaviv_gem_mapping_get()
    305  mmu_context->global->memory_base,                                        in etnaviv_gem_mapping_get()
etnaviv_gem.h
    94   struct etnaviv_iommu_context *mmu_context, *prev_mmu_context;  (member)
    120  struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context,
etnaviv_gem_submit.c
    222  submit->mmu_context,                                                 in submit_pin_objects()
    371  if (submit->mmu_context)                                             in submit_cleanup()
    372  etnaviv_iommu_context_put(submit->mmu_context);                      in submit_cleanup()
    528  submit->mmu_context = etnaviv_iommu_context_get(submit->ctx->mmu);   in etnaviv_ioctl_gem_submit()
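The etnaviv_gem_submit.c hits outline the lifetime of submit->mmu_context: etnaviv_ioctl_gem_submit() takes a reference on the submitting client's MMU context, submit_pin_objects() maps buffers into it, and submit_cleanup() drops the reference when the submit is torn down. The sketch below shows the two ends of that lifetime; the wrapper functions are hypothetical and only the quoted statements appear in the hits.

    /* etnaviv_ioctl_gem_submit(): pin the client's MMU context for this submit. */
    static void submit_take_mmu_context_sketch(struct etnaviv_gem_submit *submit)
    {
            submit->mmu_context = etnaviv_iommu_context_get(submit->ctx->mmu);
    }

    /* submit_cleanup(): drop the reference once the submit is destroyed. */
    static void submit_release_mmu_context_sketch(struct etnaviv_gem_submit *submit)
    {
            if (submit->mmu_context)
                    etnaviv_iommu_context_put(submit->mmu_context);
    }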
etnaviv_gpu.c
    581   if (gpu->mmu_context)                                                    in etnaviv_hw_reset()
    582   etnaviv_iommu_context_put(gpu->mmu_context);                             in etnaviv_hw_reset()
    583   gpu->mmu_context = NULL;                                                 in etnaviv_hw_reset()
    669   &gpu->mmu_context->cmdbuf_mapping);                                      in etnaviv_gpu_start_fe_idleloop()
    1388  etnaviv_gpu_start_fe_idleloop(gpu, submit->mmu_context);                 in etnaviv_gpu_submit()
    1392  submit->prev_mmu_context = etnaviv_iommu_context_get(gpu->mmu_context);  in etnaviv_gpu_submit()
    1403  etnaviv_buffer_queue(gpu, submit->exec_state, submit->mmu_context,       in etnaviv_gpu_submit()
    1754  if (gpu->mmu_context)                                                    in etnaviv_gpu_unbind()
    1755  etnaviv_iommu_context_put(gpu->mmu_context);                             in etnaviv_gpu_unbind()
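On the etnaviv_gpu.c side, the hits in etnaviv_hw_reset() and etnaviv_gpu_unbind() show the teardown half of the same reference counting: both paths drop the GPU's reference on its current context, and the reset path additionally clears the pointer so a later submit must restore a context from scratch. A minimal sketch, again built only from the quoted lines; the helper name is hypothetical.

    static void etnaviv_gpu_drop_mmu_context_sketch(struct etnaviv_gpu *gpu)
    {
            if (gpu->mmu_context)
                    etnaviv_iommu_context_put(gpu->mmu_context);
            /* etnaviv_hw_reset() also forgets the context entirely. */
            gpu->mmu_context = NULL;
    }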
etnaviv_gpu.h
    138  struct etnaviv_iommu_context *mmu_context;  (member)
/linux-6.1.9/arch/microblaze/mm/

Makefile
    6    obj-y := consistent.o init.o pgtable.o mmu_context.o fault.o
/linux-6.1.9/arch/powerpc/mm/book3s32/

Makefile
    9    obj-y += mmu.o mmu_context.o
/linux-6.1.9/arch/nios2/mm/

Makefile
    12   obj-y += mmu_context.o
/linux-6.1.9/arch/powerpc/mm/nohash/

Makefile
    5    obj-y += mmu_context.o tlb.o tlb_low.o kup.o
/linux-6.1.9/arch/powerpc/mm/

Makefile
    11   init-common.o mmu_context.o drmem.o \
/linux-6.1.9/arch/powerpc/mm/book3s64/

Makefile
    5    obj-y += mmu_context.o pgtable.o trace.o
/linux-6.1.9/include/asm-generic/

Kbuild
    41   mandatory-y += mmu_context.h