Searched refs:bo_va (results 1 – 22 of 22, sorted by relevance)

/linux-6.1.9/drivers/gpu/drm/radeon/
radeon_vm.c
297 struct radeon_bo_va *bo_va; in radeon_vm_bo_find() local
299 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
300 if (bo_va->vm == vm) in radeon_vm_bo_find()
301 return bo_va; in radeon_vm_bo_find()
324 struct radeon_bo_va *bo_va; in radeon_vm_bo_add() local
326 bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL); in radeon_vm_bo_add()
327 if (bo_va == NULL) in radeon_vm_bo_add()
330 bo_va->vm = vm; in radeon_vm_bo_add()
331 bo_va->bo = bo; in radeon_vm_bo_add()
332 bo_va->it.start = 0; in radeon_vm_bo_add()
[all …]
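
The radeon_vm.c hits above are the heart of the per-mapping bookkeeping: radeon_vm_bo_find() walks the BO's va list looking for the entry that belongs to a given VM, and radeon_vm_bo_add() kzalloc()s a fresh radeon_bo_va and wires up its back-pointers. Below is a minimal, self-contained sketch of that find-or-allocate pattern using hypothetical demo_* stand-in types, not the real radeon structures (the real radeon_bo_va additionally carries an interval-tree node, mapping flags and fence pointers):

#include <linux/list.h>
#include <linux/slab.h>

struct demo_vm;                         /* opaque here; only compared by pointer */

struct demo_bo {
        struct list_head va;            /* all demo_bo_va mappings of this BO */
};

struct demo_bo_va {
        struct demo_vm *vm;             /* address space the mapping belongs to */
        struct demo_bo *bo;             /* buffer object being mapped */
        struct list_head bo_list;       /* link in demo_bo::va */
        unsigned int ref_count;         /* one reference per GEM handle open */
};

/* Find the mapping of @bo that belongs to @vm, if any. */
static struct demo_bo_va *demo_vm_bo_find(struct demo_vm *vm, struct demo_bo *bo)
{
        struct demo_bo_va *bo_va;

        list_for_each_entry(bo_va, &bo->va, bo_list)
                if (bo_va->vm == vm)
                        return bo_va;
        return NULL;
}

/* Allocate a new mapping object for (@vm, @bo) and link it to the BO. */
static struct demo_bo_va *demo_vm_bo_add(struct demo_vm *vm, struct demo_bo *bo)
{
        struct demo_bo_va *bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL);

        if (!bo_va)
                return NULL;
        bo_va->vm = vm;
        bo_va->bo = bo;
        bo_va->ref_count = 1;
        list_add_tail(&bo_va->bo_list, &bo->va);
        return bo_va;
}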

radeon_gem.c
204 struct radeon_bo_va *bo_va; in radeon_gem_object_open() local
217 bo_va = radeon_vm_bo_find(vm, rbo); in radeon_gem_object_open()
218 if (!bo_va) { in radeon_gem_object_open()
219 bo_va = radeon_vm_bo_add(rdev, vm, rbo); in radeon_gem_object_open()
221 ++bo_va->ref_count; in radeon_gem_object_open()
235 struct radeon_bo_va *bo_va; in radeon_gem_object_close() local
249 bo_va = radeon_vm_bo_find(vm, rbo); in radeon_gem_object_close()
250 if (bo_va) { in radeon_gem_object_close()
251 if (--bo_va->ref_count == 0) { in radeon_gem_object_close()
252 radeon_vm_bo_rmv(rdev, bo_va); in radeon_gem_object_close()
[all …]
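
The radeon_gem.c hits show how those mapping objects are reference counted across GEM handle lifetime: object_open finds the bo_va for the opening client's VM (creating it on first use) and takes a reference, object_close drops the reference and removes the mapping once the count reaches zero via radeon_vm_bo_rmv(). A sketch of that pairing, reusing the hypothetical demo_* helpers from the radeon_vm.c block above (the real code also reserves the BO and handles more error paths than shown):

#include <linux/errno.h>

/* GEM open: take a reference on the (vm, bo) mapping, creating it on first use. */
static int demo_gem_object_open(struct demo_vm *vm, struct demo_bo *bo)
{
        struct demo_bo_va *bo_va = demo_vm_bo_find(vm, bo);

        if (!bo_va) {
                bo_va = demo_vm_bo_add(vm, bo);         /* starts with ref_count 1 */
                if (!bo_va)
                        return -ENOMEM;
        } else {
                ++bo_va->ref_count;
        }
        return 0;
}

/* GEM close: drop the reference; tear the mapping down on the last one. */
static void demo_gem_object_close(struct demo_vm *vm, struct demo_bo *bo)
{
        struct demo_bo_va *bo_va = demo_vm_bo_find(vm, bo);

        if (bo_va && --bo_va->ref_count == 0) {
                list_del(&bo_va->bo_list);
                kfree(bo_va);
        }
}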

radeon_trace.h
66 TP_PROTO(struct radeon_bo_va *bo_va),
67 TP_ARGS(bo_va),
75 __entry->soffset = bo_va->it.start;
76 __entry->eoffset = bo_va->it.last + 1;
77 __entry->flags = bo_va->flags;

radeon_cs.c
502 struct radeon_bo_va *bo_va; in radeon_bo_vm_update_pte() local
527 bo_va = radeon_vm_bo_find(vm, bo); in radeon_bo_vm_update_pte()
528 if (bo_va == NULL) { in radeon_bo_vm_update_pte()
533 r = radeon_vm_bo_update(rdev, bo_va, bo->tbo.resource); in radeon_bo_vm_update_pte()
537 radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update); in radeon_bo_vm_update_pte()
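
radeon_cs.c ties the mapping into command submission: for each BO referenced by the job, radeon_bo_vm_update_pte() looks up the bo_va, rewrites its page-table entries for the BO's current backing store (bo->tbo.resource), and makes the indirect buffer wait on the resulting last_pt_update fence so the GPU never runs against stale translations. A sketch of that per-BO step on top of the demo_* types above; the fence type and the two helpers declared here are hypothetical stand-ins for radeon_vm_bo_update() and radeon_sync_fence(), not real kernel API:

#include <linux/errno.h>

struct demo_fence;                              /* stand-in for a dma_fence */

/* Hypothetical stand-ins, declared only so the sketch is complete. */
int demo_vm_bo_update(struct demo_bo_va *bo_va, struct demo_fence **last_pt_update);
void demo_sync_fence(struct demo_fence **job_sync, struct demo_fence *fence);

/* Per-BO submission step: refresh the mapping, then order the job after the
 * page-table update. */
static int demo_cs_update_one(struct demo_vm *vm, struct demo_bo *bo,
                              struct demo_fence **job_sync)
{
        struct demo_fence *last_pt_update = NULL;
        struct demo_bo_va *bo_va = demo_vm_bo_find(vm, bo);
        int r;

        if (!bo_va)                             /* BO was never mapped into this VM */
                return -EINVAL;

        r = demo_vm_bo_update(bo_va, &last_pt_update);
        if (r)
                return r;

        demo_sync_fence(job_sync, last_pt_update);
        return 0;
}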

radeon.h
2892 struct radeon_bo_va *bo_va,
2902 struct radeon_bo_va *bo_va,
2906 struct radeon_bo_va *bo_va);

/linux-6.1.9/drivers/gpu/drm/lima/
lima_vm.c
79 struct lima_bo_va *bo_va, *ret = NULL; in lima_vm_bo_find() local
81 list_for_each_entry(bo_va, &bo->va, list) { in lima_vm_bo_find()
82 if (bo_va->vm == vm) { in lima_vm_bo_find()
83 ret = bo_va; in lima_vm_bo_find()
93 struct lima_bo_va *bo_va; in lima_vm_bo_add() local
99 bo_va = lima_vm_bo_find(vm, bo); in lima_vm_bo_add()
100 if (bo_va) { in lima_vm_bo_add()
101 bo_va->ref_count++; in lima_vm_bo_add()
112 bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL); in lima_vm_bo_add()
113 if (!bo_va) { in lima_vm_bo_add()
[all …]
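
lima keeps the same per-(vm, bo) object but folds the lookup into the add path: lima_vm_bo_add() first calls lima_vm_bo_find() and, if a mapping already exists, just bumps its ref_count instead of allocating a second one. The shape of that helper in terms of the hypothetical demo_* functions above (the real helper does more, such as locking and actually mapping the pages, which is elided):

/* Find-or-add in one helper, as the lima hits suggest. */
static struct demo_bo_va *demo_lima_vm_bo_add(struct demo_vm *vm, struct demo_bo *bo)
{
        struct demo_bo_va *bo_va = demo_vm_bo_find(vm, bo);

        if (bo_va) {
                bo_va->ref_count++;
                return bo_va;
        }
        return demo_vm_bo_add(vm, bo);          /* new mapping, ref_count 1 */
}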

/linux-6.1.9/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c
915 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_get_memory() local
918 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_vm_get_memory()
919 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
921 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
924 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_vm_get_memory()
925 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
927 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
930 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_vm_get_memory()
931 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
933 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
[all …]
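
amdgpu additionally threads every amdgpu_bo_va onto per-VM state lists (idle, evicted, relocated, and further lists elided above), and amdgpu_vm_get_memory() walks those lists to attribute the backing BOs' memory to the VM. A self-contained sketch of that accounting walk with hypothetical acct_* types (the real code splits the totals by placement, as the vram_mem argument suggests):

#include <linux/list.h>

struct acct_bo {
        unsigned long size;                     /* bytes backing this BO */
};

struct acct_bo_va {
        struct acct_bo *bo;                     /* may be NULL, as the real code checks */
        struct list_head vm_status;             /* link in one of the VM state lists */
};

struct acct_vm {
        struct list_head idle;
        struct list_head evicted;
        struct list_head relocated;
};

static void acct_sum_list(struct list_head *head, unsigned long *total)
{
        struct acct_bo_va *bo_va, *tmp;

        /* _safe walk: tolerates the current entry being unlinked while we
         * inspect it, mirroring list_for_each_entry_safe() in amdgpu_vm.c */
        list_for_each_entry_safe(bo_va, tmp, head, vm_status) {
                if (!bo_va->bo)
                        continue;
                *total += bo_va->bo->size;
        }
}

static unsigned long acct_vm_get_memory(struct acct_vm *vm)
{
        unsigned long total = 0;

        acct_sum_list(&vm->idle, &total);
        acct_sum_list(&vm->evicted, &total);
        acct_sum_list(&vm->relocated, &total);
        return total;
}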

amdgpu_csa.c
65 struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va, in amdgpu_map_static_csa() argument
88 *bo_va = amdgpu_vm_bo_add(adev, vm, bo); in amdgpu_map_static_csa()
89 if (!*bo_va) { in amdgpu_map_static_csa()
95 r = amdgpu_vm_bo_map(adev, *bo_va, csa_addr, 0, size, in amdgpu_map_static_csa()
101 amdgpu_vm_bo_del(adev, *bo_va); in amdgpu_map_static_csa()
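
amdgpu_map_static_csa() shows the unwind discipline around these objects: create the bo_va with amdgpu_vm_bo_add(), attempt the fixed-address mapping with amdgpu_vm_bo_map(), and delete the bo_va again with amdgpu_vm_bo_del() if the map fails, so no half-initialized mapping is left behind. A sketch of that shape with hypothetical csa_* stand-ins for those three helpers:

#include <linux/errno.h>

struct csa_vm;
struct csa_bo;
struct csa_bo_va;

/* Hypothetical stand-ins for amdgpu_vm_bo_add(), _map() and _del(). */
struct csa_bo_va *csa_vm_bo_add(struct csa_vm *vm, struct csa_bo *bo);
int csa_vm_bo_map(struct csa_bo_va *bo_va, unsigned long va, unsigned long size);
void csa_vm_bo_del(struct csa_bo_va *bo_va);

/* Map @bo at the fixed address @va; undo the bo_va creation on failure. */
static int csa_map_static(struct csa_vm *vm, struct csa_bo *bo,
                          struct csa_bo_va **out_bo_va,
                          unsigned long va, unsigned long size)
{
        struct csa_bo_va *bo_va = csa_vm_bo_add(vm, bo);
        int r;

        if (!bo_va)
                return -ENOMEM;

        r = csa_vm_bo_map(bo_va, va, size);
        if (r) {
                csa_vm_bo_del(bo_va);           /* unwind: drop the mapping object */
                return r;
        }

        *out_bo_va = bo_va;
        return 0;
}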

amdgpu_gem.c
164 struct amdgpu_bo_va *bo_va; in amdgpu_gem_object_open() local
180 bo_va = amdgpu_vm_bo_find(vm, abo); in amdgpu_gem_object_open()
181 if (!bo_va) { in amdgpu_gem_object_open()
182 bo_va = amdgpu_vm_bo_add(adev, vm, abo); in amdgpu_gem_object_open()
184 ++bo_va->ref_count; in amdgpu_gem_object_open()
203 struct amdgpu_bo_va *bo_va; in amdgpu_gem_object_close() local
221 bo_va = amdgpu_vm_bo_find(vm, bo); in amdgpu_gem_object_close()
222 if (!bo_va || --bo_va->ref_count) in amdgpu_gem_object_close()
225 amdgpu_vm_bo_del(adev, bo_va); in amdgpu_gem_object_close()
602 struct amdgpu_bo_va *bo_va, in amdgpu_gem_va_update_vm() argument
[all …]

amdgpu_amdkfd_gpuvm.c
77 if (entry->bo_va->base.vm == avm) in kfd_mem_is_attached()
505 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_userptr()
555 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_dmabuf()
592 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_sg_bo()
669 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_userptr()
689 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_dmabuf()
716 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_sg_bo()
895 attachment[i]->bo_va = amdgpu_vm_bo_add(adev, vm, bo[i]); in kfd_mem_attach()
897 if (unlikely(!attachment[i]->bo_va)) { in kfd_mem_attach()
917 if (attachment[i]->bo_va) { in kfd_mem_attach()
[all …]
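
On the KFD side a single allocation can be attached to several GPU VMs; each attachment records its own bo_va (created with amdgpu_vm_bo_add() in kfd_mem_attach()), which is how the code both answers "is this memory already attached to that VM?" and reaches the underlying BO for DMA mapping and unmapping. A sketch of that per-attachment bookkeeping with hypothetical kfd_demo_* types:

#include <linux/list.h>

struct kfd_demo_vm;
struct kfd_demo_bo;

struct kfd_demo_bo_va {
        struct kfd_demo_vm *vm;                 /* VM this attachment maps into */
        struct kfd_demo_bo *bo;                 /* BO used for the DMA mapping */
};

/* One attachment per GPU VM the allocation is mapped into. */
struct kfd_demo_attachment {
        struct kfd_demo_bo_va *bo_va;
        struct list_head list;                  /* link in the allocation's attachment list */
};

struct kfd_demo_mem {
        struct list_head attachments;
};

/* "Is this memory already attached to @vm?": answered through the bo_va. */
static bool kfd_demo_mem_is_attached(struct kfd_demo_mem *mem, struct kfd_demo_vm *vm)
{
        struct kfd_demo_attachment *entry;

        list_for_each_entry(entry, &mem->attachments, list)
                if (entry->bo_va && entry->bo_va->vm == vm)
                        return true;
        return false;
}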

amdgpu_trace.h
246 TP_PROTO(struct amdgpu_bo_va *bo_va,
248 TP_ARGS(bo_va, mapping),
258 __entry->bo = bo_va ? bo_va->base.bo : NULL;
270 TP_PROTO(struct amdgpu_bo_va *bo_va,
272 TP_ARGS(bo_va, mapping),
282 __entry->bo = bo_va ? bo_va->base.bo : NULL;

amdgpu_vm.h
417 struct amdgpu_bo_va *bo_va,
429 struct amdgpu_bo_va *bo_va,
433 struct amdgpu_bo_va *bo_va,
437 struct amdgpu_bo_va *bo_va,
446 struct amdgpu_bo_va *bo_va);

amdgpu_cs.c
943 e->bo_va = amdgpu_vm_bo_find(vm, bo); in amdgpu_cs_parser_bos()
1096 struct amdgpu_bo_va *bo_va; in amdgpu_cs_vm_handling() local
1114 bo_va = fpriv->csa_va; in amdgpu_cs_vm_handling()
1115 BUG_ON(!bo_va); in amdgpu_cs_vm_handling()
1116 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_cs_vm_handling()
1120 r = amdgpu_sync_fence(&job->sync, bo_va->last_pt_update); in amdgpu_cs_vm_handling()
1131 bo_va = e->bo_va; in amdgpu_cs_vm_handling()
1132 if (bo_va == NULL) in amdgpu_cs_vm_handling()
1135 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_cs_vm_handling()
1139 r = amdgpu_sync_fence(&job->sync, bo_va->last_pt_update); in amdgpu_cs_vm_handling()
[all …]
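
amdgpu splits the same submission-time work into two phases: while building the BO list, amdgpu_cs_parser_bos() caches the result of amdgpu_vm_bo_find() in each entry's bo_va member (see the amdgpu_bo_list.h hit below), and amdgpu_cs_vm_handling() then walks those cached pointers, updating each mapping and collecting its last_pt_update fence into the job's sync object, much like the radeon_cs.c sketch earlier. A compressed sketch of that two-phase flow, reusing the demo_* types from the radeon_vm.c block; the cs_* names and the two declared helpers are hypothetical stand-ins for amdgpu_vm_bo_update() and amdgpu_sync_fence():

struct cs_fence;                                /* stand-in for a dma_fence */

/* Hypothetical stand-ins, declared only so the sketch is complete. */
int cs_vm_bo_update(struct demo_bo_va *bo_va, struct cs_fence **last_pt_update);
void cs_sync_fence(struct cs_fence **job_sync, struct cs_fence *fence);

/* Phase 1: cache the bo_va lookup once per BO-list entry. */
struct cs_entry {
        struct demo_bo *bo;
        struct demo_bo_va *bo_va;               /* cached lookup result */
};

static void cs_parse_bos(struct demo_vm *vm, struct cs_entry *entries, unsigned int n)
{
        unsigned int i;

        for (i = 0; i < n; i++)
                entries[i].bo_va = demo_vm_bo_find(vm, entries[i].bo);
}

/* Phase 2: update every cached mapping and gather its PT-update fence. */
static int cs_vm_handling(struct cs_entry *entries, unsigned int n,
                          struct cs_fence **job_sync)
{
        unsigned int i;
        int r;

        for (i = 0; i < n; i++) {
                struct demo_bo_va *bo_va = entries[i].bo_va;
                struct cs_fence *last_pt_update = NULL;

                if (!bo_va)                     /* BO not mapped into this VM */
                        continue;

                r = cs_vm_bo_update(bo_va, &last_pt_update);
                if (r)
                        return r;
                cs_sync_fence(job_sync, last_pt_update);
        }
        return 0;
}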

amdgpu_mes.c
1126 struct amdgpu_bo_va *bo_va; in amdgpu_mes_ctx_map_meta_data() local
1150 bo_va = amdgpu_vm_bo_add(adev, vm, ctx_data->meta_data_obj); in amdgpu_mes_ctx_map_meta_data()
1151 if (!bo_va) { in amdgpu_mes_ctx_map_meta_data()
1157 r = amdgpu_vm_bo_map(adev, bo_va, ctx_data->meta_data_gpu_addr, 0, in amdgpu_mes_ctx_map_meta_data()
1167 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_mes_ctx_map_meta_data()
1172 amdgpu_sync_fence(&sync, bo_va->last_pt_update); in amdgpu_mes_ctx_map_meta_data()
1185 ctx_data->meta_data_va = bo_va; in amdgpu_mes_ctx_map_meta_data()
1189 amdgpu_vm_bo_del(adev, bo_va); in amdgpu_mes_ctx_map_meta_data()
1198 struct amdgpu_bo_va *bo_va = ctx_data->meta_data_va; in amdgpu_mes_ctx_unmap_meta_data() local
1200 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_mes_ctx_unmap_meta_data()
[all …]

amdgpu_csa.h
35 struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,

amdgpu_bo_list.h
38 struct amdgpu_bo_va *bo_va; member

amdgpu_object.h
63 struct amdgpu_bo_va *bo_va; member

amdgpu_amdkfd.h
58 struct amdgpu_bo_va *bo_va; member

vcn_v1_0.c
1927 if (!mapping || !mapping->bo_va || !mapping->bo_va->base.bo) in vcn_v1_0_validate_bo()
1930 bo = mapping->bo_va->base.bo; in vcn_v1_0_validate_bo()

gmc_v9_0.c
1134 mapping->bo_va->is_xgmi) in gmc_v9_0_get_vm_pte()

/linux-6.1.9/drivers/gpu/drm/amd/amdkfd/
kfd_chardev.c
359 wptr_bo = wptr_mapping->bo_va->base.bo; in kfd_ioctl_create_queue()

kfd_svm.c
2628 bo = mapping->bo_va->base.bo; in svm_range_check_vm_userptr()