/linux-6.1.9/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_object.c |
    56  static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo)  in amdgpu_bo_destroy() argument
    58  struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  in amdgpu_bo_destroy()
    62  if (bo->tbo.base.import_attach)  in amdgpu_bo_destroy()
    63  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in amdgpu_bo_destroy()
    64  drm_gem_object_release(&bo->tbo.base);  in amdgpu_bo_destroy()
    69  static void amdgpu_bo_user_destroy(struct ttm_buffer_object *tbo)  in amdgpu_bo_user_destroy() argument
    71  struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  in amdgpu_bo_user_destroy()
    76  amdgpu_bo_destroy(tbo);  in amdgpu_bo_user_destroy()
    79  static void amdgpu_bo_vm_destroy(struct ttm_buffer_object *tbo)  in amdgpu_bo_vm_destroy() argument
    81  struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev);  in amdgpu_bo_vm_destroy()
    [all …]
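The amdgpu_object.c hits above show the two idioms that make the embedded tbo work: the TTM destroy callback only ever receives a struct ttm_buffer_object pointer, the driver recovers its own wrapper with ttm_to_amdgpu_bo() (a container_of(), see the amdgpu_object.h hits below), and the specialised destructors (user, vm) do their extra teardown before falling through to the common amdgpu_bo_destroy() at line 76. Below is a minimal userspace sketch of that shape; the struct members, the metadata field and the teardown bodies are illustrative stand-ins, not the kernel's definitions.

```c
/* Minimal userspace sketch of the destroy-callback layering above.
 * Struct layouts and teardown details are stand-ins, not kernel code. */
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct ttm_buffer_object {
	/* the real object carries much more; only the destroy hook matters here */
	void (*destroy)(struct ttm_buffer_object *tbo);
};

struct amdgpu_bo {
	struct ttm_buffer_object tbo;	/* embedded by value, as in amdgpu_object.h */
	char *metadata;			/* stand-in for flavour-specific state */
};

/* Mirrors ttm_to_amdgpu_bo(): TTM pointer -> containing driver BO. */
static struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)
{
	return container_of(tbo, struct amdgpu_bo, tbo);
}

/* Common destructor, cf. amdgpu_bo_destroy() at lines 56-64 above. */
static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo)
{
	struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);

	printf("common teardown of bo %p\n", (void *)bo);
	free(bo);
}

/* Flavour-specific destructor, cf. amdgpu_bo_user_destroy() at lines 69-76:
 * extra cleanup first, then delegate to the common path. */
static void amdgpu_bo_user_destroy(struct ttm_buffer_object *tbo)
{
	free(ttm_to_amdgpu_bo(tbo)->metadata);
	amdgpu_bo_destroy(tbo);
}

int main(void)
{
	struct amdgpu_bo *bo = calloc(1, sizeof(*bo));

	bo->metadata = malloc(16);
	bo->tbo.destroy = amdgpu_bo_user_destroy;

	/* Roughly what the TTM core does when the last reference drops. */
	bo->tbo.destroy(&bo->tbo);
	return 0;
}
```

Because the conversion is a constant pointer offset, the TTM core never needs to know anything about struct amdgpu_bo itself.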
|
D | amdgpu_dma_buf.c |
    58  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_dma_buf_attach()
    88  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_dma_buf_detach()
    144  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_dma_buf_map()
    148  if (!bo->tbo.pin_count) {  in amdgpu_dma_buf_map()
    159  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_map()
    163  } else if (!(amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type) &  in amdgpu_dma_buf_map()
    168  switch (bo->tbo.resource->mem_type) {  in amdgpu_dma_buf_map()
    171  bo->tbo.ttm->pages,  in amdgpu_dma_buf_map()
    172  bo->tbo.ttm->num_pages);  in amdgpu_dma_buf_map()
    182  r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0,  in amdgpu_dma_buf_map()
    [all …]
|
D | amdgpu_object.h |
    99  struct ttm_buffer_object tbo;  member
    129  static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)  in ttm_to_amdgpu_bo() argument
    131  return container_of(tbo, struct amdgpu_bo, tbo);  in ttm_to_amdgpu_bo()
    172  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_bo_reserve()
    175  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in amdgpu_bo_reserve()
    186  ttm_bo_unreserve(&bo->tbo);  in amdgpu_bo_unreserve()
    191  return bo->tbo.base.size;  in amdgpu_bo_size()
    196  return bo->tbo.base.size / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_ngpu_pages()
    201  return (bo->tbo.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_gpu_page_alignment()
    212  return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  in amdgpu_bo_mmap_offset()
    [all …]
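The amdgpu_object.h hits also show how thin the size helpers are: amdgpu_bo_size() simply returns tbo.base.size, while amdgpu_bo_ngpu_pages() and amdgpu_bo_gpu_page_alignment() re-express that size and tbo.page_alignment in GPU-page units. The sketch below redoes that arithmetic in userspace to show why the conversion matters when the CPU page size differs from the GPU page size; the 4 KiB GPU page and the 64 KiB CPU page are assumptions chosen for illustration, not values read from the headers.

```c
/* Sketch of the unit conversions in the amdgpu_bo_*() helpers above.
 * Assumes a 4 KiB GPU page and, for contrast, a 64 KiB CPU page (as on some
 * arm64/ppc64 configs); both constants are illustrative assumptions. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT	16		/* 64 KiB CPU pages (assumed) */
#define GPU_PAGE_SIZE	4096u		/* 4 KiB GPU pages (assumed) */

int main(void)
{
	uint64_t bo_size = 1u << 20;	/* stand-in for tbo.base.size: 1 MiB */
	uint32_t page_alignment = 1;	/* stand-in for tbo.page_alignment, in CPU pages */

	/* amdgpu_bo_ngpu_pages(): bytes -> GPU pages */
	uint64_t ngpu_pages = bo_size / GPU_PAGE_SIZE;

	/* amdgpu_bo_gpu_page_alignment(): CPU pages -> bytes -> GPU pages */
	uint64_t gpu_page_alignment =
		((uint64_t)page_alignment << PAGE_SHIFT) / GPU_PAGE_SIZE;

	/* 1 MiB -> 256 GPU pages; 1 CPU page of alignment -> 16 GPU pages */
	printf("ngpu_pages=%llu gpu_page_alignment=%llu\n",
	       (unsigned long long)ngpu_pages,
	       (unsigned long long)gpu_page_alignment);
	return 0;
}
```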
|
D | amdgpu_amdkfd_gpuvm.c |
    232  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_amdkfd_release_notify()
    261  ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, align,  in create_dmamap_sg_bo()
    263  ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj);  in create_dmamap_sg_bo()
    298  dma_resv_replace_fences(bo->tbo.base.resv, ef->base.context,  in amdgpu_amdkfd_remove_eviction_fence()
    332  BUG_ON(!dma_resv_trylock(bo->tbo.base.resv));  in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
    334  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
    346  if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),  in amdgpu_amdkfd_bo_validate()
    352  ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_amdkfd_bo_validate()
    377  struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev);  in vm_validate_pt_pd_bos()
    394  struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev);  in vm_update_pds()
    [all …]
|
D | amdgpu_gem.c |
    123  *obj = &bo->tbo.base;  in amdgpu_gem_object_create()
    161  struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev);  in amdgpu_gem_object_open()
    168  mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm);  in amdgpu_gem_object_open()
    173  abo->tbo.base.resv != vm->root.bo->tbo.base.resv)  in amdgpu_gem_object_open()
    194  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_gem_object_close()
    209  tv.bo = &bo->tbo;  in amdgpu_gem_object_close()
    247  if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))  in amdgpu_gem_object_mmap()
    330  resv = vm->root.bo->tbo.base.resv;  in amdgpu_gem_create_ioctl()
    413  r = amdgpu_ttm_tt_set_userptr(&bo->tbo, args->addr, args->flags);  in amdgpu_gem_userptr_ioctl()
    422  r = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages,  in amdgpu_gem_userptr_ioctl()
    [all …]
|
D | amdgpu_vm.c |
    161  if (bo->tbo.type == ttm_bo_type_kernel)  in amdgpu_vm_bo_evicted()
    270  if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv)  in amdgpu_vm_bo_base_init()
    273  dma_resv_assert_held(vm->root.bo->tbo.base.resv);  in amdgpu_vm_bo_base_init()
    275  ttm_bo_set_bulk_move(&bo->tbo, &vm->lru_bulk_move);  in amdgpu_vm_bo_base_init()
    276  if (bo->tbo.type == ttm_bo_type_kernel && bo->parent)  in amdgpu_vm_bo_base_init()
    282  amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type))  in amdgpu_vm_bo_base_init()
    308  entry->tv.bo = &vm->root.bo->tbo;  in amdgpu_vm_get_pd_bo()
    373  if (bo->tbo.type != ttm_bo_type_kernel) {  in amdgpu_vm_validate_pt_bos()
    985  resv = vm->root.bo->tbo.base.resv;  in amdgpu_vm_bo_update()
    987  struct drm_gem_object *obj = &bo->tbo.base;  in amdgpu_vm_bo_update()
    [all …]
|
D | amdgpu_cs.c |
    131  p->uf_entry.tv.bo = &bo->tbo;  in amdgpu_cs_p1_user_fence()
    143  if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {  in amdgpu_cs_p1_user_fence()
    765  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_cs_bo_validate()
    770  .resv = bo->tbo.base.resv  in amdgpu_cs_bo_validate()
    775  if (bo->tbo.pin_count)  in amdgpu_cs_bo_validate()
    782  (!bo->tbo.base.dma_buf ||  in amdgpu_cs_bo_validate()
    783  list_empty(&bo->tbo.base.dma_buf->attachments))) {  in amdgpu_cs_bo_validate()
    803  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_cs_bo_validate()
    829  usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm);  in amdgpu_cs_list_validate()
    833  if (amdgpu_ttm_tt_is_userptr(bo->tbo.ttm) &&  in amdgpu_cs_list_validate()
    [all …]
|
D | amdgpu_mn.c |
    68  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_mn_invalidate_gfx()
    78  r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,  in amdgpu_mn_invalidate_gfx()
    105  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_mn_invalidate_hsa()
|
D | amdgpu_vm_sdma.c |
    40  r = amdgpu_ttm_alloc_gart(&table->bo.tbo);  in amdgpu_vm_sdma_map_table()
    45  r = amdgpu_ttm_alloc_gart(&table->shadow->tbo);  in amdgpu_vm_sdma_map_table()
    115  dma_resv_add_fence(p->vm->root.bo->tbo.base.resv, f,  in amdgpu_vm_sdma_commit()
    222  dma_resv_iter_begin(&cursor, bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL);  in amdgpu_vm_sdma_update()
|
D | amdgpu_gtt_mgr.c |
    114  struct ttm_buffer_object *tbo,  in amdgpu_gtt_mgr_new() argument
    119  uint32_t num_pages = PFN_UP(tbo->base.size);  in amdgpu_gtt_mgr_new()
    127  ttm_resource_init(tbo, place, &node->base);  in amdgpu_gtt_mgr_new()
    137  num_pages, tbo->page_alignment,  in amdgpu_gtt_mgr_new()
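amdgpu_gtt_mgr_new() above derives its allocator inputs straight from the TTM object it is handed: PFN_UP(tbo->base.size) rounds the BO's byte size up to whole pages, and (as the amdgpu_vram_mgr.c hits further down show) tbo->page_alignment shifted by PAGE_SHIFT becomes a minimum block size in bytes. A tiny sketch of that arithmetic, assuming 4 KiB pages:

```c
/* Sketch of the size/alignment arithmetic used by the resource managers
 * above: PFN_UP() rounds a byte size up to whole pages, and a page-unit
 * alignment becomes a byte-unit minimum block size via PAGE_SHIFT.
 * A 4 KiB page size is assumed here purely for illustration. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT	12
#define PAGE_SIZE	(1UL << PAGE_SHIFT)
/* Same idea as the kernel's PFN_UP(): round up, then convert to pages. */
#define PFN_UP(x)	(((x) + PAGE_SIZE - 1) >> PAGE_SHIFT)

int main(void)
{
	uint64_t bo_size = 6 * 1024 + 123;	/* an odd-sized BO, in bytes */
	uint32_t page_alignment = 2;		/* alignment, in pages */

	uint64_t num_pages = PFN_UP(bo_size);
	uint64_t min_block_size = (uint64_t)page_alignment << PAGE_SHIFT;

	/* 6267 bytes -> 2 pages; 2-page alignment -> 8192-byte blocks */
	printf("num_pages=%llu min_block_size=%llu\n",
	       (unsigned long long)num_pages,
	       (unsigned long long)min_block_size);
	return 0;
}
```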
|
D | amdgpu_ttm.c |
    479  if (WARN_ON_ONCE(abo->tbo.pin_count > 0))  in amdgpu_bo_move()
    661  struct ttm_tt *ttm = bo->tbo.ttm;  in amdgpu_ttm_tt_get_user_pages()
    810  struct ttm_buffer_object *tbo,  in amdgpu_ttm_gart_bind() argument
    813  struct amdgpu_bo *abo = ttm_to_amdgpu_bo(tbo);  in amdgpu_ttm_gart_bind()
    814  struct ttm_tt *ttm = tbo->ttm;  in amdgpu_ttm_gart_bind()
    969  void amdgpu_ttm_recover_gart(struct ttm_buffer_object *tbo)  in amdgpu_ttm_recover_gart() argument
    971  struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev);  in amdgpu_ttm_recover_gart()
    974  if (!tbo->ttm)  in amdgpu_ttm_recover_gart()
    977  flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource);  in amdgpu_ttm_recover_gart()
    978  amdgpu_ttm_gart_bind(adev, tbo, flags);  in amdgpu_ttm_recover_gart()
    [all …]
|
D | amdgpu_vram_mgr.c |
    250  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_vram_mgr_bo_visible_size()
    251  struct ttm_resource *res = bo->tbo.resource;  in amdgpu_vram_mgr_bo_visible_size()
    384  struct ttm_buffer_object *tbo,  in amdgpu_vram_mgr_new() argument
    405  if (tbo->type != ttm_bo_type_kernel)  in amdgpu_vram_mgr_new()
    418  tbo->page_alignment);  in amdgpu_vram_mgr_new()
    425  ttm_resource_init(tbo, place, &vres->base);  in amdgpu_vram_mgr_new()
    446  if (tbo->page_alignment)  in amdgpu_vram_mgr_new()
    447  min_block_size = (u64)tbo->page_alignment << PAGE_SHIFT;  in amdgpu_vram_mgr_new()
|
D | amdgpu_vm_pt.c |
    419  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_vm_pt_clear()
    426  r = ttm_bo_validate(&shadow->tbo, &shadow->placement, &ctx);  in amdgpu_vm_pt_clear()
    533  bp.resv = vm->root.bo->tbo.base.resv;  in amdgpu_vm_pt_create()
    546  WARN_ON(dma_resv_lock(bo->tbo.base.resv,  in amdgpu_vm_pt_create()
    554  bp.resv = bo->tbo.base.resv;  in amdgpu_vm_pt_create()
    560  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_vm_pt_create()
    637  ttm_bo_set_bulk_move(&shadow->tbo, NULL);  in amdgpu_vm_pt_free()
    640  ttm_bo_set_bulk_move(&entry->bo->tbo, NULL);  in amdgpu_vm_pt_free()
|
D | amdgpu_preempt_mgr.c |
    60  struct ttm_buffer_object *tbo,  in amdgpu_preempt_mgr_new() argument
    68  ttm_resource_init(tbo, place, *res);  in amdgpu_preempt_mgr_new()
|
/linux-6.1.9/drivers/gpu/drm/qxl/ |
D | qxl_object.c |
    35  static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in qxl_ttm_bo_destroy() argument
    40  bo = to_qxl_bo(tbo);  in qxl_ttm_bo_destroy()
    41  qdev = to_qxl(bo->tbo.base.dev);  in qxl_ttm_bo_destroy()
    48  drm_gem_object_release(&bo->tbo.base);  in qxl_ttm_bo_destroy()
    65  if (qbo->tbo.base.size <= PAGE_SIZE)  in qxl_ttm_placement_from_domain()
    128  r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);  in qxl_bo_create()
    133  bo->tbo.base.funcs = &qxl_object_funcs;  in qxl_bo_create()
    143  bo->tbo.priority = priority;  in qxl_bo_create()
    144  r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,  in qxl_bo_create()
    155  ttm_bo_pin(&bo->tbo);  in qxl_bo_create()
    [all …]
|
D | qxl_gem.c |
    35  struct ttm_buffer_object *tbo;  in qxl_gem_object_free() local
    41  tbo = &qobj->tbo;  in qxl_gem_object_free()
    42  ttm_bo_put(tbo);  in qxl_gem_object_free()
    66  *obj = &qbo->tbo.base;  in qxl_gem_object_create()
|
D | qxl_object.h |
    34  r = ttm_bo_reserve(&bo->tbo, true, false, NULL);  in qxl_bo_reserve()
    37  struct drm_device *ddev = bo->tbo.base.dev;  in qxl_bo_reserve()
    48  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_unreserve()
    53  return bo->tbo.base.size;  in qxl_bo_size()
|
D | qxl_debugfs.c |
    64  dma_resv_iter_begin(&cursor, bo->tbo.base.resv,  in qxl_debugfs_buffers_info()
    73  (unsigned long)bo->tbo.base.size,  in qxl_debugfs_buffers_info()
    74  bo->tbo.pin_count, rel);  in qxl_debugfs_buffers_info()
|
D | qxl_release.c |
    176  if (entry->tv.bo == &bo->tbo)  in qxl_release_list_add()
    185  entry->tv.bo = &bo->tbo;  in qxl_release_list_add()
    196  if (!bo->tbo.pin_count) {  in qxl_release_validate_bo()
    198  ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in qxl_release_validate_bo()
    203  ret = dma_resv_reserve_fences(bo->tbo.base.resv, 1);  in qxl_release_validate_bo()
    208  ret = qxl_bo_check_id(to_qxl(bo->tbo.base.dev), bo);  in qxl_release_validate_bo()
|
D | qxl_drv.h |
    76  struct ttm_buffer_object tbo;  member
    97  #define gem_to_qxl_bo(gobj) container_of((gobj), struct qxl_bo, tbo.base)
    98  #define to_qxl_bo(tobj) container_of((tobj), struct qxl_bo, tbo)
    289  (bo->tbo.resource->mem_type == TTM_PL_VRAM)  in qxl_bo_physical_address()
    294  return slot->high_bits | ((bo->tbo.resource->start << PAGE_SHIFT) + offset);  in qxl_bo_physical_address()
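qxl_drv.h packs the same embedding one level deeper: struct qxl_bo embeds the ttm_buffer_object (the member at line 76), whose own base member is the GEM object, so the two macros at lines 97-98 can recover the qxl_bo from either a GEM pointer (via the compound member designator tbo.base) or a TTM pointer. A minimal userspace re-creation, with stand-in struct contents rather than the kernel layouts:

```c
/* Userspace sketch of the two container_of() macros at lines 97-98 above.
 * Struct members are stand-ins; only the nesting relationship matters. */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct drm_gem_object { unsigned long size; };			/* stand-in */
struct ttm_buffer_object { struct drm_gem_object base; };	/* stand-in */

struct qxl_bo {
	int type;				/* stand-in driver field */
	struct ttm_buffer_object tbo;		/* the "member" hit at line 76 */
};

/* Mirrors the macros at lines 97-98. */
#define gem_to_qxl_bo(gobj) container_of((gobj), struct qxl_bo, tbo.base)
#define to_qxl_bo(tobj)     container_of((tobj), struct qxl_bo, tbo)

int main(void)
{
	struct qxl_bo bo = { .type = 1, .tbo = { .base = { .size = 4096 } } };
	struct drm_gem_object *gobj = &bo.tbo.base;	/* GEM view */
	struct ttm_buffer_object *tobj = &bo.tbo;	/* TTM view */

	/* Both views resolve to the same qxl_bo. */
	printf("same bo? %d\n", gem_to_qxl_bo(gobj) == to_qxl_bo(tobj));
	return 0;
}
```

This nesting is also why qxl_gem.c (above) can drop the whole object simply by calling ttm_bo_put() on &qobj->tbo.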
|
/linux-6.1.9/drivers/gpu/drm/radeon/ |
D | radeon_object.c |
    52  static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in radeon_ttm_bo_destroy() argument
    56  bo = container_of(tbo, struct radeon_bo, tbo);  in radeon_ttm_bo_destroy()
    63  if (bo->tbo.base.import_attach)  in radeon_ttm_bo_destroy()
    64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in radeon_ttm_bo_destroy()
    65  drm_gem_object_release(&bo->tbo.base);  in radeon_ttm_bo_destroy()
    155  drm_gem_private_object_init(rdev->ddev, &bo->tbo.base, size);  in radeon_bo_create()
    205  r = ttm_bo_init_validate(&rdev->mman.bdev, &bo->tbo, type,  in radeon_bo_create()
    224  r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL,  in radeon_bo_kmap()
    235  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.resource->num_pages, &bo->kmap);  in radeon_bo_kmap()
    261  ttm_bo_get(&bo->tbo);  in radeon_bo_ref()
    [all …]
|
D | radeon_object.h |
    68  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in radeon_bo_reserve()
    79  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
    96  rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_bo_gpu_offset()
    98  switch (bo->tbo.resource->mem_type) {  in radeon_bo_gpu_offset()
    107  return (bo->tbo.resource->start << PAGE_SHIFT) + start;  in radeon_bo_gpu_offset()
    112  return bo->tbo.base.size;  in radeon_bo_size()
    117  return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
    122  return (bo->tbo.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_gpu_page_alignment()
    133  return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  in radeon_bo_mmap_offset()
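radeon_bo_gpu_offset() above composes a GPU address from the embedded tbo: the switch over bo->tbo.resource->mem_type at line 98 selects a base for the current placement, and line 107 adds resource->start, a page number, shifted into bytes. The sketch below redoes that arithmetic in userspace; the placement names and base addresses are made-up values, and reading `start` as a per-placement base is an inference from the switch, so treat this as illustration only.

```c
/* Sketch of the address math in radeon_bo_gpu_offset():
 *   gpu_addr = per-placement base + (resource->start << PAGE_SHIFT)
 * Placement names, base addresses and the 4 KiB page are assumptions. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12

enum placement { PLACEMENT_VRAM, PLACEMENT_GTT };	/* stand-in for mem_type */

static uint64_t gpu_offset(enum placement pl, uint64_t resource_start_pages)
{
	uint64_t start;

	switch (pl) {				/* cf. the switch at line 98 */
	case PLACEMENT_VRAM:
		start = 0x0ULL;			/* made-up VRAM base */
		break;
	case PLACEMENT_GTT:
	default:
		start = 0x100000000ULL;		/* made-up GTT base */
		break;
	}
	/* cf. line 107: pages -> bytes, then offset into the chosen range */
	return (resource_start_pages << PAGE_SHIFT) + start;
}

int main(void)
{
	/* A BO whose backing store starts at page 0x20 of the GTT range. */
	printf("gpu addr = 0x%llx\n",
	       (unsigned long long)gpu_offset(PLACEMENT_GTT, 0x20));
	return 0;
}
```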
|
D | radeon_mn.c |
    57  if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))  in radeon_mn_invalidate()
    69  r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,  in radeon_mn_invalidate()
    75  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_mn_invalidate()
|
D | radeon_prime.c |
    39  return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,  in radeon_gem_prime_get_sg_table()
    40  bo->tbo.ttm->num_pages);  in radeon_gem_prime_get_sg_table()
    59  bo->tbo.base.funcs = &radeon_gem_object_funcs;  in radeon_gem_prime_import_sg_table()
    66  return &bo->tbo.base;  in radeon_gem_prime_import_sg_table()
    107  if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm))  in radeon_gem_prime_export()
|
D | radeon_gem.c |
    133  *obj = &robj->tbo.base;  in radeon_gem_object_create()
    165  r = dma_resv_wait_timeout(robj->tbo.base.resv,  in radeon_gem_set_domain()
    271  struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_gem_object_mmap()
    273  if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm))  in radeon_gem_object_mmap()
    408  r = radeon_ttm_tt_set_userptr(rdev, bo->tbo.ttm, args->addr, args->flags);  in radeon_gem_userptr_ioctl()
    427  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_gem_userptr_ioctl()
    497  if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) {  in radeon_mode_dumb_mmap()
    529  r = dma_resv_test_signaled(robj->tbo.base.resv, DMA_RESV_USAGE_READ);  in radeon_gem_busy_ioctl()
    535  cur_placement = READ_ONCE(robj->tbo.resource->mem_type);  in radeon_gem_busy_ioctl()
    558  ret = dma_resv_wait_timeout(robj->tbo.base.resv, DMA_RESV_USAGE_READ,  in radeon_gem_wait_idle_ioctl()
    [all …]
|