
Searched refs:tbo (Results 1 – 25 of 89) sorted by relevance


/linux-6.6.21/drivers/gpu/drm/loongson/
lsdc_ttm.c
53 if (lbo->tbo.base.size <= PAGE_SIZE) in lsdc_bo_set_placement()
95 lsdc_ttm_tt_create(struct ttm_buffer_object *tbo, uint32_t page_flags) in lsdc_ttm_tt_create() argument
104 ret = ttm_sg_tt_init(tt, tbo, page_flags, ttm_cached); in lsdc_ttm_tt_create()
141 static void lsdc_bo_evict_flags(struct ttm_buffer_object *tbo, in lsdc_bo_evict_flags() argument
144 struct ttm_resource *resource = tbo->resource; in lsdc_bo_evict_flags()
145 struct lsdc_bo *lbo = to_lsdc_bo(tbo); in lsdc_bo_evict_flags()
160 static int lsdc_bo_move(struct ttm_buffer_object *tbo, in lsdc_bo_move() argument
166 struct drm_device *ddev = tbo->base.dev; in lsdc_bo_move()
167 struct ttm_resource *old_mem = tbo->resource; in lsdc_bo_move()
168 struct lsdc_bo *lbo = to_lsdc_bo(tbo); in lsdc_bo_move()
[all …]
lsdc_gem.c
53 struct ttm_buffer_object *tbo = to_ttm_bo(obj); in lsdc_gem_prime_get_sg_table() local
54 struct ttm_tt *tt = tbo->ttm; in lsdc_gem_prime_get_sg_table()
66 struct ttm_buffer_object *tbo = to_ttm_bo(obj); in lsdc_gem_object_free() local
68 if (tbo) in lsdc_gem_object_free()
69 ttm_bo_put(tbo); in lsdc_gem_object_free()
74 struct ttm_buffer_object *tbo = to_ttm_bo(obj); in lsdc_gem_object_vmap() local
75 struct lsdc_bo *lbo = to_lsdc_bo(tbo); in lsdc_gem_object_vmap()
89 ret = ttm_bo_vmap(tbo, &lbo->map); in lsdc_gem_object_vmap()
106 struct ttm_buffer_object *tbo = to_ttm_bo(obj); in lsdc_gem_object_vunmap() local
107 struct lsdc_bo *lbo = to_lsdc_bo(tbo); in lsdc_gem_object_vunmap()
[all …]
lsdc_ttm.h
24 struct ttm_buffer_object tbo; member
52 static inline struct lsdc_bo *to_lsdc_bo(struct ttm_buffer_object *tbo) in to_lsdc_bo() argument
54 return container_of(tbo, struct lsdc_bo, tbo); in to_lsdc_bo()
59 return container_of(gem, struct lsdc_bo, tbo.base); in gem_to_lsdc_bo()
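
The loongson hits above show the pattern every driver in these results repeats: the driver-specific BO (struct lsdc_bo) embeds a struct ttm_buffer_object named tbo, and to_lsdc_bo() recovers the containing structure with container_of(). Below is a minimal, self-contained userspace sketch of that embedding/recovery idiom; the demo_bo type and to_demo_bo() helper are hypothetical names used only for illustration.

#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of(): recover the struct
 * that embeds 'member' from a pointer to that member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Stand-in for struct ttm_buffer_object. */
struct ttm_buffer_object {
	size_t size;
};

/* Driver BO embedding the TTM BO, like lsdc_bo, amdgpu_bo or vmw_bo. */
struct demo_bo {
	struct ttm_buffer_object tbo;	/* embedded by value, never a pointer */
	int placement;
};

/* Equivalent of to_lsdc_bo()/ttm_to_amdgpu_bo(): tbo pointer -> driver BO. */
static struct demo_bo *to_demo_bo(struct ttm_buffer_object *tbo)
{
	return container_of(tbo, struct demo_bo, tbo);
}

int main(void)
{
	struct demo_bo bo = { .tbo = { .size = 4096 }, .placement = 1 };
	struct ttm_buffer_object *tbo = &bo.tbo;	/* what TTM callbacks receive */

	printf("recovered placement: %d\n", to_demo_bo(tbo)->placement);
	return 0;
}
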
/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
56 static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo) in amdgpu_bo_destroy() argument
58 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_destroy()
62 if (bo->tbo.base.import_attach) in amdgpu_bo_destroy()
63 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in amdgpu_bo_destroy()
64 drm_gem_object_release(&bo->tbo.base); in amdgpu_bo_destroy()
69 static void amdgpu_bo_user_destroy(struct ttm_buffer_object *tbo) in amdgpu_bo_user_destroy() argument
71 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_user_destroy()
76 amdgpu_bo_destroy(tbo); in amdgpu_bo_user_destroy()
79 static void amdgpu_bo_vm_destroy(struct ttm_buffer_object *tbo) in amdgpu_bo_vm_destroy() argument
81 struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev); in amdgpu_bo_vm_destroy()
[all …]
amdgpu_dma_buf.c
59 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_attach()
89 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_detach()
145 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_map()
149 if (!bo->tbo.pin_count) { in amdgpu_dma_buf_map()
160 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_map()
164 } else if (!(amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type) & in amdgpu_dma_buf_map()
169 switch (bo->tbo.resource->mem_type) { in amdgpu_dma_buf_map()
172 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
173 bo->tbo.ttm->num_pages); in amdgpu_dma_buf_map()
183 r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0, in amdgpu_dma_buf_map()
[all …]
amdgpu_object.h
101 struct ttm_buffer_object tbo; member
159 static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo) in ttm_to_amdgpu_bo() argument
161 return container_of(tbo, struct amdgpu_bo, tbo); in ttm_to_amdgpu_bo()
204 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_reserve()
207 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in amdgpu_bo_reserve()
218 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
223 return bo->tbo.base.size; in amdgpu_bo_size()
228 return bo->tbo.base.size / AMDGPU_GPU_PAGE_SIZE; in amdgpu_bo_ngpu_pages()
233 return (bo->tbo.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE; in amdgpu_bo_gpu_page_alignment()
244 return drm_vma_node_offset_addr(&bo->tbo.base.vma_node); in amdgpu_bo_mmap_offset()
[all …]
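
amdgpu_object.h above (and the qxl_object.h and radeon_object.h hits further down) wraps ttm_bo_reserve()/ttm_bo_unreserve() around the embedded tbo so callers lock and query the driver BO directly. The following is a sketch of that wrapper pattern against the in-tree TTM headers; demo_bo is a hypothetical driver type, and the code only builds as part of a kernel tree.

#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>

/* Hypothetical driver BO embedding the TTM object, as in struct amdgpu_bo. */
struct demo_bo {
	struct ttm_buffer_object tbo;
};

/* Take the BO's reservation lock; mirrors the amdgpu_bo_reserve() shape. */
static inline int demo_bo_reserve(struct demo_bo *bo, bool no_intr)
{
	/* interruptible wait unless the caller asked for an uninterruptible one */
	return ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
}

/* Drop the reservation lock; mirrors amdgpu_bo_unreserve(). */
static inline void demo_bo_unreserve(struct demo_bo *bo)
{
	ttm_bo_unreserve(&bo->tbo);
}

/* The size lives on the embedded GEM object; mirrors amdgpu_bo_size(). */
static inline unsigned long demo_bo_size(struct demo_bo *bo)
{
	return bo->tbo.base.size;
}
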
amdgpu_amdkfd_gpuvm.c
286 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_amdkfd_release_notify()
321 ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, 1, in create_dmamap_sg_bo()
323 ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj, 0); in create_dmamap_sg_bo()
358 dma_resv_replace_fences(bo->tbo.base.resv, ef->base.context, in amdgpu_amdkfd_remove_eviction_fence()
392 BUG_ON(!dma_resv_trylock(bo->tbo.base.resv)); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
394 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
406 if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm), in amdgpu_amdkfd_bo_validate()
412 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_amdkfd_bo_validate()
437 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in vm_validate_pt_pd_bos()
454 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in vm_update_pds()
[all …]
amdgpu_vm.c
184 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_evicted()
289 if (!bo || bo->tbo.type != ttm_bo_type_kernel) in amdgpu_vm_bo_reset_state_machine()
320 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_base_init()
323 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_bo_base_init()
325 ttm_bo_set_bulk_move(&bo->tbo, &vm->lru_bulk_move); in amdgpu_vm_bo_base_init()
326 if (bo->tbo.type == ttm_bo_type_kernel && bo->parent) in amdgpu_vm_bo_base_init()
332 amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type)) in amdgpu_vm_bo_base_init()
356 return drm_exec_prepare_obj(exec, &vm->root.bo->tbo.base, in amdgpu_vm_lock_pd()
479 if (bo->tbo.type != ttm_bo_type_kernel) { in amdgpu_vm_validate_pt_bos()
1020 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_get_memory()
[all …]
amdgpu_gem.c
127 *obj = &bo->tbo.base; in amdgpu_gem_object_create()
165 struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev); in amdgpu_gem_object_open()
172 mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm); in amdgpu_gem_object_open()
177 abo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_gem_object_open()
197 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_object_close()
208 r = drm_exec_prepare_obj(&exec, &bo->tbo.base, 1); in amdgpu_gem_object_close()
247 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) in amdgpu_gem_object_mmap()
334 resv = vm->root.bo->tbo.base.resv; in amdgpu_gem_create_ioctl()
418 r = amdgpu_ttm_tt_set_userptr(&bo->tbo, args->addr, args->flags); in amdgpu_gem_userptr_ioctl()
427 r = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages, in amdgpu_gem_userptr_ioctl()
[all …]
amdgpu_cs.c
143 if (amdgpu_ttm_tt_get_usermm(p->uf_bo->tbo.ttm)) in amdgpu_cs_p1_user_fence()
780 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_cs_bo_validate()
785 .resv = bo->tbo.base.resv in amdgpu_cs_bo_validate()
790 if (bo->tbo.pin_count) in amdgpu_cs_bo_validate()
797 (!bo->tbo.base.dma_buf || in amdgpu_cs_bo_validate()
798 list_empty(&bo->tbo.base.dma_buf->attachments))) { in amdgpu_cs_bo_validate()
818 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_bo_validate()
873 e->user_pages = kvmalloc_array(bo->tbo.ttm->num_pages, in amdgpu_cs_parser_bos()
889 for (i = 0; i < bo->tbo.ttm->num_pages; i++) { in amdgpu_cs_parser_bos()
890 if (bo->tbo.ttm->pages[i] != e->user_pages[i]) { in amdgpu_cs_parser_bos()
[all …]
/linux-6.6.21/drivers/gpu/drm/qxl/
qxl_object.c
35 static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo) in qxl_ttm_bo_destroy() argument
40 bo = to_qxl_bo(tbo); in qxl_ttm_bo_destroy()
41 qdev = to_qxl(bo->tbo.base.dev); in qxl_ttm_bo_destroy()
48 drm_gem_object_release(&bo->tbo.base); in qxl_ttm_bo_destroy()
65 if (qbo->tbo.base.size <= PAGE_SIZE) in qxl_ttm_placement_from_domain()
128 r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size); in qxl_bo_create()
133 bo->tbo.base.funcs = &qxl_object_funcs; in qxl_bo_create()
143 bo->tbo.priority = priority; in qxl_bo_create()
144 r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type, in qxl_bo_create()
155 ttm_bo_pin(&bo->tbo); in qxl_bo_create()
[all …]
qxl_gem.c
35 struct ttm_buffer_object *tbo; in qxl_gem_object_free() local
41 tbo = &qobj->tbo; in qxl_gem_object_free()
42 ttm_bo_put(tbo); in qxl_gem_object_free()
66 *obj = &qbo->tbo.base; in qxl_gem_object_create()
qxl_object.h
34 r = ttm_bo_reserve(&bo->tbo, true, false, NULL); in qxl_bo_reserve()
37 struct drm_device *ddev = bo->tbo.base.dev; in qxl_bo_reserve()
48 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
53 return bo->tbo.base.size; in qxl_bo_size()
qxl_debugfs.c
64 dma_resv_iter_begin(&cursor, bo->tbo.base.resv, in qxl_debugfs_buffers_info()
73 (unsigned long)bo->tbo.base.size, in qxl_debugfs_buffers_info()
74 bo->tbo.pin_count, rel); in qxl_debugfs_buffers_info()
/linux-6.6.21/drivers/gpu/drm/radeon/
radeon_object.c
52 static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo) in radeon_ttm_bo_destroy() argument
56 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
63 if (bo->tbo.base.import_attach) in radeon_ttm_bo_destroy()
64 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in radeon_ttm_bo_destroy()
65 drm_gem_object_release(&bo->tbo.base); in radeon_ttm_bo_destroy()
155 drm_gem_private_object_init(rdev->ddev, &bo->tbo.base, size); in radeon_bo_create()
205 r = ttm_bo_init_validate(&rdev->mman.bdev, &bo->tbo, type, in radeon_bo_create()
224 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL, in radeon_bo_kmap()
235 r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap); in radeon_bo_kmap()
261 ttm_bo_get(&bo->tbo); in radeon_bo_ref()
[all …]
radeon_object.h
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
96 rdev = radeon_get_rdev(bo->tbo.bdev); in radeon_bo_gpu_offset()
98 switch (bo->tbo.resource->mem_type) { in radeon_bo_gpu_offset()
107 return (bo->tbo.resource->start << PAGE_SHIFT) + start; in radeon_bo_gpu_offset()
112 return bo->tbo.base.size; in radeon_bo_size()
117 return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE; in radeon_bo_ngpu_pages()
122 return (bo->tbo.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE; in radeon_bo_gpu_page_alignment()
133 return drm_vma_node_offset_addr(&bo->tbo.base.vma_node); in radeon_bo_mmap_offset()
radeon_mn.c
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm)) in radeon_mn_invalidate()
69 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP, in radeon_mn_invalidate()
75 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_mn_invalidate()
radeon_prime.c
41 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, in radeon_gem_prime_get_sg_table()
42 bo->tbo.ttm->num_pages); in radeon_gem_prime_get_sg_table()
61 bo->tbo.base.funcs = &radeon_gem_object_funcs; in radeon_gem_prime_import_sg_table()
68 return &bo->tbo.base; in radeon_gem_prime_import_sg_table()
109 if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_gem.c
133 *obj = &robj->tbo.base; in radeon_gem_object_create()
165 r = dma_resv_wait_timeout(robj->tbo.base.resv, in radeon_gem_set_domain()
271 struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev); in radeon_gem_object_mmap()
273 if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm)) in radeon_gem_object_mmap()
392 r = radeon_ttm_tt_set_userptr(rdev, bo->tbo.ttm, args->addr, args->flags); in radeon_gem_userptr_ioctl()
411 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_gem_userptr_ioctl()
479 if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
511 r = dma_resv_test_signaled(robj->tbo.base.resv, DMA_RESV_USAGE_READ); in radeon_gem_busy_ioctl()
517 cur_placement = READ_ONCE(robj->tbo.resource->mem_type); in radeon_gem_busy_ioctl()
540 ret = dma_resv_wait_timeout(robj->tbo.base.resv, DMA_RESV_USAGE_READ, in radeon_gem_wait_idle_ioctl()
[all …]
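
The radeon_object.c hits show the usual CPU-mapping sequence: wait for the kernel fences on the BO's reservation object, then ttm_bo_kmap() the whole object and read the virtual address out of the kmap cookie. A condensed sketch of that sequence follows; demo_bo and demo_bo_kmap() are hypothetical, error paths are trimmed, and the code only builds inside a kernel tree.

#include <linux/dma-resv.h>
#include <linux/pfn.h>
#include <linux/sched.h>
#include <drm/ttm/ttm_bo.h>

struct demo_bo {
	struct ttm_buffer_object tbo;
	struct ttm_bo_kmap_obj kmap;	/* filled in by ttm_bo_kmap() */
};

/* Map the whole BO into kernel address space, radeon_bo_kmap() style. */
static int demo_bo_kmap(struct demo_bo *bo, void **ptr)
{
	bool is_iomem;
	long r;

	/* Wait for kernel-internal fences (e.g. a pending move) to signal. */
	r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL,
				  false, MAX_SCHEDULE_TIMEOUT);
	if (r < 0)
		return r;

	r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);
	if (r)
		return r;

	*ptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
	return 0;
}
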
/linux-6.6.21/drivers/gpu/drm/vmwgfx/
vmwgfx_bo.c
37 WARN_ON(vbo->tbo.base.funcs && in vmw_bo_release()
38 kref_read(&vbo->tbo.base.refcount) != 0); in vmw_bo_release()
40 drm_gem_object_release(&vbo->tbo.base); in vmw_bo_release()
74 struct ttm_buffer_object *bo = &buf->tbo; in vmw_bo_pin_in_placement()
110 struct ttm_buffer_object *bo = &buf->tbo; in vmw_bo_pin_in_vram_or_gmr()
179 struct ttm_buffer_object *bo = &buf->tbo; in vmw_bo_pin_in_start_of_vram()
195 buf->tbo.pin_count == 0) { in vmw_bo_pin_in_start_of_vram()
236 struct ttm_buffer_object *bo = &buf->tbo; in vmw_bo_unpin()
283 struct ttm_buffer_object *bo = &vbo->tbo; in vmw_bo_pin_reserved()
328 struct ttm_buffer_object *bo = &vbo->tbo; in vmw_bo_map_and_cache()
[all …]
vmwgfx_gem.c
123 (*p_vbo)->tbo.base.funcs = &vmw_gem_object_funcs; in vmw_gem_object_create()
147 ret = drm_gem_handle_create(filp, &(*p_vbo)->tbo.base, handle); in vmw_gem_object_create_with_handle()
171 rep->map_handle = drm_vma_node_offset_addr(&vbo->tbo.base.vma_node); in vmw_gem_object_create_ioctl()
175 drm_gem_object_put(&vbo->tbo.base); in vmw_gem_object_create_ioctl()
187 switch (bo->tbo.resource->mem_type) { in vmw_bo_print_info()
208 switch (bo->tbo.type) { in vmw_bo_print_info()
224 id, bo->tbo.base.size, placement, type); in vmw_bo_print_info()
226 bo->tbo.priority, in vmw_bo_print_info()
227 bo->tbo.pin_count, in vmw_bo_print_info()
228 kref_read(&bo->tbo.base.refcount), in vmw_bo_print_info()
[all …]
vmwgfx_bo.h
75 struct ttm_buffer_object tbo; member
145 vbo->tbo.priority = i; in vmw_bo_prio_adjust()
150 vbo->tbo.priority = 3; in vmw_bo_prio_adjust()
189 ttm_bo_put(&tmp_buf->tbo); in vmw_bo_unreference()
194 ttm_bo_get(&buf->tbo); in vmw_bo_reference()
200 drm_gem_object_get(&vbo->tbo.base); in vmw_user_bo_ref()
210 drm_gem_object_put(&tmp_buf->tbo.base); in vmw_user_bo_unref()
215 return container_of((gobj), struct vmw_bo, tbo.base); in to_vmw_bo()
vmwgfx_page_dirty.c
85 pgoff_t offset = drm_vma_node_start(&vbo->tbo.base.vma_node); in vmw_bo_dirty_scan_pagetable()
86 struct address_space *mapping = vbo->tbo.bdev->dev_mapping; in vmw_bo_dirty_scan_pagetable()
123 unsigned long offset = drm_vma_node_start(&vbo->tbo.base.vma_node); in vmw_bo_dirty_scan_mkwrite()
124 struct address_space *mapping = vbo->tbo.bdev->dev_mapping; in vmw_bo_dirty_scan_mkwrite()
130 num_marked = wp_shared_mapping_range(vbo->tbo.bdev->dev_mapping, in vmw_bo_dirty_scan_mkwrite()
188 unsigned long offset = drm_vma_node_start(&vbo->tbo.base.vma_node); in vmw_bo_dirty_pre_unmap()
189 struct address_space *mapping = vbo->tbo.bdev->dev_mapping; in vmw_bo_dirty_pre_unmap()
212 unsigned long offset = drm_vma_node_start(&vbo->tbo.base.vma_node); in vmw_bo_dirty_unmap()
213 struct address_space *mapping = vbo->tbo.bdev->dev_mapping; in vmw_bo_dirty_unmap()
233 pgoff_t num_pages = PFN_UP(vbo->tbo.resource->size); in vmw_bo_dirty_add()
[all …]
vmwgfx_resource.c
46 dma_resv_assert_held(gbo->tbo.base.resv); in vmw_resource_mob_attach()
73 dma_resv_assert_held(gbo->tbo.base.resv); in vmw_resource_mob_detach()
125 struct ttm_buffer_object *bo = &res->guest_memory_bo->tbo; in vmw_resource_release()
337 BUG_ON(res->guest_memory_bo->tbo.base.size < size); in vmw_resource_buf_alloc()
525 ttm_bo_get(&res->guest_memory_bo->tbo); in vmw_resource_check_buffer()
526 val_buf->bo = &res->guest_memory_bo->tbo; in vmw_resource_check_buffer()
539 ret = ttm_bo_validate(&res->guest_memory_bo->tbo, in vmw_resource_check_buffer()
688 val_buf.bo = &res->guest_memory_bo->tbo; in vmw_resource_validate()
754 .bo = &vbo->tbo, in vmw_resource_unbind_list()
758 dma_resv_assert_held(vbo->tbo.base.resv); in vmw_resource_unbind_list()
[all …]
vmwgfx_mob.c
207 bo = &otable->page_table->pt_bo->tbo; in vmw_takedown_otable_base()
267 &batch->otable_bo->tbo, in vmw_otable_batch_setup()
284 vmw_bo_unpin_unlocked(&batch->otable_bo->tbo); in vmw_otable_batch_setup()
285 ttm_bo_put(&batch->otable_bo->tbo); in vmw_otable_batch_setup()
336 struct ttm_buffer_object *bo = &batch->otable_bo->tbo; in vmw_otable_batch_takedown()
502 struct ttm_buffer_object *bo = &mob->pt_bo->tbo; in vmw_mob_pt_setup()
539 vmw_bo_unpin_unlocked(&mob->pt_bo->tbo); in vmw_mob_destroy()
559 struct ttm_buffer_object *bo = &mob->pt_bo->tbo; in vmw_mob_unbind()
651 vmw_bo_unpin_unlocked(&mob->pt_bo->tbo); in vmw_mob_bind()
