
Searched refs:ttm (Results 1 – 25 of 60) sorted by relevance


/linux-6.6.21/drivers/gpu/drm/ttm/
ttm_tt.c
67 if (bo->ttm) in ttm_tt_create()
85 bo->ttm = bdev->funcs->ttm_tt_create(bo, page_flags); in ttm_tt_create()
86 if (unlikely(bo->ttm == NULL)) in ttm_tt_create()
89 WARN_ON(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL_MAPPABLE && in ttm_tt_create()
90 !(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)); in ttm_tt_create()
98 static int ttm_tt_alloc_page_directory(struct ttm_tt *ttm) in ttm_tt_alloc_page_directory() argument
100 ttm->pages = kvcalloc(ttm->num_pages, sizeof(void*), GFP_KERNEL); in ttm_tt_alloc_page_directory()
101 if (!ttm->pages) in ttm_tt_alloc_page_directory()
107 static int ttm_dma_tt_alloc_page_directory(struct ttm_tt *ttm) in ttm_dma_tt_alloc_page_directory() argument
109 ttm->pages = kvcalloc(ttm->num_pages, sizeof(*ttm->pages) + in ttm_dma_tt_alloc_page_directory()
[all …]
ttm_agp_backend.c
45 struct ttm_tt ttm; member
50 int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_resource *bo_mem) in ttm_agp_bind() argument
52 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_bind()
55 int ret, cached = ttm->caching == ttm_cached; in ttm_agp_bind()
61 mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY); in ttm_agp_bind()
66 for (i = 0; i < ttm->num_pages; i++) { in ttm_agp_bind()
67 struct page *page = ttm->pages[i]; in ttm_agp_bind()
87 void ttm_agp_unbind(struct ttm_tt *ttm) in ttm_agp_unbind() argument
89 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_unbind()
102 bool ttm_agp_is_bound(struct ttm_tt *ttm) in ttm_agp_is_bound() argument
[all …]
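
The ttm_agp_backend.c hits show the backend idiom that recurs throughout these results: struct ttm_tt is embedded in a driver-private structure and the outer object is recovered with container_of() in the bind/unbind/destroy paths. A minimal sketch of that idiom, with hypothetical my_* names rather than code from the indexed tree:

#include <linux/container_of.h>
#include <linux/slab.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_tt.h>

struct my_tt {
	struct ttm_tt ttm;	/* embedded TTM object, as in struct ttm_agp_backend */
	u64 drv_cookie;		/* hypothetical per-backend state */
};

struct ttm_tt *my_ttm_tt_create(struct ttm_buffer_object *bo,
				uint32_t page_flags)
{
	struct my_tt *mtt = kzalloc(sizeof(*mtt), GFP_KERNEL);

	if (!mtt)
		return NULL;

	/* same init call the qxl hit below uses: cached pages, no extra pages */
	if (ttm_tt_init(&mtt->ttm, bo, page_flags, ttm_cached, 0)) {
		kfree(mtt);
		return NULL;
	}
	return &mtt->ttm;
}

void my_ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm)
{
	/* recover the embedding structure, as ttm_agp_bind() does */
	struct my_tt *mtt = container_of(ttm, struct my_tt, ttm);

	ttm_tt_fini(ttm);
	kfree(mtt);
}
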
ttm_bo_util.c
149 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy() local
164 if (ttm && ((ttm->page_flags & TTM_TT_FLAG_SWAPPED) || in ttm_bo_move_memcpy()
166 ret = ttm_tt_populate(bdev, ttm, ctx); in ttm_bo_move_memcpy()
173 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
179 src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
185 clear = src_iter->ops->maps_tt && (!ttm || !ttm_tt_is_populated(ttm)); in ttm_bo_move_memcpy()
186 if (!(clear && ttm && !(ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC))) in ttm_bo_move_memcpy()
297 caching = man->use_tt ? bo->ttm->caching : res->bus.caching; in ttm_io_prot()
339 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm() local
343 BUG_ON(!ttm); in ttm_bo_kmap_ttm()
[all …]
Makefile
5 ttm-y := ttm_tt.o ttm_bo.o ttm_bo_util.o ttm_bo_vm.o ttm_module.o \
8 ttm-$(CONFIG_AGP) += ttm_agp_backend.o
10 obj-$(CONFIG_DRM_TTM) += ttm.o
ttm_bo_vm.c
152 if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) { in ttm_bo_vm_reserve()
153 if (!(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL_MAPPABLE)) { in ttm_bo_vm_reserve()
191 struct ttm_tt *ttm = NULL; in ttm_bo_vm_fault_reserved() local
226 ttm = bo->ttm; in ttm_bo_vm_fault_reserved()
227 err = ttm_tt_populate(bdev, bo->ttm, &ctx); in ttm_bo_vm_fault_reserved()
249 page = ttm->pages[page_offset]; in ttm_bo_vm_fault_reserved()
ttm_bo.c
142 ret = ttm_tt_populate(bo->bdev, bo->ttm, ctx); in ttm_bo_handle_move_mem()
348 (want_init_on_free() && (bo->ttm != NULL)) || in ttm_bo_release()
1143 if (!bo->ttm || !ttm_tt_is_populated(bo->ttm) || in ttm_bo_swapout()
1144 bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL || in ttm_bo_swapout()
1145 bo->ttm->page_flags & TTM_TT_FLAG_SWAPPED || in ttm_bo_swapout()
1198 if (ttm_tt_is_populated(bo->ttm)) in ttm_bo_swapout()
1199 ret = ttm_tt_swapout(bo->bdev, bo->ttm, gfp_flags); in ttm_bo_swapout()
1214 if (bo->ttm == NULL) in ttm_bo_tt_destroy()
1217 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_bo_tt_destroy()
1218 ttm_tt_destroy(bo->bdev, bo->ttm); in ttm_bo_tt_destroy()
[all …]
/linux-6.6.21/drivers/gpu/drm/radeon/
radeon_ttm.c
55 static int radeon_ttm_tt_bind(struct ttm_device *bdev, struct ttm_tt *ttm,
57 static void radeon_ttm_tt_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
204 r = radeon_ttm_tt_bind(bo->bdev, bo->ttm, new_mem); in radeon_bo_move()
216 bo->ttm == NULL)) { in radeon_bo_move()
228 radeon_ttm_tt_unbind(bo->bdev, bo->ttm); in radeon_bo_move()
321 struct ttm_tt ttm; member
331 static int radeon_ttm_tt_pin_userptr(struct ttm_device *bdev, struct ttm_tt *ttm) in radeon_ttm_tt_pin_userptr() argument
334 struct radeon_ttm_tt *gtt = (void *)ttm; in radeon_ttm_tt_pin_userptr()
348 unsigned long end = gtt->userptr + (u64)ttm->num_pages * PAGE_SIZE; in radeon_ttm_tt_pin_userptr()
356 unsigned num_pages = ttm->num_pages - pinned; in radeon_ttm_tt_pin_userptr()
[all …]
radeon_prime.c
41 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, in radeon_gem_prime_get_sg_table()
42 bo->tbo.ttm->num_pages); in radeon_gem_prime_get_sg_table()
109 if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm)) in radeon_gem_prime_export()
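
radeon_prime.c builds the PRIME export sg_table directly from the populated ttm page array. Roughly the same single call, shown here against a generic buffer object as a sketch; my_get_sg_table is a made-up helper, and the pages must stay populated while the table is in use:

#include <drm/drm_device.h>
#include <drm/drm_prime.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_tt.h>

/* Hypothetical helper: export a populated ttm's pages for PRIME sharing. */
struct sg_table *my_get_sg_table(struct drm_device *dev,
				 struct ttm_buffer_object *bo)
{
	/* only valid while bo->ttm stays populated */
	return drm_prime_pages_to_sg(dev, bo->ttm->pages, bo->ttm->num_pages);
}
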
/linux-6.6.21/include/drm/ttm/
ttm_tt.h
150 int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
163 void ttm_tt_fini(struct ttm_tt *ttm);
173 void ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);
182 int ttm_tt_swapin(struct ttm_tt *ttm);
183 int ttm_tt_swapout(struct ttm_device *bdev, struct ttm_tt *ttm,
195 int ttm_tt_populate(struct ttm_device *bdev, struct ttm_tt *ttm,
206 void ttm_tt_unpopulate(struct ttm_device *bdev, struct ttm_tt *ttm);
216 static inline void ttm_tt_mark_for_clear(struct ttm_tt *ttm) in ttm_tt_mark_for_clear() argument
218 ttm->page_flags |= TTM_TT_FLAG_ZERO_ALLOC; in ttm_tt_mark_for_clear()
244 int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_resource *bo_mem);
[all …]
ttm_device.h
86 struct ttm_tt *ttm,
97 struct ttm_tt *ttm);
108 void (*ttm_tt_destroy)(struct ttm_device *bdev, struct ttm_tt *ttm);
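
ttm_device.h declares the ttm_tt_create/ttm_tt_populate/ttm_tt_destroy hooks that the driver groups in this search implement. A sketch of how such hooks are typically wired into struct ttm_device_funcs and registered; the my_* names are hypothetical and the ttm_device_init() arguments follow the signature in this tree:

#include <linux/fs.h>
#include <drm/drm_device.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_device.h>

/* hypothetical hooks, e.g. the my_ttm_tt_create/my_ttm_tt_destroy pair
 * sketched after the ttm_agp_backend.c hits above */
struct ttm_tt *my_ttm_tt_create(struct ttm_buffer_object *bo,
				uint32_t page_flags);
void my_ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);

static struct ttm_device_funcs my_ttm_funcs = {
	.ttm_tt_create = my_ttm_tt_create,
	.ttm_tt_destroy = my_ttm_tt_destroy,
	/* ttm_tt_populate/unpopulate left NULL: ttm_tt.c then falls back
	 * to the device's ttm_pool allocator */
};

struct my_device {			/* hypothetical driver device */
	struct ttm_device bdev;
};

int my_ttm_init(struct my_device *mdev, struct drm_device *drm,
		struct device *dev)
{
	return ttm_device_init(&mdev->bdev, &my_ttm_funcs, dev,
			       drm->anon_inode->i_mapping,
			       drm->vma_offset_manager,
			       true /* use_dma_alloc */,
			       false /* use_dma32 */);
}
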
/linux-6.6.21/drivers/gpu/drm/i915/gem/
i915_gem_ttm.c
50 struct ttm_tt ttm; member
189 struct ttm_tt *ttm, in i915_ttm_tt_shmem_populate() argument
194 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_populate()
196 const size_t size = (size_t)ttm->num_pages << PAGE_SHIFT; in i915_ttm_tt_shmem_populate()
234 ttm->pages[i++] = page; in i915_ttm_tt_shmem_populate()
236 if (ttm->page_flags & TTM_TT_FLAG_SWAPPED) in i915_ttm_tt_shmem_populate()
237 ttm->page_flags &= ~TTM_TT_FLAG_SWAPPED; in i915_ttm_tt_shmem_populate()
247 static void i915_ttm_tt_shmem_unpopulate(struct ttm_tt *ttm) in i915_ttm_tt_shmem_unpopulate() argument
249 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_unpopulate()
250 bool backup = ttm->page_flags & TTM_TT_FLAG_SWAPPED; in i915_ttm_tt_shmem_unpopulate()
[all …]
i915_gem_ttm_pm.c
24 if (obj->ttm.backup) { in i915_ttm_backup_free()
25 i915_gem_object_put(obj->ttm.backup); in i915_ttm_backup_free()
26 obj->ttm.backup = NULL; in i915_ttm_backup_free()
56 if (!i915_ttm_cpu_maps_iomem(bo->resource) || obj->ttm.backup) in i915_ttm_backup()
93 err = ttm_tt_populate(backup_bo->bdev, backup_bo->ttm, &ctx); in i915_ttm_backup()
106 obj->ttm.backup = backup; in i915_ttm_backup()
173 struct drm_i915_gem_object *backup = obj->ttm.backup; in i915_ttm_restore()
192 err = ttm_tt_populate(backup_bo->bdev, backup_bo->ttm, &ctx); in i915_ttm_restore()
199 obj->ttm.backup = NULL; in i915_ttm_restore()
i915_gem_ttm_move.c
53 struct ttm_tt *ttm) in i915_ttm_cache_level() argument
57 ttm->caching == ttm_cached) ? I915_CACHE_LLC : in i915_ttm_cache_level()
86 if (i915_ttm_cpu_maps_iomem(bo->resource) || bo->ttm->caching != ttm_cached) { in i915_ttm_adjust_domains_after_move()
125 bo->ttm); in i915_ttm_adjust_gem_after_move()
198 struct ttm_tt *src_ttm = bo->ttm; in i915_ttm_accel_move()
330 ttm_kmap_iter_tt_init(&arg->_src_iter.tt, bo->ttm) : in i915_ttm_memcpy_init()
332 &obj->ttm.cached_io_rsgt->table, in i915_ttm_memcpy_init()
579 struct ttm_tt *ttm = bo->ttm; in i915_ttm_move() local
626 if (ttm && (dst_man->use_tt || (ttm->page_flags & TTM_TT_FLAG_SWAPPED))) { in i915_ttm_move()
627 ret = ttm_tt_populate(bo->bdev, ttm, ctx); in i915_ttm_move()
[all …]
/linux-6.6.21/drivers/gpu/drm/nouveau/
nouveau_sgdma.c
15 struct ttm_tt ttm; member
20 nouveau_sgdma_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in nouveau_sgdma_destroy() argument
22 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_destroy()
24 if (ttm) { in nouveau_sgdma_destroy()
25 ttm_tt_fini(&nvbe->ttm); in nouveau_sgdma_destroy()
31 nouveau_sgdma_bind(struct ttm_device *bdev, struct ttm_tt *ttm, struct ttm_resource *reg) in nouveau_sgdma_bind() argument
33 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_bind()
41 ret = nouveau_mem_host(reg, &nvbe->ttm); in nouveau_sgdma_bind()
58 nouveau_sgdma_unbind(struct ttm_device *bdev, struct ttm_tt *ttm) in nouveau_sgdma_unbind() argument
60 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_unbind()
[all …]
nouveau_ttm.c
170 drm->ttm.type_host[!!kind] = typei; in nouveau_ttm_init_host()
176 drm->ttm.type_ncoh[!!kind] = typei; in nouveau_ttm_init_host()
191 ttm_resource_manager_init(man, &drm->ttm.bdev, in nouveau_ttm_init_vram()
193 ttm_set_driver_manager(&drm->ttm.bdev, TTM_PL_VRAM, man); in nouveau_ttm_init_vram()
197 return ttm_range_man_init(&drm->ttm.bdev, TTM_PL_VRAM, false, in nouveau_ttm_init_vram()
205 struct ttm_resource_manager *man = ttm_manager_type(&drm->ttm.bdev, TTM_PL_VRAM); in nouveau_ttm_fini_vram()
209 ttm_resource_manager_evict_all(&drm->ttm.bdev, man); in nouveau_ttm_fini_vram()
211 ttm_set_driver_manager(&drm->ttm.bdev, TTM_PL_VRAM, NULL); in nouveau_ttm_fini_vram()
214 ttm_range_man_fini(&drm->ttm.bdev, TTM_PL_VRAM); in nouveau_ttm_fini_vram()
229 return ttm_range_man_init(&drm->ttm.bdev, TTM_PL_TT, true, in nouveau_ttm_init_gtt()
[all …]
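
nouveau_ttm.c initializes VRAM either with a driver-specific ttm_resource_manager or with the stock range manager, and uses ttm_range_man_init() directly for the GTT domain. The stock-range path in isolation, as a sketch with a hypothetical aperture size:

#include <linux/mm.h>
#include <linux/sizes.h>
#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_placement.h>
#include <drm/ttm/ttm_range_manager.h>

#define MY_GTT_SIZE	SZ_256M		/* hypothetical; real drivers read the aperture size */

int my_init_gtt(struct ttm_device *bdev)
{
	/* use_tt = true: buffers in this domain are backed by ttm_tt pages */
	return ttm_range_man_init(bdev, TTM_PL_TT, true,
				  MY_GTT_SIZE >> PAGE_SHIFT);
}

void my_fini_gtt(struct ttm_device *bdev)
{
	ttm_range_man_fini(bdev, TTM_PL_TT);
}
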
nouveau_bo.c
47 static int nouveau_ttm_tt_bind(struct ttm_device *bdev, struct ttm_tt *ttm,
49 static void nouveau_ttm_tt_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
222 nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_alloc()
605 struct ttm_tt *ttm_dma = (struct ttm_tt *)nvbo->bo.ttm; in nouveau_bo_sync_for_device()
641 struct ttm_tt *ttm_dma = (struct ttm_tt *)nvbo->bo.ttm; in nouveau_bo_sync_for_cpu()
678 mutex_lock(&drm->ttm.io_reserve_mutex); in nouveau_bo_add_io_reserve_lru()
679 list_move_tail(&nvbo->io_reserve_lru, &drm->ttm.io_reserve_lru); in nouveau_bo_add_io_reserve_lru()
680 mutex_unlock(&drm->ttm.io_reserve_mutex); in nouveau_bo_add_io_reserve_lru()
688 mutex_lock(&drm->ttm.io_reserve_mutex); in nouveau_bo_del_io_reserve_lru()
690 mutex_unlock(&drm->ttm.io_reserve_mutex); in nouveau_bo_del_io_reserve_lru()
[all …]
nouveau_ttm.h
8 return container_of(bd, struct nouveau_drm, ttm.bdev); in nouveau_bdev()
24 int nouveau_sgdma_bind(struct ttm_device *bdev, struct ttm_tt *ttm, struct ttm_resource *reg);
25 void nouveau_sgdma_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
26 void nouveau_sgdma_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);
/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
69 struct ttm_tt *ttm,
72 struct ttm_tt *ttm);
249 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, mem); in amdgpu_ttm_map_buffer()
258 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
464 r = amdgpu_ttm_backend_bind(bo->bdev, bo->ttm, new_mem); in amdgpu_bo_move()
473 bo->ttm == NULL)) { in amdgpu_bo_move()
490 amdgpu_ttm_backend_unbind(bo->bdev, bo->ttm); in amdgpu_bo_move()
640 struct ttm_tt ttm; member
650 #define ttm_to_amdgpu_ttm_tt(ptr) container_of(ptr, struct amdgpu_ttm_tt, ttm)
663 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages() local
[all …]
amdgpu_ttm.h
171 void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm,
173 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm,
182 static inline void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm, in amdgpu_ttm_tt_discard_user_pages() argument
186 static inline bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm, in amdgpu_ttm_tt_get_user_pages_done() argument
193 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages);
198 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
199 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm);
200 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
202 bool amdgpu_ttm_tt_userptr_invalidated(struct ttm_tt *ttm,
204 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm);
[all …]
amdgpu_amdkfd_gpuvm.c
406 if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm), in amdgpu_amdkfd_bo_validate()
519 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
520 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr() local
523 if (WARN_ON(ttm->num_pages != src_ttm->num_pages)) in kfd_mem_dmamap_userptr()
526 ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL); in kfd_mem_dmamap_userptr()
527 if (unlikely(!ttm->sg)) in kfd_mem_dmamap_userptr()
531 ret = sg_alloc_table_from_pages(ttm->sg, src_ttm->pages, in kfd_mem_dmamap_userptr()
532 ttm->num_pages, 0, in kfd_mem_dmamap_userptr()
533 (u64)ttm->num_pages << PAGE_SHIFT, in kfd_mem_dmamap_userptr()
538 ret = dma_map_sgtable(adev->dev, ttm->sg, direction, 0); in kfd_mem_dmamap_userptr()
[all …]
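
amdgpu_amdkfd_gpuvm.c maps another buffer's user pages for DMA by building an sg_table from the page array and handing it to dma_map_sgtable(). The same two calls in reduced form; my_map_pages() is a made-up name, and the KFD-specific context and locking are omitted:

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <drm/ttm/ttm_tt.h>

/* Map an already-pinned page array for device DMA (sketch only). */
int my_map_pages(struct device *dev, struct ttm_tt *ttm,
		 struct page **pages, unsigned int npages)
{
	int ret;

	ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL);
	if (!ttm->sg)
		return -ENOMEM;

	ret = sg_alloc_table_from_pages(ttm->sg, pages, npages, 0,
					(u64)npages << PAGE_SHIFT, GFP_KERNEL);
	if (ret)
		goto free_sg;

	ret = dma_map_sgtable(dev, ttm->sg, DMA_BIDIRECTIONAL, 0);
	if (ret)
		goto free_table;

	return 0;

free_table:
	sg_free_table(ttm->sg);
free_sg:
	kfree(ttm->sg);
	ttm->sg = NULL;
	return ret;
}
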
/linux-6.6.21/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
293 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm); in vmw_bo_sg_table()
300 struct ttm_tt *ttm, struct ttm_resource *bo_mem) in vmw_ttm_bind() argument
303 container_of(ttm, struct vmw_ttm_tt, dma_ttm); in vmw_ttm_bind()
322 ttm->num_pages, vmw_be->gmr_id); in vmw_ttm_bind()
327 vmw_mob_create(ttm->num_pages); in vmw_ttm_bind()
333 &vmw_be->vsgt, ttm->num_pages, in vmw_ttm_bind()
347 struct ttm_tt *ttm) in vmw_ttm_unbind() argument
350 container_of(ttm, struct vmw_ttm_tt, dma_ttm); in vmw_ttm_unbind()
374 static void vmw_ttm_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in vmw_ttm_destroy() argument
377 container_of(ttm, struct vmw_ttm_tt, dma_ttm); in vmw_ttm_destroy()
[all …]
vmwgfx_blit.c
468 if (!ttm_tt_is_populated(dst->ttm)) { in vmw_bo_cpu_blit()
469 ret = dst->bdev->funcs->ttm_tt_populate(dst->bdev, dst->ttm, &ctx); in vmw_bo_cpu_blit()
474 if (!ttm_tt_is_populated(src->ttm)) { in vmw_bo_cpu_blit()
475 ret = src->bdev->funcs->ttm_tt_populate(src->bdev, src->ttm, &ctx); in vmw_bo_cpu_blit()
484 d.dst_pages = dst->ttm->pages; in vmw_bo_cpu_blit()
485 d.src_pages = src->ttm->pages; in vmw_bo_cpu_blit()
/linux-6.6.21/drivers/gpu/drm/qxl/
qxl_ttm.c
102 static void qxl_ttm_backend_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in qxl_ttm_backend_destroy() argument
104 ttm_tt_fini(ttm); in qxl_ttm_backend_destroy()
105 kfree(ttm); in qxl_ttm_backend_destroy()
111 struct ttm_tt *ttm; in qxl_ttm_tt_create() local
113 ttm = kzalloc(sizeof(struct ttm_tt), GFP_KERNEL); in qxl_ttm_tt_create()
114 if (ttm == NULL) in qxl_ttm_tt_create()
116 if (ttm_tt_init(ttm, bo, page_flags, ttm_cached, 0)) { in qxl_ttm_tt_create()
117 kfree(ttm); in qxl_ttm_tt_create()
120 return ttm; in qxl_ttm_tt_create()
163 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in qxl_bo_move()
/linux-6.6.21/arch/powerpc/perf/
ppc970-pmu.c
264 unsigned int ttm, grp; in p970_compute_mmcr() local
321 ttm = unitmap[i]; in p970_compute_mmcr()
322 ++ttmuse[(ttm >> 2) & 1]; in p970_compute_mmcr()
323 mmcr1 |= (unsigned long)(ttm & ~4) << MMCR1_TTM1SEL_SH; in p970_compute_mmcr()
335 ttm = (unitmap[unit] >> 2) & 1; in p970_compute_mmcr()
337 ttm = 2; in p970_compute_mmcr()
339 ttm = 3; in p970_compute_mmcr()
343 mmcr1 |= (unsigned long)ttm in p970_compute_mmcr()
/linux-6.6.21/drivers/gpu/drm/loongson/
lsdc_ttm.c
114 struct ttm_tt *ttm, in lsdc_ttm_tt_populate() argument
117 bool slave = !!(ttm->page_flags & TTM_TT_FLAG_EXTERNAL); in lsdc_ttm_tt_populate()
119 if (slave && ttm->sg) { in lsdc_ttm_tt_populate()
120 drm_prime_sg_to_dma_addr_array(ttm->sg, in lsdc_ttm_tt_populate()
121 ttm->dma_address, in lsdc_ttm_tt_populate()
122 ttm->num_pages); in lsdc_ttm_tt_populate()
127 return ttm_pool_alloc(&bdev->pool, ttm, ctx); in lsdc_ttm_tt_populate()
131 struct ttm_tt *ttm) in lsdc_ttm_tt_unpopulate() argument
133 bool slave = !!(ttm->page_flags & TTM_TT_FLAG_EXTERNAL); in lsdc_ttm_tt_unpopulate()
138 return ttm_pool_free(&bdev->pool, ttm); in lsdc_ttm_tt_unpopulate()
[all …]
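
lsdc_ttm.c has the shortest populate/unpopulate pair in these results: imported (TTM_TT_FLAG_EXTERNAL) buffers only get their dma_address array filled from the exporter's sg_table, everything else goes through the device's ttm_pool. A stripped-down sketch of the same pair, with hypothetical my_* names and a defensive -EINVAL added for the external-without-sg case:

#include <linux/errno.h>
#include <drm/drm_prime.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_pool.h>
#include <drm/ttm/ttm_tt.h>

int my_ttm_tt_populate(struct ttm_device *bdev, struct ttm_tt *ttm,
		       struct ttm_operation_ctx *ctx)
{
	bool external = ttm->page_flags & TTM_TT_FLAG_EXTERNAL;

	/* imported dma-buf: addresses come from the exporter's sg_table */
	if (external && ttm->sg) {
		drm_prime_sg_to_dma_addr_array(ttm->sg, ttm->dma_address,
					       ttm->num_pages);
		return 0;
	}

	if (external)		/* defensive choice in this sketch */
		return -EINVAL;

	return ttm_pool_alloc(&bdev->pool, ttm, ctx);
}

void my_ttm_tt_unpopulate(struct ttm_device *bdev, struct ttm_tt *ttm)
{
	if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL)
		return;

	ttm_pool_free(&bdev->pool, ttm);
}
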
