Searched refs:ppgtt (Results 1 – 19 of 19) sorted by relevance

/linux-6.1.9/drivers/gpu/drm/i915/gt/
gen6_ppgtt.c
19 static void gen6_write_pde(const struct gen6_ppgtt *ppgtt, in gen6_write_pde() argument
23 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
27 ppgtt->pd_addr + pde); in gen6_write_pde()
77 struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_ppgtt_clear_range() local
86 i915_pt_entry(ppgtt->base.pd, pde++); in gen6_ppgtt_clear_range()
94 ppgtt->scan_for_unused_pt = true; in gen6_ppgtt_clear_range()
115 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm); in gen6_ppgtt_insert_entries() local
116 struct i915_page_directory * const pd = ppgtt->pd; in gen6_ppgtt_insert_entries()
150 static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end) in gen6_flush_pd() argument
152 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_flush_pd()
[all …]
gen8_ppgtt.c
58 static void gen8_ppgtt_notify_vgt(struct i915_ppgtt *ppgtt, bool create) in gen8_ppgtt_notify_vgt() argument
60 struct drm_i915_private *i915 = ppgtt->vm.i915; in gen8_ppgtt_notify_vgt()
61 struct intel_uncore *uncore = ppgtt->vm.gt->uncore; in gen8_ppgtt_notify_vgt()
66 atomic_inc(px_used(ppgtt->pd)); /* never remove */ in gen8_ppgtt_notify_vgt()
68 atomic_dec(px_used(ppgtt->pd)); in gen8_ppgtt_notify_vgt()
72 if (i915_vm_is_4lvl(&ppgtt->vm)) { in gen8_ppgtt_notify_vgt()
73 const u64 daddr = px_dma(ppgtt->pd); in gen8_ppgtt_notify_vgt()
85 const u64 daddr = i915_page_dir_dma_addr(ppgtt, i); in gen8_ppgtt_notify_vgt()
160 struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm); in gen8_pdp_for_page_index() local
163 return ppgtt->pd; in gen8_pdp_for_page_index()
[all …]
intel_ppgtt.c
170 struct i915_ppgtt *ppgtt; in i915_ppgtt_create() local
172 ppgtt = __ppgtt_create(gt, lmem_pt_obj_flags); in i915_ppgtt_create()
173 if (IS_ERR(ppgtt)) in i915_ppgtt_create()
174 return ppgtt; in i915_ppgtt_create()
176 trace_i915_ppgtt_create(&ppgtt->vm); in i915_ppgtt_create()
178 return ppgtt; in i915_ppgtt_create()
307 void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt, in ppgtt_init() argument
312 ppgtt->vm.gt = gt; in ppgtt_init()
313 ppgtt->vm.i915 = i915; in ppgtt_init()
314 ppgtt->vm.dma = i915->drm.dev; in ppgtt_init()
[all …]
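
The intel_ppgtt.c hits above, together with the selftest callers further down, trace the common lifecycle: i915_ppgtt_create() returns an ERR_PTR-encoded pointer, work is done against the embedded address space &ppgtt->vm, and i915_vm_put() drops the creation reference. A minimal sketch of that pattern (the helper name and surrounding context are illustrative, not taken from any single caller):

	/* Hypothetical helper; mirrors the create/use/put flow in the hits. */
	static int example_ppgtt_user(struct intel_gt *gt)
	{
		struct i915_ppgtt *ppgtt;

		ppgtt = i915_ppgtt_create(gt, 0);	/* flags = 0, as most callers pass */
		if (IS_ERR(ppgtt))
			return PTR_ERR(ppgtt);

		/*
		 * Objects are bound into the embedded address space, e.g.
		 * i915_vma_instance(obj, &ppgtt->vm, NULL) as in the selftests.
		 */

		i915_vm_put(&ppgtt->vm);		/* drop the creation reference */
		return 0;
	}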
intel_lrc_reg.h
33 #define ASSIGN_CTX_PDP(ppgtt, reg_state, n) do { \ argument
35 const u64 addr__ = i915_page_dir_dma_addr((ppgtt), (n)); \
40 #define ASSIGN_CTX_PML4(ppgtt, reg_state) do { \ argument
42 const u64 addr__ = px_dma((ppgtt)->pd); \
intel_ggtt.c
669 struct i915_ppgtt *ppgtt; in init_aliasing_ppgtt() local
672 ppgtt = i915_ppgtt_create(ggtt->vm.gt, 0); in init_aliasing_ppgtt()
673 if (IS_ERR(ppgtt)) in init_aliasing_ppgtt()
674 return PTR_ERR(ppgtt); in init_aliasing_ppgtt()
676 if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) { in init_aliasing_ppgtt()
681 err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total); in init_aliasing_ppgtt()
685 i915_gem_object_lock(ppgtt->vm.scratch[0], NULL); in init_aliasing_ppgtt()
686 err = i915_vm_map_pt_stash(&ppgtt->vm, &stash); in init_aliasing_ppgtt()
687 i915_gem_object_unlock(ppgtt->vm.scratch[0]); in init_aliasing_ppgtt()
697 ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total); in init_aliasing_ppgtt()
[all …]
intel_gtt.h
561 i915_page_dir_dma_addr(const struct i915_ppgtt *ppgtt, const unsigned int n) in i915_page_dir_dma_addr() argument
563 struct i915_page_table *pt = ppgtt->pd->entry[n]; in i915_page_dir_dma_addr()
565 return __px_dma(pt ? px_base(pt) : ppgtt->vm.scratch[ppgtt->vm.top]); in i915_page_dir_dma_addr()
568 void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt,
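
i915_page_dir_dma_addr() resolves a top-level page-directory slot to the DMA address the hardware should see, falling back to the scratch page when the slot is unpopulated; the ASSIGN_CTX_PDP macro above and the intel_lrc.c / emit_pdps hits below consume it one PDP slot at a time. A hypothetical loop over the four 3-level PDP slots (the GEN8_3LVL_PDPES bound is assumed from intel_lrc_reg.h):

	unsigned int i;

	for (i = 0; i < GEN8_3LVL_PDPES; i++) {
		const u64 daddr = i915_page_dir_dma_addr(ppgtt, i);

		/*
		 * daddr is what ends up in the context PDP registers,
		 * either via ASSIGN_CTX_PDP() or a register-write
		 * sequence such as emit_pdps().
		 */
	}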
intel_lrc.c
825 static void init_ppgtt_regs(u32 *regs, const struct i915_ppgtt *ppgtt) in init_ppgtt_regs() argument
827 if (i915_vm_is_4lvl(&ppgtt->vm)) { in init_ppgtt_regs()
832 ASSIGN_CTX_PML4(ppgtt, regs); in init_ppgtt_regs()
834 ASSIGN_CTX_PDP(ppgtt, regs, 3); in init_ppgtt_regs()
835 ASSIGN_CTX_PDP(ppgtt, regs, 2); in init_ppgtt_regs()
836 ASSIGN_CTX_PDP(ppgtt, regs, 1); in init_ppgtt_regs()
837 ASSIGN_CTX_PDP(ppgtt, regs, 0); in init_ppgtt_regs()
intel_gtt.c
169 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm); in i915_vm_lock_objects() local
172 return i915_gem_object_lock(ppgtt->pd->pt.base, ww); in i915_vm_lock_objects()
selftest_hangcheck.c
1607 struct i915_ppgtt *ppgtt; in igt_reset_evict_ppgtt() local
1614 ppgtt = i915_ppgtt_create(gt, 0); in igt_reset_evict_ppgtt()
1615 if (IS_ERR(ppgtt)) in igt_reset_evict_ppgtt()
1616 return PTR_ERR(ppgtt); in igt_reset_evict_ppgtt()
1618 err = __igt_reset_evict_vma(gt, &ppgtt->vm, in igt_reset_evict_ppgtt()
1620 i915_vm_put(&ppgtt->vm); in igt_reset_evict_ppgtt()
intel_execlists_submission.c
2707 struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(rq->context->vm); in emit_pdps() local
2745 const dma_addr_t pd_daddr = i915_page_dir_dma_addr(ppgtt, i); in emit_pdps()
/linux-6.1.9/drivers/gpu/drm/i915/selftests/
mock_gtt.c
67 struct i915_ppgtt *ppgtt; in mock_ppgtt() local
69 ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL); in mock_ppgtt()
70 if (!ppgtt) in mock_ppgtt()
73 ppgtt->vm.gt = to_gt(i915); in mock_ppgtt()
74 ppgtt->vm.i915 = i915; in mock_ppgtt()
75 ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE); in mock_ppgtt()
76 ppgtt->vm.dma = i915->drm.dev; in mock_ppgtt()
78 i915_address_space_init(&ppgtt->vm, VM_CLASS_PPGTT); in mock_ppgtt()
80 ppgtt->vm.alloc_pt_dma = alloc_pt_dma; in mock_ppgtt()
81 ppgtt->vm.alloc_scratch_dma = alloc_pt_dma; in mock_ppgtt()
[all …]
i915_gem_gtt.c
152 struct i915_ppgtt *ppgtt; in igt_ppgtt_alloc() local
162 ppgtt = i915_ppgtt_create(to_gt(dev_priv), 0); in igt_ppgtt_alloc()
163 if (IS_ERR(ppgtt)) in igt_ppgtt_alloc()
164 return PTR_ERR(ppgtt); in igt_ppgtt_alloc()
166 if (!ppgtt->vm.allocate_va_range) in igt_ppgtt_alloc()
177 limit = min(ppgtt->vm.total, limit); in igt_ppgtt_alloc()
181 err = i915_vm_lock_objects(&ppgtt->vm, &ww); in igt_ppgtt_alloc()
189 err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, size); in igt_ppgtt_alloc()
193 err = i915_vm_map_pt_stash(&ppgtt->vm, &stash); in igt_ppgtt_alloc()
195 i915_vm_free_pt_stash(&ppgtt->vm, &stash); in igt_ppgtt_alloc()
[all …]
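
init_aliasing_ppgtt() in intel_ggtt.c and igt_ppgtt_alloc() above follow the same page-table stash protocol before populating a VA range: reserve page tables, map them (with the scratch object locked in the real callers), invoke the vm's allocate_va_range() hook, then return whatever is left in the stash. A condensed sketch, with locking and error unwinding simplified relative to the callers shown (the stash type name follows the i915 headers):

	struct i915_vm_pt_stash stash = {};
	int err;

	err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, size);
	if (err)
		return err;

	/* The callers above hold i915_gem_object_lock() on vm.scratch[0] here. */
	err = i915_vm_map_pt_stash(&ppgtt->vm, &stash);
	if (!err)
		ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, size);

	i915_vm_free_pt_stash(&ppgtt->vm, &stash);	/* unused page tables go back here */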
/linux-6.1.9/drivers/gpu/drm/i915/gem/selftests/
huge_pages.c
420 struct i915_ppgtt *ppgtt = arg; in igt_mock_exhaust_device_supported_pages() local
421 struct drm_i915_private *i915 = ppgtt->vm.i915; in igt_mock_exhaust_device_supported_pages()
457 vma = i915_vma_instance(obj, &ppgtt->vm, NULL); in igt_mock_exhaust_device_supported_pages()
496 struct i915_ppgtt *ppgtt = arg; in igt_mock_memory_region_huge_pages() local
497 struct drm_i915_private *i915 = ppgtt->vm.i915; in igt_mock_memory_region_huge_pages()
525 vma = i915_vma_instance(obj, &ppgtt->vm, NULL); in igt_mock_memory_region_huge_pages()
574 struct i915_ppgtt *ppgtt = arg; in igt_mock_ppgtt_misaligned_dma() local
575 struct drm_i915_private *i915 = ppgtt->vm.i915; in igt_mock_ppgtt_misaligned_dma()
617 vma = i915_vma_instance(obj, &ppgtt->vm, NULL); in igt_mock_ppgtt_misaligned_dma()
695 struct i915_ppgtt *ppgtt) in close_object_list() argument
[all …]
mock_context.c
37 struct i915_ppgtt *ppgtt; in mock_context() local
41 ppgtt = mock_ppgtt(i915, name); in mock_context()
42 if (!ppgtt) in mock_context()
45 ctx->vm = &ppgtt->vm; in mock_context()
i915_gem_migrate.c
375 struct i915_ppgtt *ppgtt; in igt_async_migrate() local
379 ppgtt = i915_ppgtt_create(gt, 0); in igt_async_migrate()
380 if (IS_ERR(ppgtt)) in igt_async_migrate()
381 return PTR_ERR(ppgtt); in igt_async_migrate()
425 err = __igt_lmem_pages_migrate(gt, &ppgtt->vm, &deps, &spin, in igt_async_migrate()
436 i915_vm_put(&ppgtt->vm); in igt_async_migrate()
/linux-6.1.9/drivers/gpu/drm/i915/gvt/
scheduler.c
438 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm); in set_context_ppgtt_from_shadow() local
442 set_dma_address(ppgtt->pd, mm->ppgtt_mm.shadow_pdps[0]); in set_context_ppgtt_from_shadow()
446 i915_pd_entry(ppgtt->pd, i); in set_context_ppgtt_from_shadow()
551 if (!bb->ppgtt) { in prepare_shadow_batch_buffer()
1295 struct i915_ppgtt *ppgtt) in i915_context_ppgtt_root_restore() argument
1299 if (i915_vm_is_4lvl(&ppgtt->vm)) { in i915_context_ppgtt_root_restore()
1300 set_dma_address(ppgtt->pd, s->i915_context_pml4); in i915_context_ppgtt_root_restore()
1304 i915_pd_entry(ppgtt->pd, i); in i915_context_ppgtt_root_restore()
1356 struct i915_ppgtt *ppgtt) in i915_context_ppgtt_root_save() argument
1360 if (i915_vm_is_4lvl(&ppgtt->vm)) { in i915_context_ppgtt_root_save()
[all …]
scheduler.h
133 bool ppgtt; member
cmd_parser.c
1916 bb->ppgtt = (s->buf_addr_type == GTT_BUFFER) ? false : true; in perform_bb_shadow()
1929 if (bb->ppgtt) in perform_bb_shadow()
/linux-6.1.9/drivers/gpu/drm/i915/gem/
i915_gem_context.c
1612 struct i915_ppgtt *ppgtt; in i915_gem_create_context() local
1614 ppgtt = i915_ppgtt_create(to_gt(i915), 0); in i915_gem_create_context()
1615 if (IS_ERR(ppgtt)) { in i915_gem_create_context()
1617 PTR_ERR(ppgtt)); in i915_gem_create_context()
1618 err = PTR_ERR(ppgtt); in i915_gem_create_context()
1621 vm = &ppgtt->vm; in i915_gem_create_context()
1793 struct i915_ppgtt *ppgtt; in i915_gem_vm_create_ioctl() local
1803 ppgtt = i915_ppgtt_create(to_gt(i915), 0); in i915_gem_vm_create_ioctl()
1804 if (IS_ERR(ppgtt)) in i915_gem_vm_create_ioctl()
1805 return PTR_ERR(ppgtt); in i915_gem_vm_create_ioctl()
[all …]