/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c
    310  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_init_ring() local
    313  spin_lock_init(&kiq->ring_lock);  in amdgpu_gfx_kiq_init_ring()
    321  (adev->doorbell_index.kiq +  in amdgpu_gfx_kiq_init_ring()
    329  ring->eop_gpu_addr = kiq->eop_gpu_addr;  in amdgpu_gfx_kiq_init_ring()
    347  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_fini() local
    349  amdgpu_bo_free_kernel(&kiq->eop_obj, &kiq->eop_gpu_addr, NULL);  in amdgpu_gfx_kiq_fini()
    357  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_init() local
    360  AMDGPU_GEM_DOMAIN_GTT, &kiq->eop_obj,  in amdgpu_gfx_kiq_init()
    361  &kiq->eop_gpu_addr, (void **)&hpd);  in amdgpu_gfx_kiq_init()
    369  r = amdgpu_bo_reserve(kiq->eop_obj, true);  in amdgpu_gfx_kiq_init()
    [all …]
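The amdgpu_gfx.c hits above are the shared KIQ setup/teardown helpers. As a rough illustration of the setup pattern they imply, here is a minimal sketch; the EOP buffer size/alignment, the memset, and the doorbell assignment are assumptions, not the exact upstream code.

/* Sketch of per-instance KIQ setup, following the amdgpu_gfx_kiq_init_ring()
 * and amdgpu_gfx_kiq_init() hits above.  Size, alignment and the doorbell
 * assignment are illustrative assumptions. */
static int kiq_setup_sketch(struct amdgpu_device *adev, int xcc_id)
{
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];
	struct amdgpu_ring *ring = &kiq->ring;
	u32 *hpd;
	int r;

	spin_lock_init(&kiq->ring_lock);

	/* Small GTT buffer backing the KIQ end-of-pipe (EOP) area. */
	r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
				    AMDGPU_GEM_DOMAIN_GTT, &kiq->eop_obj,
				    &kiq->eop_gpu_addr, (void **)&hpd);
	if (r)
		return r;

	memset(hpd, 0, PAGE_SIZE);

	/* The KIQ ring reuses the EOP buffer and its own doorbell slot
	 * (upstream adds a per-XCC offset to adev->doorbell_index.kiq). */
	ring->eop_gpu_addr = kiq->eop_gpu_addr;
	ring->doorbell_index = adev->doorbell_index.kiq;

	return 0;
}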
amdgpu_amdkfd.c
    823  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];  in amdgpu_amdkfd_unmap_hiq() local
    824  struct amdgpu_ring *kiq_ring = &kiq->ring;  in amdgpu_amdkfd_unmap_hiq()
    829  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in amdgpu_amdkfd_unmap_hiq()
    846  spin_lock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
    848  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in amdgpu_amdkfd_unmap_hiq()
    849  spin_unlock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
    854  kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);  in amdgpu_amdkfd_unmap_hiq()
    859  spin_unlock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
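Read in order, the amdgpu_amdkfd_unmap_hiq() hits show the standard KIQ submission discipline: take kiq->ring_lock, reserve packet space sized by the per-ASIC pmf callback, emit the request, commit. A condensed sketch of that flow; the error codes and the amdgpu_ring_commit() call are assumptions, and fence polling is omitted.

/* Sketch of the unmap-through-KIQ flow from amdgpu_amdkfd_unmap_hiq();
 * return codes and the commit step are assumptions. */
static int unmap_hiq_sketch(struct amdgpu_device *adev,
			    struct amdgpu_ring *ring, u32 inst)
{
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];
	struct amdgpu_ring *kiq_ring = &kiq->ring;

	if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
		return -EINVAL;

	spin_lock(&kiq->ring_lock);

	if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
		spin_unlock(&kiq->ring_lock);
		return -ENOMEM;
	}

	/* Ask the CP firmware to reset/unmap the target queue. */
	kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
	amdgpu_ring_commit(kiq_ring);

	spin_unlock(&kiq->ring_lock);
	return 0;
}

The mes_v10_1/mes_v11_0 entries below use the mirror-image kiq_map_queues() path, sized by map_queues_size, to bring the MES ring online.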
mes_v11_0.c
    877  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in mes_v11_0_kiq_enable_queue() local
    878  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_kiq_enable_queue()
    881  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in mes_v11_0_kiq_enable_queue()
    884  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in mes_v11_0_kiq_enable_queue()
    890  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring);  in mes_v11_0_kiq_enable_queue()
    902  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_queue_init()
    969  spin_lock_init(&adev->gfx.kiq[0].ring_lock);  in mes_v11_0_kiq_ring_init()
    971  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_kiq_ring_init()
    997  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_mqd_sw_init()
    1085  amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,  in mes_v11_0_sw_fini()
    [all …]
mes_v10_1.c
    803  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in mes_v10_1_kiq_enable_queue() local
    804  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in mes_v10_1_kiq_enable_queue()
    807  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in mes_v10_1_kiq_enable_queue()
    810  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in mes_v10_1_kiq_enable_queue()
    816  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring);  in mes_v10_1_kiq_enable_queue()
    863  spin_lock_init(&adev->gfx.kiq[0].ring_lock);  in mes_v10_1_kiq_ring_init()
    865  ring = &adev->gfx.kiq[0].ring;  in mes_v10_1_kiq_ring_init()
    891  ring = &adev->gfx.kiq[0].ring;  in mes_v10_1_mqd_sw_init()
    977  amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,  in mes_v10_1_sw_fini()
    978  &adev->gfx.kiq[0].ring.mqd_gpu_addr,  in mes_v10_1_sw_fini()
    [all …]
gmc_v11_0.c
    297  if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring.sched.ready) &&  in gmc_v11_0_flush_gpu_tlb()
    335  struct amdgpu_ring *ring = &adev->gfx.kiq[0].ring;  in gmc_v11_0_flush_gpu_tlb_pasid()
    336  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gmc_v11_0_flush_gpu_tlb_pasid() local
    339  spin_lock(&adev->gfx.kiq[0].ring_lock);  in gmc_v11_0_flush_gpu_tlb_pasid()
    341  amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8);  in gmc_v11_0_flush_gpu_tlb_pasid()
    342  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v11_0_flush_gpu_tlb_pasid()
    347  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v11_0_flush_gpu_tlb_pasid()
    352  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v11_0_flush_gpu_tlb_pasid()
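When the KIQ (or MES) ring scheduler is ready, gmc_v10_0/gmc_v11_0 route per-PASID TLB flushes through the KIQ instead of direct MMIO, as the flush_gpu_tlb_pasid() hits show. The kiq_invalidate_tlbs() arguments are truncated in the hits, so the pasid/flush_type/all_hub parameters below are assumptions, and fence emission and waiting are left out.

/* Sketch of the KIQ-based flush_gpu_tlb_pasid() path (gmc_v10_0/gmc_v11_0);
 * invalidate arguments and the commit step are assumptions. */
static void flush_tlb_pasid_sketch(struct amdgpu_device *adev, uint16_t pasid,
				   uint32_t flush_type, bool all_hub)
{
	struct amdgpu_ring *ring = &adev->gfx.kiq[0].ring;
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];

	spin_lock(&adev->gfx.kiq[0].ring_lock);

	/* The extra 8 dwords presumably leave room for the fence packet
	 * emitted after the invalidate request. */
	amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8);
	kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);
	amdgpu_ring_commit(ring);

	spin_unlock(&adev->gfx.kiq[0].ring_lock);
}

gmc_v9_0.c (below) follows the same shape but grows the dword count (ndw) by another invalidate_tlbs_size when more than one invalidate packet has to be emitted.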
gmc_v10_0.c
    339  if (adev->gfx.kiq[0].ring.sched.ready && !adev->enable_mes &&  in gmc_v10_0_flush_gpu_tlb()
    425  struct amdgpu_ring *ring = &adev->gfx.kiq[0].ring;  in gmc_v10_0_flush_gpu_tlb_pasid()
    426  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gmc_v10_0_flush_gpu_tlb_pasid() local
    429  spin_lock(&adev->gfx.kiq[0].ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
    431  amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8);  in gmc_v10_0_flush_gpu_tlb_pasid()
    432  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v10_0_flush_gpu_tlb_pasid()
    437  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
    442  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
gmc_v9_0.c
    847  if (adev->gfx.kiq[0].ring.sched.ready &&  in gmc_v9_0_flush_gpu_tlb()
    956  struct amdgpu_ring *ring = &adev->gfx.kiq[inst].ring;  in gmc_v9_0_flush_gpu_tlb_pasid()
    957  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];  in gmc_v9_0_flush_gpu_tlb_pasid() local
    972  unsigned int ndw = kiq->pmf->invalidate_tlbs_size + 8;  in gmc_v9_0_flush_gpu_tlb_pasid()
    975  ndw += kiq->pmf->invalidate_tlbs_size;  in gmc_v9_0_flush_gpu_tlb_pasid()
    977  spin_lock(&adev->gfx.kiq[inst].ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()
    981  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v9_0_flush_gpu_tlb_pasid()
    987  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v9_0_flush_gpu_tlb_pasid()
    990  kiq->pmf->kiq_invalidate_tlbs(ring,  in gmc_v9_0_flush_gpu_tlb_pasid()
    995  spin_unlock(&adev->gfx.kiq[inst].ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()
    [all …]
amdgpu_virt.c
    78  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in amdgpu_virt_kiq_reg_write_reg_wait() local
    79  struct amdgpu_ring *ring = &kiq->ring;  in amdgpu_virt_kiq_reg_write_reg_wait()
    90  spin_lock_irqsave(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
    99  spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
    121  spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
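Under SR-IOV, register write-and-wait sequences also go through the KIQ; the _irqsave locking in amdgpu_virt_kiq_reg_write_reg_wait() suggests it may be entered from atomic context. A rough sketch of the body; the dword count, the emit helper, and the fence-polling calls are assumptions based on the generic amdgpu ring API rather than quotes from amdgpu_virt.c, and the retry/timeout handling is omitted.

/* Sketch of amdgpu_virt_kiq_reg_write_reg_wait(); emit helper, sizes and
 * fence polling are assumptions, retry handling omitted. */
static void virt_reg_write_reg_wait_sketch(struct amdgpu_device *adev,
					   u32 reg0, u32 reg1, u32 ref, u32 mask)
{
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
	struct amdgpu_ring *ring = &kiq->ring;
	unsigned long flags;
	u32 seq;

	spin_lock_irqsave(&kiq->ring_lock, flags);

	amdgpu_ring_alloc(ring, 32);
	/* Write reg0, then poll reg1 until (reg1 & mask) == ref. */
	amdgpu_ring_emit_reg_write_reg_wait(ring, reg0, reg1, ref, mask);
	amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_TRY);
	amdgpu_ring_commit(ring);

	spin_unlock_irqrestore(&kiq->ring_lock, flags);

	amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
}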
gfx_v9_0.c
    893  adev->gfx.kiq[0].pmf = &gfx_v9_0_kiq_pm4_funcs;  in gfx_v9_0_set_kiq_pm4_funcs()
    2001  struct amdgpu_kiq *kiq;  in gfx_v9_0_sw_init() local
    2154  kiq = &adev->gfx.kiq[0];  in gfx_v9_0_sw_init()
    2155  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v9_0_sw_init()
    2196  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v9_0_sw_fini()
    3169  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v9_0_cp_compute_enable()
    3451  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v9_0_kiq_init_register()
    3542  tmp_mqd = (struct v9_mqd *)adev->gfx.kiq[0].mqd_backup;  in gfx_v9_0_kiq_init_queue()
    3545  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v9_0_kiq_init_queue()
    3546  memcpy(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(struct v9_mqd_allocation));  in gfx_v9_0_kiq_init_queue()
    [all …]
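The *_kiq_init_queue() hits here (and in gfx_v8_0/gfx_v10_0/gfx_v11_0 below) show the MQD backup convention: the first initialization saves the programmed MQD into kiq->mqd_backup, and on reset/resume the saved image is copied back instead of being rebuilt. A sketch of that branch; the reset/suspend test is an assumption about when the restore path runs.

/* Sketch of the KIQ MQD save/restore convention in *_kiq_init_queue();
 * the in-reset/in-suspend condition is an assumption. */
struct v9_mqd *mqd = ring->mqd_ptr;

if (amdgpu_in_reset(adev) || adev->in_suspend) {
	/* Restore the MQD image captured during the first initialization. */
	if (adev->gfx.kiq[0].mqd_backup)
		memcpy(mqd, adev->gfx.kiq[0].mqd_backup,
		       sizeof(struct v9_mqd_allocation));
} else {
	/* First-time init: program the MQD and KIQ registers (see the
	 * gfx_v9_0_kiq_init_register() hit above), then save a copy. */
	if (adev->gfx.kiq[0].mqd_backup)
		memcpy(adev->gfx.kiq[0].mqd_backup, mqd,
		       sizeof(struct v9_mqd_allocation));
}

gfx_v11_0.c does the same restore with memcpy_toio(), hinting that its MQD is accessed through an I/O mapping.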
gfx_v9_4_3.c
    189  adev->gfx.kiq[i].pmf = &gfx_v9_4_3_kiq_pm4_funcs;  in gfx_v9_4_3_set_kiq_pm4_funcs()
    781  struct amdgpu_kiq *kiq;  in gfx_v9_4_3_sw_init() local
    850  kiq = &adev->gfx.kiq[xcc_id];  in gfx_v9_4_3_sw_init()
    851  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, xcc_id);  in gfx_v9_4_3_sw_init()
    888  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[i].ring);  in gfx_v9_4_3_sw_fini()
    1389  adev->gfx.kiq[xcc_id].ring.sched.ready = false;  in gfx_v9_4_3_xcc_cp_compute_enable()
    1676  ((adev->doorbell_index.kiq +  in gfx_v9_4_3_xcc_kiq_init_register()
    1765  tmp_mqd = (struct v9_mqd *)adev->gfx.kiq[xcc_id].mqd_backup;  in gfx_v9_4_3_xcc_kiq_init_queue()
    1768  if (adev->gfx.kiq[xcc_id].mqd_backup)  in gfx_v9_4_3_xcc_kiq_init_queue()
    1769  memcpy(mqd, adev->gfx.kiq[xcc_id].mqd_backup, sizeof(struct v9_mqd_allocation));  in gfx_v9_4_3_xcc_kiq_init_queue()
    [all …]
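gfx_v9_4_3 is the multi-instance case: adev->gfx.kiq[] (see the amdgpu_gfx.h member further down) holds one KIQ per graphics compute cluster, indexed by xcc_id, and sw_init/sw_fini walk every instance. A short sketch of that iteration; the NUM_XCC()-based instance count is an assumption.

/* Sketch of per-XCC KIQ ring setup/teardown as suggested by the
 * gfx_v9_4_3_sw_init()/_sw_fini() hits; instance counting is an assumption. */
int r, xcc_id, num_xcc = NUM_XCC(adev->gfx.xcc_mask);

for (xcc_id = 0; xcc_id < num_xcc; xcc_id++) {
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];

	r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, xcc_id);
	if (r)
		return r;
}

/* ... and on teardown: */
for (xcc_id = 0; xcc_id < num_xcc; xcc_id++)
	amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[xcc_id].ring);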
amdgpu_amdkfd_gfx_v10_3.c
    280  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in hiq_mqd_load_v10_3()
    295  spin_lock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v10_3()
    322  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v10_3()
gfx_v11_0.c
    199  if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {  in gfx11_kiq_unmap_queues()
    267  adev->gfx.kiq[0].pmf = &gfx_v11_0_kiq_pm4_funcs;  in gfx_v11_0_set_kiq_pm4_funcs()
    1309  struct amdgpu_kiq *kiq;  in gfx_v11_0_sw_init() local
    1443  kiq = &adev->gfx.kiq[0];  in gfx_v11_0_sw_init()
    1444  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v11_0_sw_init()
    1514  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v11_0_sw_fini()
    3590  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v11_0_cp_set_doorbell_range()
    3948  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v11_0_kiq_init_register()
    3987  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v11_0_kiq_init_queue()
    3988  memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(*mqd));  in gfx_v11_0_kiq_init_queue()
    [all …]
vega10_reg_init.c
    60  adev->doorbell_index.kiq = AMDGPU_DOORBELL64_KIQ;  in vega10_doorbell_index_init()
amdgpu_amdkfd_gfx_v11.c
    265  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in hiq_mqd_load_v11()
    280  spin_lock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v11()
    307  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v11()
vega20_reg_init.c
    60  adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ;  in vega20_doorbell_index_init()
amdgpu_doorbell.h
    52  uint32_t kiq;  member
amdgpu_amdkfd_gfx_v10.c
    294  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in kgd_hiq_mqd_load()
    309  spin_lock(&adev->gfx.kiq[0].ring_lock);  in kgd_hiq_mqd_load()
    336  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in kgd_hiq_mqd_load()
gfx_v10_0.c
    3557  if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {  in gfx10_kiq_unmap_queues()
    3625  adev->gfx.kiq[0].pmf = &gfx_v10_0_kiq_pm4_funcs;  in gfx_v10_0_set_kiq_pm4_funcs()
    4486  struct amdgpu_kiq *kiq;  in gfx_v10_0_sw_init() local
    4530  &adev->gfx.kiq[0].irq);  in gfx_v10_0_sw_init()
    4615  kiq = &adev->gfx.kiq[0];  in gfx_v10_0_sw_init()
    4616  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v10_0_sw_init()
    4673  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v10_0_sw_fini()
    6223  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v10_0_cp_compute_enable()
    6693  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v10_0_kiq_init_register()
    6732  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v10_0_kiq_init_queue()
    [all …]
amdgpu_amdkfd_gfx_v9.c
    307  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[inst].ring;  in kgd_gfx_v9_hiq_mqd_load()
    322  spin_lock(&adev->gfx.kiq[inst].ring_lock);  in kgd_gfx_v9_hiq_mqd_load()
    349  spin_unlock(&adev->gfx.kiq[inst].ring_lock);  in kgd_gfx_v9_hiq_mqd_load()
gfx_v8_0.c
    1904  struct amdgpu_kiq *kiq;  in gfx_v8_0_sw_init() local
    2025  kiq = &adev->gfx.kiq[0];  in gfx_v8_0_sw_init()
    2026  r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq, 0);  in gfx_v8_0_sw_init()
    2055  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v8_0_sw_fini()
    4296  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v8_0_cp_compute_enable()
    4318  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_kiq_kcq_enable()
    4604  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v8_0_kiq_init_queue()
    4605  memcpy(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(struct vi_mqd_allocation));  in gfx_v8_0_kiq_init_queue()
    4628  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v8_0_kiq_init_queue()
    4629  memcpy(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(struct vi_mqd_allocation));  in gfx_v8_0_kiq_init_queue()
    [all …]
amdgpu_gfx.h
    356  struct amdgpu_kiq kiq[AMDGPU_MAX_GC_INSTANCES];  member
aqua_vanjaram.c
    41  adev->doorbell_index.kiq = AMDGPU_DOORBELL_LAYOUT1_KIQ_START;  in aqua_vanjaram_doorbell_index_init()
soc21.c
    480  adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;  in soc21_init_doorbell_index()
nv.c
    562  adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;  in nv_init_doorbell_index()
vi.c
    2207  adev->doorbell_index.kiq = AMDGPU_DOORBELL_KIQ;  in legacy_doorbell_index_init()