Searched refs: num_compute_rings (Results 1 – 9 of 9) sorted by relevance

/linux-6.1.9/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c
198 if (adev->gfx.num_compute_rings > 1 && in amdgpu_gfx_is_high_priority_compute_queue()
211 adev->gfx.num_compute_rings); in amdgpu_gfx_compute_queue_acquire()
413 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_mqd_sw_init()
449 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_mqd_sw_fini()
475 adev->gfx.num_compute_rings)) { in amdgpu_gfx_disable_kcq()
480 for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_gfx_disable_kcq()
533 adev->gfx.num_compute_rings + in amdgpu_gfx_enable_kcq()
545 for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_gfx_enable_kcq()
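
Most of the hits above share one pattern: num_compute_rings bounds a loop over the adev->gfx.compute_ring[] array. A minimal sketch of that recurring loop, assuming the struct layout sketched under the amdgpu_gfx.h hit below (the per-ring body is a placeholder, not code from any one hit):

    /* Recurring pattern behind the amdgpu_gfx.c hits: num_compute_rings is
     * the count of valid entries in compute_ring[], so every per-ring
     * init/teardown step walks exactly that many rings. */
    int i;

    for (i = 0; i < adev->gfx.num_compute_rings; i++) {
        struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];

        /* per-ring work goes here, e.g. MQD setup in
         * amdgpu_gfx_mqd_sw_init() or teardown in amdgpu_gfx_mqd_sw_fini() */
    }
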
amdgpu_gfx.h
319 unsigned num_compute_rings; member
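
For context, a trimmed sketch of where that member sits in struct amdgpu_gfx; this is an abridged view, not the full definition, and it assumes the usual 6.1 layout in which the ring array is bounded by AMDGPU_MAX_COMPUTE_RINGS:

    /* Abridged sketch of struct amdgpu_gfx (amdgpu_gfx.h); unrelated members
     * omitted. AMDGPU_MAX_COMPUTE_RINGS is the compile-time ceiling used by
     * the early_init clamps in the gfx_v*_0.c hits below. */
    struct amdgpu_gfx {
        /* ... other members ... */
        struct amdgpu_ring      compute_ring[AMDGPU_MAX_COMPUTE_RINGS];
        unsigned                num_compute_rings;  /* valid entries in compute_ring[] */
        /* ... other members ... */
    };
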
gfx_v8_0.c
1340 mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE; in gfx_v8_0_mec_init()
2077 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v8_0_sw_fini()
4363 r = amdgpu_ring_alloc(kiq_ring, (8 * adev->gfx.num_compute_rings) + 8); in gfx_v8_0_kiq_kcq_enable()
4377 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kiq_kcq_enable()
4731 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kcq_resume()
4774 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_cp_test_all_rings()
4838 r = amdgpu_ring_alloc(kiq_ring, 6 * adev->gfx.num_compute_rings); in gfx_v8_0_kcq_disable()
4842 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kcq_disable()
5041 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_pre_soft_reset()
5136 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_post_soft_reset()
[all …]
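
The gfx_v8_0_mec_init() hit (line 1340) sizes the MEC's per-queue HPD/EOP buffer by the number of compute rings: one GFX8_MEC_HPD_SIZE slot per enabled ring. A hedged sketch of that step; the allocation call and domain flag are paraphrased from memory rather than copied, and the real function also unmaps and unreserves the BO afterwards:

    /* One HPD slot per enabled compute ring; the buffer backs the MEC's
     * end-of-pipe / queue-descriptor storage. */
    u32 *hpd;
    u32 mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE;
    int r;

    r = amdgpu_bo_create_reserved(adev, mec_hpd_size, PAGE_SIZE,
                                  AMDGPU_GEM_DOMAIN_VRAM,
                                  &adev->gfx.mec.hpd_eop_obj,
                                  &adev->gfx.mec.hpd_eop_gpu_addr,
                                  (void **)&hpd);
    if (r) {
        dev_warn(adev->dev, "(%d) create HPD EOP bo failed\n", r);
        return r;
    }

    memset(hpd, 0, mec_hpd_size);
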
gfx_v7_0.c
2747 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_fini()
3088 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
3098 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
4215 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v7_0_early_init()
4520 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_sw_fini()
4871 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_eop_irq()
4896 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_fault()
5086 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_set_ring_funcs()
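
The early_init hits in this and the other gfx_v*_0.c groups (lines 4215, 4675, 4547, 3045 and 7618) all apply the same clamp: the requested kernel compute queue (KCQ) count is capped at the size of the driver's compute_ring[] array. A minimal sketch of that clamp, assuming AMDGPU_MAX_COMPUTE_RINGS is the array bound from amdgpu_gfx.h:

    /* Clamp the module-parameter-driven KCQ count to the number of
     * compute_ring[] slots the driver actually has. */
    adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),
                                      AMDGPU_MAX_COMPUTE_RINGS);
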
gfx_v11_0.c
742 mec_hpd_size = adev->gfx.num_compute_rings * GFX11_MEC_HPD_SIZE; in gfx_v11_0_mec_init()
1471 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v11_0_sw_fini()
4101 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_kcq_resume()
4180 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_cp_resume()
4618 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_check_soft_reset()
4675 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v11_0_early_init()
5881 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_eop_irq()
5959 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_handle_priv_fault()
6200 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v11_0_set_ring_funcs()
gfx_v9_0.c
1780 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
2250 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
3671 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kcq_resume()
3735 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
4547 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v9_0_early_init()
5717 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
5747 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_fault()
6795 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
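
The eop_irq hits (for example gfx_v9_0_eop_irq() at line 5717) walk all num_compute_rings rings to find the one whose me/pipe/queue matches the interrupt vector, then hand it to the fence code. A hedged sketch of that dispatch; the ring_id bit decoding is paraphrased from memory and may differ per generation:

    /* Compute-side EOP interrupt dispatch: match the IV's me/pipe/queue
     * against each compute ring and retire fences on the ring that fired. */
    u8 me_id = (entry->ring_id & 0x0c) >> 2;
    u8 pipe_id = entry->ring_id & 0x03;
    u8 queue_id = (entry->ring_id & 0x70) >> 4;
    int i;

    for (i = 0; i < adev->gfx.num_compute_rings; i++) {
        struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];

        if (ring->me == me_id && ring->pipe == pipe_id &&
            ring->queue == queue_id)
            amdgpu_fence_process(ring);
    }
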
gfx_v6_0.c
3045 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v6_0_early_init()
3097 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v6_0_sw_init()
3130 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_sw_fini()
3533 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_set_ring_funcs()
gfx_v10_0.c
4303 mec_hpd_size = adev->gfx.num_compute_rings * GFX10_MEC_HPD_SIZE; in gfx_v10_0_mec_init()
4769 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_sw_fini()
6990 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_kcq_resume()
7059 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_cp_resume()
7618 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v10_0_early_init()
9121 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_eop_irq()
9199 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_handle_priv_fault()
9464 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_set_ring_funcs()
amdgpu_kms.c
383 for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_hw_ip_info()
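
The amdgpu_kms.c hit is the userspace-facing side: amdgpu_hw_ip_info() reports how many compute rings are actually usable when the AMDGPU_HW_IP_COMPUTE type is queried. A hedged sketch of that counting loop, with the surrounding switch statement and alignment fields omitted:

    /* Count only the compute rings whose scheduler is ready; this is the
     * ring count reported back to userspace for AMDGPU_HW_IP_COMPUTE. */
    unsigned int num_rings = 0;
    int i;

    for (i = 0; i < adev->gfx.num_compute_rings; i++)
        if (adev->gfx.compute_ring[i].sched.ready)
            ++num_rings;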