/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_reset.c |
      119  struct amdgpu_reset_domain *reset_domain = container_of(ref,   in amdgpu_reset_destroy_reset_domain() local
      122  if (reset_domain->wq)   in amdgpu_reset_destroy_reset_domain()
      123  destroy_workqueue(reset_domain->wq);   in amdgpu_reset_destroy_reset_domain()
      125  kvfree(reset_domain);   in amdgpu_reset_destroy_reset_domain()
      131  struct amdgpu_reset_domain *reset_domain;   in amdgpu_reset_create_reset_domain() local
      133  reset_domain = kvzalloc(sizeof(struct amdgpu_reset_domain), GFP_KERNEL);   in amdgpu_reset_create_reset_domain()
      134  if (!reset_domain) {   in amdgpu_reset_create_reset_domain()
      139  reset_domain->type = type;   in amdgpu_reset_create_reset_domain()
      140  kref_init(&reset_domain->refcount);   in amdgpu_reset_create_reset_domain()
      142  reset_domain->wq = create_singlethread_workqueue(wq_name);   in amdgpu_reset_create_reset_domain()
      [all …]
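The hits above cover the reset-domain life cycle: amdgpu_reset_create_reset_domain() allocates the object with kvzalloc(), initialises its kref and creates a single-threaded workqueue, and the kref release callback amdgpu_reset_destroy_reset_domain() destroys that workqueue and frees the memory. A minimal sketch of the same pattern, using a simplified stand-in struct (only the members the hits touch) rather than the driver's real amdgpu_reset_domain:

    #include <linux/container_of.h>
    #include <linux/kref.h>
    #include <linux/slab.h>        /* kvzalloc(), kvfree(), GFP_KERNEL */
    #include <linux/workqueue.h>

    /* Simplified stand-in for struct amdgpu_reset_domain. */
    struct reset_domain {
        struct kref refcount;
        struct workqueue_struct *wq;
    };

    /* kref release callback, mirroring amdgpu_reset_destroy_reset_domain(). */
    static void reset_domain_destroy(struct kref *ref)
    {
        struct reset_domain *domain =
            container_of(ref, struct reset_domain, refcount);

        if (domain->wq)
            destroy_workqueue(domain->wq);
        kvfree(domain);
    }

    /* Allocation path, mirroring amdgpu_reset_create_reset_domain(). */
    static struct reset_domain *reset_domain_create(const char *wq_name)
    {
        struct reset_domain *domain = kvzalloc(sizeof(*domain), GFP_KERNEL);

        if (!domain)
            return NULL;

        kref_init(&domain->refcount);   /* creator holds the first reference */
        domain->wq = create_singlethread_workqueue(wq_name);
        if (!domain->wq) {
            kvfree(domain);
            return NULL;
        }
        return domain;
    }

    /* The last user drops the domain with:
     *     kref_put(&domain->refcount, reset_domain_destroy);
     */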
|
D | mxgpu_ai.c |
      262  if (atomic_cmpxchg(&adev->reset_domain->in_gpu_reset, 0, 1) != 0)   in xgpu_ai_mailbox_flr_work()
      265  down_write(&adev->reset_domain->sem);   in xgpu_ai_mailbox_flr_work()
      280  atomic_set(&adev->reset_domain->in_gpu_reset, 0);   in xgpu_ai_mailbox_flr_work()
      281  up_write(&adev->reset_domain->sem);   in xgpu_ai_mailbox_flr_work()
      321  WARN_ONCE(!amdgpu_reset_domain_schedule(adev->reset_domain,   in xgpu_ai_mailbox_rcv_irq()
|
D | mxgpu_nv.c |
      284  if (atomic_cmpxchg(&adev->reset_domain->in_gpu_reset, 0, 1) != 0)   in xgpu_nv_mailbox_flr_work()
      287  down_write(&adev->reset_domain->sem);   in xgpu_nv_mailbox_flr_work()
      302  atomic_set(&adev->reset_domain->in_gpu_reset, 0);   in xgpu_nv_mailbox_flr_work()
      303  up_write(&adev->reset_domain->sem);   in xgpu_nv_mailbox_flr_work()
      349  WARN_ONCE(!amdgpu_reset_domain_schedule(adev->reset_domain,   in xgpu_nv_mailbox_rcv_irq()
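In both SR-IOV mailbox handlers the interrupt side (xgpu_*_mailbox_rcv_irq) only schedules the FLR work on the reset domain; the work item itself claims the domain's in_gpu_reset flag with atomic_cmpxchg() and takes the rw_semaphore for writing before touching the hardware, releasing both when done. A hedged sketch of that write-side pattern with generic names (in the real driver the work item lives outside the reset domain):

    #include <linux/atomic.h>
    #include <linux/container_of.h>
    #include <linux/rwsem.h>
    #include <linux/workqueue.h>

    struct reset_domain {
        atomic_t in_gpu_reset;          /* 1 while a reset is in flight */
        struct rw_semaphore sem;        /* write: reset path, read: HW access */
        struct work_struct flr_work;    /* queued from the mailbox IRQ handler */
    };

    static void flr_work_handler(struct work_struct *work)
    {
        struct reset_domain *domain =
            container_of(work, struct reset_domain, flr_work);

        /* Claim the reset; bail out if one is already running. */
        if (atomic_cmpxchg(&domain->in_gpu_reset, 0, 1) != 0)
            return;

        /* Exclude all register readers for the duration of the FLR. */
        down_write(&domain->sem);

        /* ... function-level reset and re-init would happen here ... */

        atomic_set(&domain->in_gpu_reset, 0);
        up_write(&domain->sem);
    }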
|
D | amdgpu_reset.h |
      125  void amdgpu_device_lock_reset_domain(struct amdgpu_reset_domain *reset_domain);
      127  void amdgpu_device_unlock_reset_domain(struct amdgpu_reset_domain *reset_domain);
|
D | amdgpu_ras_eeprom.c |
      268  down_read(&adev->reset_domain->sem);   in __write_table_header()
      273  up_read(&adev->reset_domain->sem);   in __write_table_header()
      329  down_read(&adev->reset_domain->sem);   in __write_table_ras_info()
      334  up_read(&adev->reset_domain->sem);   in __write_table_ras_info()
      581  down_read(&adev->reset_domain->sem);   in __amdgpu_ras_eeprom_write()
      587  up_read(&adev->reset_domain->sem);   in __amdgpu_ras_eeprom_write()
      757  down_read(&adev->reset_domain->sem);   in amdgpu_ras_eeprom_update_header()
      762  up_read(&adev->reset_domain->sem);   in amdgpu_ras_eeprom_update_header()
      867  down_read(&adev->reset_domain->sem);   in __amdgpu_ras_eeprom_read()
      873  up_read(&adev->reset_domain->sem);   in __amdgpu_ras_eeprom_read()
|
D | amdgpu_xgmi.c |
      300  amdgpu_reset_put_reset_domain(hive->reset_domain);   in amdgpu_xgmi_hive_release()
      301  hive->reset_domain = NULL;   in amdgpu_xgmi_hive_release()
      532  if (adev->reset_domain->type != XGMI_HIVE) {   in amdgpu_get_xgmi_hive()
      533  hive->reset_domain =   in amdgpu_get_xgmi_hive()
      535  if (!hive->reset_domain) {   in amdgpu_get_xgmi_hive()
      543  amdgpu_reset_get_reset_domain(adev->reset_domain);   in amdgpu_get_xgmi_hive()
      544  hive->reset_domain = adev->reset_domain;   in amdgpu_get_xgmi_hive()
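amdgpu_get_xgmi_hive() shows how devices in an XGMI hive end up sharing one reset domain: if the device's own domain is not already hive-wide, the hive creates its own, otherwise it takes an extra reference on the device's existing domain and reuses it; amdgpu_xgmi_hive_release() drops that reference again. A sketch of the reference handoff, with hypothetical wrapper types that exist only to show the pattern:

    #include <linux/container_of.h>
    #include <linux/errno.h>
    #include <linux/kref.h>
    #include <linux/slab.h>

    enum reset_domain_type { RESET_DOMAIN_DEVICE, RESET_DOMAIN_XGMI_HIVE };

    struct reset_domain {
        struct kref refcount;
        enum reset_domain_type type;
    };

    static void reset_domain_release(struct kref *ref)
    {
        kvfree(container_of(ref, struct reset_domain, refcount));
    }

    /* Hypothetical hive/device wrappers, only to show the handoff. */
    struct xgmi_hive  { struct reset_domain *reset_domain; };
    struct device_ctx { struct reset_domain *reset_domain; };

    static int hive_adopt_reset_domain(struct xgmi_hive *hive,
                                       struct device_ctx *adev)
    {
        if (adev->reset_domain->type != RESET_DOMAIN_XGMI_HIVE) {
            /* Device was brought up on its own: give the hive a new domain. */
            struct reset_domain *d = kvzalloc(sizeof(*d), GFP_KERNEL);

            if (!d)
                return -ENOMEM;
            kref_init(&d->refcount);
            d->type = RESET_DOMAIN_XGMI_HIVE;
            hive->reset_domain = d;
        } else {
            /* Device already sits in a hive-wide domain: share it. */
            kref_get(&adev->reset_domain->refcount);
            hive->reset_domain = adev->reset_domain;
        }
        return 0;
    }

    /* Teardown mirrors amdgpu_xgmi_hive_release(): drop the shared reference. */
    static void hive_release_reset_domain(struct xgmi_hive *hive)
    {
        kref_put(&hive->reset_domain->refcount, reset_domain_release);
        hive->reset_domain = NULL;
    }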
|
D | amdgpu_device.c |
      379  if (down_read_trylock(&adev->reset_domain->sem))   in amdgpu_device_skip_hw_access()
      380  up_read(&adev->reset_domain->sem);   in amdgpu_device_skip_hw_access()
      382  lockdep_assert_held(&adev->reset_domain->sem);   in amdgpu_device_skip_hw_access()
      408  down_read_trylock(&adev->reset_domain->sem)) {   in amdgpu_device_rreg()
      410  up_read(&adev->reset_domain->sem);   in amdgpu_device_rreg()
      492  down_read_trylock(&adev->reset_domain->sem)) {   in amdgpu_device_wreg()
      494  up_read(&adev->reset_domain->sem);   in amdgpu_device_wreg()
      2280 timeout, adev->reset_domain->wq,   in amdgpu_device_init_schedulers()
      2429 if (!hive->reset_domain ||   in amdgpu_device_ip_init()
      2430 !amdgpu_reset_get_reset_domain(hive->reset_domain)) {   in amdgpu_device_ip_init()
      [all …]
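The amdgpu_device.c hits are the reader side of the same rw_semaphore: the register accessors (and the TLB-flush, KFD and RAS-EEPROM callers listed elsewhere in this table) take the domain lock with down_read_trylock(), so ordinary hardware access backs off or falls back while a reset holds the lock for writing; amdgpu_device_skip_hw_access() uses the same trylock plus lockdep_assert_held() as a sanity check that any caller who cannot take the lock is the reset path that already owns it. A hedged sketch of the reader-side guard with hypothetical names:

    #include <linux/io.h>
    #include <linux/rwsem.h>
    #include <linux/types.h>

    /* Hypothetical MMIO read guarded by the reset-domain semaphore: readers
     * use the non-blocking trylock so they back off while a reset (which
     * holds the lock for writing) is reprogramming the hardware. */
    static u32 guarded_mmio_read(struct rw_semaphore *reset_sem,
                                 void __iomem *mmio, u32 byte_offset)
    {
        u32 val = 0;

        if (down_read_trylock(reset_sem)) {
            val = readl(mmio + byte_offset);   /* no reset in progress */
            up_read(reset_sem);
        }
        /* else: a reset owns the semaphore; skip or defer the access. */

        return val;
    }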
|
D | amdgpu_xgmi.h | 45 struct amdgpu_reset_domain *reset_domain; member
|
D | amdgpu_debugfs.c |
      1666 r = down_write_killable(&adev->reset_domain->sem);   in amdgpu_debugfs_test_ib_show()
      1695 up_write(&adev->reset_domain->sem);   in amdgpu_debugfs_test_ib_show()
      1925 r = down_read_killable(&adev->reset_domain->sem);   in amdgpu_debugfs_ib_preempt()
      1966 up_read(&adev->reset_domain->sem);   in amdgpu_debugfs_ib_preempt()
      2027 ret = down_read_killable(&adev->reset_domain->sem);   in amdgpu_reset_dump_register_list_read()
      2033 up_read(&adev->reset_domain->sem);   in amdgpu_reset_dump_register_list_read()
      2038 ret = down_read_killable(&adev->reset_domain->sem);   in amdgpu_reset_dump_register_list_read()
      2043 up_read(&adev->reset_domain->sem);   in amdgpu_reset_dump_register_list_read()
      2085 ret = down_write_killable(&adev->reset_domain->sem);   in amdgpu_reset_dump_register_list_write()
      2092 up_write(&adev->reset_domain->sem);   in amdgpu_reset_dump_register_list_write()
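The debugfs paths take the same semaphore, but with the killable variants: unlike the trylock on the fast register path, these calls sleep until no reset is running and return -EINTR if the waiting process receives a fatal signal, so every caller has to check the return value. A short sketch of that pattern with a hypothetical body:

    #include <linux/rwsem.h>

    static int debugfs_op_under_reset_lock(struct rw_semaphore *reset_sem)
    {
        int r;

        /* Sleep until no reset holds the lock, but give up if the calling
         * process is killed while waiting. */
        r = down_read_killable(reset_sem);
        if (r)
            return r;   /* -EINTR */

        /* ... hypothetical debugfs work that must not race a reset ... */

        up_read(reset_sem);
        return 0;
    }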
|
D | amdgpu_amdkfd_arcturus.c |
      321  if (!down_read_trylock(&adev->reset_domain->sem))   in set_barrier_auto_waitcnt()
      339  up_read(&adev->reset_domain->sem);   in set_barrier_auto_waitcnt()
|
D | gmc_v9_0.c |
      849  down_read_trylock(&adev->reset_domain->sem)) {   in gmc_v9_0_flush_gpu_tlb()
      855  up_read(&adev->reset_domain->sem);   in gmc_v9_0_flush_gpu_tlb()
      962  if (ring->sched.ready && down_read_trylock(&adev->reset_domain->sem)) {   in gmc_v9_0_flush_gpu_tlb_pasid()
      996  up_read(&adev->reset_domain->sem);   in gmc_v9_0_flush_gpu_tlb_pasid()
      1005 up_read(&adev->reset_domain->sem);   in gmc_v9_0_flush_gpu_tlb_pasid()
      1008 up_read(&adev->reset_domain->sem);   in gmc_v9_0_flush_gpu_tlb_pasid()
|
D | amdgpu_fence.c |
      956  if (amdgpu_reset_domain_schedule(adev->reset_domain, &adev->reset_work))   in gpu_recover_get()
      959  *val = atomic_read(&adev->reset_domain->reset_res);   in gpu_recover_get()
|
D | mxgpu_vi.c | 564 WARN_ONCE(!amdgpu_reset_domain_schedule(adev->reset_domain, in xgpu_vi_mailbox_rcv_irq()
|
D | gmc_v10_0.c |
      341  down_read_trylock(&adev->reset_domain->sem)) {   in gmc_v10_0_flush_gpu_tlb()
      351  up_read(&adev->reset_domain->sem);   in gmc_v10_0_flush_gpu_tlb()
|
D | amdgpu_amdkfd.c | 262 amdgpu_reset_domain_schedule(adev->reset_domain, in amdgpu_amdkfd_gpu_reset()
|
D | amdgpu.h | 1064 struct amdgpu_reset_domain *reset_domain; member
|
D | amdgpu_ras.c | 3172 amdgpu_reset_domain_schedule(ras->adev->reset_domain, &ras->recovery_work); in amdgpu_ras_reset_gpu()
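The remaining amdgpu hits (amdgpu_fence.c, amdgpu_amdkfd.c, amdgpu_ras.c, and the mailbox IRQ handlers above) all funnel their recovery work through amdgpu_reset_domain_schedule(), which queues the work item on the domain's single-threaded workqueue; since that queue has one worker, resets requested by different sources inside the same domain run one at a time. A minimal sketch of that idea with generic names, reusing only the workqueue member:

    #include <linux/types.h>
    #include <linux/workqueue.h>

    /* One single-threaded workqueue per reset domain: work queued here runs
     * strictly one item at a time, serialising concurrent reset requests. */
    struct reset_domain {
        struct workqueue_struct *wq;
    };

    static bool reset_domain_schedule(struct reset_domain *domain,
                                      struct work_struct *work)
    {
        /* false means the work was already pending, i.e. a reset for this
         * domain has been requested and has not run yet. */
        return queue_work(domain->wq, work);
    }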
|
/linux-6.6.21/drivers/gpu/drm/i915/gt/ |
D | intel_engine_types.h | 372 u32 reset_domain; member
|
D | intel_engine_cs.c |
      399  u32 reset_domain;   in get_reset_domain() local
      433  reset_domain = engine_reset_domains[id];   in get_reset_domain()
      444  reset_domain = engine_reset_domains[id];   in get_reset_domain()
      447  return reset_domain;   in get_reset_domain()
      485  engine->reset_domain = get_reset_domain(GRAPHICS_VER(gt->i915),   in intel_engine_setup()
|
D | intel_reset.c |
      334  hw_mask |= engine->reset_domain;   in __gen6_reset_engines()
      535  reset_mask |= engine->reset_domain;   in __gen11_reset_engines()
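In i915 the name means something different: each engine carries a reset_domain bit mask (u32), looked up once in intel_engine_setup() via get_reset_domain() from the graphics version and engine id, and the engine-reset paths in intel_reset.c OR the masks of the engines to be reset into the value written to the hardware reset register. A hedged sketch of that accumulation step with illustrative types and bit values (not i915's actual tables):

    #include <linux/bits.h>
    #include <linux/types.h>

    struct engine {
        u32 reset_domain;   /* this engine's bit(s) in the HW reset register */
    };

    /* Example: struct engine rcs0 = { .reset_domain = BIT(0) };  (illustrative) */

    /* Build the mask handed to the hardware: one pass over the engines that
     * should be reset, OR-ing their per-engine domains together, roughly as
     * the __gen6_reset_engines()/__gen11_reset_engines() hits do. */
    static u32 build_reset_mask(const struct engine *engines, unsigned int count,
                                unsigned long engine_mask)
    {
        u32 hw_mask = 0;
        unsigned int i;

        for (i = 0; i < count; i++)
            if (engine_mask & BIT(i))
                hw_mask |= engines[i].reset_domain;

        return hw_mask;
    }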
|