Searched refs:hive (Results 1 – 10 of 10) sorted by relevance
206 struct amdgpu_hive_info *hive = container_of( in amdgpu_xgmi_show_attrs() local
210 return snprintf(buf, PAGE_SIZE, "%llu\n", hive->hive_id); in amdgpu_xgmi_show_attrs()
217 struct amdgpu_hive_info *hive = container_of( in amdgpu_xgmi_hive_release() local
220 amdgpu_reset_put_reset_domain(hive->reset_domain); in amdgpu_xgmi_hive_release()
221 hive->reset_domain = NULL; in amdgpu_xgmi_hive_release()
223 mutex_destroy(&hive->hive_lock); in amdgpu_xgmi_hive_release()
224 kfree(hive); in amdgpu_xgmi_hive_release()
286 struct amdgpu_hive_info *hive) in amdgpu_xgmi_sysfs_add_dev_info() argument
305 if (hive->kobj.parent != (&adev->dev->kobj)) { in amdgpu_xgmi_sysfs_add_dev_info()
306 ret = sysfs_create_link(&adev->dev->kobj, &hive->kobj, in amdgpu_xgmi_sysfs_add_dev_info()
[all …]
57 void amdgpu_put_xgmi_hive(struct amdgpu_hive_info *hive);
58 int amdgpu_xgmi_update_topology(struct amdgpu_hive_info *hive, struct amdgpu_device *adev);
161 reset_context->hive == NULL) { in aldebaran_mode2_perform_reset()
343 reset_context->hive == NULL) { in aldebaran_mode2_restore_hwcontext()
365 if (reset_context->hive && in aldebaran_mode2_restore_hwcontext()
367 r = amdgpu_xgmi_update_topology(reset_context->hive, in aldebaran_mode2_restore_hwcontext()
2466 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_ip_init() local
2468 if (WARN_ON(!hive)) { in amdgpu_device_ip_init()
2473 if (!hive->reset_domain || in amdgpu_device_ip_init()
2474 !amdgpu_reset_get_reset_domain(hive->reset_domain)) { in amdgpu_device_ip_init()
2476 amdgpu_put_xgmi_hive(hive); in amdgpu_device_ip_init()
2482 adev->reset_domain = hive->reset_domain; in amdgpu_device_ip_init()
2483 amdgpu_put_xgmi_hive(hive); in amdgpu_device_ip_init()
3371 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_xgmi_reset_func() local
3374 if (WARN_ON(!hive)) in amdgpu_device_xgmi_reset_func()
3385 task_barrier_enter(&hive->tb); in amdgpu_device_xgmi_reset_func()
[all …]
40 struct amdgpu_hive_info *hive; member
2239 struct amdgpu_hive_info *hive; in amdgpu_pci_remove() local
2241 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_pci_remove()
2242 if (hive->device_remove_count == 0) in amdgpu_pci_remove()
2244 hive->device_remove_count++; in amdgpu_pci_remove()
2245 amdgpu_put_xgmi_hive(hive); in amdgpu_pci_remove()
1926 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_ras_do_recovery() local
1929 if (hive && adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_ras_do_recovery()
1930 device_list_handle = &hive->device_list; in amdgpu_ras_do_recovery()
1943 amdgpu_put_xgmi_hive(hive); in amdgpu_ras_do_recovery()
1291 struct amdgpu_hive_info *hive; in psp_xgmi_reflect_topology_info() local
1297 hive = amdgpu_get_xgmi_hive(psp->adev); in psp_xgmi_reflect_topology_info()
1298 list_for_each_entry(mirror_adev, &hive->device_list, gmc.xgmi.head) { in psp_xgmi_reflect_topology_info()
1325 amdgpu_put_xgmi_hive(hive); in psp_xgmi_reflect_topology_info()
766 struct amdgpu_hive_info *hive; member
40 ((adev)->hive ? (void *)(adev)->hive : (void *)(adev))
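
Taken together, the hits above follow one recurring pattern: amdgpu_get_xgmi_hive() returns a reference-counted struct amdgpu_hive_info (or NULL when the device is not part of an XGMI hive), the caller inspects hive->device_list, hive->reset_domain or hive->hive_id while holding that reference, and every successful get is balanced by amdgpu_put_xgmi_hive(). The sketch below restates that pattern in isolation; the helper xgmi_walk_hive_example() is hypothetical, and the includes simply assume the usual amdgpu driver headers that declare these symbols.

/*
 * Illustrative sketch only: mirrors the get/iterate/put pattern seen in
 * amdgpu_ras_do_recovery() and psp_xgmi_reflect_topology_info() above.
 * Not part of the driver; assumes the amdgpu headers that declare
 * amdgpu_get_xgmi_hive(), amdgpu_put_xgmi_hive() and struct amdgpu_hive_info.
 */
#include "amdgpu.h"
#include "amdgpu_xgmi.h"

static void xgmi_walk_hive_example(struct amdgpu_device *adev)
{
	struct amdgpu_hive_info *hive;
	struct amdgpu_device *tmp_adev;

	/* Takes a reference on the hive; may return NULL when the device
	 * is not (or not yet) part of an XGMI hive. */
	hive = amdgpu_get_xgmi_hive(adev);
	if (!hive)
		return;

	/* Only multi-node configurations have other hive members to visit. */
	if (adev->gmc.xgmi.num_physical_nodes > 1) {
		list_for_each_entry(tmp_adev, &hive->device_list,
				    gmc.xgmi.head) {
			/* Per-device work would go here. */
			dev_info(tmp_adev->dev, "member of hive %llu\n",
				 hive->hive_id);
		}
	}

	/* Every successful get must be balanced by a put. */
	amdgpu_put_xgmi_hive(hive);
}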