Lines matching references to hive in the amdgpu RAS code:
2587 struct amdgpu_hive_info *hive, bool status) in amdgpu_ras_set_fed_all() argument
2591 if (hive) { in amdgpu_ras_set_fed_all()
2592 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) in amdgpu_ras_set_fed_all()
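The matches at 2587-2592 show amdgpu_ras_set_fed_all() taking an optional hive pointer and walking the hive's device list. A minimal sketch of that pattern follows; the per-device setter amdgpu_ras_set_fed() is assumed from the function's own name and may differ in the actual source.

/* Sketch reconstructed from the lines above: when the device is part of an
 * XGMI hive, propagate the "fatal error detected" status to every device on
 * the hive; otherwise only touch the local device.
 */
static void amdgpu_ras_set_fed_all(struct amdgpu_device *adev,
                                   struct amdgpu_hive_info *hive, bool status)
{
        struct amdgpu_device *tmp_adev;

        if (hive) {
                /* gmc.xgmi.head links each amdgpu_device into hive->device_list */
                list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)
                        amdgpu_ras_set_fed(tmp_adev, status);
        } else {
                amdgpu_ras_set_fed(adev, status);
        }
}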
2601 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_ras_in_recovery() local
2605 if (hive) { in amdgpu_ras_in_recovery()
2606 hive_ras_recovery = atomic_read(&hive->ras_recovery); in amdgpu_ras_in_recovery()
2607 amdgpu_put_xgmi_hive(hive); in amdgpu_ras_in_recovery()
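Lines 2601-2607 show the take/read/put pattern on the hive reference: amdgpu_get_xgmi_hive() returns a refcounted pointer that must be released with amdgpu_put_xgmi_hive(). A hedged sketch is below; combining the hive flag with a device-local ras->in_recovery counter is an assumption based on the surrounding RAS code.

/* Sketch of the reference pattern at 2601-2607: take the hive reference,
 * sample hive->ras_recovery, and drop the reference before deciding.
 */
bool amdgpu_ras_in_recovery(struct amdgpu_device *adev)
{
        struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);
        struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);
        int hive_ras_recovery = 0;

        if (hive) {
                hive_ras_recovery = atomic_read(&hive->ras_recovery);
                amdgpu_put_xgmi_hive(hive);     /* drop the reference taken above */
        }

        if (ras && (atomic_read(&ras->in_recovery) || hive_ras_recovery))
                return true;

        return false;
}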
2631 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_ras_do_recovery() local
2634 if (hive) { in amdgpu_ras_do_recovery()
2635 atomic_set(&hive->ras_recovery, 1); in amdgpu_ras_do_recovery()
2642 list_for_each_entry(remote_adev, &hive->device_list, in amdgpu_ras_do_recovery()
2645 amdgpu_ras_set_fed_all(adev, hive, true); in amdgpu_ras_do_recovery()
2652 if (hive && adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_ras_do_recovery()
2653 device_list_handle = &hive->device_list; in amdgpu_ras_do_recovery()
2703 if (hive) { in amdgpu_ras_do_recovery()
2704 atomic_set(&hive->ras_recovery, 0); in amdgpu_ras_do_recovery()
2705 amdgpu_put_xgmi_hive(hive); in amdgpu_ras_do_recovery()
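Lines 2631-2705 outline how the recovery worker brackets the whole reset with hive->ras_recovery and widens the reset scope to the hive's device list when more than one XGMI node is present. A condensed sketch of just the hive handling is below; the actual page retirement and reset steps are elided, and the helper amdgpu_ras_get_fed_status() is an assumption.

/* Condensed sketch of the hive handling in amdgpu_ras_do_recovery(). */
static void amdgpu_ras_do_recovery(struct work_struct *work)
{
        struct amdgpu_ras *ras = container_of(work, struct amdgpu_ras, recovery_work);
        struct amdgpu_device *adev = ras->adev;
        struct amdgpu_device *remote_adev = NULL;
        struct list_head device_list, *device_list_handle = NULL;
        struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);

        if (hive) {
                /* mark hive-wide recovery in progress (read back in
                 * amdgpu_ras_in_recovery() and amdgpu_ras_reset_gpu())
                 */
                atomic_set(&hive->ras_recovery, 1);

                /* lines 2642/2645: if any node already reports a fatal error,
                 * mark the whole hive
                 */
                list_for_each_entry(remote_adev, &hive->device_list,
                                    gmc.xgmi.head) {
                        if (amdgpu_ras_get_fed_status(remote_adev)) {
                                amdgpu_ras_set_fed_all(adev, hive, true);
                                break;
                        }
                }
        }

        /* line 2652: reset every device on the hive when it has more than one
         * physical node, otherwise only the local device
         */
        if (hive && adev->gmc.xgmi.num_physical_nodes > 1) {
                device_list_handle = &hive->device_list;
        } else {
                INIT_LIST_HEAD(&device_list);
                list_add_tail(&adev->gmc.xgmi.head, &device_list);
                device_list_handle = &device_list;
        }

        /* ... page retirement and GPU reset over device_list_handle elided ... */

        if (hive) {
                atomic_set(&hive->ras_recovery, 0);
                amdgpu_put_xgmi_hive(hive);     /* balance amdgpu_get_xgmi_hive() */
        }
}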
3822 struct amdgpu_hive_info *hive; in amdgpu_ras_event_mgr_init() local
3827 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_ras_event_mgr_init()
3828 ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr; in amdgpu_ras_event_mgr_init()
3832 if (!hive || adev->gmc.xgmi.node_id == 0) in amdgpu_ras_event_mgr_init()
3836 if (hive) in amdgpu_ras_event_mgr_init()
3837 amdgpu_put_xgmi_hive(hive); in amdgpu_ras_event_mgr_init()
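Lines 3822-3837 show the RAS event manager being shared through the hive when one exists: every node points ras->event_mgr at hive->event_mgr, but only one node (node_id == 0, or a device with no hive) initializes it, and the temporary hive reference is always released. A sketch, with the one-time initialization reduced to a comment:

/* Sketch of the hive usage in amdgpu_ras_event_mgr_init(): share one event
 * manager per hive, seed it exactly once, and drop the hive reference.
 */
static void amdgpu_ras_event_mgr_init(struct amdgpu_device *adev)
{
        struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);
        struct amdgpu_hive_info *hive;

        hive = amdgpu_get_xgmi_hive(adev);
        /* hive members share hive->event_mgr; a standalone device falls back
         * to its private ras->__event_mgr
         */
        ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr;

        /* only one device seeds the shared state: node 0, or the lone device
         * when there is no hive
         */
        if (!hive || adev->gmc.xgmi.node_id == 0) {
                /* ... one-time initialization of ras->event_mgr elided ... */
        }

        if (hive)
                amdgpu_put_xgmi_hive(hive);
}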
4644 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_ras_reset_gpu() local
4647 if (hive) { in amdgpu_ras_reset_gpu()
4648 hive_ras_recovery = atomic_read(&hive->ras_recovery); in amdgpu_ras_reset_gpu()
4649 amdgpu_put_xgmi_hive(hive); in amdgpu_ras_reset_gpu()
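Lines 4644-4649 repeat the take/read/put pattern from amdgpu_ras_in_recovery(): amdgpu_ras_reset_gpu() samples hive->ras_recovery so a node does not schedule another reset while a hive-wide recovery is already running. A hedged sketch follows; the exact guard and the amdgpu_reset_domain_schedule() call are assumptions based on the rest of the RAS recovery code.

/* Sketch of the guard in amdgpu_ras_reset_gpu(): if another node has already
 * started a hive-wide recovery (hive->ras_recovery set in
 * amdgpu_ras_do_recovery()), skip queueing a second one.
 */
int amdgpu_ras_reset_gpu(struct amdgpu_device *adev)
{
        struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);
        struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);
        int hive_ras_recovery = 0;

        if (hive) {
                hive_ras_recovery = atomic_read(&hive->ras_recovery);
                amdgpu_put_xgmi_hive(hive);
        }

        /* only the first caller flips in_recovery and queues the worker */
        if (!hive_ras_recovery &&
            atomic_cmpxchg(&ras->in_recovery, 0, 1) == 0)
                amdgpu_reset_domain_schedule(ras->adev->reset_domain,
                                             &ras->recovery_work);

        return 0;
}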