Lines Matching refs:hive

3029 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_ip_init() local
3031 if (WARN_ON(!hive)) { in amdgpu_device_ip_init()
3036 if (!hive->reset_domain || in amdgpu_device_ip_init()
3037 !amdgpu_reset_get_reset_domain(hive->reset_domain)) { in amdgpu_device_ip_init()
3039 amdgpu_put_xgmi_hive(hive); in amdgpu_device_ip_init()
3045 adev->reset_domain = hive->reset_domain; in amdgpu_device_ip_init()
3046 amdgpu_put_xgmi_hive(hive); in amdgpu_device_ip_init()
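
The hits at 3029-3046 are the point where an XGMI node, during IP init, adopts the reset domain shared by its hive: look up the hive, take a reference on hive->reset_domain, then drop the hive lookup reference. Below is a minimal sketch of that pattern; the helper name is hypothetical and the error handling is simplified (in the driver this logic sits inline in amdgpu_device_ip_init() with its own goto labels), so treat it as an illustration rather than a drop-in.

#include "amdgpu.h"
#include "amdgpu_xgmi.h"
#include "amdgpu_reset.h"

/* Hypothetical helper condensing lines 3029-3046. */
static int example_adopt_hive_reset_domain(struct amdgpu_device *adev)
{
	struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);

	/* An XGMI node reaching this point without a hive is a bug. */
	if (WARN_ON(!hive))
		return -ENOENT;

	if (!hive->reset_domain ||
	    !amdgpu_reset_get_reset_domain(hive->reset_domain)) {
		/* hive exists but has no usable reset domain to share */
		amdgpu_put_xgmi_hive(hive);
		return -ENOENT;
	}

	/* The device now shares the hive-wide reset domain. */
	adev->reset_domain = hive->reset_domain;
	amdgpu_put_xgmi_hive(hive);
	return 0;
}

Handing every node the same reset_domain is what lets a hang on one XGMI device be recovered as a single hive-wide reset rather than per-device resets.
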
3977 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_xgmi_reset_func() local
3980 if (WARN_ON(!hive)) in amdgpu_device_xgmi_reset_func()
3991 task_barrier_enter(&hive->tb); in amdgpu_device_xgmi_reset_func()
3997 task_barrier_exit(&hive->tb); in amdgpu_device_xgmi_reset_func()
4006 task_barrier_full(&hive->tb); in amdgpu_device_xgmi_reset_func()
4014 amdgpu_put_xgmi_hive(hive); in amdgpu_device_xgmi_reset_func()
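
The 3977-4014 hits are the per-device XGMI reset work, which uses the hive's task barrier so that all nodes step through the reset in lock-step. A condensed sketch follows; the BACO enter/exit branch and the simplified error handling are assumptions reconstructed around the barrier calls shown in the listing, and the function name is hypothetical.

#include "amdgpu.h"
#include "amdgpu_xgmi.h"

/* Hypothetical sketch of the barrier choreography from lines 3977-4014. */
static void example_xgmi_reset_func(struct amdgpu_device *adev)
{
	struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);

	/* Every node running this work must already belong to a hive. */
	if (WARN_ON(!hive))
		return;

	if (amdgpu_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) {
		/* all nodes enter BACO together ... */
		task_barrier_enter(&hive->tb);
		adev->asic_reset_res = amdgpu_device_baco_enter(adev_to_drm(adev));

		/* ... and leave BACO together */
		task_barrier_exit(&hive->tb);
		if (!adev->asic_reset_res)
			adev->asic_reset_res = amdgpu_device_baco_exit(adev_to_drm(adev));
	} else {
		/* full barrier: no node starts its reset until all have arrived */
		task_barrier_full(&hive->tb);
		adev->asic_reset_res = amdgpu_asic_reset(adev);
	}

	amdgpu_put_xgmi_hive(hive);
}
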
5226 struct amdgpu_hive_info *hive = NULL; in amdgpu_device_reset_sriov() local
5262 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_reset_sriov()
5264 if (hive && adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_reset_sriov()
5265 r = amdgpu_xgmi_update_topology(hive, adev); in amdgpu_device_reset_sriov()
5266 if (hive) in amdgpu_device_reset_sriov()
5267 amdgpu_put_xgmi_hive(hive); in amdgpu_device_reset_sriov()
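
In the SR-IOV reset path (5226-5267) the hive is only needed at the end, to refresh the XGMI topology once the VF has been re-initialized; a NULL hive is tolerated because a single-node VF has no hive at all. A short sketch, with a hypothetical helper name and the FLR/reinit steps in between omitted:

#include "amdgpu.h"
#include "amdgpu_xgmi.h"

/* Hypothetical helper condensing the topology refresh from lines 5226-5267. */
static int example_sriov_refresh_xgmi_topology(struct amdgpu_device *adev)
{
	struct amdgpu_hive_info *hive;
	int r = 0;

	hive = amdgpu_get_xgmi_hive(adev);
	if (hive && adev->gmc.xgmi.num_physical_nodes > 1)
		r = amdgpu_xgmi_update_topology(hive, adev);
	if (hive)
		amdgpu_put_xgmi_hive(hive);

	return r;
}
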
5591 if (!reset_context->hive && in amdgpu_device_reinit_after_reset()
5620 if (reset_context->hive && in amdgpu_device_reinit_after_reset()
5623 reset_context->hive, tmp_adev); in amdgpu_device_reinit_after_reset()
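
In amdgpu_device_reinit_after_reset() (5591-5623), whether reset_context->hive was populated decides how each device's XGMI state is refreshed after the reset. The conditions are truncated in the listing; the sketch below assumes the elided check is the usual multi-node test seen elsewhere in this listing and that the two-argument call truncated at 5623 is amdgpu_xgmi_update_topology(), whose signature matches. The helper name and the add-device branch are assumptions as well.

#include "amdgpu.h"
#include "amdgpu_xgmi.h"
#include "amdgpu_reset.h"

/* Hedged sketch: the elided conditions and the callees are assumptions,
 * not taken verbatim from the listing. */
static int example_refresh_xgmi_after_reset(struct amdgpu_device *tmp_adev,
					    struct amdgpu_reset_context *reset_context)
{
	int r = 0;

	if (!reset_context->hive &&
	    tmp_adev->gmc.xgmi.num_physical_nodes > 1)
		/* reset ran without a hive reference: re-register this node */
		amdgpu_xgmi_add_device(tmp_adev);

	if (reset_context->hive &&
	    tmp_adev->gmc.xgmi.num_physical_nodes > 1)
		/* assumed to be the call truncated at line 5623 */
		r = amdgpu_xgmi_update_topology(reset_context->hive, tmp_adev);

	return r;
}
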
5867 struct amdgpu_hive_info *hive = NULL; in amdgpu_device_gpu_recover() local
5907 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_gpu_recover()
5908 if (hive) in amdgpu_device_gpu_recover()
5909 mutex_lock(&hive->hive_lock); in amdgpu_device_gpu_recover()
5912 reset_context->hive = hive; in amdgpu_device_gpu_recover()
5919 if (!amdgpu_sriov_vf(adev) && (adev->gmc.xgmi.num_physical_nodes > 1) && hive) { in amdgpu_device_gpu_recover()
5920 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) { in amdgpu_device_gpu_recover()
6117 if (hive) { in amdgpu_device_gpu_recover()
6118 mutex_unlock(&hive->hive_lock); in amdgpu_device_gpu_recover()
6119 amdgpu_put_xgmi_hive(hive); in amdgpu_device_gpu_recover()
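
The 5867-6119 hits show the hive's role in the bare-metal recovery path: the hive lock is taken for the whole recovery, the hive is stashed in the reset context, the device list is walked to gather every node that must be reset, and lock plus reference are released only at the very end. A sketch of that scope, with a hypothetical helper name and the actual reset/re-init/resubmission work omitted:

#include "amdgpu.h"
#include "amdgpu_xgmi.h"
#include "amdgpu_reset.h"

/* Hypothetical sketch of the hive handling from lines 5867-6119. */
static void example_gpu_recover_hive_scope(struct amdgpu_device *adev,
					   struct amdgpu_reset_context *reset_context)
{
	struct amdgpu_hive_info *hive = NULL;
	struct amdgpu_device *tmp_adev;

	/* Serialize recovery across the whole hive for the duration of the reset. */
	hive = amdgpu_get_xgmi_hive(adev);
	if (hive)
		mutex_lock(&hive->hive_lock);

	reset_context->hive = hive;

	/* On bare metal, a hang on one multi-node XGMI device pulls every
	 * device in the hive into the reset. */
	if (!amdgpu_sriov_vf(adev) &&
	    (adev->gmc.xgmi.num_physical_nodes > 1) && hive) {
		list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) {
			/* ... queue tmp_adev for reset ... */
		}
	}

	/* ... reset, re-init and job resubmission happen here ... */

	if (hive) {
		mutex_unlock(&hive->hive_lock);
		amdgpu_put_xgmi_hive(hive);
	}
}

Holding hive_lock across the entire recovery is what prevents two nodes of the same hive from kicking off overlapping hive-wide resets.
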