Lines matching refs: tmp_adev
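These hits cover every use of the tmp_adev iterator in the Aldebaran mode-2 reset code (drivers/gpu/drm/amd/amdgpu/aldebaran.c): the first group walks the reset itself in aldebaran_mode2_perform_reset(), the second the post-reset hardware-context restore in aldebaran_mode2_restore_hwcontext(). Only lines containing tmp_adev are shown, so surrounding statements (format strings, if-conditions, function arguments on other lines) are missing from the snippets.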
157 struct amdgpu_device *tmp_adev = NULL; in aldebaran_mode2_perform_reset() local
171 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
172 mutex_lock(&tmp_adev->reset_cntl->reset_lock); in aldebaran_mode2_perform_reset()
173 tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2; in aldebaran_mode2_perform_reset()
179 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
181 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in aldebaran_mode2_perform_reset()
183 &tmp_adev->reset_cntl->reset_work)) in aldebaran_mode2_perform_reset()
186 r = aldebaran_mode2_reset(tmp_adev); in aldebaran_mode2_perform_reset()
188 dev_err(tmp_adev->dev, in aldebaran_mode2_perform_reset()
190 r, adev_to_drm(tmp_adev)->unique); in aldebaran_mode2_perform_reset()
197 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
198 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in aldebaran_mode2_perform_reset()
199 flush_work(&tmp_adev->reset_cntl->reset_work); in aldebaran_mode2_perform_reset()
200 r = tmp_adev->asic_reset_res; in aldebaran_mode2_perform_reset()
207 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
208 mutex_unlock(&tmp_adev->reset_cntl->reset_lock); in aldebaran_mode2_perform_reset()
209 tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE; in aldebaran_mode2_perform_reset()
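Read together, these hits trace four phases in aldebaran_mode2_perform_reset(): take every device's reset lock and mark a mode-2 reset in flight, launch the resets (in parallel through the per-device reset_work for multi-node XGMI hives, synchronously otherwise), join the queued work and harvest results, then unlock. A condensed sketch of that shape follows; the function name is mine, the early-exit and error unwinding of the real function are simplified, and the log text is taken from the hits above. The fields reset_lock, reset_work, active_reset and asic_reset_res come straight from the listing; the unbound-workqueue dispatch follows the usual upstream pattern but treat its exact error handling as illustrative.

/*
 * Sketch of the four-phase mode-2 reset walk, reconstructed from the
 * hits above; not the verbatim upstream function.
 */
static int mode2_perform_reset_sketch(struct list_head *reset_device_list)
{
	struct amdgpu_device *tmp_adev = NULL;
	int r = 0;

	if (!reset_device_list)
		return -EINVAL;

	/* Phase 1: serialize - lock every device and mark mode-2 active. */
	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		mutex_lock(&tmp_adev->reset_cntl->reset_lock);
		tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;
	}

	/* Phase 2: launch. Nodes of a multi-node XGMI hive reset in
	 * parallel via their per-device reset_work; a single node
	 * resets synchronously.
	 */
	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
			if (!queue_work(system_unbound_wq,
					&tmp_adev->reset_cntl->reset_work))
				r = -EALREADY;
		} else {
			r = aldebaran_mode2_reset(tmp_adev);
		}
		if (r)
			dev_err(tmp_adev->dev,
				"ASIC reset failed with error, %d for drm dev, %s\n",
				r, adev_to_drm(tmp_adev)->unique);
	}

	/* Phase 3: join - wait for the queued resets and pick up each
	 * device's result from asic_reset_res.
	 */
	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
			flush_work(&tmp_adev->reset_cntl->reset_work);
			r = tmp_adev->asic_reset_res;
		}
	}

	/* Phase 4: release the locks and clear the active-reset marker. */
	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		mutex_unlock(&tmp_adev->reset_cntl->reset_lock);
		tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE;
	}

	return r;
}

Fanning out through the workqueue lets all nodes of an XGMI hive go through mode-2 reset concurrently rather than serially, keeping the recovery window for a large hive close to that of a single device; the later flush_work() pass is what turns that fan-out back into a single return code.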
347 struct amdgpu_device *tmp_adev = NULL; in aldebaran_mode2_restore_hwcontext() local
361 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_restore_hwcontext()
362 amdgpu_set_init_level(tmp_adev, in aldebaran_mode2_restore_hwcontext()
364 dev_info(tmp_adev->dev, in aldebaran_mode2_restore_hwcontext()
367 amdgpu_ras_clear_err_state(tmp_adev); in aldebaran_mode2_restore_hwcontext()
368 r = aldebaran_mode2_restore_ip(tmp_adev); in aldebaran_mode2_restore_hwcontext()
376 amdgpu_register_gpu_instance(tmp_adev); in aldebaran_mode2_restore_hwcontext()
379 con = amdgpu_ras_get_context(tmp_adev); in aldebaran_mode2_restore_hwcontext()
380 if (!amdgpu_sriov_vf(tmp_adev) && con) { in aldebaran_mode2_restore_hwcontext()
381 if (tmp_adev->sdma.ras && in aldebaran_mode2_restore_hwcontext()
382 tmp_adev->sdma.ras->ras_block.ras_late_init) { in aldebaran_mode2_restore_hwcontext()
383 r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev, in aldebaran_mode2_restore_hwcontext()
384 &tmp_adev->sdma.ras->ras_block.ras_comm); in aldebaran_mode2_restore_hwcontext()
386 dev_err(tmp_adev->dev, "SDMA failed to execute ras_late_init! ret:%d\n", r); in aldebaran_mode2_restore_hwcontext()
391 if (tmp_adev->gfx.ras && in aldebaran_mode2_restore_hwcontext()
392 tmp_adev->gfx.ras->ras_block.ras_late_init) { in aldebaran_mode2_restore_hwcontext()
393 r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev, in aldebaran_mode2_restore_hwcontext()
394 &tmp_adev->gfx.ras->ras_block.ras_comm); in aldebaran_mode2_restore_hwcontext()
396 dev_err(tmp_adev->dev, "GFX failed to execute ras_late_init! ret:%d\n", r); in aldebaran_mode2_restore_hwcontext()
402 amdgpu_ras_resume(tmp_adev); in aldebaran_mode2_restore_hwcontext()
406 tmp_adev->gmc.xgmi.num_physical_nodes > 1) in aldebaran_mode2_restore_hwcontext()
408 tmp_adev); in aldebaran_mode2_restore_hwcontext()
411 amdgpu_set_init_level(tmp_adev, in aldebaran_mode2_restore_hwcontext()
413 amdgpu_irq_gpu_reset_resume_helper(tmp_adev); in aldebaran_mode2_restore_hwcontext()
415 r = amdgpu_ib_ring_tests(tmp_adev); in aldebaran_mode2_restore_hwcontext()
417 dev_err(tmp_adev->dev, in aldebaran_mode2_restore_hwcontext()
420 tmp_adev->asic_reset_res = r; in aldebaran_mode2_restore_hwcontext()
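The restore side walks the same list once more per device: drop to a reduced init level, re-initialize the IP blocks via aldebaran_mode2_restore_ip(), re-run the SDMA and GFX ras_late_init hooks by hand (mode-2 skips the full late-init path) unless running as an SR-IOV guest, resume RAS, re-arm interrupts, and finally run IB ring tests, parking any failure in asic_reset_res. A condensed sketch under the same caveats: the function name is mine, the real error paths use goto-style unwinding, the XGMI hive topology refresh (hits at source lines 406-408) is elided, the init-level constants are as in recent kernels, and the log strings come from the hits above.

/*
 * Sketch of the per-device loop in aldebaran_mode2_restore_hwcontext();
 * simplified, not the verbatim upstream function.
 */
static int mode2_restore_hwcontext_sketch(struct list_head *reset_device_list)
{
	struct amdgpu_device *tmp_adev = NULL;
	struct amdgpu_ras *con;
	int r = 0;

	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		/* Re-init only the IPs that mode-2 actually tore down. */
		amdgpu_set_init_level(tmp_adev,
				      AMDGPU_INIT_LEVEL_RESET_RECOVERY);
		dev_info(tmp_adev->dev,
			 "GPU reset succeeded, trying to resume\n");
		amdgpu_ras_clear_err_state(tmp_adev);
		r = aldebaran_mode2_restore_ip(tmp_adev);
		if (r)
			break;

		amdgpu_register_gpu_instance(tmp_adev);

		/*
		 * Mode-2 skips the normal late-init phase, so the RAS
		 * hooks of the blocks that were reset (SDMA, GFX) are
		 * re-run by hand; SR-IOV guests leave RAS to the host.
		 */
		con = amdgpu_ras_get_context(tmp_adev);
		if (!amdgpu_sriov_vf(tmp_adev) && con) {
			if (tmp_adev->sdma.ras &&
			    tmp_adev->sdma.ras->ras_block.ras_late_init) {
				r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
					&tmp_adev->sdma.ras->ras_block.ras_comm);
				if (r) {
					dev_err(tmp_adev->dev,
						"SDMA failed to execute ras_late_init! ret:%d\n",
						r);
					break;
				}
			}

			if (tmp_adev->gfx.ras &&
			    tmp_adev->gfx.ras->ras_block.ras_late_init) {
				r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,
					&tmp_adev->gfx.ras->ras_block.ras_comm);
				if (r) {
					dev_err(tmp_adev->dev,
						"GFX failed to execute ras_late_init! ret:%d\n",
						r);
					break;
				}
			}

			amdgpu_ras_resume(tmp_adev);
		}

		/* XGMI hive topology refresh elided in this sketch. */

		amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT);
		amdgpu_irq_gpu_reset_resume_helper(tmp_adev);

		/* Prove the engines are alive before declaring success. */
		r = amdgpu_ib_ring_tests(tmp_adev);
		if (r) {
			dev_err(tmp_adev->dev,
				"ib ring test failed (%d).\n", r);
			tmp_adev->asic_reset_res = r;
			break;
		}
	}

	return r;
}

The IB ring tests at the tail act as the functional acceptance check: the device only leaves the loop with r == 0 if every ring can still execute an indirect buffer, and a failure is also stored in asic_reset_res so the generic recovery path can see it.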