Lines matching references to gmu (identifier cross-reference; drivers/gpu/drm/msm/adreno/a6xx_gpu.c)

25 	if (!adreno_has_gmu_wrapper(adreno_gpu) && !a6xx_gmu_isidle(&a6xx_gpu->gmu))  in _a6xx_check_idle()
496 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_set_hwcg() local
517 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GMU_CGC_MODE_CNTL, in a6xx_set_hwcg()
519 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GMU_CGC_DELAY_CNTL, in a6xx_set_hwcg()
521 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GMU_CGC_HYST_CNTL, in a6xx_set_hwcg()
551 gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0); in a6xx_set_hwcg()
558 gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 0, 1); in a6xx_set_hwcg()
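
The gmu_write()/gmu_rmw() calls above are thin MMIO accessors on the GMU register block. A minimal sketch of the usual shape of such helpers (assumed; the real ones live in the GMU header and may differ in detail):

	static inline void gmu_write(struct a6xx_gmu *gmu, u32 offset, u32 value)
	{
		/* GMU registers are addressed in dwords, hence the << 2 */
		writel(value, gmu->mmio + (offset << 2));
	}

	static inline void gmu_rmw(struct a6xx_gmu *gmu, u32 reg, u32 mask, u32 or)
	{
		u32 val = gmu_read(gmu, reg);

		val &= ~mask;	/* clear the masked bits ... */
		val |= or;	/* ... then OR in the new ones */

		gmu_write(gmu, reg, val);
	}

Read this way, the two SPTPRAC lines above are mirror images: the gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0) at line 551 clears bit 0, and the (0, 1) form at line 558 sets it again, bracketing the HWCG register update.
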
1041 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in hw_init() local
1048 ret = a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in hw_init()
1071 a6xx_sptprac_enable(gmu); in hw_init()
1191 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_SELECT_1, in hw_init()
1229 gmu_write(gmu, REG_A6XX_GPU_GMU_AO_GPU_CX_BUSY_MASK, 0xff000000); in hw_init()
1232 gmu_rmw(gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_SELECT_0, 0xff, BIT(5)); in hw_init()
1233 gmu_write(gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_ENABLE, 1); in hw_init()
1385 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in hw_init()
1387 if (a6xx_gpu->gmu.legacy) { in hw_init()
1389 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER); in hw_init()
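
In hw_init() the GMU_OOB_GPU_SET vote brackets the whole register-programming sequence: the out-of-band request tells the GMU to keep the GPU powered while the CPU writes GPU registers, and it is dropped again at the end, with an extra BOOT_SLUMBER clear on legacy GMU firmware. A condensed, hedged sketch of that bracketing, reconstructed from the lines above:

	/* Keep the GPU up while we program its registers */
	ret = a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
	if (ret)
		return ret;

	/* ... clock gating, power counters, ucode, ring setup ... */

	/* Done touching GPU registers: let the GMU manage power again */
	a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);

	if (a6xx_gpu->gmu.legacy) {
		/* Older GMU firmware also wants the boot/slumber vote dropped here */
		a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER);
	}
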
1401 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_hw_init()
1403 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_hw_init()
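
a6xx_hw_init() itself is essentially a locking wrapper: every path that talks to the GMU serializes on gmu.lock. A sketch of the shape implied by the two lines above:

	static int a6xx_hw_init(struct msm_gpu *gpu)
	{
		struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(to_adreno_gpu(gpu));
		int ret;

		/* All GMU traffic is serialized on gmu.lock */
		mutex_lock(&a6xx_gpu->gmu.lock);
		ret = hw_init(gpu);
		mutex_unlock(&a6xx_gpu->gmu.lock);

		return ret;
	}
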
1419 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_recover() local
1461 reinit_completion(&gmu->pd_gate); in a6xx_recover()
1462 dev_pm_genpd_add_notifier(gmu->cxpd, &gmu->pd_nb); in a6xx_recover()
1463 dev_pm_genpd_synced_poweroff(gmu->cxpd); in a6xx_recover()
1472 if (!wait_for_completion_timeout(&gmu->pd_gate, msecs_to_jiffies(1000))) in a6xx_recover()
1475 dev_pm_genpd_remove_notifier(gmu->cxpd); in a6xx_recover()
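
The recovery path wants the CX power domain to reach a real power-off before it resumes, so it arms a completion, registers a genpd power notifier on gmu->cxpd, requests a synced power-off, and waits with a one-second timeout; the notifier is removed again right after the wait, whether or not it timed out. The notifier callback itself is not part of this listing; a plausible sketch of it (assumed shape):

	static int cxpd_notifier_cb(struct notifier_block *nb,
				    unsigned long action, void *data)
	{
		struct a6xx_gmu *gmu = container_of(nb, struct a6xx_gmu, pd_nb);

		/* Wake up the waiter in a6xx_recover() once CX is actually off */
		if (action == GENPD_NOTIFY_OFF)
			complete_all(&gmu->pd_gate);

		return 0;
	}
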
1687 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1); in a6xx_fault_detect_irq()
2023 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_resume()
2025 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_resume()
2040 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_pm_resume() local
2049 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_pm_resume()
2061 pm_runtime_resume_and_get(gmu->dev); in a6xx_pm_resume()
2062 pm_runtime_resume_and_get(gmu->gxpd); in a6xx_pm_resume()
2069 a6xx_sptprac_enable(gmu); in a6xx_pm_resume()
2074 pm_runtime_put(gmu->gxpd); in a6xx_pm_resume()
2075 pm_runtime_put(gmu->dev); in a6xx_pm_resume()
2079 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_pm_resume()
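
On GMU-wrapper parts the resume path drives power directly from the CPU: under gmu.lock it runtime-resumes the GMU platform device (CX side) and the GX power domain, enables the GPU clocks, and switches SPTPRAC on, unwinding the runtime-PM references in reverse order if anything fails. A hedged sketch of that ordering; the function name and the clock-enable call are assumptions, not part of this listing:

	static int a6xx_pm_resume_wrapper_sketch(struct msm_gpu *gpu)
	{
		struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(to_adreno_gpu(gpu));
		struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
		int ret;

		mutex_lock(&a6xx_gpu->gmu.lock);

		/* Bring up CX (the GMU device) and the GX power domain first */
		pm_runtime_resume_and_get(gmu->dev);
		pm_runtime_resume_and_get(gmu->gxpd);

		ret = clk_bulk_prepare_enable(gpu->nr_clocks, gpu->grp_clks);	/* assumed */
		if (ret) {
			/* Unwind the runtime-PM references in reverse order */
			pm_runtime_put(gmu->gxpd);
			pm_runtime_put(gmu->dev);
			goto unlock;
		}

		/* Parts that control SPTPRAC from the CPU switch it on here */
		a6xx_sptprac_enable(gmu);

	unlock:
		mutex_unlock(&a6xx_gpu->gmu.lock);

		return ret;
	}
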
2099 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_suspend()
2101 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_suspend()
2118 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_pm_suspend() local
2125 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_pm_suspend()
2131 a6xx_sptprac_disable(gmu); in a6xx_pm_suspend()
2135 pm_runtime_put_sync(gmu->gxpd); in a6xx_pm_suspend()
2137 pm_runtime_put_sync(gmu->dev); in a6xx_pm_suspend()
2139 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_pm_suspend()
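
Suspend on the same wrapper parts is the mirror image, torn down in reverse order under the same lock; a short fragment of that sequence (clock disabling elided here, as it is in the listing):

	mutex_lock(&a6xx_gpu->gmu.lock);

	/* Reverse of resume: SPTPRAC off, then the GX domain, then the CX/GMU device */
	a6xx_sptprac_disable(gmu);

	pm_runtime_put_sync(gmu->gxpd);
	pm_runtime_put_sync(gmu->dev);

	mutex_unlock(&a6xx_gpu->gmu.lock);
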
2155 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gmu_get_timestamp()
2158 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET); in a6xx_gmu_get_timestamp()
2162 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET); in a6xx_gmu_get_timestamp()
2164 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gmu_get_timestamp()
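
Reading the GPU timestamp uses the same serialize-then-vote pattern: take gmu.lock, raise the GMU_OOB_PERFCOUNTER_SET vote so the GMU does not power the GPU down mid-read, sample the counter, then drop the vote and the lock. A condensed sketch (the actual counter read is not shown in this listing):

	mutex_lock(&a6xx_gpu->gmu.lock);

	/* Keep the GPU powered while the always-on counter is sampled */
	a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET);

	/* ... read the GPU always-on counter into *value ... */

	a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET);

	mutex_unlock(&a6xx_gpu->gmu.lock);
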
2216 busy_cycles = gmu_read64(&a6xx_gpu->gmu, in a6xx_gpu_busy()
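
a6xx_gpu_busy() reads back the 64-bit GMU power counter that hw_init() configured at lines 1229-1233 (bus-busy signal selected into slot 0 of POWER_COUNTER_SELECT_0, then the counter enabled). That counter ticks on the always-on XO clock, which on these SoCs runs at 19.2 MHz, so a cycle delta can be converted to busy time as in this sketch (prev_busy_cycles is a hypothetical name for the previously sampled value; the register pair passed to gmu_read64 is elided in the listing and left out here):

	/* busy_cycles is the 64-bit value returned by the gmu_read64() above.
	 * It counts 19.2 MHz always-on clock ticks while the bus is busy, so
	 * cycles * 10 / 192 gives busy time in microseconds.
	 */
	u64 busy_time = (busy_cycles - prev_busy_cycles) * 10;	/* prev_busy_cycles: hypothetical */

	do_div(busy_time, 192);
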
2229 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gpu_set_freq()
2231 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gpu_set_freq()
2469 mutex_init(&a6xx_gpu->gmu.lock); in a6xx_gpu_init()