Lines Matching +full:100 +full:ps

260 100,
294 100,
311 100,
486 100,
494 100
533 100,
550 100,
567 100,
974 100
1488 100
1518 100
1664 100
1713 i_leakage = div64_s64(drm_int2fixp(ileakage), 100); in si_calculate_leakage_for_v_and_t_formula()
1749 i_leakage = div64_s64(drm_int2fixp(ileakage), 100); in si_calculate_leakage_for_v_formula()
1789 (p_limit2 * (u32)100); in si_update_dte_from_pl2()
2048 wintime = (cac_window_size * 100) / xclk; in si_calculate_cac_wintime()
2069 max_tdp_limit = ((100 + 100) * rdev->pm.dpm.tdp_limit) / 100; in si_calculate_adjusted_tdp_limits()
2072 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; in si_calculate_adjusted_tdp_limits()
2075 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; in si_calculate_adjusted_tdp_limits()
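
The three si_calculate_adjusted_tdp_limits() hits above are plain integer percentage scaling: the board TDP limit is grown or shrunk by tdp_adjustment percent, and the "maximum" case uses a fixed +100% headroom. A minimal standalone sketch of that arithmetic (the function and variable names here are illustrative, not the driver's):

#include <stdint.h>
#include <stdio.h>

/* Percentage scaling as on lines 2069-2075: scale a TDP limit up or down
 * by adjustment_pct percent using integer math only. */
static uint32_t scale_tdp_limit(uint32_t tdp_limit, uint32_t adjustment_pct, int scale_up)
{
	if (scale_up)
		return ((100 + adjustment_pct) * tdp_limit) / 100;
	return ((100 - adjustment_pct) * tdp_limit) / 100;
}

int main(void)
{
	uint32_t limit = 1500;	/* hypothetical TDP limit in firmware units */

	printf("max  : %u\n", scale_tdp_limit(limit, 100, 1));	/* 3000 */
	printf("+20%% : %u\n", scale_tdp_limit(limit, 20, 1));	/* 1800 */
	printf("-20%% : %u\n", scale_tdp_limit(limit, 20, 0));	/* 1200 */
	return 0;
}
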
2124 …cpu_to_be32(si_scale_power_for_smc((near_tdp_limit * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, … in si_populate_smc_tdp_limits()
2172 …->pm.dpm.near_tdp_limit_adjusted * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, scaling_factor) * … in si_populate_smc_tdp_limits_2()
2276 min_sclk = (prev_sclk * (u32)max_ps_percent) / 100; in si_populate_power_containment_values()
2491 if (si_pi->powertune_data->lkge_lut_v0_percent > 100) in si_get_cac_std_voltage_max_min()
2494 v0_loadline = (*min) * (100 - si_pi->powertune_data->lkge_lut_v0_percent) / 100; in si_get_cac_std_voltage_max_min()
2597 u32 ticks_per_us = radeon_get_xclk(rdev) / 100; in si_initialize_smc_cac_tables()
2637 load_line_slope = ((u32)rdev->pm.dpm.load_line_slope << SMC_SISLANDS_SCALE_R) / 100; in si_initialize_smc_cac_tables()
2905 struct ni_ps *ps = ni_get_ps(rps); in si_apply_state_adjust_rules() local
2967 for (i = ps->performance_level_count - 2; i >= 0; i--) { in si_apply_state_adjust_rules()
2968 if (ps->performance_levels[i].vddc > ps->performance_levels[i+1].vddc) in si_apply_state_adjust_rules()
2969 ps->performance_levels[i].vddc = ps->performance_levels[i+1].vddc; in si_apply_state_adjust_rules()
2972 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
2973 if (ps->performance_levels[i].mclk > max_limits->mclk) in si_apply_state_adjust_rules()
2974 ps->performance_levels[i].mclk = max_limits->mclk; in si_apply_state_adjust_rules()
2975 if (ps->performance_levels[i].sclk > max_limits->sclk) in si_apply_state_adjust_rules()
2976 ps->performance_levels[i].sclk = max_limits->sclk; in si_apply_state_adjust_rules()
2977 if (ps->performance_levels[i].vddc > max_limits->vddc) in si_apply_state_adjust_rules()
2978 ps->performance_levels[i].vddc = max_limits->vddc; in si_apply_state_adjust_rules()
2979 if (ps->performance_levels[i].vddci > max_limits->vddci) in si_apply_state_adjust_rules()
2980 ps->performance_levels[i].vddci = max_limits->vddci; in si_apply_state_adjust_rules()
2992 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
2994 if (ps->performance_levels[i].sclk > max_sclk_vddc) in si_apply_state_adjust_rules()
2995 ps->performance_levels[i].sclk = max_sclk_vddc; in si_apply_state_adjust_rules()
2998 if (ps->performance_levels[i].mclk > max_mclk_vddci) in si_apply_state_adjust_rules()
2999 ps->performance_levels[i].mclk = max_mclk_vddci; in si_apply_state_adjust_rules()
3002 if (ps->performance_levels[i].mclk > max_mclk_vddc) in si_apply_state_adjust_rules()
3003 ps->performance_levels[i].mclk = max_mclk_vddc; in si_apply_state_adjust_rules()
3006 if (ps->performance_levels[i].mclk > max_mclk) in si_apply_state_adjust_rules()
3007 ps->performance_levels[i].mclk = max_mclk; in si_apply_state_adjust_rules()
3010 if (ps->performance_levels[i].sclk > max_sclk) in si_apply_state_adjust_rules()
3011 ps->performance_levels[i].sclk = max_sclk; in si_apply_state_adjust_rules()
3018 mclk = ps->performance_levels[ps->performance_level_count - 1].mclk; in si_apply_state_adjust_rules()
3019 vddci = ps->performance_levels[ps->performance_level_count - 1].vddci; in si_apply_state_adjust_rules()
3021 mclk = ps->performance_levels[0].mclk; in si_apply_state_adjust_rules()
3022 vddci = ps->performance_levels[0].vddci; in si_apply_state_adjust_rules()
3026 sclk = ps->performance_levels[ps->performance_level_count - 1].sclk; in si_apply_state_adjust_rules()
3027 vddc = ps->performance_levels[ps->performance_level_count - 1].vddc; in si_apply_state_adjust_rules()
3029 sclk = ps->performance_levels[0].sclk; in si_apply_state_adjust_rules()
3030 vddc = ps->performance_levels[0].vddc; in si_apply_state_adjust_rules()
3041 ps->performance_levels[0].sclk = sclk; in si_apply_state_adjust_rules()
3042 ps->performance_levels[0].mclk = mclk; in si_apply_state_adjust_rules()
3043 ps->performance_levels[0].vddc = vddc; in si_apply_state_adjust_rules()
3044 ps->performance_levels[0].vddci = vddci; in si_apply_state_adjust_rules()
3047 sclk = ps->performance_levels[0].sclk; in si_apply_state_adjust_rules()
3048 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3049 if (sclk < ps->performance_levels[i].sclk) in si_apply_state_adjust_rules()
3050 sclk = ps->performance_levels[i].sclk; in si_apply_state_adjust_rules()
3052 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3053 ps->performance_levels[i].sclk = sclk; in si_apply_state_adjust_rules()
3054 ps->performance_levels[i].vddc = vddc; in si_apply_state_adjust_rules()
3057 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3058 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk) in si_apply_state_adjust_rules()
3059 ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk; in si_apply_state_adjust_rules()
3060 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc) in si_apply_state_adjust_rules()
3061 ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc; in si_apply_state_adjust_rules()
3066 mclk = ps->performance_levels[0].mclk; in si_apply_state_adjust_rules()
3067 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3068 if (mclk < ps->performance_levels[i].mclk) in si_apply_state_adjust_rules()
3069 mclk = ps->performance_levels[i].mclk; in si_apply_state_adjust_rules()
3071 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3072 ps->performance_levels[i].mclk = mclk; in si_apply_state_adjust_rules()
3073 ps->performance_levels[i].vddci = vddci; in si_apply_state_adjust_rules()
3076 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3077 if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk) in si_apply_state_adjust_rules()
3078 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk; in si_apply_state_adjust_rules()
3079 if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci) in si_apply_state_adjust_rules()
3080 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci; in si_apply_state_adjust_rules()
3084 for (i = 0; i < ps->performance_level_count; i++) in si_apply_state_adjust_rules()
3086 &ps->performance_levels[i]); in si_apply_state_adjust_rules()
3088 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3089 if (ps->performance_levels[i].vddc < min_vce_voltage) in si_apply_state_adjust_rules()
3090 ps->performance_levels[i].vddc = min_vce_voltage; in si_apply_state_adjust_rules()
3092 ps->performance_levels[i].sclk, in si_apply_state_adjust_rules()
3093 max_limits->vddc, &ps->performance_levels[i].vddc); in si_apply_state_adjust_rules()
3095 ps->performance_levels[i].mclk, in si_apply_state_adjust_rules()
3096 max_limits->vddci, &ps->performance_levels[i].vddci); in si_apply_state_adjust_rules()
3098 ps->performance_levels[i].mclk, in si_apply_state_adjust_rules()
3099 max_limits->vddc, &ps->performance_levels[i].vddc); in si_apply_state_adjust_rules()
3102 max_limits->vddc, &ps->performance_levels[i].vddc); in si_apply_state_adjust_rules()
3105 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3108 &ps->performance_levels[i].vddc, in si_apply_state_adjust_rules()
3109 &ps->performance_levels[i].vddci); in si_apply_state_adjust_rules()
3112 ps->dc_compatible = true; in si_apply_state_adjust_rules()
3113 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3114 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) in si_apply_state_adjust_rules()
3115 ps->dc_compatible = false; in si_apply_state_adjust_rules()
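
The long run of si_apply_state_adjust_rules() hits above is a single clamp-and-order pass over a state's performance levels: every level is capped at the platform maxima, then clocks and voltages are forced to be non-decreasing from one level to the next. A simplified sketch of those two recurring loops, with a made-up struct standing in for the driver's rv7xx_pl:

#include <stdint.h>

/* Made-up stand-in for the driver's per-level data (struct rv7xx_pl). */
struct perf_level {
	uint32_t sclk;		/* engine clock */
	uint32_t mclk;		/* memory clock */
	uint16_t vddc;		/* core voltage */
	uint16_t vddci;		/* memory I/O voltage */
};

/* Cap every level at the platform maxima, as on lines 2972-2980. */
static void clamp_levels(struct perf_level *lv, int count,
			 uint32_t max_sclk, uint32_t max_mclk,
			 uint16_t max_vddc, uint16_t max_vddci)
{
	for (int i = 0; i < count; i++) {
		if (lv[i].sclk > max_sclk)
			lv[i].sclk = max_sclk;
		if (lv[i].mclk > max_mclk)
			lv[i].mclk = max_mclk;
		if (lv[i].vddc > max_vddc)
			lv[i].vddc = max_vddc;
		if (lv[i].vddci > max_vddci)
			lv[i].vddci = max_vddci;
	}
}

/* Force clocks and voltages to be non-decreasing across levels,
 * as on lines 3057-3061 and 3076-3080. */
static void order_levels(struct perf_level *lv, int count)
{
	for (int i = 1; i < count; i++) {
		if (lv[i].sclk < lv[i - 1].sclk)
			lv[i].sclk = lv[i - 1].sclk;
		if (lv[i].vddc < lv[i - 1].vddc)
			lv[i].vddc = lv[i - 1].vddc;
		if (lv[i].mclk < lv[i - 1].mclk)
			lv[i].mclk = lv[i - 1].mclk;
		if (lv[i].vddci < lv[i - 1].vddci)
			lv[i].vddci = lv[i - 1].vddci;
	}
}
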
3341 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_force_performance_level() local
3342 u32 levels = ps->performance_level_count; in si_dpm_force_performance_level()
3595 vddc_dly = (voltage_response_time * reference_clock) / 100; in si_program_response_times()
3596 acpi_dly = (acpi_delay_time * reference_clock) / 100; in si_program_response_times()
3597 vbi_dly = (vbi_time_out * reference_clock) / 100; in si_program_response_times()
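
Several of the "/ 100" hits are unit conversions on the reference clock, which here appears to be in the firmware's 10 kHz units: that is what makes line 2597's division by 100 come out as ticks per microsecond, and it makes lines 3595-3597 turn a microsecond delay into a count of reference-clock ticks. A small sketch of that conversion, assuming those units:

#include <stdint.h>
#include <stdio.h>

/* With the reference clock in 10 kHz units, N * 10 kHz gives N / 100 ticks
 * per microsecond, so a delay of "us" microseconds is (us * refclk) / 100
 * ticks, the shape of lines 3595-3597. */
static uint32_t us_to_ref_ticks(uint32_t us, uint32_t refclk_10khz)
{
	return (us * refclk_10khz) / 100;
}

int main(void)
{
	uint32_t refclk = 2700;	/* hypothetical 27 MHz reference clock */

	printf("ticks per us : %u\n", refclk / 100);			/* 27 */
	printf("100 us delay : %u ticks\n", us_to_ref_ticks(100, refclk));	/* 2700 */
	return 0;
}
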
4867 u32 clkv = (u32)((((131 * ss.percentage * ss.rate) / 100) * tmp) / freq_nom); in si_populate_mclk_value()
4903 struct ni_ps *ps = ni_get_ps(radeon_state); in si_populate_smc_sp() local
4907 for (i = 0; i < ps->performance_level_count - 1; i++) in si_populate_smc_sp()
4910 smc_state->levels[ps->performance_level_count - 1].bSP = in si_populate_smc_sp()
5047 (50 / SISLANDS_MAX_HARDWARE_POWERLEVELS) * 100 * (i + 1), in si_populate_smc_t()
5048 100 * R600_AH_DFLT, in si_populate_smc_t()
5141 threshold = state->performance_levels[state->performance_level_count-1].sclk * 100 / 100; in si_convert_power_state_to_smc()
5999 slope1 = (u16)((50 + ((16 * duty100 * pwm_diff1) / t_diff1)) / 100); in si_thermal_setup_fan_table()
6000 slope2 = (u16)((50 + ((16 * duty100 * pwm_diff2) / t_diff2)) / 100); in si_thermal_setup_fan_table()
6002 fan_table.temp_min = cpu_to_be16((50 + rdev->pm.dpm.fan.t_min) / 100); in si_thermal_setup_fan_table()
6003 fan_table.temp_med = cpu_to_be16((50 + rdev->pm.dpm.fan.t_med) / 100); in si_thermal_setup_fan_table()
6004 fan_table.temp_max = cpu_to_be16((50 + rdev->pm.dpm.fan.t_max) / 100); in si_thermal_setup_fan_table()
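
In the si_thermal_setup_fan_table() hits the "+ 50 ... / 100" pattern is round-to-nearest division by 100: the fan parameters are carried at 100x scale and the table wants the unscaled value, so half the divisor is added before dividing to round rather than truncate. A one-function sketch of the idiom (the example values are made up):

#include <stdint.h>
#include <stdio.h>

/* Round-to-nearest division by 100, the "(x + 50) / 100" idiom used for
 * the fan table entries on lines 5999-6004. */
static uint16_t round_div_100(uint32_t x)
{
	return (uint16_t)((x + 50) / 100);
}

int main(void)
{
	printf("%u\n", round_div_100(6475));	/* 65, where truncation would give 64 */
	printf("%u\n", round_div_100(6449));	/* 64 */
	return 0;
}
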
6087 tmp64 = (u64)duty * 100; in si_fan_ctrl_get_fan_speed_percent()
6091 if (*speed > 100) in si_fan_ctrl_get_fan_speed_percent()
6092 *speed = 100; in si_fan_ctrl_get_fan_speed_percent()
6111 if (speed > 100) in si_fan_ctrl_set_fan_speed_percent()
6120 do_div(tmp64, 100); in si_fan_ctrl_set_fan_speed_percent()
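
The si_fan_ctrl_*_fan_speed_percent() hits convert between a raw PWM duty register value and a 0-100 percentage, widening to 64 bits before the multiply so the intermediate product cannot overflow, and clamping or rejecting anything above 100. A user-space restatement of both directions (duty100 stands for the register value that corresponds to a 100% duty cycle, assumed non-zero):

#include <stdint.h>
#include <stdio.h>

/* Raw duty -> percent, clamped to 100, mirroring lines 6087-6092. */
static uint32_t duty_to_percent(uint32_t duty, uint32_t duty100)
{
	uint64_t tmp = (uint64_t)duty * 100;	/* widen before dividing */
	uint32_t pct = (uint32_t)(tmp / duty100);

	return pct > 100 ? 100 : pct;
}

/* Percent -> raw duty, the inverse used on the set path (lines 6111-6120). */
static uint32_t percent_to_duty(uint32_t pct, uint32_t duty100)
{
	uint64_t tmp = (uint64_t)pct * duty100;

	return (uint32_t)(tmp / 100);
}

int main(void)
{
	printf("%u%%\n", duty_to_percent(128, 255));	/* 50% */
	printf("duty %u\n", percent_to_duty(50, 255));	/* 127 */
	return 0;
}
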
6675 struct ni_ps *ps = ni_get_ps(rps); in si_parse_pplib_clock_info() local
6677 struct rv7xx_pl *pl = &ps->performance_levels[index]; in si_parse_pplib_clock_info()
6680 ps->performance_level_count = index + 1; in si_parse_pplib_clock_info()
6759 struct ni_ps *ps; in si_parse_power_table() local
6776 rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, in si_parse_power_table()
6779 if (!rdev->pm.dpm.ps) in si_parse_power_table()
6790 ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL); in si_parse_power_table()
6791 if (ps == NULL) { in si_parse_power_table()
6792 kfree(rdev->pm.dpm.ps); in si_parse_power_table()
6795 rdev->pm.dpm.ps[i].ps_priv = ps; in si_parse_power_table()
6796 si_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], in si_parse_power_table()
6811 &rdev->pm.dpm.ps[i], k, in si_parse_power_table()
7020 kfree(rdev->pm.dpm.ps[i].ps_priv); in si_dpm_fini()
7022 kfree(rdev->pm.dpm.ps); in si_dpm_fini()
7033 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_debugfs_print_current_performance_level() local
7039 if (current_index >= ps->performance_level_count) { in si_dpm_debugfs_print_current_performance_level()
7042 pl = &ps->performance_levels[current_index]; in si_dpm_debugfs_print_current_performance_level()
7053 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_get_current_sclk() local
7059 if (current_index >= ps->performance_level_count) { in si_dpm_get_current_sclk()
7062 pl = &ps->performance_levels[current_index]; in si_dpm_get_current_sclk()
7071 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_get_current_mclk() local
7077 if (current_index >= ps->performance_level_count) { in si_dpm_get_current_mclk()
7080 pl = &ps->performance_levels[current_index]; in si_dpm_get_current_mclk()