// SPDX-License-Identifier: MIT
//
// Copyright 2024 Advanced Micro Devices, Inc.

#include "dml2_dpmm_dcn4.h"
#include "dml2_internal_shared_types.h"
#include "dml_top_types.h"
#include "lib_float_math.h"

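/*
 * Convert a DRAM bandwidth requirement into the minimum UCLK that can
 * deliver it. Bytes moved per memory clock cycle is channel count *
 * channel width * transactions per clock, so dividing the bandwidth (kbps)
 * by that figure yields the clock in kHz.
 */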
static double dram_bw_kbps_to_uclk_khz(unsigned long long bandwidth_kbps, const struct dml2_dram_params *dram_config)
{
	double uclk_khz = 0;
	unsigned long uclk_mbytes_per_tick = 0;

	uclk_mbytes_per_tick = dram_config->channel_count * dram_config->channel_width_bytes * dram_config->transactions_per_clock;

	uclk_khz = (double)bandwidth_kbps / uclk_mbytes_per_tick;

	return uclk_khz;
}

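/*
 * Look up the DCFCLK/FCLK/UCLK floors implied by the latency-driven minimum
 * clock index chosen during mode support. The stage 3 index is preferred
 * when that stage succeeded; otherwise the stage 1 index is used.
 */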
static void get_minimum_clocks_for_latency(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out,
	double *uclk,
	double *fclk,
	double *dcfclk)
{
	int min_clock_index_for_latency;

	if (in_out->display_cfg->stage3.success)
		min_clock_index_for_latency = in_out->display_cfg->stage3.min_clk_index_for_latency;
	else
		min_clock_index_for_latency = in_out->display_cfg->stage1.min_clk_index_for_latency;

	*dcfclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_dcfclk_khz;
	*fclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_fclk_khz;
	*uclk = dram_bw_kbps_to_uclk_khz(in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].pre_derate_dram_bw_kbps,
		&in_out->soc_bb->clk_table.dram_config);
}

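/* Round a non-negative double up to the next whole number (ceiling). */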
static unsigned long dml_round_up(double a)
{
	if (a - (unsigned long)a > 0) {
		return ((unsigned long)a) + 1;
	}
	return (unsigned long)a;
}

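/*
 * Compute the minimum UCLK/FCLK/DCFCLK for the system-active state. Each
 * clock must cover both the derated average and derated urgent bandwidth,
 * and must also be no lower than the latency-driven floor.
 */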
static void calculate_system_active_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg, min_uclk_urgent, min_uclk_bw;
	double min_fclk_avg, min_fclk_urgent, min_fclk_bw;
	double min_dcfclk_avg, min_dcfclk_urgent, min_dcfclk_bw;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.active.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dram_derate_percent_pixel / 100);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz(mode_support_result->global.active.urgent_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	if (in_out->display_cfg->display_config.hostvm_enable)
		min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel_and_vm / 100);
	else
		min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel / 100);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	min_fclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.active.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	min_dcfclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.active.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.active.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.active.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.active.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);
}

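/*
 * Compute SVP prefetch minimums twice: once with the MALL prefetch derates
 * (DF throttling assumed enabled) and once with the system-active derates
 * (DF throttling assumed disabled), the latter stored separately as
 * svp_prefetch_no_throttle.
 */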
static void calculate_svp_prefetch_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg, min_uclk_urgent, min_uclk_bw;
	double min_fclk_avg, min_fclk_urgent, min_fclk_bw;
	double min_dcfclk_avg, min_dcfclk_urgent, min_dcfclk_bw;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	/* assumes DF throttling is enabled */
	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.svp_prefetch.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.dram_derate_percent_pixel / 100);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz(mode_support_result->global.svp_prefetch.urgent_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.dram_derate_percent_pixel / 100);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	min_fclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	min_dcfclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);

	/* assumes DF throttling is disabled */
	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.svp_prefetch.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dram_derate_percent_pixel / 100);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz(mode_support_result->global.svp_prefetch.urgent_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel / 100);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	min_fclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	min_dcfclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch_no_throttle.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);
}

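/*
 * Idle minimums only need to sustain the derated average bandwidth (no
 * urgent requirement), subject to the same latency-driven floors.
 */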
static void calculate_idle_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg;
	double min_fclk_avg;
	double min_dcfclk_avg;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.active.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.dram_derate_percent_pixel / 100);

	min_fclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.fclk_derate_percent / 100);

	min_dcfclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.dcfclk_derate_percent / 100);

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.idle.uclk_khz = dml_round_up(min_uclk_avg > min_uclk_latency ? min_uclk_avg : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.idle.fclk_khz = dml_round_up(min_fclk_avg > min_fclk_latency ? min_fclk_avg : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.idle.dcfclk_khz = dml_round_up(min_dcfclk_avg > min_dcfclk_latency ? min_dcfclk_avg : min_dcfclk_latency);
}

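/*
 * Apply a margin to the requested clock and round it to a frequency the
 * DENTIST DFS can actually produce, i.e. the VCO frequency divided by a
 * quantized divider (in steps of 1/DFS_DIVIDER_RANGE_SCALE_FACTOR). Returns
 * the rounded clock and the matching divider ID register value. Fails if
 * the request is below 1 kHz, the VCO frequency is invalid, or the request
 * exceeds the VCO frequency.
 */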
static bool add_margin_and_round_to_dfs_granularity(double clock_khz, double margin, unsigned long vco_freq_khz, unsigned long *rounded_khz, uint32_t *divider_id)
{
	enum dentist_divider_range {
		DFS_DIVIDER_RANGE_1_START = 8, /* 2.00 */
		DFS_DIVIDER_RANGE_1_STEP = 1, /* 0.25 */
		DFS_DIVIDER_RANGE_2_START = 64, /* 16.00 */
		DFS_DIVIDER_RANGE_2_STEP = 2, /* 0.50 */
		DFS_DIVIDER_RANGE_3_START = 128, /* 32.00 */
		DFS_DIVIDER_RANGE_3_STEP = 4, /* 1.00 */
		DFS_DIVIDER_RANGE_4_START = 248, /* 62.00 */
		DFS_DIVIDER_RANGE_4_STEP = 264, /* 66.00 */
		DFS_DIVIDER_RANGE_SCALE_FACTOR = 4
	};

	enum DFS_base_divider_id {
		DFS_BASE_DID_1 = 0x08,
		DFS_BASE_DID_2 = 0x40,
		DFS_BASE_DID_3 = 0x60,
		DFS_BASE_DID_4 = 0x7e,
		DFS_MAX_DID = 0x7f
	};

	unsigned int divider;

	if (clock_khz < 1 || vco_freq_khz < 1 || clock_khz > vco_freq_khz)
		return false;

	clock_khz *= 1.0 + margin;

	divider = (unsigned int)((int)DFS_DIVIDER_RANGE_SCALE_FACTOR * (vco_freq_khz / clock_khz));

	/* we want to floor here to get a higher clock than required rather than lower */
	if (divider < DFS_DIVIDER_RANGE_2_START) {
		if (divider < DFS_DIVIDER_RANGE_1_START)
			*divider_id = DFS_BASE_DID_1;
		else
			*divider_id = DFS_BASE_DID_1 + ((divider - DFS_DIVIDER_RANGE_1_START) / DFS_DIVIDER_RANGE_1_STEP);
	} else if (divider < DFS_DIVIDER_RANGE_3_START) {
		*divider_id = DFS_BASE_DID_2 + ((divider - DFS_DIVIDER_RANGE_2_START) / DFS_DIVIDER_RANGE_2_STEP);
	} else if (divider < DFS_DIVIDER_RANGE_4_START) {
		*divider_id = DFS_BASE_DID_3 + ((divider - DFS_DIVIDER_RANGE_3_START) / DFS_DIVIDER_RANGE_3_STEP);
	} else {
		*divider_id = DFS_BASE_DID_4 + ((divider - DFS_DIVIDER_RANGE_4_START) / DFS_DIVIDER_RANGE_4_STEP);
		if (*divider_id > DFS_MAX_DID)
			*divider_id = DFS_MAX_DID;
	}

	*rounded_khz = vco_freq_khz * DFS_DIVIDER_RANGE_SCALE_FACTOR / divider;

	return true;
}

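/*
 * For SoCs without a DFS: derive a common PLL frequency (at least 600 MHz,
 * rounded up to 1 MHz granularity) from the largest requested clock, then
 * round each clock to the PLL frequency divided by an integer divider of at
 * most 32.
 */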
static bool round_to_non_dfs_granularity(unsigned long dispclk_khz, unsigned long dpprefclk_khz, unsigned long dtbrefclk_khz,
	unsigned long *rounded_dispclk_khz, unsigned long *rounded_dpprefclk_khz, unsigned long *rounded_dtbrefclk_khz)
{
	unsigned long pll_frequency_khz;

	pll_frequency_khz = (unsigned long) math_max2(600000, math_ceil2(math_max3(dispclk_khz, dpprefclk_khz, dtbrefclk_khz), 1000));

	*rounded_dispclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dispclk_khz, 32);

	*rounded_dpprefclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dpprefclk_khz, 32);

	if (dtbrefclk_khz > 0) {
		*rounded_dtbrefclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dtbrefclk_khz, 32);
	} else {
		*rounded_dtbrefclk_khz = 0;
	}

	return true;
}

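/*
 * Snap a minimum clock requirement up to the next entry in a DPM table.
 * Tables with at most two entries are treated as fine-grained: any value up
 * to the table maximum is accepted as-is.
 */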
static bool round_up_and_copy_to_next_dpm(unsigned long min_value, unsigned long *rounded_value, const struct dml2_clk_table *clock_table)
{
	bool result = false;
	int index = 0;

	if (clock_table->num_clk_values > 2) {
		while (index < clock_table->num_clk_values && clock_table->clk_values_khz[index] < min_value)
			index++;

		if (index < clock_table->num_clk_values) {
			*rounded_value = clock_table->clk_values_khz[index];
			result = true;
		}
	} else if (clock_table->clk_values_khz[clock_table->num_clk_values - 1] >= min_value) {
		*rounded_value = min_value;
		result = true;
	}

	return result;
}

static bool round_up_to_next_dpm(unsigned long *clock_value, const struct dml2_clk_table *clock_table)
{
	return round_up_and_copy_to_next_dpm(*clock_value, clock_value, clock_table);
}

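/*
 * Fine-grained mapping: each of DCFCLK/FCLK/UCLK is rounded up to its own
 * DPM table independently for the active, SVP prefetch, and idle states.
 */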
static bool map_soc_min_clocks_to_dpm_fine_grained(struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
{
	bool result;

	result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.dcfclk_khz, &state_table->dcfclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.fclk_khz, &state_table->fclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.uclk_khz, &state_table->uclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.dcfclk_khz, &state_table->dcfclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.fclk_khz, &state_table->fclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.uclk_khz, &state_table->uclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.dcfclk_khz, &state_table->dcfclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.fclk_khz, &state_table->fclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.uclk_khz, &state_table->uclk);

	/* these clocks are optional, so they can fail to map, in which case map all to 0 */
	if (result) {
		if (!round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.dcfclk_khz, &state_table->dcfclk) ||
			!round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz, &state_table->fclk) ||
			!round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz, &state_table->uclk)) {
			display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.dcfclk_khz = 0;
			display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz = 0;
			display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz = 0;
		}
	}

	return result;
}

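/*
 * Coarse-grained mapping: DCFCLK/FCLK/UCLK move together as one DPM level,
 * so pick the first level whose entry satisfies all three minimums at once.
 */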
static bool map_soc_min_clocks_to_dpm_coarse_grained(struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
{
	bool result;
	int index;

	result = false;
	for (index = 0; index < state_table->uclk.num_clk_values; index++) {
		if (display_cfg->min_clocks.dcn4x.active.dcfclk_khz <= state_table->dcfclk.clk_values_khz[index] &&
			display_cfg->min_clocks.dcn4x.active.fclk_khz <= state_table->fclk.clk_values_khz[index] &&
			display_cfg->min_clocks.dcn4x.active.uclk_khz <= state_table->uclk.clk_values_khz[index]) {
			display_cfg->min_clocks.dcn4x.active.dcfclk_khz = state_table->dcfclk.clk_values_khz[index];
			display_cfg->min_clocks.dcn4x.active.fclk_khz = state_table->fclk.clk_values_khz[index];
			display_cfg->min_clocks.dcn4x.active.uclk_khz = state_table->uclk.clk_values_khz[index];
			result = true;
			break;
		}
	}

	if (result) {
		result = false;
		for (index = 0; index < state_table->uclk.num_clk_values; index++) {
			if (display_cfg->min_clocks.dcn4x.idle.dcfclk_khz <= state_table->dcfclk.clk_values_khz[index] &&
				display_cfg->min_clocks.dcn4x.idle.fclk_khz <= state_table->fclk.clk_values_khz[index] &&
				display_cfg->min_clocks.dcn4x.idle.uclk_khz <= state_table->uclk.clk_values_khz[index]) {
				display_cfg->min_clocks.dcn4x.idle.dcfclk_khz = state_table->dcfclk.clk_values_khz[index];
				display_cfg->min_clocks.dcn4x.idle.fclk_khz = state_table->fclk.clk_values_khz[index];
				display_cfg->min_clocks.dcn4x.idle.uclk_khz = state_table->uclk.clk_values_khz[index];
				result = true;
				break;
			}
		}
	}

	// SVP is not supported on any coarse-grained SoCs
	display_cfg->min_clocks.dcn4x.svp_prefetch.dcfclk_khz = 0;
	display_cfg->min_clocks.dcn4x.svp_prefetch.fclk_khz = 0;
	display_cfg->min_clocks.dcn4x.svp_prefetch.uclk_khz = 0;

	return result;
}

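/*
 * Map every computed minimum onto real DPM levels. Fine-grained mapping is
 * used when DCFCLK or FCLK has a two-entry (fine-grained) table, or when
 * the DCFCLK/FCLK/UCLK tables differ in length; otherwise the tables are
 * treated as coarse-grained levels.
 */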
static bool map_min_clocks_to_dpm(const struct dml2_core_mode_support_result *mode_support_result, struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
{
	bool result = false;
	bool dcfclk_fine_grained = false, fclk_fine_grained = false, clock_state_count_identical = false;
	unsigned int i;

	if (!state_table || !display_cfg)
		return false;

	if (state_table->dcfclk.num_clk_values == 2) {
		dcfclk_fine_grained = true;
	}

	if (state_table->fclk.num_clk_values == 2) {
		fclk_fine_grained = true;
	}

	if (state_table->fclk.num_clk_values == state_table->dcfclk.num_clk_values &&
		state_table->fclk.num_clk_values == state_table->uclk.num_clk_values) {
		clock_state_count_identical = true;
	}

	if (dcfclk_fine_grained || fclk_fine_grained || !clock_state_count_identical)
		result = map_soc_min_clocks_to_dpm_fine_grained(display_cfg, state_table);
	else
		result = map_soc_min_clocks_to_dpm_coarse_grained(display_cfg, state_table);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dispclk_khz, &state_table->dispclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.deepsleep_dcfclk_khz, &state_table->dcfclk);

	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (result)
			result = round_up_to_next_dpm(&display_cfg->plane_programming[i].min_clocks.dcn4x.dppclk_khz, &state_table->dppclk);
	}

	for (i = 0; i < display_cfg->display_config.num_streams; i++) {
		if (result)
			result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].dscclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.dscclk_khz, &state_table->dscclk);
		if (result)
			result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].dtbclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.dtbclk_khz, &state_table->dtbclk);
		if (result)
			result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].phyclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.phyclk_khz, &state_table->phyclk);
	}

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dpprefclk_khz, &state_table->dppclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dtbrefclk_khz, &state_table->dtbclk);

	return result;
}

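/*
 * Streams selected by the mask are trivially synchronizable when there are
 * at most one of them, or when all of them have byte-identical timings and
 * none uses DRR.
 */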
static bool are_timings_trivially_synchronizable(struct dml2_display_cfg *display_config, int mask)
{
	unsigned int i;
	bool identical = true;
	bool contains_drr = false;
	unsigned int remap_array[DML2_MAX_PLANES];
	unsigned int remap_array_size = 0;

	// Create a remap array to enable simple iteration through only masked stream indices
	for (i = 0; i < display_config->num_streams; i++) {
		if (mask & (0x1 << i)) {
			remap_array[remap_array_size++] = i;
		}
	}

	// 0 or 1 display is always trivially synchronizable
	if (remap_array_size <= 1)
		return true;

	// Check that all display timings are the same
	for (i = 1; i < remap_array_size; i++) {
		if (memcmp(&display_config->stream_descriptors[remap_array[i - 1]].timing, &display_config->stream_descriptors[remap_array[i]].timing, sizeof(struct dml2_timing_cfg))) {
			identical = false;
			break;
		}
	}

	// Check if any displays are DRR
	for (i = 0; i < remap_array_size; i++) {
		if (display_config->stream_descriptors[remap_array[i]].timing.drr_config.enabled) {
			contains_drr = true;
			break;
		}
	}

	// Trivial sync is possible if all displays are identical and none are DRR
	return !contains_drr && identical;
}

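/*
 * Return the smallest vblank reserved time, in microseconds, across the
 * streams selected by the mask (0 if the mask selects no streams).
 */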
static int find_smallest_idle_time_in_vblank_us(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int mask)
{
	unsigned int i;
	int min_idle_us = 0;
	unsigned int remap_array[DML2_MAX_PLANES];
	unsigned int remap_array_size = 0;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	// Create a remap array to enable simple iteration through only masked stream indices
	for (i = 0; i < in_out->programming->display_config.num_streams; i++) {
		if (mask & (0x1 << i)) {
			remap_array[remap_array_size++] = i;
		}
	}

	if (remap_array_size == 0)
		return 0;

	min_idle_us = mode_support_result->cfg_support_info.stream_support_info[remap_array[0]].vblank_reserved_time_us;

	for (i = 1; i < remap_array_size; i++) {
		if (min_idle_us > mode_support_result->cfg_support_info.stream_support_info[remap_array[i]].vblank_reserved_time_us)
			min_idle_us = mode_support_result->cfg_support_info.stream_support_info[remap_array[i]].vblank_reserved_time_us;
	}

	return min_idle_us;
}

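/*
 * If the streams selected by the 0xF mask can be trivially synchronized,
 * P-state changes can be hidden in vblank: allow UCLK/FCLK P-state
 * switching when the shortest vblank idle time covers the corresponding
 * blackout window.
 */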
static bool determine_power_management_features_with_vblank_only(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int min_idle_us;

	if (are_timings_trivially_synchronizable(&in_out->programming->display_config, 0xF)) {
		min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, 0xF);

		if (min_idle_us >= in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us)
			in_out->programming->uclk_pstate_supported = true;

		if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
			in_out->programming->fclk_pstate_supported = true;
	}

	return true;
}

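/*
 * Build a mask of planes whose active latency hiding is too small to absorb
 * the given blackout time, i.e. planes that cannot take a P-state change in
 * vactive.
 */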
static int get_displays_without_vactive_margin_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int latency_hiding_requirement_us)
{
	unsigned int i;
	int displays_without_vactive_margin_mask = 0x0;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	for (i = 0; i < in_out->programming->display_config.num_planes; i++) {
		if (mode_support_result->cfg_support_info.plane_support_info[i].active_latency_hiding_us
			< latency_hiding_requirement_us)
			displays_without_vactive_margin_mask |= (0x1 << i);
	}

	return displays_without_vactive_margin_mask;
}

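/*
 * Build a mask of FAMS-capable planes; planes with any explicit legacy SVP
 * override (anything other than auto) appear to be treated as FAMS-capable
 * here. The latency requirement parameter is currently unused.
 */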
static int get_displays_with_fams_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int latency_hiding_requirement_us)
{
	unsigned int i;
	int displays_with_fams_mask = 0x0;

	for (i = 0; i < in_out->programming->display_config.num_planes; i++) {
		if (in_out->programming->display_config.plane_descriptors[i].overrides.legacy_svp_config != dml2_svp_mode_override_auto)
			displays_with_fams_mask |= (0x1 << i);
	}

	return displays_with_fams_mask;
}

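/*
 * Second chance for P-state support: if only some planes lack vactive
 * margin, the remaining streams may still be trivially synchronizable, in
 * which case their shortest vblank idle time is checked against the
 * blackout window.
 */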
static bool determine_power_management_features_with_vactive_and_vblank(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int displays_without_vactive_margin_mask = 0x0;
	int min_idle_us = 0;

	if (in_out->programming->uclk_pstate_supported == false) {
		displays_without_vactive_margin_mask =
			get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));

		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us)
				in_out->programming->uclk_pstate_supported = true;
		}
	}

	if (in_out->programming->fclk_pstate_supported == false) {
		displays_without_vactive_margin_mask =
			get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.fclk_change_blackout_us));

		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
				in_out->programming->fclk_pstate_supported = true;
		}
	}

	return true;
}

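/*
 * FAMS fallback: UCLK P-state support holds if every plane lacking vactive
 * margin is covered by FAMS.
 */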
static bool determine_power_management_features_with_fams(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int displays_without_vactive_margin_mask = 0x0;
	int displays_with_fams_mask = 0x0;

	displays_without_vactive_margin_mask =
		get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));

	displays_with_fams_mask =
		get_displays_with_fams_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));

	if ((displays_without_vactive_margin_mask & ~displays_with_fams_mask) == 0)
		in_out->programming->uclk_pstate_supported = true;

	return true;
}

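/*
 * When a P-state switch cannot be guaranteed, pin the affected clock at its
 * maximum DPM level for every state so that no switch is ever required.
 */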
static void clamp_uclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	in_out->programming->min_clocks.dcn4x.active.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
	in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
	in_out->programming->min_clocks.dcn4x.idle.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
}

static void clamp_fclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	in_out->programming->min_clocks.dcn4x.active.fclk_khz = in_out->soc_bb->clk_table.fclk.clk_values_khz[in_out->soc_bb->clk_table.fclk.num_clk_values - 1];
	in_out->programming->min_clocks.dcn4x.idle.fclk_khz = in_out->soc_bb->clk_table.fclk.clk_values_khz[in_out->soc_bb->clk_table.fclk.num_clk_values - 1];
}

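/*
 * Core mapping step shared by the DCN3 and DCN4 entry points: compute the
 * per-state minimums, boost active minimums to the SVP prefetch minimums,
 * apply downspread and ramping margins to DISPCLK and the DPP/DTB reference
 * clocks, round everything to DFS (or PLL) granularity, and finally snap
 * the results onto the SoC DPM tables. P-state support flags are cleared
 * here and re-evaluated by the callers.
 */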
static bool map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int i;
	bool result;
	double dispclk_khz;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	calculate_system_active_minimums(in_out);
	calculate_svp_prefetch_minimums(in_out);
	calculate_idle_minimums(in_out);

	// In NV4, there's no support for FCLK or DCFCLK DPM change before SVP prefetch starts, therefore
	// active minimums must be boosted to prefetch minimums
	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz > in_out->programming->min_clocks.dcn4x.active.uclk_khz)
		in_out->programming->min_clocks.dcn4x.active.uclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz;

	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz > in_out->programming->min_clocks.dcn4x.active.fclk_khz)
		in_out->programming->min_clocks.dcn4x.active.fclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz;

	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz > in_out->programming->min_clocks.dcn4x.active.dcfclk_khz)
		in_out->programming->min_clocks.dcn4x.active.dcfclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz;

	// Some massaging is needed for the dispclk ramping cases:
	dispclk_khz = mode_support_result->global.dispclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0) * (1.0 + in_out->ip->dispclk_ramp_margin_percent / 100.0);
	// The ramping margin should not make dispclk exceed the maximum dispclk speed:
	dispclk_khz = math_min2(dispclk_khz, in_out->min_clk_table->max_clocks_khz.dispclk);
	// But the required dispclk can still be more than the maximum dispclk speed:
	dispclk_khz = math_max2(dispclk_khz, mode_support_result->global.dispclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	// DPP Ref is always set to max of all DPP clocks
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (in_out->programming->min_clocks.dcn4x.dpprefclk_khz < mode_support_result->per_plane[i].dppclk_khz)
			in_out->programming->min_clocks.dcn4x.dpprefclk_khz = mode_support_result->per_plane[i].dppclk_khz;
	}
	in_out->programming->min_clocks.dcn4x.dpprefclk_khz = (unsigned long) (in_out->programming->min_clocks.dcn4x.dpprefclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	// DTB Ref is always set to max of all DTB clocks
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (in_out->programming->min_clocks.dcn4x.dtbrefclk_khz < mode_support_result->per_stream[i].dtbclk_khz)
			in_out->programming->min_clocks.dcn4x.dtbrefclk_khz = mode_support_result->per_stream[i].dtbclk_khz;
	}
	in_out->programming->min_clocks.dcn4x.dtbrefclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dtbrefclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	if (in_out->soc_bb->no_dfs) {
		round_to_non_dfs_granularity((unsigned long)dispclk_khz, in_out->programming->min_clocks.dcn4x.dpprefclk_khz, in_out->programming->min_clocks.dcn4x.dtbrefclk_khz,
			&in_out->programming->min_clocks.dcn4x.dispclk_khz, &in_out->programming->min_clocks.dcn4x.dpprefclk_khz, &in_out->programming->min_clocks.dcn4x.dtbrefclk_khz);
	} else {
		add_margin_and_round_to_dfs_granularity(dispclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dispclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dispclk_did);

		add_margin_and_round_to_dfs_granularity(in_out->programming->min_clocks.dcn4x.dpprefclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dpprefclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dpprefclk_did);

		add_margin_and_round_to_dfs_granularity(in_out->programming->min_clocks.dcn4x.dtbrefclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dtbrefclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dtbrefclk_did);
	}

	// Quantize each DPPCLK to a multiple of DPP Ref / 255, rounded up
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		in_out->programming->plane_programming[i].min_clocks.dcn4x.dppclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dpprefclk_khz / 255.0
			* math_ceil2(in_out->display_cfg->mode_support_result.per_plane[i].dppclk_khz * (1.0 + in_out->soc_bb->dcn_downspread_percent / 100.0) * 255.0 / in_out->programming->min_clocks.dcn4x.dpprefclk_khz, 1.0));
	}

	in_out->programming->min_clocks.dcn4x.deepsleep_dcfclk_khz = mode_support_result->global.dcfclk_deepsleep_khz;
	in_out->programming->min_clocks.dcn4x.socclk_khz = mode_support_result->global.socclk_khz;

	result = map_min_clocks_to_dpm(mode_support_result, in_out->programming, &in_out->soc_bb->clk_table);

	// By default, all power management features are not enabled
	in_out->programming->fclk_pstate_supported = false;
	in_out->programming->uclk_pstate_supported = false;

	return result;
}

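/*
 * DCN3-style entry point: after the common mapping, try to enable UCLK and
 * FCLK P-state support via vblank idle time, vactive margin, and FAMS, then
 * clamp the corresponding clocks to max when support cannot be proven.
 */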
bool dpmm_dcn3_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	bool result;

	result = map_mode_to_soc_dpm(in_out);

	// Check if any features can be enabled by nominal vblank idle time
	determine_power_management_features_with_vblank_only(in_out);

	// Check if any features can be enabled in vactive/vblank
	determine_power_management_features_with_vactive_and_vblank(in_out);

	// Check if any features can be enabled via FAMS
	determine_power_management_features_with_fams(in_out);

	if (in_out->programming->uclk_pstate_supported == false)
		clamp_uclk_to_max(in_out);

	if (in_out->programming->fclk_pstate_supported == false)
		clamp_fclk_to_max(in_out);

	return result;
}

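/*
 * DCN4-style entry point: UCLK P-state support follows from the stage 3
 * (P-state optimization) result, FCLK support from vactive margin or
 * synchronized vblank, with unsupported clocks clamped to max. Stutter and
 * Z8 stutter support are then derived from the shortest vblank idle time.
 */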
bool dpmm_dcn4_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	bool result;
	int displays_without_vactive_margin_mask = 0x0;
	int min_idle_us = 0;

	result = map_mode_to_soc_dpm(in_out);

	if (in_out->display_cfg->stage3.success)
		in_out->programming->uclk_pstate_supported = true;

	displays_without_vactive_margin_mask =
		get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.fclk_change_blackout_us));

	if (displays_without_vactive_margin_mask == 0) {
		in_out->programming->fclk_pstate_supported = true;
	} else {
		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
				in_out->programming->fclk_pstate_supported = true;
		}
	}

	if (in_out->programming->uclk_pstate_supported == false)
		clamp_uclk_to_max(in_out);

	if (in_out->programming->fclk_pstate_supported == false)
		clamp_fclk_to_max(in_out);

	min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, 0xFF);

	if (in_out->soc_bb->power_management_parameters.stutter_enter_plus_exit_latency_us > 0 &&
		min_idle_us >= in_out->soc_bb->power_management_parameters.stutter_enter_plus_exit_latency_us)
		in_out->programming->stutter.supported_in_blank = true;
	else
		in_out->programming->stutter.supported_in_blank = false;

	// TODO: Fix me Sam
	if (in_out->soc_bb->power_management_parameters.z8_min_idle_time > 0 &&
		in_out->programming->informative.power_management.z8.stutter_period >= in_out->soc_bb->power_management_parameters.z8_min_idle_time)
		in_out->programming->z8_stutter.meets_eco = true;
	else
		in_out->programming->z8_stutter.meets_eco = false;

	if (in_out->soc_bb->power_management_parameters.z8_stutter_exit_latency_us > 0 &&
		min_idle_us >= in_out->soc_bb->power_management_parameters.z8_stutter_exit_latency_us)
		in_out->programming->z8_stutter.supported_in_blank = true;
	else
		in_out->programming->z8_stutter.supported_in_blank = false;

	return result;
}

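/*
 * Translate the computed watermarks (in microseconds) into DCHUBBUB
 * register values by scaling with the DLG reference clock in MHz, which
 * yields refclk cycles; watermark sets A and B are programmed identically
 * here.
 */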
bool dpmm_dcn4_map_watermarks(struct dml2_dpmm_map_watermarks_params_in_out *in_out)
{
	const struct dml2_display_cfg *display_cfg = &in_out->display_cfg->display_config;
	const struct dml2_core_internal_display_mode_lib *mode_lib = &in_out->core->clean_me_up.mode_lib;
	struct dml2_dchub_global_register_set *dchubbub_regs = &in_out->programming->global_regs;

	double refclk_freq_in_mhz = (display_cfg->overrides.hw.dlg_ref_clk_mhz > 0) ? (double)display_cfg->overrides.hw.dlg_ref_clk_mhz : mode_lib->soc.dchub_refclk_mhz;

	/* set A */
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].fclk_pstate = (unsigned int)(mode_lib->mp.Watermark.FCLKChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_enter = (unsigned int)(mode_lib->mp.Watermark.StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_exit = (unsigned int)(mode_lib->mp.Watermark.StutterExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].temp_read_or_ppt = (unsigned int)(mode_lib->mp.Watermark.temp_read_or_ppt_watermark_us * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].uclk_pstate = (unsigned int)(mode_lib->mp.Watermark.DRAMClockChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].urgent = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].usr = (unsigned int)(mode_lib->mp.Watermark.USRRetrainingWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].refcyc_per_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].refcyc_per_meta_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_flip = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthImmediateFlip * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_nom = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidth * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_mall = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthMALL * 1000);

	/* set B */
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].fclk_pstate = (unsigned int)(mode_lib->mp.Watermark.FCLKChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_enter = (unsigned int)(mode_lib->mp.Watermark.StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_exit = (unsigned int)(mode_lib->mp.Watermark.StutterExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].temp_read_or_ppt = (unsigned int)(mode_lib->mp.Watermark.temp_read_or_ppt_watermark_us * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].uclk_pstate = (unsigned int)(mode_lib->mp.Watermark.DRAMClockChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].urgent = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].usr = (unsigned int)(mode_lib->mp.Watermark.USRRetrainingWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].refcyc_per_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].refcyc_per_meta_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_flip = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthImmediateFlip * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_nom = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidth * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_mall = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthMALL * 1000);

	dchubbub_regs->num_watermark_sets = 2;

	return true;
}