1 // SPDX-License-Identifier: MIT
2 //
3 // Copyright 2024 Advanced Micro Devices, Inc.
4
5 #include "dml2_dpmm_dcn4.h"
6 #include "dml2_internal_shared_types.h"
7 #include "dml_top_types.h"
8 #include "lib_float_math.h"
9
dram_bw_kbps_to_uclk_khz(unsigned long long bandwidth_kbps,const struct dml2_dram_params * dram_config,struct dml2_mcg_dram_bw_to_min_clk_table * dram_bw_table)10 static double dram_bw_kbps_to_uclk_khz(unsigned long long bandwidth_kbps, const struct dml2_dram_params *dram_config, struct dml2_mcg_dram_bw_to_min_clk_table *dram_bw_table)
11 {
12 double uclk_khz = 0;
13
14 if (!dram_config->alt_clock_bw_conversion) {
15 unsigned long uclk_bytes_per_tick = 0;
16
17 uclk_bytes_per_tick = dram_config->channel_count * dram_config->channel_width_bytes * dram_config->transactions_per_clock;
18 uclk_khz = (double)bandwidth_kbps / uclk_bytes_per_tick;
19 } else {
20 unsigned int i;
21 /* For lpddr5 bytes per tick changes with mpstate, use table to find uclk*/
22 for (i = 0; i < dram_bw_table->num_entries; i++)
23 if (dram_bw_table->entries[i].pre_derate_dram_bw_kbps >= bandwidth_kbps) {
24 uclk_khz = dram_bw_table->entries[i].min_uclk_khz;
25 break;
26 }
27 }
28
29 return uclk_khz;
30 }
31
get_minimum_clocks_for_latency(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out,double * uclk,double * fclk,double * dcfclk)32 static void get_minimum_clocks_for_latency(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out,
33 double *uclk,
34 double *fclk,
35 double *dcfclk)
36 {
37 int min_clock_index_for_latency;
38
39 if (in_out->display_cfg->stage3.success)
40 min_clock_index_for_latency = in_out->display_cfg->stage3.min_clk_index_for_latency;
41 else
42 min_clock_index_for_latency = in_out->display_cfg->stage1.min_clk_index_for_latency;
43
44 *dcfclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_dcfclk_khz;
45 *fclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_fclk_khz;
46 *uclk = dram_bw_kbps_to_uclk_khz(in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].pre_derate_dram_bw_kbps,
47 &in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);
48 }
49
/* Ceiling of a non-negative double, returned as unsigned long. */
static unsigned long dml_round_up(double a)
{
	unsigned long truncated = (unsigned long)a;

	/* Bump by one whenever truncation discarded a fractional part. */
	return (a > (double)truncated) ? truncated + 1 : truncated;
}
57
/*
 * Derive the minimum uclk/fclk/dcfclk for normal (system active) operation.
 *
 * For each clock the bandwidth-based minimum is the larger of the average
 * and urgent requirement (each inflated by its derate percentage), and the
 * programmed minimum is the larger of that and the latency-based minimum.
 * Results are rounded up and written to
 * in_out->programming->min_clocks.dcn4x.active.
 */
static void calculate_system_active_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg, min_uclk_urgent, min_uclk_bw;
	double min_fclk_avg, min_fclk_urgent, min_fclk_bw;
	double min_dcfclk_avg, min_dcfclk_urgent, min_dcfclk_bw;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	/* uclk: derate the average DRAM bandwidth demand, then convert to a clock. */
	min_uclk_avg = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.active.average_bw_dram_kbps
		/ ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dram_derate_percent_pixel / 100)),
		&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);

	/* Urgent traffic uses a different derate when host VM translation is enabled. */
	if (in_out->display_cfg->display_config.hostvm_enable)
		min_uclk_urgent = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.active.urgent_bw_dram_kbps
			/ ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel_and_vm / 100)),
			&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);
	else
		min_uclk_urgent = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.active.urgent_bw_dram_kbps
			/ ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel / 100)),
			&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	/* fclk: SDP bandwidth divided by the fabric return path width, then derated. */
	min_fclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.active.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	/* dcfclk: SDP bandwidth divided by the DCN return bus width, then derated. */
	min_dcfclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.active.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	/* Programmed minimum is max(bandwidth-based, latency-based), rounded up. */
	in_out->programming->min_clocks.dcn4x.active.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.active.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.active.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);
}
103
/*
 * Derive the minimum uclk/fclk/dcfclk for the SVP (subviewport) prefetch
 * phase. Two result sets are produced:
 *  - svp_prefetch: uses the dcn_mall_prefetch derate group (assumes DF
 *    throttling is enabled);
 *  - svp_prefetch_no_throttle: uses the system_active derate group (assumes
 *    DF throttling is disabled).
 * In both cases each clock is max(average, urgent) bandwidth-based demand,
 * then max'ed against the latency-based minimum and rounded up.
 */
static void calculate_svp_prefetch_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg, min_uclk_urgent, min_uclk_bw;
	double min_fclk_avg, min_fclk_urgent, min_fclk_bw;
	double min_dcfclk_avg, min_dcfclk_urgent, min_dcfclk_bw;
	double min_fclk_latency, min_dcfclk_latency;
	double min_uclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	/* assumes DF throttling is enabled */
	min_uclk_avg = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.svp_prefetch.average_bw_dram_kbps
		/ ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.dram_derate_percent_pixel / 100)),
		&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.svp_prefetch.urgent_bw_dram_kbps
		/ ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.dram_derate_percent_pixel / 100)),
		&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	/* fclk: SDP bandwidth over the fabric return path width, then derated. */
	min_fclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	/* dcfclk: SDP bandwidth over the DCN return bus width, then derated. */
	min_dcfclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);

	/* assumes DF throttling is disabled */
	min_uclk_avg = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.svp_prefetch.average_bw_dram_kbps
		/ ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dram_derate_percent_pixel / 100)),
		&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.svp_prefetch.urgent_bw_dram_kbps
		/ ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel / 100)),
		&in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	min_fclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	min_dcfclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch_no_throttle.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);
}
179
calculate_idle_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)180 static void calculate_idle_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
181 {
182 double min_uclk_avg;
183 double min_fclk_avg;
184 double min_dcfclk_avg;
185 double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
186 const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;
187
188 min_uclk_avg = dram_bw_kbps_to_uclk_khz((unsigned long long)(mode_support_result->global.active.average_bw_dram_kbps
189 / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.dram_derate_percent_pixel / 100)),
190 &in_out->soc_bb->clk_table.dram_config, &in_out->min_clk_table->dram_bw_table);
191
192 min_fclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
193 min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.fclk_derate_percent / 100);
194
195 min_dcfclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
196 min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.dcfclk_derate_percent / 100);
197
198 get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);
199
200 in_out->programming->min_clocks.dcn4x.idle.uclk_khz = dml_round_up(min_uclk_avg > min_uclk_latency ? min_uclk_avg : min_uclk_latency);
201 in_out->programming->min_clocks.dcn4x.idle.fclk_khz = dml_round_up(min_fclk_avg > min_fclk_latency ? min_fclk_avg : min_fclk_latency);
202 in_out->programming->min_clocks.dcn4x.idle.dcfclk_khz = dml_round_up(min_dcfclk_avg > min_dcfclk_latency ? min_dcfclk_avg : min_dcfclk_latency);
203 }
204
/*
 * Apply a safety margin to a clock request and round it to the nearest
 * achievable DENTIST DFS divider step (flooring the divider so the rounded
 * clock is never below the margined request).
 *
 * Returns false for degenerate requests (zero clock/VCO, or request above
 * the VCO). On success writes the achievable frequency to *rounded_khz and
 * the matching divider ID to *divider_id.
 */
static bool add_margin_and_round_to_dfs_grainularity(double clock_khz, double margin, unsigned long vco_freq_khz, unsigned long *rounded_khz, uint32_t *divider_id)
{
	/* Divider values are in units of 1/DFS_DIVIDER_RANGE_SCALE_FACTOR;
	 * comments give the effective divide ratio.
	 */
	enum dentist_divider_range {
		DFS_DIVIDER_RANGE_1_START = 8, /* 2.00 */
		DFS_DIVIDER_RANGE_1_STEP = 1, /* 0.25 */
		DFS_DIVIDER_RANGE_2_START = 64, /* 16.00 */
		DFS_DIVIDER_RANGE_2_STEP = 2, /* 0.50 */
		DFS_DIVIDER_RANGE_3_START = 128, /* 32.00 */
		DFS_DIVIDER_RANGE_3_STEP = 4, /* 1.00 */
		DFS_DIVIDER_RANGE_4_START = 248, /* 62.00 */
		DFS_DIVIDER_RANGE_4_STEP = 264, /* 66.00 */
		DFS_DIVIDER_RANGE_SCALE_FACTOR = 4
	};

	enum DFS_base_divider_id {
		DFS_BASE_DID_1 = 0x08,
		DFS_BASE_DID_2 = 0x40,
		DFS_BASE_DID_3 = 0x60,
		DFS_BASE_DID_4 = 0x7e,
		DFS_MAX_DID = 0x7f
	};

	unsigned int dfs_div;

	if (clock_khz < 1 || vco_freq_khz < 1 || clock_khz > vco_freq_khz)
		return false;

	/* Inflate the request by the margin before quantizing. */
	clock_khz = clock_khz * (1.0 + margin);

	/* Floor the divider so the resulting clock is >= the margined request. */
	dfs_div = (unsigned int)((int)DFS_DIVIDER_RANGE_SCALE_FACTOR * (vco_freq_khz / clock_khz));

	/* Translate the divider into a DID, walking ranges from coarsest down. */
	if (dfs_div >= DFS_DIVIDER_RANGE_4_START) {
		uint32_t did = DFS_BASE_DID_4 + ((dfs_div - DFS_DIVIDER_RANGE_4_START) / DFS_DIVIDER_RANGE_4_STEP);

		*divider_id = (did > DFS_MAX_DID) ? DFS_MAX_DID : did;
	} else if (dfs_div >= DFS_DIVIDER_RANGE_3_START) {
		*divider_id = DFS_BASE_DID_3 + ((dfs_div - DFS_DIVIDER_RANGE_3_START) / DFS_DIVIDER_RANGE_3_STEP);
	} else if (dfs_div >= DFS_DIVIDER_RANGE_2_START) {
		*divider_id = DFS_BASE_DID_2 + ((dfs_div - DFS_DIVIDER_RANGE_2_START) / DFS_DIVIDER_RANGE_2_STEP);
	} else if (dfs_div >= DFS_DIVIDER_RANGE_1_START) {
		*divider_id = DFS_BASE_DID_1 + ((dfs_div - DFS_DIVIDER_RANGE_1_START) / DFS_DIVIDER_RANGE_1_STEP);
	} else {
		*divider_id = DFS_BASE_DID_1;
	}

	*rounded_khz = vco_freq_khz * DFS_DIVIDER_RANGE_SCALE_FACTOR / dfs_div;

	return true;
}
256
/*
 * Round dispclk/dpprefclk/dtbrefclk to frequencies reachable from a shared
 * PLL with integer post-dividers.
 *
 * The PLL runs at least 600 MHz and at least the largest requested clock
 * (rounded up to 1 MHz granularity); each output is the PLL frequency over
 * an integer divider capped at 32.
 *
 * Fix: the original guarded only dtbrefclk_khz against 0 but divided by
 * dispclk_khz and dpprefclk_khz unconditionally, so a zero request caused
 * a division by zero. All three inputs now take the same zero -> 0 path.
 */
static bool round_to_non_dfs_granularity(unsigned long dispclk_khz, unsigned long dpprefclk_khz, unsigned long dtbrefclk_khz,
	unsigned long *rounded_dispclk_khz, unsigned long *rounded_dpprefclk_khz, unsigned long *rounded_dtbrefclk_khz)
{
	unsigned long pll_frequency_khz;

	pll_frequency_khz = (unsigned long) math_max2(600000, math_ceil2(math_max3(dispclk_khz, dpprefclk_khz, dtbrefclk_khz), 1000));

	if (dispclk_khz > 0)
		*rounded_dispclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dispclk_khz, 32);
	else
		*rounded_dispclk_khz = 0;

	if (dpprefclk_khz > 0)
		*rounded_dpprefclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dpprefclk_khz, 32);
	else
		*rounded_dpprefclk_khz = 0;

	if (dtbrefclk_khz > 0) {
		*rounded_dtbrefclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dtbrefclk_khz, 32);
	} else {
		*rounded_dtbrefclk_khz = 0;
	}

	return true;
}
276
round_up_and_copy_to_next_dpm(unsigned long min_value,unsigned long * rounded_value,const struct dml2_clk_table * clock_table)277 static bool round_up_and_copy_to_next_dpm(unsigned long min_value, unsigned long *rounded_value, const struct dml2_clk_table *clock_table)
278 {
279 bool result = false;
280 int index = 0;
281
282 if (clock_table->num_clk_values > 2) {
283 while (index < clock_table->num_clk_values && clock_table->clk_values_khz[index] < min_value)
284 index++;
285
286 if (index < clock_table->num_clk_values) {
287 *rounded_value = clock_table->clk_values_khz[index];
288 result = true;
289 }
290 } else if (clock_table->clk_values_khz[clock_table->num_clk_values - 1] >= min_value) {
291 *rounded_value = min_value;
292 result = true;
293 }
294 return result;
295 }
296
/* Round *clock_value up in place to the next supported DPM level. */
static bool round_up_to_next_dpm(unsigned long *clock_value, const struct dml2_clk_table *clock_table)
{
	return round_up_and_copy_to_next_dpm(*clock_value, clock_value, clock_table);
}
301
map_soc_min_clocks_to_dpm_fine_grained(struct dml2_display_cfg_programming * display_cfg,const struct dml2_soc_state_table * state_table)302 static bool map_soc_min_clocks_to_dpm_fine_grained(struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
303 {
304 bool result;
305
306 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.dcfclk_khz, &state_table->dcfclk);
307 if (result)
308 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.fclk_khz, &state_table->fclk);
309 if (result)
310 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.uclk_khz, &state_table->uclk);
311
312 if (result)
313 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.dcfclk_khz, &state_table->dcfclk);
314 if (result)
315 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.fclk_khz, &state_table->fclk);
316 if (result)
317 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.uclk_khz, &state_table->uclk);
318
319 if (result)
320 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.dcfclk_khz, &state_table->dcfclk);
321 if (result)
322 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.fclk_khz, &state_table->fclk);
323 if (result)
324 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.uclk_khz, &state_table->uclk);
325
326 /* these clocks are optional, so they can fail to map, in which case map all to 0 */
327 if (result) {
328 if (!round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.dcfclk_khz, &state_table->dcfclk) ||
329 !round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz, &state_table->fclk) ||
330 !round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz, &state_table->uclk)) {
331 display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.dcfclk_khz = 0;
332 display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz = 0;
333 display_cfg->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz = 0;
334 }
335 }
336
337 return result;
338 }
339
map_soc_min_clocks_to_dpm_coarse_grained(struct dml2_display_cfg_programming * display_cfg,const struct dml2_soc_state_table * state_table)340 static bool map_soc_min_clocks_to_dpm_coarse_grained(struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
341 {
342 bool result;
343 int index;
344
345 result = false;
346 for (index = 0; index < state_table->uclk.num_clk_values; index++) {
347 if (display_cfg->min_clocks.dcn4x.active.dcfclk_khz <= state_table->dcfclk.clk_values_khz[index] &&
348 display_cfg->min_clocks.dcn4x.active.fclk_khz <= state_table->fclk.clk_values_khz[index] &&
349 display_cfg->min_clocks.dcn4x.active.uclk_khz <= state_table->uclk.clk_values_khz[index]) {
350 display_cfg->min_clocks.dcn4x.active.dcfclk_khz = state_table->dcfclk.clk_values_khz[index];
351 display_cfg->min_clocks.dcn4x.active.fclk_khz = state_table->fclk.clk_values_khz[index];
352 display_cfg->min_clocks.dcn4x.active.uclk_khz = state_table->uclk.clk_values_khz[index];
353 result = true;
354 break;
355 }
356 }
357
358 if (result) {
359 result = false;
360 for (index = 0; index < state_table->uclk.num_clk_values; index++) {
361 if (display_cfg->min_clocks.dcn4x.idle.dcfclk_khz <= state_table->dcfclk.clk_values_khz[index] &&
362 display_cfg->min_clocks.dcn4x.idle.fclk_khz <= state_table->fclk.clk_values_khz[index] &&
363 display_cfg->min_clocks.dcn4x.idle.uclk_khz <= state_table->uclk.clk_values_khz[index]) {
364 display_cfg->min_clocks.dcn4x.idle.dcfclk_khz = state_table->dcfclk.clk_values_khz[index];
365 display_cfg->min_clocks.dcn4x.idle.fclk_khz = state_table->fclk.clk_values_khz[index];
366 display_cfg->min_clocks.dcn4x.idle.uclk_khz = state_table->uclk.clk_values_khz[index];
367 result = true;
368 break;
369 }
370 }
371 }
372
373 // SVP is not supported on any coarse grained SoCs
374 display_cfg->min_clocks.dcn4x.svp_prefetch.dcfclk_khz = 0;
375 display_cfg->min_clocks.dcn4x.svp_prefetch.fclk_khz = 0;
376 display_cfg->min_clocks.dcn4x.svp_prefetch.uclk_khz = 0;
377
378 return result;
379 }
380
map_min_clocks_to_dpm(const struct dml2_core_mode_support_result * mode_support_result,struct dml2_display_cfg_programming * display_cfg,const struct dml2_soc_state_table * state_table)381 static bool map_min_clocks_to_dpm(const struct dml2_core_mode_support_result *mode_support_result, struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
382 {
383 bool result = false;
384 bool dcfclk_fine_grained = false, fclk_fine_grained = false, clock_state_count_identical = false;
385 unsigned int i;
386
387 if (!state_table || !display_cfg)
388 return false;
389
390 if (state_table->dcfclk.num_clk_values == 2) {
391 dcfclk_fine_grained = true;
392 }
393
394 if (state_table->fclk.num_clk_values == 2) {
395 fclk_fine_grained = true;
396 }
397
398 if (state_table->fclk.num_clk_values == state_table->dcfclk.num_clk_values &&
399 state_table->fclk.num_clk_values == state_table->uclk.num_clk_values) {
400 clock_state_count_identical = true;
401 }
402
403 if (dcfclk_fine_grained || fclk_fine_grained || !clock_state_count_identical)
404 result = map_soc_min_clocks_to_dpm_fine_grained(display_cfg, state_table);
405 else
406 result = map_soc_min_clocks_to_dpm_coarse_grained(display_cfg, state_table);
407
408 if (result)
409 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dispclk_khz, &state_table->dispclk);
410
411 for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
412 if (result)
413 result = round_up_to_next_dpm(&display_cfg->plane_programming[i].min_clocks.dcn4x.dppclk_khz, &state_table->dppclk);
414 }
415
416 for (i = 0; i < display_cfg->display_config.num_streams; i++) {
417 if (result)
418 result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].dscclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.dscclk_khz, &state_table->dscclk);
419 if (result)
420 result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].dtbclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.dtbclk_khz, &state_table->dtbclk);
421 if (result)
422 result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].phyclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.phyclk_khz, &state_table->phyclk);
423 }
424
425 if (result)
426 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dpprefclk_khz, &state_table->dppclk);
427
428 if (result)
429 result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dtbrefclk_khz, &state_table->dtbclk);
430
431 return result;
432 }
433
are_timings_trivially_synchronizable(struct dml2_display_cfg * display_config,int mask)434 static bool are_timings_trivially_synchronizable(struct dml2_display_cfg *display_config, int mask)
435 {
436 unsigned int i;
437 bool identical = true;
438 bool contains_drr = false;
439 unsigned int remap_array[DML2_MAX_PLANES];
440 unsigned int remap_array_size = 0;
441
442 // Create a remap array to enable simple iteration through only masked stream indicies
443 for (i = 0; i < display_config->num_streams; i++) {
444 if (mask & (0x1 << i)) {
445 remap_array[remap_array_size++] = i;
446 }
447 }
448
449 // 0 or 1 display is always trivially synchronizable
450 if (remap_array_size <= 1)
451 return true;
452
453 // Check that all displays timings are the same
454 for (i = 1; i < remap_array_size; i++) {
455 if (memcmp(&display_config->stream_descriptors[remap_array[i - 1]].timing, &display_config->stream_descriptors[remap_array[i]].timing, sizeof(struct dml2_timing_cfg))) {
456 identical = false;
457 break;
458 }
459 }
460
461 // Check if any displays are drr
462 for (i = 0; i < remap_array_size; i++) {
463 if (display_config->stream_descriptors[remap_array[i]].timing.drr_config.enabled) {
464 contains_drr = true;
465 break;
466 }
467 }
468
469 // Trivial sync is possible if all displays are identical and none are DRR
470 return !contains_drr && identical;
471 }
472
find_smallest_idle_time_in_vblank_us(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out,int mask)473 static int find_smallest_idle_time_in_vblank_us(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int mask)
474 {
475 unsigned int i;
476 int min_idle_us = 0;
477 unsigned int remap_array[DML2_MAX_PLANES];
478 unsigned int remap_array_size = 0;
479 const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;
480
481 // Create a remap array to enable simple iteration through only masked stream indicies
482 for (i = 0; i < in_out->programming->display_config.num_streams; i++) {
483 if (mask & (0x1 << i)) {
484 remap_array[remap_array_size++] = i;
485 }
486 }
487
488 if (remap_array_size == 0)
489 return 0;
490
491 min_idle_us = mode_support_result->cfg_support_info.stream_support_info[remap_array[0]].vblank_reserved_time_us;
492
493 for (i = 1; i < remap_array_size; i++) {
494 if (min_idle_us > mode_support_result->cfg_support_info.stream_support_info[remap_array[i]].vblank_reserved_time_us)
495 min_idle_us = mode_support_result->cfg_support_info.stream_support_info[remap_array[i]].vblank_reserved_time_us;
496 }
497
498 return min_idle_us;
499 }
500
determine_power_management_features_with_vblank_only(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)501 static bool determine_power_management_features_with_vblank_only(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
502 {
503 int min_idle_us;
504
505 if (are_timings_trivially_synchronizable(&in_out->programming->display_config, 0xF)) {
506 min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, 0xF);
507
508 if (min_idle_us >= in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us)
509 in_out->programming->uclk_pstate_supported = true;
510
511 if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
512 in_out->programming->fclk_pstate_supported = true;
513 }
514
515 return true;
516 }
517
get_displays_without_vactive_margin_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out,int latency_hiding_requirement_us)518 static int get_displays_without_vactive_margin_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int latency_hiding_requirement_us)
519 {
520 unsigned int i;
521 int displays_without_vactive_margin_mask = 0x0;
522 const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;
523
524 for (i = 0; i < in_out->programming->display_config.num_planes; i++) {
525 if (mode_support_result->cfg_support_info.plane_support_info[i].active_latency_hiding_us
526 < latency_hiding_requirement_us)
527 displays_without_vactive_margin_mask |= (0x1 << i);
528 }
529
530 return displays_without_vactive_margin_mask;
531 }
532
get_displays_with_fams_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out,int latency_hiding_requirement_us)533 static int get_displays_with_fams_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int latency_hiding_requirement_us)
534 {
535 unsigned int i;
536 int displays_with_fams_mask = 0x0;
537
538 for (i = 0; i < in_out->programming->display_config.num_planes; i++) {
539 if (in_out->programming->display_config.plane_descriptors->overrides.legacy_svp_config != dml2_svp_mode_override_auto)
540 displays_with_fams_mask |= (0x1 << i);
541 }
542
543 return displays_with_fams_mask;
544 }
545
determine_power_management_features_with_vactive_and_vblank(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)546 static bool determine_power_management_features_with_vactive_and_vblank(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
547 {
548 int displays_without_vactive_margin_mask = 0x0;
549 int min_idle_us = 0;
550
551 if (in_out->programming->uclk_pstate_supported == false) {
552 displays_without_vactive_margin_mask =
553 get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));
554
555 if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
556 min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);
557
558 if (min_idle_us >= in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us)
559 in_out->programming->uclk_pstate_supported = true;
560 }
561 }
562
563 if (in_out->programming->fclk_pstate_supported == false) {
564 displays_without_vactive_margin_mask =
565 get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.fclk_change_blackout_us));
566
567 if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
568 min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);
569
570 if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
571 in_out->programming->fclk_pstate_supported = true;
572 }
573 }
574
575 return true;
576 }
577
determine_power_management_features_with_fams(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)578 static bool determine_power_management_features_with_fams(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
579 {
580 int displays_without_vactive_margin_mask = 0x0;
581 int displays_without_fams_mask = 0x0;
582
583 displays_without_vactive_margin_mask =
584 get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));
585
586 displays_without_fams_mask =
587 get_displays_with_fams_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));
588
589 if ((displays_without_vactive_margin_mask & ~displays_without_fams_mask) == 0)
590 in_out->programming->uclk_pstate_supported = true;
591
592 return true;
593 }
594
clamp_uclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)595 static void clamp_uclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
596 {
597 in_out->programming->min_clocks.dcn4x.active.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
598 in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
599 in_out->programming->min_clocks.dcn4x.idle.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
600 }
601
clamp_fclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)602 static void clamp_fclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
603 {
604 in_out->programming->min_clocks.dcn4x.active.fclk_khz = in_out->soc_bb->clk_table.fclk.clk_values_khz[in_out->soc_bb->clk_table.fclk.num_clk_values - 1];
605 in_out->programming->min_clocks.dcn4x.idle.fclk_khz = in_out->soc_bb->clk_table.fclk.clk_values_khz[in_out->soc_bb->clk_table.fclk.num_clk_values - 1];
606 }
607
/*
 * Derive the minimum SoC clock programming for the given mode:
 *  - compute active / SVP-prefetch / idle minimums,
 *  - boost active minimums up to prefetch minimums (NV4 limitation),
 *  - apply downspread and ramp margins to dispclk / dpprefclk / dtbrefclk
 *    and round them to DFS (or non-DFS) granularity,
 *  - derive per-plane dppclk from the chosen dpprefclk,
 *  - map the resulting minimums onto discrete DPM levels.
 *
 * Returns the result of map_min_clocks_to_dpm(); also resets both p-state
 * supported flags to false so callers re-evaluate them afterwards.
 */
static bool map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int i;
	bool result;
	double dispclk_khz;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	calculate_system_active_minimums(in_out);
	calculate_svp_prefetch_minimums(in_out);
	calculate_idle_minimums(in_out);

	// In NV4, there's no support for FCLK or DCFCLK DPM change before SVP prefetch starts, therefore
	// active minimums must be boosted to prefetch minimums
	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz > in_out->programming->min_clocks.dcn4x.active.uclk_khz)
		in_out->programming->min_clocks.dcn4x.active.uclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz;

	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz > in_out->programming->min_clocks.dcn4x.active.fclk_khz)
		in_out->programming->min_clocks.dcn4x.active.fclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz;

	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz > in_out->programming->min_clocks.dcn4x.active.dcfclk_khz)
		in_out->programming->min_clocks.dcn4x.active.dcfclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz;

	// need some massaging for the dispclk ramping cases:
	// apply both downspread and the ramp margin on top of the required dispclk
	dispclk_khz = mode_support_result->global.dispclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0) * (1.0 + in_out->ip->dispclk_ramp_margin_percent / 100.0);
	// ramping margin should not make dispclk exceed the maximum dispclk speed:
	dispclk_khz = math_min2(dispclk_khz, in_out->min_clk_table->max_clocks_khz.dispclk);
	// but still the required dispclk can be more than the maximum dispclk speed:
	// (downspread-adjusted requirement wins over the table cap)
	dispclk_khz = math_max2(dispclk_khz, mode_support_result->global.dispclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	// DPP Ref is always set to max of all DPP clocks
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (in_out->programming->min_clocks.dcn4x.dpprefclk_khz < mode_support_result->per_plane[i].dppclk_khz)
			in_out->programming->min_clocks.dcn4x.dpprefclk_khz = mode_support_result->per_plane[i].dppclk_khz;
	}
	// downspread margin on the chosen dpprefclk
	in_out->programming->min_clocks.dcn4x.dpprefclk_khz = (unsigned long) (in_out->programming->min_clocks.dcn4x.dpprefclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	// DTB Ref is always set to max of all DTB clocks
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (in_out->programming->min_clocks.dcn4x.dtbrefclk_khz < mode_support_result->per_stream[i].dtbclk_khz)
			in_out->programming->min_clocks.dcn4x.dtbrefclk_khz = mode_support_result->per_stream[i].dtbclk_khz;
	}
	// downspread margin on the chosen dtbrefclk
	in_out->programming->min_clocks.dcn4x.dtbrefclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dtbrefclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	if (in_out->soc_bb->no_dfs) {
		// no DFS: snap all three clocks to the non-DFS granularity
		round_to_non_dfs_granularity((unsigned long)dispclk_khz, in_out->programming->min_clocks.dcn4x.dpprefclk_khz, in_out->programming->min_clocks.dcn4x.dtbrefclk_khz,
			&in_out->programming->min_clocks.dcn4x.dispclk_khz, &in_out->programming->min_clocks.dcn4x.dpprefclk_khz, &in_out->programming->min_clocks.dcn4x.dtbrefclk_khz);
	} else {
		// DFS: round each clock to a divider of the dispclk/dppclk VCO and record the divider id
		add_margin_and_round_to_dfs_grainularity(dispclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dispclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dispclk_did);

		add_margin_and_round_to_dfs_grainularity(in_out->programming->min_clocks.dcn4x.dpprefclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dpprefclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dpprefclk_did);

		add_margin_and_round_to_dfs_grainularity(in_out->programming->min_clocks.dcn4x.dtbrefclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dtbrefclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dtbrefclk_did);
	}

	// per-plane dppclk: express the (downspread-adjusted) requirement as a
	// 255-step fraction of dpprefclk, rounded up to the next step
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		in_out->programming->plane_programming[i].min_clocks.dcn4x.dppclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dpprefclk_khz / 255.0
			* math_ceil2(in_out->display_cfg->mode_support_result.per_plane[i].dppclk_khz * (1.0 + in_out->soc_bb->dcn_downspread_percent / 100.0) * 255.0 / in_out->programming->min_clocks.dcn4x.dpprefclk_khz, 1.0));
	}

	in_out->programming->min_clocks.dcn4x.deepsleep_dcfclk_khz = mode_support_result->global.dcfclk_deepsleep_khz;
	in_out->programming->min_clocks.dcn4x.socclk_khz = mode_support_result->global.socclk_khz;

	result = map_min_clocks_to_dpm(mode_support_result, in_out->programming, &in_out->soc_bb->clk_table);

	// By default, all power management features are not enabled
	in_out->programming->fclk_pstate_supported = false;
	in_out->programming->uclk_pstate_supported = false;

	return result;
}
682
dpmm_dcn3_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out * in_out)683 bool dpmm_dcn3_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
684 {
685 bool result;
686
687 result = map_mode_to_soc_dpm(in_out);
688
689 // Check if any can be enabled by nominal vblank idle time
690 determine_power_management_features_with_vblank_only(in_out);
691
692 // Check if any can be enabled in vactive/vblank
693 determine_power_management_features_with_vactive_and_vblank(in_out);
694
695 // Check if any can be enabled via fams
696 determine_power_management_features_with_fams(in_out);
697
698 if (in_out->programming->uclk_pstate_supported == false)
699 clamp_uclk_to_max(in_out);
700
701 if (in_out->programming->fclk_pstate_supported == false)
702 clamp_fclk_to_max(in_out);
703
704 return result;
705 }
706
/*
 * DCN4 entry point: map the mode to SoC DPM levels, then decide uclk/fclk
 * p-state support and the stutter / z8 stutter capabilities in blank.
 *
 * Returns the result of the underlying DPM mapping.
 */
bool dpmm_dcn4_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	bool result;
	int displays_without_vactive_margin_mask = 0x0;
	/* NOTE(review): min_idle_us is an int but is compared against double
	 * microsecond latencies below, so any fractional idle time is
	 * truncated — confirm this rounding is intended. */
	int min_idle_us = 0;

	result = map_mode_to_soc_dpm(in_out);

	/* Stage 3 success implies uclk p-state is achievable for this config
	 * (presumably the p-state-aware scheduling stage — see stage3 usage). */
	if (in_out->display_cfg->stage3.success)
		in_out->programming->uclk_pstate_supported = true;

	/* Displays that cannot hide the fclk change blackout in vactive. */
	displays_without_vactive_margin_mask =
		get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.fclk_change_blackout_us));

	if (displays_without_vactive_margin_mask == 0) {
		/* Every display has enough vactive margin. */
		in_out->programming->fclk_pstate_supported = true;
	} else {
		/* Otherwise the affected displays must be synchronizable and share
		 * a vblank window long enough to cover the blackout. */
		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
				in_out->programming->fclk_pstate_supported = true;
		}
	}

	/* Clocks without p-state support are pinned at max. */
	if (in_out->programming->uclk_pstate_supported == false)
		clamp_uclk_to_max(in_out);

	if (in_out->programming->fclk_pstate_supported == false)
		clamp_fclk_to_max(in_out);

	/* Stutter checks consider all displays (mask 0xFF). */
	min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, 0xFF);

	/* Stutter in blank: smallest vblank idle must cover enter+exit latency;
	 * a zero latency means the feature is unavailable. */
	if (in_out->soc_bb->power_management_parameters.stutter_enter_plus_exit_latency_us > 0 &&
		min_idle_us >= in_out->soc_bb->power_management_parameters.stutter_enter_plus_exit_latency_us)
		in_out->programming->stutter.supported_in_blank = true;
	else
		in_out->programming->stutter.supported_in_blank = false;

	// TODO: Fix me Sam
	/* z8 "meets eco": stutter period must reach the z8 minimum idle time. */
	if (in_out->soc_bb->power_management_parameters.z8_min_idle_time > 0 &&
		in_out->programming->informative.power_management.z8.stutter_period >= in_out->soc_bb->power_management_parameters.z8_min_idle_time)
		in_out->programming->z8_stutter.meets_eco = true;
	else
		in_out->programming->z8_stutter.meets_eco = false;

	/* z8 stutter in blank: smallest vblank idle must cover z8 exit latency. */
	if (in_out->soc_bb->power_management_parameters.z8_stutter_exit_latency_us > 0 &&
		min_idle_us >= in_out->soc_bb->power_management_parameters.z8_stutter_exit_latency_us)
		in_out->programming->z8_stutter.supported_in_blank = true;
	else
		in_out->programming->z8_stutter.supported_in_blank = false;

	return result;
}
760
dpmm_dcn4_map_watermarks(struct dml2_dpmm_map_watermarks_params_in_out * in_out)761 bool dpmm_dcn4_map_watermarks(struct dml2_dpmm_map_watermarks_params_in_out *in_out)
762 {
763 const struct dml2_display_cfg *display_cfg = &in_out->display_cfg->display_config;
764 const struct dml2_core_internal_display_mode_lib *mode_lib = &in_out->core->clean_me_up.mode_lib;
765 struct dml2_dchub_global_register_set *dchubbub_regs = &in_out->programming->global_regs;
766
767 double refclk_freq_in_mhz = (display_cfg->overrides.hw.dlg_ref_clk_mhz > 0) ? (double)display_cfg->overrides.hw.dlg_ref_clk_mhz : mode_lib->soc.dchub_refclk_mhz;
768
769 /* set A */
770 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].fclk_pstate = (int unsigned)(mode_lib->mp.Watermark.FCLKChangeWatermark * refclk_freq_in_mhz);
771 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_enter = (int unsigned)(mode_lib->mp.Watermark.StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
772 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_exit = (int unsigned)(mode_lib->mp.Watermark.StutterExitWatermark * refclk_freq_in_mhz);
773 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_enter_z8 = (int unsigned)(mode_lib->mp.Watermark.Z8StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
774 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_exit_z8 = (int unsigned)(mode_lib->mp.Watermark.Z8StutterExitWatermark * refclk_freq_in_mhz);
775 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].temp_read_or_ppt = (int unsigned)(mode_lib->mp.Watermark.temp_read_or_ppt_watermark_us * refclk_freq_in_mhz);
776 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].uclk_pstate = (int unsigned)(mode_lib->mp.Watermark.DRAMClockChangeWatermark * refclk_freq_in_mhz);
777 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].urgent = (int unsigned)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
778 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].usr = (int unsigned)(mode_lib->mp.Watermark.USRRetrainingWatermark * refclk_freq_in_mhz);
779 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].refcyc_per_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
780 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].refcyc_per_meta_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
781 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_flip = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthImmediateFlip * 1000);
782 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_nom = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidth * 1000);
783 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_mall = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthMALL * 1000);
784
785 /* set B */
786 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].fclk_pstate = (int unsigned)(mode_lib->mp.Watermark.FCLKChangeWatermark * refclk_freq_in_mhz);
787 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_enter = (int unsigned)(mode_lib->mp.Watermark.StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
788 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_exit = (int unsigned)(mode_lib->mp.Watermark.StutterExitWatermark * refclk_freq_in_mhz);
789 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_enter_z8 = (int unsigned)(mode_lib->mp.Watermark.Z8StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
790 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_exit_z8 = (int unsigned)(mode_lib->mp.Watermark.Z8StutterExitWatermark * refclk_freq_in_mhz);
791 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].temp_read_or_ppt = (int unsigned)(mode_lib->mp.Watermark.temp_read_or_ppt_watermark_us * refclk_freq_in_mhz);
792 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].uclk_pstate = (int unsigned)(mode_lib->mp.Watermark.DRAMClockChangeWatermark * refclk_freq_in_mhz);
793 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].urgent = (int unsigned)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
794 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].usr = (int unsigned)(mode_lib->mp.Watermark.USRRetrainingWatermark * refclk_freq_in_mhz);
795 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].refcyc_per_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
796 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].refcyc_per_meta_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
797 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_flip = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthImmediateFlip * 1000);
798 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_nom = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidth * 1000);
799 dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_mall = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthMALL * 1000);
800
801 dchubbub_regs->num_watermark_sets = 2;
802
803 return true;
804 }
805