/* SPDX-License-Identifier: MIT */
/*
 * Copyright 2023 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "display_mode_core.h"
#include "dml2_internal_types.h"
#include "dml2_utils.h"
#include "dml2_policy.h"
#include "dml2_translation_helper.h"
#include "dml2_mall_phantom.h"
#include "dml2_dc_resource_mgmt.h"
#include "dml21_wrapper.h"

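/*
 * These helpers pick between native DML2 parameter construction and
 * translation from the legacy dc structures, based on
 * config.use_native_soc_bb_construction.
 */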
static void initialize_dml2_ip_params(struct dml2_context *dml2, const struct dc *in_dc, struct ip_params_st *out)
{
	if (dml2->config.use_native_soc_bb_construction)
		dml2_init_ip_params(dml2, in_dc, out);
	else
		dml2_translate_ip_params(in_dc, out);
}

static void initialize_dml2_soc_bbox(struct dml2_context *dml2, const struct dc *in_dc, struct soc_bounding_box_st *out)
{
	if (dml2->config.use_native_soc_bb_construction)
		dml2_init_socbb_params(dml2, in_dc, out);
	else
		dml2_translate_socbb_params(in_dc, out);
}

static void initialize_dml2_soc_states(struct dml2_context *dml2,
	const struct dc *in_dc, const struct soc_bounding_box_st *in_bbox, struct soc_states_st *out)
{
	if (dml2->config.use_native_soc_bb_construction)
		dml2_init_soc_states(dml2, in_dc, in_bbox, out);
	else
		dml2_translate_soc_states(in_dc, out, in_dc->dml.soc.num_states);
}

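/*
 * Copy the hardware-dependent decisions from mode support (ODM mode, DPPs
 * per surface, DSC state) back into the display config, and record the DML
 * pipe index to stream/plane ID mapping in scratch so dc pipes can later be
 * matched with DML pipes.
 */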
static void map_hw_resources(struct dml2_context *dml2,
		struct dml_display_cfg_st *in_out_display_cfg, struct dml_mode_support_info_st *mode_support_info)
{
	unsigned int num_pipes = 0;
	int i, j;

	for (i = 0; i < __DML_NUM_PLANES__; i++) {
		in_out_display_cfg->hw.ODMMode[i] = mode_support_info->ODMMode[i];
		in_out_display_cfg->hw.DPPPerSurface[i] = mode_support_info->DPPPerSurface[i];
		in_out_display_cfg->hw.DSCEnabled[i] = mode_support_info->DSCEnabled[i];
		in_out_display_cfg->hw.NumberOfDSCSlices[i] = mode_support_info->NumberOfDSCSlices[i];
		in_out_display_cfg->hw.DLGRefClkFreqMHz = 24;
		if (dml2->v20.dml_core_ctx.project != dml_project_dcn35 &&
			dml2->v20.dml_core_ctx.project != dml_project_dcn36 &&
			dml2->v20.dml_core_ctx.project != dml_project_dcn351) {
			/* dGPU default is 50 MHz */
			in_out_display_cfg->hw.DLGRefClkFreqMHz = 50;
		}
		for (j = 0; j < mode_support_info->DPPPerSurface[i]; j++) {
			if (i >= __DML2_WRAPPER_MAX_STREAMS_PLANES__) {
				dml_print("DML::%s: Index out of bounds: i=%d, __DML2_WRAPPER_MAX_STREAMS_PLANES__=%d\n",
					  __func__, i, __DML2_WRAPPER_MAX_STREAMS_PLANES__);
				break;
			}
			dml2->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_stream_id[num_pipes] = dml2->v20.scratch.dml_to_dc_pipe_mapping.disp_cfg_to_stream_id[i];
			dml2->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_stream_id_valid[num_pipes] = true;
			dml2->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_plane_id[num_pipes] = dml2->v20.scratch.dml_to_dc_pipe_mapping.disp_cfg_to_plane_id[i];
			dml2->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_plane_id_valid[num_pipes] = true;
			num_pipes++;
		}
	}
}

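/*
 * Pack the scratch parameter block and call dml_mode_support_ex(). For
 * DC_VALIDATE_MODE_ONLY the search starts at the highest power state;
 * otherwise it starts at state 0 so the lowest supported state is found.
 */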
static unsigned int pack_and_call_dml_mode_support_ex(struct dml2_context *dml2,
	const struct dml_display_cfg_st *display_cfg,
	struct dml_mode_support_info_st *evaluation_info,
	enum dc_validate_mode validate_mode)
{
	struct dml2_wrapper_scratch *s = &dml2->v20.scratch;

	s->mode_support_params.mode_lib = &dml2->v20.dml_core_ctx;
	s->mode_support_params.in_display_cfg = display_cfg;
	if (validate_mode == DC_VALIDATE_MODE_ONLY)
		s->mode_support_params.in_start_state_idx = dml2->v20.dml_core_ctx.states.num_states - 1;
	else
		s->mode_support_params.in_start_state_idx = 0;
	s->mode_support_params.out_evaluation_info = evaluation_info;

	memset(evaluation_info, 0, sizeof(struct dml_mode_support_info_st));
	s->mode_support_params.out_lowest_state_idx = 0;

	return dml_mode_support_ex(&s->mode_support_params);
}

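/*
 * Attempt one improvement of a passing configuration per call. With native
 * p-state optimization, the sub-120Hz single-plane timing with the longest
 * frame time is converted to SubVP by appending a phantom timing/surface
 * pair. Otherwise, the ODM policy is relaxed (2to1/4to1) to spend unused
 * DPPs on lowering dispclk. Returns true if new_display_config/new_policy
 * differ from the current ones.
 */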
static bool optimize_configuration(struct dml2_context *dml2, struct dml2_wrapper_optimize_configuration_params *p)
{
	int unused_dpps = p->ip_params->max_num_dpp;
	int i, j;
	int odms_needed, refresh_rate_hz, dpps_needed, subvp_height, pstate_width_fw_delay_lines, surface_count;
	int subvp_timing_to_add, new_timing_index, subvp_surface_to_add, new_surface_index;
	float frame_time_sec, max_frame_time_sec;
	int largest_blend_and_timing = 0;
	bool optimization_done = false;

	for (i = 0; i < (int) p->cur_display_config->num_timings; i++) {
		if (p->cur_display_config->plane.BlendingAndTiming[i] > largest_blend_and_timing)
			largest_blend_and_timing = p->cur_display_config->plane.BlendingAndTiming[i];
	}

	if (p->new_policy != p->cur_policy)
		*p->new_policy = *p->cur_policy;

	if (p->new_display_config != p->cur_display_config)
		*p->new_display_config = *p->cur_display_config;

	// Optimize P-State Support
	if (dml2->config.use_native_pstate_optimization) {
		if (p->cur_mode_support_info->DRAMClockChangeSupport[0] == dml_dram_clock_change_unsupported) {
			// Find the timing with the longest frame time (lowest refresh rate below 120Hz) that isn't already subvp
			subvp_timing_to_add = -1;
			subvp_surface_to_add = -1;
			max_frame_time_sec = 0;
			surface_count = 0;
			for (i = 0; i < (int) p->cur_display_config->num_timings; i++) {
				refresh_rate_hz = (int)div_u64((unsigned long long) p->cur_display_config->timing.PixelClock[i] * 1000 * 1000,
					(p->cur_display_config->timing.HTotal[i] * p->cur_display_config->timing.VTotal[i]));
				if (refresh_rate_hz < 120) {
					// Check its upstream surfaces to see if this one could be converted to subvp.
					dpps_needed = 0;
					for (j = 0; j < (int) p->cur_display_config->num_surfaces; j++) {
						if (p->cur_display_config->plane.BlendingAndTiming[j] == i &&
							p->cur_display_config->plane.UseMALLForPStateChange[j] == dml_use_mall_pstate_change_disable) {
							dpps_needed += p->cur_mode_support_info->DPPPerSurface[j];
							subvp_surface_to_add = j;
							surface_count++;
						}
					}

					if (surface_count == 1 && dpps_needed > 0 && dpps_needed <= unused_dpps) {
						frame_time_sec = (float)1 / refresh_rate_hz;
						if (frame_time_sec > max_frame_time_sec) {
							max_frame_time_sec = frame_time_sec;
							subvp_timing_to_add = i;
						}
					}
				}
			}
			if (subvp_timing_to_add >= 0) {
				new_timing_index = p->new_display_config->num_timings++;
				new_surface_index = p->new_display_config->num_surfaces++;
				// Add a phantom pipe reflecting the main pipe's timing
				dml2_util_copy_dml_timing(&p->new_display_config->timing, new_timing_index, subvp_timing_to_add);

				pstate_width_fw_delay_lines = (int)(((double)(p->config->svp_pstate.subvp_fw_processing_delay_us +
					p->config->svp_pstate.subvp_pstate_allow_width_us) / 1000000) *
					(p->new_display_config->timing.PixelClock[subvp_timing_to_add] * 1000 * 1000) /
					(double)p->new_display_config->timing.HTotal[subvp_timing_to_add]);

				subvp_height = p->cur_mode_support_info->SubViewportLinesNeededInMALL[subvp_timing_to_add] + pstate_width_fw_delay_lines;

				p->new_display_config->timing.VActive[new_timing_index] = subvp_height;
				p->new_display_config->timing.VTotal[new_timing_index] = subvp_height +
					p->new_display_config->timing.VTotal[subvp_timing_to_add] - p->new_display_config->timing.VActive[subvp_timing_to_add];

				p->new_display_config->output.OutputDisabled[new_timing_index] = true;

				p->new_display_config->plane.UseMALLForPStateChange[subvp_surface_to_add] = dml_use_mall_pstate_change_sub_viewport;

				dml2_util_copy_dml_plane(&p->new_display_config->plane, new_surface_index, subvp_surface_to_add);
				dml2_util_copy_dml_surface(&p->new_display_config->surface, new_surface_index, subvp_surface_to_add);

				p->new_display_config->plane.ViewportHeight[new_surface_index] = subvp_height;
				p->new_display_config->plane.ViewportHeightChroma[new_surface_index] = subvp_height;
				p->new_display_config->plane.ViewportStationary[new_surface_index] = false;

				p->new_display_config->plane.UseMALLForStaticScreen[new_surface_index] = dml_use_mall_static_screen_disable;
				p->new_display_config->plane.UseMALLForPStateChange[new_surface_index] = dml_use_mall_pstate_change_phantom_pipe;

				p->new_display_config->plane.NumberOfCursors[new_surface_index] = 0;

				p->new_policy->ImmediateFlipRequirement[new_surface_index] = dml_immediate_flip_not_required;

				p->new_display_config->plane.BlendingAndTiming[new_surface_index] = new_timing_index;

				optimization_done = true;
			}
		}
	}

	// Optimize Clocks
	if (!optimization_done) {
		if (largest_blend_and_timing == 0 && p->cur_policy->ODMUse[0] == dml_odm_use_policy_combine_as_needed && dml2->config.minimize_dispclk_using_odm) {
			odms_needed = dml2_util_get_maximum_odm_combine_for_output(dml2->config.optimize_odm_4to1,
				p->cur_display_config->output.OutputEncoder[0], p->cur_mode_support_info->DSCEnabled[0]) - 1;

			if (odms_needed <= unused_dpps) {
				if (odms_needed == 1) {
					p->new_policy->ODMUse[0] = dml_odm_use_policy_combine_2to1;
					optimization_done = true;
				} else if (odms_needed == 3) {
					p->new_policy->ODMUse[0] = dml_odm_use_policy_combine_4to1;
					optimization_done = true;
				} else
					optimization_done = false;
			}
		}
	}

	return optimization_done;
}

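/*
 * Find the lowest power state that still supports the configuration when
 * DRAM clock change latency is overridden with each dummy p-state latency
 * in turn (the g6_temp_read case, presumably covering GDDR6 temperature
 * reads). Returns the lowest usable state index, or -1 if no dummy latency
 * allows vactive DRAM clock switching.
 */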
static int calculate_lowest_supported_state_for_temp_read(struct dml2_context *dml2, struct dc_state *display_state,
		enum dc_validate_mode validate_mode)
{
	struct dml2_calculate_lowest_supported_state_for_temp_read_scratch *s = &dml2->v20.scratch.dml2_calculate_lowest_supported_state_for_temp_read_scratch;
	struct dml2_wrapper_scratch *s_global = &dml2->v20.scratch;

	unsigned int dml_result = 0;
	int result = -1, i, j;

	build_unoptimized_policy_settings(dml2->v20.dml_core_ctx.project, &dml2->v20.dml_core_ctx.policy);

	/* Zero out before each call before proceeding */
	memset(s, 0, sizeof(struct dml2_calculate_lowest_supported_state_for_temp_read_scratch));
	memset(&s_global->mode_support_params, 0, sizeof(struct dml_mode_support_ex_params_st));
	memset(&s_global->dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));

	for (i = 0; i < dml2->config.dcn_pipe_count; i++) {
		/* Calling resource_build_scaling_params will populate the pipe params
		 * with the necessary information needed for correct DML calculations.
		 * This is also done in the DML1 driver code path and hence display_state
		 * cannot be const.
		 */
		struct pipe_ctx *pipe = &display_state->res_ctx.pipe_ctx[i];

		if (pipe->plane_state) {
			if (!dml2->config.callbacks.build_scaling_params(pipe)) {
				ASSERT(false);
				/* Return -1, not false: 0 would read as a valid state index. */
				return -1;
			}
		}
	}

	map_dc_state_into_dml_display_cfg(dml2, display_state, &s->cur_display_config);

	for (i = 0; i < dml2->v20.dml_core_ctx.states.num_states; i++) {
		s->uclk_change_latencies[i] = dml2->v20.dml_core_ctx.states.state_array[i].dram_clock_change_latency_us;
	}

	for (i = 0; i < 4; i++) {
		for (j = 0; j < dml2->v20.dml_core_ctx.states.num_states; j++) {
			dml2->v20.dml_core_ctx.states.state_array[j].dram_clock_change_latency_us = s_global->dummy_pstate_table[i].dummy_pstate_latency_us;
		}

		dml_result = pack_and_call_dml_mode_support_ex(dml2, &s->cur_display_config, &s->evaluation_info,
						validate_mode);

		if (dml_result && s->evaluation_info.DRAMClockChangeSupport[0] == dml_dram_clock_change_vactive) {
			map_hw_resources(dml2, &s->cur_display_config, &s->evaluation_info);
			dml_result = dml_mode_programming(&dml2->v20.dml_core_ctx, s_global->mode_support_params.out_lowest_state_idx, &s->cur_display_config, true);

			ASSERT(dml_result);

			dml2_extract_watermark_set(&dml2->v20.g6_temp_read_watermark_set, &dml2->v20.dml_core_ctx);
			dml2->v20.g6_temp_read_watermark_set.cstate_pstate.fclk_pstate_change_ns = dml2->v20.g6_temp_read_watermark_set.cstate_pstate.pstate_change_ns;

			result = s_global->mode_support_params.out_lowest_state_idx;

			while (dml2->v20.dml_core_ctx.states.state_array[result].dram_speed_mts < s_global->dummy_pstate_table[i].dram_speed_mts)
				result++;

			break;
		}
	}

	for (i = 0; i < dml2->v20.dml_core_ctx.states.num_states; i++) {
		dml2->v20.dml_core_ctx.states.state_array[i].dram_clock_change_latency_us = s->uclk_change_latencies[i];
	}

	return result;
}

static void copy_dummy_pstate_table(struct dummy_pstate_entry *dest, struct dummy_pstate_entry *src, unsigned int num_entries)
{
	for (unsigned int i = 0; i < num_entries; i++) {
		dest[i] = src[i];
	}
}

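/*
 * Returns true if any timing that blends multiple planes is also driven
 * with ODM combine; without windowed MPO + ODM support the two cannot be
 * combined.
 */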
static bool are_timings_requiring_odm_doing_blending(const struct dml_display_cfg_st *display_cfg,
		const struct dml_mode_support_info_st *evaluation_info)
{
	unsigned int planes_per_timing[__DML_NUM_PLANES__] = {0};
	int i;

	for (i = 0; i < display_cfg->num_surfaces; i++)
		planes_per_timing[display_cfg->plane.BlendingAndTiming[i]]++;

	for (i = 0; i < __DML_NUM_PLANES__; i++) {
		if (planes_per_timing[i] > 1 && evaluation_info->ODMMode[i] != dml_odm_mode_bypass)
			return true;
	}

	return false;
}

static bool does_configuration_meet_sw_policies(struct dml2_context *ctx, const struct dml_display_cfg_st *display_cfg,
	const struct dml_mode_support_info_st *evaluation_info)
{
	bool pass = true;

	if (!ctx->config.enable_windowed_mpo_odm) {
		if (are_timings_requiring_odm_doing_blending(display_cfg, evaluation_info))
			pass = false;
	}

	return pass;
}

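/*
 * Top-level mode support flow: build the DML display config from the dc
 * state, run mode support, then loop on optimize_configuration() while it
 * keeps producing passing configurations. If the final attempt fails, mode
 * support is re-run on the last passing config so the DML core state
 * matches what is returned as supported.
 */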
static bool dml_mode_support_wrapper(struct dml2_context *dml2,
		struct dc_state *display_state,
		enum dc_validate_mode validate_mode)
{
	struct dml2_wrapper_scratch *s = &dml2->v20.scratch;
	unsigned int result = 0, i;
	bool optimized_result = true;

	build_unoptimized_policy_settings(dml2->v20.dml_core_ctx.project, &dml2->v20.dml_core_ctx.policy);

	/* Zero out before each call before proceeding */
	memset(&s->cur_display_config, 0, sizeof(struct dml_display_cfg_st));
	memset(&s->mode_support_params, 0, sizeof(struct dml_mode_support_ex_params_st));
	memset(&s->dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
	memset(&s->optimize_configuration_params, 0, sizeof(struct dml2_wrapper_optimize_configuration_params));

	for (i = 0; i < dml2->config.dcn_pipe_count; i++) {
		/* Calling resource_build_scaling_params will populate the pipe params
		 * with the necessary information needed for correct DML calculations.
		 * This is also done in the DML1 driver code path and hence display_state
		 * cannot be const.
		 */
		struct pipe_ctx *pipe = &display_state->res_ctx.pipe_ctx[i];

		if (pipe->plane_state) {
			if (!dml2->config.callbacks.build_scaling_params(pipe)) {
				ASSERT(false);
				return false;
			}
		}
	}

	map_dc_state_into_dml_display_cfg(dml2, display_state, &s->cur_display_config);
	if (!dml2->config.skip_hw_state_mapping)
		dml2_apply_det_buffer_allocation_policy(dml2, &s->cur_display_config);

	result = pack_and_call_dml_mode_support_ex(dml2,
		&s->cur_display_config,
		&s->mode_support_info,
		validate_mode);

	if (result)
		result = does_configuration_meet_sw_policies(dml2, &s->cur_display_config, &s->mode_support_info);

	// Try to optimize
	if (result) {
		s->cur_policy = dml2->v20.dml_core_ctx.policy;
		s->optimize_configuration_params.dml_core_ctx = &dml2->v20.dml_core_ctx;
		s->optimize_configuration_params.config = &dml2->config;
		s->optimize_configuration_params.ip_params = &dml2->v20.dml_core_ctx.ip;
		s->optimize_configuration_params.cur_display_config = &s->cur_display_config;
		s->optimize_configuration_params.cur_mode_support_info = &s->mode_support_info;
		s->optimize_configuration_params.cur_policy = &s->cur_policy;
		s->optimize_configuration_params.new_display_config = &s->new_display_config;
		s->optimize_configuration_params.new_policy = &s->new_policy;

		while (optimized_result && optimize_configuration(dml2, &s->optimize_configuration_params)) {
			dml2->v20.dml_core_ctx.policy = s->new_policy;
			optimized_result = pack_and_call_dml_mode_support_ex(dml2,
				&s->new_display_config,
				&s->mode_support_info,
				validate_mode);

			if (optimized_result)
				optimized_result = does_configuration_meet_sw_policies(dml2, &s->new_display_config, &s->mode_support_info);

			// If the new optimized state is supported, then set current = new
			if (optimized_result) {
				s->cur_display_config = s->new_display_config;
				s->cur_policy = s->new_policy;
			} else {
				// Else, restore policy to current
				dml2->v20.dml_core_ctx.policy = s->cur_policy;
			}
		}

		// Optimization ended with a failing config, so restore DML state to the last passing one
		if (!optimized_result) {
			result = pack_and_call_dml_mode_support_ex(dml2,
				&s->cur_display_config,
				&s->mode_support_info,
				validate_mode);
		}
	}

	if (result)
		map_hw_resources(dml2, &s->cur_display_config, &s->mode_support_info);

	return result;
}

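/*
 * Return the index of the first stream that is not SubVP and has
 * ignore_msa_timing_param set (used here as the DRR marker), or -1 if there
 * is none.
 */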
static int find_drr_eligible_stream(struct dc_state *display_state)
{
	int i;

	for (i = 0; i < display_state->stream_count; i++) {
		if (dc_state_get_stream_subvp_type(display_state, display_state->streams[i]) == SUBVP_NONE
			&& display_state->streams[i]->ignore_msa_timing_param) {
			// Use ignore_msa_timing_param flag to identify as DRR
			return i;
		}
	}

	return -1;
}

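/*
 * P-state optimization for the non-native path. If vactive/vblank switching
 * is already supported (and SubVP is not forced), nothing is added. For a
 * single stream that qualifies, FW-based vblank stretch is tried; otherwise
 * SubVP phantom pipes are added one at a time, optionally pairing the last
 * remaining non-SubVP stream with DRR. On failure, all phantom pipes are
 * removed and the unoptimized configuration is re-validated.
 */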
static bool optimize_pstate_with_svp_and_drr(struct dml2_context *dml2, struct dc_state *display_state,
		enum dc_validate_mode validate_mode)
{
	struct dml2_wrapper_scratch *s = &dml2->v20.scratch;
	bool pstate_optimization_done = false;
	bool pstate_optimization_success = false;
	bool result = false;
	int drr_display_index = 0, non_svp_streams = 0;
	bool force_svp = dml2->config.svp_pstate.force_enable_subvp;

	display_state->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = false;
	display_state->bw_ctx.bw.dcn.legacy_svp_drr_stream_index_valid = false;

	result = dml_mode_support_wrapper(dml2, display_state, validate_mode);

	if (!result) {
		pstate_optimization_done = true;
	} else if (s->mode_support_info.DRAMClockChangeSupport[0] != dml_dram_clock_change_unsupported && !force_svp) {
		pstate_optimization_success = true;
		pstate_optimization_done = true;
	}

	if (display_state->stream_count == 1 && dml2->config.callbacks.can_support_mclk_switch_using_fw_based_vblank_stretch(dml2->config.callbacks.dc, display_state)) {
		display_state->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = true;

		result = dml_mode_support_wrapper(dml2, display_state, validate_mode);
	} else {
		non_svp_streams = display_state->stream_count;

		while (!pstate_optimization_done) {
			result = dml_mode_programming(&dml2->v20.dml_core_ctx, s->mode_support_params.out_lowest_state_idx, &s->cur_display_config, true);

			// Always try adding SVP first
			if (result)
				result = dml2_svp_add_phantom_pipe_to_dc_state(dml2, display_state, &s->mode_support_info);
			else
				pstate_optimization_done = true;

			if (result) {
				result = dml_mode_support_wrapper(dml2, display_state, validate_mode);
			} else {
				pstate_optimization_done = true;
			}

			if (result) {
				non_svp_streams--;

				if (s->mode_support_info.DRAMClockChangeSupport[0] != dml_dram_clock_change_unsupported) {
					if (dml2_svp_validate_static_schedulability(dml2, display_state, s->mode_support_info.DRAMClockChangeSupport[0])) {
						pstate_optimization_success = true;
						pstate_optimization_done = true;
					} else {
						pstate_optimization_success = false;
						pstate_optimization_done = false;
					}
				} else {
					drr_display_index = find_drr_eligible_stream(display_state);

					// If there is only 1 remaining non SubVP pipe that is DRR, check static
					// schedulability for SubVP + DRR.
					if (non_svp_streams == 1 && drr_display_index >= 0) {
						if (dml2_svp_drr_schedulable(dml2, display_state, &display_state->streams[drr_display_index]->timing)) {
							display_state->bw_ctx.bw.dcn.legacy_svp_drr_stream_index_valid = true;
							display_state->bw_ctx.bw.dcn.legacy_svp_drr_stream_index = drr_display_index;
							result = dml_mode_support_wrapper(dml2, display_state,
										validate_mode);
						}

						if (result && s->mode_support_info.DRAMClockChangeSupport[0] != dml_dram_clock_change_unsupported) {
							pstate_optimization_success = true;
							pstate_optimization_done = true;
						} else {
							pstate_optimization_success = false;
							pstate_optimization_done = false;
						}
					}
					pstate_optimization_done = pstate_optimization_success;
				}
			}
		}
	}

	if (!pstate_optimization_success) {
		dml2_svp_remove_all_phantom_pipes(dml2, display_state);
		display_state->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = false;
		display_state->bw_ctx.bw.dcn.legacy_svp_drr_stream_index_valid = false;
		result = dml_mode_support_wrapper(dml2, display_state, validate_mode);
	}

	return result;
}

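/*
 * Run mode support (including SVP/DRR p-state optimization on the
 * non-native path), then mode programming. On dGPU the programmed state is
 * raised to at least the level needed for g6 temp reads when that level is
 * known; APUs always program the lowest supported state.
 */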
static bool call_dml_mode_support_and_programming(struct dc_state *context, enum dc_validate_mode validate_mode)
{
	struct dml2_context *dml2;
	struct dml2_wrapper_scratch *s;
	unsigned int result = 0;
	unsigned int min_state = 0;
	int min_state_for_g6_temp_read = 0;

	if (!context)
		return false;

	dml2 = context->bw_ctx.dml2;
	s = &dml2->v20.scratch;

	if (!context->streams[0]->sink->link->dc->caps.is_apu) {
		min_state_for_g6_temp_read = calculate_lowest_supported_state_for_temp_read(dml2, context,
										validate_mode);

		ASSERT(min_state_for_g6_temp_read >= 0);
	}

	if (!dml2->config.use_native_pstate_optimization) {
		result = optimize_pstate_with_svp_and_drr(dml2, context, validate_mode);
	} else {
		result = dml_mode_support_wrapper(dml2, context, validate_mode);
	}

	/* When trying to set certain frequencies in FRL, min_state_for_g6_temp_read is reported as -1. This leads to an invalid value of min_state causing crashes later on.
	 * Use the default logic for min_state only when min_state_for_g6_temp_read is a valid value. In other cases, use the value calculated by the DML directly.
	 */
	if (!context->streams[0]->sink->link->dc->caps.is_apu) {
		if (min_state_for_g6_temp_read >= 0)
			min_state = min_state_for_g6_temp_read > s->mode_support_params.out_lowest_state_idx ? min_state_for_g6_temp_read : s->mode_support_params.out_lowest_state_idx;
		else
			min_state = s->mode_support_params.out_lowest_state_idx;
	}

	if (result) {
		if (!context->streams[0]->sink->link->dc->caps.is_apu) {
			result = dml_mode_programming(&dml2->v20.dml_core_ctx, min_state, &s->cur_display_config, true);
		} else {
			result = dml_mode_programming(&dml2->v20.dml_core_ctx, s->mode_support_params.out_lowest_state_idx, &s->cur_display_config, true);
		}
	}

	return result;
}

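/*
 * Full validate-and-build path: run mode support and programming, map the
 * DML output onto dc pipes, verify the DET buffer configuration (re-running
 * the DML once if it changed), then extract clocks, watermark sets and
 * rq/dlg parameters into the dc state. A stream-less context simply takes
 * the clocks of the lowest power state.
 */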
static bool dml2_validate_and_build_resource(const struct dc *in_dc, struct dc_state *context,
		enum dc_validate_mode validate_mode)
{
	struct dml2_context *dml2 = context->bw_ctx.dml2;
	struct dml2_wrapper_scratch *s = &dml2->v20.scratch;
	struct dml2_dcn_clocks out_clks;
	unsigned int result = 0;
	bool need_recalculation = false;
	uint32_t cstate_enter_plus_exit_z8_ns;

	if (context->stream_count == 0) {
		unsigned int lowest_state_idx = 0;

		out_clks.p_state_supported = true;
		out_clks.dispclk_khz = 0; /* No requirement, and lowest index will generally be maximum dispclk. */
		out_clks.dcfclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dcfclk_mhz * 1000;
		out_clks.fclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].fabricclk_mhz * 1000;
		out_clks.uclk_mts = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dram_speed_mts;
		out_clks.phyclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].phyclk_mhz * 1000;
		out_clks.socclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].socclk_mhz * 1000;
		out_clks.ref_dtbclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dtbclk_mhz * 1000;
		context->bw_ctx.bw.dcn.clk.dtbclk_en = false;
		dml2_copy_clocks_to_dc_state(&out_clks, context);
		return true;
	}

	/* Zero out before each call before proceeding */
	memset(&dml2->v20.scratch, 0, sizeof(struct dml2_wrapper_scratch));
	memset(&dml2->v20.dml_core_ctx.policy, 0, sizeof(struct dml_mode_eval_policy_st));
	memset(&dml2->v20.dml_core_ctx.ms, 0, sizeof(struct mode_support_st));
	memset(&dml2->v20.dml_core_ctx.mp, 0, sizeof(struct mode_program_st));

	/* Initialize DET scratch */
	dml2_initialize_det_scratch(dml2);

	copy_dummy_pstate_table(s->dummy_pstate_table, in_dc->clk_mgr->bw_params->dummy_pstate_table, 4);

	result = call_dml_mode_support_and_programming(context, validate_mode);
	/* Call map dc pipes to map the pipes based on the DML output. For correctly determining if recalculation
	 * is required or not, the resource context needs to correctly reflect the number of active pipes. We would
	 * only know the correct number of active pipes after dml2_map_dc_pipes is called.
	 */
	if (result && !dml2->config.skip_hw_state_mapping)
		dml2_map_dc_pipes(dml2, context, &s->cur_display_config, &s->dml_to_dc_pipe_mapping, in_dc->current_state);

	/* Verify and update DET Buffer configuration if needed. dml2_verify_det_buffer_configuration will check if DET Buffer
	 * size needs to be updated. If yes it will update the DETOverride variable and set need_recalculation flag to true.
	 * Based on that flag, run mode support again. Verification needs to be run after dml_mode_programming because the getters
	 * return correct det buffer values only after dml_mode_programming is called.
	 */
	if (result && !dml2->config.skip_hw_state_mapping) {
		need_recalculation = dml2_verify_det_buffer_configuration(dml2, context, &dml2->det_helper_scratch);
		if (need_recalculation) {
			/* Engage the DML again if recalculation is required. */
			call_dml_mode_support_and_programming(context, validate_mode);
			if (!dml2->config.skip_hw_state_mapping) {
				dml2_map_dc_pipes(dml2, context, &s->cur_display_config, &s->dml_to_dc_pipe_mapping, in_dc->current_state);
			}
			need_recalculation = dml2_verify_det_buffer_configuration(dml2, context, &dml2->det_helper_scratch);
			ASSERT(need_recalculation == false);
		}
	}

	if (result) {
		unsigned int lowest_state_idx = s->mode_support_params.out_lowest_state_idx;

		out_clks.dispclk_khz = (unsigned int)dml2->v20.dml_core_ctx.mp.Dispclk_calculated * 1000;
		out_clks.p_state_supported = s->mode_support_info.DRAMClockChangeSupport[0] != dml_dram_clock_change_unsupported;
		if (in_dc->config.use_default_clock_table &&
			(lowest_state_idx < dml2->v20.dml_core_ctx.states.num_states - 1)) {
			lowest_state_idx = dml2->v20.dml_core_ctx.states.num_states - 1;
			out_clks.dispclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dispclk_mhz * 1000;
		}

		out_clks.dcfclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dcfclk_mhz * 1000;
		out_clks.fclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].fabricclk_mhz * 1000;
		out_clks.uclk_mts = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dram_speed_mts;
		out_clks.phyclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].phyclk_mhz * 1000;
		out_clks.socclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].socclk_mhz * 1000;
		out_clks.ref_dtbclk_khz = (unsigned int)dml2->v20.dml_core_ctx.states.state_array[lowest_state_idx].dtbclk_mhz * 1000;
		context->bw_ctx.bw.dcn.clk.dtbclk_en = is_dtbclk_required(in_dc, context);

		if (!dml2->config.skip_hw_state_mapping) {
			/* Call dml2_calculate_rq_and_dlg_params */
			dml2_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml2, in_dc->res_pool->pipe_count);
		}

		dml2_copy_clocks_to_dc_state(&out_clks, context);
		dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.a, &dml2->v20.dml_core_ctx);
		dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.b, &dml2->v20.dml_core_ctx);
		if (context->streams[0]->sink->link->dc->caps.is_apu)
			dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.c, &dml2->v20.dml_core_ctx);
		else
			memcpy(&context->bw_ctx.bw.dcn.watermarks.c, &dml2->v20.g6_temp_read_watermark_set, sizeof(context->bw_ctx.bw.dcn.watermarks.c));
		dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.d, &dml2->v20.dml_core_ctx);
		dml2_extract_writeback_wm(context, &dml2->v20.dml_core_ctx);
		// Copy for deciding zstate use
		context->bw_ctx.dml.vba.StutterPeriod = context->bw_ctx.dml2->v20.dml_core_ctx.mp.StutterPeriod;

		cstate_enter_plus_exit_z8_ns = context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_z8_ns;

		if (context->bw_ctx.dml.vba.StutterPeriod < in_dc->debug.minimum_z8_residency_time &&
				cstate_enter_plus_exit_z8_ns < in_dc->debug.minimum_z8_residency_time * 1000)
			cstate_enter_plus_exit_z8_ns = in_dc->debug.minimum_z8_residency_time * 1000;

		context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_z8_ns = cstate_enter_plus_exit_z8_ns;
	}

	return result;
}

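/*
 * Validation-only path: build the DML display config and run mode support
 * once; no hardware state is mapped and nothing is programmed.
 */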
static bool dml2_validate_only(struct dc_state *context, enum dc_validate_mode validate_mode)
{
	struct dml2_context *dml2;
	unsigned int result = 0;

	if (!context || context->stream_count == 0)
		return true;

	dml2 = context->bw_ctx.dml2;

	/* Zero out before each call before proceeding */
	memset(&dml2->v20.scratch, 0, sizeof(struct dml2_wrapper_scratch));
	memset(&dml2->v20.dml_core_ctx.policy, 0, sizeof(struct dml_mode_eval_policy_st));
	memset(&dml2->v20.dml_core_ctx.ms, 0, sizeof(struct mode_support_st));
	memset(&dml2->v20.dml_core_ctx.mp, 0, sizeof(struct mode_program_st));

	build_unoptimized_policy_settings(dml2->v20.dml_core_ctx.project, &dml2->v20.dml_core_ctx.policy);

	map_dc_state_into_dml_display_cfg(dml2, context, &dml2->v20.scratch.cur_display_config);
	if (!dml2->config.skip_hw_state_mapping)
		dml2_apply_det_buffer_allocation_policy(dml2, &dml2->v20.scratch.cur_display_config);

	result = pack_and_call_dml_mode_support_ex(dml2,
		&dml2->v20.scratch.cur_display_config,
		&dml2->v20.scratch.mode_support_info,
		validate_mode);

	if (result)
		result = does_configuration_meet_sw_policies(dml2, &dml2->v20.scratch.cur_display_config, &dml2->v20.scratch.mode_support_info);

	return result == 1;
}

static void dml2_apply_debug_options(const struct dc *dc, struct dml2_context *dml2)
{
	if (dc->debug.override_odm_optimization) {
		dml2->config.minimize_dispclk_using_odm = dc->debug.minimize_dispclk_using_odm;
	}
}

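/*
 * Main DML2 validation entry point. DML2.1 contexts are dispatched to
 * dml21_validate(); for DML2.0, validate-only or validate-and-build is
 * chosen from the requested mode, with the kernel FPU enabled around the
 * call since the DML core uses floating point.
 */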
bool dml2_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml2,
	enum dc_validate_mode validate_mode)
{
	bool out = false;

	if (!dml2)
		return false;

	dml2_apply_debug_options(in_dc, dml2);

	/* DML2.1 validation path */
	if (dml2->architecture == dml2_architecture_21) {
		out = dml21_validate(in_dc, context, dml2, validate_mode);
		return out;
	}

	DC_FP_START();

	/* Use dml2_validate_only for the DC_VALIDATE_MODE_ONLY and DC_VALIDATE_MODE_AND_STATE_INDEX paths */
	if (validate_mode != DC_VALIDATE_MODE_AND_PROGRAMMING)
		out = dml2_validate_only(context, validate_mode);
	else
		out = dml2_validate_and_build_resource(in_dc, context, validate_mode);

	DC_FP_END();

	return out;
}

static inline struct dml2_context *dml2_allocate_memory(void)
{
	return (struct dml2_context *) vzalloc(sizeof(struct dml2_context));
}

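/*
 * (Re)initialize the DML2.0 core context: store the caller's config, select
 * the dml project from the DCE version, then build the IP parameters, SoC
 * bounding box and SoC states. DCN 4.01 with DML2.1 enabled is forwarded to
 * dml21_reinit() instead.
 */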
static void dml2_init(const struct dc *in_dc, const struct dml2_configuration_options *config, struct dml2_context **dml2)
{
	if ((in_dc->debug.using_dml21) && (in_dc->ctx->dce_version == DCN_VERSION_4_01)) {
		dml21_reinit(in_dc, dml2, config);
		return;
	}

	// Store config options
	(*dml2)->config = *config;

	switch (in_dc->ctx->dce_version) {
	case DCN_VERSION_3_5:
		(*dml2)->v20.dml_core_ctx.project = dml_project_dcn35;
		break;
	case DCN_VERSION_3_51:
		(*dml2)->v20.dml_core_ctx.project = dml_project_dcn351;
		break;
	case DCN_VERSION_3_6:
		(*dml2)->v20.dml_core_ctx.project = dml_project_dcn36;
		break;
	case DCN_VERSION_3_2:
		(*dml2)->v20.dml_core_ctx.project = dml_project_dcn32;
		break;
	case DCN_VERSION_3_21:
		(*dml2)->v20.dml_core_ctx.project = dml_project_dcn321;
		break;
	case DCN_VERSION_4_01:
		(*dml2)->v20.dml_core_ctx.project = dml_project_dcn401;
		break;
	default:
		(*dml2)->v20.dml_core_ctx.project = dml_project_default;
		break;
	}

	DC_FP_START();

	initialize_dml2_ip_params(*dml2, in_dc, &(*dml2)->v20.dml_core_ctx.ip);

	initialize_dml2_soc_bbox(*dml2, in_dc, &(*dml2)->v20.dml_core_ctx.soc);

	initialize_dml2_soc_states(*dml2, in_dc, &(*dml2)->v20.dml_core_ctx.soc, &(*dml2)->v20.dml_core_ctx.states);

	DC_FP_END();
}

bool dml2_create(const struct dc *in_dc, const struct dml2_configuration_options *config, struct dml2_context **dml2)
{
	// TODO : Temporarily add DCN_VERSION_3_2 for N-1 validation. Remove DCN_VERSION_3_2 after N-1 validation phase is complete.
	if ((in_dc->debug.using_dml21) && (in_dc->ctx->dce_version == DCN_VERSION_4_01))
		return dml21_create(in_dc, dml2, config);

	// Allocate Mode Lib Ctx
	*dml2 = dml2_allocate_memory();

	if (!(*dml2))
		return false;

	dml2_init(in_dc, config, dml2);

	return true;
}

void dml2_destroy(struct dml2_context *dml2)
{
	if (!dml2)
		return;

	if (dml2->architecture == dml2_architecture_21)
		dml21_destroy(dml2);
	vfree(dml2);
}

void dml2_extract_dram_and_fclk_change_support(struct dml2_context *dml2,
	unsigned int *fclk_change_support, unsigned int *dram_clk_change_support)
{
	*fclk_change_support = (unsigned int) dml2->v20.dml_core_ctx.ms.support.FCLKChangeSupport[0];
	*dram_clk_change_support = (unsigned int) dml2->v20.dml_core_ctx.ms.support.DRAMClockChangeSupport[0];
}

void dml2_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml2)
{
	if (dml2->architecture == dml2_architecture_21)
		dml21_prepare_mcache_programming(in_dc, context, dml2);
}

void dml2_copy(struct dml2_context *dst_dml2,
	struct dml2_context *src_dml2)
{
	if (src_dml2->architecture == dml2_architecture_21) {
		dml21_copy(dst_dml2, src_dml2);
		return;
	}

	/* Copy Mode Lib Ctx */
	memcpy(dst_dml2, src_dml2, sizeof(struct dml2_context));
}

bool dml2_create_copy(struct dml2_context **dst_dml2,
	struct dml2_context *src_dml2)
{
	if (src_dml2->architecture == dml2_architecture_21)
		return dml21_create_copy(dst_dml2, src_dml2);

	/* Allocate Mode Lib Ctx */
	*dst_dml2 = dml2_allocate_memory();

	if (!(*dst_dml2))
		return false;

	/* Copy Mode Lib Ctx */
	dml2_copy(*dst_dml2, src_dml2);

	return true;
}

void dml2_reinit(const struct dc *in_dc,
				 const struct dml2_configuration_options *config,
				 struct dml2_context **dml2)
{
	if ((in_dc->debug.using_dml21) && (in_dc->ctx->dce_version == DCN_VERSION_4_01)) {
		dml21_reinit(in_dc, dml2, config);
		return;
	}

	dml2_init(in_dc, config, dml2);
}
899