xref: /linux/drivers/gpu/drm/amd/display/dc/dml2/dml21/dml21_wrapper.c (revision 85502b2214d50ba0ddf2a5fb454e4d28a160d175)
1 // SPDX-License-Identifier: MIT
2 //
3 // Copyright 2024 Advanced Micro Devices, Inc.
4 
5 #include <linux/vmalloc.h>
6 
7 #include "dml2_internal_types.h"
8 #include "dml_top.h"
9 #include "dml2_core_dcn4_calcs.h"
10 #include "dml2_internal_shared_types.h"
11 #include "dml21_utils.h"
12 #include "dml21_translation_helper.h"
13 #include "dml2_dc_resource_mgmt.h"
14 
15 #define INVALID -1
16 
dml21_allocate_memory(struct dml2_context ** dml_ctx)17 static bool dml21_allocate_memory(struct dml2_context **dml_ctx)
18 {
19 	*dml_ctx = vzalloc(sizeof(struct dml2_context));
20 	if (!(*dml_ctx))
21 		return false;
22 
23 	(*dml_ctx)->v21.dml_init.dml2_instance = vzalloc(sizeof(struct dml2_instance));
24 	if (!((*dml_ctx)->v21.dml_init.dml2_instance))
25 		return false;
26 
27 	(*dml_ctx)->v21.mode_support.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
28 	(*dml_ctx)->v21.mode_programming.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
29 
30 	(*dml_ctx)->v21.mode_support.display_config = &(*dml_ctx)->v21.display_config;
31 	(*dml_ctx)->v21.mode_programming.display_config = (*dml_ctx)->v21.mode_support.display_config;
32 
33 	(*dml_ctx)->v21.mode_programming.programming = vzalloc(sizeof(struct dml2_display_cfg_programming));
34 	if (!((*dml_ctx)->v21.mode_programming.programming))
35 		return false;
36 
37 	return true;
38 }
39 
dml21_apply_debug_options(const struct dc * in_dc,struct dml2_context * dml_ctx,const struct dml2_configuration_options * config)40 static void dml21_apply_debug_options(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
41 {
42 	bool disable_fams2;
43 	struct dml2_pmo_options *pmo_options = &dml_ctx->v21.dml_init.options.pmo_options;
44 
45 	/* ODM options */
46 	pmo_options->disable_dyn_odm = !config->minimize_dispclk_using_odm;
47 	pmo_options->disable_dyn_odm_for_multi_stream = true;
48 	pmo_options->disable_dyn_odm_for_stream_with_svp = true;
49 
50 	/* UCLK P-State options */
51 	if (in_dc->debug.dml21_force_pstate_method) {
52 		dml_ctx->config.pmo.force_pstate_method_enable = true;
53 		for (int i = 0; i < MAX_PIPES; i++)
54 			dml_ctx->config.pmo.force_pstate_method_values[i] = in_dc->debug.dml21_force_pstate_method_values[i];
55 	} else {
56 		dml_ctx->config.pmo.force_pstate_method_enable = false;
57 	}
58 
59 	pmo_options->disable_vblank = ((in_dc->debug.dml21_disable_pstate_method_mask >> 1) & 1);
60 
61 	/* NOTE: DRR and SubVP Require FAMS2 */
62 	disable_fams2 = !in_dc->debug.fams2_config.bits.enable;
63 	pmo_options->disable_svp = ((in_dc->debug.dml21_disable_pstate_method_mask >> 2) & 1) ||
64 			in_dc->debug.force_disable_subvp ||
65 			disable_fams2;
66 	pmo_options->disable_drr_clamped = ((in_dc->debug.dml21_disable_pstate_method_mask >> 3) & 1) ||
67 			disable_fams2;
68 	pmo_options->disable_drr_var = ((in_dc->debug.dml21_disable_pstate_method_mask >> 4) & 1) ||
69 			disable_fams2;
70 	pmo_options->disable_fams2 = disable_fams2;
71 
72 	pmo_options->disable_drr_var_when_var_active = in_dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE ||
73 			in_dc->debug.disable_fams_gaming == INGAME_FAMS_MULTI_DISP_CLAMPED_ONLY;
74 	pmo_options->disable_drr_clamped_when_var_active = in_dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE;
75 }
76 
dml21_init(const struct dc * in_dc,struct dml2_context ** dml_ctx,const struct dml2_configuration_options * config)77 static void dml21_init(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
78 {
79 	switch (in_dc->ctx->dce_version) {
80 	case DCN_VERSION_4_01:
81 		(*dml_ctx)->v21.dml_init.options.project_id = dml2_project_dcn4x_stage2_auto_drr_svp;
82 		break;
83 	default:
84 		(*dml_ctx)->v21.dml_init.options.project_id = dml2_project_invalid;
85 	}
86 
87 	(*dml_ctx)->architecture = dml2_architecture_21;
88 
89 	/* Store configuration options */
90 	(*dml_ctx)->config = *config;
91 
92 	DC_FP_START();
93 
94 	/*Initialize SOCBB and DCNIP params */
95 	dml21_initialize_soc_bb_params(&(*dml_ctx)->v21.dml_init, config, in_dc);
96 	dml21_initialize_ip_params(&(*dml_ctx)->v21.dml_init, config, in_dc);
97 	dml21_apply_soc_bb_overrides(&(*dml_ctx)->v21.dml_init, config, in_dc);
98 
99 	/* apply debug overrides */
100 	dml21_apply_debug_options(in_dc, *dml_ctx, config);
101 
102 	/*Initialize DML21 instance */
103 	dml2_initialize_instance(&(*dml_ctx)->v21.dml_init);
104 
105 	DC_FP_END();
106 }
107 
dml21_create(const struct dc * in_dc,struct dml2_context ** dml_ctx,const struct dml2_configuration_options * config)108 bool dml21_create(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
109 {
110 	/* Allocate memory for initializing DML21 instance */
111 	if (!dml21_allocate_memory(dml_ctx))
112 		return false;
113 
114 	dml21_init(in_dc, dml_ctx, config);
115 
116 	return true;
117 }
118 
dml21_destroy(struct dml2_context * dml2)119 void dml21_destroy(struct dml2_context *dml2)
120 {
121 	vfree(dml2->v21.dml_init.dml2_instance);
122 	vfree(dml2->v21.mode_programming.programming);
123 }
124 
dml21_calculate_rq_and_dlg_params(const struct dc * dc,struct dc_state * context,struct resource_context * out_new_hw_state,struct dml2_context * in_ctx,unsigned int pipe_cnt)125 static void dml21_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state,
126 	struct dml2_context *in_ctx, unsigned int pipe_cnt)
127 {
128 	unsigned int dml_prog_idx = 0, dc_pipe_index = 0, num_dpps_required = 0;
129 	struct dml2_per_plane_programming *pln_prog = NULL;
130 	struct dml2_per_stream_programming *stream_prog = NULL;
131 	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
132 	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};
133 	int num_pipes;
134 	unsigned int dml_phantom_prog_idx;
135 
136 	context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
137 
138 	/* copy global DCHUBBUB arbiter registers */
139 	memcpy(&context->bw_ctx.bw.dcn.arb_regs, &in_ctx->v21.mode_programming.programming->global_regs.arb_regs, sizeof(struct dml2_display_arb_regs));
140 
141 	/* legacy only */
142 	context->bw_ctx.bw.dcn.compbuf_size_kb = (int)in_ctx->v21.mode_programming.programming->global_regs.arb_regs.compbuf_size * 64;
143 
144 	context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
145 	context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
146 	context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;
147 
148 	/* phantom's start after main planes */
149 	dml_phantom_prog_idx = in_ctx->v21.mode_programming.programming->display_config.num_planes;
150 
151 	for (dml_prog_idx = 0; dml_prog_idx < DML2_MAX_PLANES; dml_prog_idx++) {
152 		pln_prog = &in_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
153 
154 		if (!pln_prog->plane_descriptor)
155 			continue;
156 
157 		stream_prog = &in_ctx->v21.mode_programming.programming->stream_programming[pln_prog->plane_descriptor->stream_index];
158 		num_dpps_required = pln_prog->num_dpps_required;
159 
160 		if (num_dpps_required == 0) {
161 			continue;
162 		}
163 		num_pipes = dml21_find_dc_pipes_for_plane(dc, context, in_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
164 
165 		if (num_pipes <= 0)
166 			continue;
167 
168 		/* program each pipe */
169 		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
170 			dml21_program_dc_pipe(in_ctx, context, dc_main_pipes[dc_pipe_index], pln_prog, stream_prog);
171 
172 			if (pln_prog->phantom_plane.valid && dc_phantom_pipes[dc_pipe_index]) {
173 				dml21_program_dc_pipe(in_ctx, context, dc_phantom_pipes[dc_pipe_index], pln_prog, stream_prog);
174 			}
175 		}
176 
177 		/* copy per plane mcache allocation */
178 		memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx], &pln_prog->mcache_allocation, sizeof(struct dml2_mcache_surface_allocation));
179 		if (pln_prog->phantom_plane.valid) {
180 			memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx],
181 					&pln_prog->phantom_plane.mcache_allocation,
182 					sizeof(struct dml2_mcache_surface_allocation));
183 
184 			dml_phantom_prog_idx++;
185 		}
186 	}
187 
188 	/* assign global clocks */
189 	context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
190 	context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
191 	if (in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values > 1) {
192 		context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz =
193 			in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values] * 1000;
194 	} else {
195 		context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[0] * 1000;
196 	}
197 
198 	if (in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values > 1) {
199 		context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
200 			in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values] * 1000;
201 	} else {
202 		context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[0] * 1000;
203 	}
204 
205 	/* get global mall allocation */
206 	if (dc->res_pool->funcs->calculate_mall_ways_from_bytes) {
207 		context->bw_ctx.bw.dcn.clk.num_ways = dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
208 	} else {
209 		context->bw_ctx.bw.dcn.clk.num_ways = 0;
210 	}
211 }
212 
dml21_prepare_mcache_params(struct dml2_context * dml_ctx,struct dc_state * context,struct dc_mcache_params * mcache_params)213 static void dml21_prepare_mcache_params(struct dml2_context *dml_ctx, struct dc_state *context, struct dc_mcache_params *mcache_params)
214 {
215 	int dc_plane_idx = 0;
216 	int dml_prog_idx, stream_idx, plane_idx;
217 	struct dml2_per_plane_programming *pln_prog = NULL;
218 
219 	for (stream_idx = 0; stream_idx < context->stream_count; stream_idx++) {
220 		for (plane_idx = 0; plane_idx < context->stream_status[stream_idx].plane_count; plane_idx++) {
221 			dml_prog_idx = map_plane_to_dml21_display_cfg(dml_ctx, context->streams[stream_idx]->stream_id, context->stream_status[stream_idx].plane_states[plane_idx], context);
222 			if (dml_prog_idx == INVALID) {
223 				continue;
224 			}
225 			pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
226 			mcache_params[dc_plane_idx].valid = pln_prog->mcache_allocation.valid;
227 			mcache_params[dc_plane_idx].num_mcaches_plane0 = pln_prog->mcache_allocation.num_mcaches_plane0;
228 			mcache_params[dc_plane_idx].num_mcaches_plane1 = pln_prog->mcache_allocation.num_mcaches_plane1;
229 			mcache_params[dc_plane_idx].requires_dedicated_mall_mcache = pln_prog->mcache_allocation.requires_dedicated_mall_mcache;
230 			mcache_params[dc_plane_idx].last_slice_sharing.plane0_plane1 = pln_prog->mcache_allocation.last_slice_sharing.plane0_plane1;
231 			memcpy(mcache_params[dc_plane_idx].mcache_x_offsets_plane0,
232 				pln_prog->mcache_allocation.mcache_x_offsets_plane0,
233 				sizeof(int) * (DML2_MAX_MCACHES + 1));
234 			memcpy(mcache_params[dc_plane_idx].mcache_x_offsets_plane1,
235 				pln_prog->mcache_allocation.mcache_x_offsets_plane1,
236 				sizeof(int) * (DML2_MAX_MCACHES + 1));
237 			dc_plane_idx++;
238 		}
239 	}
240 }
241 
/*
 * Full validation + programming path. Maps the dc_state into a DML2.1
 * display config, runs dml2_build_mode_programming, then (unless HW state
 * mapping is skipped) maps DML output back onto dc pipes, expands SubVP
 * phantoms, programs mcache, and copies clocks/watermarks/arbiter output
 * into the bandwidth context.
 *
 * Returns true on success or on a trivially-accepted input (NULL context,
 * or zero streams — which still rebuilds FAMS2 programming); false if
 * display-config mapping or DML mode programming fails.
 */
static bool dml21_mode_check_and_programming(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	bool result = false;
	struct dml2_build_mode_programming_in_out *mode_programming = &dml_ctx->v21.mode_programming;
	struct dc_mcache_params mcache_params[MAX_PLANES] = {0};

	/* Clear per-call scratch so stale results from a prior validation can't leak in */
	memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
	memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
	memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params, 0, sizeof(struct dml2_core_mode_programming_in_out));

	if (!context)
		return true;

	/* No streams: nothing to program, but FAMS2 state must still be refreshed */
	if (context->stream_count == 0) {
		dml21_build_fams2_programming(in_dc, context, dml_ctx);
		return true;
	}

	/* scrub phantom's from current dc_state */
	dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
	dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);

	/* Populate stream, plane mappings and other fields in display config. */
	result = dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
	if (!result)
		return false;

	/* DML core math uses FP; must run inside an FP-enabled region */
	DC_FP_START();
	result = dml2_build_mode_programming(mode_programming);
	DC_FP_END();
	if (!result)
		return false;

	/* Check and map HW resources */
	/* NOTE(review): result is necessarily true here (checked just above); the
	 * extra "result &&" in both guards below is redundant but harmless. */
	if (result && !dml_ctx->config.skip_hw_state_mapping) {
		dml21_map_hw_resources(dml_ctx);
		dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, in_dc->current_state);
		/* if subvp phantoms are present, expand them into dc context */
		dml21_handle_phantom_streams_planes(in_dc, context, dml_ctx);

		if (in_dc->res_pool->funcs->program_mcache_pipe_config) {
			//Prepare mcache params for each plane based on mcache output from DML
			dml21_prepare_mcache_params(dml_ctx, context, mcache_params);

			//populate mcache regs to each pipe
			dml_ctx->config.callbacks.allocate_mcache(context, mcache_params);
		}
	}

	/* Copy DML CLK, WM and REG outputs to bandwidth context */
	if (result && !dml_ctx->config.skip_hw_state_mapping) {
		dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pipe_count);
		dml21_copy_clocks_to_dc_state(dml_ctx, context);
		dml21_extract_watermark_sets(in_dc, &context->bw_ctx.bw.dcn.watermarks, dml_ctx);
		dml21_build_fams2_programming(in_dc, context, dml_ctx);
	}

	return true;
}
301 
dml21_check_mode_support(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)302 static bool dml21_check_mode_support(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
303 {
304 	bool is_supported = false;
305 	struct dml2_initialize_instance_in_out *dml_init = &dml_ctx->v21.dml_init;
306 	struct dml2_check_mode_supported_in_out *mode_support = &dml_ctx->v21.mode_support;
307 
308 	memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
309 	memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
310 	memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.check_mode_supported_locals.mode_support_params, 0, sizeof(struct dml2_core_mode_support_in_out));
311 
312 	if (!context || context->stream_count == 0)
313 		return true;
314 
315 	/* Scrub phantom's from current dc_state */
316 	dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
317 	dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
318 
319 	mode_support->dml2_instance = dml_init->dml2_instance;
320 	dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
321 	dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params.programming = dml_ctx->v21.mode_programming.programming;
322 	DC_FP_START();
323 	is_supported = dml2_check_mode_supported(mode_support);
324 	DC_FP_END();
325 	if (!is_supported)
326 		return false;
327 
328 	return true;
329 }
330 
/*
 * Entry point for dc validation. fast_validate selects the support-only
 * check; otherwise the full check-and-program path runs and populates the
 * bandwidth context.
 */
bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx, bool fast_validate)
{
	/* Use dml_validate_only for fast_validate path */
	if (fast_validate)
		return dml21_check_mode_support(in_dc, context, dml_ctx);

	return dml21_mode_check_and_programming(in_dc, context, dml_ctx);
}
343 
dml21_prepare_mcache_programming(struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)344 void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
345 {
346 	unsigned int dml_prog_idx, dml_phantom_prog_idx, dc_pipe_index;
347 	int num_pipes;
348 	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
349 	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};
350 
351 	struct dml2_per_plane_programming *pln_prog = NULL;
352 	struct dml2_plane_mcache_configuration_descriptor *mcache_config = NULL;
353 	struct prepare_mcache_programming_locals *l = &dml_ctx->v21.scratch.prepare_mcache_locals;
354 
355 	if (context->stream_count == 0) {
356 		return;
357 	}
358 
359 	memset(&l->build_mcache_programming_params, 0, sizeof(struct dml2_build_mcache_programming_in_out));
360 	l->build_mcache_programming_params.dml2_instance = dml_ctx->v21.dml_init.dml2_instance;
361 
362 	/* phantom's start after main planes */
363 	dml_phantom_prog_idx = dml_ctx->v21.mode_programming.programming->display_config.num_planes;
364 
365 	/* Build mcache programming parameters per plane per pipe */
366 	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
367 		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
368 
369 		mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_prog_idx];
370 		memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
371 		mcache_config->plane_descriptor = pln_prog->plane_descriptor;
372 		mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx];
373 		mcache_config->num_pipes = pln_prog->num_dpps_required;
374 		l->build_mcache_programming_params.num_configurations++;
375 
376 		if (pln_prog->num_dpps_required == 0) {
377 			continue;
378 		}
379 
380 		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
381 		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
382 		    dc_main_pipes[0]->plane_state == NULL)
383 			continue;
384 
385 		/* get config for each pipe */
386 		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
387 			ASSERT(dc_main_pipes[dc_pipe_index]);
388 			dml21_get_pipe_mcache_config(context, dc_main_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
389 		}
390 
391 		/* get config for each phantom pipe */
392 		if (pln_prog->phantom_plane.valid &&
393 				dc_phantom_pipes[0] &&
394 				dc_main_pipes[0]->stream &&
395 				dc_phantom_pipes[0]->plane_state) {
396 			mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_phantom_prog_idx];
397 			memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
398 			mcache_config->plane_descriptor = pln_prog->plane_descriptor;
399 			mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx];
400 			mcache_config->num_pipes = pln_prog->num_dpps_required;
401 			l->build_mcache_programming_params.num_configurations++;
402 
403 			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
404 				ASSERT(dc_phantom_pipes[dc_pipe_index]);
405 				dml21_get_pipe_mcache_config(context, dc_phantom_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
406 			}
407 
408 			/* increment phantom index */
409 			dml_phantom_prog_idx++;
410 		}
411 	}
412 
413 	/* Call to generate mcache programming per plane per pipe for the given display configuration */
414 	dml2_build_mcache_programming(&l->build_mcache_programming_params);
415 
416 	/* get per plane per pipe mcache programming */
417 	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
418 		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
419 
420 		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
421 		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
422 		    dc_main_pipes[0]->plane_state == NULL)
423 			continue;
424 
425 		/* get config for each pipe */
426 		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
427 			ASSERT(dc_main_pipes[dc_pipe_index]);
428 			if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index]) {
429 				memcpy(&dc_main_pipes[dc_pipe_index]->mcache_regs,
430 						l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index],
431 						sizeof(struct dml2_hubp_pipe_mcache_regs));
432 			}
433 		}
434 
435 		/* get config for each phantom pipe */
436 		if (pln_prog->phantom_plane.valid &&
437 				dc_phantom_pipes[0] &&
438 				dc_main_pipes[0]->stream &&
439 				dc_phantom_pipes[0]->plane_state) {
440 			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
441 				ASSERT(dc_phantom_pipes[dc_pipe_index]);
442 				if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index]) {
443 					memcpy(&dc_phantom_pipes[dc_pipe_index]->mcache_regs,
444 							l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index],
445 							sizeof(struct dml2_hubp_pipe_mcache_regs));
446 				}
447 			}
448 			/* increment phantom index */
449 			dml_phantom_prog_idx++;
450 		}
451 	}
452 }
453 
dml21_copy(struct dml2_context * dst_dml_ctx,struct dml2_context * src_dml_ctx)454 void dml21_copy(struct dml2_context *dst_dml_ctx,
455 	struct dml2_context *src_dml_ctx)
456 {
457 	/* Preserve references to internals */
458 	struct dml2_instance *dst_dml2_instance = dst_dml_ctx->v21.dml_init.dml2_instance;
459 	struct dml2_display_cfg_programming *dst_dml2_programming = dst_dml_ctx->v21.mode_programming.programming;
460 
461 	/* Copy context */
462 	memcpy(dst_dml_ctx, src_dml_ctx, sizeof(struct dml2_context));
463 
464 	/* Copy Internals */
465 	memcpy(dst_dml2_instance, src_dml_ctx->v21.dml_init.dml2_instance, sizeof(struct dml2_instance));
466 	memcpy(dst_dml2_programming, src_dml_ctx->v21.mode_programming.programming, sizeof(struct dml2_display_cfg_programming));
467 
468 	/* Restore references to internals */
469 	dst_dml_ctx->v21.dml_init.dml2_instance = dst_dml2_instance;
470 
471 	dst_dml_ctx->v21.mode_support.dml2_instance = dst_dml2_instance;
472 	dst_dml_ctx->v21.mode_programming.dml2_instance = dst_dml2_instance;
473 
474 	dst_dml_ctx->v21.mode_support.display_config = &dst_dml_ctx->v21.display_config;
475 	dst_dml_ctx->v21.mode_programming.display_config = dst_dml_ctx->v21.mode_support.display_config;
476 
477 	dst_dml_ctx->v21.mode_programming.programming = dst_dml2_programming;
478 
479 	DC_FP_START();
480 
481 	/* need to initialize copied instance for internal references to be correct */
482 	dml2_initialize_instance(&dst_dml_ctx->v21.dml_init);
483 
484 	DC_FP_END();
485 }
486 
dml21_create_copy(struct dml2_context ** dst_dml_ctx,struct dml2_context * src_dml_ctx)487 bool dml21_create_copy(struct dml2_context **dst_dml_ctx,
488 	struct dml2_context *src_dml_ctx)
489 {
490 	/* Allocate memory for initializing DML21 instance */
491 	if (!dml21_allocate_memory(dst_dml_ctx))
492 		return false;
493 
494 	dml21_copy(*dst_dml_ctx, src_dml_ctx);
495 
496 	return true;
497 }
498 
/*
 * Re-run full DML21 initialization on an existing context, e.g. after the
 * configuration options change. Equivalent to dml21_init() on an
 * already-allocated context; no memory is (re)allocated.
 */
void dml21_reinit(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
{
	dml21_init(in_dc, dml_ctx, config);
}
503 
504