1 // SPDX-License-Identifier: MIT
2 //
3 // Copyright 2026 Advanced Micro Devices, Inc.
4
5 #include "dml2_internal_types.h"
6 #include "dml_top.h"
7 #include "dml2_core_dcn4_calcs.h"
8 #include "dml2_internal_shared_types.h"
9 #include "dml21_utils.h"
10 #include "dml21_translation_helper.h"
11 #include "dml2_dc_resource_mgmt.h"
12 #include "dml2_wrapper.h"
13 #include "dml2_wrapper_fpu.h"
14 #include "dml21_wrapper.h"
15 #include "dml21_wrapper_fpu.h"
16
17 #define INVALID -1
18
dml21_populate_configuration_options(const struct dc * in_dc,struct dml2_context * dml_ctx,const struct dml2_configuration_options * config)19 static void dml21_populate_configuration_options(const struct dc *in_dc,
20 struct dml2_context *dml_ctx,
21 const struct dml2_configuration_options *config)
22 {
23 dml_ctx->config = *config;
24
25 /* UCLK P-State options */
26 if (in_dc->debug.dml21_force_pstate_method) {
27 dml_ctx->config.pmo.force_pstate_method_enable = true;
28 for (int i = 0; i < MAX_PIPES; i++)
29 dml_ctx->config.pmo.force_pstate_method_values[i] = in_dc->debug.dml21_force_pstate_method_values[i];
30 } else {
31 dml_ctx->config.pmo.force_pstate_method_enable = false;
32 }
33 }
34
dml21_init(const struct dc * in_dc,struct dml2_context * dml_ctx,const struct dml2_configuration_options * config)35 void dml21_init(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
36 {
37 dml_ctx->architecture = dml2_architecture_21;
38
39 dml21_populate_configuration_options(in_dc, dml_ctx, config);
40
41 dml21_populate_dml_init_params(&dml_ctx->v21.dml_init, &dml_ctx->config, in_dc);
42
43 dml2_initialize_instance(&dml_ctx->v21.dml_init);
44 }
45
/* Re-initialization is a full re-init for DML2.1; no incremental path. */
void dml21_reinit(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
{
	dml21_init(in_dc, dml_ctx, config);
}
50
dml21_calculate_rq_and_dlg_params(const struct dc * dc,struct dc_state * context,struct resource_context * out_new_hw_state,struct dml2_context * in_ctx,unsigned int pipe_cnt)51 static void dml21_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state,
52 struct dml2_context *in_ctx, unsigned int pipe_cnt)
53 {
54 (void)out_new_hw_state;
55 (void)pipe_cnt;
56 unsigned int dml_prog_idx = 0, dc_pipe_index = 0, num_dpps_required = 0;
57 struct dml2_per_plane_programming *pln_prog = NULL;
58 struct dml2_per_stream_programming *stream_prog = NULL;
59 struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
60 struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};
61 int num_pipes;
62 unsigned int dml_phantom_prog_idx;
63
64 context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
65
66 /* copy global DCHUBBUB arbiter registers */
67 memcpy(&context->bw_ctx.bw.dcn.arb_regs, &in_ctx->v21.mode_programming.programming->global_regs.arb_regs, sizeof(struct dml2_display_arb_regs));
68
69 /* legacy only */
70 context->bw_ctx.bw.dcn.compbuf_size_kb = (int)in_ctx->v21.mode_programming.programming->global_regs.arb_regs.compbuf_size * 64;
71
72 context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
73 context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
74 context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;
75
76 /* phantom's start after main planes */
77 dml_phantom_prog_idx = in_ctx->v21.mode_programming.programming->display_config.num_planes;
78
79 for (dml_prog_idx = 0; dml_prog_idx < DML2_MAX_PLANES; dml_prog_idx++) {
80 pln_prog = &in_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
81
82 if (!pln_prog->plane_descriptor)
83 continue;
84
85 stream_prog = &in_ctx->v21.mode_programming.programming->stream_programming[pln_prog->plane_descriptor->stream_index];
86 num_dpps_required = pln_prog->num_dpps_required;
87
88 if (num_dpps_required == 0) {
89 continue;
90 }
91 num_pipes = dml21_find_dc_pipes_for_plane(dc, context, in_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
92
93 if (num_pipes <= 0)
94 continue;
95
96 /* program each pipe */
97 for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
98 dml21_program_dc_pipe(in_ctx, context, dc_main_pipes[dc_pipe_index], pln_prog, stream_prog);
99
100 if (pln_prog->phantom_plane.valid && dc_phantom_pipes[dc_pipe_index]) {
101 dml21_program_dc_pipe(in_ctx, context, dc_phantom_pipes[dc_pipe_index], pln_prog, stream_prog);
102 }
103 }
104
105 /* copy per plane mcache allocation */
106 memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx], &pln_prog->mcache_allocation, sizeof(struct dml2_mcache_surface_allocation));
107 if (pln_prog->phantom_plane.valid) {
108 memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx],
109 &pln_prog->phantom_plane.mcache_allocation,
110 sizeof(struct dml2_mcache_surface_allocation));
111
112 dml_phantom_prog_idx++;
113 }
114 }
115
116 /* assign global clocks */
117 context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
118 context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
119 if (in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values > 1) {
120 context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz =
121 in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values] * 1000;
122 } else {
123 context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[0] * 1000;
124 }
125
126 if (in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values > 1) {
127 context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
128 in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values] * 1000;
129 } else {
130 context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[0] * 1000;
131 }
132
133 /* get global mall allocation */
134 if (dc->res_pool->funcs->calculate_mall_ways_from_bytes) {
135 context->bw_ctx.bw.dcn.clk.num_ways = dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
136 } else {
137 context->bw_ctx.bw.dcn.clk.num_ways = 0;
138 }
139 }
140
dml21_prepare_mcache_params(struct dml2_context * dml_ctx,struct dc_state * context,struct dc_mcache_params * mcache_params)141 static void dml21_prepare_mcache_params(struct dml2_context *dml_ctx, struct dc_state *context, struct dc_mcache_params *mcache_params)
142 {
143 int dc_plane_idx = 0;
144 int dml_prog_idx, stream_idx, plane_idx;
145 struct dml2_per_plane_programming *pln_prog = NULL;
146
147 for (stream_idx = 0; stream_idx < context->stream_count; stream_idx++) {
148 for (plane_idx = 0; plane_idx < context->stream_status[stream_idx].plane_count; plane_idx++) {
149 dml_prog_idx = map_plane_to_dml21_display_cfg(dml_ctx, context->streams[stream_idx]->stream_id, context->stream_status[stream_idx].plane_states[plane_idx], context);
150 if (dml_prog_idx == INVALID) {
151 continue;
152 }
153 pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
154 mcache_params[dc_plane_idx].valid = pln_prog->mcache_allocation.valid;
155 mcache_params[dc_plane_idx].num_mcaches_plane0 = pln_prog->mcache_allocation.num_mcaches_plane0;
156 mcache_params[dc_plane_idx].num_mcaches_plane1 = pln_prog->mcache_allocation.num_mcaches_plane1;
157 mcache_params[dc_plane_idx].requires_dedicated_mall_mcache = pln_prog->mcache_allocation.requires_dedicated_mall_mcache;
158 mcache_params[dc_plane_idx].last_slice_sharing.plane0_plane1 = pln_prog->mcache_allocation.last_slice_sharing.plane0_plane1;
159 memcpy(mcache_params[dc_plane_idx].mcache_x_offsets_plane0,
160 pln_prog->mcache_allocation.mcache_x_offsets_plane0,
161 sizeof(int) * (DML2_MAX_MCACHES + 1));
162 memcpy(mcache_params[dc_plane_idx].mcache_x_offsets_plane1,
163 pln_prog->mcache_allocation.mcache_x_offsets_plane1,
164 sizeof(int) * (DML2_MAX_MCACHES + 1));
165 dc_plane_idx++;
166 }
167 }
168 }
169
dml21_check_mode_support(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)170 static bool dml21_check_mode_support(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
171 {
172 bool is_supported = false;
173 struct dml2_initialize_instance_in_out *dml_init = &dml_ctx->v21.dml_init;
174 struct dml2_check_mode_supported_in_out *mode_support = &dml_ctx->v21.mode_support;
175
176 memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
177 memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
178 memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.check_mode_supported_locals.mode_support_params, 0, sizeof(struct dml2_core_mode_support_in_out));
179
180 if (!context || context->stream_count == 0)
181 return true;
182
183 /* Scrub phantom's from current dc_state */
184 dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
185 dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
186
187 mode_support->dml2_instance = dml_init->dml2_instance;
188 dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
189 dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params.programming = dml_ctx->v21.mode_programming.programming;
190
191 is_supported = dml2_check_mode_supported(mode_support);
192
193 if (!is_supported)
194 return false;
195
196 return true;
197 }
198
dml21_mode_check_and_programming(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)199 static bool dml21_mode_check_and_programming(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
200 {
201 bool result = false;
202 struct dml2_build_mode_programming_in_out *mode_programming = &dml_ctx->v21.mode_programming;
203 struct dc_mcache_params mcache_params[MAX_PLANES] = {0};
204
205 memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
206 memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
207 memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params, 0, sizeof(struct dml2_core_mode_programming_in_out));
208
209 if (!context)
210 return true;
211
212 if (context->stream_count == 0) {
213 dml21_init_min_clocks_for_dc_state(dml_ctx, context);
214 dml21_build_fams2_programming(in_dc, context, dml_ctx);
215 return true;
216 }
217
218 /* scrub phantom's from current dc_state */
219 dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
220 dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
221
222 /* Populate stream, plane mappings and other fields in display config. */
223 result = dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
224 if (!result)
225 return false;
226
227 result = dml2_build_mode_programming(mode_programming);
228
229 if (!result)
230 return false;
231
232 /* Check and map HW resources */
233 if (result && !dml_ctx->config.skip_hw_state_mapping) {
234 dml21_map_hw_resources(dml_ctx);
235 dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, in_dc->current_state);
236 /* if subvp phantoms are present, expand them into dc context */
237 dml21_handle_phantom_streams_planes(in_dc, context, dml_ctx);
238
239 if (in_dc->res_pool->funcs->program_mcache_pipe_config) {
240 //Prepare mcache params for each plane based on mcache output from DML
241 dml21_prepare_mcache_params(dml_ctx, context, mcache_params);
242
243 //populate mcache regs to each pipe
244 dml_ctx->config.callbacks.allocate_mcache(context, mcache_params);
245 }
246 }
247
248 /* Copy DML CLK, WM and REG outputs to bandwidth context */
249 if (result && !dml_ctx->config.skip_hw_state_mapping) {
250 dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pipe_count);
251 dml21_copy_clocks_to_dc_state(dml_ctx, context);
252 dml21_extract_watermark_sets(in_dc, &context->bw_ctx.bw.dcn.watermarks, dml_ctx);
253 dml21_build_fams2_programming(in_dc, context, dml_ctx);
254 }
255
256 return true;
257 }
258
dml21_validate(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx,enum dc_validate_mode validate_mode)259 bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx,
260 enum dc_validate_mode validate_mode)
261 {
262 bool out = false;
263
264 /* Use dml21_check_mode_support for DC_VALIDATE_MODE_ONLY and DC_VALIDATE_MODE_AND_STATE_INDEX path */
265 if (validate_mode != DC_VALIDATE_MODE_AND_PROGRAMMING)
266 out = dml21_check_mode_support(in_dc, context, dml_ctx);
267 else
268 out = dml21_mode_check_and_programming(in_dc, context, dml_ctx);
269
270 return out;
271 }
272
/*
 * Build and apply HUBP mcache register programming for every plane
 * (main and SubVP phantom) in the given dc_state.
 *
 * Two passes over the DML plane list: pass 1 fills per-plane mcache
 * configuration descriptors in the context scratch area, then
 * dml2_build_mcache_programming() generates register values; pass 2
 * copies the generated registers into each backing pipe_ctx.
 */
void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	unsigned int dml_prog_idx, dml_phantom_prog_idx, dc_pipe_index;
	int num_pipes;
	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};

	struct dml2_per_plane_programming *pln_prog = NULL;
	struct dml2_plane_mcache_configuration_descriptor *mcache_config = NULL;
	struct prepare_mcache_programming_locals *l = &dml_ctx->v21.scratch.prepare_mcache_locals;

	/* nothing to program without streams */
	if (context->stream_count == 0) {
		return;
	}

	memset(&l->build_mcache_programming_params, 0, sizeof(struct dml2_build_mcache_programming_in_out));
	l->build_mcache_programming_params.dml2_instance = dml_ctx->v21.dml_init.dml2_instance;

	/* phantom's start after main planes */
	dml_phantom_prog_idx = dml_ctx->v21.mode_programming.programming->display_config.num_planes;

	/* Build mcache programming parameters per plane per pipe */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		/* descriptor slots are 1:1 with DML plane indices; counted even
		 * when the plane is later skipped below */
		mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_prog_idx];
		memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
		mcache_config->plane_descriptor = pln_prog->plane_descriptor;
		mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx];
		/* num_pipes is a char; guard against truncation */
		ASSERT(pln_prog->num_dpps_required <= 0x7F);
		mcache_config->num_pipes = (char)pln_prog->num_dpps_required;
		l->build_mcache_programming_params.num_configurations++;

		if (pln_prog->num_dpps_required == 0) {
			continue;
		}

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			dml21_get_pipe_mcache_config(context, dc_main_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
		}

		/* get config for each phantom pipe */
		/* NOTE(review): guard mixes main (stream) and phantom (plane_state)
		 * pipes, and the loop reuses the main-pipe count for phantom
		 * pipes — presumably phantoms mirror the main pipe split; confirm */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_phantom_prog_idx];
			memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
			mcache_config->plane_descriptor = pln_prog->plane_descriptor;
			mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx];
			ASSERT(pln_prog->num_dpps_required <= 0x7F);
			mcache_config->num_pipes = (char)pln_prog->num_dpps_required;
			l->build_mcache_programming_params.num_configurations++;

			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				dml21_get_pipe_mcache_config(context, dc_phantom_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
			}

			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}

	/* Call to generate mcache programming per plane per pipe for the given display configuration */
	dml2_build_mcache_programming(&l->build_mcache_programming_params);

	/* get per plane per pipe mcache programming */
	/* second pass: phantom index restarts at num_planes and is advanced in
	 * the same order as pass 1, so indices line up */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index]) {
				memcpy(&dc_main_pipes[dc_pipe_index]->mcache_regs,
						l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index],
						sizeof(struct dml2_hubp_pipe_mcache_regs));
			}
		}

		/* get config for each phantom pipe */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index]) {
					memcpy(&dc_phantom_pipes[dc_pipe_index]->mcache_regs,
							l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index],
							sizeof(struct dml2_hubp_pipe_mcache_regs));
				}
			}
			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}
}
384