/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include <linux/delay.h>
#include <linux/slab.h>

#include "core_types.h"
#include "link_encoder.h"
#include "dce_dmcu.h"
#include "dm_services.h"
#include "reg_helper.h"
#include "fixed31_32.h"
#include "dc.h"

#define TO_DCE_DMCU(dmcu)\
	container_of(dmcu, struct dce_dmcu, base)

#define REG(reg) \
	(dmcu_dce->regs->reg)

#undef FN
#define FN(reg_name, field_name) \
	dmcu_dce->dmcu_shift->field_name, dmcu_dce->dmcu_mask->field_name

#define CTX \
	dmcu_dce->base.ctx

/* PSR related commands */
#define PSR_ENABLE 0x20
#define PSR_EXIT 0x21
#define PSR_SET 0x23
#define PSR_SET_WAITLOOP 0x31
#define MCP_INIT_DMCU 0x88
#define MCP_INIT_IRAM 0x89
#define MCP_SYNC_PHY_LOCK 0x90
#define MCP_SYNC_PHY_UNLOCK 0x91
#define MCP_BL_SET_PWM_FRAC 0x6A  /* Enable or disable Fractional PWM */
#define MCP_SEND_EDID_CEA 0xA0
#define EDID_CEA_CMD_ACK 1
#define EDID_CEA_CMD_NACK 2
#define MASTER_COMM_CNTL_REG__MASTER_COMM_INTERRUPT_MASK   0x00000001L
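
/*
 * Summary of the DMCU command handshake used throughout this file (derived
 * from the sequences below, not an authoritative protocol description):
 *   1. Wait for MASTER_COMM_INTERRUPT in MASTER_COMM_CNTL_REG to read 0,
 *      i.e. the microcontroller has consumed the previous command.
 *   2. Write any payload to MASTER_COMM_DATA_REG1..3.
 *   3. Write the command ID to MASTER_COMM_CMD_REG_BYTE0.
 *   4. Set MASTER_COMM_INTERRUPT to 1 to notify the DMCU; the firmware
 *      clears it once the command has been processed.
 */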

// PSP FW version
#define mmMP0_SMN_C2PMSG_58				0x1607A

// Register access policy version
#define mmMP0_SMN_C2PMSG_91				0x1609B

#if defined(CONFIG_DRM_AMD_DC_DCN)
static const uint32_t abm_gain_stepsize = 0x0060;
#endif

static bool dce_dmcu_init(struct dmcu *dmcu)
{
	// Do nothing
	return true;
}

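/*
 * IRAM accesses follow a common pattern in this file: enable host access
 * (with address auto-increment where needed), wait for the IRAM memory power
 * state to report "on", stream bytes through the WR/RD data port, then drop
 * host access again so the RAM can re-enter its dynamic sleep state.
 */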
static bool dce_dmcu_load_iram(struct dmcu *dmcu,
		unsigned int start_offset,
		const char *src,
		unsigned int bytes)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int count = 0;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_WR_ADDR_AUTO_INC, 1);

	REG_WAIT(DCI_MEM_PWR_STATUS, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	REG_WRITE(DMCU_IRAM_WR_CTRL, start_offset);

	for (count = 0; count < bytes; count++)
		REG_WRITE(DMCU_IRAM_WR_DATA, src[count]);

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_WR_ADDR_AUTO_INC, 0);

	return true;
}

static void dce_get_dmcu_psr_state(struct dmcu *dmcu, enum dc_psr_state *state)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	uint32_t psr_state_offset = 0xf0;

	/* Enable host access to IRAM */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 1);

	REG_WAIT(DCI_MEM_PWR_STATUS, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write address to IRAM_RD_ADDR in DMCU_IRAM_RD_CTRL */
	REG_WRITE(DMCU_IRAM_RD_CTRL, psr_state_offset);

	/* Read data from IRAM_RD_DATA in DMCU_IRAM_RD_DATA */
	*state = (enum dc_psr_state)REG_READ(DMCU_IRAM_RD_DATA);

	/* Disable host access to IRAM after finished using IRAM
	 * in order to allow dynamic sleep state
	 */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 0);
}

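/*
 * Enter or exit PSR by sending PSR_ENABLE/PSR_EXIT to the DMCU.  When @wait
 * is set, the PSR state byte is polled back from IRAM until it reflects the
 * requested transition or the retry budget is exhausted.
 */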
static void dce_dmcu_set_psr_enable(struct dmcu *dmcu, bool enable, bool wait)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	unsigned int retryCount;
	enum dc_psr_state state = PSR_STATE0;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
				dmcu_wait_reg_ready_interval,
				dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_Cmd */
	if (enable)
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_ENABLE);
	else
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_EXIT);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
	if (wait == true) {
		for (retryCount = 0; retryCount <= 100; retryCount++) {
			dce_get_dmcu_psr_state(dmcu, &state);
			if (enable) {
				if (state != PSR_STATE0)
					break;
			} else {
				if (state == PSR_STATE0)
					break;
			}
			udelay(10);
		}
	}
}

170 
171 static bool dce_dmcu_setup_psr(struct dmcu *dmcu,
172 		struct dc_link *link,
173 		struct psr_context *psr_context)
174 {
175 	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
176 
177 	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
178 	unsigned int dmcu_wait_reg_ready_interval = 100;
179 
180 	union dce_dmcu_psr_config_data_reg1 masterCmdData1;
181 	union dce_dmcu_psr_config_data_reg2 masterCmdData2;
182 	union dce_dmcu_psr_config_data_reg3 masterCmdData3;
183 
184 	link->link_enc->funcs->psr_program_dp_dphy_fast_training(link->link_enc,
185 			psr_context->psrExitLinkTrainingRequired);
186 
187 	/* Enable static screen interrupts for PSR supported display */
188 	/* Disable the interrupt coming from other displays. */
189 	REG_UPDATE_4(DMCU_INTERRUPT_TO_UC_EN_MASK,
190 			STATIC_SCREEN1_INT_TO_UC_EN, 0,
191 			STATIC_SCREEN2_INT_TO_UC_EN, 0,
192 			STATIC_SCREEN3_INT_TO_UC_EN, 0,
193 			STATIC_SCREEN4_INT_TO_UC_EN, 0);
194 
195 	switch (psr_context->controllerId) {
196 	/* Driver uses case 1 for unconfigured */
197 	case 1:
198 		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
199 				STATIC_SCREEN1_INT_TO_UC_EN, 1);
200 		break;
201 	case 2:
202 		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
203 				STATIC_SCREEN2_INT_TO_UC_EN, 1);
204 		break;
205 	case 3:
206 		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
207 				STATIC_SCREEN3_INT_TO_UC_EN, 1);
208 		break;
209 	case 4:
210 		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
211 				STATIC_SCREEN4_INT_TO_UC_EN, 1);
212 		break;
	case 5:
		/* CZ/NL only has 4 CRTC!!
		 * This case is defined in the HW regspec but is not
		 * really valid. There is no interrupt enable mask for
		 * these instances.
		 */
		break;
	case 6:
		/* CZ/NL only has 4 CRTC!!
		 * These are here because they are defined in HW regspec,
		 * but not really valid. There is no interrupt enable mask
		 * for these instances.
		 */
		break;
	default:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	}

	link->link_enc->funcs->psr_program_secondary_packet(link->link_enc,
			psr_context->sdpTransmitLineNumDeadline);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
					dmcu_wait_reg_ready_interval,
					dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_PSRHostConfigData */
	masterCmdData1.u32All = 0;
	masterCmdData1.bits.timehyst_frames = psr_context->timehyst_frames;
	masterCmdData1.bits.hyst_lines = psr_context->hyst_lines;
	masterCmdData1.bits.rfb_update_auto_en =
			psr_context->rfb_update_auto_en;
	masterCmdData1.bits.dp_port_num = psr_context->transmitterId;
	masterCmdData1.bits.dcp_sel = psr_context->controllerId;
	masterCmdData1.bits.phy_type  = psr_context->phyType;
	masterCmdData1.bits.frame_cap_ind =
			psr_context->psrFrameCaptureIndicationReq;
	masterCmdData1.bits.aux_chan = psr_context->channel;
	masterCmdData1.bits.aux_repeat = psr_context->aux_repeats;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
					masterCmdData1.u32All);

	masterCmdData2.u32All = 0;
	masterCmdData2.bits.dig_fe = psr_context->engineId;
	masterCmdData2.bits.dig_be = psr_context->transmitterId;
	masterCmdData2.bits.skip_wait_for_pll_lock =
			psr_context->skipPsrWaitForPllLock;
	masterCmdData2.bits.frame_delay = psr_context->frame_delay;
	masterCmdData2.bits.smu_phy_id = psr_context->smuPhyId;
	masterCmdData2.bits.num_of_controllers =
			psr_context->numberOfControllers;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
			masterCmdData2.u32All);

	masterCmdData3.u32All = 0;
	masterCmdData3.bits.psr_level = psr_context->psr_level.u32all;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
			masterCmdData3.u32All);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG,
			MASTER_COMM_CMD_REG_BYTE0, PSR_SET);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	return true;
}

static bool dce_is_dmcu_initialized(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_uc_reset;

	/* Check whether the microcontroller is held in reset */
	REG_GET(DMCU_STATUS, UC_IN_RESET, &dmcu_uc_reset);

	/* DMCU is not running */
	if (dmcu_uc_reset)
		return false;

	return true;
}

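/*
 * PSR_SET_WAITLOOP programs the number of wait-loop iterations the firmware
 * should use; the value is cached so repeated calls with the same number
 * become no-ops.
 */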
static void dce_psr_wait_loop(
	struct dmcu *dmcu,
	unsigned int wait_loop_number)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	union dce_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	if (dmcu->cached_wait_loop_number == wait_loop_number)
		return;

	/* DMCU is not running */
	if (!dce_is_dmcu_initialized(dmcu))
		return;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	masterCmdData1.u32 = 0;
	masterCmdData1.bits.wait_loop = wait_loop_number;
	dmcu->cached_wait_loop_number = wait_loop_number;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
}

static void dce_get_psr_wait_loop(
		struct dmcu *dmcu, unsigned int *psr_wait_loop_number)
{
	*psr_wait_loop_number = dmcu->cached_wait_loop_number;
}

#if defined(CONFIG_DRM_AMD_DC_DCN)
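/*
 * The DMCU firmware reports its version as a sequence of bytes starting at
 * IRAM offset 0xf1: interface version, ABM version, PSR version, then two
 * bytes of build number, read back through the auto-incrementing RD_DATA
 * port.
 */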
static void dcn10_get_dmcu_version(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	uint32_t dmcu_version_offset = 0xf1;

	/* Enable host access to IRAM with read-address auto-increment */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_RD_ADDR_AUTO_INC, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write address to IRAM_RD_ADDR and read from DATA register */
	REG_WRITE(DMCU_IRAM_RD_CTRL, dmcu_version_offset);
	dmcu->dmcu_version.interface_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.abm_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.psr_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.build_version = ((REG_READ(DMCU_IRAM_RD_DATA) << 8) |
						REG_READ(DMCU_IRAM_RD_DATA));

	/* Disable host access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_RD_ADDR_AUTO_INC, 0);
}

static void dcn10_dmcu_enable_fractional_pwm(struct dmcu *dmcu,
		uint32_t fractional_pwm)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* Wait until microcontroller is ready to process interrupt */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	/* Set PWM fractional enable/disable */
	REG_WRITE(MASTER_COMM_DATA_REG1, fractional_pwm);

	/* Set command to enable or disable fractional PWM on the microcontroller */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_BL_SET_PWM_FRAC);

	/* Notify microcontroller of new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);
}

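/*
 * DCN1.0 init: read the firmware state from DC_DMCU_SCRATCH and, if the PSP
 * has loaded but not started the firmware, send MCP_INIT_DMCU with the
 * backlight ramp boundary, the ABM gain stepsize and a bitmask of USB-C
 * capable UNIPHY transmitters, then confirm that the DMCU reports running.
 */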
static bool dcn10_dmcu_init(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	const struct dc_config *config = &dmcu->ctx->dc->config;
	bool status = false;
	struct dc_context *ctx = dmcu->ctx;
	unsigned int i;
	//  5 4 3 2 1 0
	//  F E D C B A - bit 0 is A, bit 5 is F
	unsigned int tx_interrupt_mask = 0;

	PERF_TRACE();
	/*  Definition of DC_DMCU_SCRATCH
	 *  0 : firmware not loaded
	 *  1 : PSP load DMCU FW but not initialized
	 *  2 : Firmware already initialized
	 */
	dmcu->dmcu_state = REG_READ(DC_DMCU_SCRATCH);

	for (i = 0; i < ctx->dc->link_count; i++) {
		if (ctx->dc->links[i]->link_enc->features.flags.bits.DP_IS_USB_C) {
			if (ctx->dc->links[i]->link_enc->transmitter >= TRANSMITTER_UNIPHY_A &&
					ctx->dc->links[i]->link_enc->transmitter <= TRANSMITTER_UNIPHY_F) {
				tx_interrupt_mask |= 1 << ctx->dc->links[i]->link_enc->transmitter;
			}
		}
	}

	switch (dmcu->dmcu_state) {
	case DMCU_UNLOADED:
		status = false;
		break;
	case DMCU_LOADED_UNINITIALIZED:
		/* Wait until microcontroller is ready to process interrupt */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

		/* Set initialized ramping boundary value */
		REG_WRITE(MASTER_COMM_DATA_REG1, 0xFFFF);

		/* Set backlight ramping stepsize */
		REG_WRITE(MASTER_COMM_DATA_REG2, abm_gain_stepsize);

		REG_WRITE(MASTER_COMM_DATA_REG3, tx_interrupt_mask);

		/* Set command to initialize microcontroller */
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_INIT_DMCU);

		/* Notify microcontroller of new command */
		REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

		/* Ensure command has been executed before continuing */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

		// Check state is initialized
		dmcu->dmcu_state = REG_READ(DC_DMCU_SCRATCH);

		// Continue only if the microcontroller reached the running
		// state; otherwise report failure
		if (dmcu->dmcu_state == DMCU_RUNNING) {
			/* Retrieve and cache the DMCU firmware version. */
			dcn10_get_dmcu_version(dmcu);

			/* Initialize DMCU to use fractional PWM or not */
			dcn10_dmcu_enable_fractional_pwm(dmcu,
				(config->disable_fractional_pwm == false) ? 1 : 0);
			status = true;
		} else {
			status = false;
		}

		break;
	case DMCU_RUNNING:
		status = true;
		break;
	default:
		status = false;
		break;
	}

	PERF_TRACE();
	return status;
}

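/*
 * On DCN2.1 the PSP writes its firmware version into DMCUB_SCRATCH15 once it
 * has loaded the DMCU.  If auto-load is expected but the scratch register is
 * still zero, the firmware is presumed absent and init bails out early.
 */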
static bool dcn21_dmcu_init(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	uint32_t dmcub_psp_version = REG_READ(DMCUB_SCRATCH15);

	if (dmcu->auto_load_dmcu && dmcub_psp_version == 0)
		return false;

	return dcn10_dmcu_init(dmcu);
}

static bool dcn10_dmcu_load_iram(struct dmcu *dmcu,
		unsigned int start_offset,
		const char *src,
		unsigned int bytes)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int count = 0;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_WR_ADDR_AUTO_INC, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	REG_WRITE(DMCU_IRAM_WR_CTRL, start_offset);

	for (count = 0; count < bytes; count++)
		REG_WRITE(DMCU_IRAM_WR_DATA, src[count]);

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_WR_ADDR_AUTO_INC, 0);

	/* Wait until microcontroller is ready to process interrupt */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	/* Set command to signal IRAM is loaded and to initialize IRAM */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_INIT_IRAM);

	/* Notify microcontroller of new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	return true;
}

static void dcn10_get_dmcu_psr_state(struct dmcu *dmcu, enum dc_psr_state *state)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	uint32_t psr_state_offset = 0xf0;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	/* Enable host access to IRAM */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write address to IRAM_RD_ADDR in DMCU_IRAM_RD_CTRL */
	REG_WRITE(DMCU_IRAM_RD_CTRL, psr_state_offset);

	/* Read data from IRAM_RD_DATA in DMCU_IRAM_RD_DATA */
	*state = (enum dc_psr_state)REG_READ(DMCU_IRAM_RD_DATA);

	/* Disable host access to IRAM after finished using IRAM
	 * in order to allow dynamic sleep state
	 */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 0);
}

static void dcn10_dmcu_set_psr_enable(struct dmcu *dmcu, bool enable, bool wait)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	unsigned int retryCount;
	enum dc_psr_state state = PSR_STATE0;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
				dmcu_wait_reg_ready_interval,
				dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_Cmd */
	if (enable)
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_ENABLE);
	else
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_EXIT);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Below loops 1000 x 500us = 500 ms.
	 *  Exit PSR may need to wait 1-2 frames to power up. Timeout after at
	 *  least a few frames. Should never hit the max retry assert below.
	 */
	if (wait == true) {
		for (retryCount = 0; retryCount <= 1000; retryCount++) {
			dcn10_get_dmcu_psr_state(dmcu, &state);
			if (enable) {
				if (state != PSR_STATE0)
					break;
			} else {
				if (state == PSR_STATE0)
					break;
			}
			udelay(500);
		}

		/* assert if max retry hit */
		if (retryCount >= 1000)
			ASSERT(0);
	}
}

static bool dcn10_dmcu_setup_psr(struct dmcu *dmcu,
		struct dc_link *link,
		struct psr_context *psr_context)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	union dce_dmcu_psr_config_data_reg1 masterCmdData1;
	union dce_dmcu_psr_config_data_reg2 masterCmdData2;
	union dce_dmcu_psr_config_data_reg3 masterCmdData3;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	link->link_enc->funcs->psr_program_dp_dphy_fast_training(link->link_enc,
			psr_context->psrExitLinkTrainingRequired);

	/* Enable static screen interrupts for PSR supported display */
	/* Disable the interrupt coming from other displays. */
	REG_UPDATE_4(DMCU_INTERRUPT_TO_UC_EN_MASK,
			STATIC_SCREEN1_INT_TO_UC_EN, 0,
			STATIC_SCREEN2_INT_TO_UC_EN, 0,
			STATIC_SCREEN3_INT_TO_UC_EN, 0,
			STATIC_SCREEN4_INT_TO_UC_EN, 0);

	switch (psr_context->controllerId) {
	/* Driver uses case 1 for unconfigured */
	case 1:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	case 2:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN2_INT_TO_UC_EN, 1);
		break;
	case 3:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN3_INT_TO_UC_EN, 1);
		break;
	case 4:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN4_INT_TO_UC_EN, 1);
		break;
	case 5:
		/* CZ/NL only has 4 CRTC!!
		 * This case is defined in the HW regspec but is not
		 * really valid. There is no interrupt enable mask for
		 * these instances.
		 */
		break;
	case 6:
		/* CZ/NL only has 4 CRTC!!
		 * These are here because they are defined in HW regspec,
		 * but not really valid. There is no interrupt enable mask
		 * for these instances.
		 */
		break;
	default:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	}

	link->link_enc->funcs->psr_program_secondary_packet(link->link_enc,
			psr_context->sdpTransmitLineNumDeadline);

	if (psr_context->allow_smu_optimizations)
		REG_UPDATE(SMU_INTERRUPT_CONTROL, DC_SMU_INT_ENABLE, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_PSRHostConfigData */
	masterCmdData1.u32All = 0;
	masterCmdData1.bits.timehyst_frames = psr_context->timehyst_frames;
	masterCmdData1.bits.hyst_lines = psr_context->hyst_lines;
	masterCmdData1.bits.rfb_update_auto_en =
			psr_context->rfb_update_auto_en;
	masterCmdData1.bits.dp_port_num = psr_context->transmitterId;
	masterCmdData1.bits.dcp_sel = psr_context->controllerId;
	masterCmdData1.bits.phy_type  = psr_context->phyType;
	masterCmdData1.bits.frame_cap_ind =
			psr_context->psrFrameCaptureIndicationReq;
	masterCmdData1.bits.aux_chan = psr_context->channel;
	masterCmdData1.bits.aux_repeat = psr_context->aux_repeats;
	masterCmdData1.bits.allow_smu_optimizations = psr_context->allow_smu_optimizations;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
					masterCmdData1.u32All);

	masterCmdData2.u32All = 0;
	masterCmdData2.bits.dig_fe = psr_context->engineId;
	masterCmdData2.bits.dig_be = psr_context->transmitterId;
	masterCmdData2.bits.skip_wait_for_pll_lock =
			psr_context->skipPsrWaitForPllLock;
	masterCmdData2.bits.frame_delay = psr_context->frame_delay;
	masterCmdData2.bits.smu_phy_id = psr_context->smuPhyId;
	masterCmdData2.bits.num_of_controllers =
			psr_context->numberOfControllers;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
			masterCmdData2.u32All);

	masterCmdData3.u32All = 0;
	masterCmdData3.bits.psr_level = psr_context->psr_level.u32all;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
			masterCmdData3.u32All);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG,
			MASTER_COMM_CMD_REG_BYTE0, PSR_SET);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

static void dcn10_psr_wait_loop(
	struct dmcu *dmcu,
	unsigned int wait_loop_number)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	union dce_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	if (wait_loop_number != 0) {
		/* waitDMCUReadyForCmd */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

		masterCmdData1.u32 = 0;
		masterCmdData1.bits.wait_loop = wait_loop_number;
		dmcu->cached_wait_loop_number = wait_loop_number;
		dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

		/* setDMCUParam_Cmd */
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

		/* notifyDMCUMsg */
		REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
	}
}

static void dcn10_get_psr_wait_loop(
		struct dmcu *dmcu, unsigned int *psr_wait_loop_number)
{
	*psr_wait_loop_number = dmcu->cached_wait_loop_number;
}

static bool dcn10_is_dmcu_initialized(struct dmcu *dmcu)
{
	/* microcontroller is not running */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;
	return true;
}

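/*
 * MCP_SYNC_PHY_LOCK/UNLOCK ask the DMCU firmware to pause and resume its own
 * PHY accesses so the driver can reprogram the PHY without racing it; the
 * exact firmware-side behaviour is an assumption based on the command names.
 */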
static bool dcn20_lock_phy(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SYNC_PHY_LOCK);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

static bool dcn20_unlock_phy(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SYNC_PHY_UNLOCK);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

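/*
 * MCP_SEND_EDID_CEA forwards up to 8 bytes of an EDID CEA block per call:
 * DATA_REG1 carries "offset << 16 | total_length" and DATA_REG2/3 carry the
 * payload bytes in big-endian order.  The firmware answers on the slave
 * (SCP) mailbox, drained by dcn10_get_scp_results() below; replies encode
 * the command in bits [9:0] and a length field in bits [15:10].
 */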
static bool dcn10_send_edid_cea(struct dmcu *dmcu,
		int offset,
		int total_length,
		uint8_t *data,
		int length)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	uint32_t header, data1, data2;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	if (length > 8 || length <= 0)
		return false;

	header = ((uint32_t)offset & 0xFFFF) << 16 | (total_length & 0xFFFF);
	data1 = (((uint32_t)data[0]) << 24) | (((uint32_t)data[1]) << 16) |
		(((uint32_t)data[2]) << 8) | ((uint32_t)data[3]);
	data2 = (((uint32_t)data[4]) << 24) | (((uint32_t)data[5]) << 16) |
		(((uint32_t)data[6]) << 8) | ((uint32_t)data[7]);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SEND_EDID_CEA);

	REG_WRITE(MASTER_COMM_DATA_REG1, header);
	REG_WRITE(MASTER_COMM_DATA_REG2, data1);
	REG_WRITE(MASTER_COMM_DATA_REG3, data2);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

static bool dcn10_get_scp_results(struct dmcu *dmcu,
		uint32_t *cmd,
		uint32_t *data1,
		uint32_t *data2,
		uint32_t *data3)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	*cmd = REG_READ(SLAVE_COMM_CMD_REG);
	*data1 = REG_READ(SLAVE_COMM_DATA_REG1);
	*data2 = REG_READ(SLAVE_COMM_DATA_REG2);
	*data3 = REG_READ(SLAVE_COMM_DATA_REG3);

	/* clear SCP interrupt */
	REG_UPDATE(SLAVE_COMM_CNTL_REG, SLAVE_COMM_INTERRUPT, 0);

	return true;
}

static bool dcn10_recv_amd_vsdb(struct dmcu *dmcu,
		int *version,
		int *min_frame_rate,
		int *max_frame_rate)
{
	uint32_t data[4];
	int cmd, ack, len;

	if (!dcn10_get_scp_results(dmcu, &data[0], &data[1], &data[2], &data[3]))
		return false;

	cmd = data[0] & 0x3FF;
	len = (data[0] >> 10) & 0x3F;
	ack = data[1];

	if (cmd != MCP_SEND_EDID_CEA || ack != EDID_CEA_CMD_ACK || len != 12)
		return false;

	if ((data[2] & 0xFF)) {
		*version = (data[2] >> 8) & 0xFF;
		*min_frame_rate = (data[3] >> 16) & 0xFFFF;
		*max_frame_rate = data[3] & 0xFFFF;
		return true;
	}

	return false;
}

static bool dcn10_recv_edid_cea_ack(struct dmcu *dmcu, int *offset)
{
	uint32_t data[4];
	int cmd, ack;

	if (!dcn10_get_scp_results(dmcu,
				&data[0], &data[1], &data[2], &data[3]))
		return false;

	cmd = data[0] & 0x3FF;
	ack = data[1];

	if (cmd != MCP_SEND_EDID_CEA)
		return false;

	if (ack == EDID_CEA_CMD_ACK)
		return true;

	*offset = data[2]; /* nack */
	return false;
}

#endif //(CONFIG_DRM_AMD_DC_DCN)

static const struct dmcu_funcs dce_funcs = {
	.dmcu_init = dce_dmcu_init,
	.load_iram = dce_dmcu_load_iram,
	.set_psr_enable = dce_dmcu_set_psr_enable,
	.setup_psr = dce_dmcu_setup_psr,
	.get_psr_state = dce_get_dmcu_psr_state,
	.set_psr_wait_loop = dce_psr_wait_loop,
	.get_psr_wait_loop = dce_get_psr_wait_loop,
	.is_dmcu_initialized = dce_is_dmcu_initialized
};

#if defined(CONFIG_DRM_AMD_DC_DCN)
static const struct dmcu_funcs dcn10_funcs = {
	.dmcu_init = dcn10_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.send_edid_cea = dcn10_send_edid_cea,
	.recv_amd_vsdb = dcn10_recv_amd_vsdb,
	.recv_edid_cea_ack = dcn10_recv_edid_cea_ack,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized
};

static const struct dmcu_funcs dcn20_funcs = {
	.dmcu_init = dcn10_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized,
	.lock_phy = dcn20_lock_phy,
	.unlock_phy = dcn20_unlock_phy
};

static const struct dmcu_funcs dcn21_funcs = {
	.dmcu_init = dcn21_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized,
	.lock_phy = dcn20_lock_phy,
	.unlock_phy = dcn20_unlock_phy
};
#endif

static void dce_dmcu_construct(
	struct dce_dmcu *dmcu_dce,
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dmcu *base = &dmcu_dce->base;

	base->ctx = ctx;
	base->funcs = &dce_funcs;
	base->cached_wait_loop_number = 0;

	dmcu_dce->regs = regs;
	dmcu_dce->dmcu_shift = dmcu_shift;
	dmcu_dce->dmcu_mask = dmcu_mask;
}

#if defined(CONFIG_DRM_AMD_DC_DCN)
static void dcn21_dmcu_construct(
		struct dce_dmcu *dmcu_dce,
		struct dc_context *ctx,
		const struct dce_dmcu_registers *regs,
		const struct dce_dmcu_shift *dmcu_shift,
		const struct dce_dmcu_mask *dmcu_mask)
{
	uint32_t psp_version = 0;

	dce_dmcu_construct(dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

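	/*
	 * Outside of FPGA emulation, read the PSP firmware version and flag
	 * whether the PSP is expected to auto-load the DMCU.  The meaning of
	 * the version threshold below is an assumption; the value is kept
	 * as-is from the original code.
	 */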
	if (!IS_FPGA_MAXIMUS_DC(ctx->dce_environment)) {
		psp_version = dm_read_reg(ctx, mmMP0_SMN_C2PMSG_58);
		dmcu_dce->base.auto_load_dmcu = ((psp_version & 0x00FF00FF) > 0x00110029);
		dmcu_dce->base.psp_version = psp_version;
	}
}
#endif

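/*
 * The constructors below are exported for the per-ASIC resource code.  A
 * caller is expected to look roughly like the following (the register, shift
 * and mask table names are hypothetical, defined by the resource file):
 *
 *	struct dmcu *dmcu = dcn10_dmcu_create(ctx, &dmcu_regs,
 *					      &dmcu_shift, &dmcu_mask);
 *	if (dmcu == NULL)
 *		goto create_fail;
 *	...
 *	dce_dmcu_destroy(&dmcu);
 */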
struct dmcu *dce_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dce_funcs;

	return &dmcu_dce->base;
}

#if defined(CONFIG_DRM_AMD_DC_DCN)
struct dmcu *dcn10_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn10_funcs;

	return &dmcu_dce->base;
}

struct dmcu *dcn20_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn20_funcs;

	return &dmcu_dce->base;
}

struct dmcu *dcn21_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dcn21_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn21_funcs;

	return &dmcu_dce->base;
}
#endif

void dce_dmcu_destroy(struct dmcu **dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(*dmcu);

	kfree(dmcu_dce);
	*dmcu = NULL;
}