xref: /linux/drivers/gpu/drm/msm/dp/dp_catalog.c (revision 7204df5e7e681238d457da03502f4b653403d7e7)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
7 
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/platform_device.h>
11 #include <linux/rational.h>
12 #include <drm/display/drm_dp_helper.h>
13 #include <drm/drm_print.h>
14 
15 #include "dp_catalog.h"
16 #include "dp_reg.h"
17 
18 #define POLLING_SLEEP_US			1000
19 #define POLLING_TIMEOUT_US			10000
20 
21 #define SCRAMBLER_RESET_COUNT_VALUE		0xFC
22 
23 #define DP_INTERRUPT_STATUS_ACK_SHIFT	1
24 #define DP_INTERRUPT_STATUS_MASK_SHIFT	2
25 
26 #define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)
27 
28 #define DP_INTERRUPT_STATUS1 \
29 	(DP_INTR_AUX_XFER_DONE| \
30 	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
31 	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
32 	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
33 	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
34 
35 #define DP_INTERRUPT_STATUS1_ACK \
36 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
37 #define DP_INTERRUPT_STATUS1_MASK \
38 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
39 
40 #define DP_INTERRUPT_STATUS2 \
41 	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
42 	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
43 
44 #define DP_INTERRUPT_STATUS2_ACK \
45 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46 #define DP_INTERRUPT_STATUS2_MASK \
47 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
48 
49 #define DP_INTERRUPT_STATUS4 \
50 	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
51 	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
52 
53 #define DP_INTERRUPT_MASK4 \
54 	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
55 	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
56 
57 #define DP_DEFAULT_AHB_OFFSET	0x0000
58 #define DP_DEFAULT_AHB_SIZE	0x0200
59 #define DP_DEFAULT_AUX_OFFSET	0x0200
60 #define DP_DEFAULT_AUX_SIZE	0x0200
61 #define DP_DEFAULT_LINK_OFFSET	0x0400
62 #define DP_DEFAULT_LINK_SIZE	0x0C00
63 #define DP_DEFAULT_P0_OFFSET	0x1000
64 #define DP_DEFAULT_P0_SIZE	0x0400
65 
/* One ioremapped MMIO sub-region of the DP controller: mapped base and length. */
struct dss_io_region {
	size_t len;		/* size of the mapping in bytes */
	void __iomem *base;	/* ioremapped virtual base address */
};
70 
/*
 * The four DP controller register sub-regions. These either come from four
 * separate reg entries or are carved out of a single legacy region using the
 * DP_DEFAULT_* offsets/sizes above.
 */
struct dss_io_data {
	struct dss_io_region ahb;	/* interrupt/reset/version registers */
	struct dss_io_region aux;	/* AUX channel and HPD registers */
	struct dss_io_region link;	/* mainlink registers */
	struct dss_io_region p0;	/* pixel stream 0 / TPG registers */
};
77 
/* Driver-private wrapper around the public struct dp_catalog. */
struct dp_catalog_private {
	struct device *dev;
	struct drm_device *drm_dev;
	struct dss_io_data io;		/* mapped register regions */
	/* per-SDP audio header programming table; indexed [sdp][header] */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;	/* public part handed to callers */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
86 
/* Capture all four DP register regions into an MSM display snapshot for debug. */
void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	struct dss_io_data *dss = &catalog->io;

	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
}
98 
99 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
100 {
101 	return readl_relaxed(catalog->io.aux.base + offset);
102 }
103 
104 static inline void dp_write_aux(struct dp_catalog_private *catalog,
105 			       u32 offset, u32 data)
106 {
107 	/*
108 	 * To make sure aux reg writes happens before any other operation,
109 	 * this function uses writel() instread of writel_relaxed()
110 	 */
111 	writel(data, catalog->io.aux.base + offset);
112 }
113 
114 static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
115 {
116 	return readl_relaxed(catalog->io.ahb.base + offset);
117 }
118 
119 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
120 			       u32 offset, u32 data)
121 {
122 	/*
123 	 * To make sure phy reg writes happens before any other operation,
124 	 * this function uses writel() instread of writel_relaxed()
125 	 */
126 	writel(data, catalog->io.ahb.base + offset);
127 }
128 
129 static inline void dp_write_p0(struct dp_catalog_private *catalog,
130 			       u32 offset, u32 data)
131 {
132 	/*
133 	 * To make sure interface reg writes happens before any other operation,
134 	 * this function uses writel() instread of writel_relaxed()
135 	 */
136 	writel(data, catalog->io.p0.base + offset);
137 }
138 
139 static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
140 			       u32 offset)
141 {
142 	/*
143 	 * To make sure interface reg writes happens before any other operation,
144 	 * this function uses writel() instread of writel_relaxed()
145 	 */
146 	return readl_relaxed(catalog->io.p0.base + offset);
147 }
148 
149 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
150 {
151 	return readl_relaxed(catalog->io.link.base + offset);
152 }
153 
154 static inline void dp_write_link(struct dp_catalog_private *catalog,
155 			       u32 offset, u32 data)
156 {
157 	/*
158 	 * To make sure link reg writes happens before any other operation,
159 	 * this function uses writel() instread of writel_relaxed()
160 	 */
161 	writel(data, catalog->io.link.base + offset);
162 }
163 
164 /* aux related catalog functions */
165 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
166 {
167 	struct dp_catalog_private *catalog = container_of(dp_catalog,
168 				struct dp_catalog_private, dp_catalog);
169 
170 	return dp_read_aux(catalog, REG_DP_AUX_DATA);
171 }
172 
173 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
174 {
175 	struct dp_catalog_private *catalog = container_of(dp_catalog,
176 				struct dp_catalog_private, dp_catalog);
177 
178 	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
179 	return 0;
180 }
181 
182 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
183 {
184 	struct dp_catalog_private *catalog = container_of(dp_catalog,
185 				struct dp_catalog_private, dp_catalog);
186 
187 	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
188 	return 0;
189 }
190 
191 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
192 {
193 	u32 data;
194 	struct dp_catalog_private *catalog = container_of(dp_catalog,
195 				struct dp_catalog_private, dp_catalog);
196 
197 	if (read) {
198 		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
199 		data &= ~DP_AUX_TRANS_CTRL_GO;
200 		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
201 	} else {
202 		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
203 	}
204 	return 0;
205 }
206 
/*
 * Clear any latched PHY AUX interrupts.
 *
 * The status register is read (and discarded) to sample/settle the latch,
 * then a fixed clear sequence is written; the exact 0x1f/0x9f/0 values and
 * ordering follow the hardware programming sequence — do not reorder.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
218 
/**
 * dp_catalog_aux_reset() - reset AUX controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function reset AUX controller
 *
 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
 *
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	/* pulse the reset bit: assert, hold for the recommended time, deassert */
	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
246 
/*
 * Enable or disable the AUX controller.
 *
 * When enabling, the timeout count and transfer limits are programmed
 * before the enable bit is set so the controller starts with valid limits.
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		aux_ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
265 
/*
 * Block until the HPD status register reports a connected sink.
 *
 * Returns 0 on success or -ETIMEDOUT from readl_poll_timeout() if the
 * connected bit never appears within the 500 ms window.
 */
int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog)
{
	u32 state;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* poll for hpd connected status every 2ms and timeout after 500ms */
	return readl_poll_timeout(catalog->io.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				2000, 500000);
}
278 
279 static void dump_regs(void __iomem *base, int len)
280 {
281 	int i;
282 	u32 x0, x4, x8, xc;
283 	u32 addr_off = 0;
284 
285 	len = DIV_ROUND_UP(len, 16);
286 	for (i = 0; i < len; i++) {
287 		x0 = readl_relaxed(base + addr_off);
288 		x4 = readl_relaxed(base + addr_off + 0x04);
289 		x8 = readl_relaxed(base + addr_off + 0x08);
290 		xc = readl_relaxed(base + addr_off + 0x0c);
291 
292 		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
293 		addr_off += 16;
294 	}
295 }
296 
297 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
298 {
299 	struct dp_catalog_private *catalog = container_of(dp_catalog,
300 		struct dp_catalog_private, dp_catalog);
301 	struct dss_io_data *io = &catalog->io;
302 
303 	pr_info("AHB regs\n");
304 	dump_regs(io->ahb.base, io->ahb.len);
305 
306 	pr_info("AUXCLK regs\n");
307 	dump_regs(io->aux.base, io->aux.len);
308 
309 	pr_info("LCLK regs\n");
310 	dump_regs(io->link.base, io->link.len);
311 
312 	pr_info("P0CLK regs\n");
313 	dump_regs(io->p0.base, io->p0.len);
314 }
315 
/*
 * Read and acknowledge pending AUX (STATUS1 group) interrupts.
 *
 * The mask bits are stripped from the raw status, the pending bits are
 * shifted into ack position and written back together with the mask bits
 * (so the interrupts stay enabled). Returns the pending status bits.
 */
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
332 
333 /* controller related catalog functions */
/*
 * Program the link transfer-unit registers: the TU size and both
 * valid-boundary values computed by the TU calculation.
 */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
345 
346 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
347 {
348 	struct dp_catalog_private *catalog = container_of(dp_catalog,
349 				struct dp_catalog_private, dp_catalog);
350 
351 	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
352 }
353 
/* Program the mainlink configuration control register with @cfg. */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
363 
364 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
365 {
366 	struct dp_catalog_private *catalog = container_of(dp_catalog,
367 				struct dp_catalog_private, dp_catalog);
368 	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
369 	u32 ln_mapping;
370 
371 	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
372 	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
373 	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
374 	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
375 
376 	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
377 			ln_mapping);
378 }
379 
380 void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog,
381 						bool enable)
382 {
383 	u32 val;
384 	struct dp_catalog_private *catalog = container_of(dp_catalog,
385 				struct dp_catalog_private, dp_catalog);
386 
387 	val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
388 
389 	if (enable)
390 		val |= DP_MAINLINK_CTRL_ENABLE;
391 	else
392 		val &= ~DP_MAINLINK_CTRL_ENABLE;
393 
394 	dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
395 }
396 
/*
 * Enable or disable the mainlink.
 *
 * The enable path performs the hardware-required sequence: disable and
 * deassert reset, pulse the reset bit, then enable with the framing
 * boundary select. The write order is part of the programming sequence —
 * do not reorder.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
431 
/*
 * Program the MISC0/MISC1 register with colorimetry and test-bit-depth
 * fields and force synchronous clock mode.
 *
 * NOTE(review): only the bit-depth field is cleared before OR-ing; the
 * colorimetry field is OR-ed onto whatever was there — looks like stale
 * colorimetry bits could survive a reconfiguration. Confirm against the
 * register layout in dp_reg.h.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
452 
453 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
454 					u32 rate, u32 stream_rate_khz,
455 					bool fixed_nvid)
456 {
457 	u32 pixel_m, pixel_n;
458 	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
459 	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
460 	u32 const link_rate_hbr2 = 540000;
461 	u32 const link_rate_hbr3 = 810000;
462 	unsigned long den, num;
463 
464 	struct dp_catalog_private *catalog = container_of(dp_catalog,
465 				struct dp_catalog_private, dp_catalog);
466 
467 	if (rate == link_rate_hbr3)
468 		pixel_div = 6;
469 	else if (rate == 162000 || rate == 270000)
470 		pixel_div = 2;
471 	else if (rate == link_rate_hbr2)
472 		pixel_div = 4;
473 	else
474 		DRM_ERROR("Invalid pixel mux divider\n");
475 
476 	dispcc_input_rate = (rate * 10) / pixel_div;
477 
478 	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
479 			(unsigned long)(1 << 16) - 1,
480 			(unsigned long)(1 << 16) - 1, &den, &num);
481 
482 	den = ~(den - num);
483 	den = den & 0xFFFF;
484 	pixel_m = num;
485 	pixel_n = den;
486 
487 	mvid = (pixel_m & 0xFFFF) * 5;
488 	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
489 
490 	if (nvid < nvid_fixed) {
491 		u32 temp;
492 
493 		temp = (nvid_fixed / nvid) * nvid;
494 		mvid = (nvid_fixed / nvid) * mvid;
495 		nvid = temp;
496 	}
497 
498 	if (link_rate_hbr2 == rate)
499 		nvid *= 2;
500 
501 	if (link_rate_hbr3 == rate)
502 		nvid *= 3;
503 
504 	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
505 	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
506 	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
507 	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
508 }
509 
/*
 * Request a link-training pattern via the state control register and wait
 * for the corresponding mainlink-ready bit.
 *
 * @state_bit: 1-based pattern index; BIT(state_bit - 1) is written to
 *             STATE_CTRL and the matching READY bit is polled.
 *
 * Returns 0 on success or the negative readx_poll_timeout() error.
 */
int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
					u32 state_bit)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(state_bit - 1);
	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io.link.base +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
		return ret;
	}
	return 0;
}
535 
536 /**
537  * dp_catalog_hw_revision() - retrieve DP hw revision
538  *
539  * @dp_catalog: DP catalog structure
540  *
541  * Return: DP controller hw revision
542  *
543  */
544 u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
545 {
546 	const struct dp_catalog_private *catalog = container_of(dp_catalog,
547 				struct dp_catalog_private, dp_catalog);
548 
549 	return dp_read_ahb(catalog, REG_DP_HW_VERSION);
550 }
551 
/**
 * dp_catalog_ctrl_reset() - reset DP controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function reset the DP controller
 *
 * NOTE: reset DP controller will also clear any pending HPD related interrupts
 *
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	/* pulse the software reset bit with the recommended hold time */
	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
579 
/*
 * Poll the mainlink-ready register for the ready-for-video bit.
 *
 * Returns true when the bit appears within the polling window, false on
 * timeout (an error is logged).
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io.link.base +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
599 
/*
 * Enable or disable the STATUS1/STATUS2 controller interrupt groups by
 * writing the mask bits (or zero) into the interrupt status registers.
 */
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
						bool enable)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
				DP_INTERRUPT_STATUS1_MASK);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
				DP_INTERRUPT_STATUS2_MASK);
	} else {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
	}
}
616 
617 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
618 			u32 intr_mask, bool en)
619 {
620 	struct dp_catalog_private *catalog = container_of(dp_catalog,
621 				struct dp_catalog_private, dp_catalog);
622 
623 	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
624 
625 	config = (en ? config | intr_mask : config & ~intr_mask);
626 
627 	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
628 					intr_mask, config);
629 	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
630 				config & DP_DP_HPD_INT_MASK);
631 }
632 
/* Enable the HPD block: turn on the reference timer, then the HPD circuit. */
void dp_catalog_ctrl_hpd_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
647 
/* Disable the HPD block: stop the reference timer, then the HPD circuit. */
void dp_catalog_ctrl_hpd_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
}
660 
/* Pulse UPDATE_SDP in SDP_CFG3 to latch new SDP (secondary data packet) config. */
static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog)
{
	/* trigger sdp */
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
}
667 
/*
 * Configure PSR: advertise PSR1 support in the PSR config register,
 * unmask the PSR interrupt group, and latch the SDP configuration.
 */
void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 config;

	/* enable PSR1 function */
	config = dp_read_link(catalog, REG_PSR_CONFIG);
	config |= PSR1_SUPPORTED;
	dp_write_link(catalog, REG_PSR_CONFIG, config);

	dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
	dp_catalog_enable_sdp(catalog);
}
682 
683 void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter)
684 {
685 	struct dp_catalog_private *catalog = container_of(dp_catalog,
686 			struct dp_catalog_private, dp_catalog);
687 	u32 cmd;
688 
689 	cmd = dp_read_link(catalog, REG_PSR_CMD);
690 
691 	cmd &= ~(PSR_ENTER | PSR_EXIT);
692 
693 	if (enter)
694 		cmd |= PSR_ENTER;
695 	else
696 		cmd |= PSR_EXIT;
697 
698 	dp_catalog_enable_sdp(catalog);
699 	dp_write_link(catalog, REG_PSR_CMD, cmd);
700 }
701 
702 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
703 {
704 	struct dp_catalog_private *catalog = container_of(dp_catalog,
705 				struct dp_catalog_private, dp_catalog);
706 	u32 status;
707 
708 	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
709 	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
710 	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
711 	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
712 
713 	return status;
714 }
715 
/*
 * Read, acknowledge, and return the pending (unmasked) HPD interrupts,
 * keeping the informational HPD state-status bits intact in the result.
 */
u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	int isr, mask;

	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	/* ack only the interrupt bits, not the status field */
	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
				 (isr & DP_DP_HPD_INT_MASK));
	mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);

	/*
	 * We only want to return interrupts that are unmasked to the caller.
	 * However, the interrupt status field also contains other
	 * informational bits about the HPD state status, so we only mask
	 * out the part of the register that tells us about which interrupts
	 * are pending.
	 */
	return isr & (mask | ~DP_DP_HPD_INT_MASK);
}
736 
/*
 * Read and acknowledge pending PSR (STATUS4 group) interrupts.
 * Pending bits are shifted into ack position and written back; the raw
 * status is returned to the caller.
 */
u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
	intr_ack = (intr & DP_INTERRUPT_STATUS4)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);

	return intr;
}
750 
/*
 * Read and acknowledge pending STATUS2-group controller interrupts.
 * Mirrors dp_catalog_aux_get_irq(): mask bits are stripped, pending bits
 * are acked, and the mask bits are written back so interrupts stay enabled.
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
766 
/* Pulse the PHY and PHY-PLL software reset bits with the recommended hold time. */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
777 
/*
 * Program the link to emit the requested PHY compliance test pattern.
 *
 * @pattern: one of the DP_PHY_TEST_PATTERN_* values from the DP helper;
 *           unknown values are logged and ignored.
 *
 * Each case follows a hardware-specific register sequence; the write
 * ordering within a case is part of the sequence — do not reorder.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* write scrambler reset with count cleared, then with count set */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* disable the scrambler bypass before the ERM pattern */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		drm_dbg_dp(catalog->drm_dev,
				"No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
854 
855 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
856 {
857 	struct dp_catalog_private *catalog = container_of(dp_catalog,
858 				struct dp_catalog_private, dp_catalog);
859 
860 	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
861 }
862 
863 /* panel related catalog functions */
864 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
865 {
866 	struct dp_catalog_private *catalog = container_of(dp_catalog,
867 				struct dp_catalog_private, dp_catalog);
868 	u32 reg;
869 
870 	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
871 				dp_catalog->total);
872 	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
873 				dp_catalog->sync_start);
874 	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
875 				dp_catalog->width_blanking);
876 	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
877 
878 	reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
879 
880 	if (dp_catalog->wide_bus_en)
881 		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
882 	else
883 		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
884 
885 
886 	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
887 
888 	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
889 	return 0;
890 }
891 
/*
 * Enable the test pattern generator (TPG) for the given display mode.
 *
 * Derives the interface timing (hsync/vsync periods, display start/end,
 * pulse widths) from @drm_mode, programs the P0 interface timing
 * registers, then enables a checkered-rectangle RGB pattern via the BIST
 * and timing engine. The register write order follows the hardware
 * programming sequence.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical active window expressed in hsync-period units */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift by the horizontal back/front porch within the line */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* pack period into the high half, pulse width into the low half */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	/* second field (F1) unused: zero it out */
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
}
959 
960 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
961 {
962 	struct dp_catalog_private *catalog = container_of(dp_catalog,
963 				struct dp_catalog_private, dp_catalog);
964 
965 	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
966 	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
967 	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
968 }
969 
970 static void __iomem *dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
971 {
972 	struct resource *res;
973 	void __iomem *base;
974 
975 	base = devm_platform_get_and_ioremap_resource(pdev, idx, &res);
976 	if (!IS_ERR(base))
977 		*len = resource_size(res);
978 
979 	return base;
980 }
981 
/*
 * Map the DP controller register sub-regions (AHB, AUX, link, P0) into
 * catalog->io, supporting both the current four-reg DT binding and the
 * legacy single-reg binding.
 *
 * Returns 0 on success or a negative errno if a required region cannot
 * be mapped.
 */
static int dp_catalog_get_io(struct dp_catalog_private *catalog)
{
	struct platform_device *pdev = to_platform_device(catalog->dev);
	struct dss_io_data *dss = &catalog->io;

	/* Region 0 (AHB) exists in both the legacy and the split binding. */
	dss->ahb.base = dp_ioremap(pdev, 0, &dss->ahb.len);
	if (IS_ERR(dss->ahb.base))
		return PTR_ERR(dss->ahb.base);

	dss->aux.base = dp_ioremap(pdev, 1, &dss->aux.len);
	if (IS_ERR(dss->aux.base)) {
		/*
		 * The initial binding had a single reg, but in order to
		 * support variation in the sub-region sizes this was split.
		 * dp_ioremap() will fail with -EINVAL here if only a single
		 * reg is specified, so fill in the sub-region offsets and
		 * lengths based on this single region.
		 */
		if (PTR_ERR(dss->aux.base) == -EINVAL) {
			/* the single legacy region must span all four windows */
			if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
				DRM_ERROR("legacy memory region not large enough\n");
				return -EINVAL;
			}

			/* carve fixed-offset windows out of the AHB mapping */
			dss->ahb.len = DP_DEFAULT_AHB_SIZE;
			dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
			dss->aux.len = DP_DEFAULT_AUX_SIZE;
			dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
			dss->link.len = DP_DEFAULT_LINK_SIZE;
			dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
			dss->p0.len = DP_DEFAULT_P0_SIZE;
		} else {
			/* reg 1 was present but could not be mapped */
			DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
			return PTR_ERR(dss->aux.base);
		}
	} else {
		/* Split binding: map the remaining link and p0 regions. */
		dss->link.base = dp_ioremap(pdev, 2, &dss->link.len);
		if (IS_ERR(dss->link.base)) {
			DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
			return PTR_ERR(dss->link.base);
		}

		dss->p0.base = dp_ioremap(pdev, 3, &dss->p0.len);
		if (IS_ERR(dss->p0.base)) {
			DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
			return PTR_ERR(dss->p0.base);
		}
	}

	return 0;
}
1033 
1034 struct dp_catalog *dp_catalog_get(struct device *dev)
1035 {
1036 	struct dp_catalog_private *catalog;
1037 	int ret;
1038 
1039 	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
1040 	if (!catalog)
1041 		return ERR_PTR(-ENOMEM);
1042 
1043 	catalog->dev = dev;
1044 
1045 	ret = dp_catalog_get_io(catalog);
1046 	if (ret)
1047 		return ERR_PTR(ret);
1048 
1049 	return &catalog->dp_catalog;
1050 }
1051 
1052 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
1053 {
1054 	struct dp_catalog_private *catalog;
1055 	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1056 	enum dp_catalog_audio_sdp_type sdp;
1057 	enum dp_catalog_audio_header_type header;
1058 
1059 	if (!dp_catalog)
1060 		return;
1061 
1062 	catalog = container_of(dp_catalog,
1063 		struct dp_catalog_private, dp_catalog);
1064 
1065 	sdp_map = catalog->audio_map;
1066 	sdp     = dp_catalog->sdp_type;
1067 	header  = dp_catalog->sdp_header;
1068 
1069 	dp_catalog->audio_data = dp_read_link(catalog,
1070 			sdp_map[sdp][header]);
1071 }
1072 
1073 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
1074 {
1075 	struct dp_catalog_private *catalog;
1076 	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1077 	enum dp_catalog_audio_sdp_type sdp;
1078 	enum dp_catalog_audio_header_type header;
1079 	u32 data;
1080 
1081 	if (!dp_catalog)
1082 		return;
1083 
1084 	catalog = container_of(dp_catalog,
1085 		struct dp_catalog_private, dp_catalog);
1086 
1087 	sdp_map = catalog->audio_map;
1088 	sdp     = dp_catalog->sdp_type;
1089 	header  = dp_catalog->sdp_header;
1090 	data    = dp_catalog->audio_data;
1091 
1092 	dp_write_link(catalog, sdp_map[sdp][header], data);
1093 }
1094 
1095 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
1096 {
1097 	struct dp_catalog_private *catalog;
1098 	u32 acr_ctrl, select;
1099 
1100 	if (!dp_catalog)
1101 		return;
1102 
1103 	catalog = container_of(dp_catalog,
1104 		struct dp_catalog_private, dp_catalog);
1105 
1106 	select = dp_catalog->audio_data;
1107 	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1108 
1109 	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1110 					select, acr_ctrl);
1111 
1112 	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1113 }
1114 
1115 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
1116 {
1117 	struct dp_catalog_private *catalog;
1118 	bool enable;
1119 	u32 audio_ctrl;
1120 
1121 	if (!dp_catalog)
1122 		return;
1123 
1124 	catalog = container_of(dp_catalog,
1125 		struct dp_catalog_private, dp_catalog);
1126 
1127 	enable = !!dp_catalog->audio_data;
1128 	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1129 
1130 	if (enable)
1131 		audio_ctrl |= BIT(0);
1132 	else
1133 		audio_ctrl &= ~BIT(0);
1134 
1135 	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1136 
1137 	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
1138 	/* make sure audio engine is disabled */
1139 	wmb();
1140 }
1141 
1142 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
1143 {
1144 	struct dp_catalog_private *catalog;
1145 	u32 sdp_cfg = 0;
1146 	u32 sdp_cfg2 = 0;
1147 
1148 	if (!dp_catalog)
1149 		return;
1150 
1151 	catalog = container_of(dp_catalog,
1152 		struct dp_catalog_private, dp_catalog);
1153 
1154 	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
1155 	/* AUDIO_TIMESTAMP_SDP_EN */
1156 	sdp_cfg |= BIT(1);
1157 	/* AUDIO_STREAM_SDP_EN */
1158 	sdp_cfg |= BIT(2);
1159 	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
1160 	sdp_cfg |= BIT(5);
1161 	/* AUDIO_ISRC_SDP_EN  */
1162 	sdp_cfg |= BIT(6);
1163 	/* AUDIO_INFOFRAME_SDP_EN  */
1164 	sdp_cfg |= BIT(20);
1165 
1166 	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1167 
1168 	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1169 
1170 	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1171 	/* IFRM_REGSRC -> Do not use reg values */
1172 	sdp_cfg2 &= ~BIT(0);
1173 	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1174 	sdp_cfg2 &= ~BIT(1);
1175 
1176 	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1177 
1178 	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1179 }
1180 
/*
 * dp_catalog_audio_init() - install the audio SDP register lookup table
 * @dp_catalog: DP catalog handle; NULL is tolerated and ignored
 *
 * The map is indexed as sdp_map[sdp_type][sdp_header] and yields the link
 * register offset for that header word. Each SDP exposes two registers;
 * the second register appears twice per row (headers 1 and 2 share it) —
 * presumably the two header words occupy different fields of that one
 * register; confirm against the register layout in dp_reg.h.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1221 
1222 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1223 {
1224 	struct dp_catalog_private *catalog;
1225 	u32 mainlink_levels, safe_to_exit_level;
1226 
1227 	if (!dp_catalog)
1228 		return;
1229 
1230 	catalog = container_of(dp_catalog,
1231 		struct dp_catalog_private, dp_catalog);
1232 
1233 	safe_to_exit_level = dp_catalog->audio_data;
1234 	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1235 	mainlink_levels &= 0xFE0;
1236 	mainlink_levels |= safe_to_exit_level;
1237 
1238 	drm_dbg_dp(catalog->drm_dev,
1239 			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1240 			 mainlink_levels, safe_to_exit_level);
1241 
1242 	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1243 }
1244