xref: /linux/drivers/gpu/drm/msm/dp/dp_catalog.c (revision 2c1ed907520c50326b8f604907a8478b27881a2e)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
7 
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/platform_device.h>
11 #include <linux/rational.h>
12 #include <drm/display/drm_dp_helper.h>
13 #include <drm/drm_print.h>
14 
15 #include "dp_catalog.h"
16 #include "dp_reg.h"
17 
18 #define POLLING_SLEEP_US			1000
19 #define POLLING_TIMEOUT_US			10000
20 
21 #define SCRAMBLER_RESET_COUNT_VALUE		0xFC
22 
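/*
 * In the REG_DP_INTR_STATUS* registers each interrupt source owns three
 * consecutive bits: the raw status bit, an ack bit one position above it,
 * and a mask (enable) bit two positions above it. Hence the two shifts
 * below: a status bit at BIT(n) is acked via BIT(n + 1) and enabled via
 * BIT(n + 2).
 */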
23 #define DP_INTERRUPT_STATUS_ACK_SHIFT	1
24 #define DP_INTERRUPT_STATUS_MASK_SHIFT	2
25 
26 #define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)
27 
28 #define DP_INTERRUPT_STATUS1 \
29 	(DP_INTR_AUX_XFER_DONE | \
30 	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
31 	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
32 	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
33 	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
34 
35 #define DP_INTERRUPT_STATUS1_ACK \
36 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
37 #define DP_INTERRUPT_STATUS1_MASK \
38 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
39 
40 #define DP_INTERRUPT_STATUS2 \
41 	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
42 	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
43 
44 #define DP_INTERRUPT_STATUS2_ACK \
45 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46 #define DP_INTERRUPT_STATUS2_MASK \
47 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
48 
49 #define DP_INTERRUPT_STATUS4 \
50 	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
51 	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
52 
53 #define DP_INTERRUPT_MASK4 \
54 	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
55 	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
56 
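/*
 * Fallback register sub-region layout, used when the DT provides only the
 * legacy single "reg" entry (see msm_dp_catalog_get_io()).
 */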
57 #define DP_DEFAULT_AHB_OFFSET	0x0000
58 #define DP_DEFAULT_AHB_SIZE	0x0200
59 #define DP_DEFAULT_AUX_OFFSET	0x0200
60 #define DP_DEFAULT_AUX_SIZE	0x0200
61 #define DP_DEFAULT_LINK_OFFSET	0x0400
62 #define DP_DEFAULT_LINK_SIZE	0x0C00
63 #define DP_DEFAULT_P0_OFFSET	0x1000
64 #define DP_DEFAULT_P0_SIZE	0x0400
65 
66 struct dss_io_region {
67 	size_t len;
68 	void __iomem *base;
69 };
70 
71 struct dss_io_data {
72 	struct dss_io_region ahb;
73 	struct dss_io_region aux;
74 	struct dss_io_region link;
75 	struct dss_io_region p0;
76 };
77 
78 struct msm_dp_catalog_private {
79 	struct device *dev;
80 	struct drm_device *drm_dev;
81 	struct dss_io_data io;
82 	struct msm_dp_catalog msm_dp_catalog;
83 };
84 
85 void msm_dp_catalog_snapshot(struct msm_dp_catalog *msm_dp_catalog, struct msm_disp_state *disp_state)
86 {
87 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
88 			struct msm_dp_catalog_private, msm_dp_catalog);
89 	struct dss_io_data *dss = &catalog->io;
90 
91 	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
92 	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
93 	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
94 	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
95 }
96 
97 static inline u32 msm_dp_read_aux(struct msm_dp_catalog_private *catalog, u32 offset)
98 {
99 	return readl_relaxed(catalog->io.aux.base + offset);
100 }
101 
102 static inline void msm_dp_write_aux(struct msm_dp_catalog_private *catalog,
103 			       u32 offset, u32 data)
104 {
105 	/*
106 	 * To make sure aux reg writes happen before any other operation,
107 	 * this function uses writel() instead of writel_relaxed()
108 	 */
109 	writel(data, catalog->io.aux.base + offset);
110 }
111 
112 static inline u32 msm_dp_read_ahb(const struct msm_dp_catalog_private *catalog, u32 offset)
113 {
114 	return readl_relaxed(catalog->io.ahb.base + offset);
115 }
116 
117 static inline void msm_dp_write_ahb(struct msm_dp_catalog_private *catalog,
118 			       u32 offset, u32 data)
119 {
120 	/*
121 	 * To make sure phy reg writes happen before any other operation,
122 	 * this function uses writel() instead of writel_relaxed()
123 	 */
124 	writel(data, catalog->io.ahb.base + offset);
125 }
126 
127 static inline void msm_dp_write_p0(struct msm_dp_catalog_private *catalog,
128 			       u32 offset, u32 data)
129 {
130 	/*
131 	 * To make sure interface reg writes happen before any other operation,
132 	 * this function uses writel() instead of writel_relaxed()
133 	 */
134 	writel(data, catalog->io.p0.base + offset);
135 }
136 
137 static inline u32 msm_dp_read_p0(struct msm_dp_catalog_private *catalog,
138 			       u32 offset)
139 {
140 	/*
141 	 * This is a plain interface register read with no ordering requirement,
142 	 * so readl_relaxed() is sufficient here
143 	 */
144 	return readl_relaxed(catalog->io.p0.base + offset);
145 }
146 
147 static inline u32 msm_dp_read_link(struct msm_dp_catalog_private *catalog, u32 offset)
148 {
149 	return readl_relaxed(catalog->io.link.base + offset);
150 }
151 
152 static inline void msm_dp_write_link(struct msm_dp_catalog_private *catalog,
153 			       u32 offset, u32 data)
154 {
155 	/*
156 	 * To make sure link reg writes happen before any other operation,
157 	 * this function uses writel() instead of writel_relaxed()
158 	 */
159 	writel(data, catalog->io.link.base + offset);
160 }
161 
162 /* aux related catalog functions */
163 u32 msm_dp_catalog_aux_read_data(struct msm_dp_catalog *msm_dp_catalog)
164 {
165 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
166 				struct msm_dp_catalog_private, msm_dp_catalog);
167 
168 	return msm_dp_read_aux(catalog, REG_DP_AUX_DATA);
169 }
170 
171 int msm_dp_catalog_aux_write_data(struct msm_dp_catalog *msm_dp_catalog, u32 data)
172 {
173 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
174 				struct msm_dp_catalog_private, msm_dp_catalog);
175 
176 	msm_dp_write_aux(catalog, REG_DP_AUX_DATA, data);
177 	return 0;
178 }
179 
180 int msm_dp_catalog_aux_write_trans(struct msm_dp_catalog *msm_dp_catalog, u32 data)
181 {
182 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
183 				struct msm_dp_catalog_private, msm_dp_catalog);
184 
185 	msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
186 	return 0;
187 }
188 
189 int msm_dp_catalog_aux_clear_trans(struct msm_dp_catalog *msm_dp_catalog, bool read)
190 {
191 	u32 data;
192 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
193 				struct msm_dp_catalog_private, msm_dp_catalog);
194 
195 	if (read) {
196 		data = msm_dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
197 		data &= ~DP_AUX_TRANS_CTRL_GO;
198 		msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
199 	} else {
200 		msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
201 	}
202 	return 0;
203 }
204 
205 int msm_dp_catalog_aux_clear_hw_interrupts(struct msm_dp_catalog *msm_dp_catalog)
206 {
207 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
208 				struct msm_dp_catalog_private, msm_dp_catalog);
209 
210 	msm_dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
211 	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
212 	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
213 	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
214 	return 0;
215 }
216 
217 /**
218  * msm_dp_catalog_aux_reset() - reset AUX controller
219  *
220  * @msm_dp_catalog: DP catalog structure
221  *
222  * Return: void
223  *
224  * This function resets the AUX controller.
225  *
226  * NOTE: resetting the AUX controller also clears any pending HPD related interrupts
227  *
228  */
229 void msm_dp_catalog_aux_reset(struct msm_dp_catalog *msm_dp_catalog)
230 {
231 	u32 aux_ctrl;
232 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
233 				struct msm_dp_catalog_private, msm_dp_catalog);
234 
235 	aux_ctrl = msm_dp_read_aux(catalog, REG_DP_AUX_CTRL);
236 
237 	aux_ctrl |= DP_AUX_CTRL_RESET;
238 	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
239 	usleep_range(1000, 1100); /* h/w recommended delay */
240 
241 	aux_ctrl &= ~DP_AUX_CTRL_RESET;
242 	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
243 }
244 
245 void msm_dp_catalog_aux_enable(struct msm_dp_catalog *msm_dp_catalog, bool enable)
246 {
247 	u32 aux_ctrl;
248 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
249 				struct msm_dp_catalog_private, msm_dp_catalog);
250 
251 	aux_ctrl = msm_dp_read_aux(catalog, REG_DP_AUX_CTRL);
252 
253 	if (enable) {
254 		msm_dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
255 		msm_dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
256 		aux_ctrl |= DP_AUX_CTRL_ENABLE;
257 	} else {
258 		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
259 	}
260 
261 	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
262 }
263 
264 int msm_dp_catalog_aux_wait_for_hpd_connect_state(struct msm_dp_catalog *msm_dp_catalog,
265 					      unsigned long wait_us)
266 {
267 	u32 state;
268 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
269 				struct msm_dp_catalog_private, msm_dp_catalog);
270 
271 	/* poll for hpd connected status every 2ms and timeout after wait_us */
272 	return readl_poll_timeout(catalog->io.aux.base +
273 				REG_DP_DP_HPD_INT_STATUS,
274 				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
275 				min(wait_us, 2000), wait_us);
276 }
277 
278 u32 msm_dp_catalog_aux_get_irq(struct msm_dp_catalog *msm_dp_catalog)
279 {
280 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
281 				struct msm_dp_catalog_private, msm_dp_catalog);
282 	u32 intr, intr_ack;
283 
284 	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS);
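	/*
	 * Drop the mask (enable) bits from the raw status, derive the ack bits
	 * for the pending sources, then write the acks back together with the
	 * mask bits so the interrupt sources stay enabled.
	 */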
285 	intr &= ~DP_INTERRUPT_STATUS1_MASK;
286 	intr_ack = (intr & DP_INTERRUPT_STATUS1)
287 			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
288 	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
289 			DP_INTERRUPT_STATUS1_MASK);
290 
291 	return intr;
292 
293 }
294 
295 /* controller related catalog functions */
296 void msm_dp_catalog_ctrl_update_transfer_unit(struct msm_dp_catalog *msm_dp_catalog,
297 				u32 msm_dp_tu, u32 valid_boundary,
298 				u32 valid_boundary2)
299 {
300 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
301 				struct msm_dp_catalog_private, msm_dp_catalog);
302 
303 	msm_dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
304 	msm_dp_write_link(catalog, REG_DP_TU, msm_dp_tu);
305 	msm_dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
306 }
307 
308 void msm_dp_catalog_ctrl_state_ctrl(struct msm_dp_catalog *msm_dp_catalog, u32 state)
309 {
310 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
311 				struct msm_dp_catalog_private, msm_dp_catalog);
312 
313 	msm_dp_write_link(catalog, REG_DP_STATE_CTRL, state);
314 }
315 
316 void msm_dp_catalog_ctrl_config_ctrl(struct msm_dp_catalog *msm_dp_catalog, u32 cfg)
317 {
318 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
319 				struct msm_dp_catalog_private, msm_dp_catalog);
320 
321 	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);
322 
323 	msm_dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
324 }
325 
326 void msm_dp_catalog_ctrl_lane_mapping(struct msm_dp_catalog *msm_dp_catalog)
327 {
328 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
329 				struct msm_dp_catalog_private, msm_dp_catalog);
330 	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
331 	u32 ln_mapping;
332 
333 	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
334 	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
335 	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
336 	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
337 
338 	msm_dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
339 			ln_mapping);
340 }
341 
342 void msm_dp_catalog_ctrl_psr_mainlink_enable(struct msm_dp_catalog *msm_dp_catalog,
343 						bool enable)
344 {
345 	u32 val;
346 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
347 				struct msm_dp_catalog_private, msm_dp_catalog);
348 
349 	val = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
350 
351 	if (enable)
352 		val |= DP_MAINLINK_CTRL_ENABLE;
353 	else
354 		val &= ~DP_MAINLINK_CTRL_ENABLE;
355 
356 	msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
357 }
358 
359 void msm_dp_catalog_ctrl_mainlink_ctrl(struct msm_dp_catalog *msm_dp_catalog,
360 						bool enable)
361 {
362 	u32 mainlink_ctrl;
363 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
364 				struct msm_dp_catalog_private, msm_dp_catalog);
365 
366 	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
367 	if (enable) {
368 		/*
369 		 * To make sure link reg writes happen before any other operation,
370 		 * msm_dp_write_link() uses writel()
371 		 */
372 		mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
373 
374 		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
375 						DP_MAINLINK_CTRL_ENABLE);
376 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
377 
378 		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
379 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
380 
381 		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
382 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
383 
384 		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
385 					DP_MAINLINK_FB_BOUNDARY_SEL);
386 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
387 	} else {
388 		mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
389 		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
390 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
391 	}
392 }
393 
394 void msm_dp_catalog_ctrl_config_misc(struct msm_dp_catalog *msm_dp_catalog,
395 					u32 colorimetry_cfg,
396 					u32 test_bits_depth)
397 {
398 	u32 misc_val;
399 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
400 				struct msm_dp_catalog_private, msm_dp_catalog);
401 
402 	misc_val = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);
403 
404 	/* clear bpp bits */
405 	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
406 	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
407 	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
408 	/* Configure clock to synchronous mode */
409 	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
410 
411 	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
412 	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
413 }
414 
415 void msm_dp_catalog_setup_peripheral_flush(struct msm_dp_catalog *msm_dp_catalog)
416 {
417 	u32 mainlink_ctrl, hw_revision;
418 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
419 				struct msm_dp_catalog_private, msm_dp_catalog);
420 
421 	mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
422 
423 	hw_revision = msm_dp_catalog_hw_revision(msm_dp_catalog);
424 	if (hw_revision >= DP_HW_VERSION_1_2)
425 		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_SDE_PERIPH_UPDATE;
426 	else
427 		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_UPDATE_SDP;
428 
429 	msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
430 }
431 
432 void msm_dp_catalog_ctrl_config_msa(struct msm_dp_catalog *msm_dp_catalog,
433 					u32 rate, u32 stream_rate_khz,
434 					bool is_ycbcr_420)
435 {
436 	u32 pixel_m, pixel_n;
437 	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
438 	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
439 	u32 const link_rate_hbr2 = 540000;
440 	u32 const link_rate_hbr3 = 810000;
441 	unsigned long den, num;
442 
443 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
444 				struct msm_dp_catalog_private, msm_dp_catalog);
445 
446 	if (rate == link_rate_hbr3)
447 		pixel_div = 6;
448 	else if (rate == 162000 || rate == 270000)
449 		pixel_div = 2;
450 	else if (rate == link_rate_hbr2)
451 		pixel_div = 4;
452 	else
453 		DRM_ERROR("Invalid pixel mux divider\n");
454 
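	/*
	 * rate is the link rate in kHz; e.g. for HBR (270000 kHz) pixel_div is
	 * 2, giving a dispcc_input_rate of 270000 * 10 / 2 = 1350000 kHz.
	 */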
455 	dispcc_input_rate = (rate * 10) / pixel_div;
456 
457 	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
458 			(unsigned long)(1 << 16) - 1,
459 			(unsigned long)(1 << 16) - 1, &den, &num);
460 
461 	den = ~(den - num);
462 	den = den & 0xFFFF;
463 	pixel_m = num;
464 	pixel_n = den;
465 
466 	mvid = (pixel_m & 0xFFFF) * 5;
467 	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
468 
469 	if (nvid < nvid_fixed) {
470 		u32 temp;
471 
472 		temp = (nvid_fixed / nvid) * nvid;
473 		mvid = (nvid_fixed / nvid) * mvid;
474 		nvid = temp;
475 	}
476 
477 	if (is_ycbcr_420)
478 		mvid /= 2;
479 
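	/*
	 * pixel_div was 4 for HBR2 and 6 for HBR3 (vs. 2 for RBR/HBR), so scale
	 * nvid back up to keep the M/N ratio consistent across link rates.
	 */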
480 	if (link_rate_hbr2 == rate)
481 		nvid *= 2;
482 
483 	if (link_rate_hbr3 == rate)
484 		nvid *= 3;
485 
486 	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
487 	msm_dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
488 	msm_dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
489 	msm_dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
490 }
491 
492 int msm_dp_catalog_ctrl_set_pattern_state_bit(struct msm_dp_catalog *msm_dp_catalog,
493 					u32 state_bit)
494 {
495 	int bit, ret;
496 	u32 data;
497 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
498 				struct msm_dp_catalog_private, msm_dp_catalog);
499 
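	/*
	 * state_bit is 1-based: convert it to the corresponding DP_STATE_CTRL
	 * bit, then poll the matching bit in REG_DP_MAINLINK_READY (offset by
	 * DP_MAINLINK_READY_LINK_TRAINING_SHIFT).
	 */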
500 	bit = BIT(state_bit - 1);
501 	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
502 	msm_dp_catalog_ctrl_state_ctrl(msm_dp_catalog, bit);
503 
504 	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
505 
506 	/* Poll for mainlink ready status */
507 	ret = readx_poll_timeout(readl, catalog->io.link.base +
508 					REG_DP_MAINLINK_READY,
509 					data, data & bit,
510 					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
511 	if (ret < 0) {
512 		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
513 		return ret;
514 	}
515 	return 0;
516 }
517 
518 /**
519  * msm_dp_catalog_hw_revision() - retrieve DP hw revision
520  *
521  * @msm_dp_catalog: DP catalog structure
522  *
523  * Return: DP controller hw revision
524  *
525  */
526 u32 msm_dp_catalog_hw_revision(const struct msm_dp_catalog *msm_dp_catalog)
527 {
528 	const struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
529 				struct msm_dp_catalog_private, msm_dp_catalog);
530 
531 	return msm_dp_read_ahb(catalog, REG_DP_HW_VERSION);
532 }
533 
534 /**
535  * msm_dp_catalog_ctrl_reset() - reset DP controller
536  *
537  * @msm_dp_catalog: DP catalog structure
538  *
539  * Return: void
540  *
541  * This function resets the DP controller.
542  *
543  * NOTE: resetting the DP controller also clears any pending HPD related interrupts
544  *
545  */
546 void msm_dp_catalog_ctrl_reset(struct msm_dp_catalog *msm_dp_catalog)
547 {
548 	u32 sw_reset;
549 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
550 				struct msm_dp_catalog_private, msm_dp_catalog);
551 
552 	sw_reset = msm_dp_read_ahb(catalog, REG_DP_SW_RESET);
553 
554 	sw_reset |= DP_SW_RESET;
555 	msm_dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
556 	usleep_range(1000, 1100); /* h/w recommended delay */
557 
558 	sw_reset &= ~DP_SW_RESET;
559 	msm_dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
560 }
561 
562 bool msm_dp_catalog_ctrl_mainlink_ready(struct msm_dp_catalog *msm_dp_catalog)
563 {
564 	u32 data;
565 	int ret;
566 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
567 				struct msm_dp_catalog_private, msm_dp_catalog);
568 
569 	/* Poll for mainlink ready status */
570 	ret = readl_poll_timeout(catalog->io.link.base +
571 				REG_DP_MAINLINK_READY,
572 				data, data & DP_MAINLINK_READY_FOR_VIDEO,
573 				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
574 	if (ret < 0) {
575 		DRM_ERROR("mainlink not ready\n");
576 		return false;
577 	}
578 
579 	return true;
580 }
581 
582 void msm_dp_catalog_ctrl_enable_irq(struct msm_dp_catalog *msm_dp_catalog,
583 						bool enable)
584 {
585 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
586 				struct msm_dp_catalog_private, msm_dp_catalog);
587 
588 	if (enable) {
589 		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS,
590 				DP_INTERRUPT_STATUS1_MASK);
591 		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
592 				DP_INTERRUPT_STATUS2_MASK);
593 	} else {
594 		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
595 		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
596 	}
597 }
598 
599 void msm_dp_catalog_hpd_config_intr(struct msm_dp_catalog *msm_dp_catalog,
600 			u32 intr_mask, bool en)
601 {
602 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
603 				struct msm_dp_catalog_private, msm_dp_catalog);
604 
605 	u32 config = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
606 
607 	config = (en ? config | intr_mask : config & ~intr_mask);
608 
609 	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
610 					intr_mask, config);
611 	msm_dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
612 				config & DP_DP_HPD_INT_MASK);
613 }
614 
615 void msm_dp_catalog_ctrl_hpd_enable(struct msm_dp_catalog *msm_dp_catalog)
616 {
617 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
618 				struct msm_dp_catalog_private, msm_dp_catalog);
619 
620 	u32 reftimer = msm_dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
621 
622 	/* Configure REFTIMER and enable it */
623 	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
624 	msm_dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
625 
626 	/* Enable HPD */
627 	msm_dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
628 }
629 
630 void msm_dp_catalog_ctrl_hpd_disable(struct msm_dp_catalog *msm_dp_catalog)
631 {
632 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
633 				struct msm_dp_catalog_private, msm_dp_catalog);
634 
635 	u32 reftimer = msm_dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
636 
637 	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
638 	msm_dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
639 
640 	msm_dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
641 }
642 
643 static void msm_dp_catalog_enable_sdp(struct msm_dp_catalog_private *catalog)
644 {
645 	/* trigger sdp */
646 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
647 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
648 }
649 
650 void msm_dp_catalog_ctrl_config_psr(struct msm_dp_catalog *msm_dp_catalog)
651 {
652 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
653 				struct msm_dp_catalog_private, msm_dp_catalog);
654 	u32 config;
655 
656 	/* enable PSR1 function */
657 	config = msm_dp_read_link(catalog, REG_PSR_CONFIG);
658 	config |= PSR1_SUPPORTED;
659 	msm_dp_write_link(catalog, REG_PSR_CONFIG, config);
660 
661 	msm_dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
662 	msm_dp_catalog_enable_sdp(catalog);
663 }
664 
665 void msm_dp_catalog_ctrl_set_psr(struct msm_dp_catalog *msm_dp_catalog, bool enter)
666 {
667 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
668 			struct msm_dp_catalog_private, msm_dp_catalog);
669 	u32 cmd;
670 
671 	cmd = msm_dp_read_link(catalog, REG_PSR_CMD);
672 
673 	cmd &= ~(PSR_ENTER | PSR_EXIT);
674 
675 	if (enter)
676 		cmd |= PSR_ENTER;
677 	else
678 		cmd |= PSR_EXIT;
679 
680 	msm_dp_catalog_enable_sdp(catalog);
681 	msm_dp_write_link(catalog, REG_PSR_CMD, cmd);
682 }
683 
684 u32 msm_dp_catalog_link_is_connected(struct msm_dp_catalog *msm_dp_catalog)
685 {
686 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
687 				struct msm_dp_catalog_private, msm_dp_catalog);
688 	u32 status;
689 
690 	status = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
691 	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
692 	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
693 	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
694 
695 	return status;
696 }
697 
698 u32 msm_dp_catalog_hpd_get_intr_status(struct msm_dp_catalog *msm_dp_catalog)
699 {
700 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
701 				struct msm_dp_catalog_private, msm_dp_catalog);
702 	int isr, mask;
703 
704 	isr = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
705 	msm_dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
706 				 (isr & DP_DP_HPD_INT_MASK));
707 	mask = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
708 
709 	/*
710 	 * We only want to return interrupts that are unmasked to the caller.
711 	 * However, the interrupt status field also contains other
712 	 * informational bits about the HPD state status, so we only mask
713 	 * out the part of the register that tells us about which interrupts
714 	 * are pending.
715 	 */
716 	return isr & (mask | ~DP_DP_HPD_INT_MASK);
717 }
718 
719 u32 msm_dp_catalog_ctrl_read_psr_interrupt_status(struct msm_dp_catalog *msm_dp_catalog)
720 {
721 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
722 				struct msm_dp_catalog_private, msm_dp_catalog);
723 	u32 intr, intr_ack;
724 
725 	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
726 	intr_ack = (intr & DP_INTERRUPT_STATUS4)
727 			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
728 	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);
729 
730 	return intr;
731 }
732 
733 int msm_dp_catalog_ctrl_get_interrupt(struct msm_dp_catalog *msm_dp_catalog)
734 {
735 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
736 				struct msm_dp_catalog_private, msm_dp_catalog);
737 	u32 intr, intr_ack;
738 
739 	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
740 	intr &= ~DP_INTERRUPT_STATUS2_MASK;
741 	intr_ack = (intr & DP_INTERRUPT_STATUS2)
742 			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
743 	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
744 			intr_ack | DP_INTERRUPT_STATUS2_MASK);
745 
746 	return intr;
747 }
748 
749 void msm_dp_catalog_ctrl_phy_reset(struct msm_dp_catalog *msm_dp_catalog)
750 {
751 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
752 				struct msm_dp_catalog_private, msm_dp_catalog);
753 
754 	msm_dp_write_ahb(catalog, REG_DP_PHY_CTRL,
755 			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
756 	usleep_range(1000, 1100); /* h/w recommended delay */
757 	msm_dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
758 }
759 
760 void msm_dp_catalog_ctrl_send_phy_pattern(struct msm_dp_catalog *msm_dp_catalog,
761 			u32 pattern)
762 {
763 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
764 				struct msm_dp_catalog_private, msm_dp_catalog);
765 	u32 value = 0x0;
766 
767 	/* Make sure to clear the current pattern before starting a new one */
768 	msm_dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);
769 
770 	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
771 	switch (pattern) {
772 	case DP_PHY_TEST_PATTERN_D10_2:
773 		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
774 				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
775 		break;
776 	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
777 		value &= ~(1 << 16);
778 		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
779 					value);
780 		value |= SCRAMBLER_RESET_COUNT_VALUE;
781 		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
782 					value);
783 		msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
784 					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
785 		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
786 					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
787 		break;
788 	case DP_PHY_TEST_PATTERN_PRBS7:
789 		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
790 				DP_STATE_CTRL_LINK_PRBS7);
791 		break;
792 	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
793 		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
794 				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
795 		/* 00111110000011111000001111100000 */
796 		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
797 				0x3E0F83E0);
798 		/* 00001111100000111110000011111000 */
799 		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
800 				0x0F83E0F8);
801 		/* 1111100000111110 */
802 		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
803 				0x0000F83E);
804 		break;
805 	case DP_PHY_TEST_PATTERN_CP2520:
806 		value = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
807 		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
808 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
809 
810 		value = DP_HBR2_ERM_PATTERN;
811 		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
812 				value);
813 		value |= SCRAMBLER_RESET_COUNT_VALUE;
814 		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
815 					value);
816 		msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
817 					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
818 		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
819 					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
820 		value = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
821 		value |= DP_MAINLINK_CTRL_ENABLE;
822 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
823 		break;
824 	case DP_PHY_TEST_PATTERN_SEL_MASK:
825 		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
826 				DP_MAINLINK_CTRL_ENABLE);
827 		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
828 				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
829 		break;
830 	default:
831 		drm_dbg_dp(catalog->drm_dev,
832 				"No valid test pattern requested: %#x\n", pattern);
833 		break;
834 	}
835 }
836 
837 u32 msm_dp_catalog_ctrl_read_phy_pattern(struct msm_dp_catalog *msm_dp_catalog)
838 {
839 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
840 				struct msm_dp_catalog_private, msm_dp_catalog);
841 
842 	return msm_dp_read_link(catalog, REG_DP_MAINLINK_READY);
843 }
844 
845 /* panel related catalog functions */
846 int msm_dp_catalog_panel_timing_cfg(struct msm_dp_catalog *msm_dp_catalog, u32 total,
847 				u32 sync_start, u32 width_blanking, u32 msm_dp_active)
848 {
849 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
850 				struct msm_dp_catalog_private, msm_dp_catalog);
851 	u32 reg;
852 
853 	msm_dp_write_link(catalog, REG_DP_TOTAL_HOR_VER, total);
854 	msm_dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC, sync_start);
855 	msm_dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY, width_blanking);
856 	msm_dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, msm_dp_active);
857 
858 	reg = msm_dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
859 
860 	if (msm_dp_catalog->wide_bus_en)
861 		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
862 	else
863 		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
864 
865 
866 	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", msm_dp_catalog->wide_bus_en, reg);
867 
868 	msm_dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
869 	return 0;
870 }
871 
872 static void msm_dp_catalog_panel_send_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog, struct dp_sdp *vsc_sdp)
873 {
874 	struct msm_dp_catalog_private *catalog;
875 	u32 header[2];
876 	u32 val;
877 	int i;
878 
879 	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
880 
881 	msm_dp_utils_pack_sdp_header(&vsc_sdp->sdp_header, header);
882 
883 	msm_dp_write_link(catalog, MMSS_DP_GENERIC0_0, header[0]);
884 	msm_dp_write_link(catalog, MMSS_DP_GENERIC0_1, header[1]);
885 
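	/* pack the VSC SDP data bytes, little-endian, into 32-bit GENERIC0 writes */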
886 	for (i = 0; i < sizeof(vsc_sdp->db); i += 4) {
887 		val = ((vsc_sdp->db[i]) | (vsc_sdp->db[i + 1] << 8) | (vsc_sdp->db[i + 2] << 16) |
888 		       (vsc_sdp->db[i + 3] << 24));
889 		msm_dp_write_link(catalog, MMSS_DP_GENERIC0_2 + i, val);
890 	}
891 }
892 
893 static void msm_dp_catalog_panel_update_sdp(struct msm_dp_catalog *msm_dp_catalog)
894 {
895 	struct msm_dp_catalog_private *catalog;
896 	u32 hw_revision;
897 
898 	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
899 
900 	hw_revision = msm_dp_catalog_hw_revision(msm_dp_catalog);
901 	if (hw_revision < DP_HW_VERSION_1_2 && hw_revision >= DP_HW_VERSION_1_0) {
902 		msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x01);
903 		msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x00);
904 	}
905 }
906 
907 void msm_dp_catalog_panel_enable_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog, struct dp_sdp *vsc_sdp)
908 {
909 	struct msm_dp_catalog_private *catalog;
910 	u32 cfg, cfg2, misc;
911 
912 	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
913 
914 	cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
915 	cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
916 	misc = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);
917 
918 	cfg |= GEN0_SDP_EN;
919 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
920 
921 	cfg2 |= GENERIC0_SDPSIZE_VALID;
922 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
923 
924 	msm_dp_catalog_panel_send_vsc_sdp(msm_dp_catalog, vsc_sdp);
925 
926 	/* indicates presence of VSC (BIT(6) of MISC1) */
927 	misc |= DP_MISC1_VSC_SDP;
928 
929 	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=1\n");
930 
931 	pr_debug("misc settings = 0x%x\n", misc);
932 	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
933 
934 	msm_dp_catalog_panel_update_sdp(msm_dp_catalog);
935 }
936 
937 void msm_dp_catalog_panel_disable_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog)
938 {
939 	struct msm_dp_catalog_private *catalog;
940 	u32 cfg, cfg2, misc;
941 
942 	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
943 
944 	cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
945 	cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
946 	misc = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);
947 
948 	cfg &= ~GEN0_SDP_EN;
949 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
950 
951 	cfg2 &= ~GENERIC0_SDPSIZE_VALID;
952 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
953 
954 	/* switch back to MSA */
955 	misc &= ~DP_MISC1_VSC_SDP;
956 
957 	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=0\n");
958 
959 	pr_debug("misc settings = 0x%x\n", misc);
960 	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
961 
962 	msm_dp_catalog_panel_update_sdp(msm_dp_catalog);
963 }
964 
965 void msm_dp_catalog_panel_tpg_enable(struct msm_dp_catalog *msm_dp_catalog,
966 				struct drm_display_mode *drm_mode)
967 {
968 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
969 				struct msm_dp_catalog_private, msm_dp_catalog);
970 	u32 hsync_period, vsync_period;
971 	u32 display_v_start, display_v_end;
972 	u32 hsync_start_x, hsync_end_x;
973 	u32 v_sync_width;
974 	u32 hsync_ctl;
975 	u32 display_hctl;
976 
977 	/* TPG config parameters */
978 	hsync_period = drm_mode->htotal;
979 	vsync_period = drm_mode->vtotal;
980 
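	/*
	 * The INTF timing generator takes vertical start/end positions in
	 * pixel-clock cycles, so line counts are multiplied by hsync_period
	 * (the line length in pixels) below.
	 */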
981 	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
982 					hsync_period);
983 	display_v_end = ((vsync_period - (drm_mode->vsync_start -
984 					drm_mode->vdisplay))
985 					* hsync_period) - 1;
986 
987 	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
988 	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
989 
990 	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
991 	hsync_end_x = hsync_period - (drm_mode->hsync_start -
992 					drm_mode->hdisplay) - 1;
993 
994 	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
995 
996 	hsync_ctl = (hsync_period << 16) |
997 			(drm_mode->hsync_end - drm_mode->hsync_start);
998 	display_hctl = (hsync_end_x << 16) | hsync_start_x;
999 
1000 
1001 	msm_dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
1002 	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
1003 			hsync_period);
1004 	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
1005 			hsync_period);
1006 	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
1007 	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
1008 	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
1009 	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
1010 	msm_dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
1011 	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
1012 	msm_dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
1013 	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
1014 	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
1015 	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
1016 	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
1017 	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
1018 	msm_dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);
1019 
1020 	msm_dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
1021 				DP_TPG_CHECKERED_RECT_PATTERN);
1022 	msm_dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
1023 				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
1024 				DP_TPG_VIDEO_CONFIG_RGB);
1025 	msm_dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
1026 				DP_BIST_ENABLE_DPBIST_EN);
1027 	msm_dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
1028 				DP_TIMING_ENGINE_EN_EN);
1029 	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
1030 }
1031 
1032 void msm_dp_catalog_panel_tpg_disable(struct msm_dp_catalog *msm_dp_catalog)
1033 {
1034 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1035 				struct msm_dp_catalog_private, msm_dp_catalog);
1036 
1037 	msm_dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
1038 	msm_dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
1039 	msm_dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
1040 }
1041 
1042 static void __iomem *msm_dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
1043 {
1044 	struct resource *res;
1045 	void __iomem *base;
1046 
1047 	base = devm_platform_get_and_ioremap_resource(pdev, idx, &res);
1048 	if (!IS_ERR(base))
1049 		*len = resource_size(res);
1050 
1051 	return base;
1052 }
1053 
1054 static int msm_dp_catalog_get_io(struct msm_dp_catalog_private *catalog)
1055 {
1056 	struct platform_device *pdev = to_platform_device(catalog->dev);
1057 	struct dss_io_data *dss = &catalog->io;
1058 
1059 	dss->ahb.base = msm_dp_ioremap(pdev, 0, &dss->ahb.len);
1060 	if (IS_ERR(dss->ahb.base))
1061 		return PTR_ERR(dss->ahb.base);
1062 
1063 	dss->aux.base = msm_dp_ioremap(pdev, 1, &dss->aux.len);
1064 	if (IS_ERR(dss->aux.base)) {
1065 		/*
1066 		 * The initial binding had a single reg, but in order to
1067 		 * support variation in the sub-region sizes this was split.
1068 		 * msm_dp_ioremap() will fail with -EINVAL here if only a single
1069 		 * reg is specified, so fill in the sub-region offsets and
1070 		 * lengths based on this single region.
1071 		 */
1072 		if (PTR_ERR(dss->aux.base) == -EINVAL) {
1073 			if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
1074 				DRM_ERROR("legacy memory region not large enough\n");
1075 				return -EINVAL;
1076 			}
1077 
1078 			dss->ahb.len = DP_DEFAULT_AHB_SIZE;
1079 			dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
1080 			dss->aux.len = DP_DEFAULT_AUX_SIZE;
1081 			dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
1082 			dss->link.len = DP_DEFAULT_LINK_SIZE;
1083 			dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
1084 			dss->p0.len = DP_DEFAULT_P0_SIZE;
1085 		} else {
1086 			DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
1087 			return PTR_ERR(dss->aux.base);
1088 		}
1089 	} else {
1090 		dss->link.base = msm_dp_ioremap(pdev, 2, &dss->link.len);
1091 		if (IS_ERR(dss->link.base)) {
1092 			DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
1093 			return PTR_ERR(dss->link.base);
1094 		}
1095 
1096 		dss->p0.base = msm_dp_ioremap(pdev, 3, &dss->p0.len);
1097 		if (IS_ERR(dss->p0.base)) {
1098 			DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
1099 			return PTR_ERR(dss->p0.base);
1100 		}
1101 	}
1102 
1103 	return 0;
1104 }
1105 
1106 struct msm_dp_catalog *msm_dp_catalog_get(struct device *dev)
1107 {
1108 	struct msm_dp_catalog_private *catalog;
1109 	int ret;
1110 
1111 	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
1112 	if (!catalog)
1113 		return ERR_PTR(-ENOMEM);
1114 
1115 	catalog->dev = dev;
1116 
1117 	ret = msm_dp_catalog_get_io(catalog);
1118 	if (ret)
1119 		return ERR_PTR(ret);
1120 
1121 	return &catalog->msm_dp_catalog;
1122 }
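
/*
 * Usage sketch (illustrative only): a caller would typically do something
 * like the following during probe, using only functions from this file:
 *
 *	struct msm_dp_catalog *cat = msm_dp_catalog_get(&pdev->dev);
 *
 *	if (IS_ERR(cat))
 *		return PTR_ERR(cat);
 *	msm_dp_catalog_ctrl_enable_irq(cat, true);
 */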
1123 
1124 void msm_dp_catalog_write_audio_stream(struct msm_dp_catalog *msm_dp_catalog,
1125 				       struct dp_sdp_header *sdp_hdr)
1126 {
1127 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1128 				struct msm_dp_catalog_private, msm_dp_catalog);
1129 	u32 header[2];
1130 
1131 	msm_dp_utils_pack_sdp_header(sdp_hdr, header);
1132 
1133 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_STREAM_0, header[0]);
1134 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_STREAM_1, header[1]);
1135 }
1136 
1137 void msm_dp_catalog_write_audio_timestamp(struct msm_dp_catalog *msm_dp_catalog,
1138 					  struct dp_sdp_header *sdp_hdr)
1139 {
1140 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1141 				struct msm_dp_catalog_private, msm_dp_catalog);
1142 	u32 header[2];
1143 
1144 	msm_dp_utils_pack_sdp_header(sdp_hdr, header);
1145 
1146 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_TIMESTAMP_0, header[0]);
1147 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_TIMESTAMP_1, header[1]);
1148 }
1149 
1150 void msm_dp_catalog_write_audio_infoframe(struct msm_dp_catalog *msm_dp_catalog,
1151 					  struct dp_sdp_header *sdp_hdr)
1152 {
1153 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1154 				struct msm_dp_catalog_private, msm_dp_catalog);
1155 	u32 header[2];
1156 
1157 	msm_dp_utils_pack_sdp_header(sdp_hdr, header);
1158 
1159 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_INFOFRAME_0, header[0]);
1160 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_INFOFRAME_1, header[1]);
1161 }
1162 
1163 void msm_dp_catalog_write_audio_copy_mgmt(struct msm_dp_catalog *msm_dp_catalog,
1164 					  struct dp_sdp_header *sdp_hdr)
1165 {
1166 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1167 				struct msm_dp_catalog_private, msm_dp_catalog);
1168 	u32 header[2];
1169 
1170 	msm_dp_utils_pack_sdp_header(sdp_hdr, header);
1171 
1172 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_COPYMANAGEMENT_0, header[0]);
1173 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_COPYMANAGEMENT_1, header[1]);
1174 }
1175 
1176 void msm_dp_catalog_write_audio_isrc(struct msm_dp_catalog *msm_dp_catalog,
1177 				     struct dp_sdp_header *sdp_hdr)
1178 {
1179 	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1180 				struct msm_dp_catalog_private, msm_dp_catalog);
1181 	struct dp_sdp_header tmp = *sdp_hdr;
1182 	u32 header[2];
1183 	u32 reg;
1184 
1185 	/* XXX: is it necessary to preserve this field? */
1186 	reg = msm_dp_read_link(catalog, MMSS_DP_AUDIO_ISRC_1);
1187 	tmp.HB3 = FIELD_GET(HEADER_3_MASK, reg);
1188 
1189 	msm_dp_utils_pack_sdp_header(&tmp, header);
1190 
1191 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_ISRC_0, header[0]);
1192 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_ISRC_1, header[1]);
1193 }
1194 
1195 void msm_dp_catalog_audio_config_acr(struct msm_dp_catalog *msm_dp_catalog, u32 select)
1196 {
1197 	struct msm_dp_catalog_private *catalog;
1198 	u32 acr_ctrl;
1199 
1200 	if (!msm_dp_catalog)
1201 		return;
1202 
1203 	catalog = container_of(msm_dp_catalog,
1204 		struct msm_dp_catalog_private, msm_dp_catalog);
1205 
1206 	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1207 
1208 	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1209 					select, acr_ctrl);
1210 
1211 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1212 }
1213 
1214 void msm_dp_catalog_audio_enable(struct msm_dp_catalog *msm_dp_catalog, bool enable)
1215 {
1216 	struct msm_dp_catalog_private *catalog;
1217 	u32 audio_ctrl;
1218 
1219 	if (!msm_dp_catalog)
1220 		return;
1221 
1222 	catalog = container_of(msm_dp_catalog,
1223 		struct msm_dp_catalog_private, msm_dp_catalog);
1224 
1225 	audio_ctrl = msm_dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1226 
1227 	if (enable)
1228 		audio_ctrl |= BIT(0);
1229 	else
1230 		audio_ctrl &= ~BIT(0);
1231 
1232 	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1233 
1234 	msm_dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
1235 	/* make sure the audio engine configuration write has completed */
1236 	wmb();
1237 }
1238 
1239 void msm_dp_catalog_audio_config_sdp(struct msm_dp_catalog *msm_dp_catalog)
1240 {
1241 	struct msm_dp_catalog_private *catalog;
1242 	u32 sdp_cfg = 0;
1243 	u32 sdp_cfg2 = 0;
1244 
1245 	if (!msm_dp_catalog)
1246 		return;
1247 
1248 	catalog = container_of(msm_dp_catalog,
1249 		struct msm_dp_catalog_private, msm_dp_catalog);
1250 
1251 	sdp_cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
1252 	/* AUDIO_TIMESTAMP_SDP_EN */
1253 	sdp_cfg |= BIT(1);
1254 	/* AUDIO_STREAM_SDP_EN */
1255 	sdp_cfg |= BIT(2);
1256 	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
1257 	sdp_cfg |= BIT(5);
1258 	/* AUDIO_ISRC_SDP_EN  */
1259 	sdp_cfg |= BIT(6);
1260 	/* AUDIO_INFOFRAME_SDP_EN  */
1261 	sdp_cfg |= BIT(20);
1262 
1263 	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1264 
1265 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1266 
1267 	sdp_cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1268 	/* IFRM_REGSRC -> Do not use reg values */
1269 	sdp_cfg2 &= ~BIT(0);
1270 	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1271 	sdp_cfg2 &= ~BIT(1);
1272 
1273 	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1274 
1275 	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1276 }
1277 
1278 void msm_dp_catalog_audio_sfe_level(struct msm_dp_catalog *msm_dp_catalog, u32 safe_to_exit_level)
1279 {
1280 	struct msm_dp_catalog_private *catalog;
1281 	u32 mainlink_levels;
1282 
1283 	if (!msm_dp_catalog)
1284 		return;
1285 
1286 	catalog = container_of(msm_dp_catalog,
1287 		struct msm_dp_catalog_private, msm_dp_catalog);
1288 
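	/* the safe-to-exit level occupies the low 5 bits of REG_DP_MAINLINK_LEVELS */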
1289 	mainlink_levels = msm_dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1290 	mainlink_levels &= 0xFE0;
1291 	mainlink_levels |= safe_to_exit_level;
1292 
1293 	drm_dbg_dp(catalog->drm_dev,
1294 			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1295 			 mainlink_levels, safe_to_exit_level);
1296 
1297 	msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1298 }
1299