// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 */

#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_kms.h"
#include "dpu_trace.h"

#include <linux/iopoll.h>

#include <drm/drm_managed.h>

#define INTF_TIMING_ENGINE_EN           0x000
#define INTF_CONFIG                     0x004
#define INTF_HSYNC_CTL                  0x008
#define INTF_VSYNC_PERIOD_F0            0x00C
#define INTF_VSYNC_PERIOD_F1            0x010
#define INTF_VSYNC_PULSE_WIDTH_F0       0x014
#define INTF_VSYNC_PULSE_WIDTH_F1       0x018
#define INTF_DISPLAY_V_START_F0         0x01C
#define INTF_DISPLAY_V_START_F1         0x020
#define INTF_DISPLAY_V_END_F0           0x024
#define INTF_DISPLAY_V_END_F1           0x028
#define INTF_ACTIVE_V_START_F0          0x02C
#define INTF_ACTIVE_V_START_F1          0x030
#define INTF_ACTIVE_V_END_F0            0x034
#define INTF_ACTIVE_V_END_F1            0x038
#define INTF_DISPLAY_HCTL               0x03C
#define INTF_ACTIVE_HCTL                0x040
#define INTF_BORDER_COLOR               0x044
#define INTF_UNDERFLOW_COLOR            0x048
#define INTF_HSYNC_SKEW                 0x04C
#define INTF_POLARITY_CTL               0x050
#define INTF_TEST_CTL                   0x054
#define INTF_TP_COLOR0                  0x058
#define INTF_TP_COLOR1                  0x05C
#define INTF_CONFIG2                    0x060
#define INTF_DISPLAY_DATA_HCTL          0x064
#define INTF_ACTIVE_DATA_HCTL           0x068

#define INTF_DSI_CMD_MODE_TRIGGER_EN    0x084
#define INTF_PANEL_FORMAT               0x090

#define INTF_FRAME_LINE_COUNT_EN        0x0A8
#define INTF_FRAME_COUNT                0x0AC
#define INTF_LINE_COUNT                 0x0B0

#define INTF_DEFLICKER_CONFIG           0x0F0
#define INTF_DEFLICKER_STRNG_COEFF      0x0F4
#define INTF_DEFLICKER_WEAK_COEFF       0x0F8

#define INTF_TPG_ENABLE                 0x100
#define INTF_TPG_MAIN_CONTROL           0x104
#define INTF_TPG_VIDEO_CONFIG           0x108
#define INTF_TPG_COMPONENT_LIMITS       0x10C
#define INTF_TPG_RECTANGLE              0x110
#define INTF_TPG_INITIAL_VALUE          0x114
#define INTF_TPG_BLK_WHITE_PATTERN_FRAMES 0x118
#define INTF_TPG_RGB_MAPPING            0x11C
#define INTF_PROG_FETCH_START           0x170
#define INTF_PROG_ROT_START             0x174

#define INTF_MISR_CTRL                  0x180
#define INTF_MISR_SIGNATURE             0x184

#define INTF_MUX                        0x25C
#define INTF_STATUS                     0x26C
#define INTF_AVR_CONTROL                0x270
#define INTF_AVR_MODE                   0x274
#define INTF_AVR_TRIGGER                0x278
#define INTF_AVR_VTOTAL                 0x27C
#define INTF_TEAR_MDP_VSYNC_SEL         0x280
#define INTF_TEAR_TEAR_CHECK_EN         0x284
#define INTF_TEAR_SYNC_CONFIG_VSYNC     0x288
#define INTF_TEAR_SYNC_CONFIG_HEIGHT    0x28C
#define INTF_TEAR_SYNC_WRCOUNT          0x290
#define INTF_TEAR_VSYNC_INIT_VAL        0x294
#define INTF_TEAR_INT_COUNT_VAL         0x298
#define INTF_TEAR_SYNC_THRESH           0x29C
#define INTF_TEAR_START_POS             0x2A0
#define INTF_TEAR_RD_PTR_IRQ            0x2A4
#define INTF_TEAR_WR_PTR_IRQ            0x2A8
#define INTF_TEAR_OUT_LINE_COUNT        0x2AC
#define INTF_TEAR_LINE_COUNT            0x2B0
#define INTF_TEAR_AUTOREFRESH_CONFIG    0x2B4

#define INTF_CFG_ACTIVE_H_EN	BIT(29)
#define INTF_CFG_ACTIVE_V_EN	BIT(30)

#define INTF_CFG2_DATABUS_WIDEN	BIT(0)
#define INTF_CFG2_DATA_HCTL_EN	BIT(4)
#define INTF_CFG2_DCE_DATA_COMPRESS     BIT(12)

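/*
 * dpu_hw_intf_setup_timing_engine - Program the video-mode timing engine
 * @intf:     INTF block to configure
 * @p:        timing parameters derived from the DRM display mode
 * @fmt:      output pixel format
 * @mdss_ver: DPU core version, used to gate DCE data-compress setup
 *
 * Converts the porch/pulse/active-window parameters into the INTF_*_F0
 * register programming, including widebus, data HCTL and (for DSI on
 * DPU >= 7.0) compression configuration.
 */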
static void dpu_hw_intf_setup_timing_engine(struct dpu_hw_intf *intf,
		const struct dpu_hw_intf_timing_params *p,
		const struct msm_format *fmt,
		const struct dpu_mdss_version *mdss_ver)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 hsync_data_start_x, hsync_data_end_x;
	u32 active_h_start, active_h_end;
	u32 active_v_start, active_v_end;
	u32 active_hctl, display_hctl, hsync_ctl;
	u32 polarity_ctl, den_polarity;
	u32 panel_format;
	u32 intf_cfg, intf_cfg2 = 0;
	u32 display_data_hctl = 0, active_data_hctl = 0;
	u32 data_width;
	bool dp_intf = false;

	/* read interface_cfg */
	intf_cfg = DPU_REG_READ(c, INTF_CONFIG);

	if (intf->cap->type == INTF_DP)
		dp_intf = true;

	hsync_period = p->hsync_pulse_width + p->h_back_porch + p->width +
	p->h_front_porch;
	vsync_period = p->vsync_pulse_width + p->v_back_porch + p->height +
	p->v_front_porch;

	display_v_start = ((p->vsync_pulse_width + p->v_back_porch) *
	hsync_period) + p->hsync_skew;
	display_v_end = ((vsync_period - p->v_front_porch) * hsync_period) +
	p->hsync_skew - 1;

	hsync_start_x = p->h_back_porch + p->hsync_pulse_width;
	hsync_end_x = hsync_period - p->h_front_porch - 1;

	if (p->width != p->xres) { /* border fill added */
		active_h_start = hsync_start_x;
		active_h_end = active_h_start + p->xres - 1;
	} else {
		active_h_start = 0;
		active_h_end = 0;
	}

	if (p->height != p->yres) { /* border fill added */
		active_v_start = display_v_start;
		active_v_end = active_v_start + (p->yres * hsync_period) - 1;
	} else {
		active_v_start = 0;
		active_v_end = 0;
	}

	if (active_h_end) {
		active_hctl = (active_h_end << 16) | active_h_start;
		intf_cfg |= INTF_CFG_ACTIVE_H_EN;
	} else {
		active_hctl = 0;
	}

	if (active_v_end)
		intf_cfg |= INTF_CFG_ACTIVE_V_EN;

	hsync_ctl = (hsync_period << 16) | p->hsync_pulse_width;
	display_hctl = (hsync_end_x << 16) | hsync_start_x;

	if (p->wide_bus_en)
		intf_cfg2 |= INTF_CFG2_DATABUS_WIDEN;

	data_width = p->width;

	/*
	 * If widebus is enabled, data is valid for only half the active window
	 * since the data rate is doubled in this mode. But for the compression
	 * mode in DP case, the p->width is already adjusted in
	 * drm_mode_to_intf_timing_params()
	 */
	if (p->wide_bus_en && !dp_intf)
		data_width = p->width >> 1;

	/* TODO: handle DSC+DP case, we only handle DSC+DSI case so far */
	if (p->compression_en && !dp_intf &&
	    mdss_ver->core_major_ver >= 7)
		intf_cfg2 |= INTF_CFG2_DCE_DATA_COMPRESS;

	hsync_data_start_x = hsync_start_x;
	hsync_data_end_x =  hsync_start_x + data_width - 1;

	display_data_hctl = (hsync_data_end_x << 16) | hsync_data_start_x;

	if (dp_intf) {
		/* DP timing adjustment */
		display_v_start += p->hsync_pulse_width + p->h_back_porch;
		display_v_end   -= p->h_front_porch;

		active_h_start = hsync_start_x;
		active_h_end = active_h_start + p->xres - 1;
		active_v_start = display_v_start;
		active_v_end = active_v_start + (p->yres * hsync_period) - 1;

		active_hctl = (active_h_end << 16) | active_h_start;
		display_hctl = active_hctl;

		intf_cfg |= INTF_CFG_ACTIVE_H_EN | INTF_CFG_ACTIVE_V_EN;
	}

	den_polarity = 0;
	polarity_ctl = (den_polarity << 2) | /*  DEN Polarity  */
		(p->vsync_polarity << 1) | /* VSYNC Polarity */
		(p->hsync_polarity << 0);  /* HSYNC Polarity */

	if (!MSM_FORMAT_IS_YUV(fmt))
		panel_format = (fmt->bpc_g_y |
				(fmt->bpc_b_cb << 2) |
				(fmt->bpc_r_cr << 4) |
				(0x21 << 8));
	else
		/* Interface treats all the pixel data in RGB888 format */
		panel_format = (BPC8 |
				(BPC8 << 2) |
				(BPC8 << 4) |
				(0x21 << 8));

	DPU_REG_WRITE(c, INTF_HSYNC_CTL, hsync_ctl);
	DPU_REG_WRITE(c, INTF_VSYNC_PERIOD_F0, vsync_period * hsync_period);
	DPU_REG_WRITE(c, INTF_VSYNC_PULSE_WIDTH_F0,
			p->vsync_pulse_width * hsync_period);
	DPU_REG_WRITE(c, INTF_DISPLAY_HCTL, display_hctl);
	DPU_REG_WRITE(c, INTF_DISPLAY_V_START_F0, display_v_start);
	DPU_REG_WRITE(c, INTF_DISPLAY_V_END_F0, display_v_end);
	DPU_REG_WRITE(c, INTF_ACTIVE_HCTL,  active_hctl);
	DPU_REG_WRITE(c, INTF_ACTIVE_V_START_F0, active_v_start);
	DPU_REG_WRITE(c, INTF_ACTIVE_V_END_F0, active_v_end);
	DPU_REG_WRITE(c, INTF_BORDER_COLOR, p->border_clr);
	DPU_REG_WRITE(c, INTF_UNDERFLOW_COLOR, p->underflow_clr);
	DPU_REG_WRITE(c, INTF_HSYNC_SKEW, p->hsync_skew);
	DPU_REG_WRITE(c, INTF_POLARITY_CTL, polarity_ctl);
	DPU_REG_WRITE(c, INTF_FRAME_LINE_COUNT_EN, 0x3);
	DPU_REG_WRITE(c, INTF_CONFIG, intf_cfg);
	DPU_REG_WRITE(c, INTF_PANEL_FORMAT, panel_format);
	if (intf->cap->features & BIT(DPU_DATA_HCTL_EN)) {
		/*
		 * DATA_HCTL_EN controls data timing which can be different from
		 * video timing. It is recommended to enable it for all cases, except
		 * if compression is enabled in 1 pixel per clock mode
		 */
		if (!(p->compression_en && !p->wide_bus_en))
			intf_cfg2 |= INTF_CFG2_DATA_HCTL_EN;

		DPU_REG_WRITE(c, INTF_CONFIG2, intf_cfg2);
		DPU_REG_WRITE(c, INTF_DISPLAY_DATA_HCTL, display_data_hctl);
		DPU_REG_WRITE(c, INTF_ACTIVE_DATA_HCTL, active_data_hctl);
	}
}

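/* Start or stop the video-mode timing engine for this interface */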
static void dpu_hw_intf_enable_timing_engine(
		struct dpu_hw_intf *intf,
		u8 enable)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	/* Note: Display interface select is handled in top block hw layer */
	DPU_REG_WRITE(c, INTF_TIMING_ENGINE_EN, enable != 0);
}

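/*
 * Program the programmable-fetch (prefill) start point; see the note
 * below about keeping the fetch outside the active region.
 */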
static void dpu_hw_intf_setup_prg_fetch(
		struct dpu_hw_intf *intf,
		const struct dpu_hw_intf_prog_fetch *fetch)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	int fetch_enable;

	/*
	 * Fetch should always be outside the active lines. If the fetching
	 * is programmed within active region, hardware behavior is unknown.
	 */

	fetch_enable = DPU_REG_READ(c, INTF_CONFIG);
	if (fetch->enable) {
		fetch_enable |= BIT(31);
		DPU_REG_WRITE(c, INTF_PROG_FETCH_START,
				fetch->fetch_start);
	} else {
		fetch_enable &= ~BIT(31);
	}

	DPU_REG_WRITE(c, INTF_CONFIG, fetch_enable);
}

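/*
 * Select which PINGPONG block feeds this interface via INTF_MUX; a zero
 * pingpong id writes 0xf to leave the interface unattached.
 */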
static void dpu_hw_intf_bind_pingpong_blk(
		struct dpu_hw_intf *intf,
		const enum dpu_pingpong pp)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 mux_cfg;

	mux_cfg = DPU_REG_READ(c, INTF_MUX);
	mux_cfg &= ~0xf;

	if (pp)
		mux_cfg |= (pp - PINGPONG_0) & 0x7;
	else
		mux_cfg |= 0xf;

	DPU_REG_WRITE(c, INTF_MUX, mux_cfg);
}

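/* Report timing-engine enable state, prog-fetch state and frame/line counters */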
static void dpu_hw_intf_get_status(
		struct dpu_hw_intf *intf,
		struct dpu_hw_intf_status *s)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	unsigned long cap = intf->cap->features;

	if (cap & BIT(DPU_INTF_STATUS_SUPPORTED))
		s->is_en = DPU_REG_READ(c, INTF_STATUS) & BIT(0);
	else
		s->is_en = DPU_REG_READ(c, INTF_TIMING_ENGINE_EN);

	s->is_prog_fetch_en = !!(DPU_REG_READ(c, INTF_CONFIG) & BIT(31));
	if (s->is_en) {
		s->frame_count = DPU_REG_READ(c, INTF_FRAME_COUNT);
		s->line_count = DPU_REG_READ(c, INTF_LINE_COUNT);
	} else {
		s->line_count = 0;
		s->frame_count = 0;
	}
}

static u32 dpu_hw_intf_get_line_count(struct dpu_hw_intf *intf)
{
	struct dpu_hw_blk_reg_map *c;

	if (!intf)
		return 0;

	c = &intf->hw;

	return DPU_REG_READ(c, INTF_LINE_COUNT);
}

static void dpu_hw_intf_setup_misr(struct dpu_hw_intf *intf)
{
	dpu_hw_setup_misr(&intf->hw, INTF_MISR_CTRL, 0x1);
}

static int dpu_hw_intf_collect_misr(struct dpu_hw_intf *intf, u32 *misr_value)
{
	return dpu_hw_collect_misr(&intf->hw, INTF_MISR_CTRL, INTF_MISR_SIGNATURE, misr_value);
}

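/*
 * Configure and enable the INTF tear check used with command-mode DSI
 * panels: the vsync counter/source, sync thresholds, start position and
 * read-pointer IRQ position are taken from @te before tear check is
 * enabled.
 */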
static int dpu_hw_intf_enable_te(struct dpu_hw_intf *intf,
		struct dpu_hw_tear_check *te)
{
	struct dpu_hw_blk_reg_map *c;
	int cfg;

	if (!intf)
		return -EINVAL;

	c = &intf->hw;

	cfg = BIT(19); /* VSYNC_COUNTER_EN */
	if (te->hw_vsync_mode)
		cfg |= BIT(20);

	cfg |= te->vsync_count;

	DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_HEIGHT, te->sync_cfg_height);
	DPU_REG_WRITE(c, INTF_TEAR_VSYNC_INIT_VAL, te->vsync_init_val);
	DPU_REG_WRITE(c, INTF_TEAR_RD_PTR_IRQ, te->rd_ptr_irq);
	DPU_REG_WRITE(c, INTF_TEAR_START_POS, te->start_pos);
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_THRESH,
			((te->sync_threshold_continue << 16) |
			 te->sync_threshold_start));
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_WRCOUNT,
			(te->start_pos + te->sync_threshold_start + 1));

	DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 1);

	return 0;
}

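/*
 * Enable autorefresh by setting BIT(31) of INTF_TEAR_AUTOREFRESH_CONFIG
 * together with the requested frame_count, or clear BIT(31) to disable it.
 */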
static void dpu_hw_intf_setup_autorefresh_config(struct dpu_hw_intf *intf,
		u32 frame_count, bool enable)
{
	struct dpu_hw_blk_reg_map *c;
	u32 refresh_cfg;

	c = &intf->hw;
	refresh_cfg = DPU_REG_READ(c, INTF_TEAR_AUTOREFRESH_CONFIG);
	if (enable)
		refresh_cfg = BIT(31) | frame_count;
	else
		refresh_cfg &= ~BIT(31);

	DPU_REG_WRITE(c, INTF_TEAR_AUTOREFRESH_CONFIG, refresh_cfg);
}

/*
 * dpu_hw_intf_get_autorefresh_config - Get autorefresh config from HW
 * @intf:        DPU intf structure
 * @frame_count: Used to return the current frame count from hw
 *
 * Returns: True if autorefresh enabled, false if disabled.
 */
static bool dpu_hw_intf_get_autorefresh_config(struct dpu_hw_intf *intf,
		u32 *frame_count)
{
	u32 val = DPU_REG_READ(&intf->hw, INTF_TEAR_AUTOREFRESH_CONFIG);

	if (frame_count != NULL)
		*frame_count = val & 0xffff;
	return !!((val & BIT(31)) >> 31);
}

static int dpu_hw_intf_disable_te(struct dpu_hw_intf *intf)
{
	struct dpu_hw_blk_reg_map *c;

	if (!intf)
		return -EINVAL;

	c = &intf->hw;
	DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 0);
	return 0;
}

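/*
 * Select between the external (panel) TE signal and the internal vsync
 * counter for tear check and return the previous setting so it can be
 * restored by the caller.
 */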
static int dpu_hw_intf_connect_external_te(struct dpu_hw_intf *intf,
		bool enable_external_te)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 cfg;
	int orig;

	if (!intf)
		return -EINVAL;

	c = &intf->hw;
	cfg = DPU_REG_READ(c, INTF_TEAR_SYNC_CONFIG_VSYNC);
	orig = (bool)(cfg & BIT(20));
	if (enable_external_te)
		cfg |= BIT(20);
	else
		cfg &= ~BIT(20);
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
	trace_dpu_intf_connect_ext_te(intf->idx - INTF_0, cfg);

	return orig;
}

static int dpu_hw_intf_get_vsync_info(struct dpu_hw_intf *intf,
		struct dpu_hw_pp_vsync_info *info)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 val;

	if (!intf || !info)
		return -EINVAL;

	c = &intf->hw;

	val = DPU_REG_READ(c, INTF_TEAR_VSYNC_INIT_VAL);
	info->rd_ptr_init_val = val & 0xffff;

	val = DPU_REG_READ(c, INTF_TEAR_INT_COUNT_VAL);
	info->rd_ptr_frame_count = (val & 0xffff0000) >> 16;
	info->rd_ptr_line_count = val & 0xffff;

	val = DPU_REG_READ(c, INTF_TEAR_LINE_COUNT);
	info->wr_ptr_line_count = val & 0xffff;

	val = DPU_REG_READ(c, INTF_FRAME_COUNT);
	info->intf_frame_count = val;

	return 0;
}

static void dpu_hw_intf_vsync_sel(struct dpu_hw_intf *intf,
				  enum dpu_vsync_source vsync_source)
{
	struct dpu_hw_blk_reg_map *c;

	if (!intf)
		return;

	c = &intf->hw;

	DPU_REG_WRITE(c, INTF_TEAR_MDP_VSYNC_SEL, (vsync_source & 0xf));
}

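/*
 * dpu_hw_intf_disable_autorefresh - Disable autorefresh and wait for idle
 * @intf:       DPU intf structure
 * @encoder_id: encoder id, used only for error/debug logging
 * @vdisplay:   vertical display size, used to bound the line count poll
 */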
static void dpu_hw_intf_disable_autorefresh(struct dpu_hw_intf *intf,
					    uint32_t encoder_id, u16 vdisplay)
{
	struct dpu_hw_pp_vsync_info info;
	int trial = 0;

	/* If autorefresh is already disabled, we have nothing to do */
	if (!dpu_hw_intf_get_autorefresh_config(intf, NULL))
		return;

	/*
	 * If autorefresh is enabled, disable it and make sure it is safe to
	 * proceed with current frame commit/push. Sequence followed is,
	 * 1. Disable TE
	 * 2. Disable autorefresh config
	 * 3. Poll for frame transfer ongoing to be false
	 * 4. Enable TE back
	 */

	dpu_hw_intf_connect_external_te(intf, false);
	dpu_hw_intf_setup_autorefresh_config(intf, 0, false);

	do {
		udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
		if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
				> (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
			DPU_ERROR("enc%d intf%d disable autorefresh failed\n",
				  encoder_id, intf->idx - INTF_0);
			break;
		}

		trial++;

		dpu_hw_intf_get_vsync_info(intf, &info);
	} while (info.wr_ptr_line_count > 0 &&
		 info.wr_ptr_line_count < vdisplay);

	dpu_hw_intf_connect_external_te(intf, true);

	DPU_DEBUG("enc%d intf%d disabled autorefresh\n",
		  encoder_id, intf->idx - INTF_0);
}

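/*
 * Update INTF_CONFIG2 for command mode: OR in the DCE data-compress and
 * databus-widen bits requested in @cmd_mode_cfg.
 */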
static void dpu_hw_intf_program_intf_cmd_cfg(struct dpu_hw_intf *intf,
					     struct dpu_hw_intf_cmd_mode_cfg *cmd_mode_cfg)
{
	u32 intf_cfg2 = DPU_REG_READ(&intf->hw, INTF_CONFIG2);

	if (cmd_mode_cfg->data_compress)
		intf_cfg2 |= INTF_CFG2_DCE_DATA_COMPRESS;

	if (cmd_mode_cfg->wide_bus_en)
		intf_cfg2 |= INTF_CFG2_DATABUS_WIDEN;

	DPU_REG_WRITE(&intf->hw, INTF_CONFIG2, intf_cfg2);
}

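/*
 * dpu_hw_intf_init - Initializes the INTF hw driver object
 * @dev:      DRM device handle used for devres-managed allocation
 * @cfg:      interface catalog entry for which the hw object is required
 * @addr:     mapped base address of the MDP register space
 * @mdss_rev: DPU core major/minor version, used to gate optional ops
 *
 * Return: NULL for an INTF_NONE entry, an ERR_PTR on allocation failure,
 * or a valid dpu_hw_intf pointer on success.
 */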
struct dpu_hw_intf *dpu_hw_intf_init(struct drm_device *dev,
				     const struct dpu_intf_cfg *cfg,
				     void __iomem *addr,
				     const struct dpu_mdss_version *mdss_rev)
{
	struct dpu_hw_intf *c;

	if (cfg->type == INTF_NONE) {
		DPU_DEBUG("Skip intf %d with type NONE\n", cfg->id - INTF_0);
		return NULL;
	}

	c = drmm_kzalloc(dev, sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	c->hw.blk_addr = addr + cfg->base;
	c->hw.log_mask = DPU_DBG_MASK_INTF;

	/*
	 * Assign ops
	 */
	c->idx = cfg->id;
	c->cap = cfg;

	c->ops.setup_timing_gen = dpu_hw_intf_setup_timing_engine;
	c->ops.setup_prg_fetch  = dpu_hw_intf_setup_prg_fetch;
	c->ops.get_status = dpu_hw_intf_get_status;
	c->ops.enable_timing = dpu_hw_intf_enable_timing_engine;
	c->ops.get_line_count = dpu_hw_intf_get_line_count;
	c->ops.setup_misr = dpu_hw_intf_setup_misr;
	c->ops.collect_misr = dpu_hw_intf_collect_misr;

	if (cfg->features & BIT(DPU_INTF_INPUT_CTRL))
		c->ops.bind_pingpong_blk = dpu_hw_intf_bind_pingpong_blk;

	/* INTF TE is only for DSI interfaces */
	if (mdss_rev->core_major_ver >= 5 && cfg->type == INTF_DSI) {
		WARN_ON(!cfg->intr_tear_rd_ptr);

		c->ops.enable_tearcheck = dpu_hw_intf_enable_te;
		c->ops.disable_tearcheck = dpu_hw_intf_disable_te;
		c->ops.connect_external_te = dpu_hw_intf_connect_external_te;
		c->ops.vsync_sel = dpu_hw_intf_vsync_sel;
		c->ops.disable_autorefresh = dpu_hw_intf_disable_autorefresh;
	}

	/*
	 * Technically, INTF_CONFIG2 is present for DPU 5.0+, but
	 * we can configure it for DPU 7.0+ since the wide bus and DSC flags
	 * would not be set for DPU < 7.0 anyway.
	 */
	if (mdss_rev->core_major_ver >= 7)
		c->ops.program_intf_cmd_cfg = dpu_hw_intf_program_intf_cmd_cfg;

	return c;
}