xref: /linux/drivers/gpu/drm/hisilicon/hibmc/dp/dp_link.c (revision 3e93d5bbcbfc3808f83712c0701f9d4c148cc8ed)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 // Copyright (c) 2024 Hisilicon Limited.
3 
4 #include <linux/delay.h>
5 #include <drm/drm_device.h>
6 #include <drm/drm_print.h>
7 #include "dp_comm.h"
8 #include "dp_reg.h"
9 
10 #define HIBMC_EQ_MAX_RETRY 5
11 
hibmc_dp_get_serdes_rate_cfg(struct hibmc_dp_dev * dp)12 static inline int hibmc_dp_get_serdes_rate_cfg(struct hibmc_dp_dev *dp)
13 {
14 	switch (dp->link.cap.link_rate) {
15 	case DP_LINK_BW_1_62:
16 		return DP_SERDES_BW_1_62;
17 	case DP_LINK_BW_2_7:
18 		return DP_SERDES_BW_2_7;
19 	case DP_LINK_BW_5_4:
20 		return DP_SERDES_BW_5_4;
21 	case DP_LINK_BW_8_1:
22 		return DP_SERDES_BW_8_1;
23 	default:
24 		return -EINVAL;
25 	}
26 }
27 
hibmc_dp_link_training_configure(struct hibmc_dp_dev * dp)28 static int hibmc_dp_link_training_configure(struct hibmc_dp_dev *dp)
29 {
30 	u8 buf[2];
31 	int ret;
32 
33 	/* DP 2 lane */
34 	hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_LANE_DATA_EN,
35 				 dp->link.cap.lanes == 0x2 ? 0x3 : 0x1);
36 	hibmc_dp_reg_write_field(dp, HIBMC_DP_DPTX_GCTL0, HIBMC_DP_CFG_PHY_LANE_NUM,
37 				 dp->link.cap.lanes == 0x2 ? 0x1 : 0);
38 
39 	/* enhanced frame */
40 	hibmc_dp_reg_write_field(dp, HIBMC_DP_VIDEO_CTRL, HIBMC_DP_CFG_STREAM_FRAME_MODE, 0x1);
41 
42 	/* set rate and lane count */
43 	buf[0] = dp->link.cap.link_rate;
44 	buf[1] = DP_LANE_COUNT_ENHANCED_FRAME_EN | dp->link.cap.lanes;
45 	ret = drm_dp_dpcd_write(dp->aux, DP_LINK_BW_SET, buf, sizeof(buf));
46 	if (ret != sizeof(buf)) {
47 		drm_dbg_dp(dp->dev, "dp aux write link rate and lanes failed, ret: %d\n", ret);
48 		return ret >= 0 ? -EIO : ret;
49 	}
50 
51 	/* set 8b/10b and downspread */
52 	buf[0] = DP_SPREAD_AMP_0_5;
53 	buf[1] = DP_SET_ANSI_8B10B;
54 	ret = drm_dp_dpcd_write(dp->aux, DP_DOWNSPREAD_CTRL, buf, sizeof(buf));
55 	if (ret != sizeof(buf)) {
56 		drm_dbg_dp(dp->dev, "dp aux write 8b/10b and downspread failed, ret: %d\n", ret);
57 		return ret >= 0 ? -EIO : ret;
58 	}
59 
60 	return 0;
61 }
62 
hibmc_dp_link_set_pattern(struct hibmc_dp_dev * dp,int pattern)63 static int hibmc_dp_link_set_pattern(struct hibmc_dp_dev *dp, int pattern)
64 {
65 	int ret;
66 	u8 val;
67 	u8 buf;
68 
69 	buf = (u8)pattern;
70 	if (pattern != DP_TRAINING_PATTERN_DISABLE && pattern != DP_TRAINING_PATTERN_4) {
71 		buf |= DP_LINK_SCRAMBLING_DISABLE;
72 		hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_SCRAMBLE_EN, 0x1);
73 	} else {
74 		hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_SCRAMBLE_EN, 0);
75 	}
76 
77 	switch (pattern) {
78 	case DP_TRAINING_PATTERN_DISABLE:
79 		val = 0;
80 		break;
81 	case DP_TRAINING_PATTERN_1:
82 		val = 1;
83 		break;
84 	case DP_TRAINING_PATTERN_2:
85 		val = 2;
86 		break;
87 	case DP_TRAINING_PATTERN_3:
88 		val = 3;
89 		break;
90 	case DP_TRAINING_PATTERN_4:
91 		val = 4;
92 		break;
93 	default:
94 		return -EINVAL;
95 	}
96 
97 	hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_PAT_SEL, val);
98 
99 	ret = drm_dp_dpcd_write(dp->aux, DP_TRAINING_PATTERN_SET, &buf, sizeof(buf));
100 	if (ret != sizeof(buf)) {
101 		drm_dbg_dp(dp->dev, "dp aux write training pattern set failed\n");
102 		return ret >= 0 ? -EIO : ret;
103 	}
104 
105 	return 0;
106 }
107 
hibmc_dp_link_training_cr_pre(struct hibmc_dp_dev * dp)108 static int hibmc_dp_link_training_cr_pre(struct hibmc_dp_dev *dp)
109 {
110 	u8 *train_set = dp->link.train_set;
111 	int ret;
112 	u8 i;
113 
114 	ret = hibmc_dp_link_training_configure(dp);
115 	if (ret)
116 		return ret;
117 
118 	ret = hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_1);
119 	if (ret)
120 		return ret;
121 
122 	for (i = 0; i < dp->link.cap.lanes; i++)
123 		train_set[i] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0;
124 
125 	ret = hibmc_dp_serdes_set_tx_cfg(dp, dp->link.train_set);
126 	if (ret)
127 		return ret;
128 
129 	ret = drm_dp_dpcd_write(dp->aux, DP_TRAINING_LANE0_SET, train_set, dp->link.cap.lanes);
130 	if (ret != dp->link.cap.lanes) {
131 		drm_dbg_dp(dp->dev, "dp aux write training lane set failed\n");
132 		return ret >= 0 ? -EIO : ret;
133 	}
134 
135 	return 0;
136 }
137 
hibmc_dp_link_get_adjust_train(struct hibmc_dp_dev * dp,u8 lane_status[DP_LINK_STATUS_SIZE])138 static bool hibmc_dp_link_get_adjust_train(struct hibmc_dp_dev *dp,
139 					   u8 lane_status[DP_LINK_STATUS_SIZE])
140 {
141 	u8 train_set[HIBMC_DP_LANE_NUM_MAX] = {0};
142 	u8 lane;
143 
144 	for (lane = 0; lane < dp->link.cap.lanes; lane++)
145 		train_set[lane] = drm_dp_get_adjust_request_voltage(lane_status, lane) |
146 				  drm_dp_get_adjust_request_pre_emphasis(lane_status, lane);
147 
148 	if (memcmp(dp->link.train_set, train_set, HIBMC_DP_LANE_NUM_MAX)) {
149 		memcpy(dp->link.train_set, train_set, HIBMC_DP_LANE_NUM_MAX);
150 		return true;
151 	}
152 
153 	return false;
154 }
155 
hibmc_dp_link_reduce_rate(struct hibmc_dp_dev * dp)156 static int hibmc_dp_link_reduce_rate(struct hibmc_dp_dev *dp)
157 {
158 	int ret;
159 
160 	switch (dp->link.cap.link_rate) {
161 	case DP_LINK_BW_2_7:
162 		dp->link.cap.link_rate = DP_LINK_BW_1_62;
163 		break;
164 	case DP_LINK_BW_5_4:
165 		dp->link.cap.link_rate = DP_LINK_BW_2_7;
166 		break;
167 	case DP_LINK_BW_8_1:
168 		dp->link.cap.link_rate = DP_LINK_BW_5_4;
169 		break;
170 	default:
171 		return -EINVAL;
172 	}
173 
174 	ret = hibmc_dp_get_serdes_rate_cfg(dp);
175 	if (ret < 0)
176 		return ret;
177 
178 	return hibmc_dp_serdes_rate_switch(ret, dp);
179 }
180 
hibmc_dp_link_reduce_lane(struct hibmc_dp_dev * dp)181 static inline int hibmc_dp_link_reduce_lane(struct hibmc_dp_dev *dp)
182 {
183 	switch (dp->link.cap.lanes) {
184 	case 0x2:
185 		dp->link.cap.lanes--;
186 		drm_dbg_dp(dp->dev, "dp link training reduce to 1 lane\n");
187 		break;
188 	case 0x1:
189 		drm_err(dp->dev, "dp link training reduce lane failed, already reach minimum\n");
190 		return -EIO;
191 	default:
192 		return -EINVAL;
193 	}
194 
195 	return 0;
196 }
197 
hibmc_dp_link_training_cr(struct hibmc_dp_dev * dp)198 static int hibmc_dp_link_training_cr(struct hibmc_dp_dev *dp)
199 {
200 	u8 lane_status[DP_LINK_STATUS_SIZE] = {0};
201 	bool level_changed;
202 	u32 voltage_tries;
203 	u32 cr_tries;
204 	int ret;
205 
206 	/*
207 	 * DP 1.4 spec define 10 for maxtries value, for pre DP 1.4 version set a limit of 80
208 	 * (4 voltage levels x 4 preemphasis levels x 5 identical voltage retries)
209 	 */
210 
211 	voltage_tries = 1;
212 	for (cr_tries = 0; cr_tries < 80; cr_tries++) {
213 		drm_dp_link_train_clock_recovery_delay(dp->aux, dp->dpcd);
214 
215 		ret = drm_dp_dpcd_read_link_status(dp->aux, lane_status);
216 		if (ret) {
217 			drm_err(dp->dev, "Get lane status failed\n");
218 			return ret;
219 		}
220 
221 		if (drm_dp_clock_recovery_ok(lane_status, dp->link.cap.lanes)) {
222 			drm_dbg_dp(dp->dev, "dp link training cr done\n");
223 			dp->link.status.clock_recovered = true;
224 			return 0;
225 		}
226 
227 		if (voltage_tries == 5) {
228 			drm_dbg_dp(dp->dev, "same voltage tries 5 times\n");
229 			dp->link.status.clock_recovered = false;
230 			return 0;
231 		}
232 
233 		level_changed = hibmc_dp_link_get_adjust_train(dp, lane_status);
234 
235 		ret = hibmc_dp_serdes_set_tx_cfg(dp, dp->link.train_set);
236 		if (ret)
237 			return ret;
238 
239 		ret = drm_dp_dpcd_write(dp->aux, DP_TRAINING_LANE0_SET, dp->link.train_set,
240 					dp->link.cap.lanes);
241 		if (ret != dp->link.cap.lanes) {
242 			drm_dbg_dp(dp->dev, "Update link training failed\n");
243 			return ret >= 0 ? -EIO : ret;
244 		}
245 
246 		voltage_tries = level_changed ? 1 : voltage_tries + 1;
247 	}
248 
249 	drm_err(dp->dev, "dp link training clock recovery 80 times failed\n");
250 	dp->link.status.clock_recovered = false;
251 
252 	return 0;
253 }
254 
/*
 * Run the channel-equalization phase of link training.
 *
 * Switches to TPS2 and iterates up to HIBMC_EQ_MAX_RETRY times: verify CR
 * still holds, test EQ, and otherwise reprogram the TX drive levels the
 * sink requests. The outcome lands in dp->link.status.channel_equalized
 * (and clock_recovered is cleared if CR was lost); the return value only
 * signals AUX/serdes errors. The training pattern is always disabled on
 * the way out, success or not.
 */
static int hibmc_dp_link_training_channel_eq(struct hibmc_dp_dev *dp)
{
	u8 lane_status[DP_LINK_STATUS_SIZE] = {0};
	u8 eq_tries;
	int ret;

	ret = hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_2);
	if (ret)
		return ret;

	for (eq_tries = 0; eq_tries < HIBMC_EQ_MAX_RETRY; eq_tries++) {
		drm_dp_link_train_channel_eq_delay(dp->aux, dp->dpcd);

		ret = drm_dp_dpcd_read_link_status(dp->aux, lane_status);
		if (ret) {
			drm_err(dp->dev, "get lane status failed\n");
			break;
		}

		/* EQ is meaningless if clock recovery has been lost */
		if (!drm_dp_clock_recovery_ok(lane_status, dp->link.cap.lanes)) {
			drm_dbg_dp(dp->dev, "clock recovery check failed\n");
			drm_dbg_dp(dp->dev, "cannot continue channel equalization\n");
			dp->link.status.clock_recovered = false;
			break;
		}

		if (drm_dp_channel_eq_ok(lane_status, dp->link.cap.lanes)) {
			dp->link.status.channel_equalized = true;
			drm_dbg_dp(dp->dev, "dp link training eq done\n");
			break;
		}

		/* return value ignored: only the refreshed train_set is needed */
		hibmc_dp_link_get_adjust_train(dp, lane_status);

		ret = hibmc_dp_serdes_set_tx_cfg(dp, dp->link.train_set);
		if (ret)
			return ret;

		ret = drm_dp_dpcd_write(dp->aux, DP_TRAINING_LANE0_SET,
					dp->link.train_set, dp->link.cap.lanes);
		if (ret != dp->link.cap.lanes) {
			drm_dbg_dp(dp->dev, "Update link training failed\n");
			ret = (ret >= 0) ? -EIO : ret;
			break;
		}
	}

	if (eq_tries == HIBMC_EQ_MAX_RETRY)
		drm_err(dp->dev, "channel equalization failed %u times\n", eq_tries);

	/* leave training mode regardless of outcome; its error is not fatal here */
	hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_DISABLE);

	/* ret still holds the last loop error (if any); success paths reach 0 */
	return ret < 0 ? ret : 0;
}
309 
/*
 * CR failed: prefer dropping the link rate; only shed a lane once the
 * rate cannot go any lower.
 */
static int hibmc_dp_link_downgrade_training_cr(struct hibmc_dp_dev *dp)
{
	if (!hibmc_dp_link_reduce_rate(dp))
		return 0;

	return hibmc_dp_link_reduce_lane(dp);
}
317 
hibmc_dp_link_downgrade_training_eq(struct hibmc_dp_dev * dp)318 static int hibmc_dp_link_downgrade_training_eq(struct hibmc_dp_dev *dp)
319 {
320 	if ((dp->link.status.clock_recovered && !dp->link.status.channel_equalized)) {
321 		if (!hibmc_dp_link_reduce_lane(dp))
322 			return 0;
323 	}
324 
325 	return hibmc_dp_link_reduce_rate(dp);
326 }
327 
hibmc_dp_update_caps(struct hibmc_dp_dev * dp)328 static void hibmc_dp_update_caps(struct hibmc_dp_dev *dp)
329 {
330 	dp->link.cap.link_rate = dp->dpcd[DP_MAX_LINK_RATE];
331 	if (dp->link.cap.link_rate > DP_LINK_BW_8_1 || !dp->link.cap.link_rate)
332 		dp->link.cap.link_rate = DP_LINK_BW_8_1;
333 
334 	dp->link.cap.lanes = dp->dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
335 	if (dp->link.cap.lanes > HIBMC_DP_LANE_NUM_MAX)
336 		dp->link.cap.lanes = HIBMC_DP_LANE_NUM_MAX;
337 }
338 
/*
 * Full DP link-training entry point.
 *
 * Reads sink capabilities, programs the serdes for the chosen rate, then
 * loops: CR phase, EQ phase, downgrading rate/lanes and retrying whenever
 * a phase reports failure in link->status. Returns 0 once both phases
 * pass at some rate/lane combination, or a negative errno when training
 * or downgrading cannot proceed; on any error path the training pattern
 * is disabled before returning.
 */
int hibmc_dp_link_training(struct hibmc_dp_dev *dp)
{
	struct hibmc_dp_link *link = &dp->link;
	int ret;

	/*
	 * Deliberately best-effort: on a failed DPCD read we log and carry
	 * on, and hibmc_dp_update_caps() clamps whatever is in dp->dpcd to
	 * supported values.
	 */
	ret = drm_dp_read_dpcd_caps(dp->aux, dp->dpcd);
	if (ret)
		drm_err(dp->dev, "dp aux read dpcd failed, ret: %d\n", ret);

	hibmc_dp_update_caps(dp);

	/* returns the serdes bandwidth code for the selected link rate */
	ret = hibmc_dp_get_serdes_rate_cfg(dp);
	if (ret < 0)
		return ret;

	ret = hibmc_dp_serdes_rate_switch(ret, dp);
	if (ret)
		return ret;

	/* retry loop: each pass either succeeds, downgrades, or errors out */
	while (true) {
		ret = hibmc_dp_link_training_cr_pre(dp);
		if (ret)
			goto err;

		ret = hibmc_dp_link_training_cr(dp);
		if (ret)
			goto err;

		if (!link->status.clock_recovered) {
			ret = hibmc_dp_link_downgrade_training_cr(dp);
			if (ret)
				goto err;
			continue;
		}

		ret = hibmc_dp_link_training_channel_eq(dp);
		if (ret)
			goto err;

		if (!link->status.channel_equalized) {
			ret = hibmc_dp_link_downgrade_training_eq(dp);
			if (ret)
				goto err;
			continue;
		}

		return 0;
	}

err:
	/* best-effort: make sure the link leaves training mode on failure */
	hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_DISABLE);

	return ret;
}
393