1 /*
2 * Copyright © 2012 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/i2c.h>
29 #include <linux/module.h>
30 #include <linux/slab.h>
31
32 #include <drm/display/drm_dp_helper.h>
33 #include <drm/drm_crtc.h>
34 #include <drm/drm_crtc_helper.h>
35 #include <drm/drm_edid.h>
36 #include <drm/drm_modeset_helper_vtables.h>
37 #include <drm/drm_simple_kms_helper.h>
38
39 #include "gma_display.h"
40 #include "psb_drv.h"
41 #include "psb_intel_drv.h"
42 #include "psb_intel_reg.h"
43
44 /**
45 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
46 * aux algorithm
47 * @running: set by the algo indicating whether an i2c transfer is ongoing or
48 * whether the i2c bus is quiescent
49 * @address: i2c target address for the currently ongoing transfer
50 * @aux_ch: driver callback to transfer a single byte of the i2c payload
51 */
52 struct i2c_algo_dp_aux_data {
53 bool running;
54 u16 address;
55 int (*aux_ch) (struct i2c_adapter *adapter,
56 int mode, uint8_t write_byte,
57 uint8_t *read_byte);
58 };
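/*
 * A driver instance typically embeds this structure next to its
 * i2c_adapter and points @aux_ch at its hardware AUX transfer routine
 * before registering the adapter, roughly as cdv_intel_dp_i2c_init()
 * does further down in this file:
 *
 *	intel_dp->algo.running = false;
 *	intel_dp->algo.address = 0;
 *	intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
 *	intel_dp->adapter.algo_data = &intel_dp->algo;
 */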
59
60 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
61 static int
62 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
63 uint8_t write_byte, uint8_t *read_byte)
64 {
65 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
66 int ret;
67
68 ret = (*algo_data->aux_ch)(adapter, mode,
69 write_byte, read_byte);
70 return ret;
71 }
72
73 /*
74 * I2C over AUX CH
75 */
76
77 /*
78 * Send the address. If the I2C link is running, this 'restarts'
79 * the connection with the new address; this is used for doing
80 * a write followed by a read (as needed for DDC).
81 */
82 static int
83 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
84 {
85 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
86 int mode = MODE_I2C_START;
87
88 if (reading)
89 mode |= MODE_I2C_READ;
90 else
91 mode |= MODE_I2C_WRITE;
92 algo_data->address = address;
93 algo_data->running = true;
94 return i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
95 }
96
97 /*
98 * Stop the I2C transaction. This closes out the link, sending
99 * a bare address packet with the MOT bit turned off
100 */
101 static void
102 i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
103 {
104 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
105 int mode = MODE_I2C_STOP;
106
107 if (reading)
108 mode |= MODE_I2C_READ;
109 else
110 mode |= MODE_I2C_WRITE;
111 if (algo_data->running) {
112 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
113 algo_data->running = false;
114 }
115 }
116
117 /*
118 * Write a single byte to the current I2C address. The
119 * I2C link must be running or this returns -EIO.
120 */
121 static int
122 i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
123 {
124 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
125
126 if (!algo_data->running)
127 return -EIO;
128
129 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
130 }
131
132 /*
133 * Read a single byte from the current I2C address. The
134 * I2C link must be running or this returns -EIO.
135 */
136 static int
137 i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
138 {
139 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
140
141 if (!algo_data->running)
142 return -EIO;
143
144 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
145 }
146
147 static int
148 i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
149 struct i2c_msg *msgs,
150 int num)
151 {
152 int ret = 0;
153 bool reading = false;
154 int m;
155 int b;
156
157 for (m = 0; m < num; m++) {
158 u16 len = msgs[m].len;
159 u8 *buf = msgs[m].buf;
160 reading = (msgs[m].flags & I2C_M_RD) != 0;
161 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
162 if (ret < 0)
163 break;
164 if (reading) {
165 for (b = 0; b < len; b++) {
166 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
167 if (ret < 0)
168 break;
169 }
170 } else {
171 for (b = 0; b < len; b++) {
172 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
173 if (ret < 0)
174 break;
175 }
176 }
177 if (ret < 0)
178 break;
179 }
180 if (ret >= 0)
181 ret = num;
182 i2c_algo_dp_aux_stop(adapter, reading);
183 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
184 return ret;
185 }
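/*
 * Each i2c_msg above turns into one AUX "address" transaction (a restart
 * when a transfer is already running) followed by one single-byte AUX
 * transaction per payload byte, with the stop sent only after the last
 * message. A DDC EDID fetch, for instance, reaches this xfer hook as a
 * one-byte offset write followed by a block read, roughly:
 *
 *	u8 offset = 0;
 *	u8 edid[128];
 *	struct i2c_msg msgs[] = {
 *		{ .addr = 0x50, .flags = 0, .len = 1, .buf = &offset },
 *		{ .addr = 0x50, .flags = I2C_M_RD, .len = sizeof(edid), .buf = edid },
 *	};
 *	i2c_transfer(adapter, msgs, 2);
 */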
186
187 static u32
188 i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
189 {
190 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
191 I2C_FUNC_SMBUS_READ_BLOCK_DATA |
192 I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
193 I2C_FUNC_10BIT_ADDR;
194 }
195
196 static const struct i2c_algorithm i2c_dp_aux_algo = {
197 .master_xfer = i2c_algo_dp_aux_xfer,
198 .functionality = i2c_algo_dp_aux_functionality,
199 };
200
201 static void
202 i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
203 {
204 (void) i2c_algo_dp_aux_address(adapter, 0, false);
205 (void) i2c_algo_dp_aux_stop(adapter, false);
206 }
207
208 static int
209 i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
210 {
211 adapter->algo = &i2c_dp_aux_algo;
212 adapter->retries = 3;
213 i2c_dp_aux_reset_bus(adapter);
214 return 0;
215 }
216
217 /*
218 * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
219 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
220 */
221 static int
222 i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
223 {
224 int error;
225
226 error = i2c_dp_aux_prepare_bus(adapter);
227 if (error)
228 return error;
229 error = i2c_add_adapter(adapter);
230 return error;
231 }
232
233 #define _wait_for(COND, MS, W) ({ \
234 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
235 int ret__ = 0; \
236 while (! (COND)) { \
237 if (time_after(jiffies, timeout__)) { \
238 ret__ = -ETIMEDOUT; \
239 break; \
240 } \
241 if (W && !in_dbg_master()) msleep(W); \
242 } \
243 ret__; \
244 })
245
246 #define wait_for(COND, MS) _wait_for(COND, MS, 1)
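/*
 * wait_for(COND, MS) polls COND roughly once per millisecond, returning 0
 * as soon as COND becomes true and -ETIMEDOUT once MS milliseconds have
 * passed. A simplified example in the style of the panel power code below:
 *
 *	if (wait_for((REG_READ(PP_STATUS) & PP_ON) != 0, 1000))
 *		DRM_DEBUG_KMS("panel failed to power up\n");
 */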
247
248 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
249
250 #define DP_LINK_CONFIGURATION_SIZE 9
251
252 #define CDV_FAST_LINK_TRAIN 1
253
254 struct cdv_intel_dp {
255 uint32_t output_reg;
256 uint32_t DP;
257 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
258 bool has_audio;
259 int force_audio;
260 uint32_t color_range;
261 uint8_t link_bw;
262 uint8_t lane_count;
263 uint8_t dpcd[4];
264 struct gma_encoder *encoder;
265 struct i2c_adapter adapter;
266 struct i2c_algo_dp_aux_data algo;
267 uint8_t train_set[4];
268 uint8_t link_status[DP_LINK_STATUS_SIZE];
269 int panel_power_up_delay;
270 int panel_power_down_delay;
271 int panel_power_cycle_delay;
272 int backlight_on_delay;
273 int backlight_off_delay;
274 struct drm_display_mode *panel_fixed_mode; /* for eDP */
275 bool panel_on;
276 };
277
278 struct ddi_regoff {
279 uint32_t PreEmph1;
280 uint32_t PreEmph2;
281 uint32_t VSwing1;
282 uint32_t VSwing2;
283 uint32_t VSwing3;
284 uint32_t VSwing4;
285 uint32_t VSwing5;
286 };
287
288 static struct ddi_regoff ddi_DP_train_table[] = {
289 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
290 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
291 .VSwing5 = 0x8158,},
292 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
293 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
294 .VSwing5 = 0x8258,},
295 };
296
297 static uint32_t dp_vswing_premph_table[] = {
298 0x55338954, 0x4000,
299 0x554d8954, 0x2000,
300 0x55668954, 0,
301 0x559ac0d4, 0x6000,
302 };
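/*
 * The table holds pairs of VSwing2/PreEmph2 register values indexed by
 * the requested drive levels: cdv_intel_dp_set_vswing_premph() below
 * picks entry (vswing + premph) * 2 for the swing write and entry
 * 2 * premph + 1 for the pre-emphasis write.
 */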
303 /**
304 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
305 * @encoder: GMA encoder struct
306 *
307 * Returns true if a CPU or PCH DP output is attached to an eDP panel,
308 * false otherwise.
309 */
310 static bool is_edp(struct gma_encoder *encoder)
311 {
312 return encoder->type == INTEL_OUTPUT_EDP;
313 }
314
315
316 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
317 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
318 static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
319
320 static int
321 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
322 {
323 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
324 int max_lane_count = 4;
325
326 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
327 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
328 switch (max_lane_count) {
329 case 1: case 2: case 4:
330 break;
331 default:
332 max_lane_count = 4;
333 }
334 }
335 return max_lane_count;
336 }
337
338 static int
339 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
340 {
341 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
342 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
343
344 switch (max_link_bw) {
345 case DP_LINK_BW_1_62:
346 case DP_LINK_BW_2_7:
347 break;
348 default:
349 max_link_bw = DP_LINK_BW_1_62;
350 break;
351 }
352 return max_link_bw;
353 }
354
355 static int
356 cdv_intel_dp_link_clock(uint8_t link_bw)
357 {
358 if (link_bw == DP_LINK_BW_2_7)
359 return 270000;
360 else
361 return 162000;
362 }
363
364 static int
365 cdv_intel_dp_link_required(int pixel_clock, int bpp)
366 {
367 return (pixel_clock * bpp + 7) / 8;
368 }
369
370 static int
371 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
372 {
373 return (max_link_clock * max_lanes * 19) / 20;
374 }
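/*
 * The link budget above is computed in kilobytes per second: the link
 * clock is the per-lane symbol rate in kHz and each symbol carries one
 * data byte after 8b/10b coding, while the 19/20 factor keeps roughly 5%
 * of the raw rate in reserve. For example, four lanes at 2.7 GHz give
 * 270000 * 4 * 19 / 20 = 1026000 kB/s, and a 148500 kHz mode at 24 bpp
 * needs (148500 * 24 + 7) / 8 = 445500 kB/s, which fits comfortably.
 */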
375
376 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
377 {
378 struct drm_device *dev = intel_encoder->base.dev;
379 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
380 u32 pp;
381
382 if (intel_dp->panel_on) {
383 DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
384 return;
385 }
386 DRM_DEBUG_KMS("\n");
387
388 pp = REG_READ(PP_CONTROL);
389
390 pp |= EDP_FORCE_VDD;
391 REG_WRITE(PP_CONTROL, pp);
392 REG_READ(PP_CONTROL);
393 msleep(intel_dp->panel_power_up_delay);
394 }
395
396 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
397 {
398 struct drm_device *dev = intel_encoder->base.dev;
399 u32 pp;
400
401 DRM_DEBUG_KMS("\n");
402 pp = REG_READ(PP_CONTROL);
403
404 pp &= ~EDP_FORCE_VDD;
405 REG_WRITE(PP_CONTROL, pp);
406 REG_READ(PP_CONTROL);
407
408 }
409
410 /* Returns true if the panel was already on when called */
411 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
412 {
413 struct drm_device *dev = intel_encoder->base.dev;
414 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
415 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
416
417 if (intel_dp->panel_on)
418 return true;
419
420 DRM_DEBUG_KMS("\n");
421 pp = REG_READ(PP_CONTROL);
422 pp &= ~PANEL_UNLOCK_MASK;
423
424 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
425 REG_WRITE(PP_CONTROL, pp);
426 REG_READ(PP_CONTROL);
427
428 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
429 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
430 intel_dp->panel_on = false;
431 } else
432 intel_dp->panel_on = true;
433 msleep(intel_dp->panel_power_up_delay);
434
435 return false;
436 }
437
438 static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder)
439 {
440 struct drm_device *dev = intel_encoder->base.dev;
441 u32 pp, idle_off_mask = PP_ON ;
442 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
443
444 DRM_DEBUG_KMS("\n");
445
446 pp = REG_READ(PP_CONTROL);
447
448 if ((pp & POWER_TARGET_ON) == 0)
449 return;
450
451 intel_dp->panel_on = false;
452 pp &= ~PANEL_UNLOCK_MASK;
453 /* ILK workaround: disable reset around power sequence */
454
455 pp &= ~POWER_TARGET_ON;
456 pp &= ~EDP_FORCE_VDD;
457 pp &= ~EDP_BLC_ENABLE;
458 REG_WRITE(PP_CONTROL, pp);
459 REG_READ(PP_CONTROL);
460 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
461
462 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
463 DRM_DEBUG_KMS("Error in turning off Panel\n");
464 }
465
466 msleep(intel_dp->panel_power_cycle_delay);
467 DRM_DEBUG_KMS("Over\n");
468 }
469
470 static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder)
471 {
472 struct drm_device *dev = intel_encoder->base.dev;
473 u32 pp;
474
475 DRM_DEBUG_KMS("\n");
476 /*
477 * If we enable the backlight right away following a panel power
478 * on, we may see slight flicker as the panel syncs with the eDP
479 * link. So delay a bit to make sure the image is solid before
480 * allowing it to appear.
481 */
482 msleep(300);
483 pp = REG_READ(PP_CONTROL);
484
485 pp |= EDP_BLC_ENABLE;
486 REG_WRITE(PP_CONTROL, pp);
487 gma_backlight_enable(dev);
488 }
489
490 static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder)
491 {
492 struct drm_device *dev = intel_encoder->base.dev;
493 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
494 u32 pp;
495
496 DRM_DEBUG_KMS("\n");
497 gma_backlight_disable(dev);
498 msleep(10);
499 pp = REG_READ(PP_CONTROL);
500
501 pp &= ~EDP_BLC_ENABLE;
502 REG_WRITE(PP_CONTROL, pp);
503 msleep(intel_dp->backlight_off_delay);
504 }
505
506 static enum drm_mode_status
507 cdv_intel_dp_mode_valid(struct drm_connector *connector,
508 struct drm_display_mode *mode)
509 {
510 struct gma_encoder *encoder = gma_attached_encoder(connector);
511 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
512 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
513 int max_lanes = cdv_intel_dp_max_lane_count(encoder);
514 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
515
516 if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
517 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
518 return MODE_PANEL;
519 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
520 return MODE_PANEL;
521 }
522
523 /* Only refuse the mode on non-eDP, since we have seen some weird eDP panels
524 which are outside spec tolerances but somehow work by magic */
525 if (!is_edp(encoder) &&
526 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
527 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
528 return MODE_CLOCK_HIGH;
529
530 if (is_edp(encoder)) {
531 if (cdv_intel_dp_link_required(mode->clock, 24)
532 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
533 return MODE_CLOCK_HIGH;
534
535 }
536 if (mode->clock < 10000)
537 return MODE_CLOCK_LOW;
538
539 return MODE_OK;
540 }
541
542 static uint32_t
543 pack_aux(uint8_t *src, int src_bytes)
544 {
545 int i;
546 uint32_t v = 0;
547
548 if (src_bytes > 4)
549 src_bytes = 4;
550 for (i = 0; i < src_bytes; i++)
551 v |= ((uint32_t) src[i]) << ((3-i) * 8);
552 return v;
553 }
554
555 static void
556 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
557 {
558 int i;
559 if (dst_bytes > 4)
560 dst_bytes = 4;
561 for (i = 0; i < dst_bytes; i++)
562 dst[i] = src >> ((3-i) * 8);
563 }
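/*
 * The AUX data registers hold up to four payload bytes in big-endian
 * order, most significant byte first. For example:
 *
 *	u8 buf[4] = { 0x10, 0x02, 0x02, 0x00 };
 *	pack_aux(buf, 4)                 == 0x10020200
 *	unpack_aux(0x10020200, buf, 4)   restores the same four bytes
 */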
564
565 static int
566 cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
567 uint8_t *send, int send_bytes,
568 uint8_t *recv, int recv_size)
569 {
570 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
571 uint32_t output_reg = intel_dp->output_reg;
572 struct drm_device *dev = encoder->base.dev;
573 uint32_t ch_ctl = output_reg + 0x10;
574 uint32_t ch_data = ch_ctl + 4;
575 int i;
576 int recv_bytes;
577 uint32_t status;
578 uint32_t aux_clock_divider;
579 int try, precharge;
580
581 /* The clock divider is based off the hrawclk,
582 * and would like to run at 2MHz. So, take the
583 * hrawclk value and divide by 2 and use that.
584 * The CDV platform uses 200MHz as hrawclk.
585 *
586 */
587 aux_clock_divider = 200 / 2;
588
589 precharge = 4;
590 if (is_edp(encoder))
591 precharge = 10;
592
593 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
594 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
595 REG_READ(ch_ctl));
596 return -EBUSY;
597 }
598
599 /* Must try at least 3 times according to DP spec */
600 for (try = 0; try < 5; try++) {
601 /* Load the send data into the aux channel data registers */
602 for (i = 0; i < send_bytes; i += 4)
603 REG_WRITE(ch_data + i,
604 pack_aux(send + i, send_bytes - i));
605
606 /* Send the command and wait for it to complete */
607 REG_WRITE(ch_ctl,
608 DP_AUX_CH_CTL_SEND_BUSY |
609 DP_AUX_CH_CTL_TIME_OUT_400us |
610 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
611 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
612 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
613 DP_AUX_CH_CTL_DONE |
614 DP_AUX_CH_CTL_TIME_OUT_ERROR |
615 DP_AUX_CH_CTL_RECEIVE_ERROR);
616 for (;;) {
617 status = REG_READ(ch_ctl);
618 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
619 break;
620 udelay(100);
621 }
622
623 /* Clear done status and any errors */
624 REG_WRITE(ch_ctl,
625 status |
626 DP_AUX_CH_CTL_DONE |
627 DP_AUX_CH_CTL_TIME_OUT_ERROR |
628 DP_AUX_CH_CTL_RECEIVE_ERROR);
629 if (status & DP_AUX_CH_CTL_DONE)
630 break;
631 }
632
633 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
634 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
635 return -EBUSY;
636 }
637
638 /* Check for timeout or receive error.
639 * Timeouts occur when the sink is not connected
640 */
641 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
642 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
643 return -EIO;
644 }
645
646 /* Timeouts occur when the device isn't connected, so they're
647 * "normal" -- don't fill the kernel log with these */
648 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
649 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
650 return -ETIMEDOUT;
651 }
652
653 /* Unload any bytes sent back from the other side */
654 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
655 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
656 if (recv_bytes > recv_size)
657 recv_bytes = recv_size;
658
659 for (i = 0; i < recv_bytes; i += 4)
660 unpack_aux(REG_READ(ch_data + i),
661 recv + i, recv_bytes - i);
662
663 return recv_bytes;
664 }
665
666 /* Write data to the aux channel in native mode */
667 static int
668 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
669 uint16_t address, uint8_t *send, int send_bytes)
670 {
671 int ret;
672 uint8_t msg[20];
673 int msg_bytes;
674 uint8_t ack;
675
676 if (send_bytes > 16)
677 return -1;
678 msg[0] = DP_AUX_NATIVE_WRITE << 4;
679 msg[1] = address >> 8;
680 msg[2] = address & 0xff;
681 msg[3] = send_bytes - 1;
682 memcpy(&msg[4], send, send_bytes);
683 msg_bytes = send_bytes + 4;
684 for (;;) {
685 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
686 if (ret < 0)
687 return ret;
688 ack >>= 4;
689 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
690 break;
691 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
692 udelay(100);
693 else
694 return -EIO;
695 }
696 return send_bytes;
697 }
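/*
 * The header built above follows the DP AUX native request layout: the
 * command sits in the high nibble of byte 0, the 20-bit address is split
 * across bytes 0-2 (only the low 16 bits are used here) and byte 3 holds
 * the transfer length minus one. Writing the single byte DP_SET_POWER_D0
 * (0x01) to DP_SET_POWER (DPCD address 0x600) therefore sends:
 *
 *	msg[] = { 0x80, 0x06, 0x00, 0x00, 0x01 };
 */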
698
699 /* Write a single byte to the aux channel in native mode */
700 static int
701 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
702 uint16_t address, uint8_t byte)
703 {
704 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
705 }
706
707 /* read bytes from a native aux channel */
708 static int
709 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
710 uint16_t address, uint8_t *recv, int recv_bytes)
711 {
712 uint8_t msg[4];
713 int msg_bytes;
714 uint8_t reply[20];
715 int reply_bytes;
716 uint8_t ack;
717 int ret;
718
719 msg[0] = DP_AUX_NATIVE_READ << 4;
720 msg[1] = address >> 8;
721 msg[2] = address & 0xff;
722 msg[3] = recv_bytes - 1;
723
724 msg_bytes = 4;
725 reply_bytes = recv_bytes + 1;
726
727 for (;;) {
728 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
729 reply, reply_bytes);
730 if (ret == 0)
731 return -EPROTO;
732 if (ret < 0)
733 return ret;
734 ack = reply[0] >> 4;
735 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
736 memcpy(recv, reply + 1, ret - 1);
737 return ret - 1;
738 }
739 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
740 udelay(100);
741 else
742 return -EIO;
743 }
744 }
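/*
 * A native read reply carries the AUX ack/nack/defer code in the high
 * nibble of its first byte with the requested DPCD bytes following it,
 * which is why the reply buffer above is sized recv_bytes + 1 and the
 * payload is copied from reply + 1.
 */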
745
746 static int
747 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
748 uint8_t write_byte, uint8_t *read_byte)
749 {
750 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
751 struct cdv_intel_dp *intel_dp = container_of(adapter,
752 struct cdv_intel_dp,
753 adapter);
754 struct gma_encoder *encoder = intel_dp->encoder;
755 uint16_t address = algo_data->address;
756 uint8_t msg[5];
757 uint8_t reply[2];
758 unsigned retry;
759 int msg_bytes;
760 int reply_bytes;
761 int ret;
762
763 /* Set up the command byte */
764 if (mode & MODE_I2C_READ)
765 msg[0] = DP_AUX_I2C_READ << 4;
766 else
767 msg[0] = DP_AUX_I2C_WRITE << 4;
768
769 if (!(mode & MODE_I2C_STOP))
770 msg[0] |= DP_AUX_I2C_MOT << 4;
771
772 msg[1] = address >> 8;
773 msg[2] = address;
774
775 switch (mode) {
776 case MODE_I2C_WRITE:
777 msg[3] = 0;
778 msg[4] = write_byte;
779 msg_bytes = 5;
780 reply_bytes = 1;
781 break;
782 case MODE_I2C_READ:
783 msg[3] = 0;
784 msg_bytes = 4;
785 reply_bytes = 2;
786 break;
787 default:
788 msg_bytes = 3;
789 reply_bytes = 1;
790 break;
791 }
792
793 for (retry = 0; retry < 5; retry++) {
794 ret = cdv_intel_dp_aux_ch(encoder,
795 msg, msg_bytes,
796 reply, reply_bytes);
797 if (ret < 0) {
798 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
799 return ret;
800 }
801
802 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
803 case DP_AUX_NATIVE_REPLY_ACK:
804 /* I2C-over-AUX Reply field is only valid
805 * when paired with AUX ACK.
806 */
807 break;
808 case DP_AUX_NATIVE_REPLY_NACK:
809 DRM_DEBUG_KMS("aux_ch native nack\n");
810 return -EREMOTEIO;
811 case DP_AUX_NATIVE_REPLY_DEFER:
812 udelay(100);
813 continue;
814 default:
815 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
816 reply[0]);
817 return -EREMOTEIO;
818 }
819
820 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
821 case DP_AUX_I2C_REPLY_ACK:
822 if (mode == MODE_I2C_READ) {
823 *read_byte = reply[1];
824 }
825 return reply_bytes - 1;
826 case DP_AUX_I2C_REPLY_NACK:
827 DRM_DEBUG_KMS("aux_i2c nack\n");
828 return -EREMOTEIO;
829 case DP_AUX_I2C_REPLY_DEFER:
830 DRM_DEBUG_KMS("aux_i2c defer\n");
831 udelay(100);
832 break;
833 default:
834 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
835 return -EREMOTEIO;
836 }
837 }
838
839 DRM_ERROR("too many retries, giving up\n");
840 return -EREMOTEIO;
841 }
842
843 static int
844 cdv_intel_dp_i2c_init(struct gma_connector *connector,
845 struct gma_encoder *encoder, const char *name)
846 {
847 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
848 int ret;
849
850 DRM_DEBUG_KMS("i2c_init %s\n", name);
851
852 intel_dp->algo.running = false;
853 intel_dp->algo.address = 0;
854 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
855
856 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
857 intel_dp->adapter.owner = THIS_MODULE;
858 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
859 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
860 intel_dp->adapter.algo_data = &intel_dp->algo;
861 intel_dp->adapter.dev.parent = connector->base.kdev;
862
863 if (is_edp(encoder))
864 cdv_intel_edp_panel_vdd_on(encoder);
865 ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
866 if (is_edp(encoder))
867 cdv_intel_edp_panel_vdd_off(encoder);
868
869 return ret;
870 }
871
872 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
873 struct drm_display_mode *adjusted_mode)
874 {
875 adjusted_mode->hdisplay = fixed_mode->hdisplay;
876 adjusted_mode->hsync_start = fixed_mode->hsync_start;
877 adjusted_mode->hsync_end = fixed_mode->hsync_end;
878 adjusted_mode->htotal = fixed_mode->htotal;
879
880 adjusted_mode->vdisplay = fixed_mode->vdisplay;
881 adjusted_mode->vsync_start = fixed_mode->vsync_start;
882 adjusted_mode->vsync_end = fixed_mode->vsync_end;
883 adjusted_mode->vtotal = fixed_mode->vtotal;
884
885 adjusted_mode->clock = fixed_mode->clock;
886
887 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
888 }
889
890 static bool
891 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
892 struct drm_display_mode *adjusted_mode)
893 {
894 struct drm_psb_private *dev_priv = to_drm_psb_private(encoder->dev);
895 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
896 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
897 int lane_count, clock;
898 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
899 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
900 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
901 int refclock = mode->clock;
902 int bpp = 24;
903
904 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
905 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
906 refclock = intel_dp->panel_fixed_mode->clock;
907 bpp = dev_priv->edp.bpp;
908 }
909
910 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
911 for (clock = max_clock; clock >= 0; clock--) {
912 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
913
914 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
915 intel_dp->link_bw = bws[clock];
916 intel_dp->lane_count = lane_count;
917 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
918 DRM_DEBUG_KMS("Display port link bw %02x lane "
919 "count %d clock %d\n",
920 intel_dp->link_bw, intel_dp->lane_count,
921 adjusted_mode->clock);
922 return true;
923 }
924 }
925 }
926 if (is_edp(intel_encoder)) {
927 /* okay we failed just pick the highest */
928 intel_dp->lane_count = max_lane_count;
929 intel_dp->link_bw = bws[max_clock];
930 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
931 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
932 "count %d clock %d\n",
933 intel_dp->link_bw, intel_dp->lane_count,
934 adjusted_mode->clock);
935
936 return true;
937 }
938 return false;
939 }
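/*
 * The search above prefers the narrowest link that still has enough
 * bandwidth, trying each lane count at the highest link rate first. For
 * example, a 148500 kHz mode at 24 bpp needs 445500 kB/s: one lane tops
 * out at 256500 kB/s even at 2.7 GHz and is skipped, while two lanes at
 * 2.7 GHz provide 513000 kB/s, so the loop settles on link_bw = 0x0a
 * (DP_LINK_BW_2_7) with lane_count = 2.
 */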
940
941 struct cdv_intel_dp_m_n {
942 uint32_t tu;
943 uint32_t gmch_m;
944 uint32_t gmch_n;
945 uint32_t link_m;
946 uint32_t link_n;
947 };
948
949 static void
950 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
951 {
952 /*
953 while (*num > 0xffffff || *den > 0xffffff) {
954 *num >>= 1;
955 *den >>= 1;
956 }*/
957 uint64_t value, m;
958 m = *num;
959 value = m * (0x800000);
960 m = do_div(value, *den);
961 *num = value;
962 *den = 0x800000;
963 }
964
965 static void
966 cdv_intel_dp_compute_m_n(int bpp,
967 int nlanes,
968 int pixel_clock,
969 int link_clock,
970 struct cdv_intel_dp_m_n *m_n)
971 {
972 m_n->tu = 64;
973 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
974 m_n->gmch_n = link_clock * nlanes;
975 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
976 m_n->link_m = pixel_clock;
977 m_n->link_n = link_clock;
978 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
979 }
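/*
 * Both ratios are rescaled so that the denominator becomes the fixed
 * value 0x800000; do_div() turns the 64-bit numerator into the quotient
 * in place and returns the (unused) remainder. For a 148500 kHz mode at
 * 24 bpp on two 2.7 GHz lanes, gmch_m/gmch_n starts out as 445500/540000
 * = 0.825 and ends up as roughly 0.825 * 0x800000 = 6920601 over 0x800000.
 */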
980
981 void
982 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
983 struct drm_display_mode *adjusted_mode)
984 {
985 struct drm_device *dev = crtc->dev;
986 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
987 struct drm_mode_config *mode_config = &dev->mode_config;
988 struct drm_encoder *encoder;
989 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
990 int lane_count = 4, bpp = 24;
991 struct cdv_intel_dp_m_n m_n;
992 int pipe = gma_crtc->pipe;
993
994 /*
995 * Find the lane count in the intel_encoder private
996 */
997 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
998 struct gma_encoder *intel_encoder;
999 struct cdv_intel_dp *intel_dp;
1000
1001 if (encoder->crtc != crtc)
1002 continue;
1003
1004 intel_encoder = to_gma_encoder(encoder);
1005 intel_dp = intel_encoder->dev_priv;
1006 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
1007 lane_count = intel_dp->lane_count;
1008 break;
1009 } else if (is_edp(intel_encoder)) {
1010 lane_count = intel_dp->lane_count;
1011 bpp = dev_priv->edp.bpp;
1012 break;
1013 }
1014 }
1015
1016 /*
1017 * Compute the GMCH and Link ratios. The '3' here is
1018 * the number of bytes_per_pixel post-LUT, which we always
1019 * set up for 8-bits of R/G/B, or 3 bytes total.
1020 */
1021 cdv_intel_dp_compute_m_n(bpp, lane_count,
1022 mode->clock, adjusted_mode->clock, &m_n);
1023
1024 {
1025 REG_WRITE(PIPE_GMCH_DATA_M(pipe),
1026 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
1027 m_n.gmch_m);
1028 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
1029 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
1030 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
1031 }
1032 }
1033
1034 static void
1035 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1036 struct drm_display_mode *adjusted_mode)
1037 {
1038 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1039 struct drm_crtc *crtc = encoder->crtc;
1040 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
1041 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1042 struct drm_device *dev = encoder->dev;
1043
1044 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
1045 intel_dp->DP |= intel_dp->color_range;
1046
1047 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1048 intel_dp->DP |= DP_SYNC_HS_HIGH;
1049 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
1050 intel_dp->DP |= DP_SYNC_VS_HIGH;
1051
1052 intel_dp->DP |= DP_LINK_TRAIN_OFF;
1053
1054 switch (intel_dp->lane_count) {
1055 case 1:
1056 intel_dp->DP |= DP_PORT_WIDTH_1;
1057 break;
1058 case 2:
1059 intel_dp->DP |= DP_PORT_WIDTH_2;
1060 break;
1061 case 4:
1062 intel_dp->DP |= DP_PORT_WIDTH_4;
1063 break;
1064 }
1065 if (intel_dp->has_audio)
1066 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
1067
1068 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
1069 intel_dp->link_configuration[0] = intel_dp->link_bw;
1070 intel_dp->link_configuration[1] = intel_dp->lane_count;
1071
1072 /*
1073 * Check for DPCD version > 1.1 and enhanced framing support
1074 */
1075 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1076 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
1077 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
1078 intel_dp->DP |= DP_ENHANCED_FRAMING;
1079 }
1080
1081 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
1082 if (gma_crtc->pipe == 1)
1083 intel_dp->DP |= DP_PIPEB_SELECT;
1084
1085 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
1086 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
1087 if (is_edp(intel_encoder)) {
1088 uint32_t pfit_control;
1089 cdv_intel_edp_panel_on(intel_encoder);
1090
1091 if (mode->hdisplay != adjusted_mode->hdisplay ||
1092 mode->vdisplay != adjusted_mode->vdisplay)
1093 pfit_control = PFIT_ENABLE;
1094 else
1095 pfit_control = 0;
1096
1097 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
1098
1099 REG_WRITE(PFIT_CONTROL, pfit_control);
1100 }
1101 }
1102
1103
1104 /* If the sink supports it, try to set the power state appropriately */
1105 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
1106 {
1107 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1108 int ret, i;
1109
1110 /* Should have a valid DPCD by this point */
1111 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1112 return;
1113
1114 if (mode != DRM_MODE_DPMS_ON) {
1115 ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
1116 DP_SET_POWER_D3);
1117 if (ret != 1)
1118 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1119 } else {
1120 /*
1121 * When turning on, retry a few times with a 1ms delay between
1122 * attempts to give the sink time to wake up.
1123 */
1124 for (i = 0; i < 3; i++) {
1125 ret = cdv_intel_dp_aux_native_write_1(encoder,
1126 DP_SET_POWER,
1127 DP_SET_POWER_D0);
1128 if (ret == 1)
1129 break;
1130 udelay(1000);
1131 }
1132 }
1133 }
1134
1135 static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
1136 {
1137 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1138 int edp = is_edp(intel_encoder);
1139
1140 if (edp) {
1141 cdv_intel_edp_backlight_off(intel_encoder);
1142 cdv_intel_edp_panel_off(intel_encoder);
1143 cdv_intel_edp_panel_vdd_on(intel_encoder);
1144 }
1145 /* Wake up the sink first */
1146 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
1147 cdv_intel_dp_link_down(intel_encoder);
1148 if (edp)
1149 cdv_intel_edp_panel_vdd_off(intel_encoder);
1150 }
1151
1152 static void cdv_intel_dp_commit(struct drm_encoder *encoder)
1153 {
1154 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1155 int edp = is_edp(intel_encoder);
1156
1157 if (edp)
1158 cdv_intel_edp_panel_on(intel_encoder);
1159 cdv_intel_dp_start_link_train(intel_encoder);
1160 cdv_intel_dp_complete_link_train(intel_encoder);
1161 if (edp)
1162 cdv_intel_edp_backlight_on(intel_encoder);
1163 }
1164
1165 static void
1166 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
1167 {
1168 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1169 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1170 struct drm_device *dev = encoder->dev;
1171 uint32_t dp_reg = REG_READ(intel_dp->output_reg);
1172 int edp = is_edp(intel_encoder);
1173
1174 if (mode != DRM_MODE_DPMS_ON) {
1175 if (edp) {
1176 cdv_intel_edp_backlight_off(intel_encoder);
1177 cdv_intel_edp_panel_vdd_on(intel_encoder);
1178 }
1179 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1180 cdv_intel_dp_link_down(intel_encoder);
1181 if (edp) {
1182 cdv_intel_edp_panel_vdd_off(intel_encoder);
1183 cdv_intel_edp_panel_off(intel_encoder);
1184 }
1185 } else {
1186 if (edp)
1187 cdv_intel_edp_panel_on(intel_encoder);
1188 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1189 if (!(dp_reg & DP_PORT_EN)) {
1190 cdv_intel_dp_start_link_train(intel_encoder);
1191 cdv_intel_dp_complete_link_train(intel_encoder);
1192 }
1193 if (edp)
1194 cdv_intel_edp_backlight_on(intel_encoder);
1195 }
1196 }
1197
1198 /*
1199 * Native read with retry for link status and receiver capability reads for
1200 * cases where the sink may still be asleep.
1201 */
1202 static bool
1203 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
1204 uint8_t *recv, int recv_bytes)
1205 {
1206 int ret, i;
1207
1208 /*
1209 * Sinks are *supposed* to come up within 1ms from an off state,
1210 * but we're also supposed to retry 3 times per the spec.
1211 */
1212 for (i = 0; i < 3; i++) {
1213 ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
1214 recv_bytes);
1215 if (ret == recv_bytes)
1216 return true;
1217 udelay(1000);
1218 }
1219
1220 return false;
1221 }
1222
1223 /*
1224 * Fetch AUX CH registers 0x202 - 0x207 which contain
1225 * link status information
1226 */
1227 static bool
1228 cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
1229 {
1230 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1231 return cdv_intel_dp_aux_native_read_retry(encoder,
1232 DP_LANE0_1_STATUS,
1233 intel_dp->link_status,
1234 DP_LINK_STATUS_SIZE);
1235 }
1236
1237 static uint8_t
1238 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1239 int r)
1240 {
1241 return link_status[r - DP_LANE0_1_STATUS];
1242 }
1243
1244 static uint8_t
1245 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
1246 int lane)
1247 {
1248 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1249 int s = ((lane & 1) ?
1250 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1251 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1252 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1253
1254 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1255 }
1256
1257 static uint8_t
1258 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
1259 int lane)
1260 {
1261 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1262 int s = ((lane & 1) ?
1263 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1264 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1265 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1266
1267 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1268 }
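/*
 * The DPCD packs the adjustment requests for two lanes into each byte:
 * the even lane uses bits 1:0 (voltage swing) and 3:2 (pre-emphasis),
 * the odd lane the upper nibble. A request byte of 0x26, for instance,
 * asks lane 0 for swing level 2 with pre-emphasis level 1 and lane 1
 * for swing level 2 with no pre-emphasis.
 */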
1269
1270 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
1271
1272 static void
1273 cdv_intel_get_adjust_train(struct gma_encoder *encoder)
1274 {
1275 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1276 uint8_t v = 0;
1277 uint8_t p = 0;
1278 int lane;
1279
1280 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1281 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1282 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1283
1284 if (this_v > v)
1285 v = this_v;
1286 if (this_p > p)
1287 p = this_p;
1288 }
1289
1290 if (v >= CDV_DP_VOLTAGE_MAX)
1291 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1292
1293 if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
1294 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1295
1296 for (lane = 0; lane < 4; lane++)
1297 intel_dp->train_set[lane] = v | p;
1298 }
1299
1300
1301 static uint8_t
1302 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1303 int lane)
1304 {
1305 int i = DP_LANE0_1_STATUS + (lane >> 1);
1306 int s = (lane & 1) * 4;
1307 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1308
1309 return (l >> s) & 0xf;
1310 }
1311
1312 /* Check for clock recovery is done on all channels */
1313 static bool
1314 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1315 {
1316 int lane;
1317 uint8_t lane_status;
1318
1319 for (lane = 0; lane < lane_count; lane++) {
1320 lane_status = cdv_intel_get_lane_status(link_status, lane);
1321 if ((lane_status & DP_LANE_CR_DONE) == 0)
1322 return false;
1323 }
1324 return true;
1325 }
1326
1327 /* Check to see if channel eq is done on all channels */
1328 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1329 DP_LANE_CHANNEL_EQ_DONE|\
1330 DP_LANE_SYMBOL_LOCKED)
1331 static bool
1332 cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
1333 {
1334 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1335 uint8_t lane_align;
1336 uint8_t lane_status;
1337 int lane;
1338
1339 lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
1340 DP_LANE_ALIGN_STATUS_UPDATED);
1341 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1342 return false;
1343 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1344 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
1345 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1346 return false;
1347 }
1348 return true;
1349 }
1350
1351 static bool
1352 cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
1353 uint32_t dp_reg_value,
1354 uint8_t dp_train_pat)
1355 {
1356 struct drm_device *dev = encoder->base.dev;
1357 int ret;
1358 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1359
1360 REG_WRITE(intel_dp->output_reg, dp_reg_value);
1361 REG_READ(intel_dp->output_reg);
1362
1363 ret = cdv_intel_dp_aux_native_write_1(encoder,
1364 DP_TRAINING_PATTERN_SET,
1365 dp_train_pat);
1366
1367 if (ret != 1) {
1368 DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
1369 dp_train_pat);
1370 return false;
1371 }
1372
1373 return true;
1374 }
1375
1376
1377 static bool
1378 cdv_intel_dplink_set_level(struct gma_encoder *encoder,
1379 uint8_t dp_train_pat)
1380 {
1381 int ret;
1382 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1383
1384 ret = cdv_intel_dp_aux_native_write(encoder,
1385 DP_TRAINING_LANE0_SET,
1386 intel_dp->train_set,
1387 intel_dp->lane_count);
1388
1389 if (ret != intel_dp->lane_count) {
1390 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
1391 intel_dp->train_set[0], intel_dp->lane_count);
1392 return false;
1393 }
1394 return true;
1395 }
1396
1397 static void
1398 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
1399 {
1400 struct drm_device *dev = encoder->base.dev;
1401 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1402 struct ddi_regoff *ddi_reg;
1403 int vswing, premph, index;
1404
1405 if (intel_dp->output_reg == DP_B)
1406 ddi_reg = &ddi_DP_train_table[0];
1407 else
1408 ddi_reg = &ddi_DP_train_table[1];
1409
1410 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
1411 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
1412 DP_TRAIN_PRE_EMPHASIS_SHIFT;
1413
1414 if (vswing + premph > 3)
1415 return;
1416 #ifdef CDV_FAST_LINK_TRAIN
1417 return;
1418 #endif
1419 DRM_DEBUG_KMS("Test2\n");
1420 //return ;
1421 cdv_sb_reset(dev);
1422 /* ;Swing voltage programming
1423 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
1424 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
1425
1426 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
1427 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
1428
1429 /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
1430 * The VSwing_PreEmph table is also consulted based on the vswing/premph
1431 */
1432 index = (vswing + premph) * 2;
1433 if (premph == 1 && vswing == 1) {
1434 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
1435 } else
1436 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
1437
1438 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
1439 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1440 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
1441 else
1442 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
1443
1444 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
1445 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
1446
1447 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
1448 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
1449
1450 /* ;Pre emphasis programming
1451 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
1452 */
1453 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
1454
1455 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
1456 index = 2 * premph + 1;
1457 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
1458 return;
1459 }
1460
1461
1462 /* Enable corresponding port and start training pattern 1 */
1463 static void
1464 cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
1465 {
1466 struct drm_device *dev = encoder->base.dev;
1467 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1468 int i;
1469 uint8_t voltage;
1470 bool clock_recovery = false;
1471 int tries;
1472 u32 reg;
1473 uint32_t DP = intel_dp->DP;
1474
1475 DP |= DP_PORT_EN;
1476 DP &= ~DP_LINK_TRAIN_MASK;
1477
1478 reg = DP;
1479 reg |= DP_LINK_TRAIN_PAT_1;
1480 /* Enable output, wait for it to become active */
1481 REG_WRITE(intel_dp->output_reg, reg);
1482 REG_READ(intel_dp->output_reg);
1483 gma_wait_for_vblank(dev);
1484
1485 DRM_DEBUG_KMS("Link config\n");
1486 /* Write the link configuration data */
1487 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
1488 intel_dp->link_configuration,
1489 2);
1490
1491 memset(intel_dp->train_set, 0, 4);
1492 voltage = 0;
1493 tries = 0;
1494 clock_recovery = false;
1495
1496 DRM_DEBUG_KMS("Start train\n");
1497 reg = DP | DP_LINK_TRAIN_PAT_1;
1498
1499 for (;;) {
1500 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1501 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1502 intel_dp->train_set[0],
1503 intel_dp->link_configuration[0],
1504 intel_dp->link_configuration[1]);
1505
1506 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
1507 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
1508 }
1509 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1510 /* Set training pattern 1 */
1511
1512 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
1513
1514 udelay(200);
1515 if (!cdv_intel_dp_get_link_status(encoder))
1516 break;
1517
1518 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1519 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1520 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1521
1522 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1523 DRM_DEBUG_KMS("PT1 train is done\n");
1524 clock_recovery = true;
1525 break;
1526 }
1527
1528 /* Check to see if we've tried the max voltage */
1529 for (i = 0; i < intel_dp->lane_count; i++)
1530 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1531 break;
1532 if (i == intel_dp->lane_count)
1533 break;
1534
1535 /* Check to see if we've tried the same voltage 5 times */
1536 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1537 ++tries;
1538 if (tries == 5)
1539 break;
1540 } else
1541 tries = 0;
1542 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1543
1544 /* Compute new intel_dp->train_set as requested by target */
1545 cdv_intel_get_adjust_train(encoder);
1546
1547 }
1548
1549 if (!clock_recovery) {
1550 DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
1551 }
1552
1553 intel_dp->DP = DP;
1554 }
1555
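/*
 * Clock recovery above drives training pattern 1 until every lane reports
 * CR_DONE (or the swing limit is hit); channel equalization below then
 * switches to training pattern 2 and keeps adjusting the drive levels
 * until the lanes report symbol lock and interlane alignment, falling
 * back to a fresh clock-recovery pass when that does not converge.
 */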
1556 static void
1557 cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
1558 {
1559 struct drm_device *dev = encoder->base.dev;
1560 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1561 int tries, cr_tries;
1562 u32 reg;
1563 uint32_t DP = intel_dp->DP;
1564
1565 /* channel equalization */
1566 tries = 0;
1567 cr_tries = 0;
1568
1569 DRM_DEBUG_KMS("\n");
1570 reg = DP | DP_LINK_TRAIN_PAT_2;
1571
1572 for (;;) {
1573
1574 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1575 intel_dp->train_set[0],
1576 intel_dp->link_configuration[0],
1577 intel_dp->link_configuration[1]);
1578 /* channel eq pattern */
1579
1580 if (!cdv_intel_dp_set_link_train(encoder, reg,
1581 DP_TRAINING_PATTERN_2)) {
1582 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
1583 }
1584 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1585
1586 if (cr_tries > 5) {
1587 DRM_ERROR("failed to train DP, aborting\n");
1588 cdv_intel_dp_link_down(encoder);
1589 break;
1590 }
1591
1592 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1593
1594 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);
1595
1596 udelay(1000);
1597 if (!cdv_intel_dp_get_link_status(encoder))
1598 break;
1599
1600 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1601 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1602 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1603
1604 /* Make sure clock is still ok */
1605 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1606 cdv_intel_dp_start_link_train(encoder);
1607 cr_tries++;
1608 continue;
1609 }
1610
1611 if (cdv_intel_channel_eq_ok(encoder)) {
1612 DRM_DEBUG_KMS("PT2 train is done\n");
1613 break;
1614 }
1615
1616 /* Try 5 times, then try clock recovery if that fails */
1617 if (tries > 5) {
1618 cdv_intel_dp_link_down(encoder);
1619 cdv_intel_dp_start_link_train(encoder);
1620 tries = 0;
1621 cr_tries++;
1622 continue;
1623 }
1624
1625 /* Compute new intel_dp->train_set as requested by target */
1626 cdv_intel_get_adjust_train(encoder);
1627 ++tries;
1628
1629 }
1630
1631 reg = DP | DP_LINK_TRAIN_OFF;
1632
1633 REG_WRITE(intel_dp->output_reg, reg);
1634 REG_READ(intel_dp->output_reg);
1635 cdv_intel_dp_aux_native_write_1(encoder,
1636 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1637 }
1638
1639 static void
1640 cdv_intel_dp_link_down(struct gma_encoder *encoder)
1641 {
1642 struct drm_device *dev = encoder->base.dev;
1643 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1644 uint32_t DP = intel_dp->DP;
1645
1646 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1647 return;
1648
1649 DRM_DEBUG_KMS("\n");
1650
1651
1652 {
1653 DP &= ~DP_LINK_TRAIN_MASK;
1654 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1655 }
1656 REG_READ(intel_dp->output_reg);
1657
1658 msleep(17);
1659
1660 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1661 REG_READ(intel_dp->output_reg);
1662 }
1663
1664 static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
1665 {
1666 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1667 enum drm_connector_status status;
1668
1669 status = connector_status_disconnected;
1670 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
1671 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1672 {
1673 if (intel_dp->dpcd[DP_DPCD_REV] != 0)
1674 status = connector_status_connected;
1675 }
1676 if (status == connector_status_connected)
1677 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
1678 intel_dp->dpcd[0], intel_dp->dpcd[1],
1679 intel_dp->dpcd[2], intel_dp->dpcd[3]);
1680 return status;
1681 }
1682
1683 /*
1684 * Detect whether a DP sink is present by reading its DPCD over the AUX channel.
1685 *
1686 * \return connector_status_connected if a DP sink responds.
1687 * \return connector_status_disconnected otherwise.
1688 */
1689 static enum drm_connector_status
1690 cdv_intel_dp_detect(struct drm_connector *connector, bool force)
1691 {
1692 struct gma_encoder *encoder = gma_attached_encoder(connector);
1693 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1694 enum drm_connector_status status;
1695 struct edid *edid = NULL;
1696 int edp = is_edp(encoder);
1697
1698 intel_dp->has_audio = false;
1699
1700 if (edp)
1701 cdv_intel_edp_panel_vdd_on(encoder);
1702 status = cdv_dp_detect(encoder);
1703 if (status != connector_status_connected) {
1704 if (edp)
1705 cdv_intel_edp_panel_vdd_off(encoder);
1706 return status;
1707 }
1708
1709 if (intel_dp->force_audio) {
1710 intel_dp->has_audio = intel_dp->force_audio > 0;
1711 } else {
1712 edid = drm_get_edid(connector, &intel_dp->adapter);
1713 if (edid) {
1714 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1715 kfree(edid);
1716 }
1717 }
1718 if (edp)
1719 cdv_intel_edp_panel_vdd_off(encoder);
1720
1721 return connector_status_connected;
1722 }
1723
1724 static int cdv_intel_dp_get_modes(struct drm_connector *connector)
1725 {
1726 struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
1727 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1728 struct edid *edid = NULL;
1729 int ret = 0;
1730 int edp = is_edp(intel_encoder);
1731
1732
1733 edid = drm_get_edid(connector, &intel_dp->adapter);
1734 if (edid) {
1735 drm_connector_update_edid_property(connector, edid);
1736 ret = drm_add_edid_modes(connector, edid);
1737 kfree(edid);
1738 }
1739
1740 if (is_edp(intel_encoder)) {
1741 struct drm_device *dev = connector->dev;
1742 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
1743
1744 cdv_intel_edp_panel_vdd_off(intel_encoder);
1745 if (ret) {
1746 if (edp && !intel_dp->panel_fixed_mode) {
1747 struct drm_display_mode *newmode;
1748 list_for_each_entry(newmode, &connector->probed_modes,
1749 head) {
1750 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1751 intel_dp->panel_fixed_mode =
1752 drm_mode_duplicate(dev, newmode);
1753 break;
1754 }
1755 }
1756 }
1757
1758 return ret;
1759 }
1760 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
1761 intel_dp->panel_fixed_mode =
1762 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1763 if (intel_dp->panel_fixed_mode) {
1764 intel_dp->panel_fixed_mode->type |=
1765 DRM_MODE_TYPE_PREFERRED;
1766 }
1767 }
1768 if (intel_dp->panel_fixed_mode != NULL) {
1769 struct drm_display_mode *mode;
1770 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
1771 drm_mode_probed_add(connector, mode);
1772 return 1;
1773 }
1774 }
1775
1776 return ret;
1777 }
1778
1779 static bool
1780 cdv_intel_dp_detect_audio(struct drm_connector *connector)
1781 {
1782 struct gma_encoder *encoder = gma_attached_encoder(connector);
1783 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1784 struct edid *edid;
1785 bool has_audio = false;
1786 int edp = is_edp(encoder);
1787
1788 if (edp)
1789 cdv_intel_edp_panel_vdd_on(encoder);
1790
1791 edid = drm_get_edid(connector, &intel_dp->adapter);
1792 if (edid) {
1793 has_audio = drm_detect_monitor_audio(edid);
1794 kfree(edid);
1795 }
1796 if (edp)
1797 cdv_intel_edp_panel_vdd_off(encoder);
1798
1799 return has_audio;
1800 }
1801
1802 static int
1803 cdv_intel_dp_set_property(struct drm_connector *connector,
1804 struct drm_property *property,
1805 uint64_t val)
1806 {
1807 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
1808 struct gma_encoder *encoder = gma_attached_encoder(connector);
1809 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1810 int ret;
1811
1812 ret = drm_object_property_set_value(&connector->base, property, val);
1813 if (ret)
1814 return ret;
1815
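/* force_audio property: a negative value forces audio off, zero re-probes
 * the EDID, and a positive value forces audio on. */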
1816 if (property == dev_priv->force_audio_property) {
1817 int i = val;
1818 bool has_audio;
1819
1820 if (i == intel_dp->force_audio)
1821 return 0;
1822
1823 intel_dp->force_audio = i;
1824
1825 if (i == 0)
1826 has_audio = cdv_intel_dp_detect_audio(connector);
1827 else
1828 has_audio = i > 0;
1829
1830 if (has_audio == intel_dp->has_audio)
1831 return 0;
1832
1833 intel_dp->has_audio = has_audio;
1834 goto done;
1835 }
1836
1837 if (property == dev_priv->broadcast_rgb_property) {
1838 if (val == !!intel_dp->color_range)
1839 return 0;
1840
1841 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
1842 goto done;
1843 }
1844
1845 return -EINVAL;
1846
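/* Either property changes what we send down the wire, so if a CRTC is
 * currently driving this encoder, force a full modeset to apply it. */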
1847 done:
1848 if (encoder->base.crtc) {
1849 struct drm_crtc *crtc = encoder->base.crtc;
1850 drm_crtc_helper_set_mode(crtc, &crtc->mode,
1851 crtc->x, crtc->y,
1852 crtc->primary->fb);
1853 }
1854
1855 return 0;
1856 }
1857
1858 static void
1859 cdv_intel_dp_destroy(struct drm_connector *connector)
1860 {
1861 struct gma_connector *gma_connector = to_gma_connector(connector);
1862 struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
1863 struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;
1864
1865 if (is_edp(gma_encoder)) {
1866 /* cdv_intel_panel_destroy_backlight(connector->dev); */
1867 kfree(intel_dp->panel_fixed_mode);
1868 intel_dp->panel_fixed_mode = NULL;
1869 }
1870 i2c_del_adapter(&intel_dp->adapter);
1871 drm_connector_cleanup(connector);
1872 kfree(gma_connector);
1873 }
1874
1875 static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
1876 .dpms = cdv_intel_dp_dpms,
1877 .mode_fixup = cdv_intel_dp_mode_fixup,
1878 .prepare = cdv_intel_dp_prepare,
1879 .mode_set = cdv_intel_dp_mode_set,
1880 .commit = cdv_intel_dp_commit,
1881 };
1882
1883 static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
1884 .dpms = drm_helper_connector_dpms,
1885 .detect = cdv_intel_dp_detect,
1886 .fill_modes = drm_helper_probe_single_connector_modes,
1887 .set_property = cdv_intel_dp_set_property,
1888 .destroy = cdv_intel_dp_destroy,
1889 };
1890
1891 static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
1892 .get_modes = cdv_intel_dp_get_modes,
1893 .mode_valid = cdv_intel_dp_mode_valid,
1894 .best_encoder = gma_best_encoder,
1895 };
1896
1897 static void cdv_intel_dp_add_properties(struct drm_connector *connector)
1898 {
1899 cdv_intel_attach_force_audio_property(connector);
1900 cdv_intel_attach_broadcast_rgb_property(connector);
1901 }
1902
1903 /* Check the VBT to see whether the eDP is on the DP-C port */
1904 static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
1905 {
1906 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
1907 struct child_device_config *p_child;
1908 int i;
1909
1910 if (!dev_priv->child_dev_num)
1911 return false;
1912
1913 for (i = 0; i < dev_priv->child_dev_num; i++) {
1914 p_child = dev_priv->child_dev + i;
1915
1916 if (p_child->dvo_port == PORT_IDPC &&
1917 p_child->device_type == DEVICE_TYPE_eDP)
1918 return true;
1919 }
1920 return false;
1921 }
1922
1923 /* Cedarview display clock gating
1924  *
1925  * We need to disable clock gating to get correct behaviour while enabling
1926  * DP/eDP. TODO - investigate whether we can turn it back on again once the
1927  * output has been enabled. */
1928 static void cdv_disable_intel_clock_gating(struct drm_device *dev)
1929 {
1930 u32 reg_value;
1931 reg_value = REG_READ(DSPCLK_GATE_D);
1932
1933 reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
1934 DPUNIT_PIPEA_GATE_DISABLE |
1935 DPCUNIT_CLOCK_GATE_DISABLE |
1936 DPLSUNIT_CLOCK_GATE_DISABLE |
1937 DPOUNIT_CLOCK_GATE_DISABLE |
1938 DPIOUNIT_CLOCK_GATE_DISABLE);
1939
1940 REG_WRITE(DSPCLK_GATE_D, reg_value);
1941
1942 udelay(500);
1943 }
1944
1945 void
1946 cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
1947 {
1948 struct gma_encoder *gma_encoder;
1949 struct gma_connector *gma_connector;
1950 struct drm_connector *connector;
1951 struct drm_encoder *encoder;
1952 struct cdv_intel_dp *intel_dp;
1953 const char *name = NULL;
1954 int type = DRM_MODE_CONNECTOR_DisplayPort;
1955
1956 gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
1957 if (!gma_encoder)
1958 return;
1959 gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
1960 if (!gma_connector)
1961 goto err_connector;
1962 intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
1963 if (!intel_dp)
1964 goto err_priv;
1965
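/* On CDV the DP-C pins may really drive an eDP panel; the VBT child
 * device table tells us which. */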
1966 if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
1967 type = DRM_MODE_CONNECTOR_eDP;
1968
1969 connector = &gma_connector->base;
1970 encoder = &gma_encoder->base;
1971
1972 drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
1973 drm_simple_encoder_init(dev, encoder, DRM_MODE_ENCODER_TMDS);
1974
1975 gma_connector_attach_encoder(gma_connector, gma_encoder);
1976
1977 if (type == DRM_MODE_CONNECTOR_DisplayPort)
1978 gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
1979 else
1980 gma_encoder->type = INTEL_OUTPUT_EDP;
1981
1982
1983 gma_encoder->dev_priv = intel_dp;
1984 intel_dp->encoder = gma_encoder;
1985 intel_dp->output_reg = output_reg;
1986
1987 drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
1988 drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);
1989
1990 connector->polled = DRM_CONNECTOR_POLL_HPD;
1991 connector->interlace_allowed = false;
1992 connector->doublescan_allowed = false;
1993
1994 /* Set up the DDC bus. */
1995 switch (output_reg) {
1996 case DP_B:
1997 name = "DPDDC-B";
1998 gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
1999 break;
2000 case DP_C:
2001 name = "DPDDC-C";
2002 gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
2003 break;
2004 }
2005
2006 cdv_disable_intel_clock_gating(dev);
2007
2008 cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
2009 /* FIXME: check for and handle i2c initialisation failure */
2010 cdv_intel_dp_add_properties(connector);
2011
2012 if (is_edp(gma_encoder)) {
2013 int ret;
2014 struct edp_power_seq cur;
2015 u32 pp_on, pp_off, pp_div;
2016 u32 pwm_ctrl;
2017
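/* Write the unlock key into PP_CONTROL before touching the panel power
 * sequencer and backlight PWM setup below (assumed to behave like the
 * write-protected i915 panel power sequencer registers). */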
2018 pp_on = REG_READ(PP_CONTROL);
2019 pp_on &= ~PANEL_UNLOCK_MASK;
2020 pp_on |= PANEL_UNLOCK_REGS;
2021
2022 REG_WRITE(PP_CONTROL, pp_on);
2023
2024 pwm_ctrl = REG_READ(BLC_PWM_CTL2);
2025 pwm_ctrl |= PWM_PIPE_B;
2026 REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);
2027
2028 pp_on = REG_READ(PP_ON_DELAYS);
2029 pp_off = REG_READ(PP_OFF_DELAYS);
2030 pp_div = REG_READ(PP_DIVISOR);
2031
2032 /* Pull timing values out of registers */
2033 cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
2034 PANEL_POWER_UP_DELAY_SHIFT;
2035
2036 cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
2037 PANEL_LIGHT_ON_DELAY_SHIFT;
2038
2039 cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
2040 PANEL_LIGHT_OFF_DELAY_SHIFT;
2041
2042 cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
2043 PANEL_POWER_DOWN_DELAY_SHIFT;
2044
2045 cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
2046 PANEL_POWER_CYCLE_DELAY_SHIFT);
2047
2048 DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2049 cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
2050
2051
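/* The register fields count 100 us units (the power-cycle field counts
 * 100 ms units, stored biased by one); convert everything to the
 * milliseconds used for the driver's panel power waits. */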
2052 intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
2053 intel_dp->backlight_on_delay = cur.t8 / 10;
2054 intel_dp->backlight_off_delay = cur.t9 / 10;
2055 intel_dp->panel_power_down_delay = cur.t10 / 10;
2056 intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;
2057
2058 DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
2059 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2060 intel_dp->panel_power_cycle_delay);
2061
2062 DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
2063 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
2064
2065
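/* Sanity check that a panel really answers on the AUX channel: read the
 * DPCD receiver capabilities with VDD forced on. If nothing replies, the
 * VBT's eDP claim was wrong and the connector is torn down again. */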
2066 cdv_intel_edp_panel_vdd_on(gma_encoder);
2067 ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
2068 intel_dp->dpcd,
2069 sizeof(intel_dp->dpcd));
2070 cdv_intel_edp_panel_vdd_off(gma_encoder);
2071 if (ret <= 0) {
2072 /* if this fails, presume the device is a ghost */
2073 DRM_INFO("failed to retrieve link info, disabling eDP\n");
2074 drm_encoder_cleanup(encoder);
2075 cdv_intel_dp_destroy(connector);
2076 goto err_connector;
2077 } else {
2078 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
2079 intel_dp->dpcd[0], intel_dp->dpcd[1],
2080 intel_dp->dpcd[2], intel_dp->dpcd[3]);
2081
2082 }
2083 /* The CDV reference driver moves panel backlight setup into the displays that
2084  * have a backlight: this is a good idea and one we should probably adopt;
2085  * however, we need to migrate all the drivers before we can do that. */
2086 /*cdv_intel_panel_setup_backlight(dev); */
2087 }
2088 return;
2089
2090 err_priv:
2091 kfree(gma_connector);
2092 err_connector:
2093 kfree(gma_encoder);
2094 }
2095