/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include "atom-bits.h"
#include "drm_dp_helper.h"

/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
#define DP_LINK_STATUS_SIZE        6
#define DP_DPCD_SIZE               8

static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};

/***** radeon AUX functions *****/
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};

static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	base = (unsigned char *)rdev->mode_info.atom_context->scratch;

	memcpy(base, send, send_bytes);

	args.v1.lpAuxRequest = 0;
	args.v1.lpDataOut = 16;
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		memcpy(recv, base + 16, recv_bytes);

	return recv_bytes;
}

static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
				      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	int ret;
	u8 msg[20];
	int msg_bytes = send_bytes + 4;
	u8 ack;
	unsigned retry;

	if (send_bytes > 16)
		return -1;

	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_WRITE << 4;
	msg[3] = (msg_bytes << 4) | (send_bytes - 1);
	memcpy(&msg[4], send, send_bytes);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, NULL, 0, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return send_bytes;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else
			return -EIO;
	}

	return -EIO;
}

static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
				     u16 address, u8 *recv, int recv_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[4];
	int msg_bytes = 4;
	u8 ack;
	int ret;
	unsigned retry;

	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_READ << 4;
	msg[3] = (msg_bytes << 4) | (recv_bytes - 1);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, recv, recv_bytes, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return ret;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else if (ret == 0)
			return -EPROTO;
		else
			return -EIO;
	}

	return -EIO;
}

static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
				  u16 reg, u8 val)
{
	radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
}

static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
			       u16 reg)
{
	u8 val = 0;

	radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);

	return val;
}

int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			 u8 write_byte, u8 *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
	u16 address = algo_data->address;
	u8 msg[5];
	u8 reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes = 1;
	int ret;
	u8 ack;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[2] = AUX_I2C_READ << 4;
	else
		msg[2] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[2] |= AUX_I2C_MOT << 4;

	msg[0] = address;
	msg[1] = address >> 8;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		msg[3] = msg_bytes << 4;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		msg[3] = msg_bytes << 4;
		break;
	default:
		msg_bytes = 4;
		msg[3] = 3 << 4;
		break;
	}

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(auxch,
					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (ack & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_ch native defer\n");
			udelay(400);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}

		switch (ack & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ)
				*read_byte = reply[0];
			return ret;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(400);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}
	}

	DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
	return -EREMOTEIO;
}

/***** general DP utility functions *****/

static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane)
{
	int i = DP_LANE0_1_STATUS + (lane >> 1);
	int s = (lane & 1) * 4;
	u8 l = dp_link_status(link_status, i);
	return (l >> s) & 0xf;
}

static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
				 int lane_count)
{
	int lane;
	u8 lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane_count)
{
	u8 lane_align;
	u8 lane_status;
	int lane;

	lane_align = dp_link_status(link_status,
				    DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < lane_count; lane++) {
		lane_status = dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
					int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	u8 l = dp_link_status(link_status, i);

	return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
					     int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	u8 l = dp_link_status(link_status, i);

	return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}

#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_1200
#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPHASIS_9_5

static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
				int lane_count,
				u8 train_set[4])
{
	u8 v = 0;
	u8 p = 0;
	int lane;

	for (lane = 0; lane < lane_count; lane++) {
		u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
		u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);

		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
			      lane,
			      voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
			      pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= DP_VOLTAGE_MAX)
		v |= DP_TRAIN_MAX_SWING_REACHED;

	if (p >= DP_PRE_EMPHASIS_MAX)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
		      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
		      pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

	for (lane = 0; lane < 4; lane++)
		train_set[lane] = v | p;
}

/* convert bits per color to bits per pixel */
/* get bpc from the EDID */
static int convert_bpc_to_bpp(int bpc)
{
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}

/* get the max pix clock supported by the link rate and lane num */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}

static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
{
	switch (dpcd[DP_MAX_LINK_RATE]) {
	case DP_LINK_BW_1_62:
	default:
		return 162000;
	case DP_LINK_BW_2_7:
		return 270000;
	case DP_LINK_BW_5_4:
		return 540000;
	}
}

static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
	return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
}

static u8 dp_get_dp_link_rate_coded(int link_rate)
{
	switch (link_rate) {
	case 162000:
	default:
		return DP_LINK_BW_1_62;
	case 270000:
		return DP_LINK_BW_2_7;
	case 540000:
		return DP_LINK_BW_5_4;
	}
}

/***** radeon specific DP functions *****/

/* First get the min lane# when low rate is used according to pixel clock
 * (prefer low rate), second check max lane# supported by DP panel,
 * if the max lane# < low rate lane# then use max lane# instead.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int max_link_rate = dp_get_max_link_rate(dpcd);
	int max_lane_num = dp_get_max_lane_number(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}

static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int lane_num, max_pix_clock;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	return dp_get_max_link_rate(dpcd);
}

static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}

u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}

bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[25];
	int ret, i;

	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
	if (ret > 0) {
		memcpy(dig_connector->dpcd, msg, 8);
		DRM_DEBUG_KMS("DPCD: ");
		for (i = 0; i < 8; i++)
			DRM_DEBUG_KMS("%02x ", msg[i]);
		DRM_DEBUG_KMS("\n");
		return true;
	}
	dig_connector->dpcd[0] = 0;
	return false;
}

int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
	else if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
		 ENCODER_OBJECT_ID_TRAVIS) {
		u8 id[6];
		int i;
		for (i = 0; i < 6; i++)
			id[i] = radeon_read_dpcd_reg(radeon_connector, 0x503 + i);
		if (id[0] == 0x73 &&
		    id[1] == 0x69 &&
		    id[2] == 0x76 &&
		    id[3] == 0x61 &&
		    id[4] == 0x72 &&
		    id[5] == 0x54)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
		else
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		u8 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
		if (tmp & 1)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	}

	return panel_mode;
}

void radeon_dp_set_link_config(struct drm_connector *connector,
			       struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
		dig_connector->dp_clock =
			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
		dig_connector->dp_lane_count =
			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
	}
}

int radeon_dp_mode_valid_helper(struct drm_connector *connector,
				struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int dp_clock;

	if (!radeon_connector->con_priv)
		return MODE_CLOCK_HIGH;
	dig_connector = radeon_connector->con_priv;

	dp_clock =
		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

	if ((dp_clock == 540000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
				      u8 link_status[DP_LINK_STATUS_SIZE])
{
	int ret;
	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
					link_status, DP_LINK_STATUS_SIZE, 100);
	if (ret <= 0) {
		DRM_ERROR("displayport link status failed\n");
		return false;
	}

	DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
		      link_status[0], link_status[1], link_status[2],
		      link_status[3], link_status[4], link_status[5]);
	return true;
}

bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;

	if (!radeon_dp_get_link_status(radeon_connector, link_status))
		return false;
	if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
		return false;
	return true;
}

struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	int enc_id;
	int dp_clock;
	int dp_lane_count;
	int rd_interval;
	bool tp3_supported;
	u8 dpcd[8];
	u8 train_set[4];
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 tries;
	bool use_dpencoder;
};

static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
				   dp_info->train_set, dp_info->dp_lane_count, 0);
}

static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
}

static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink */
	if (dp_info->dpcd[0] >= 0x11)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_SET_POWER, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, 0);

	if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) &&
	    (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) {
		radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1);
	}

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (dp_info->dpcd[0] >= 0x11)
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	return 0;
}

static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}

static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;
	while (1) {
		if (dp_info->rd_interval == 0)
			udelay(100);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
			break;

		if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		if (dp_info->rd_interval == 0)
			udelay(400);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
			break;

		if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't program properly the
	 * training pattern. When facing such version use the
	 * DIGXEncoderControl (X == 1 | 2)
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	else
		dp_info.tp3_supported = false;

	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}