/*
 * Copyright 2007-11 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

/* Read the current backlight level out of the BIOS_SCRATCH_2 register
 * (the ATOM firmware mirrors the level in a scratch-register bitfield).
 */
static u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
{
	u8 backlight_level;
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);

	return backlight_level;
}

/* Store @backlight_level in the BIOS_SCRATCH_2 bitfield, preserving the
 * other bits of the scratch register (read-modify-write).
 */
static void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
						   u8 backlight_level)
{
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
			   ATOM_S2_CURRENT_BL_LEVEL_MASK);

	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
}

/* Return the current backlight level, or 0 when the GPU does not control
 * the backlight (firmware flag not set).
 */
u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return 0;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

/* Program a new backlight level: update the scratch register and, for the
 * DIG encoders driving an LCD, issue the matching transmitter actions
 * (BLOFF for level 0, brightness-control + BLON otherwise).
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder_atom_dig *dig;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv)
	{
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			/* level 0 turns the panel backlight off entirely;
			 * anything else reprograms brightness and turns it on.
			 */
			if (dig->backlight_level == 0)
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}

#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)

/* Clamp the backlight-class brightness into the hardware's 0..AMDGPU_MAX_BL_LEVEL range. */
static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
{
	u8 level;

	/* Convert brightness to hardware level */
	if (bd->props.brightness < 0)
		level = 0;
	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
		level = AMDGPU_MAX_BL_LEVEL;
	else
		level = bd->props.brightness;

	return level;
}

/* backlight_ops.update_status: push the sysfs brightness to the hardware. */
static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;

	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
						    amdgpu_atombios_encoder_backlight_level(bd));

	return 0;
}

/* backlight_ops.get_brightness: report the level currently in the scratch register. */
static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;

	return
amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 154 } 155 156 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = { 157 .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness, 158 .update_status = amdgpu_atombios_encoder_update_backlight_status, 159 }; 160 161 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder, 162 struct drm_connector *drm_connector) 163 { 164 struct drm_device *dev = amdgpu_encoder->base.dev; 165 struct amdgpu_device *adev = dev->dev_private; 166 struct backlight_device *bd; 167 struct backlight_properties props; 168 struct amdgpu_backlight_privdata *pdata; 169 struct amdgpu_encoder_atom_dig *dig; 170 u8 backlight_level; 171 char bl_name[16]; 172 173 /* Mac laptops with multiple GPUs use the gmux driver for backlight 174 * so don't register a backlight device 175 */ 176 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) && 177 (adev->pdev->device == 0x6741)) 178 return; 179 180 if (!amdgpu_encoder->enc_priv) 181 return; 182 183 if (!adev->is_atom_bios) 184 return; 185 186 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 187 return; 188 189 pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL); 190 if (!pdata) { 191 DRM_ERROR("Memory allocation failed\n"); 192 goto error; 193 } 194 195 memset(&props, 0, sizeof(props)); 196 props.max_brightness = AMDGPU_MAX_BL_LEVEL; 197 props.type = BACKLIGHT_RAW; 198 snprintf(bl_name, sizeof(bl_name), 199 "amdgpu_bl%d", dev->primary->index); 200 bd = backlight_device_register(bl_name, drm_connector->kdev, 201 pdata, &amdgpu_atombios_encoder_backlight_ops, &props); 202 if (IS_ERR(bd)) { 203 DRM_ERROR("Backlight registration failed\n"); 204 goto error; 205 } 206 207 pdata->encoder = amdgpu_encoder; 208 209 backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 210 211 dig = amdgpu_encoder->enc_priv; 212 dig->bl_dev = bd; 213 214 bd->props.brightness = 
amdgpu_atombios_encoder_get_backlight_brightness(bd); 215 bd->props.power = FB_BLANK_UNBLANK; 216 backlight_update_status(bd); 217 218 DRM_INFO("amdgpu atom DIG backlight initialized\n"); 219 220 return; 221 222 error: 223 kfree(pdata); 224 return; 225 } 226 227 void 228 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder) 229 { 230 struct drm_device *dev = amdgpu_encoder->base.dev; 231 struct amdgpu_device *adev = dev->dev_private; 232 struct backlight_device *bd = NULL; 233 struct amdgpu_encoder_atom_dig *dig; 234 235 if (!amdgpu_encoder->enc_priv) 236 return; 237 238 if (!adev->is_atom_bios) 239 return; 240 241 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 242 return; 243 244 dig = amdgpu_encoder->enc_priv; 245 bd = dig->bl_dev; 246 dig->bl_dev = NULL; 247 248 if (bd) { 249 struct amdgpu_legacy_backlight_privdata *pdata; 250 251 pdata = bl_get_data(bd); 252 backlight_device_unregister(bd); 253 kfree(pdata); 254 255 DRM_INFO("amdgpu atom LVDS backlight unloaded\n"); 256 } 257 } 258 259 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */ 260 261 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder) 262 { 263 } 264 265 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder) 266 { 267 } 268 269 #endif 270 271 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder) 272 { 273 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 274 switch (amdgpu_encoder->encoder_id) { 275 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 276 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 277 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 278 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 279 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 280 return true; 281 default: 282 return false; 283 } 284 } 285 286 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder, 287 const struct drm_display_mode *mode, 288 struct drm_display_mode *adjusted_mode) 289 { 290 struct amdgpu_encoder 
		*amdgpu_encoder = to_amdgpu_encoder(encoder);

	/* set the active encoder to connector routing */
	amdgpu_encoder_set_active_device(encoder);
	drm_mode_set_crtcinfo(adjusted_mode, 0);

	/* hw bug */
	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;

	/* vertical FP must be at least 1 */
	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
		adjusted_mode->crtc_vsync_start++;

	/* get the native mode for scaling */
	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
	else if (amdgpu_encoder->rmx_type != RMX_OFF)
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);

	/* DFP/LCD and DP bridges need the DP link parameters recomputed
	 * for the adjusted mode.
	 */
	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
	}

	return true;
}

/* Execute the DAC1/DAC2 EncoderControl ATOM table for the legacy DAC encoders. */
static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
	int index = 0;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
		break;
	}

	args.ucAction = action;
	args.ucDacStandard =
		ATOM_DAC1_PS2;
	/* AtomBIOS tables take the pixel clock in units of 10 kHz. */
	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/* Map the crtc's bits-per-color to the ATOM PANEL_*BIT_PER_COLOR encoding.
 * Defaults to 8 bpc when no crtc is attached or the value is unrecognized.
 */
static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
{
	int bpc = 8;

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		bpc = amdgpu_crtc->bpc;
	}

	switch (bpc) {
	case 0:
		return PANEL_BPC_UNDEFINE;
	case 6:
		return PANEL_6BIT_PER_COLOR;
	case 8:
	default:
		return PANEL_8BIT_PER_COLOR;
	case 10:
		return PANEL_10BIT_PER_COLOR;
	case 12:
		return PANEL_12BIT_PER_COLOR;
	case 16:
		return PANEL_16BIT_PER_COLOR;
	}
}

/* One overlay for all revisions of the DVOEncoderControl table arguments. */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};

/* Execute the DVOEncoderControl table, packing the arguments according to
 * the table revision reported by the BIOS.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/* Pick the ATOM_ENCODER_MODE_* for the encoder based on the connector type,
 * sink capabilities (HDMI audio, DP sink type) and the amdgpu_audio module
 * parameter.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
465 */ 466 if (!connector) 467 connector = amdgpu_get_connector_for_encoder_init(encoder); 468 amdgpu_connector = to_amdgpu_connector(connector); 469 470 switch (connector->connector_type) { 471 case DRM_MODE_CONNECTOR_DVII: 472 case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */ 473 if (amdgpu_audio != 0) { 474 if (amdgpu_connector->use_digital && 475 (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)) 476 return ATOM_ENCODER_MODE_HDMI; 477 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 478 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 479 return ATOM_ENCODER_MODE_HDMI; 480 else if (amdgpu_connector->use_digital) 481 return ATOM_ENCODER_MODE_DVI; 482 else 483 return ATOM_ENCODER_MODE_CRT; 484 } else if (amdgpu_connector->use_digital) { 485 return ATOM_ENCODER_MODE_DVI; 486 } else { 487 return ATOM_ENCODER_MODE_CRT; 488 } 489 break; 490 case DRM_MODE_CONNECTOR_DVID: 491 case DRM_MODE_CONNECTOR_HDMIA: 492 default: 493 if (amdgpu_audio != 0) { 494 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE) 495 return ATOM_ENCODER_MODE_HDMI; 496 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 497 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 498 return ATOM_ENCODER_MODE_HDMI; 499 else 500 return ATOM_ENCODER_MODE_DVI; 501 } else { 502 return ATOM_ENCODER_MODE_DVI; 503 } 504 break; 505 case DRM_MODE_CONNECTOR_LVDS: 506 return ATOM_ENCODER_MODE_LVDS; 507 break; 508 case DRM_MODE_CONNECTOR_DisplayPort: 509 dig_connector = amdgpu_connector->con_priv; 510 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) || 511 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) { 512 return ATOM_ENCODER_MODE_DP; 513 } else if (amdgpu_audio != 0) { 514 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE) 515 return ATOM_ENCODER_MODE_HDMI; 516 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 517 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 518 return ATOM_ENCODER_MODE_HDMI; 
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}

/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 * Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 * DIG1 drives UNIPHY0 link A, A+B
 * DIG2 drives UNIPHY0 link B
 * DIG3 drives UNIPHY1 link A, A+B
 * DIG4 drives UNIPHY1 link B
 * DIG5 drives UNIPHY2 link A, A+B
 * DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
 */

/* One overlay for all revisions of the DIGxEncoderControl table arguments. */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
};

/* Execute the DIGxEncoderControl table for @action; @panel_mode is only
 * consumed for ATOM_ENCODER_CMD_SETUP_PANEL_MODE.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND,
					    DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* NOTE(review): writing args.v3.ucPanelMode in the v1
			 * case relies on union field-offset aliasing between
			 * table revisions — do not "fix" without checking the
			 * ATOM struct layouts.
			 */
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			/* NOTE(review): the DPLINKRATE bit set here is
			 * overwritten by the plain assignments in the switch
			 * below; this ordering matches the historical code —
			 * confirm against the ATOM spec before reordering.
			 */
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			/* NOTE(review): the args.v1.ucConfig writes below rely
			 * on ucConfig sharing its offset across revisions of
			 * the union — intentional, do not "fix" blindly.
			 */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* hpd_id is 0-based internally; the table wants 1-based
			 * (0 = no HPD pin).
			 */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/* One overlay for all revisions of the transmitter-control table arguments. */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
};

/* Execute the UNIPHY/LVTMA/DVO transmitter-control table for @action.
 * @lane_num/@lane_set are only consumed for the SETUP_VSEMPH action
 * (DP voltage-swing/pre-emphasis programming).
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check.
		   the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* Pick the command table that drives this transmitter type. */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			/* NOTE(review): igp_lane_info is initialized to 0 and
			 * never set in this function, so the lane-select
			 * branches below are effectively dead here — kept for
			 * parity with the original IGP code path.
			 */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if
				   (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
args.v4.ucLaneNum = dp_lane_count; 972 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 973 args.v4.ucLaneNum = 8; 974 else 975 args.v4.ucLaneNum = 4; 976 977 if (dig->linkb) 978 args.v4.acConfig.ucLinkSel = 1; 979 if (dig_encoder & 1) 980 args.v4.acConfig.ucEncoderSel = 1; 981 982 /* Select the PLL for the PHY 983 * DP PHY should be clocked from external src if there is 984 * one. 985 */ 986 /* On DCE5 DCPLL usually generates the DP ref clock */ 987 if (is_dp) { 988 if (adev->clock.dp_extclk) 989 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK; 990 else 991 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL; 992 } else 993 args.v4.acConfig.ucRefClkSource = pll_id; 994 995 switch (amdgpu_encoder->encoder_id) { 996 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 997 args.v4.acConfig.ucTransmitterSel = 0; 998 break; 999 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1000 args.v4.acConfig.ucTransmitterSel = 1; 1001 break; 1002 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1003 args.v4.acConfig.ucTransmitterSel = 2; 1004 break; 1005 } 1006 1007 if (is_dp) 1008 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */ 1009 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1010 if (dig->coherent_mode) 1011 args.v4.acConfig.fCoherentMode = 1; 1012 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1013 args.v4.acConfig.fDualLinkConnector = 1; 1014 } 1015 break; 1016 case 5: 1017 args.v5.ucAction = action; 1018 if (is_dp) 1019 args.v5.usSymClock = cpu_to_le16(dp_clock / 10); 1020 else 1021 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1022 1023 switch (amdgpu_encoder->encoder_id) { 1024 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1025 if (dig->linkb) 1026 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1027 else 1028 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1029 break; 1030 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1031 if (dig->linkb) 1032 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1033 else 1034 
args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1035 break; 1036 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1037 if (dig->linkb) 1038 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1039 else 1040 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1041 break; 1042 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1043 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1044 break; 1045 } 1046 if (is_dp) 1047 args.v5.ucLaneNum = dp_lane_count; 1048 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1049 args.v5.ucLaneNum = 8; 1050 else 1051 args.v5.ucLaneNum = 4; 1052 args.v5.ucConnObjId = connector_object_id; 1053 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1054 1055 if (is_dp && adev->clock.dp_extclk) 1056 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK; 1057 else 1058 args.v5.asConfig.ucPhyClkSrcId = pll_id; 1059 1060 if (is_dp) 1061 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */ 1062 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1063 if (dig->coherent_mode) 1064 args.v5.asConfig.ucCoherentMode = 1; 1065 } 1066 if (hpd_id == AMDGPU_HPD_NONE) 1067 args.v5.asConfig.ucHPDSel = 0; 1068 else 1069 args.v5.asConfig.ucHPDSel = hpd_id + 1; 1070 args.v5.ucDigEncoderSel = 1 << dig_encoder; 1071 args.v5.ucDPLaneSet = lane_set; 1072 break; 1073 default: 1074 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 1075 break; 1076 } 1077 break; 1078 default: 1079 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 1080 break; 1081 } 1082 1083 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1084 } 1085 1086 bool 1087 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector, 1088 int action) 1089 { 1090 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1091 struct drm_device *dev = amdgpu_connector->base.dev; 1092 struct amdgpu_device *adev = dev->dev_private; 1093 union dig_transmitter_control args; 1094 int index = 
GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 1095 uint8_t frev, crev; 1096 1097 if (connector->connector_type != DRM_MODE_CONNECTOR_eDP) 1098 goto done; 1099 1100 if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) && 1101 (action != ATOM_TRANSMITTER_ACTION_POWER_OFF)) 1102 goto done; 1103 1104 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1105 goto done; 1106 1107 memset(&args, 0, sizeof(args)); 1108 1109 args.v1.ucAction = action; 1110 1111 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1112 1113 /* wait for the panel to power up */ 1114 if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) { 1115 int i; 1116 1117 for (i = 0; i < 300; i++) { 1118 if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd)) 1119 return true; 1120 mdelay(1); 1121 } 1122 return false; 1123 } 1124 done: 1125 return true; 1126 } 1127 1128 union external_encoder_control { 1129 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1; 1130 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3; 1131 }; 1132 1133 static void 1134 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder, 1135 struct drm_encoder *ext_encoder, 1136 int action) 1137 { 1138 struct drm_device *dev = encoder->dev; 1139 struct amdgpu_device *adev = dev->dev_private; 1140 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1141 struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder); 1142 union external_encoder_control args; 1143 struct drm_connector *connector; 1144 int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl); 1145 u8 frev, crev; 1146 int dp_clock = 0; 1147 int dp_lane_count = 0; 1148 int connector_object_id = 0; 1149 u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 1150 1151 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT) 1152 connector = amdgpu_get_connector_for_encoder_init(encoder); 1153 else 1154 connector = 
amdgpu_get_connector_for_encoder(encoder); 1155 1156 if (connector) { 1157 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1158 struct amdgpu_connector_atom_dig *dig_connector = 1159 amdgpu_connector->con_priv; 1160 1161 dp_clock = dig_connector->dp_clock; 1162 dp_lane_count = dig_connector->dp_lane_count; 1163 connector_object_id = 1164 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 1165 } 1166 1167 memset(&args, 0, sizeof(args)); 1168 1169 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1170 return; 1171 1172 switch (frev) { 1173 case 1: 1174 /* no params on frev 1 */ 1175 break; 1176 case 2: 1177 switch (crev) { 1178 case 1: 1179 case 2: 1180 args.v1.sDigEncoder.ucAction = action; 1181 args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1182 args.v1.sDigEncoder.ucEncoderMode = 1183 amdgpu_atombios_encoder_get_encoder_mode(encoder); 1184 1185 if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) { 1186 if (dp_clock == 270000) 1187 args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ; 1188 args.v1.sDigEncoder.ucLaneNum = dp_lane_count; 1189 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1190 args.v1.sDigEncoder.ucLaneNum = 8; 1191 else 1192 args.v1.sDigEncoder.ucLaneNum = 4; 1193 break; 1194 case 3: 1195 args.v3.sExtEncoder.ucAction = action; 1196 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT) 1197 args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id); 1198 else 1199 args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1200 args.v3.sExtEncoder.ucEncoderMode = 1201 amdgpu_atombios_encoder_get_encoder_mode(encoder); 1202 1203 if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) { 1204 if (dp_clock == 270000) 1205 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ; 1206 else if (dp_clock == 540000) 1207 
args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ; 1208 args.v3.sExtEncoder.ucLaneNum = dp_lane_count; 1209 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1210 args.v3.sExtEncoder.ucLaneNum = 8; 1211 else 1212 args.v3.sExtEncoder.ucLaneNum = 4; 1213 switch (ext_enum) { 1214 case GRAPH_OBJECT_ENUM_ID1: 1215 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1; 1216 break; 1217 case GRAPH_OBJECT_ENUM_ID2: 1218 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2; 1219 break; 1220 case GRAPH_OBJECT_ENUM_ID3: 1221 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3; 1222 break; 1223 } 1224 args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 1225 break; 1226 default: 1227 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1228 return; 1229 } 1230 break; 1231 default: 1232 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1233 return; 1234 } 1235 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1236 } 1237 1238 static void 1239 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action) 1240 { 1241 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1242 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1243 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1244 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1245 struct amdgpu_connector *amdgpu_connector = NULL; 1246 struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL; 1247 1248 if (connector) { 1249 amdgpu_connector = to_amdgpu_connector(connector); 1250 amdgpu_dig_connector = amdgpu_connector->con_priv; 1251 } 1252 1253 if (action == ATOM_ENABLE) { 1254 if (!connector) 1255 dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE; 1256 else 1257 dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector); 1258 1259 /* setup and 
enable the encoder */ 1260 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0); 1261 amdgpu_atombios_encoder_setup_dig_encoder(encoder, 1262 ATOM_ENCODER_CMD_SETUP_PANEL_MODE, 1263 dig->panel_mode); 1264 if (ext_encoder) 1265 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1266 EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP); 1267 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1268 connector) { 1269 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 1270 amdgpu_atombios_encoder_set_edp_panel_power(connector, 1271 ATOM_TRANSMITTER_ACTION_POWER_ON); 1272 amdgpu_dig_connector->edp_on = true; 1273 } 1274 } 1275 /* enable the transmitter */ 1276 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1277 ATOM_TRANSMITTER_ACTION_ENABLE, 1278 0, 0); 1279 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1280 connector) { 1281 /* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */ 1282 amdgpu_atombios_dp_link_train(encoder, connector); 1283 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0); 1284 } 1285 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 1286 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level); 1287 if (ext_encoder) 1288 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE); 1289 } else { 1290 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1291 connector) 1292 amdgpu_atombios_encoder_setup_dig_encoder(encoder, 1293 ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0); 1294 if (ext_encoder) 1295 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE); 1296 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 1297 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1298 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0); 1299 1300 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1301 
connector) 1302 amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3); 1303 /* disable the transmitter */ 1304 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1305 ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0); 1306 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1307 connector) { 1308 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 1309 amdgpu_atombios_encoder_set_edp_panel_power(connector, 1310 ATOM_TRANSMITTER_ACTION_POWER_OFF); 1311 amdgpu_dig_connector->edp_on = false; 1312 } 1313 } 1314 } 1315 } 1316 1317 void 1318 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode) 1319 { 1320 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1321 1322 DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n", 1323 amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices, 1324 amdgpu_encoder->active_device); 1325 switch (amdgpu_encoder->encoder_id) { 1326 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1327 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1328 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1329 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1330 switch (mode) { 1331 case DRM_MODE_DPMS_ON: 1332 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE); 1333 break; 1334 case DRM_MODE_DPMS_STANDBY: 1335 case DRM_MODE_DPMS_SUSPEND: 1336 case DRM_MODE_DPMS_OFF: 1337 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE); 1338 break; 1339 } 1340 break; 1341 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1342 switch (mode) { 1343 case DRM_MODE_DPMS_ON: 1344 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE); 1345 break; 1346 case DRM_MODE_DPMS_STANDBY: 1347 case DRM_MODE_DPMS_SUSPEND: 1348 case DRM_MODE_DPMS_OFF: 1349 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE); 1350 break; 1351 } 1352 break; 1353 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1354 switch (mode) { 1355 case DRM_MODE_DPMS_ON: 1356 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE); 1357 break; 1358 
case DRM_MODE_DPMS_STANDBY: 1359 case DRM_MODE_DPMS_SUSPEND: 1360 case DRM_MODE_DPMS_OFF: 1361 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE); 1362 break; 1363 } 1364 break; 1365 default: 1366 return; 1367 } 1368 } 1369 1370 union crtc_source_param { 1371 SELECT_CRTC_SOURCE_PS_ALLOCATION v1; 1372 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2; 1373 SELECT_CRTC_SOURCE_PARAMETERS_V3 v3; 1374 }; 1375 1376 void 1377 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1378 { 1379 struct drm_device *dev = encoder->dev; 1380 struct amdgpu_device *adev = dev->dev_private; 1381 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1382 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1383 union crtc_source_param args; 1384 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1385 uint8_t frev, crev; 1386 struct amdgpu_encoder_atom_dig *dig; 1387 1388 memset(&args, 0, sizeof(args)); 1389 1390 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1391 return; 1392 1393 switch (frev) { 1394 case 1: 1395 switch (crev) { 1396 case 1: 1397 default: 1398 args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1399 switch (amdgpu_encoder->encoder_id) { 1400 case ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1401 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1402 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1403 break; 1404 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1405 case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1406 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1407 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1408 else 1409 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1410 break; 1411 case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1412 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1413 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1414 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1415 break; 1416 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1417 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1418 if (amdgpu_encoder->active_device & 
(ATOM_DEVICE_TV_SUPPORT)) 1419 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1420 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1421 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1422 else 1423 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1424 break; 1425 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1426 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1427 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1428 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1429 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1430 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1431 else 1432 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1433 break; 1434 } 1435 break; 1436 case 2: 1437 args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1438 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1439 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1440 1441 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1442 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1443 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1444 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1445 else 1446 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1447 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1448 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1449 } else { 1450 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1451 } 1452 switch (amdgpu_encoder->encoder_id) { 1453 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1454 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1455 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1456 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1457 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1458 dig = amdgpu_encoder->enc_priv; 1459 switch (dig->dig_encoder) { 1460 case 0: 1461 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1462 break; 1463 case 1: 1464 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1465 break; 1466 case 2: 1467 args.v2.ucEncoderID = 
ASIC_INT_DIG3_ENCODER_ID; 1468 break; 1469 case 3: 1470 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1471 break; 1472 case 4: 1473 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1474 break; 1475 case 5: 1476 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1477 break; 1478 case 6: 1479 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1480 break; 1481 } 1482 break; 1483 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1484 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1485 break; 1486 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1487 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1488 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1489 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1490 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1491 else 1492 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1493 break; 1494 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1495 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1496 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1497 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1498 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1499 else 1500 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1501 break; 1502 } 1503 break; 1504 case 3: 1505 args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1506 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1507 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1508 1509 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1510 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1511 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1512 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1513 else 1514 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1515 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1516 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1517 } else { 1518 args.v2.ucEncodeMode = 
amdgpu_atombios_encoder_get_encoder_mode(encoder); 1519 } 1520 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1521 switch (amdgpu_encoder->encoder_id) { 1522 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1523 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1524 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1525 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1526 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1527 dig = amdgpu_encoder->enc_priv; 1528 switch (dig->dig_encoder) { 1529 case 0: 1530 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1531 break; 1532 case 1: 1533 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1534 break; 1535 case 2: 1536 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1537 break; 1538 case 3: 1539 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1540 break; 1541 case 4: 1542 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1543 break; 1544 case 5: 1545 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1546 break; 1547 case 6: 1548 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1549 break; 1550 } 1551 break; 1552 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1553 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1554 break; 1555 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1556 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1557 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1558 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1559 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1560 else 1561 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1562 break; 1563 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1564 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1565 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1566 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1567 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1568 else 1569 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1570 break; 1571 } 1572 break; 1573 } 1574 break; 1575 default: 1576 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 
1577 return; 1578 } 1579 1580 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1581 } 1582 1583 /* This only needs to be called once at startup */ 1584 void 1585 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1586 { 1587 struct drm_device *dev = adev->ddev; 1588 struct drm_encoder *encoder; 1589 1590 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1591 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1592 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1593 1594 switch (amdgpu_encoder->encoder_id) { 1595 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1596 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1597 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1598 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1599 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT, 1600 0, 0); 1601 break; 1602 } 1603 1604 if (ext_encoder) 1605 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1606 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT); 1607 } 1608 } 1609 1610 static bool 1611 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder, 1612 struct drm_connector *connector) 1613 { 1614 struct drm_device *dev = encoder->dev; 1615 struct amdgpu_device *adev = dev->dev_private; 1616 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1617 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1618 1619 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT | 1620 ATOM_DEVICE_CV_SUPPORT | 1621 ATOM_DEVICE_CRT_SUPPORT)) { 1622 DAC_LOAD_DETECTION_PS_ALLOCATION args; 1623 int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection); 1624 uint8_t frev, crev; 1625 1626 memset(&args, 0, sizeof(args)); 1627 1628 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1629 return false; 1630 1631 args.sDacload.ucMisc = 0; 1632 1633 if ((amdgpu_encoder->encoder_id == 
ENCODER_OBJECT_ID_INTERNAL_DAC1) || 1634 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1)) 1635 args.sDacload.ucDacType = ATOM_DAC_A; 1636 else 1637 args.sDacload.ucDacType = ATOM_DAC_B; 1638 1639 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) 1640 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT); 1641 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) 1642 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT); 1643 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1644 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT); 1645 if (crev >= 3) 1646 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1647 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1648 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT); 1649 if (crev >= 3) 1650 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1651 } 1652 1653 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1654 1655 return true; 1656 } else 1657 return false; 1658 } 1659 1660 enum drm_connector_status 1661 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder, 1662 struct drm_connector *connector) 1663 { 1664 struct drm_device *dev = encoder->dev; 1665 struct amdgpu_device *adev = dev->dev_private; 1666 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1667 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1668 uint32_t bios_0_scratch; 1669 1670 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) { 1671 DRM_DEBUG_KMS("detect returned false \n"); 1672 return connector_status_unknown; 1673 } 1674 1675 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1676 1677 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1678 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1679 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1680 return connector_status_connected; 1681 } 1682 if 
(amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1683 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1684 return connector_status_connected; 1685 } 1686 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1687 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1688 return connector_status_connected; 1689 } 1690 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1691 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1692 return connector_status_connected; /* CTV */ 1693 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1694 return connector_status_connected; /* STV */ 1695 } 1696 return connector_status_disconnected; 1697 } 1698 1699 enum drm_connector_status 1700 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder, 1701 struct drm_connector *connector) 1702 { 1703 struct drm_device *dev = encoder->dev; 1704 struct amdgpu_device *adev = dev->dev_private; 1705 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1706 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1707 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1708 u32 bios_0_scratch; 1709 1710 if (!ext_encoder) 1711 return connector_status_unknown; 1712 1713 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0) 1714 return connector_status_unknown; 1715 1716 /* load detect on the dp bridge */ 1717 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1718 EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION); 1719 1720 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1721 1722 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1723 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1724 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1725 return connector_status_connected; 1726 } 1727 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1728 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1729 return 
connector_status_connected; 1730 } 1731 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1732 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1733 return connector_status_connected; 1734 } 1735 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1736 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1737 return connector_status_connected; /* CTV */ 1738 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1739 return connector_status_connected; /* STV */ 1740 } 1741 return connector_status_disconnected; 1742 } 1743 1744 void 1745 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder) 1746 { 1747 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1748 1749 if (ext_encoder) 1750 /* ddc_setup on the dp bridge */ 1751 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1752 EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP); 1753 1754 } 1755 1756 void 1757 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, 1758 struct drm_encoder *encoder, 1759 bool connected) 1760 { 1761 struct drm_device *dev = connector->dev; 1762 struct amdgpu_device *adev = dev->dev_private; 1763 struct amdgpu_connector *amdgpu_connector = 1764 to_amdgpu_connector(connector); 1765 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1766 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; 1767 1768 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1769 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3); 1770 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6); 1771 1772 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) && 1773 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) { 1774 if (connected) { 1775 DRM_DEBUG_KMS("LCD1 connected\n"); 1776 bios_0_scratch |= ATOM_S0_LCD1; 1777 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE; 1778 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1; 1779 } else { 1780 DRM_DEBUG_KMS("LCD1 disconnected\n"); 1781 bios_0_scratch &= 
~ATOM_S0_LCD1; /* continues "bios_0_scratch &=" from the preceding line */
			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
		}
	}
	/* For each device type below: when both the encoder and the connector
	 * support the device, set the detect/active/acc-request bits in the
	 * BIOS scratch words on connect and clear them on disconnect. */
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT1 connected\n");
			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
		} else {
			DRM_DEBUG_KMS("CRT1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	/* Write the updated device-status words back to the BIOS scratch
	 * registers. */
	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}

/* Overlay of the two LVDS_Info table layouts a VBIOS may provide; which one
 * is valid is determined by the table revision from the data header. */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};

/**
 * amdgpu_atombios_encoder_get_lcd_info - parse the VBIOS LVDS_Info table
 * @encoder: the LCD encoder to fetch panel data for
 *
 * Reads the native panel timing, power-sequencing delay, misc flags,
 * spread-spectrum id and optional patch records (fake EDID, panel
 * resolution) out of the AtomBIOS LVDS_Info data table, and also copies the
 * native mode into @encoder->native_mode.
 *
 * Returns a kzalloc()ed struct amdgpu_encoder_atom_dig owned by the caller,
 * or NULL if the table header cannot be parsed or allocation fails.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	/* ENUM_ID field of the encoder enum; used below to pick link A/B. */
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		/* Table fields are little-endian; convert each one.
		 * usPixClk is presumably stored in 10 kHz units, hence the
		 * *10 to get the kHz value drm modes use — TODO confirm. */
		lvds->native_mode.clock =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		/* The table stores blanking/offset/width; build the absolute
		 * total/sync_start/sync_end values drm expects from them. */
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		/* Panel power-sequencing off delay, in milliseconds. */
		lvds->panel_pwr_delay =
		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		/* Translate the table's misc-info bits into drm mode flags. */
		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		/* Physical panel dimensions (mm). */
		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		/* ENUM_ID 2 selects the second link (link B) of the encoder. */
		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			/* Older tables (frev 1, crev < 2) store an absolute
			 * offset into the BIOS image; newer ones store an
			 * offset relative to this table's data_offset. */
			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			/* Walk the variable-length record list; the first
			 * byte of each record is its type tag. */
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						/* Allocate at least one full EDID block.
						 * NOTE(review): if ucFakeEDIDLength <
						 * EDID_LENGTH, the buffer tail stays
						 * uninitialized (kmalloc + short memcpy)
						 * — confirm whether kzalloc is wanted. */
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);
						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								/* NOTE(review): overwrites any
								 * previously stored hardcoded EDID
								 * without freeing it — possible
								 * leak if set more than once;
								 * verify against callers. */
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					/* Advance past the payload; the +2 presumably
					 * covers the type and length bytes preceding
					 * the EDID string — TODO confirm layout. */
					record += fake_edid_record->ucFakeEDIDLength ?
						fake_edid_record->ucFakeEDIDLength + 2 :
						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					/* NOTE(review): usHSize/usVSize are used
					 * without le16_to_cpu(), unlike every other
					 * 16-bit table field here — looks like a
					 * big-endian bug; confirm. */
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					/* Unknown record type: we cannot know its
					 * size, so stop walking the list. */
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}

/**
 * amdgpu_atombios_encoder_get_dig_info - allocate per-encoder DIG state
 * @amdgpu_encoder: encoder to create the state for
 *
 * Allocates and initializes a struct amdgpu_encoder_atom_dig with coherent
 * mode enabled, no DIG encoder assigned yet (-1), and linkb chosen from the
 * encoder's ENUM_ID (2 means link B).
 *
 * Returns the kzalloc()ed struct owned by the caller, or NULL on allocation
 * failure.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
{
	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

	if (!dig)
		return NULL;

	/* coherent mode by default */
	dig->coherent_mode = true;
	/* No DIG block assigned yet; set when the encoder is brought up. */
	dig->dig_encoder = -1;

	/* ENUM_ID 2 selects the second link (link B) of the encoder. */
	if (encoder_enum == 2)
		dig->linkb = true;
	else
		dig->linkb = false;

	return dig;
}