Lines Matching +full:lvds +full:-encoder (drivers/gpu/drm/amd/amdgpu/atombios_encoders.c)

2  * Copyright 2007-11 Advanced Micro Devices, Inc.
74 struct drm_device *dev = amdgpu_encoder->base.dev;
77 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
87 struct drm_encoder *encoder = &amdgpu_encoder->base;
88 struct drm_device *dev = amdgpu_encoder->base.dev;
92 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
95 if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
96 amdgpu_encoder->enc_priv) {
97 dig = amdgpu_encoder->enc_priv;
98 dig->backlight_level = level;
99 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
101 switch (amdgpu_encoder->encoder_id) {
107 if (dig->backlight_level == 0)
108 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
111 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
113 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
128 if (bd->props.brightness < 0)
130 else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
133 level = bd->props.brightness;
141 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
153 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
154 struct drm_device *dev = amdgpu_encoder->base.dev;
168 struct drm_device *dev = amdgpu_encoder->base.dev;
179 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
180 (adev->pdev->device == 0x6741))
183 if (!amdgpu_encoder->enc_priv)
186 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
204 "amdgpu_bl%d", dev->primary->index);
205 bd = backlight_device_register(bl_name, drm_connector->kdev,
212 pdata->encoder = amdgpu_encoder;
214 dig = amdgpu_encoder->enc_priv;
215 dig->bl_dev = bd;
217 bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
218 bd->props.power = BACKLIGHT_POWER_ON;
237 struct drm_device *dev = amdgpu_encoder->base.dev;
242 if (!amdgpu_encoder->enc_priv)
245 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
248 dig = amdgpu_encoder->enc_priv;
249 bd = dig->bl_dev;
250 dig->bl_dev = NULL;
259 DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
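
These backlight fragments (file lines 74-259) appear to come from amdgpu's atombios_encoders.c and follow the standard Linux backlight-class pattern: clamp the requested level, program it through the ATOM transmitter calls, and expose it to userspace with backlight_device_register(). Below is a minimal sketch of that registration pattern for orientation; every example_* identifier and driver_data is made up, and it is not the driver's actual code.

	/*
	 * Sketch only.  The driver's real ops also drive the ATOM
	 * transmitter tables; this shows just the backlight-class glue.
	 */
	#include <linux/backlight.h>
	#include <linux/err.h>
	#include <drm/drm_connector.h>

	static int example_bl_update_status(struct backlight_device *bd)
	{
		/* the real driver clamps bd->props.brightness to
		 * [0, AMDGPU_MAX_BL_LEVEL] (file lines 128-133) and then
		 * programs the level into the hardware
		 */
		return 0;
	}

	static int example_bl_get_brightness(struct backlight_device *bd)
	{
		return bd->props.brightness;
	}

	static const struct backlight_ops example_bl_ops = {
		.get_brightness = example_bl_get_brightness,
		.update_status  = example_bl_update_status,
	};

	static void example_register_backlight(struct drm_connector *drm_connector,
					       void *driver_data)
	{
		struct backlight_properties props = {
			.type		= BACKLIGHT_RAW,
			.max_brightness	= 255,	/* AMDGPU_MAX_BL_LEVEL in the driver */
		};
		struct backlight_device *bd;

		/* the driver names it "amdgpu_bl%d" after the DRM minor (file line 204) */
		bd = backlight_device_register("example_bl", drm_connector->kdev,
					       driver_data, &example_bl_ops, &props);
		if (IS_ERR(bd))
			return;

		/* seed the initial state, as in file lines 217-218 */
		bd->props.brightness = props.max_brightness;
		bd->props.power = BACKLIGHT_POWER_ON;
		backlight_update_status(bd);
	}
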
263 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
265 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
266 switch (amdgpu_encoder->encoder_id) {
278 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
282 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
284 /* set the active encoder to connector routing */
285 amdgpu_encoder_set_active_device(encoder);
289 if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
290 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
291 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
294 if (mode->crtc_vsync_start == mode->crtc_vdisplay)
295 adjusted_mode->crtc_vsync_start++;
298 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
299 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
300 else if (amdgpu_encoder->rmx_type != RMX_OFF)
301 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
303 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
304 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
305 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
313 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
315 struct drm_device *dev = encoder->dev;
317 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
323 switch (amdgpu_encoder->encoder_id) {
336 args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
338 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
342 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
346 if (encoder->crtc) {
347 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
348 bpc = amdgpu_crtc->bpc;
376 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
378 struct drm_device *dev = encoder->dev;
380 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
387 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
397 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
405 args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
409 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
415 args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
421 args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
423 args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
435 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
438 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
440 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
446 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
450 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
451 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
454 connector = amdgpu_get_connector_for_encoder(encoder);
456 * the connectors tied to the encoder.
459 connector = amdgpu_get_connector_for_encoder_init(encoder);
462 switch (connector->connector_type) {
464 case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
466 if (amdgpu_connector->use_digital &&
467 (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
469 else if (connector->display_info.is_hdmi &&
470 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
472 else if (amdgpu_connector->use_digital)
476 } else if (amdgpu_connector->use_digital) {
486 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
488 else if (connector->display_info.is_hdmi &&
489 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
499 dig_connector = amdgpu_connector->con_priv;
500 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
501 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
504 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
506 else if (connector->display_info.is_hdmi &&
507 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
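
The block above (file lines 438-507) is where the ATOM encoder mode is chosen from the connector type, the sink's capabilities and the audio setting. A condensed, hedged paraphrase of that decision follows; the parameters stand in for the dp_sink_type, use_digital and display_info.is_hdmi/audio checks, and the TV/CV paths are omitted.

	/* Sketch only, not the verbatim amdgpu_atombios_encoder_get_encoder_mode(). */
	static int example_pick_encoder_mode(int connector_type, bool dp_sink,
					     bool use_digital, bool hdmi_audio)
	{
		if (dp_sink)				/* DP bridge, or DP/eDP sink */
			return ATOM_ENCODER_MODE_DP;
		if (connector_type == DRM_MODE_CONNECTOR_LVDS)
			return ATOM_ENCODER_MODE_LVDS;
		if (connector_type == DRM_MODE_CONNECTOR_VGA ||
		    connector_type == DRM_MODE_CONNECTOR_DVIA ||
		    !use_digital)			/* analog side of DVI-I/HDMI-B */
			return ATOM_ENCODER_MODE_CRT;
		return hdmi_audio ? ATOM_ENCODER_MODE_HDMI /* HDMI sink, audio on/auto */
				  : ATOM_ENCODER_MODE_DVI;
	}
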
528 * DIG Encoder/Transmitter Setup
531 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
533 * - 6 DIG encoder blocks.
534 * - DIG to PHY mapping is hardcoded
543 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
545 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
546 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
547 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
548 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
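
For orientation, the hardcoded DIG-to-PHY mapping the comment refers to is conventionally DIG1/DIG2 on UNIPHY0, DIG3/DIG4 on UNIPHY1 and DIG5/DIG6 on UNIPHY2, with odd DIGs driving link A (or A+B for dual link) and even DIGs driving link B. Expressed as a small illustrative table (a sketch, not a structure the driver uses; the same routing is derived from dig->dig_encoder and dig->linkb in the functions below):

	#include <linux/types.h>

	struct example_dig_phy_route {
		u8   dig;	/* DIG encoder block, 0-based (becomes ucDigSel)     */
		u8   uniphy;	/* UNIPHY transmitter block 0/1/2                    */
		bool linkb;	/* true = link B, false = link A (or A+B dual link)  */
	};

	static const struct example_dig_phy_route example_routes[] = {
		{ 0, 0, false },	/* DIG1 -> UNIPHY0 link A (A+B when dual link) */
		{ 1, 0, true  },	/* DIG2 -> UNIPHY0 link B                      */
		{ 2, 1, false },	/* DIG3 -> UNIPHY1 link A (A+B)                */
		{ 3, 1, true  },	/* DIG4 -> UNIPHY1 link B                      */
		{ 4, 2, false },	/* DIG5 -> UNIPHY2 link A (A+B)                */
		{ 5, 2, true  },	/* DIG6 -> UNIPHY2 link B                      */
	};

The dig index is what setup_dig_encoder() writes into acConfig.ucDigSel (file lines 649 and 677), while linkb selects the link in setup_dig_transmitter() (file lines 847, 878, 926, ...).
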
560 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
563 struct drm_device *dev = encoder->dev;
565 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
566 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
567 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
578 amdgpu_connector->con_priv;
580 dp_clock = dig_connector->dp_clock;
581 dp_lane_count = dig_connector->dp_lane_count;
582 hpd_id = amdgpu_connector->hpd.hpd;
585 /* no dig encoder assigned */
586 if (dig->dig_encoder == -1)
591 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
599 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
603 args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
607 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
614 switch (amdgpu_encoder->encoder_id) {
626 if (dig->linkb)
634 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
638 args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
642 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
649 args.v3.acConfig.ucDigSel = dig->dig_encoder;
650 args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
654 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
658 args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
662 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
677 args.v4.acConfig.ucDigSel = dig->dig_encoder;
678 args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
689 args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
693 args.v5.asStreamParam.ucDigId = dig->dig_encoder;
695 amdgpu_atombios_encoder_get_encoder_mode(encoder);
698 else if (amdgpu_dig_monitor_is_duallink(encoder,
699 amdgpu_encoder->pixel_clock))
704 cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
706 amdgpu_atombios_encoder_get_bpc(encoder);
718 args.v5.asCmdParam.ucDigId = dig->dig_encoder;
735 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
749 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
752 struct drm_device *dev = encoder->dev;
754 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
755 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
765 int dig_encoder = dig->dig_encoder;
769 connector = amdgpu_get_connector_for_encoder_init(encoder);
770 /* just needed to avoid bailing in the encoder check. the encoder
775 connector = amdgpu_get_connector_for_encoder(encoder);
780 amdgpu_connector->con_priv;
782 hpd_id = amdgpu_connector->hpd.hpd;
783 dp_clock = dig_connector->dp_clock;
784 dp_lane_count = dig_connector->dp_lane_count;
786 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
789 if (encoder->crtc) {
790 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
791 pll_id = amdgpu_crtc->pll_id;
794 /* no dig encoder assigned */
795 if (dig_encoder == -1)
798 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
803 switch (amdgpu_encoder->encoder_id) {
818 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
834 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
835 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
837 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
847 if (dig->linkb)
854 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
855 if (dig->coherent_mode)
857 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
871 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
872 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
874 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
878 if (dig->linkb)
881 switch (amdgpu_encoder->encoder_id) {
896 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
897 if (dig->coherent_mode)
899 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
913 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
914 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
916 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
921 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
926 if (dig->linkb)
936 if (is_dp && adev->clock.dp_extclk)
941 switch (amdgpu_encoder->encoder_id) {
955 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
956 if (dig->coherent_mode)
958 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
972 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
973 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
975 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
980 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
985 if (dig->linkb)
996 if (adev->clock.dp_extclk)
1003 switch (amdgpu_encoder->encoder_id) {
1017 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1018 if (dig->coherent_mode)
1020 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1029 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1031 switch (amdgpu_encoder->encoder_id) {
1033 if (dig->linkb)
1039 if (dig->linkb)
1045 if (dig->linkb)
1056 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1061 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1063 if (is_dp && adev->clock.dp_extclk)
1070 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1071 if (dig->coherent_mode)
1086 args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1088 switch (amdgpu_encoder->encoder_id) {
1090 if (dig->linkb)
1096 if (dig->linkb)
1102 if (dig->linkb)
1113 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1121 args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1139 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1147 struct drm_device *dev = amdgpu_connector->base.dev;
1153 if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1160 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1167 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1174 if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1190 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1194 struct drm_device *dev = encoder->dev;
1196 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1205 u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1208 connector = amdgpu_get_connector_for_encoder_init(encoder);
1210 connector = amdgpu_get_connector_for_encoder(encoder);
1215 amdgpu_connector->con_priv;
1217 dp_clock = dig_connector->dp_clock;
1218 dp_lane_count = dig_connector->dp_lane_count;
1220 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1225 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1237 args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1239 amdgpu_atombios_encoder_get_encoder_mode(encoder);
1245 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1255 args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1257 amdgpu_atombios_encoder_get_encoder_mode(encoder);
1265 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1280 args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1291 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1295 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1297 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1298 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1299 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1300 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1306 amdgpu_dig_connector = amdgpu_connector->con_priv;
1311 dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1313 dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1315 /* setup and enable the encoder */
1316 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1317 amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1319 dig->panel_mode);
1321 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1323 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1325 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1328 amdgpu_dig_connector->edp_on = true;
1332 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1335 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1338 amdgpu_atombios_dp_link_train(encoder, connector);
1339 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1341 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1342 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1344 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1346 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1348 amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1351 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1352 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1353 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1356 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1360 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1362 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1364 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1367 amdgpu_dig_connector->edp_on = false;
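
Read in order, the setup_dig() fragments above (file lines 1295-1367) give the usual DIG bring-up sequence; ATOM_DISABLE is roughly the mirror image (DP video off, backlight off, transmitter disable, eDP panel power off). A hedged sketch of the ATOM_ENABLE path, omitting the external-encoder and error-handling branches and assuming the driver's internal headers:

	/* Sketch only; argument shapes follow the calls visible above. */
	static void example_dig_enable(struct drm_encoder *encoder,
				       struct drm_connector *connector,
				       struct amdgpu_encoder_atom_dig *dig)
	{
		/* 1. pick the DP panel mode (internal panel vs. external DP bridge) */
		dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* 2. program the DIG encoder block */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);

		/* 3. eDP: switch panel power on before lighting up the link */
		/* 4. enable the UNIPHY transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);

		/* 5. DP only: link-train, then start the video stream */
		amdgpu_atombios_dp_link_train(encoder, connector);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);

		/* 6. LCD panels: restore the saved backlight level (file line 1342) */
	}
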
1374 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1376 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1378 DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1379 amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1380 amdgpu_encoder->active_device);
1381 switch (amdgpu_encoder->encoder_id) {
1388 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1393 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1400 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1405 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1412 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1417 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1433 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1435 struct drm_device *dev = encoder->dev;
1437 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1438 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1446 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1454 args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1455 switch (amdgpu_encoder->encoder_id) {
1462 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1474 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1476 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1483 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1485 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1493 args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1494 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1495 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1497 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1499 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1502 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1503 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1506 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1508 switch (amdgpu_encoder->encoder_id) {
1514 dig = amdgpu_encoder->enc_priv;
1515 switch (dig->dig_encoder) {
1543 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1545 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1551 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1553 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1561 args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1562 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1563 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1565 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1567 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1570 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1571 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1574 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1576 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1577 switch (amdgpu_encoder->encoder_id) {
1583 dig = amdgpu_encoder->enc_priv;
1584 switch (dig->dig_encoder) {
1612 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1614 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1620 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1622 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1636 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1644 struct drm_encoder *encoder;
1646 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1647 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1648 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1650 switch (amdgpu_encoder->encoder_id) {
1655 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1661 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1667 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1670 struct drm_device *dev = encoder->dev;
1672 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1675 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1684 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1689 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1690 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1695 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1697 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1699 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1703 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1709 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1717 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1720 struct drm_device *dev = encoder->dev;
1722 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1726 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1733 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1734 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1738 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1742 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1746 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1756 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1759 struct drm_device *dev = encoder->dev;
1761 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1763 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1769 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1773 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1778 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1779 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1783 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1787 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1791 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1801 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1803 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1807 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1814 struct drm_encoder *encoder,
1817 struct drm_device *dev = connector->dev;
1821 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1828 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1829 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1842 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1843 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1856 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1857 (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1870 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1871 (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1884 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1885 (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1898 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1899 (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1912 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1913 (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1926 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1927 (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1940 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1941 (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1966 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1968 struct drm_device *dev = encoder->base.dev;
1970 struct amdgpu_mode_info *mode_info = &adev->mode_info;
1975 struct amdgpu_encoder_atom_dig *lvds = NULL;
1976 int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1978 if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1981 (union lvds_info *)(mode_info->atom_context->bios + data_offset);
1982 lvds =
1985 if (!lvds)
1988 lvds->native_mode.clock =
1989 le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1990 lvds->native_mode.hdisplay =
1991 le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1992 lvds->native_mode.vdisplay =
1993 le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1994 lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1995 le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1996 lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1997 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1998 lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
1999 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2000 lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2001 le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2002 lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2003 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2004 lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2005 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2006 lvds->panel_pwr_delay =
2007 le16_to_cpu(lvds_info->info.usOffDelayInMs);
2008 lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2010 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2012 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2014 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2016 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2018 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2020 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2022 lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2023 lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2026 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2028 lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2030 encoder->native_mode = lvds->native_mode;
2033 lvds->linkb = true;
2035 lvds->linkb = false;
2038 if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2046 record = (u8 *)(mode_info->atom_context->bios +
2047 le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2050 record = (u8 *)(mode_info->atom_context->bios +
2052 le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2066 if (fake_edid_record->ucFakeEDIDLength) {
2070 if (fake_edid_record->ucFakeEDIDLength == 128)
2071 edid_size = fake_edid_record->ucFakeEDIDLength;
2073 edid_size = fake_edid_record->ucFakeEDIDLength * 128;
2074 edid = drm_edid_alloc(fake_edid_record->ucFakeEDIDString, edid_size);
2076 adev->mode_info.bios_hardcoded_edid = edid;
2089 lvds->native_mode.width_mm = panel_res_record->usHSize;
2090 lvds->native_mode.height_mm = panel_res_record->usVSize;
2103 return lvds;
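
As a worked example of the native-mode arithmetic in amdgpu_atombios_encoder_get_lcd_info() above (file lines 1988-2005): the ATOM LCD_Info table stores active size plus blanking/offset/width values, and usPixClk is in 10 kHz units. For a hypothetical 1080p panel reporting usPixClk=14850, usHActive=1920, usHBlanking_Time=280, usHSyncOffset=88, usHSyncWidth=44, usVActive=1080, usVBlanking_Time=45, usVSyncOffset=4 and usVSyncWidth=5, the code yields:

	clock       = 14850 * 10 = 148500 kHz (148.5 MHz)
	hdisplay    = 1920
	hsync_start = 1920 + 88  = 2008
	hsync_end   = 2008 + 44  = 2052
	htotal      = 1920 + 280 = 2200
	vdisplay    = 1080
	vsync_start = 1080 + 4   = 1084
	vsync_end   = 1084 + 5   = 1089
	vtotal      = 1080 + 45  = 1125
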
2109 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2116 dig->coherent_mode = true;
2117 dig->dig_encoder = -1;
2120 dig->linkb = true;
2122 dig->linkb = false;