1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * Copyright (c) 2019-2022 MediaTek Inc. 4 * Copyright (c) 2022 BayLibre 5 */ 6 7 #include <drm/display/drm_dp_aux_bus.h> 8 #include <drm/display/drm_dp.h> 9 #include <drm/display/drm_dp_helper.h> 10 #include <drm/drm_atomic_helper.h> 11 #include <drm/drm_bridge.h> 12 #include <drm/drm_crtc.h> 13 #include <drm/drm_edid.h> 14 #include <drm/drm_of.h> 15 #include <drm/drm_panel.h> 16 #include <drm/drm_print.h> 17 #include <drm/drm_probe_helper.h> 18 #include <linux/arm-smccc.h> 19 #include <linux/clk.h> 20 #include <linux/delay.h> 21 #include <linux/errno.h> 22 #include <linux/kernel.h> 23 #include <linux/media-bus-format.h> 24 #include <linux/nvmem-consumer.h> 25 #include <linux/of.h> 26 #include <linux/of_irq.h> 27 #include <linux/of_platform.h> 28 #include <linux/phy/phy.h> 29 #include <linux/platform_device.h> 30 #include <linux/pm_runtime.h> 31 #include <linux/regmap.h> 32 #include <linux/soc/mediatek/mtk_sip_svc.h> 33 #include <sound/hdmi-codec.h> 34 #include <video/videomode.h> 35 36 #include "mtk_dp_reg.h" 37 38 #define MTK_DP_SIP_CONTROL_AARCH32 MTK_SIP_SMC_CMD(0x523) 39 #define MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE (BIT(0) | BIT(5)) 40 #define MTK_DP_SIP_ATF_VIDEO_UNMUTE BIT(5) 41 42 #define MTK_DP_THREAD_CABLE_STATE_CHG BIT(0) 43 #define MTK_DP_THREAD_HPD_EVENT BIT(1) 44 45 #define MTK_DP_4P1T 4 46 #define MTK_DP_HDE 2 47 #define MTK_DP_PIX_PER_ADDR 2 48 #define MTK_DP_AUX_WAIT_REPLY_COUNT 20 49 #define MTK_DP_TBC_BUF_READ_START_ADDR 0x8 50 #define MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY 5 51 #define MTK_DP_TRAIN_DOWNSCALE_RETRY 10 52 #define MTK_DP_VERSION 0x11 53 #define MTK_DP_SDP_AUI 0x4 54 55 enum { 56 MTK_DP_CAL_GLB_BIAS_TRIM = 0, 57 MTK_DP_CAL_CLKTX_IMPSE, 58 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0, 59 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1, 60 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2, 61 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3, 62 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0, 63 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1, 64 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2, 65 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3, 66 MTK_DP_CAL_MAX, 67 }; 68 69 struct mtk_dp_train_info { 70 bool sink_ssc; 71 bool cable_plugged_in; 72 /* link_rate is in multiple of 0.27Gbps */ 73 int link_rate; 74 int lane_count; 75 unsigned int channel_eq_pattern; 76 }; 77 78 struct mtk_dp_audio_cfg { 79 bool detect_monitor; 80 int sad_count; 81 int sample_rate; 82 int word_length_bits; 83 int channels; 84 }; 85 86 struct mtk_dp_info { 87 enum dp_pixelformat format; 88 struct videomode vm; 89 struct mtk_dp_audio_cfg audio_cur_cfg; 90 }; 91 92 struct mtk_dp_efuse_fmt { 93 unsigned short idx; 94 unsigned short shift; 95 unsigned short mask; 96 unsigned short min_val; 97 unsigned short max_val; 98 unsigned short default_val; 99 }; 100 101 struct mtk_dp { 102 bool enabled; 103 bool need_debounce; 104 int irq; 105 u8 max_lanes; 106 u8 max_linkrate; 107 u8 rx_cap[DP_RECEIVER_CAP_SIZE]; 108 u32 cal_data[MTK_DP_CAL_MAX]; 109 u32 irq_thread_handle; 110 /* irq_thread_lock is used to protect irq_thread_handle */ 111 spinlock_t irq_thread_lock; 112 113 struct device *dev; 114 struct drm_bridge bridge; 115 struct drm_bridge *next_bridge; 116 struct drm_connector *conn; 117 struct drm_device *drm_dev; 118 struct drm_dp_aux aux; 119 120 const struct mtk_dp_data *data; 121 struct mtk_dp_info info; 122 struct mtk_dp_train_info train_info; 123 124 struct platform_device *phy_dev; 125 struct phy *phy; 126 struct regmap *regs; 127 struct timer_list debounce_timer; 128 129 /* For audio */ 130 bool audio_enable; 131 hdmi_codec_plugged_cb plugged_cb; 132 struct platform_device 
*audio_pdev; 133 134 struct device *codec_dev; 135 /* protect the plugged_cb as it's used in both bridge ops and audio */ 136 struct mutex update_plugged_status_lock; 137 }; 138 139 struct mtk_dp_data { 140 int bridge_type; 141 unsigned int smc_cmd; 142 const struct mtk_dp_efuse_fmt *efuse_fmt; 143 bool audio_supported; 144 bool audio_pkt_in_hblank_area; 145 u16 audio_m_div2_bit; 146 }; 147 148 static const struct mtk_dp_efuse_fmt mt8188_dp_efuse_fmt[MTK_DP_CAL_MAX] = { 149 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 150 .idx = 0, 151 .shift = 10, 152 .mask = 0x1f, 153 .min_val = 1, 154 .max_val = 0x1e, 155 .default_val = 0xf, 156 }, 157 [MTK_DP_CAL_CLKTX_IMPSE] = { 158 .idx = 0, 159 .shift = 15, 160 .mask = 0xf, 161 .min_val = 1, 162 .max_val = 0xe, 163 .default_val = 0x8, 164 }, 165 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 166 .idx = 1, 167 .shift = 0, 168 .mask = 0xf, 169 .min_val = 1, 170 .max_val = 0xe, 171 .default_val = 0x8, 172 }, 173 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 174 .idx = 1, 175 .shift = 8, 176 .mask = 0xf, 177 .min_val = 1, 178 .max_val = 0xe, 179 .default_val = 0x8, 180 }, 181 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 182 .idx = 1, 183 .shift = 16, 184 .mask = 0xf, 185 .min_val = 1, 186 .max_val = 0xe, 187 .default_val = 0x8, 188 }, 189 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 190 .idx = 1, 191 .shift = 24, 192 .mask = 0xf, 193 .min_val = 1, 194 .max_val = 0xe, 195 .default_val = 0x8, 196 }, 197 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 198 .idx = 1, 199 .shift = 4, 200 .mask = 0xf, 201 .min_val = 1, 202 .max_val = 0xe, 203 .default_val = 0x8, 204 }, 205 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 206 .idx = 1, 207 .shift = 12, 208 .mask = 0xf, 209 .min_val = 1, 210 .max_val = 0xe, 211 .default_val = 0x8, 212 }, 213 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 214 .idx = 1, 215 .shift = 20, 216 .mask = 0xf, 217 .min_val = 1, 218 .max_val = 0xe, 219 .default_val = 0x8, 220 }, 221 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 222 .idx = 1, 223 .shift = 28, 224 .mask = 0xf, 225 .min_val = 1, 226 .max_val = 0xe, 227 .default_val = 0x8, 228 }, 229 }; 230 231 static const struct mtk_dp_efuse_fmt mt8195_edp_efuse_fmt[MTK_DP_CAL_MAX] = { 232 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 233 .idx = 3, 234 .shift = 27, 235 .mask = 0x1f, 236 .min_val = 1, 237 .max_val = 0x1e, 238 .default_val = 0xf, 239 }, 240 [MTK_DP_CAL_CLKTX_IMPSE] = { 241 .idx = 0, 242 .shift = 9, 243 .mask = 0xf, 244 .min_val = 1, 245 .max_val = 0xe, 246 .default_val = 0x8, 247 }, 248 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 249 .idx = 2, 250 .shift = 28, 251 .mask = 0xf, 252 .min_val = 1, 253 .max_val = 0xe, 254 .default_val = 0x8, 255 }, 256 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 257 .idx = 2, 258 .shift = 20, 259 .mask = 0xf, 260 .min_val = 1, 261 .max_val = 0xe, 262 .default_val = 0x8, 263 }, 264 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 265 .idx = 2, 266 .shift = 12, 267 .mask = 0xf, 268 .min_val = 1, 269 .max_val = 0xe, 270 .default_val = 0x8, 271 }, 272 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 273 .idx = 2, 274 .shift = 4, 275 .mask = 0xf, 276 .min_val = 1, 277 .max_val = 0xe, 278 .default_val = 0x8, 279 }, 280 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 281 .idx = 2, 282 .shift = 24, 283 .mask = 0xf, 284 .min_val = 1, 285 .max_val = 0xe, 286 .default_val = 0x8, 287 }, 288 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 289 .idx = 2, 290 .shift = 16, 291 .mask = 0xf, 292 .min_val = 1, 293 .max_val = 0xe, 294 .default_val = 0x8, 295 }, 296 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 297 .idx = 2, 298 .shift = 8, 299 .mask = 0xf, 300 .min_val = 1, 301 .max_val = 0xe, 302 .default_val = 0x8, 303 }, 304 
[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 305 .idx = 2, 306 .shift = 0, 307 .mask = 0xf, 308 .min_val = 1, 309 .max_val = 0xe, 310 .default_val = 0x8, 311 }, 312 }; 313 314 static const struct mtk_dp_efuse_fmt mt8195_dp_efuse_fmt[MTK_DP_CAL_MAX] = { 315 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 316 .idx = 0, 317 .shift = 27, 318 .mask = 0x1f, 319 .min_val = 1, 320 .max_val = 0x1e, 321 .default_val = 0xf, 322 }, 323 [MTK_DP_CAL_CLKTX_IMPSE] = { 324 .idx = 0, 325 .shift = 13, 326 .mask = 0xf, 327 .min_val = 1, 328 .max_val = 0xe, 329 .default_val = 0x8, 330 }, 331 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 332 .idx = 1, 333 .shift = 28, 334 .mask = 0xf, 335 .min_val = 1, 336 .max_val = 0xe, 337 .default_val = 0x8, 338 }, 339 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 340 .idx = 1, 341 .shift = 20, 342 .mask = 0xf, 343 .min_val = 1, 344 .max_val = 0xe, 345 .default_val = 0x8, 346 }, 347 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 348 .idx = 1, 349 .shift = 12, 350 .mask = 0xf, 351 .min_val = 1, 352 .max_val = 0xe, 353 .default_val = 0x8, 354 }, 355 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 356 .idx = 1, 357 .shift = 4, 358 .mask = 0xf, 359 .min_val = 1, 360 .max_val = 0xe, 361 .default_val = 0x8, 362 }, 363 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 364 .idx = 1, 365 .shift = 24, 366 .mask = 0xf, 367 .min_val = 1, 368 .max_val = 0xe, 369 .default_val = 0x8, 370 }, 371 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 372 .idx = 1, 373 .shift = 16, 374 .mask = 0xf, 375 .min_val = 1, 376 .max_val = 0xe, 377 .default_val = 0x8, 378 }, 379 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 380 .idx = 1, 381 .shift = 8, 382 .mask = 0xf, 383 .min_val = 1, 384 .max_val = 0xe, 385 .default_val = 0x8, 386 }, 387 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 388 .idx = 1, 389 .shift = 0, 390 .mask = 0xf, 391 .min_val = 1, 392 .max_val = 0xe, 393 .default_val = 0x8, 394 }, 395 }; 396 397 static const struct regmap_config mtk_dp_regmap_config = { 398 .reg_bits = 32, 399 .val_bits = 32, 400 .reg_stride = 4, 401 .max_register = SEC_OFFSET + 0x90, 402 .name = "mtk-dp-registers", 403 }; 404 405 static struct mtk_dp *mtk_dp_from_bridge(struct drm_bridge *b) 406 { 407 return container_of(b, struct mtk_dp, bridge); 408 } 409 410 static u32 mtk_dp_read(struct mtk_dp *mtk_dp, u32 offset) 411 { 412 u32 read_val; 413 int ret; 414 415 ret = regmap_read(mtk_dp->regs, offset, &read_val); 416 if (ret) { 417 dev_err(mtk_dp->dev, "Failed to read register 0x%x: %d\n", 418 offset, ret); 419 return 0; 420 } 421 422 return read_val; 423 } 424 425 static int mtk_dp_write(struct mtk_dp *mtk_dp, u32 offset, u32 val) 426 { 427 int ret = regmap_write(mtk_dp->regs, offset, val); 428 429 if (ret) 430 dev_err(mtk_dp->dev, 431 "Failed to write register 0x%x with value 0x%x\n", 432 offset, val); 433 return ret; 434 } 435 436 static int mtk_dp_update_bits(struct mtk_dp *mtk_dp, u32 offset, 437 u32 val, u32 mask) 438 { 439 int ret = regmap_update_bits(mtk_dp->regs, offset, mask, val); 440 441 if (ret) 442 dev_err(mtk_dp->dev, 443 "Failed to update register 0x%x with value 0x%x, mask 0x%x\n", 444 offset, val, mask); 445 return ret; 446 } 447 448 static void mtk_dp_bulk_16bit_write(struct mtk_dp *mtk_dp, u32 offset, u8 *buf, 449 size_t length) 450 { 451 int i; 452 453 /* 2 bytes per register */ 454 for (i = 0; i < length; i += 2) { 455 u32 val = buf[i] | (i + 1 < length ? 
buf[i + 1] << 8 : 0); 456 457 if (mtk_dp_write(mtk_dp, offset + i * 2, val)) 458 return; 459 } 460 } 461 462 static void mtk_dp_msa_bypass_enable(struct mtk_dp *mtk_dp, bool enable) 463 { 464 u32 mask = HTOTAL_SEL_DP_ENC0_P0 | VTOTAL_SEL_DP_ENC0_P0 | 465 HSTART_SEL_DP_ENC0_P0 | VSTART_SEL_DP_ENC0_P0 | 466 HWIDTH_SEL_DP_ENC0_P0 | VHEIGHT_SEL_DP_ENC0_P0 | 467 HSP_SEL_DP_ENC0_P0 | HSW_SEL_DP_ENC0_P0 | 468 VSP_SEL_DP_ENC0_P0 | VSW_SEL_DP_ENC0_P0; 469 470 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, enable ? 0 : mask, mask); 471 } 472 473 static void mtk_dp_set_msa(struct mtk_dp *mtk_dp) 474 { 475 struct drm_display_mode mode; 476 struct videomode *vm = &mtk_dp->info.vm; 477 478 drm_display_mode_from_videomode(vm, &mode); 479 480 /* horizontal */ 481 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3010, 482 mode.htotal, HTOTAL_SW_DP_ENC0_P0_MASK); 483 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3018, 484 vm->hsync_len + vm->hback_porch, 485 HSTART_SW_DP_ENC0_P0_MASK); 486 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028, 487 vm->hsync_len, HSW_SW_DP_ENC0_P0_MASK); 488 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028, 489 0, HSP_SW_DP_ENC0_P0_MASK); 490 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3020, 491 vm->hactive, HWIDTH_SW_DP_ENC0_P0_MASK); 492 493 /* vertical */ 494 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3014, 495 mode.vtotal, VTOTAL_SW_DP_ENC0_P0_MASK); 496 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_301C, 497 vm->vsync_len + vm->vback_porch, 498 VSTART_SW_DP_ENC0_P0_MASK); 499 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C, 500 vm->vsync_len, VSW_SW_DP_ENC0_P0_MASK); 501 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C, 502 0, VSP_SW_DP_ENC0_P0_MASK); 503 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3024, 504 vm->vactive, VHEIGHT_SW_DP_ENC0_P0_MASK); 505 506 /* horizontal */ 507 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3064, 508 vm->hactive, HDE_NUM_LAST_DP_ENC0_P0_MASK); 509 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3154, 510 mode.htotal, PGEN_HTOTAL_DP_ENC0_P0_MASK); 511 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3158, 512 vm->hfront_porch, 513 PGEN_HSYNC_RISING_DP_ENC0_P0_MASK); 514 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_315C, 515 vm->hsync_len, 516 PGEN_HSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK); 517 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3160, 518 vm->hback_porch + vm->hsync_len, 519 PGEN_HFDE_START_DP_ENC0_P0_MASK); 520 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3164, 521 vm->hactive, 522 PGEN_HFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK); 523 524 /* vertical */ 525 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3168, 526 mode.vtotal, 527 PGEN_VTOTAL_DP_ENC0_P0_MASK); 528 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_316C, 529 vm->vfront_porch, 530 PGEN_VSYNC_RISING_DP_ENC0_P0_MASK); 531 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3170, 532 vm->vsync_len, 533 PGEN_VSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK); 534 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3174, 535 vm->vback_porch + vm->vsync_len, 536 PGEN_VFDE_START_DP_ENC0_P0_MASK); 537 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3178, 538 vm->vactive, 539 PGEN_VFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK); 540 } 541 542 static int mtk_dp_set_color_format(struct mtk_dp *mtk_dp, 543 enum dp_pixelformat color_format) 544 { 545 u32 val; 546 u32 misc0_color; 547 548 switch (color_format) { 549 case DP_PIXELFORMAT_YUV422: 550 val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_YCBCR422; 551 misc0_color = DP_COLOR_FORMAT_YCbCr422; 552 break; 553 case DP_PIXELFORMAT_RGB: 554 val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_RGB; 555 misc0_color = DP_COLOR_FORMAT_RGB; 556 break; 557 default: 558 drm_warn(mtk_dp->drm_dev, 
"Unsupported color format: %d\n", 559 color_format); 560 return -EINVAL; 561 } 562 563 /* update MISC0 */ 564 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034, 565 misc0_color, 566 DP_TEST_COLOR_FORMAT_MASK); 567 568 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 569 val, PIXEL_ENCODE_FORMAT_DP_ENC0_P0_MASK); 570 return 0; 571 } 572 573 static void mtk_dp_set_color_depth(struct mtk_dp *mtk_dp) 574 { 575 /* Only support 8 bits currently */ 576 /* Update MISC0 */ 577 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034, 578 DP_MSA_MISC_8_BPC, DP_TEST_BIT_DEPTH_MASK); 579 580 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 581 VIDEO_COLOR_DEPTH_DP_ENC0_P0_8BIT, 582 VIDEO_COLOR_DEPTH_DP_ENC0_P0_MASK); 583 } 584 585 static void mtk_dp_config_mn_mode(struct mtk_dp *mtk_dp) 586 { 587 /* 0: hw mode, 1: sw mode */ 588 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 589 0, VIDEO_M_CODE_SEL_DP_ENC0_P0_MASK); 590 } 591 592 static void mtk_dp_set_sram_read_start(struct mtk_dp *mtk_dp, u32 val) 593 { 594 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 595 val, SRAM_START_READ_THRD_DP_ENC0_P0_MASK); 596 } 597 598 static void mtk_dp_setup_encoder(struct mtk_dp *mtk_dp) 599 { 600 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 601 VIDEO_MN_GEN_EN_DP_ENC0_P0, 602 VIDEO_MN_GEN_EN_DP_ENC0_P0); 603 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040, 604 SDP_DOWN_CNT_DP_ENC0_P0_VAL, 605 SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK); 606 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, 607 SDP_DOWN_CNT_IN_HBLANK_DP_ENC1_P0_VAL, 608 SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK); 609 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3300, 610 VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_VAL << 8, 611 VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_MASK); 612 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, 613 FIFO_READ_START_POINT_DP_ENC1_P0_VAL << 12, 614 FIFO_READ_START_POINT_DP_ENC1_P0_MASK); 615 mtk_dp_write(mtk_dp, MTK_DP_ENC1_P0_3368, DP_ENC1_P0_3368_VAL); 616 } 617 618 static void mtk_dp_pg_enable(struct mtk_dp *mtk_dp, bool enable) 619 { 620 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3038, 621 enable ? 
VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK : 0, 622 VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK); 623 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31B0, 624 PGEN_PATTERN_SEL_VAL << 4, PGEN_PATTERN_SEL_MASK); 625 } 626 627 static void mtk_dp_audio_setup_channels(struct mtk_dp *mtk_dp, 628 struct mtk_dp_audio_cfg *cfg) 629 { 630 u32 channel_enable_bits; 631 632 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3324, 633 AUDIO_SOURCE_MUX_DP_ENC1_P0_DPRX, 634 AUDIO_SOURCE_MUX_DP_ENC1_P0_MASK); 635 636 /* audio channel count change reset */ 637 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 638 DP_ENC_DUMMY_RW_1, DP_ENC_DUMMY_RW_1); 639 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3304, 640 AU_PRTY_REGEN_DP_ENC1_P0_MASK | 641 AU_CH_STS_REGEN_DP_ENC1_P0_MASK | 642 AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK, 643 AU_PRTY_REGEN_DP_ENC1_P0_MASK | 644 AU_CH_STS_REGEN_DP_ENC1_P0_MASK | 645 AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK); 646 647 switch (cfg->channels) { 648 case 2: 649 channel_enable_bits = AUDIO_2CH_SEL_DP_ENC0_P0_MASK | 650 AUDIO_2CH_EN_DP_ENC0_P0_MASK; 651 break; 652 case 8: 653 default: 654 channel_enable_bits = AUDIO_8CH_SEL_DP_ENC0_P0_MASK | 655 AUDIO_8CH_EN_DP_ENC0_P0_MASK; 656 break; 657 } 658 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088, 659 channel_enable_bits | AU_EN_DP_ENC0_P0, 660 AUDIO_2CH_SEL_DP_ENC0_P0_MASK | 661 AUDIO_2CH_EN_DP_ENC0_P0_MASK | 662 AUDIO_8CH_SEL_DP_ENC0_P0_MASK | 663 AUDIO_8CH_EN_DP_ENC0_P0_MASK | 664 AU_EN_DP_ENC0_P0); 665 666 /* audio channel count change reset */ 667 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 0, DP_ENC_DUMMY_RW_1); 668 669 /* enable audio reset */ 670 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 671 DP_ENC_DUMMY_RW_1_AUDIO_RST_EN, 672 DP_ENC_DUMMY_RW_1_AUDIO_RST_EN); 673 } 674 675 static void mtk_dp_audio_channel_status_set(struct mtk_dp *mtk_dp, 676 struct mtk_dp_audio_cfg *cfg) 677 { 678 struct snd_aes_iec958 iec = { 0 }; 679 680 switch (cfg->sample_rate) { 681 case 32000: 682 iec.status[3] = IEC958_AES3_CON_FS_32000; 683 break; 684 case 44100: 685 iec.status[3] = IEC958_AES3_CON_FS_44100; 686 break; 687 case 48000: 688 iec.status[3] = IEC958_AES3_CON_FS_48000; 689 break; 690 case 88200: 691 iec.status[3] = IEC958_AES3_CON_FS_88200; 692 break; 693 case 96000: 694 iec.status[3] = IEC958_AES3_CON_FS_96000; 695 break; 696 case 192000: 697 iec.status[3] = IEC958_AES3_CON_FS_192000; 698 break; 699 default: 700 iec.status[3] = IEC958_AES3_CON_FS_NOTID; 701 break; 702 } 703 704 switch (cfg->word_length_bits) { 705 case 16: 706 iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16; 707 break; 708 case 20: 709 iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16 | 710 IEC958_AES4_CON_MAX_WORDLEN_24; 711 break; 712 case 24: 713 iec.status[4] = IEC958_AES4_CON_WORDLEN_24_20 | 714 IEC958_AES4_CON_MAX_WORDLEN_24; 715 break; 716 default: 717 iec.status[4] = IEC958_AES4_CON_WORDLEN_NOTID; 718 } 719 720 /* IEC 60958 consumer channel status bits */ 721 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_308C, 722 0, CH_STATUS_0_DP_ENC0_P0_MASK); 723 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3090, 724 iec.status[3] << 8, CH_STATUS_1_DP_ENC0_P0_MASK); 725 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3094, 726 iec.status[4], CH_STATUS_2_DP_ENC0_P0_MASK); 727 } 728 729 static void mtk_dp_audio_sdp_asp_set_channels(struct mtk_dp *mtk_dp, 730 int channels) 731 { 732 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_312C, 733 (min(8, channels) - 1) << 8, 734 ASP_HB2_DP_ENC0_P0_MASK | ASP_HB3_DP_ENC0_P0_MASK); 735 } 736 737 static void mtk_dp_audio_set_divider(struct mtk_dp *mtk_dp) 738 { 739 mtk_dp_update_bits(mtk_dp, 
MTK_DP_ENC0_P0_30BC, 740 mtk_dp->data->audio_m_div2_bit, 741 AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_MASK); 742 } 743 744 static void mtk_dp_sdp_trigger_aui(struct mtk_dp *mtk_dp) 745 { 746 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280, 747 MTK_DP_SDP_AUI, SDP_PACKET_TYPE_DP_ENC1_P0_MASK); 748 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280, 749 SDP_PACKET_W_DP_ENC1_P0, SDP_PACKET_W_DP_ENC1_P0); 750 } 751 752 static void mtk_dp_sdp_set_data(struct mtk_dp *mtk_dp, u8 *data_bytes) 753 { 754 mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_ENC1_P0_3200, 755 data_bytes, 0x10); 756 } 757 758 static void mtk_dp_sdp_set_header_aui(struct mtk_dp *mtk_dp, 759 struct dp_sdp_header *header) 760 { 761 u32 db_addr = MTK_DP_ENC0_P0_30D8 + (MTK_DP_SDP_AUI - 1) * 8; 762 763 mtk_dp_bulk_16bit_write(mtk_dp, db_addr, (u8 *)header, 4); 764 } 765 766 static void mtk_dp_disable_sdp_aui(struct mtk_dp *mtk_dp) 767 { 768 /* Disable periodic send */ 769 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 0, 770 0xff << ((MTK_DP_ENC0_P0_30A8 & 3) * 8)); 771 } 772 773 static void mtk_dp_setup_sdp_aui(struct mtk_dp *mtk_dp, 774 struct dp_sdp *sdp) 775 { 776 u32 shift; 777 778 mtk_dp_sdp_set_data(mtk_dp, sdp->db); 779 mtk_dp_sdp_set_header_aui(mtk_dp, &sdp->sdp_header); 780 mtk_dp_disable_sdp_aui(mtk_dp); 781 782 shift = (MTK_DP_ENC0_P0_30A8 & 3) * 8; 783 784 mtk_dp_sdp_trigger_aui(mtk_dp); 785 /* Enable periodic sending */ 786 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 787 0x05 << shift, 0xff << shift); 788 } 789 790 static void mtk_dp_aux_irq_clear(struct mtk_dp *mtk_dp) 791 { 792 mtk_dp_write(mtk_dp, MTK_DP_AUX_P0_3640, DP_AUX_P0_3640_VAL); 793 } 794 795 static void mtk_dp_aux_set_cmd(struct mtk_dp *mtk_dp, u8 cmd, u32 addr) 796 { 797 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3644, 798 cmd, MCU_REQUEST_COMMAND_AUX_TX_P0_MASK); 799 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3648, 800 addr, MCU_REQUEST_ADDRESS_LSB_AUX_TX_P0_MASK); 801 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_364C, 802 addr >> 16, MCU_REQUEST_ADDRESS_MSB_AUX_TX_P0_MASK); 803 } 804 805 static void mtk_dp_aux_clear_fifo(struct mtk_dp *mtk_dp) 806 { 807 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650, 808 MCU_ACK_TRAN_COMPLETE_AUX_TX_P0, 809 MCU_ACK_TRAN_COMPLETE_AUX_TX_P0 | 810 PHY_FIFO_RST_AUX_TX_P0_MASK | 811 MCU_REQ_DATA_NUM_AUX_TX_P0_MASK); 812 } 813 814 static void mtk_dp_aux_request_ready(struct mtk_dp *mtk_dp) 815 { 816 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3630, 817 AUX_TX_REQUEST_READY_AUX_TX_P0, 818 AUX_TX_REQUEST_READY_AUX_TX_P0); 819 } 820 821 static void mtk_dp_aux_fill_write_fifo(struct mtk_dp *mtk_dp, u8 *buf, 822 size_t length) 823 { 824 mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_AUX_P0_3708, buf, length); 825 } 826 827 static void mtk_dp_aux_read_rx_fifo(struct mtk_dp *mtk_dp, u8 *buf, 828 size_t length, int read_delay) 829 { 830 int read_pos; 831 832 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620, 833 0, AUX_RD_MODE_AUX_TX_P0_MASK); 834 835 for (read_pos = 0; read_pos < length; read_pos++) { 836 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620, 837 AUX_RX_FIFO_READ_PULSE_TX_P0, 838 AUX_RX_FIFO_READ_PULSE_TX_P0); 839 840 /* Hardware needs time to update the data */ 841 usleep_range(read_delay, read_delay * 2); 842 buf[read_pos] = (u8)(mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3620) & 843 AUX_RX_FIFO_READ_DATA_AUX_TX_P0_MASK); 844 } 845 } 846 847 static void mtk_dp_aux_set_length(struct mtk_dp *mtk_dp, size_t length) 848 { 849 if (length > 0) { 850 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650, 851 (length - 1) << 12, 852 
MCU_REQ_DATA_NUM_AUX_TX_P0_MASK); 853 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 854 0, 855 AUX_NO_LENGTH_AUX_TX_P0 | 856 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 857 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 858 } else { 859 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 860 AUX_NO_LENGTH_AUX_TX_P0, 861 AUX_NO_LENGTH_AUX_TX_P0 | 862 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 863 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 864 } 865 } 866 867 static int mtk_dp_aux_wait_for_completion(struct mtk_dp *mtk_dp, bool is_read) 868 { 869 int wait_reply = MTK_DP_AUX_WAIT_REPLY_COUNT; 870 871 while (--wait_reply) { 872 u32 aux_irq_status; 873 874 if (is_read) { 875 u32 fifo_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3618); 876 877 if (fifo_status & 878 (AUX_RX_FIFO_WRITE_POINTER_AUX_TX_P0_MASK | 879 AUX_RX_FIFO_FULL_AUX_TX_P0_MASK)) { 880 return 0; 881 } 882 } 883 884 aux_irq_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3640); 885 if (aux_irq_status & AUX_RX_AUX_RECV_COMPLETE_IRQ_AUX_TX_P0) 886 return 0; 887 888 if (aux_irq_status & AUX_400US_TIMEOUT_IRQ_AUX_TX_P0) 889 return -ETIMEDOUT; 890 891 /* Give the hardware a chance to reach completion before retrying */ 892 usleep_range(100, 500); 893 } 894 895 return -ETIMEDOUT; 896 } 897 898 static int mtk_dp_aux_do_transfer(struct mtk_dp *mtk_dp, bool is_read, u8 cmd, 899 u32 addr, u8 *buf, size_t length, u8 *reply_cmd) 900 { 901 int ret; 902 903 if (is_read && (length > DP_AUX_MAX_PAYLOAD_BYTES || 904 (cmd == DP_AUX_NATIVE_READ && !length))) 905 return -EINVAL; 906 907 if (!is_read) 908 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704, 909 AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0, 910 AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0); 911 912 /* We need to clear fifo and irq before sending commands to the sink device. */ 913 mtk_dp_aux_clear_fifo(mtk_dp); 914 mtk_dp_aux_irq_clear(mtk_dp); 915 916 mtk_dp_aux_set_cmd(mtk_dp, cmd, addr); 917 mtk_dp_aux_set_length(mtk_dp, length); 918 919 if (!is_read) { 920 if (length) 921 mtk_dp_aux_fill_write_fifo(mtk_dp, buf, length); 922 923 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704, 924 AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK, 925 AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK); 926 } 927 928 mtk_dp_aux_request_ready(mtk_dp); 929 930 /* Wait for feedback from sink device. 
*/ 931 ret = mtk_dp_aux_wait_for_completion(mtk_dp, is_read); 932 933 *reply_cmd = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3624) & 934 AUX_RX_REPLY_COMMAND_AUX_TX_P0_MASK; 935 936 if (ret) { 937 u32 phy_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3628) & 938 AUX_RX_PHY_STATE_AUX_TX_P0_MASK; 939 if (phy_status != AUX_RX_PHY_STATE_AUX_TX_P0_RX_IDLE) { 940 dev_err(mtk_dp->dev, 941 "AUX Rx Aux hang, need SW reset\n"); 942 return -EIO; 943 } 944 945 return -ETIMEDOUT; 946 } 947 948 if (!length) { 949 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 950 0, 951 AUX_NO_LENGTH_AUX_TX_P0 | 952 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 953 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 954 } else if (is_read) { 955 int read_delay; 956 957 if (cmd == (DP_AUX_I2C_READ | DP_AUX_I2C_MOT) || 958 cmd == DP_AUX_I2C_READ) 959 read_delay = 500; 960 else 961 read_delay = 100; 962 963 mtk_dp_aux_read_rx_fifo(mtk_dp, buf, length, read_delay); 964 } 965 966 return 0; 967 } 968 969 static void mtk_dp_set_swing_pre_emphasis(struct mtk_dp *mtk_dp, int lane_num, 970 int swing_val, int preemphasis) 971 { 972 u32 lane_shift = lane_num * DP_TX1_VOLT_SWING_SHIFT; 973 974 dev_dbg(mtk_dp->dev, 975 "link training: swing_val = 0x%x, pre-emphasis = 0x%x\n", 976 swing_val, preemphasis); 977 978 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 979 swing_val << (DP_TX0_VOLT_SWING_SHIFT + lane_shift), 980 DP_TX0_VOLT_SWING_MASK << lane_shift); 981 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 982 preemphasis << (DP_TX0_PRE_EMPH_SHIFT + lane_shift), 983 DP_TX0_PRE_EMPH_MASK << lane_shift); 984 } 985 986 static void mtk_dp_reset_swing_pre_emphasis(struct mtk_dp *mtk_dp) 987 { 988 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 989 0, 990 DP_TX0_VOLT_SWING_MASK | 991 DP_TX1_VOLT_SWING_MASK | 992 DP_TX2_VOLT_SWING_MASK | 993 DP_TX3_VOLT_SWING_MASK | 994 DP_TX0_PRE_EMPH_MASK | 995 DP_TX1_PRE_EMPH_MASK | 996 DP_TX2_PRE_EMPH_MASK | 997 DP_TX3_PRE_EMPH_MASK); 998 } 999 1000 static u32 mtk_dp_swirq_get_clear(struct mtk_dp *mtk_dp) 1001 { 1002 u32 irq_status = mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_35D0) & 1003 SW_IRQ_FINAL_STATUS_DP_TRANS_P0_MASK; 1004 1005 if (irq_status) { 1006 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8, 1007 irq_status, SW_IRQ_CLR_DP_TRANS_P0_MASK); 1008 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8, 1009 0, SW_IRQ_CLR_DP_TRANS_P0_MASK); 1010 } 1011 1012 return irq_status; 1013 } 1014 1015 static u32 mtk_dp_hwirq_get_clear(struct mtk_dp *mtk_dp) 1016 { 1017 u32 irq_status = (mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3418) & 1018 IRQ_STATUS_DP_TRANS_P0_MASK) >> 12; 1019 1020 if (irq_status) { 1021 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 1022 irq_status, IRQ_CLR_DP_TRANS_P0_MASK); 1023 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 1024 0, IRQ_CLR_DP_TRANS_P0_MASK); 1025 } 1026 1027 return irq_status; 1028 } 1029 1030 static void mtk_dp_hwirq_enable(struct mtk_dp *mtk_dp, bool enable) 1031 { 1032 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 1033 enable ? 
0 : 1034 IRQ_MASK_DP_TRANS_P0_DISC_IRQ | 1035 IRQ_MASK_DP_TRANS_P0_CONN_IRQ | 1036 IRQ_MASK_DP_TRANS_P0_INT_IRQ, 1037 IRQ_MASK_DP_TRANS_P0_MASK); 1038 } 1039 1040 static void mtk_dp_initialize_settings(struct mtk_dp *mtk_dp) 1041 { 1042 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_342C, 1043 XTAL_FREQ_DP_TRANS_P0_DEFAULT, 1044 XTAL_FREQ_DP_TRANS_P0_MASK); 1045 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3540, 1046 FEC_CLOCK_EN_MODE_DP_TRANS_P0, 1047 FEC_CLOCK_EN_MODE_DP_TRANS_P0); 1048 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31EC, 1049 AUDIO_CH_SRC_SEL_DP_ENC0_P0, 1050 AUDIO_CH_SRC_SEL_DP_ENC0_P0); 1051 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C, 1052 0, SDP_VSYNC_RISING_MASK_DP_ENC0_P0_MASK); 1053 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_IRQ_MASK, 1054 IRQ_MASK_AUX_TOP_IRQ, IRQ_MASK_AUX_TOP_IRQ); 1055 } 1056 1057 static void mtk_dp_initialize_hpd_detect_settings(struct mtk_dp *mtk_dp) 1058 { 1059 u32 val; 1060 /* Debounce threshold */ 1061 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 1062 8, HPD_DEB_THD_DP_TRANS_P0_MASK); 1063 1064 val = (HPD_INT_THD_DP_TRANS_P0_LOWER_500US | 1065 HPD_INT_THD_DP_TRANS_P0_UPPER_1100US) << 4; 1066 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 1067 val, HPD_INT_THD_DP_TRANS_P0_MASK); 1068 1069 /* 1070 * Connect threshold 1.5ms + 5 x 0.1ms = 2ms 1071 * Disconnect threshold 1.5ms + 5 x 0.1ms = 2ms 1072 */ 1073 val = (5 << 8) | (5 << 12); 1074 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 1075 val, 1076 HPD_DISC_THD_DP_TRANS_P0_MASK | 1077 HPD_CONN_THD_DP_TRANS_P0_MASK); 1078 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3430, 1079 HPD_INT_THD_ECO_DP_TRANS_P0_HIGH_BOUND_EXT, 1080 HPD_INT_THD_ECO_DP_TRANS_P0_MASK); 1081 } 1082 1083 static void mtk_dp_initialize_aux_settings(struct mtk_dp *mtk_dp) 1084 { 1085 /* modify timeout threshold = 0x1595 */ 1086 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_360C, 1087 AUX_TIMEOUT_THR_AUX_TX_P0_VAL, 1088 AUX_TIMEOUT_THR_AUX_TX_P0_MASK); 1089 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3658, 1090 0, AUX_TX_OV_EN_AUX_TX_P0_MASK); 1091 /* 25 for 26M */ 1092 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3634, 1093 AUX_TX_OVER_SAMPLE_RATE_FOR_26M << 8, 1094 AUX_TX_OVER_SAMPLE_RATE_AUX_TX_P0_MASK); 1095 /* 13 for 26M */ 1096 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3614, 1097 AUX_RX_UI_CNT_THR_AUX_FOR_26M, 1098 AUX_RX_UI_CNT_THR_AUX_TX_P0_MASK); 1099 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_37C8, 1100 MTK_ATOP_EN_AUX_TX_P0, 1101 MTK_ATOP_EN_AUX_TX_P0); 1102 1103 /* Set complete reply mode for AUX */ 1104 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3690, 1105 RX_REPLY_COMPLETE_MODE_AUX_TX_P0, 1106 RX_REPLY_COMPLETE_MODE_AUX_TX_P0); 1107 } 1108 1109 static void mtk_dp_initialize_digital_settings(struct mtk_dp *mtk_dp) 1110 { 1111 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C, 1112 0, VBID_VIDEO_MUTE_DP_ENC0_P0_MASK); 1113 1114 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3368, 1115 BS2BS_MODE_DP_ENC1_P0_VAL << 12, 1116 BS2BS_MODE_DP_ENC1_P0_MASK); 1117 1118 /* dp tx encoder reset all sw */ 1119 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 1120 DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0, 1121 DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0); 1122 1123 /* Wait for sw reset to complete */ 1124 usleep_range(1000, 5000); 1125 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 1126 0, DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0); 1127 } 1128 1129 static void mtk_dp_digital_sw_reset(struct mtk_dp *mtk_dp) 1130 { 1131 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C, 1132 DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0, 1133 DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0); 1134 1135 
/* Wait for sw reset to complete */ 1136 usleep_range(1000, 5000); 1137 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C, 1138 0, DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0); 1139 } 1140 1141 static void mtk_dp_set_lanes(struct mtk_dp *mtk_dp, int lanes) 1142 { 1143 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35F0, 1144 lanes == 0 ? 0 : DP_TRANS_DUMMY_RW_0, 1145 DP_TRANS_DUMMY_RW_0_MASK); 1146 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1147 lanes, LANE_NUM_DP_ENC0_P0_MASK); 1148 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_34A4, 1149 lanes << 2, LANE_NUM_DP_TRANS_P0_MASK); 1150 } 1151 1152 static void mtk_dp_get_calibration_data(struct mtk_dp *mtk_dp) 1153 { 1154 const struct mtk_dp_efuse_fmt *fmt; 1155 struct device *dev = mtk_dp->dev; 1156 struct nvmem_cell *cell; 1157 u32 *cal_data = mtk_dp->cal_data; 1158 u32 *buf; 1159 int i; 1160 size_t len; 1161 1162 cell = nvmem_cell_get(dev, "dp_calibration_data"); 1163 if (IS_ERR(cell)) { 1164 dev_warn(dev, "Failed to get nvmem cell dp_calibration_data\n"); 1165 goto use_default_val; 1166 } 1167 1168 buf = (u32 *)nvmem_cell_read(cell, &len); 1169 nvmem_cell_put(cell); 1170 1171 if (IS_ERR(buf) || ((len / sizeof(u32)) != 4)) { 1172 dev_warn(dev, "Failed to read nvmem_cell_read\n"); 1173 1174 if (!IS_ERR(buf)) 1175 kfree(buf); 1176 1177 goto use_default_val; 1178 } 1179 1180 for (i = 0; i < MTK_DP_CAL_MAX; i++) { 1181 fmt = &mtk_dp->data->efuse_fmt[i]; 1182 cal_data[i] = (buf[fmt->idx] >> fmt->shift) & fmt->mask; 1183 1184 if (cal_data[i] < fmt->min_val || cal_data[i] > fmt->max_val) { 1185 dev_warn(mtk_dp->dev, "Invalid efuse data, idx = %d\n", i); 1186 kfree(buf); 1187 goto use_default_val; 1188 } 1189 } 1190 kfree(buf); 1191 1192 return; 1193 1194 use_default_val: 1195 dev_warn(mtk_dp->dev, "Use default calibration data\n"); 1196 for (i = 0; i < MTK_DP_CAL_MAX; i++) 1197 cal_data[i] = mtk_dp->data->efuse_fmt[i].default_val; 1198 } 1199 1200 static void mtk_dp_set_calibration_data(struct mtk_dp *mtk_dp) 1201 { 1202 u32 *cal_data = mtk_dp->cal_data; 1203 1204 mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_DPAUX_TX, 1205 cal_data[MTK_DP_CAL_CLKTX_IMPSE] << 20, 1206 RG_CKM_PT0_CKTX_IMPSEL); 1207 mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_BIAS_GEN_00, 1208 cal_data[MTK_DP_CAL_GLB_BIAS_TRIM] << 16, 1209 RG_XTP_GLB_BIAS_INTR_CTRL); 1210 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0, 1211 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] << 12, 1212 RG_XTP_LN0_TX_IMPSEL_PMOS); 1213 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0, 1214 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] << 16, 1215 RG_XTP_LN0_TX_IMPSEL_NMOS); 1216 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1, 1217 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] << 12, 1218 RG_XTP_LN1_TX_IMPSEL_PMOS); 1219 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1, 1220 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] << 16, 1221 RG_XTP_LN1_TX_IMPSEL_NMOS); 1222 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2, 1223 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] << 12, 1224 RG_XTP_LN2_TX_IMPSEL_PMOS); 1225 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2, 1226 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] << 16, 1227 RG_XTP_LN2_TX_IMPSEL_NMOS); 1228 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3, 1229 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] << 12, 1230 RG_XTP_LN3_TX_IMPSEL_PMOS); 1231 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3, 1232 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] << 16, 1233 RG_XTP_LN3_TX_IMPSEL_NMOS); 1234 } 1235 1236 static int mtk_dp_phy_configure(struct mtk_dp *mtk_dp, 1237 u32 link_rate, int lane_count) 1238 { 1239 int ret; 1240 union phy_configure_opts phy_opts = 
{ 1241 .dp = { 1242 .link_rate = drm_dp_bw_code_to_link_rate(link_rate) / 100, 1243 .set_rate = 1, 1244 .lanes = lane_count, 1245 .set_lanes = 1, 1246 .ssc = mtk_dp->train_info.sink_ssc, 1247 } 1248 }; 1249 1250 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, DP_PWR_STATE_BANDGAP, 1251 DP_PWR_STATE_MASK); 1252 1253 ret = phy_configure(mtk_dp->phy, &phy_opts); 1254 if (ret) 1255 return ret; 1256 1257 mtk_dp_set_calibration_data(mtk_dp); 1258 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1259 DP_PWR_STATE_BANDGAP_TPLL_LANE, DP_PWR_STATE_MASK); 1260 1261 return 0; 1262 } 1263 1264 static void mtk_dp_set_idle_pattern(struct mtk_dp *mtk_dp, bool enable) 1265 { 1266 u32 val = POST_MISC_DATA_LANE0_OV_DP_TRANS_P0_MASK | 1267 POST_MISC_DATA_LANE1_OV_DP_TRANS_P0_MASK | 1268 POST_MISC_DATA_LANE2_OV_DP_TRANS_P0_MASK | 1269 POST_MISC_DATA_LANE3_OV_DP_TRANS_P0_MASK; 1270 1271 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3580, 1272 enable ? val : 0, val); 1273 } 1274 1275 static void mtk_dp_train_set_pattern(struct mtk_dp *mtk_dp, int pattern) 1276 { 1277 /* TPS1 */ 1278 if (pattern == 1) 1279 mtk_dp_set_idle_pattern(mtk_dp, false); 1280 1281 mtk_dp_update_bits(mtk_dp, 1282 MTK_DP_TRANS_P0_3400, 1283 pattern ? BIT(pattern - 1) << 12 : 0, 1284 PATTERN1_EN_DP_TRANS_P0_MASK | 1285 PATTERN2_EN_DP_TRANS_P0_MASK | 1286 PATTERN3_EN_DP_TRANS_P0_MASK | 1287 PATTERN4_EN_DP_TRANS_P0_MASK); 1288 } 1289 1290 static void mtk_dp_set_enhanced_frame_mode(struct mtk_dp *mtk_dp) 1291 { 1292 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1293 ENHANCED_FRAME_EN_DP_ENC0_P0, 1294 ENHANCED_FRAME_EN_DP_ENC0_P0); 1295 } 1296 1297 static void mtk_dp_training_set_scramble(struct mtk_dp *mtk_dp, bool enable) 1298 { 1299 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3404, 1300 enable ? DP_SCR_EN_DP_TRANS_P0_MASK : 0, 1301 DP_SCR_EN_DP_TRANS_P0_MASK); 1302 } 1303 1304 static void mtk_dp_video_mute(struct mtk_dp *mtk_dp, bool enable) 1305 { 1306 struct arm_smccc_res res; 1307 u32 val = VIDEO_MUTE_SEL_DP_ENC0_P0 | 1308 (enable ? VIDEO_MUTE_SW_DP_ENC0_P0 : 0); 1309 1310 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1311 val, 1312 VIDEO_MUTE_SEL_DP_ENC0_P0 | 1313 VIDEO_MUTE_SW_DP_ENC0_P0); 1314 1315 arm_smccc_smc(MTK_DP_SIP_CONTROL_AARCH32, 1316 mtk_dp->data->smc_cmd, enable, 1317 0, 0, 0, 0, 0, &res); 1318 1319 dev_dbg(mtk_dp->dev, "smc cmd: 0x%x, p1: %s, ret: 0x%lx-0x%lx\n", 1320 mtk_dp->data->smc_cmd, enable ? 
"enable" : "disable", res.a0, res.a1); 1321 } 1322 1323 static void mtk_dp_audio_mute(struct mtk_dp *mtk_dp, bool mute) 1324 { 1325 u32 val[3]; 1326 1327 if (mute) { 1328 val[0] = VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 | 1329 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0; 1330 val[1] = 0; 1331 val[2] = 0; 1332 } else { 1333 val[0] = 0; 1334 val[1] = AU_EN_DP_ENC0_P0; 1335 /* Send one every two frames */ 1336 val[2] = 0x0F; 1337 } 1338 1339 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, 1340 val[0], 1341 VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 | 1342 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0); 1343 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088, 1344 val[1], AU_EN_DP_ENC0_P0); 1345 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A4, 1346 val[2], AU_TS_CFG_DP_ENC0_P0_MASK); 1347 } 1348 1349 static void mtk_dp_aux_panel_poweron(struct mtk_dp *mtk_dp, bool pwron) 1350 { 1351 if (pwron) { 1352 /* power on aux */ 1353 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1354 DP_PWR_STATE_BANDGAP_TPLL_LANE, 1355 DP_PWR_STATE_MASK); 1356 1357 /* power on panel */ 1358 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0); 1359 usleep_range(2000, 5000); 1360 } else { 1361 /* power off panel */ 1362 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3); 1363 usleep_range(2000, 3000); 1364 1365 /* power off aux */ 1366 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1367 DP_PWR_STATE_BANDGAP_TPLL, 1368 DP_PWR_STATE_MASK); 1369 } 1370 } 1371 1372 static void mtk_dp_power_enable(struct mtk_dp *mtk_dp) 1373 { 1374 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE, 1375 0, SW_RST_B_PHYD); 1376 1377 /* Wait for power enable */ 1378 usleep_range(10, 200); 1379 1380 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE, 1381 SW_RST_B_PHYD, SW_RST_B_PHYD); 1382 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1383 DP_PWR_STATE_BANDGAP_TPLL, DP_PWR_STATE_MASK); 1384 mtk_dp_write(mtk_dp, MTK_DP_1040, 1385 RG_DPAUX_RX_VALID_DEGLITCH_EN | RG_XTP_GLB_CKDET_EN | 1386 RG_DPAUX_RX_EN); 1387 mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 0, DA_CKM_CKTX0_EN_FORCE_EN); 1388 } 1389 1390 static void mtk_dp_power_disable(struct mtk_dp *mtk_dp) 1391 { 1392 mtk_dp_write(mtk_dp, MTK_DP_TOP_PWR_STATE, 0); 1393 1394 mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 1395 DA_CKM_CKTX0_EN_FORCE_EN, DA_CKM_CKTX0_EN_FORCE_EN); 1396 1397 /* Disable RX */ 1398 mtk_dp_write(mtk_dp, MTK_DP_1040, 0); 1399 mtk_dp_write(mtk_dp, MTK_DP_TOP_MEM_PD, 1400 0x550 | FUSE_SEL | MEM_ISO_EN); 1401 } 1402 1403 static void mtk_dp_initialize_priv_data(struct mtk_dp *mtk_dp) 1404 { 1405 bool plugged_in = (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP); 1406 1407 mtk_dp->train_info.link_rate = DP_LINK_BW_5_4; 1408 mtk_dp->train_info.lane_count = mtk_dp->max_lanes; 1409 mtk_dp->train_info.cable_plugged_in = plugged_in; 1410 1411 mtk_dp->info.format = DP_PIXELFORMAT_RGB; 1412 memset(&mtk_dp->info.vm, 0, sizeof(struct videomode)); 1413 mtk_dp->audio_enable = false; 1414 } 1415 1416 static void mtk_dp_sdp_set_down_cnt_init(struct mtk_dp *mtk_dp, 1417 u32 sram_read_start) 1418 { 1419 u32 sdp_down_cnt_init = 0; 1420 struct drm_display_mode mode; 1421 struct videomode *vm = &mtk_dp->info.vm; 1422 1423 drm_display_mode_from_videomode(vm, &mode); 1424 1425 if (mode.clock > 0) 1426 sdp_down_cnt_init = sram_read_start * 1427 mtk_dp->train_info.link_rate * 2700 * 8 / 1428 (mode.clock * 4); 1429 1430 switch (mtk_dp->train_info.lane_count) { 1431 case 1: 1432 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x1A); 1433 break; 1434 case 2: 1435 /* case for LowResolution && High Audio Sample 
Rate */ 1436 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x10); 1437 sdp_down_cnt_init += mode.vtotal <= 525 ? 4 : 0; 1438 break; 1439 case 4: 1440 default: 1441 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 6); 1442 break; 1443 } 1444 1445 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040, 1446 sdp_down_cnt_init, 1447 SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK); 1448 } 1449 1450 static void mtk_dp_sdp_set_down_cnt_init_in_hblank(struct mtk_dp *mtk_dp) 1451 { 1452 int pix_clk_mhz; 1453 u32 dc_offset; 1454 u32 spd_down_cnt_init = 0; 1455 struct drm_display_mode mode; 1456 struct videomode *vm = &mtk_dp->info.vm; 1457 1458 drm_display_mode_from_videomode(vm, &mode); 1459 1460 pix_clk_mhz = mtk_dp->info.format == DP_PIXELFORMAT_YUV420 ? 1461 mode.clock / 2000 : mode.clock / 1000; 1462 1463 switch (mtk_dp->train_info.lane_count) { 1464 case 1: 1465 spd_down_cnt_init = 0x20; 1466 break; 1467 case 2: 1468 dc_offset = (mode.vtotal <= 525) ? 0x14 : 0x00; 1469 spd_down_cnt_init = 0x18 + dc_offset; 1470 break; 1471 case 4: 1472 default: 1473 dc_offset = (mode.vtotal <= 525) ? 0x08 : 0x00; 1474 if (pix_clk_mhz > mtk_dp->train_info.link_rate * 27) 1475 spd_down_cnt_init = 0x8; 1476 else 1477 spd_down_cnt_init = 0x10 + dc_offset; 1478 break; 1479 } 1480 1481 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, spd_down_cnt_init, 1482 SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK); 1483 } 1484 1485 static void mtk_dp_audio_sample_arrange_disable(struct mtk_dp *mtk_dp) 1486 { 1487 /* arrange audio packets into the Hblanking and Vblanking area */ 1488 if (!mtk_dp->data->audio_pkt_in_hblank_area) 1489 return; 1490 1491 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3374, 0, 1492 SDP_ASP_INSERT_IN_HBLANK_DP_ENC1_P0_MASK); 1493 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3374, 0, 1494 SDP_DOWN_ASP_CNT_INIT_DP_ENC1_P0_MASK); 1495 } 1496 1497 static void mtk_dp_setup_tu(struct mtk_dp *mtk_dp) 1498 { 1499 u32 sram_read_start = min_t(u32, MTK_DP_TBC_BUF_READ_START_ADDR, 1500 mtk_dp->info.vm.hactive / 1501 mtk_dp->train_info.lane_count / 1502 MTK_DP_4P1T / MTK_DP_HDE / 1503 MTK_DP_PIX_PER_ADDR); 1504 mtk_dp_set_sram_read_start(mtk_dp, sram_read_start); 1505 mtk_dp_setup_encoder(mtk_dp); 1506 mtk_dp_audio_sample_arrange_disable(mtk_dp); 1507 mtk_dp_sdp_set_down_cnt_init_in_hblank(mtk_dp); 1508 mtk_dp_sdp_set_down_cnt_init(mtk_dp, sram_read_start); 1509 } 1510 1511 static void mtk_dp_set_tx_out(struct mtk_dp *mtk_dp) 1512 { 1513 mtk_dp_setup_tu(mtk_dp); 1514 } 1515 1516 static void mtk_dp_train_update_swing_pre(struct mtk_dp *mtk_dp, int lanes, 1517 u8 dpcd_adjust_req[2]) 1518 { 1519 int lane; 1520 1521 for (lane = 0; lane < lanes; ++lane) { 1522 u8 val; 1523 u8 swing; 1524 u8 preemphasis; 1525 int index = lane / 2; 1526 int shift = lane % 2 ? 
DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 0; 1527 1528 swing = (dpcd_adjust_req[index] >> shift) & 1529 DP_ADJUST_VOLTAGE_SWING_LANE0_MASK; 1530 preemphasis = ((dpcd_adjust_req[index] >> shift) & 1531 DP_ADJUST_PRE_EMPHASIS_LANE0_MASK) >> 1532 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT; 1533 val = swing << DP_TRAIN_VOLTAGE_SWING_SHIFT | 1534 preemphasis << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1535 1536 if (swing == DP_TRAIN_VOLTAGE_SWING_LEVEL_3) 1537 val |= DP_TRAIN_MAX_SWING_REACHED; 1538 if (preemphasis == 3) 1539 val |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1540 1541 mtk_dp_set_swing_pre_emphasis(mtk_dp, lane, swing, preemphasis); 1542 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_LANE0_SET + lane, 1543 val); 1544 } 1545 } 1546 1547 static void mtk_dp_pattern(struct mtk_dp *mtk_dp, bool is_tps1) 1548 { 1549 int pattern; 1550 unsigned int aux_offset; 1551 1552 if (is_tps1) { 1553 pattern = 1; 1554 aux_offset = DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1; 1555 } else { 1556 aux_offset = mtk_dp->train_info.channel_eq_pattern; 1557 1558 switch (mtk_dp->train_info.channel_eq_pattern) { 1559 case DP_TRAINING_PATTERN_4: 1560 pattern = 4; 1561 break; 1562 case DP_TRAINING_PATTERN_3: 1563 pattern = 3; 1564 aux_offset |= DP_LINK_SCRAMBLING_DISABLE; 1565 break; 1566 case DP_TRAINING_PATTERN_2: 1567 default: 1568 pattern = 2; 1569 aux_offset |= DP_LINK_SCRAMBLING_DISABLE; 1570 break; 1571 } 1572 } 1573 1574 mtk_dp_train_set_pattern(mtk_dp, pattern); 1575 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, aux_offset); 1576 } 1577 1578 static int mtk_dp_train_setting(struct mtk_dp *mtk_dp, u8 target_link_rate, 1579 u8 target_lane_count) 1580 { 1581 int ret; 1582 1583 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LINK_BW_SET, target_link_rate); 1584 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LANE_COUNT_SET, 1585 target_lane_count | DP_LANE_COUNT_ENHANCED_FRAME_EN); 1586 1587 if (mtk_dp->train_info.sink_ssc) 1588 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_DOWNSPREAD_CTRL, 1589 DP_SPREAD_AMP_0_5); 1590 1591 mtk_dp_set_lanes(mtk_dp, target_lane_count / 2); 1592 ret = mtk_dp_phy_configure(mtk_dp, target_link_rate, target_lane_count); 1593 if (ret) 1594 return ret; 1595 1596 dev_dbg(mtk_dp->dev, 1597 "Link train target_link_rate = 0x%x, target_lane_count = 0x%x\n", 1598 target_link_rate, target_lane_count); 1599 1600 return 0; 1601 } 1602 1603 static int mtk_dp_train_cr(struct mtk_dp *mtk_dp, u8 target_lane_count) 1604 { 1605 u8 lane_adjust[2] = {}; 1606 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 1607 u8 prev_lane_adjust = 0xff; 1608 int train_retries = 0; 1609 int voltage_retries = 0; 1610 1611 mtk_dp_pattern(mtk_dp, true); 1612 1613 /* In DP spec 1.4, the retry count of CR is defined as 10. */ 1614 do { 1615 train_retries++; 1616 if (!mtk_dp->train_info.cable_plugged_in) { 1617 mtk_dp_train_set_pattern(mtk_dp, 0); 1618 return -ENODEV; 1619 } 1620 1621 drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1, 1622 lane_adjust, sizeof(lane_adjust)); 1623 mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count, 1624 lane_adjust); 1625 1626 drm_dp_link_train_clock_recovery_delay(&mtk_dp->aux, 1627 mtk_dp->rx_cap); 1628 1629 /* check link status from sink device */ 1630 drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status); 1631 if (drm_dp_clock_recovery_ok(link_status, 1632 target_lane_count)) { 1633 dev_dbg(mtk_dp->dev, "Link train CR pass\n"); 1634 return 0; 1635 } 1636 1637 /* 1638 * In DP spec 1.4, if current voltage level is the same 1639 * with previous voltage level, we need to retry 5 times. 
1640 */ 1641 if (prev_lane_adjust == link_status[4]) { 1642 voltage_retries++; 1643 /* 1644 * Condition of CR fail: 1645 * 1. Failed to pass CR using the same voltage 1646 * level over five times. 1647 * 2. Failed to pass CR when the current voltage 1648 * level is the same with previous voltage 1649 * level and reach max voltage level (3). 1650 */ 1651 if (voltage_retries > MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY || 1652 (prev_lane_adjust & DP_ADJUST_VOLTAGE_SWING_LANE0_MASK) == 3) { 1653 dev_dbg(mtk_dp->dev, "Link train CR fail\n"); 1654 break; 1655 } 1656 } else { 1657 /* 1658 * If the voltage level is changed, we need to 1659 * re-calculate this retry count. 1660 */ 1661 voltage_retries = 0; 1662 } 1663 prev_lane_adjust = link_status[4]; 1664 } while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY); 1665 1666 /* Failed to train CR, and disable pattern. */ 1667 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1668 DP_TRAINING_PATTERN_DISABLE); 1669 mtk_dp_train_set_pattern(mtk_dp, 0); 1670 1671 return -ETIMEDOUT; 1672 } 1673 1674 static int mtk_dp_train_eq(struct mtk_dp *mtk_dp, u8 target_lane_count) 1675 { 1676 u8 lane_adjust[2] = {}; 1677 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 1678 int train_retries = 0; 1679 1680 mtk_dp_pattern(mtk_dp, false); 1681 1682 do { 1683 train_retries++; 1684 if (!mtk_dp->train_info.cable_plugged_in) { 1685 mtk_dp_train_set_pattern(mtk_dp, 0); 1686 return -ENODEV; 1687 } 1688 1689 drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1, 1690 lane_adjust, sizeof(lane_adjust)); 1691 mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count, 1692 lane_adjust); 1693 1694 drm_dp_link_train_channel_eq_delay(&mtk_dp->aux, 1695 mtk_dp->rx_cap); 1696 1697 /* check link status from sink device */ 1698 drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status); 1699 if (drm_dp_channel_eq_ok(link_status, target_lane_count)) { 1700 dev_dbg(mtk_dp->dev, "Link train EQ pass\n"); 1701 1702 /* Training done, and disable pattern. */ 1703 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1704 DP_TRAINING_PATTERN_DISABLE); 1705 mtk_dp_train_set_pattern(mtk_dp, 0); 1706 return 0; 1707 } 1708 dev_dbg(mtk_dp->dev, "Link train EQ fail\n"); 1709 } while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY); 1710 1711 /* Failed to train EQ, and disable pattern. 
*/ 1712 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1713 DP_TRAINING_PATTERN_DISABLE); 1714 mtk_dp_train_set_pattern(mtk_dp, 0); 1715 1716 return -ETIMEDOUT; 1717 } 1718 1719 static int mtk_dp_parse_capabilities(struct mtk_dp *mtk_dp) 1720 { 1721 u8 val; 1722 ssize_t ret; 1723 1724 /* 1725 * If we're eDP and capabilities were already parsed we can skip 1726 * reading again because eDP panels aren't hotpluggable hence the 1727 * caps and training information won't ever change in a boot life 1728 */ 1729 if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP && 1730 mtk_dp->rx_cap[DP_MAX_LINK_RATE] && 1731 mtk_dp->train_info.sink_ssc) 1732 return 0; 1733 1734 ret = drm_dp_read_dpcd_caps(&mtk_dp->aux, mtk_dp->rx_cap); 1735 if (ret < 0) 1736 return ret; 1737 1738 if (drm_dp_tps4_supported(mtk_dp->rx_cap)) 1739 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_4; 1740 else if (drm_dp_tps3_supported(mtk_dp->rx_cap)) 1741 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_3; 1742 else 1743 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_2; 1744 1745 mtk_dp->train_info.sink_ssc = drm_dp_max_downspread(mtk_dp->rx_cap); 1746 1747 ret = drm_dp_dpcd_readb(&mtk_dp->aux, DP_MSTM_CAP, &val); 1748 if (ret < 1) { 1749 drm_err(mtk_dp->drm_dev, "Read mstm cap failed\n"); 1750 return ret == 0 ? -EIO : ret; 1751 } 1752 1753 if (val & DP_MST_CAP) { 1754 /* Clear DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 */ 1755 ret = drm_dp_dpcd_readb(&mtk_dp->aux, 1756 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0, 1757 &val); 1758 if (ret < 1) { 1759 drm_err(mtk_dp->drm_dev, "Read irq vector failed\n"); 1760 return ret == 0 ? -EIO : ret; 1761 } 1762 1763 if (val) { 1764 ret = drm_dp_dpcd_writeb(&mtk_dp->aux, 1765 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0, 1766 val); 1767 if (ret < 0) 1768 return ret; 1769 } 1770 } 1771 1772 return 0; 1773 } 1774 1775 static bool mtk_dp_edid_parse_audio_capabilities(struct mtk_dp *mtk_dp, 1776 struct mtk_dp_audio_cfg *cfg) 1777 { 1778 if (!mtk_dp->data->audio_supported) 1779 return false; 1780 1781 if (mtk_dp->info.audio_cur_cfg.sad_count <= 0) { 1782 drm_info(mtk_dp->drm_dev, "The SADs is NULL\n"); 1783 return false; 1784 } 1785 1786 return true; 1787 } 1788 1789 static void mtk_dp_train_change_mode(struct mtk_dp *mtk_dp) 1790 { 1791 phy_reset(mtk_dp->phy); 1792 mtk_dp_reset_swing_pre_emphasis(mtk_dp); 1793 } 1794 1795 static int mtk_dp_training(struct mtk_dp *mtk_dp) 1796 { 1797 int ret; 1798 u8 lane_count, link_rate, train_limit, max_link_rate; 1799 1800 link_rate = min_t(u8, mtk_dp->max_linkrate, 1801 mtk_dp->rx_cap[DP_MAX_LINK_RATE]); 1802 max_link_rate = link_rate; 1803 lane_count = min_t(u8, mtk_dp->max_lanes, 1804 drm_dp_max_lane_count(mtk_dp->rx_cap)); 1805 1806 /* 1807 * TPS are generated by the hardware pattern generator. From the 1808 * hardware setting we need to disable this scramble setting before 1809 * use the TPS pattern generator. 
1810 */ 1811 mtk_dp_training_set_scramble(mtk_dp, false); 1812 1813 for (train_limit = 6; train_limit > 0; train_limit--) { 1814 mtk_dp_train_change_mode(mtk_dp); 1815 1816 ret = mtk_dp_train_setting(mtk_dp, link_rate, lane_count); 1817 if (ret) 1818 return ret; 1819 1820 ret = mtk_dp_train_cr(mtk_dp, lane_count); 1821 if (ret == -ENODEV) { 1822 return ret; 1823 } else if (ret) { 1824 /* reduce link rate */ 1825 switch (link_rate) { 1826 case DP_LINK_BW_1_62: 1827 lane_count = lane_count / 2; 1828 link_rate = max_link_rate; 1829 if (lane_count == 0) 1830 return -EIO; 1831 break; 1832 case DP_LINK_BW_2_7: 1833 link_rate = DP_LINK_BW_1_62; 1834 break; 1835 case DP_LINK_BW_5_4: 1836 link_rate = DP_LINK_BW_2_7; 1837 break; 1838 case DP_LINK_BW_8_1: 1839 link_rate = DP_LINK_BW_5_4; 1840 break; 1841 default: 1842 return -EINVAL; 1843 } 1844 continue; 1845 } 1846 1847 ret = mtk_dp_train_eq(mtk_dp, lane_count); 1848 if (ret == -ENODEV) { 1849 return ret; 1850 } else if (ret) { 1851 /* reduce lane count */ 1852 if (lane_count == 0) 1853 return -EIO; 1854 lane_count /= 2; 1855 continue; 1856 } 1857 1858 /* if we can run to this, training is done. */ 1859 break; 1860 } 1861 1862 if (train_limit == 0) 1863 return -ETIMEDOUT; 1864 1865 mtk_dp->train_info.link_rate = link_rate; 1866 mtk_dp->train_info.lane_count = lane_count; 1867 1868 /* 1869 * After training done, we need to output normal stream instead of TPS, 1870 * so we need to enable scramble. 1871 */ 1872 mtk_dp_training_set_scramble(mtk_dp, true); 1873 mtk_dp_set_enhanced_frame_mode(mtk_dp); 1874 1875 return 0; 1876 } 1877 1878 static void mtk_dp_video_enable(struct mtk_dp *mtk_dp, bool enable) 1879 { 1880 /* the mute sequence is different between enable and disable */ 1881 if (enable) { 1882 mtk_dp_msa_bypass_enable(mtk_dp, false); 1883 mtk_dp_pg_enable(mtk_dp, false); 1884 mtk_dp_set_tx_out(mtk_dp); 1885 mtk_dp_video_mute(mtk_dp, false); 1886 } else { 1887 mtk_dp_video_mute(mtk_dp, true); 1888 mtk_dp_pg_enable(mtk_dp, true); 1889 mtk_dp_msa_bypass_enable(mtk_dp, true); 1890 } 1891 } 1892 1893 static void mtk_dp_audio_sdp_setup(struct mtk_dp *mtk_dp, 1894 struct mtk_dp_audio_cfg *cfg) 1895 { 1896 struct dp_sdp sdp; 1897 struct hdmi_audio_infoframe frame; 1898 1899 hdmi_audio_infoframe_init(&frame); 1900 frame.coding_type = HDMI_AUDIO_CODING_TYPE_PCM; 1901 frame.channels = cfg->channels; 1902 frame.sample_frequency = cfg->sample_rate; 1903 1904 switch (cfg->word_length_bits) { 1905 case 16: 1906 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_16; 1907 break; 1908 case 20: 1909 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_20; 1910 break; 1911 case 24: 1912 default: 1913 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_24; 1914 break; 1915 } 1916 1917 hdmi_audio_infoframe_pack_for_dp(&frame, &sdp, MTK_DP_VERSION); 1918 1919 mtk_dp_audio_sdp_asp_set_channels(mtk_dp, cfg->channels); 1920 mtk_dp_setup_sdp_aui(mtk_dp, &sdp); 1921 } 1922 1923 static void mtk_dp_audio_setup(struct mtk_dp *mtk_dp, 1924 struct mtk_dp_audio_cfg *cfg) 1925 { 1926 mtk_dp_audio_sdp_setup(mtk_dp, cfg); 1927 mtk_dp_audio_channel_status_set(mtk_dp, cfg); 1928 1929 mtk_dp_audio_setup_channels(mtk_dp, cfg); 1930 mtk_dp_audio_set_divider(mtk_dp); 1931 } 1932 1933 static int mtk_dp_video_config(struct mtk_dp *mtk_dp) 1934 { 1935 mtk_dp_config_mn_mode(mtk_dp); 1936 mtk_dp_set_msa(mtk_dp); 1937 mtk_dp_set_color_depth(mtk_dp); 1938 return mtk_dp_set_color_format(mtk_dp, mtk_dp->info.format); 1939 } 1940 1941 static void mtk_dp_init_port(struct mtk_dp *mtk_dp) 1942 { 1943 
mtk_dp_set_idle_pattern(mtk_dp, true); 1944 mtk_dp_initialize_priv_data(mtk_dp); 1945 1946 mtk_dp_initialize_settings(mtk_dp); 1947 mtk_dp_initialize_aux_settings(mtk_dp); 1948 mtk_dp_initialize_digital_settings(mtk_dp); 1949 mtk_dp_initialize_hpd_detect_settings(mtk_dp); 1950 1951 mtk_dp_digital_sw_reset(mtk_dp); 1952 } 1953 1954 static irqreturn_t mtk_dp_hpd_event_thread(int hpd, void *dev) 1955 { 1956 struct mtk_dp *mtk_dp = dev; 1957 unsigned long flags; 1958 u32 status; 1959 1960 if (mtk_dp->need_debounce && mtk_dp->train_info.cable_plugged_in) 1961 msleep(100); 1962 1963 spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags); 1964 status = mtk_dp->irq_thread_handle; 1965 mtk_dp->irq_thread_handle = 0; 1966 spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags); 1967 1968 if (status & MTK_DP_THREAD_CABLE_STATE_CHG) { 1969 if (mtk_dp->bridge.dev) 1970 drm_helper_hpd_irq_event(mtk_dp->bridge.dev); 1971 1972 if (!mtk_dp->train_info.cable_plugged_in) { 1973 mtk_dp_disable_sdp_aui(mtk_dp); 1974 memset(&mtk_dp->info.audio_cur_cfg, 0, 1975 sizeof(mtk_dp->info.audio_cur_cfg)); 1976 1977 mtk_dp->need_debounce = false; 1978 mod_timer(&mtk_dp->debounce_timer, 1979 jiffies + msecs_to_jiffies(100) - 1); 1980 } 1981 } 1982 1983 if (status & MTK_DP_THREAD_HPD_EVENT) 1984 dev_dbg(mtk_dp->dev, "Receive IRQ from sink devices\n"); 1985 1986 return IRQ_HANDLED; 1987 } 1988 1989 static irqreturn_t mtk_dp_hpd_event(int hpd, void *dev) 1990 { 1991 struct mtk_dp *mtk_dp = dev; 1992 bool cable_sta_chg = false; 1993 unsigned long flags; 1994 u32 irq_status = mtk_dp_swirq_get_clear(mtk_dp) | 1995 mtk_dp_hwirq_get_clear(mtk_dp); 1996 1997 if (!irq_status) 1998 return IRQ_HANDLED; 1999 2000 spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags); 2001 2002 if (irq_status & MTK_DP_HPD_INTERRUPT) 2003 mtk_dp->irq_thread_handle |= MTK_DP_THREAD_HPD_EVENT; 2004 2005 /* Cable state is changed. 
static irqreturn_t mtk_dp_hpd_event(int hpd, void *dev)
{
	struct mtk_dp *mtk_dp = dev;
	bool cable_sta_chg = false;
	unsigned long flags;
	u32 irq_status = mtk_dp_swirq_get_clear(mtk_dp) |
			 mtk_dp_hwirq_get_clear(mtk_dp);

	if (!irq_status)
		return IRQ_HANDLED;

	spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags);

	if (irq_status & MTK_DP_HPD_INTERRUPT)
		mtk_dp->irq_thread_handle |= MTK_DP_THREAD_HPD_EVENT;

	/* Cable state has changed. */
	if (irq_status != MTK_DP_HPD_INTERRUPT) {
		mtk_dp->irq_thread_handle |= MTK_DP_THREAD_CABLE_STATE_CHG;
		cable_sta_chg = true;
	}

	spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags);

	if (cable_sta_chg) {
		if (!!(mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3414) &
		       HPD_DB_DP_TRANS_P0_MASK))
			mtk_dp->train_info.cable_plugged_in = true;
		else
			mtk_dp->train_info.cable_plugged_in = false;
	}

	return IRQ_WAKE_THREAD;
}

static int mtk_dp_wait_hpd_asserted(struct drm_dp_aux *mtk_aux, unsigned long wait_us)
{
	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
	u32 val;
	int ret;

	ret = regmap_read_poll_timeout(mtk_dp->regs, MTK_DP_TRANS_P0_3414,
				       val, !!(val & HPD_DB_DP_TRANS_P0_MASK),
				       wait_us / 100, wait_us);
	if (ret) {
		mtk_dp->train_info.cable_plugged_in = false;
		return ret;
	}

	mtk_dp->train_info.cable_plugged_in = true;

	ret = mtk_dp_parse_capabilities(mtk_dp);
	if (ret) {
		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
		return ret;
	}

	return 0;
}

static int mtk_dp_dt_parse(struct mtk_dp *mtk_dp,
			   struct platform_device *pdev)
{
	struct device_node *endpoint;
	struct device *dev = &pdev->dev;
	int ret;
	void __iomem *base;
	u32 linkrate;
	int len;

	base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(base))
		return PTR_ERR(base);

	mtk_dp->regs = devm_regmap_init_mmio(dev, base, &mtk_dp_regmap_config);
	if (IS_ERR(mtk_dp->regs))
		return PTR_ERR(mtk_dp->regs);

	endpoint = of_graph_get_endpoint_by_regs(pdev->dev.of_node, 1, -1);
	len = of_property_count_elems_of_size(endpoint,
					      "data-lanes", sizeof(u32));
	if (len < 0 || len > 4 || len == 3) {
		dev_err(dev, "invalid data lane size: %d\n", len);
		return -EINVAL;
	}

	mtk_dp->max_lanes = len;

	ret = device_property_read_u32(dev, "max-linkrate-mhz", &linkrate);
	if (ret) {
		dev_err(dev, "failed to read max linkrate: %d\n", ret);
		return ret;
	}

	mtk_dp->max_linkrate = drm_dp_link_rate_to_bw_code(linkrate * 100);

	return 0;
}

static void mtk_dp_update_plugged_status(struct mtk_dp *mtk_dp)
{
	if (!mtk_dp->data->audio_supported || !mtk_dp->audio_enable)
		return;

	mutex_lock(&mtk_dp->update_plugged_status_lock);
	if (mtk_dp->plugged_cb && mtk_dp->codec_dev)
		mtk_dp->plugged_cb(mtk_dp->codec_dev,
				   mtk_dp->enabled &
				   mtk_dp->info.audio_cur_cfg.detect_monitor);
	mutex_unlock(&mtk_dp->update_plugged_status_lock);
}

static enum drm_connector_status mtk_dp_bdg_detect(struct drm_bridge *bridge)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	enum drm_connector_status ret = connector_status_disconnected;
	bool enabled = mtk_dp->enabled;

	if (!mtk_dp->train_info.cable_plugged_in)
		return ret;

	if (!enabled)
		mtk_dp_aux_panel_poweron(mtk_dp, true);

	/*
	 * Some dongles still assert HPD even though they are not connected
	 * to any sink device. To avoid false positives, read the sink count
	 * to make sure a sink device is really attached. After this detect
	 * function, checking the HPD signal is enough to tell whether a sink
	 * device is connected.
	 */

	if (drm_dp_read_sink_count(&mtk_dp->aux) > 0)
		ret = connector_status_connected;

	if (!enabled)
		mtk_dp_aux_panel_poweron(mtk_dp, false);

	return ret;
}

static const struct drm_edid *mtk_dp_edid_read(struct drm_bridge *bridge,
					       struct drm_connector *connector)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	bool enabled = mtk_dp->enabled;
	const struct drm_edid *drm_edid;
	struct mtk_dp_audio_cfg *audio_caps = &mtk_dp->info.audio_cur_cfg;

	if (!enabled) {
		drm_atomic_bridge_chain_pre_enable(bridge, connector->state->state);
		mtk_dp_aux_panel_poweron(mtk_dp, true);
	}

	drm_edid = drm_edid_read_ddc(connector, &mtk_dp->aux.ddc);

	/*
	 * Parse capability here to let atomic_get_input_bus_fmts and
	 * mode_valid use the capability to calculate sink bitrates.
	 */
	if (mtk_dp_parse_capabilities(mtk_dp)) {
		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
		drm_edid_free(drm_edid);
		drm_edid = NULL;
	}

	if (drm_edid) {
		/*
		 * FIXME: get rid of drm_edid_raw()
		 */
		const struct edid *edid = drm_edid_raw(drm_edid);
		struct cea_sad *sads;
		int ret;

		ret = drm_edid_to_sad(edid, &sads);
		/* Ignore any errors */
		if (ret < 0)
			ret = 0;
		if (ret)
			kfree(sads);
		audio_caps->sad_count = ret;

		/*
		 * FIXME: This should use connector->display_info.has_audio from
		 * a path that has read the EDID and called
		 * drm_edid_connector_update().
		 */
		audio_caps->detect_monitor = drm_detect_monitor_audio(edid);
	}

	if (!enabled) {
		mtk_dp_aux_panel_poweron(mtk_dp, false);
		drm_atomic_bridge_chain_post_disable(bridge, connector->state->state);
	}

	return drm_edid;
}

static ssize_t mtk_dp_aux_transfer(struct drm_dp_aux *mtk_aux,
				   struct drm_dp_aux_msg *msg)
{
	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
	bool is_read;
	u8 request;
	size_t accessed_bytes = 0;
	int ret;

	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP &&
	    !mtk_dp->train_info.cable_plugged_in) {
		ret = -EIO;
		goto err;
	}

	switch (msg->request) {
	case DP_AUX_I2C_MOT:
	case DP_AUX_I2C_WRITE:
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE | DP_AUX_I2C_MOT:
		request = msg->request & ~DP_AUX_I2C_WRITE_STATUS_UPDATE;
		is_read = false;
		break;
	case DP_AUX_I2C_READ:
	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ | DP_AUX_I2C_MOT:
		request = msg->request;
		is_read = true;
		break;
	default:
		dev_err(mtk_dp->dev, "invalid aux cmd = %d\n",
			msg->request);
		ret = -EINVAL;
		goto err;
	}

	do {
		size_t to_access = min_t(size_t, DP_AUX_MAX_PAYLOAD_BYTES,
					 msg->size - accessed_bytes);

		ret = mtk_dp_aux_do_transfer(mtk_dp, is_read, request,
					     msg->address + accessed_bytes,
					     msg->buffer + accessed_bytes,
					     to_access, &msg->reply);

		if (ret) {
			dev_info(mtk_dp->dev,
				 "Failed to do AUX transfer: %d\n", ret);
			goto err;
		}
		accessed_bytes += to_access;
	} while (accessed_bytes < msg->size);

	return msg->size;
err:
	msg->reply = DP_AUX_NATIVE_REPLY_NACK | DP_AUX_I2C_REPLY_NACK;
	return ret;
}
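
/*
 * Power handling for the bridge attach/detach path: mtk_dp_poweron()
 * initializes the PHY, re-initializes the port and powers the controller
 * up, while mtk_dp_poweroff() undoes those steps in reverse order.
 */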
static int mtk_dp_poweron(struct mtk_dp *mtk_dp)
{
	int ret;

	ret = phy_init(mtk_dp->phy);
	if (ret) {
		dev_err(mtk_dp->dev, "Failed to initialize phy: %d\n", ret);
		return ret;
	}

	mtk_dp_init_port(mtk_dp);
	mtk_dp_power_enable(mtk_dp);

	return 0;
}

static void mtk_dp_poweroff(struct mtk_dp *mtk_dp)
{
	mtk_dp_power_disable(mtk_dp);
	phy_exit(mtk_dp->phy);
}

static int mtk_dp_bridge_attach(struct drm_bridge *bridge,
				enum drm_bridge_attach_flags flags)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	int ret;

	if (!(flags & DRM_BRIDGE_ATTACH_NO_CONNECTOR)) {
		dev_err(mtk_dp->dev, "Driver does not provide a connector!");
		return -EINVAL;
	}

	mtk_dp->aux.drm_dev = bridge->dev;
	ret = drm_dp_aux_register(&mtk_dp->aux);
	if (ret) {
		dev_err(mtk_dp->dev,
			"failed to register DP AUX channel: %d\n", ret);
		return ret;
	}

	ret = mtk_dp_poweron(mtk_dp);
	if (ret)
		goto err_aux_register;

	if (mtk_dp->next_bridge) {
		ret = drm_bridge_attach(bridge->encoder, mtk_dp->next_bridge,
					&mtk_dp->bridge, flags);
		if (ret) {
			drm_warn(mtk_dp->drm_dev,
				 "Failed to attach external bridge: %d\n", ret);
			goto err_bridge_attach;
		}
	}

	mtk_dp->drm_dev = bridge->dev;

	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) {
		irq_clear_status_flags(mtk_dp->irq, IRQ_NOAUTOEN);
		enable_irq(mtk_dp->irq);
		mtk_dp_hwirq_enable(mtk_dp, true);
	}

	return 0;

err_bridge_attach:
	mtk_dp_poweroff(mtk_dp);
err_aux_register:
	drm_dp_aux_unregister(&mtk_dp->aux);
	return ret;
}

static void mtk_dp_bridge_detach(struct drm_bridge *bridge)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);

	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) {
		mtk_dp_hwirq_enable(mtk_dp, false);
		disable_irq(mtk_dp->irq);
	}
	mtk_dp->drm_dev = NULL;
	mtk_dp_poweroff(mtk_dp);
	drm_dp_aux_unregister(&mtk_dp->aux);
}

static void mtk_dp_bridge_atomic_enable(struct drm_bridge *bridge,
					struct drm_bridge_state *old_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	int ret;

	mtk_dp->conn = drm_atomic_get_new_connector_for_encoder(old_state->base.state,
								bridge->encoder);
	if (!mtk_dp->conn) {
		drm_err(mtk_dp->drm_dev,
			"Can't enable bridge as connector is missing\n");
		return;
	}

	mtk_dp_aux_panel_poweron(mtk_dp, true);

	/* Training */
	ret = mtk_dp_training(mtk_dp);
	if (ret) {
		drm_err(mtk_dp->drm_dev, "Training failed, %d\n", ret);
		goto power_off_aux;
	}

	ret = mtk_dp_video_config(mtk_dp);
	if (ret)
		goto power_off_aux;

	mtk_dp_video_enable(mtk_dp, true);

	mtk_dp->audio_enable =
		mtk_dp_edid_parse_audio_capabilities(mtk_dp,
						     &mtk_dp->info.audio_cur_cfg);
	if (mtk_dp->audio_enable) {
		mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);
		mtk_dp_audio_mute(mtk_dp, false);
	} else {
		memset(&mtk_dp->info.audio_cur_cfg, 0,
		       sizeof(mtk_dp->info.audio_cur_cfg));
	}

	mtk_dp->enabled = true;
	mtk_dp_update_plugged_status(mtk_dp);

	return;
power_off_aux:
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);
}
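
/*
 * Disable mirrors the enable sequence in reverse: stop reporting audio as
 * plugged, mute video and audio, put the sink into D3 while the cable is
 * still present, and finally drop the AUX block back to bandgap/TPLL power.
 */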
static void mtk_dp_bridge_atomic_disable(struct drm_bridge *bridge,
					 struct drm_bridge_state *old_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);

	mtk_dp->enabled = false;
	mtk_dp_update_plugged_status(mtk_dp);
	mtk_dp_video_enable(mtk_dp, false);
	mtk_dp_audio_mute(mtk_dp, true);

	if (mtk_dp->train_info.cable_plugged_in) {
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
		usleep_range(2000, 3000);
	}

	/* power off aux */
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);

	/* Ensure the sink is muted */
	msleep(20);
}

static enum drm_mode_status
mtk_dp_bridge_mode_valid(struct drm_bridge *bridge,
			 const struct drm_display_info *info,
			 const struct drm_display_mode *mode)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	u32 bpp = info->color_formats & DRM_COLOR_FORMAT_YCBCR422 ? 16 : 24;
	u32 lane_count_min = mtk_dp->train_info.lane_count;
	u32 rate = drm_dp_bw_code_to_link_rate(mtk_dp->train_info.link_rate) *
		   lane_count_min;

	/*
	 * FEC overhead is approximately 2.4% from DP 1.4a spec 2.2.1.4.2.
	 * The down-spread amplitude shall either be disabled (0.0%) or up
	 * to 0.5% from 1.4a 3.5.2.6. Add up to approximately 3% total overhead.
	 *
	 * Because rate is already divided by 10,
	 * mode->clock does not need to be multiplied by 10.
	 */
	if ((rate * 97 / 100) < (mode->clock * bpp / 8))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

static u32 *mtk_dp_bridge_atomic_get_output_bus_fmts(struct drm_bridge *bridge,
						     struct drm_bridge_state *bridge_state,
						     struct drm_crtc_state *crtc_state,
						     struct drm_connector_state *conn_state,
						     unsigned int *num_output_fmts)
{
	u32 *output_fmts;

	*num_output_fmts = 0;
	output_fmts = kmalloc(sizeof(*output_fmts), GFP_KERNEL);
	if (!output_fmts)
		return NULL;
	*num_output_fmts = 1;
	output_fmts[0] = MEDIA_BUS_FMT_FIXED;
	return output_fmts;
}

static const u32 mt8195_input_fmts[] = {
	MEDIA_BUS_FMT_RGB888_1X24,
	MEDIA_BUS_FMT_YUV8_1X24,
	MEDIA_BUS_FMT_YUYV8_1X16,
};

static u32 *mtk_dp_bridge_atomic_get_input_bus_fmts(struct drm_bridge *bridge,
						    struct drm_bridge_state *bridge_state,
						    struct drm_crtc_state *crtc_state,
						    struct drm_connector_state *conn_state,
						    u32 output_fmt,
						    unsigned int *num_input_fmts)
{
	u32 *input_fmts;
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	struct drm_display_mode *mode = &crtc_state->adjusted_mode;
	struct drm_display_info *display_info =
		&conn_state->connector->display_info;
	u32 lane_count_min = mtk_dp->train_info.lane_count;
	u32 rate = drm_dp_bw_code_to_link_rate(mtk_dp->train_info.link_rate) *
		   lane_count_min;

	*num_input_fmts = 0;

	/*
	 * If the link rate is lower than the data rate of RGB888 but higher
	 * than the data rate of YUV422, and the sink device supports YUV422,
	 * output the YUV422 format. With this condition we can support more
	 * resolutions.
	 */
	if (((rate * 97 / 100) < (mode->clock * 24 / 8)) &&
	    ((rate * 97 / 100) > (mode->clock * 16 / 8)) &&
	    (display_info->color_formats & DRM_COLOR_FORMAT_YCBCR422)) {
		input_fmts = kcalloc(1, sizeof(*input_fmts), GFP_KERNEL);
		if (!input_fmts)
			return NULL;
		*num_input_fmts = 1;
		input_fmts[0] = MEDIA_BUS_FMT_YUYV8_1X16;
	} else {
		input_fmts = kcalloc(ARRAY_SIZE(mt8195_input_fmts),
				     sizeof(*input_fmts),
				     GFP_KERNEL);
		if (!input_fmts)
			return NULL;

		*num_input_fmts = ARRAY_SIZE(mt8195_input_fmts);
		memcpy(input_fmts, mt8195_input_fmts, sizeof(mt8195_input_fmts));
	}

	return input_fmts;
}

static int mtk_dp_bridge_atomic_check(struct drm_bridge *bridge,
				      struct drm_bridge_state *bridge_state,
				      struct drm_crtc_state *crtc_state,
				      struct drm_connector_state *conn_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	struct drm_crtc *crtc = conn_state->crtc;
	unsigned int input_bus_format;

	input_bus_format = bridge_state->input_bus_cfg.format;

	dev_dbg(mtk_dp->dev, "input format 0x%04x, output format 0x%04x\n",
		bridge_state->input_bus_cfg.format,
		bridge_state->output_bus_cfg.format);

	if (input_bus_format == MEDIA_BUS_FMT_YUYV8_1X16)
		mtk_dp->info.format = DP_PIXELFORMAT_YUV422;
	else
		mtk_dp->info.format = DP_PIXELFORMAT_RGB;

	if (!crtc) {
		drm_err(mtk_dp->drm_dev,
			"Can't enable bridge as connector state doesn't have a crtc\n");
		return -EINVAL;
	}

	drm_display_mode_to_videomode(&crtc_state->adjusted_mode, &mtk_dp->info.vm);

	return 0;
}

static const struct drm_bridge_funcs mtk_dp_bridge_funcs = {
	.atomic_check = mtk_dp_bridge_atomic_check,
	.atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_bridge_destroy_state,
	.atomic_get_output_bus_fmts = mtk_dp_bridge_atomic_get_output_bus_fmts,
	.atomic_get_input_bus_fmts = mtk_dp_bridge_atomic_get_input_bus_fmts,
	.atomic_reset = drm_atomic_helper_bridge_reset,
	.attach = mtk_dp_bridge_attach,
	.detach = mtk_dp_bridge_detach,
	.atomic_enable = mtk_dp_bridge_atomic_enable,
	.atomic_disable = mtk_dp_bridge_atomic_disable,
	.mode_valid = mtk_dp_bridge_mode_valid,
	.edid_read = mtk_dp_edid_read,
	.detect = mtk_dp_bdg_detect,
};

static void mtk_dp_debounce_timer(struct timer_list *t)
{
	struct mtk_dp *mtk_dp = from_timer(mtk_dp, t, debounce_timer);

	mtk_dp->need_debounce = true;
}

/*
 * HDMI audio codec callbacks
 */
static int mtk_dp_audio_hw_params(struct device *dev, void *data,
				  struct hdmi_codec_daifmt *daifmt,
				  struct hdmi_codec_params *params)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	if (!mtk_dp->enabled) {
		dev_err(mtk_dp->dev, "%s, DP is not ready!\n", __func__);
		return -ENODEV;
	}

	mtk_dp->info.audio_cur_cfg.channels = params->cea.channels;
	mtk_dp->info.audio_cur_cfg.sample_rate = params->sample_rate;

	mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);

	return 0;
}

static int mtk_dp_audio_startup(struct device *dev, void *data)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	mtk_dp_audio_mute(mtk_dp, false);

	return 0;
}

static void mtk_dp_audio_shutdown(struct device *dev, void *data)
2585 { 2586 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2587 2588 mtk_dp_audio_mute(mtk_dp, true); 2589 } 2590 2591 static int mtk_dp_audio_get_eld(struct device *dev, void *data, uint8_t *buf, 2592 size_t len) 2593 { 2594 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2595 2596 if (mtk_dp->enabled) 2597 memcpy(buf, mtk_dp->conn->eld, len); 2598 else 2599 memset(buf, 0, len); 2600 2601 return 0; 2602 } 2603 2604 static int mtk_dp_audio_hook_plugged_cb(struct device *dev, void *data, 2605 hdmi_codec_plugged_cb fn, 2606 struct device *codec_dev) 2607 { 2608 struct mtk_dp *mtk_dp = data; 2609 2610 mutex_lock(&mtk_dp->update_plugged_status_lock); 2611 mtk_dp->plugged_cb = fn; 2612 mtk_dp->codec_dev = codec_dev; 2613 mutex_unlock(&mtk_dp->update_plugged_status_lock); 2614 2615 mtk_dp_update_plugged_status(mtk_dp); 2616 2617 return 0; 2618 } 2619 2620 static const struct hdmi_codec_ops mtk_dp_audio_codec_ops = { 2621 .hw_params = mtk_dp_audio_hw_params, 2622 .audio_startup = mtk_dp_audio_startup, 2623 .audio_shutdown = mtk_dp_audio_shutdown, 2624 .get_eld = mtk_dp_audio_get_eld, 2625 .hook_plugged_cb = mtk_dp_audio_hook_plugged_cb, 2626 .no_capture_mute = 1, 2627 }; 2628 2629 static int mtk_dp_register_audio_driver(struct device *dev) 2630 { 2631 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2632 struct hdmi_codec_pdata codec_data = { 2633 .ops = &mtk_dp_audio_codec_ops, 2634 .max_i2s_channels = 8, 2635 .i2s = 1, 2636 .data = mtk_dp, 2637 }; 2638 2639 mtk_dp->audio_pdev = platform_device_register_data(dev, 2640 HDMI_CODEC_DRV_NAME, 2641 PLATFORM_DEVID_AUTO, 2642 &codec_data, 2643 sizeof(codec_data)); 2644 return PTR_ERR_OR_ZERO(mtk_dp->audio_pdev); 2645 } 2646 2647 static int mtk_dp_register_phy(struct mtk_dp *mtk_dp) 2648 { 2649 struct device *dev = mtk_dp->dev; 2650 2651 mtk_dp->phy_dev = platform_device_register_data(dev, "mediatek-dp-phy", 2652 PLATFORM_DEVID_AUTO, 2653 &mtk_dp->regs, 2654 sizeof(struct regmap *)); 2655 if (IS_ERR(mtk_dp->phy_dev)) 2656 return dev_err_probe(dev, PTR_ERR(mtk_dp->phy_dev), 2657 "Failed to create device mediatek-dp-phy\n"); 2658 2659 mtk_dp_get_calibration_data(mtk_dp); 2660 2661 mtk_dp->phy = devm_phy_get(&mtk_dp->phy_dev->dev, "dp"); 2662 if (IS_ERR(mtk_dp->phy)) { 2663 platform_device_unregister(mtk_dp->phy_dev); 2664 return dev_err_probe(dev, PTR_ERR(mtk_dp->phy), "Failed to get phy\n"); 2665 } 2666 2667 return 0; 2668 } 2669 2670 static int mtk_dp_edp_link_panel(struct drm_dp_aux *mtk_aux) 2671 { 2672 struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux); 2673 struct device *dev = mtk_aux->dev; 2674 int ret; 2675 2676 mtk_dp->next_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0); 2677 2678 /* Power off the DP and AUX: either detection is done, or no panel present */ 2679 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 2680 DP_PWR_STATE_BANDGAP_TPLL, 2681 DP_PWR_STATE_MASK); 2682 mtk_dp_power_disable(mtk_dp); 2683 2684 if (IS_ERR(mtk_dp->next_bridge)) { 2685 ret = PTR_ERR(mtk_dp->next_bridge); 2686 mtk_dp->next_bridge = NULL; 2687 return ret; 2688 } 2689 2690 /* For eDP, we add the bridge only if the panel was found */ 2691 ret = devm_drm_bridge_add(dev, &mtk_dp->bridge); 2692 if (ret) 2693 return ret; 2694 2695 return 0; 2696 } 2697 2698 static int mtk_dp_probe(struct platform_device *pdev) 2699 { 2700 struct mtk_dp *mtk_dp; 2701 struct device *dev = &pdev->dev; 2702 int ret; 2703 2704 mtk_dp = devm_kzalloc(dev, sizeof(*mtk_dp), GFP_KERNEL); 2705 if (!mtk_dp) 2706 return -ENOMEM; 2707 2708 mtk_dp->dev = dev; 2709 mtk_dp->data = 
static int mtk_dp_probe(struct platform_device *pdev)
{
	struct mtk_dp *mtk_dp;
	struct device *dev = &pdev->dev;
	int ret;

	mtk_dp = devm_kzalloc(dev, sizeof(*mtk_dp), GFP_KERNEL);
	if (!mtk_dp)
		return -ENOMEM;

	mtk_dp->dev = dev;
	mtk_dp->data = (struct mtk_dp_data *)of_device_get_match_data(dev);

	ret = mtk_dp_dt_parse(mtk_dp, pdev);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to parse dt\n");

	/*
	 * Request the interrupt and install the service routine only if we
	 * are on full DisplayPort.
	 * For eDP, polling HPD instead is more convenient because we don't
	 * expect any (un)plug events during runtime, hence we can avoid some
	 * locking.
	 */
	if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP) {
		mtk_dp->irq = platform_get_irq(pdev, 0);
		if (mtk_dp->irq < 0)
			return dev_err_probe(dev, mtk_dp->irq,
					     "failed to request dp irq resource\n");

		spin_lock_init(&mtk_dp->irq_thread_lock);

		irq_set_status_flags(mtk_dp->irq, IRQ_NOAUTOEN);
		ret = devm_request_threaded_irq(dev, mtk_dp->irq, mtk_dp_hpd_event,
						mtk_dp_hpd_event_thread,
						IRQ_TYPE_LEVEL_HIGH, dev_name(dev),
						mtk_dp);
		if (ret)
			return dev_err_probe(dev, ret,
					     "failed to request mediatek dptx irq\n");

		mtk_dp->need_debounce = true;
		timer_setup(&mtk_dp->debounce_timer, mtk_dp_debounce_timer, 0);
	}

	mtk_dp->aux.name = "aux_mtk_dp";
	mtk_dp->aux.dev = dev;
	mtk_dp->aux.transfer = mtk_dp_aux_transfer;
	mtk_dp->aux.wait_hpd_asserted = mtk_dp_wait_hpd_asserted;
	drm_dp_aux_init(&mtk_dp->aux);

	platform_set_drvdata(pdev, mtk_dp);

	if (mtk_dp->data->audio_supported) {
		mutex_init(&mtk_dp->update_plugged_status_lock);

		ret = mtk_dp_register_audio_driver(dev);
		if (ret)
			return dev_err_probe(dev, ret,
					     "Failed to register audio driver\n");
	}

	ret = mtk_dp_register_phy(mtk_dp);
	if (ret)
		return ret;

	mtk_dp->bridge.funcs = &mtk_dp_bridge_funcs;
	mtk_dp->bridge.of_node = dev->of_node;
	mtk_dp->bridge.type = mtk_dp->data->bridge_type;

	if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP) {
		/*
		 * Set the data lanes to idle in case the bootloader didn't
		 * properly close the eDP port to avoid stalls and then
		 * reinitialize, reset and power on the AUX block.
		 */
		mtk_dp_set_idle_pattern(mtk_dp, true);
		mtk_dp_initialize_aux_settings(mtk_dp);
		mtk_dp_power_enable(mtk_dp);

		/* Disable HW interrupts: we don't need any for eDP */
		mtk_dp_hwirq_enable(mtk_dp, false);

		/*
		 * Power on the AUX to allow reading the EDID from the aux-bus:
		 * please note that it is necessary to call power off in the
		 * .done_probing() callback (mtk_dp_edp_link_panel), as only
		 * there we can safely assume that we have finished reading
		 * the EDID.
		 */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL_LANE,
				   DP_PWR_STATE_MASK);

		ret = devm_of_dp_aux_populate_bus(&mtk_dp->aux, mtk_dp_edp_link_panel);
		if (ret) {
			/* -ENODEV means that the panel is not on the aux-bus */
			if (ret == -ENODEV) {
				ret = mtk_dp_edp_link_panel(&mtk_dp->aux);
				if (ret)
					return ret;
			} else {
				mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
						   DP_PWR_STATE_BANDGAP_TPLL,
						   DP_PWR_STATE_MASK);
				mtk_dp_power_disable(mtk_dp);
				return ret;
			}
		}
	} else {
		mtk_dp->bridge.ops = DRM_BRIDGE_OP_DETECT |
				     DRM_BRIDGE_OP_EDID | DRM_BRIDGE_OP_HPD;
		ret = devm_drm_bridge_add(dev, &mtk_dp->bridge);
		if (ret)
			return dev_err_probe(dev, ret, "Failed to add bridge\n");
	}

	pm_runtime_enable(dev);
	pm_runtime_get_sync(dev);

	return 0;
}

static void mtk_dp_remove(struct platform_device *pdev)
{
	struct mtk_dp *mtk_dp = platform_get_drvdata(pdev);

	pm_runtime_put(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP)
		del_timer_sync(&mtk_dp->debounce_timer);
	platform_device_unregister(mtk_dp->phy_dev);
	if (mtk_dp->audio_pdev)
		platform_device_unregister(mtk_dp->audio_pdev);
}

#ifdef CONFIG_PM_SLEEP
static int mtk_dp_suspend(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	mtk_dp_power_disable(mtk_dp);
	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP)
		mtk_dp_hwirq_enable(mtk_dp, false);
	pm_runtime_put_sync(dev);

	return 0;
}

static int mtk_dp_resume(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	pm_runtime_get_sync(dev);
	mtk_dp_init_port(mtk_dp);
	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP)
		mtk_dp_hwirq_enable(mtk_dp, true);
	mtk_dp_power_enable(mtk_dp);

	return 0;
}
#endif

static SIMPLE_DEV_PM_OPS(mtk_dp_pm_ops, mtk_dp_suspend, mtk_dp_resume);

static const struct mtk_dp_data mt8188_dp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_DisplayPort,
	.smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE,
	.efuse_fmt = mt8188_dp_efuse_fmt,
	.audio_supported = true,
	.audio_pkt_in_hblank_area = true,
	.audio_m_div2_bit = MT8188_AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2,
};

static const struct mtk_dp_data mt8195_edp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_eDP,
	.smc_cmd = MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE,
	.efuse_fmt = mt8195_edp_efuse_fmt,
	.audio_supported = false,
	.audio_m_div2_bit = MT8195_AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2,
};

static const struct mtk_dp_data mt8195_dp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_DisplayPort,
	.smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE,
	.efuse_fmt = mt8195_dp_efuse_fmt,
	.audio_supported = true,
	.audio_m_div2_bit = MT8195_AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2,
};
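
/*
 * Match table: note that the MT8188 eDP TX reuses the MT8195 eDP platform
 * data above, while each DP TX instance gets its own per-SoC data.
 */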
static const struct of_device_id mtk_dp_of_match[] = {
	{
		.compatible = "mediatek,mt8188-edp-tx",
		.data = &mt8195_edp_data,
	},
	{
		.compatible = "mediatek,mt8188-dp-tx",
		.data = &mt8188_dp_data,
	},
	{
		.compatible = "mediatek,mt8195-edp-tx",
		.data = &mt8195_edp_data,
	},
	{
		.compatible = "mediatek,mt8195-dp-tx",
		.data = &mt8195_dp_data,
	},
	{},
};
MODULE_DEVICE_TABLE(of, mtk_dp_of_match);

static struct platform_driver mtk_dp_driver = {
	.probe = mtk_dp_probe,
	.remove = mtk_dp_remove,
	.driver = {
		.name = "mediatek-drm-dp",
		.of_match_table = mtk_dp_of_match,
		.pm = &mtk_dp_pm_ops,
	},
};

module_platform_driver(mtk_dp_driver);

MODULE_AUTHOR("Jitao Shi <jitao.shi@mediatek.com>");
MODULE_AUTHOR("Markus Schneider-Pargmann <msp@baylibre.com>");
MODULE_AUTHOR("Bo-Chen Chen <rex-bc.chen@mediatek.com>");
MODULE_DESCRIPTION("MediaTek DisplayPort Driver");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: phy_mtk_dp");