1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * Copyright (c) 2019-2022 MediaTek Inc. 4 * Copyright (c) 2022 BayLibre 5 */ 6 7 #include <drm/display/drm_dp_aux_bus.h> 8 #include <drm/display/drm_dp.h> 9 #include <drm/display/drm_dp_helper.h> 10 #include <drm/drm_atomic_helper.h> 11 #include <drm/drm_bridge.h> 12 #include <drm/drm_crtc.h> 13 #include <drm/drm_edid.h> 14 #include <drm/drm_of.h> 15 #include <drm/drm_panel.h> 16 #include <drm/drm_print.h> 17 #include <drm/drm_probe_helper.h> 18 #include <linux/arm-smccc.h> 19 #include <linux/clk.h> 20 #include <linux/delay.h> 21 #include <linux/errno.h> 22 #include <linux/kernel.h> 23 #include <linux/media-bus-format.h> 24 #include <linux/nvmem-consumer.h> 25 #include <linux/of.h> 26 #include <linux/of_irq.h> 27 #include <linux/of_platform.h> 28 #include <linux/phy/phy.h> 29 #include <linux/platform_device.h> 30 #include <linux/pm_runtime.h> 31 #include <linux/regmap.h> 32 #include <linux/soc/mediatek/mtk_sip_svc.h> 33 #include <sound/hdmi-codec.h> 34 #include <video/videomode.h> 35 36 #include "mtk_dp_reg.h" 37 38 #define MTK_DP_SIP_CONTROL_AARCH32 MTK_SIP_SMC_CMD(0x523) 39 #define MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE (BIT(0) | BIT(5)) 40 #define MTK_DP_SIP_ATF_VIDEO_UNMUTE BIT(5) 41 42 #define MTK_DP_THREAD_CABLE_STATE_CHG BIT(0) 43 #define MTK_DP_THREAD_HPD_EVENT BIT(1) 44 45 #define MTK_DP_4P1T 4 46 #define MTK_DP_HDE 2 47 #define MTK_DP_PIX_PER_ADDR 2 48 #define MTK_DP_AUX_WAIT_REPLY_COUNT 20 49 #define MTK_DP_TBC_BUF_READ_START_ADDR 0x8 50 #define MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY 5 51 #define MTK_DP_TRAIN_DOWNSCALE_RETRY 10 52 #define MTK_DP_VERSION 0x11 53 #define MTK_DP_SDP_AUI 0x4 54 55 enum { 56 MTK_DP_CAL_GLB_BIAS_TRIM = 0, 57 MTK_DP_CAL_CLKTX_IMPSE, 58 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0, 59 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1, 60 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2, 61 MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3, 62 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0, 63 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1, 64 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2, 65 MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3, 66 MTK_DP_CAL_MAX, 67 }; 68 69 struct mtk_dp_train_info { 70 bool sink_ssc; 71 bool cable_plugged_in; 72 /* link_rate is in multiple of 0.27Gbps */ 73 int link_rate; 74 int lane_count; 75 unsigned int channel_eq_pattern; 76 }; 77 78 struct mtk_dp_audio_cfg { 79 bool detect_monitor; 80 int sad_count; 81 int sample_rate; 82 int word_length_bits; 83 int channels; 84 }; 85 86 struct mtk_dp_info { 87 enum dp_pixelformat format; 88 struct videomode vm; 89 struct mtk_dp_audio_cfg audio_cur_cfg; 90 }; 91 92 struct mtk_dp_efuse_fmt { 93 unsigned short idx; 94 unsigned short shift; 95 unsigned short mask; 96 unsigned short min_val; 97 unsigned short max_val; 98 unsigned short default_val; 99 }; 100 101 struct mtk_dp { 102 bool enabled; 103 bool need_debounce; 104 int irq; 105 u8 max_lanes; 106 u8 max_linkrate; 107 u8 rx_cap[DP_RECEIVER_CAP_SIZE]; 108 u32 cal_data[MTK_DP_CAL_MAX]; 109 u32 irq_thread_handle; 110 /* irq_thread_lock is used to protect irq_thread_handle */ 111 spinlock_t irq_thread_lock; 112 113 struct device *dev; 114 struct drm_bridge bridge; 115 struct drm_bridge *next_bridge; 116 struct drm_connector *conn; 117 struct drm_device *drm_dev; 118 struct drm_dp_aux aux; 119 120 const struct mtk_dp_data *data; 121 struct mtk_dp_info info; 122 struct mtk_dp_train_info train_info; 123 124 struct platform_device *phy_dev; 125 struct phy *phy; 126 struct regmap *regs; 127 struct timer_list debounce_timer; 128 129 /* For audio */ 130 bool audio_enable; 131 hdmi_codec_plugged_cb plugged_cb; 132 struct platform_device *audio_pdev; 133 134 struct device *codec_dev; 135 /* protect the plugged_cb as it's used in both bridge ops and audio */ 136 struct mutex update_plugged_status_lock; 137 }; 138 139 struct mtk_dp_data { 140 int bridge_type; 141 unsigned int smc_cmd; 142 const struct mtk_dp_efuse_fmt *efuse_fmt; 143 bool audio_supported; 144 bool audio_pkt_in_hblank_area; 145 u16 audio_m_div2_bit; 146 }; 147 148 static const struct mtk_dp_efuse_fmt mt8195_edp_efuse_fmt[MTK_DP_CAL_MAX] = { 149 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 150 .idx = 3, 151 .shift = 27, 152 .mask = 0x1f, 153 .min_val = 1, 154 .max_val = 0x1e, 155 .default_val = 0xf, 156 }, 157 [MTK_DP_CAL_CLKTX_IMPSE] = { 158 .idx = 0, 159 .shift = 9, 160 .mask = 0xf, 161 .min_val = 1, 162 .max_val = 0xe, 163 .default_val = 0x8, 164 }, 165 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 166 .idx = 2, 167 .shift = 28, 168 .mask = 0xf, 169 .min_val = 1, 170 .max_val = 0xe, 171 .default_val = 0x8, 172 }, 173 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 174 .idx = 2, 175 .shift = 20, 176 .mask = 0xf, 177 .min_val = 1, 178 .max_val = 0xe, 179 .default_val = 0x8, 180 }, 181 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 182 .idx = 2, 183 .shift = 12, 184 .mask = 0xf, 185 .min_val = 1, 186 .max_val = 0xe, 187 .default_val = 0x8, 188 }, 189 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 190 .idx = 2, 191 .shift = 4, 192 .mask = 0xf, 193 .min_val = 1, 194 .max_val = 0xe, 195 .default_val = 0x8, 196 }, 197 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 198 .idx = 2, 199 .shift = 24, 200 .mask = 0xf, 201 .min_val = 1, 202 .max_val = 0xe, 203 .default_val = 0x8, 204 }, 205 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 206 .idx = 2, 207 .shift = 16, 208 .mask = 0xf, 209 .min_val = 1, 210 .max_val = 0xe, 211 .default_val = 0x8, 212 }, 213 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 214 .idx = 2, 215 .shift = 8, 216 .mask = 0xf, 217 .min_val = 1, 218 .max_val = 0xe, 219 .default_val = 0x8, 220 }, 221 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 222 .idx = 2, 223 .shift = 0, 224 .mask = 0xf, 225 .min_val = 1, 226 .max_val = 0xe, 227 .default_val = 0x8, 228 }, 229 }; 230 231 static const struct mtk_dp_efuse_fmt mt8195_dp_efuse_fmt[MTK_DP_CAL_MAX] = { 232 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 233 .idx = 0, 234 .shift = 27, 235 .mask = 0x1f, 236 .min_val = 1, 237 .max_val = 0x1e, 238 .default_val = 0xf, 239 }, 240 [MTK_DP_CAL_CLKTX_IMPSE] = { 241 .idx = 0, 242 .shift = 13, 243 .mask = 0xf, 244 .min_val = 1, 245 .max_val = 0xe, 246 .default_val = 0x8, 247 }, 248 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 249 .idx = 1, 250 .shift = 28, 251 .mask = 0xf, 252 .min_val = 1, 253 .max_val = 0xe, 254 .default_val = 0x8, 255 }, 256 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 257 .idx = 1, 258 .shift = 20, 259 .mask = 0xf, 260 .min_val = 1, 261 .max_val = 0xe, 262 .default_val = 0x8, 263 }, 264 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 265 .idx = 1, 266 .shift = 12, 267 .mask = 0xf, 268 .min_val = 1, 269 .max_val = 0xe, 270 .default_val = 0x8, 271 }, 272 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 273 .idx = 1, 274 .shift = 4, 275 .mask = 0xf, 276 .min_val = 1, 277 .max_val = 0xe, 278 .default_val = 0x8, 279 }, 280 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 281 .idx = 1, 282 .shift = 24, 283 .mask = 0xf, 284 .min_val = 1, 285 .max_val = 0xe, 286 .default_val = 0x8, 287 }, 288 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 289 .idx = 1, 290 .shift = 16, 291 .mask = 0xf, 292 .min_val = 1, 293 .max_val = 0xe, 294 .default_val = 0x8, 295 }, 296 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 297 .idx = 1, 298 .shift = 8, 299 .mask = 0xf, 300 .min_val = 1, 301 .max_val = 0xe, 302 .default_val = 0x8, 303 }, 304 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 305 .idx = 1, 306 .shift = 0, 307 .mask = 0xf, 308 .min_val = 1, 309 .max_val = 0xe, 310 .default_val = 0x8, 311 }, 312 }; 313 314 static struct regmap_config mtk_dp_regmap_config = { 315 .reg_bits = 32, 316 .val_bits = 32, 317 .reg_stride = 4, 318 .max_register = SEC_OFFSET + 0x90, 319 .name = "mtk-dp-registers", 320 }; 321 322 static struct mtk_dp *mtk_dp_from_bridge(struct drm_bridge *b) 323 { 324 return container_of(b, struct mtk_dp, bridge); 325 } 326 327 static u32 mtk_dp_read(struct mtk_dp *mtk_dp, u32 offset) 328 { 329 u32 read_val; 330 int ret; 331 332 ret = regmap_read(mtk_dp->regs, offset, &read_val); 333 if (ret) { 334 dev_err(mtk_dp->dev, "Failed to read register 0x%x: %d\n", 335 offset, ret); 336 return 0; 337 } 338 339 return read_val; 340 } 341 342 static int mtk_dp_write(struct mtk_dp *mtk_dp, u32 offset, u32 val) 343 { 344 int ret = regmap_write(mtk_dp->regs, offset, val); 345 346 if (ret) 347 dev_err(mtk_dp->dev, 348 "Failed to write register 0x%x with value 0x%x\n", 349 offset, val); 350 return ret; 351 } 352 353 static int mtk_dp_update_bits(struct mtk_dp *mtk_dp, u32 offset, 354 u32 val, u32 mask) 355 { 356 int ret = regmap_update_bits(mtk_dp->regs, offset, mask, val); 357 358 if (ret) 359 dev_err(mtk_dp->dev, 360 "Failed to update register 0x%x with value 0x%x, mask 0x%x\n", 361 offset, val, mask); 362 return ret; 363 } 364 365 static void mtk_dp_bulk_16bit_write(struct mtk_dp *mtk_dp, u32 offset, u8 *buf, 366 size_t length) 367 { 368 int i; 369 370 /* 2 bytes per register */ 371 for (i = 0; i < length; i += 2) { 372 u32 val = buf[i] | (i + 1 < length ? buf[i + 1] << 8 : 0); 373 374 if (mtk_dp_write(mtk_dp, offset + i * 2, val)) 375 return; 376 } 377 } 378 379 static void mtk_dp_msa_bypass_enable(struct mtk_dp *mtk_dp, bool enable) 380 { 381 u32 mask = HTOTAL_SEL_DP_ENC0_P0 | VTOTAL_SEL_DP_ENC0_P0 | 382 HSTART_SEL_DP_ENC0_P0 | VSTART_SEL_DP_ENC0_P0 | 383 HWIDTH_SEL_DP_ENC0_P0 | VHEIGHT_SEL_DP_ENC0_P0 | 384 HSP_SEL_DP_ENC0_P0 | HSW_SEL_DP_ENC0_P0 | 385 VSP_SEL_DP_ENC0_P0 | VSW_SEL_DP_ENC0_P0; 386 387 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, enable ? 0 : mask, mask); 388 } 389 390 static void mtk_dp_set_msa(struct mtk_dp *mtk_dp) 391 { 392 struct drm_display_mode mode; 393 struct videomode *vm = &mtk_dp->info.vm; 394 395 drm_display_mode_from_videomode(vm, &mode); 396 397 /* horizontal */ 398 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3010, 399 mode.htotal, HTOTAL_SW_DP_ENC0_P0_MASK); 400 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3018, 401 vm->hsync_len + vm->hback_porch, 402 HSTART_SW_DP_ENC0_P0_MASK); 403 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028, 404 vm->hsync_len, HSW_SW_DP_ENC0_P0_MASK); 405 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028, 406 0, HSP_SW_DP_ENC0_P0_MASK); 407 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3020, 408 vm->hactive, HWIDTH_SW_DP_ENC0_P0_MASK); 409 410 /* vertical */ 411 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3014, 412 mode.vtotal, VTOTAL_SW_DP_ENC0_P0_MASK); 413 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_301C, 414 vm->vsync_len + vm->vback_porch, 415 VSTART_SW_DP_ENC0_P0_MASK); 416 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C, 417 vm->vsync_len, VSW_SW_DP_ENC0_P0_MASK); 418 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C, 419 0, VSP_SW_DP_ENC0_P0_MASK); 420 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3024, 421 vm->vactive, VHEIGHT_SW_DP_ENC0_P0_MASK); 422 423 /* horizontal */ 424 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3064, 425 vm->hactive, HDE_NUM_LAST_DP_ENC0_P0_MASK); 426 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3154, 427 mode.htotal, PGEN_HTOTAL_DP_ENC0_P0_MASK); 428 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3158, 429 vm->hfront_porch, 430 PGEN_HSYNC_RISING_DP_ENC0_P0_MASK); 431 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_315C, 432 vm->hsync_len, 433 PGEN_HSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK); 434 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3160, 435 vm->hback_porch + vm->hsync_len, 436 PGEN_HFDE_START_DP_ENC0_P0_MASK); 437 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3164, 438 vm->hactive, 439 PGEN_HFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK); 440 441 /* vertical */ 442 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3168, 443 mode.vtotal, 444 PGEN_VTOTAL_DP_ENC0_P0_MASK); 445 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_316C, 446 vm->vfront_porch, 447 PGEN_VSYNC_RISING_DP_ENC0_P0_MASK); 448 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3170, 449 vm->vsync_len, 450 PGEN_VSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK); 451 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3174, 452 vm->vback_porch + vm->vsync_len, 453 PGEN_VFDE_START_DP_ENC0_P0_MASK); 454 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3178, 455 vm->vactive, 456 PGEN_VFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK); 457 } 458 459 static int mtk_dp_set_color_format(struct mtk_dp *mtk_dp, 460 enum dp_pixelformat color_format) 461 { 462 u32 val; 463 464 /* update MISC0 */ 465 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034, 466 color_format << DP_TEST_COLOR_FORMAT_SHIFT, 467 DP_TEST_COLOR_FORMAT_MASK); 468 469 switch (color_format) { 470 case DP_PIXELFORMAT_YUV422: 471 val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_YCBCR422; 472 break; 473 case DP_PIXELFORMAT_RGB: 474 val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_RGB; 475 break; 476 default: 477 drm_warn(mtk_dp->drm_dev, "Unsupported color format: %d\n", 478 color_format); 479 return -EINVAL; 480 } 481 482 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 483 val, PIXEL_ENCODE_FORMAT_DP_ENC0_P0_MASK); 484 return 0; 485 } 486 487 static void mtk_dp_set_color_depth(struct mtk_dp *mtk_dp) 488 { 489 /* Only support 8 bits currently */ 490 /* Update MISC0 */ 491 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034, 492 DP_MSA_MISC_8_BPC, DP_TEST_BIT_DEPTH_MASK); 493 494 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 495 VIDEO_COLOR_DEPTH_DP_ENC0_P0_8BIT, 496 VIDEO_COLOR_DEPTH_DP_ENC0_P0_MASK); 497 } 498 499 static void mtk_dp_config_mn_mode(struct mtk_dp *mtk_dp) 500 { 501 /* 0: hw mode, 1: sw mode */ 502 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 503 0, VIDEO_M_CODE_SEL_DP_ENC0_P0_MASK); 504 } 505 506 static void mtk_dp_set_sram_read_start(struct mtk_dp *mtk_dp, u32 val) 507 { 508 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 509 val, SRAM_START_READ_THRD_DP_ENC0_P0_MASK); 510 } 511 512 static void mtk_dp_setup_encoder(struct mtk_dp *mtk_dp) 513 { 514 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 515 VIDEO_MN_GEN_EN_DP_ENC0_P0, 516 VIDEO_MN_GEN_EN_DP_ENC0_P0); 517 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040, 518 SDP_DOWN_CNT_DP_ENC0_P0_VAL, 519 SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK); 520 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, 521 SDP_DOWN_CNT_IN_HBLANK_DP_ENC1_P0_VAL, 522 SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK); 523 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3300, 524 VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_VAL << 8, 525 VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_MASK); 526 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, 527 FIFO_READ_START_POINT_DP_ENC1_P0_VAL << 12, 528 FIFO_READ_START_POINT_DP_ENC1_P0_MASK); 529 mtk_dp_write(mtk_dp, MTK_DP_ENC1_P0_3368, DP_ENC1_P0_3368_VAL); 530 } 531 532 static void mtk_dp_pg_enable(struct mtk_dp *mtk_dp, bool enable) 533 { 534 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3038, 535 enable ? VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK : 0, 536 VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK); 537 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31B0, 538 PGEN_PATTERN_SEL_VAL << 4, PGEN_PATTERN_SEL_MASK); 539 } 540 541 static void mtk_dp_audio_setup_channels(struct mtk_dp *mtk_dp, 542 struct mtk_dp_audio_cfg *cfg) 543 { 544 u32 channel_enable_bits; 545 546 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3324, 547 AUDIO_SOURCE_MUX_DP_ENC1_P0_DPRX, 548 AUDIO_SOURCE_MUX_DP_ENC1_P0_MASK); 549 550 /* audio channel count change reset */ 551 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 552 DP_ENC_DUMMY_RW_1, DP_ENC_DUMMY_RW_1); 553 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3304, 554 AU_PRTY_REGEN_DP_ENC1_P0_MASK | 555 AU_CH_STS_REGEN_DP_ENC1_P0_MASK | 556 AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK, 557 AU_PRTY_REGEN_DP_ENC1_P0_MASK | 558 AU_CH_STS_REGEN_DP_ENC1_P0_MASK | 559 AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK); 560 561 switch (cfg->channels) { 562 case 2: 563 channel_enable_bits = AUDIO_2CH_SEL_DP_ENC0_P0_MASK | 564 AUDIO_2CH_EN_DP_ENC0_P0_MASK; 565 break; 566 case 8: 567 default: 568 channel_enable_bits = AUDIO_8CH_SEL_DP_ENC0_P0_MASK | 569 AUDIO_8CH_EN_DP_ENC0_P0_MASK; 570 break; 571 } 572 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088, 573 channel_enable_bits | AU_EN_DP_ENC0_P0, 574 AUDIO_2CH_SEL_DP_ENC0_P0_MASK | 575 AUDIO_2CH_EN_DP_ENC0_P0_MASK | 576 AUDIO_8CH_SEL_DP_ENC0_P0_MASK | 577 AUDIO_8CH_EN_DP_ENC0_P0_MASK | 578 AU_EN_DP_ENC0_P0); 579 580 /* audio channel count change reset */ 581 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 0, DP_ENC_DUMMY_RW_1); 582 583 /* enable audio reset */ 584 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 585 DP_ENC_DUMMY_RW_1_AUDIO_RST_EN, 586 DP_ENC_DUMMY_RW_1_AUDIO_RST_EN); 587 } 588 589 static void mtk_dp_audio_channel_status_set(struct mtk_dp *mtk_dp, 590 struct mtk_dp_audio_cfg *cfg) 591 { 592 struct snd_aes_iec958 iec = { 0 }; 593 594 switch (cfg->sample_rate) { 595 case 32000: 596 iec.status[3] = IEC958_AES3_CON_FS_32000; 597 break; 598 case 44100: 599 iec.status[3] = IEC958_AES3_CON_FS_44100; 600 break; 601 case 48000: 602 iec.status[3] = IEC958_AES3_CON_FS_48000; 603 break; 604 case 88200: 605 iec.status[3] = IEC958_AES3_CON_FS_88200; 606 break; 607 case 96000: 608 iec.status[3] = IEC958_AES3_CON_FS_96000; 609 break; 610 case 192000: 611 iec.status[3] = IEC958_AES3_CON_FS_192000; 612 break; 613 default: 614 iec.status[3] = IEC958_AES3_CON_FS_NOTID; 615 break; 616 } 617 618 switch (cfg->word_length_bits) { 619 case 16: 620 iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16; 621 break; 622 case 20: 623 iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16 | 624 IEC958_AES4_CON_MAX_WORDLEN_24; 625 break; 626 case 24: 627 iec.status[4] = IEC958_AES4_CON_WORDLEN_24_20 | 628 IEC958_AES4_CON_MAX_WORDLEN_24; 629 break; 630 default: 631 iec.status[4] = IEC958_AES4_CON_WORDLEN_NOTID; 632 } 633 634 /* IEC 60958 consumer channel status bits */ 635 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_308C, 636 0, CH_STATUS_0_DP_ENC0_P0_MASK); 637 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3090, 638 iec.status[3] << 8, CH_STATUS_1_DP_ENC0_P0_MASK); 639 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3094, 640 iec.status[4], CH_STATUS_2_DP_ENC0_P0_MASK); 641 } 642 643 static void mtk_dp_audio_sdp_asp_set_channels(struct mtk_dp *mtk_dp, 644 int channels) 645 { 646 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_312C, 647 (min(8, channels) - 1) << 8, 648 ASP_HB2_DP_ENC0_P0_MASK | ASP_HB3_DP_ENC0_P0_MASK); 649 } 650 651 static void mtk_dp_audio_set_divider(struct mtk_dp *mtk_dp) 652 { 653 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30BC, 654 mtk_dp->data->audio_m_div2_bit, 655 AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_MASK); 656 } 657 658 static void mtk_dp_sdp_trigger_aui(struct mtk_dp *mtk_dp) 659 { 660 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280, 661 MTK_DP_SDP_AUI, SDP_PACKET_TYPE_DP_ENC1_P0_MASK); 662 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280, 663 SDP_PACKET_W_DP_ENC1_P0, SDP_PACKET_W_DP_ENC1_P0); 664 } 665 666 static void mtk_dp_sdp_set_data(struct mtk_dp *mtk_dp, u8 *data_bytes) 667 { 668 mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_ENC1_P0_3200, 669 data_bytes, 0x10); 670 } 671 672 static void mtk_dp_sdp_set_header_aui(struct mtk_dp *mtk_dp, 673 struct dp_sdp_header *header) 674 { 675 u32 db_addr = MTK_DP_ENC0_P0_30D8 + (MTK_DP_SDP_AUI - 1) * 8; 676 677 mtk_dp_bulk_16bit_write(mtk_dp, db_addr, (u8 *)header, 4); 678 } 679 680 static void mtk_dp_disable_sdp_aui(struct mtk_dp *mtk_dp) 681 { 682 /* Disable periodic send */ 683 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 0, 684 0xff << ((MTK_DP_ENC0_P0_30A8 & 3) * 8)); 685 } 686 687 static void mtk_dp_setup_sdp_aui(struct mtk_dp *mtk_dp, 688 struct dp_sdp *sdp) 689 { 690 u32 shift; 691 692 mtk_dp_sdp_set_data(mtk_dp, sdp->db); 693 mtk_dp_sdp_set_header_aui(mtk_dp, &sdp->sdp_header); 694 mtk_dp_disable_sdp_aui(mtk_dp); 695 696 shift = (MTK_DP_ENC0_P0_30A8 & 3) * 8; 697 698 mtk_dp_sdp_trigger_aui(mtk_dp); 699 /* Enable periodic sending */ 700 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 701 0x05 << shift, 0xff << shift); 702 } 703 704 static void mtk_dp_aux_irq_clear(struct mtk_dp *mtk_dp) 705 { 706 mtk_dp_write(mtk_dp, MTK_DP_AUX_P0_3640, DP_AUX_P0_3640_VAL); 707 } 708 709 static void mtk_dp_aux_set_cmd(struct mtk_dp *mtk_dp, u8 cmd, u32 addr) 710 { 711 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3644, 712 cmd, MCU_REQUEST_COMMAND_AUX_TX_P0_MASK); 713 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3648, 714 addr, MCU_REQUEST_ADDRESS_LSB_AUX_TX_P0_MASK); 715 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_364C, 716 addr >> 16, MCU_REQUEST_ADDRESS_MSB_AUX_TX_P0_MASK); 717 } 718 719 static void mtk_dp_aux_clear_fifo(struct mtk_dp *mtk_dp) 720 { 721 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650, 722 MCU_ACK_TRAN_COMPLETE_AUX_TX_P0, 723 MCU_ACK_TRAN_COMPLETE_AUX_TX_P0 | 724 PHY_FIFO_RST_AUX_TX_P0_MASK | 725 MCU_REQ_DATA_NUM_AUX_TX_P0_MASK); 726 } 727 728 static void mtk_dp_aux_request_ready(struct mtk_dp *mtk_dp) 729 { 730 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3630, 731 AUX_TX_REQUEST_READY_AUX_TX_P0, 732 AUX_TX_REQUEST_READY_AUX_TX_P0); 733 } 734 735 static void mtk_dp_aux_fill_write_fifo(struct mtk_dp *mtk_dp, u8 *buf, 736 size_t length) 737 { 738 mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_AUX_P0_3708, buf, length); 739 } 740 741 static void mtk_dp_aux_read_rx_fifo(struct mtk_dp *mtk_dp, u8 *buf, 742 size_t length, int read_delay) 743 { 744 int read_pos; 745 746 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620, 747 0, AUX_RD_MODE_AUX_TX_P0_MASK); 748 749 for (read_pos = 0; read_pos < length; read_pos++) { 750 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620, 751 AUX_RX_FIFO_READ_PULSE_TX_P0, 752 AUX_RX_FIFO_READ_PULSE_TX_P0); 753 754 /* Hardware needs time to update the data */ 755 usleep_range(read_delay, read_delay * 2); 756 buf[read_pos] = (u8)(mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3620) & 757 AUX_RX_FIFO_READ_DATA_AUX_TX_P0_MASK); 758 } 759 } 760 761 static void mtk_dp_aux_set_length(struct mtk_dp *mtk_dp, size_t length) 762 { 763 if (length > 0) { 764 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650, 765 (length - 1) << 12, 766 MCU_REQ_DATA_NUM_AUX_TX_P0_MASK); 767 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 768 0, 769 AUX_NO_LENGTH_AUX_TX_P0 | 770 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 771 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 772 } else { 773 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 774 AUX_NO_LENGTH_AUX_TX_P0, 775 AUX_NO_LENGTH_AUX_TX_P0 | 776 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 777 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 778 } 779 } 780 781 static int mtk_dp_aux_wait_for_completion(struct mtk_dp *mtk_dp, bool is_read) 782 { 783 int wait_reply = MTK_DP_AUX_WAIT_REPLY_COUNT; 784 785 while (--wait_reply) { 786 u32 aux_irq_status; 787 788 if (is_read) { 789 u32 fifo_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3618); 790 791 if (fifo_status & 792 (AUX_RX_FIFO_WRITE_POINTER_AUX_TX_P0_MASK | 793 AUX_RX_FIFO_FULL_AUX_TX_P0_MASK)) { 794 return 0; 795 } 796 } 797 798 aux_irq_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3640); 799 if (aux_irq_status & AUX_RX_AUX_RECV_COMPLETE_IRQ_AUX_TX_P0) 800 return 0; 801 802 if (aux_irq_status & AUX_400US_TIMEOUT_IRQ_AUX_TX_P0) 803 return -ETIMEDOUT; 804 805 /* Give the hardware a chance to reach completion before retrying */ 806 usleep_range(100, 500); 807 } 808 809 return -ETIMEDOUT; 810 } 811 812 static int mtk_dp_aux_do_transfer(struct mtk_dp *mtk_dp, bool is_read, u8 cmd, 813 u32 addr, u8 *buf, size_t length, u8 *reply_cmd) 814 { 815 int ret; 816 817 if (is_read && (length > DP_AUX_MAX_PAYLOAD_BYTES || 818 (cmd == DP_AUX_NATIVE_READ && !length))) 819 return -EINVAL; 820 821 if (!is_read) 822 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704, 823 AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0, 824 AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0); 825 826 /* We need to clear fifo and irq before sending commands to the sink device. */ 827 mtk_dp_aux_clear_fifo(mtk_dp); 828 mtk_dp_aux_irq_clear(mtk_dp); 829 830 mtk_dp_aux_set_cmd(mtk_dp, cmd, addr); 831 mtk_dp_aux_set_length(mtk_dp, length); 832 833 if (!is_read) { 834 if (length) 835 mtk_dp_aux_fill_write_fifo(mtk_dp, buf, length); 836 837 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704, 838 AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK, 839 AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK); 840 } 841 842 mtk_dp_aux_request_ready(mtk_dp); 843 844 /* Wait for feedback from sink device. */ 845 ret = mtk_dp_aux_wait_for_completion(mtk_dp, is_read); 846 847 *reply_cmd = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3624) & 848 AUX_RX_REPLY_COMMAND_AUX_TX_P0_MASK; 849 850 if (ret) { 851 u32 phy_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3628) & 852 AUX_RX_PHY_STATE_AUX_TX_P0_MASK; 853 if (phy_status != AUX_RX_PHY_STATE_AUX_TX_P0_RX_IDLE) { 854 dev_err(mtk_dp->dev, 855 "AUX Rx Aux hang, need SW reset\n"); 856 return -EIO; 857 } 858 859 return -ETIMEDOUT; 860 } 861 862 if (!length) { 863 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 864 0, 865 AUX_NO_LENGTH_AUX_TX_P0 | 866 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 867 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 868 } else if (is_read) { 869 int read_delay; 870 871 if (cmd == (DP_AUX_I2C_READ | DP_AUX_I2C_MOT) || 872 cmd == DP_AUX_I2C_READ) 873 read_delay = 500; 874 else 875 read_delay = 100; 876 877 mtk_dp_aux_read_rx_fifo(mtk_dp, buf, length, read_delay); 878 } 879 880 return 0; 881 } 882 883 static void mtk_dp_set_swing_pre_emphasis(struct mtk_dp *mtk_dp, int lane_num, 884 int swing_val, int preemphasis) 885 { 886 u32 lane_shift = lane_num * DP_TX1_VOLT_SWING_SHIFT; 887 888 dev_dbg(mtk_dp->dev, 889 "link training: swing_val = 0x%x, pre-emphasis = 0x%x\n", 890 swing_val, preemphasis); 891 892 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 893 swing_val << (DP_TX0_VOLT_SWING_SHIFT + lane_shift), 894 DP_TX0_VOLT_SWING_MASK << lane_shift); 895 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 896 preemphasis << (DP_TX0_PRE_EMPH_SHIFT + lane_shift), 897 DP_TX0_PRE_EMPH_MASK << lane_shift); 898 } 899 900 static void mtk_dp_reset_swing_pre_emphasis(struct mtk_dp *mtk_dp) 901 { 902 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 903 0, 904 DP_TX0_VOLT_SWING_MASK | 905 DP_TX1_VOLT_SWING_MASK | 906 DP_TX2_VOLT_SWING_MASK | 907 DP_TX3_VOLT_SWING_MASK | 908 DP_TX0_PRE_EMPH_MASK | 909 DP_TX1_PRE_EMPH_MASK | 910 DP_TX2_PRE_EMPH_MASK | 911 DP_TX3_PRE_EMPH_MASK); 912 } 913 914 static u32 mtk_dp_swirq_get_clear(struct mtk_dp *mtk_dp) 915 { 916 u32 irq_status = mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_35D0) & 917 SW_IRQ_FINAL_STATUS_DP_TRANS_P0_MASK; 918 919 if (irq_status) { 920 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8, 921 irq_status, SW_IRQ_CLR_DP_TRANS_P0_MASK); 922 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8, 923 0, SW_IRQ_CLR_DP_TRANS_P0_MASK); 924 } 925 926 return irq_status; 927 } 928 929 static u32 mtk_dp_hwirq_get_clear(struct mtk_dp *mtk_dp) 930 { 931 u32 irq_status = (mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3418) & 932 IRQ_STATUS_DP_TRANS_P0_MASK) >> 12; 933 934 if (irq_status) { 935 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 936 irq_status, IRQ_CLR_DP_TRANS_P0_MASK); 937 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 938 0, IRQ_CLR_DP_TRANS_P0_MASK); 939 } 940 941 return irq_status; 942 } 943 944 static void mtk_dp_hwirq_enable(struct mtk_dp *mtk_dp, bool enable) 945 { 946 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 947 enable ? 0 : 948 IRQ_MASK_DP_TRANS_P0_DISC_IRQ | 949 IRQ_MASK_DP_TRANS_P0_CONN_IRQ | 950 IRQ_MASK_DP_TRANS_P0_INT_IRQ, 951 IRQ_MASK_DP_TRANS_P0_MASK); 952 } 953 954 static void mtk_dp_initialize_settings(struct mtk_dp *mtk_dp) 955 { 956 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_342C, 957 XTAL_FREQ_DP_TRANS_P0_DEFAULT, 958 XTAL_FREQ_DP_TRANS_P0_MASK); 959 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3540, 960 FEC_CLOCK_EN_MODE_DP_TRANS_P0, 961 FEC_CLOCK_EN_MODE_DP_TRANS_P0); 962 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31EC, 963 AUDIO_CH_SRC_SEL_DP_ENC0_P0, 964 AUDIO_CH_SRC_SEL_DP_ENC0_P0); 965 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C, 966 0, SDP_VSYNC_RISING_MASK_DP_ENC0_P0_MASK); 967 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_IRQ_MASK, 968 IRQ_MASK_AUX_TOP_IRQ, IRQ_MASK_AUX_TOP_IRQ); 969 } 970 971 static void mtk_dp_initialize_hpd_detect_settings(struct mtk_dp *mtk_dp) 972 { 973 u32 val; 974 /* Debounce threshold */ 975 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 976 8, HPD_DEB_THD_DP_TRANS_P0_MASK); 977 978 val = (HPD_INT_THD_DP_TRANS_P0_LOWER_500US | 979 HPD_INT_THD_DP_TRANS_P0_UPPER_1100US) << 4; 980 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 981 val, HPD_INT_THD_DP_TRANS_P0_MASK); 982 983 /* 984 * Connect threshold 1.5ms + 5 x 0.1ms = 2ms 985 * Disconnect threshold 1.5ms + 5 x 0.1ms = 2ms 986 */ 987 val = (5 << 8) | (5 << 12); 988 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 989 val, 990 HPD_DISC_THD_DP_TRANS_P0_MASK | 991 HPD_CONN_THD_DP_TRANS_P0_MASK); 992 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3430, 993 HPD_INT_THD_ECO_DP_TRANS_P0_HIGH_BOUND_EXT, 994 HPD_INT_THD_ECO_DP_TRANS_P0_MASK); 995 } 996 997 static void mtk_dp_initialize_aux_settings(struct mtk_dp *mtk_dp) 998 { 999 /* modify timeout threshold = 0x1595 */ 1000 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_360C, 1001 AUX_TIMEOUT_THR_AUX_TX_P0_VAL, 1002 AUX_TIMEOUT_THR_AUX_TX_P0_MASK); 1003 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3658, 1004 0, AUX_TX_OV_EN_AUX_TX_P0_MASK); 1005 /* 25 for 26M */ 1006 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3634, 1007 AUX_TX_OVER_SAMPLE_RATE_FOR_26M << 8, 1008 AUX_TX_OVER_SAMPLE_RATE_AUX_TX_P0_MASK); 1009 /* 13 for 26M */ 1010 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3614, 1011 AUX_RX_UI_CNT_THR_AUX_FOR_26M, 1012 AUX_RX_UI_CNT_THR_AUX_TX_P0_MASK); 1013 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_37C8, 1014 MTK_ATOP_EN_AUX_TX_P0, 1015 MTK_ATOP_EN_AUX_TX_P0); 1016 1017 /* Set complete reply mode for AUX */ 1018 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3690, 1019 RX_REPLY_COMPLETE_MODE_AUX_TX_P0, 1020 RX_REPLY_COMPLETE_MODE_AUX_TX_P0); 1021 } 1022 1023 static void mtk_dp_initialize_digital_settings(struct mtk_dp *mtk_dp) 1024 { 1025 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C, 1026 0, VBID_VIDEO_MUTE_DP_ENC0_P0_MASK); 1027 1028 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3368, 1029 BS2BS_MODE_DP_ENC1_P0_VAL << 12, 1030 BS2BS_MODE_DP_ENC1_P0_MASK); 1031 1032 /* dp tx encoder reset all sw */ 1033 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 1034 DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0, 1035 DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0); 1036 1037 /* Wait for sw reset to complete */ 1038 usleep_range(1000, 5000); 1039 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 1040 0, DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0); 1041 } 1042 1043 static void mtk_dp_digital_sw_reset(struct mtk_dp *mtk_dp) 1044 { 1045 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C, 1046 DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0, 1047 DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0); 1048 1049 /* Wait for sw reset to complete */ 1050 usleep_range(1000, 5000); 1051 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C, 1052 0, DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0); 1053 } 1054 1055 static void mtk_dp_set_lanes(struct mtk_dp *mtk_dp, int lanes) 1056 { 1057 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35F0, 1058 lanes == 0 ? 0 : DP_TRANS_DUMMY_RW_0, 1059 DP_TRANS_DUMMY_RW_0_MASK); 1060 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1061 lanes, LANE_NUM_DP_ENC0_P0_MASK); 1062 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_34A4, 1063 lanes << 2, LANE_NUM_DP_TRANS_P0_MASK); 1064 } 1065 1066 static void mtk_dp_get_calibration_data(struct mtk_dp *mtk_dp) 1067 { 1068 const struct mtk_dp_efuse_fmt *fmt; 1069 struct device *dev = mtk_dp->dev; 1070 struct nvmem_cell *cell; 1071 u32 *cal_data = mtk_dp->cal_data; 1072 u32 *buf; 1073 int i; 1074 size_t len; 1075 1076 cell = nvmem_cell_get(dev, "dp_calibration_data"); 1077 if (IS_ERR(cell)) { 1078 dev_warn(dev, "Failed to get nvmem cell dp_calibration_data\n"); 1079 goto use_default_val; 1080 } 1081 1082 buf = (u32 *)nvmem_cell_read(cell, &len); 1083 nvmem_cell_put(cell); 1084 1085 if (IS_ERR(buf) || ((len / sizeof(u32)) != 4)) { 1086 dev_warn(dev, "Failed to read nvmem_cell_read\n"); 1087 1088 if (!IS_ERR(buf)) 1089 kfree(buf); 1090 1091 goto use_default_val; 1092 } 1093 1094 for (i = 0; i < MTK_DP_CAL_MAX; i++) { 1095 fmt = &mtk_dp->data->efuse_fmt[i]; 1096 cal_data[i] = (buf[fmt->idx] >> fmt->shift) & fmt->mask; 1097 1098 if (cal_data[i] < fmt->min_val || cal_data[i] > fmt->max_val) { 1099 dev_warn(mtk_dp->dev, "Invalid efuse data, idx = %d\n", i); 1100 kfree(buf); 1101 goto use_default_val; 1102 } 1103 } 1104 kfree(buf); 1105 1106 return; 1107 1108 use_default_val: 1109 dev_warn(mtk_dp->dev, "Use default calibration data\n"); 1110 for (i = 0; i < MTK_DP_CAL_MAX; i++) 1111 cal_data[i] = mtk_dp->data->efuse_fmt[i].default_val; 1112 } 1113 1114 static void mtk_dp_set_calibration_data(struct mtk_dp *mtk_dp) 1115 { 1116 u32 *cal_data = mtk_dp->cal_data; 1117 1118 mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_DPAUX_TX, 1119 cal_data[MTK_DP_CAL_CLKTX_IMPSE] << 20, 1120 RG_CKM_PT0_CKTX_IMPSEL); 1121 mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_BIAS_GEN_00, 1122 cal_data[MTK_DP_CAL_GLB_BIAS_TRIM] << 16, 1123 RG_XTP_GLB_BIAS_INTR_CTRL); 1124 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0, 1125 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] << 12, 1126 RG_XTP_LN0_TX_IMPSEL_PMOS); 1127 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0, 1128 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] << 16, 1129 RG_XTP_LN0_TX_IMPSEL_NMOS); 1130 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1, 1131 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] << 12, 1132 RG_XTP_LN1_TX_IMPSEL_PMOS); 1133 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1, 1134 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] << 16, 1135 RG_XTP_LN1_TX_IMPSEL_NMOS); 1136 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2, 1137 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] << 12, 1138 RG_XTP_LN2_TX_IMPSEL_PMOS); 1139 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2, 1140 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] << 16, 1141 RG_XTP_LN2_TX_IMPSEL_NMOS); 1142 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3, 1143 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] << 12, 1144 RG_XTP_LN3_TX_IMPSEL_PMOS); 1145 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3, 1146 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] << 16, 1147 RG_XTP_LN3_TX_IMPSEL_NMOS); 1148 } 1149 1150 static int mtk_dp_phy_configure(struct mtk_dp *mtk_dp, 1151 u32 link_rate, int lane_count) 1152 { 1153 int ret; 1154 union phy_configure_opts phy_opts = { 1155 .dp = { 1156 .link_rate = drm_dp_bw_code_to_link_rate(link_rate) / 100, 1157 .set_rate = 1, 1158 .lanes = lane_count, 1159 .set_lanes = 1, 1160 .ssc = mtk_dp->train_info.sink_ssc, 1161 } 1162 }; 1163 1164 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, DP_PWR_STATE_BANDGAP, 1165 DP_PWR_STATE_MASK); 1166 1167 ret = phy_configure(mtk_dp->phy, &phy_opts); 1168 if (ret) 1169 return ret; 1170 1171 mtk_dp_set_calibration_data(mtk_dp); 1172 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1173 DP_PWR_STATE_BANDGAP_TPLL_LANE, DP_PWR_STATE_MASK); 1174 1175 return 0; 1176 } 1177 1178 static void mtk_dp_set_idle_pattern(struct mtk_dp *mtk_dp, bool enable) 1179 { 1180 u32 val = POST_MISC_DATA_LANE0_OV_DP_TRANS_P0_MASK | 1181 POST_MISC_DATA_LANE1_OV_DP_TRANS_P0_MASK | 1182 POST_MISC_DATA_LANE2_OV_DP_TRANS_P0_MASK | 1183 POST_MISC_DATA_LANE3_OV_DP_TRANS_P0_MASK; 1184 1185 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3580, 1186 enable ? val : 0, val); 1187 } 1188 1189 static void mtk_dp_train_set_pattern(struct mtk_dp *mtk_dp, int pattern) 1190 { 1191 /* TPS1 */ 1192 if (pattern == 1) 1193 mtk_dp_set_idle_pattern(mtk_dp, false); 1194 1195 mtk_dp_update_bits(mtk_dp, 1196 MTK_DP_TRANS_P0_3400, 1197 pattern ? BIT(pattern - 1) << 12 : 0, 1198 PATTERN1_EN_DP_TRANS_P0_MASK | 1199 PATTERN2_EN_DP_TRANS_P0_MASK | 1200 PATTERN3_EN_DP_TRANS_P0_MASK | 1201 PATTERN4_EN_DP_TRANS_P0_MASK); 1202 } 1203 1204 static void mtk_dp_set_enhanced_frame_mode(struct mtk_dp *mtk_dp) 1205 { 1206 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1207 ENHANCED_FRAME_EN_DP_ENC0_P0, 1208 ENHANCED_FRAME_EN_DP_ENC0_P0); 1209 } 1210 1211 static void mtk_dp_training_set_scramble(struct mtk_dp *mtk_dp, bool enable) 1212 { 1213 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3404, 1214 enable ? DP_SCR_EN_DP_TRANS_P0_MASK : 0, 1215 DP_SCR_EN_DP_TRANS_P0_MASK); 1216 } 1217 1218 static void mtk_dp_video_mute(struct mtk_dp *mtk_dp, bool enable) 1219 { 1220 struct arm_smccc_res res; 1221 u32 val = VIDEO_MUTE_SEL_DP_ENC0_P0 | 1222 (enable ? VIDEO_MUTE_SW_DP_ENC0_P0 : 0); 1223 1224 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1225 val, 1226 VIDEO_MUTE_SEL_DP_ENC0_P0 | 1227 VIDEO_MUTE_SW_DP_ENC0_P0); 1228 1229 arm_smccc_smc(MTK_DP_SIP_CONTROL_AARCH32, 1230 mtk_dp->data->smc_cmd, enable, 1231 0, 0, 0, 0, 0, &res); 1232 1233 dev_dbg(mtk_dp->dev, "smc cmd: 0x%x, p1: %s, ret: 0x%lx-0x%lx\n", 1234 mtk_dp->data->smc_cmd, enable ? "enable" : "disable", res.a0, res.a1); 1235 } 1236 1237 static void mtk_dp_audio_mute(struct mtk_dp *mtk_dp, bool mute) 1238 { 1239 u32 val[3]; 1240 1241 if (mute) { 1242 val[0] = VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 | 1243 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0; 1244 val[1] = 0; 1245 val[2] = 0; 1246 } else { 1247 val[0] = 0; 1248 val[1] = AU_EN_DP_ENC0_P0; 1249 /* Send one every two frames */ 1250 val[2] = 0x0F; 1251 } 1252 1253 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, 1254 val[0], 1255 VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 | 1256 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0); 1257 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088, 1258 val[1], AU_EN_DP_ENC0_P0); 1259 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A4, 1260 val[2], AU_TS_CFG_DP_ENC0_P0_MASK); 1261 } 1262 1263 static void mtk_dp_aux_panel_poweron(struct mtk_dp *mtk_dp, bool pwron) 1264 { 1265 if (pwron) { 1266 /* power on aux */ 1267 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1268 DP_PWR_STATE_BANDGAP_TPLL_LANE, 1269 DP_PWR_STATE_MASK); 1270 1271 /* power on panel */ 1272 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0); 1273 usleep_range(2000, 5000); 1274 } else { 1275 /* power off panel */ 1276 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3); 1277 usleep_range(2000, 3000); 1278 1279 /* power off aux */ 1280 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1281 DP_PWR_STATE_BANDGAP_TPLL, 1282 DP_PWR_STATE_MASK); 1283 } 1284 } 1285 1286 static void mtk_dp_power_enable(struct mtk_dp *mtk_dp) 1287 { 1288 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE, 1289 0, SW_RST_B_PHYD); 1290 1291 /* Wait for power enable */ 1292 usleep_range(10, 200); 1293 1294 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE, 1295 SW_RST_B_PHYD, SW_RST_B_PHYD); 1296 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1297 DP_PWR_STATE_BANDGAP_TPLL, DP_PWR_STATE_MASK); 1298 mtk_dp_write(mtk_dp, MTK_DP_1040, 1299 RG_DPAUX_RX_VALID_DEGLITCH_EN | RG_XTP_GLB_CKDET_EN | 1300 RG_DPAUX_RX_EN); 1301 mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 0, DA_CKM_CKTX0_EN_FORCE_EN); 1302 } 1303 1304 static void mtk_dp_power_disable(struct mtk_dp *mtk_dp) 1305 { 1306 mtk_dp_write(mtk_dp, MTK_DP_TOP_PWR_STATE, 0); 1307 1308 mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 1309 DA_CKM_CKTX0_EN_FORCE_EN, DA_CKM_CKTX0_EN_FORCE_EN); 1310 1311 /* Disable RX */ 1312 mtk_dp_write(mtk_dp, MTK_DP_1040, 0); 1313 mtk_dp_write(mtk_dp, MTK_DP_TOP_MEM_PD, 1314 0x550 | FUSE_SEL | MEM_ISO_EN); 1315 } 1316 1317 static void mtk_dp_initialize_priv_data(struct mtk_dp *mtk_dp) 1318 { 1319 bool plugged_in = (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP); 1320 1321 mtk_dp->train_info.link_rate = DP_LINK_BW_5_4; 1322 mtk_dp->train_info.lane_count = mtk_dp->max_lanes; 1323 mtk_dp->train_info.cable_plugged_in = plugged_in; 1324 1325 mtk_dp->info.format = DP_PIXELFORMAT_RGB; 1326 memset(&mtk_dp->info.vm, 0, sizeof(struct videomode)); 1327 mtk_dp->audio_enable = false; 1328 } 1329 1330 static void mtk_dp_sdp_set_down_cnt_init(struct mtk_dp *mtk_dp, 1331 u32 sram_read_start) 1332 { 1333 u32 sdp_down_cnt_init = 0; 1334 struct drm_display_mode mode; 1335 struct videomode *vm = &mtk_dp->info.vm; 1336 1337 drm_display_mode_from_videomode(vm, &mode); 1338 1339 if (mode.clock > 0) 1340 sdp_down_cnt_init = sram_read_start * 1341 mtk_dp->train_info.link_rate * 2700 * 8 / 1342 (mode.clock * 4); 1343 1344 switch (mtk_dp->train_info.lane_count) { 1345 case 1: 1346 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x1A); 1347 break; 1348 case 2: 1349 /* case for LowResolution && High Audio Sample Rate */ 1350 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x10); 1351 sdp_down_cnt_init += mode.vtotal <= 525 ? 4 : 0; 1352 break; 1353 case 4: 1354 default: 1355 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 6); 1356 break; 1357 } 1358 1359 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040, 1360 sdp_down_cnt_init, 1361 SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK); 1362 } 1363 1364 static void mtk_dp_sdp_set_down_cnt_init_in_hblank(struct mtk_dp *mtk_dp) 1365 { 1366 int pix_clk_mhz; 1367 u32 dc_offset; 1368 u32 spd_down_cnt_init = 0; 1369 struct drm_display_mode mode; 1370 struct videomode *vm = &mtk_dp->info.vm; 1371 1372 drm_display_mode_from_videomode(vm, &mode); 1373 1374 pix_clk_mhz = mtk_dp->info.format == DP_PIXELFORMAT_YUV420 ? 1375 mode.clock / 2000 : mode.clock / 1000; 1376 1377 switch (mtk_dp->train_info.lane_count) { 1378 case 1: 1379 spd_down_cnt_init = 0x20; 1380 break; 1381 case 2: 1382 dc_offset = (mode.vtotal <= 525) ? 0x14 : 0x00; 1383 spd_down_cnt_init = 0x18 + dc_offset; 1384 break; 1385 case 4: 1386 default: 1387 dc_offset = (mode.vtotal <= 525) ? 0x08 : 0x00; 1388 if (pix_clk_mhz > mtk_dp->train_info.link_rate * 27) 1389 spd_down_cnt_init = 0x8; 1390 else 1391 spd_down_cnt_init = 0x10 + dc_offset; 1392 break; 1393 } 1394 1395 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, spd_down_cnt_init, 1396 SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK); 1397 } 1398 1399 static void mtk_dp_audio_sample_arrange_disable(struct mtk_dp *mtk_dp) 1400 { 1401 /* arrange audio packets into the Hblanking and Vblanking area */ 1402 if (!mtk_dp->data->audio_pkt_in_hblank_area) 1403 return; 1404 1405 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3374, 0, 1406 SDP_ASP_INSERT_IN_HBLANK_DP_ENC1_P0_MASK); 1407 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3374, 0, 1408 SDP_DOWN_ASP_CNT_INIT_DP_ENC1_P0_MASK); 1409 } 1410 1411 static void mtk_dp_setup_tu(struct mtk_dp *mtk_dp) 1412 { 1413 u32 sram_read_start = min_t(u32, MTK_DP_TBC_BUF_READ_START_ADDR, 1414 mtk_dp->info.vm.hactive / 1415 mtk_dp->train_info.lane_count / 1416 MTK_DP_4P1T / MTK_DP_HDE / 1417 MTK_DP_PIX_PER_ADDR); 1418 mtk_dp_set_sram_read_start(mtk_dp, sram_read_start); 1419 mtk_dp_setup_encoder(mtk_dp); 1420 mtk_dp_audio_sample_arrange_disable(mtk_dp); 1421 mtk_dp_sdp_set_down_cnt_init_in_hblank(mtk_dp); 1422 mtk_dp_sdp_set_down_cnt_init(mtk_dp, sram_read_start); 1423 } 1424 1425 static void mtk_dp_set_tx_out(struct mtk_dp *mtk_dp) 1426 { 1427 mtk_dp_setup_tu(mtk_dp); 1428 } 1429 1430 static void mtk_dp_train_update_swing_pre(struct mtk_dp *mtk_dp, int lanes, 1431 u8 dpcd_adjust_req[2]) 1432 { 1433 int lane; 1434 1435 for (lane = 0; lane < lanes; ++lane) { 1436 u8 val; 1437 u8 swing; 1438 u8 preemphasis; 1439 int index = lane / 2; 1440 int shift = lane % 2 ? DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 0; 1441 1442 swing = (dpcd_adjust_req[index] >> shift) & 1443 DP_ADJUST_VOLTAGE_SWING_LANE0_MASK; 1444 preemphasis = ((dpcd_adjust_req[index] >> shift) & 1445 DP_ADJUST_PRE_EMPHASIS_LANE0_MASK) >> 1446 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT; 1447 val = swing << DP_TRAIN_VOLTAGE_SWING_SHIFT | 1448 preemphasis << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1449 1450 if (swing == DP_TRAIN_VOLTAGE_SWING_LEVEL_3) 1451 val |= DP_TRAIN_MAX_SWING_REACHED; 1452 if (preemphasis == 3) 1453 val |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1454 1455 mtk_dp_set_swing_pre_emphasis(mtk_dp, lane, swing, preemphasis); 1456 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_LANE0_SET + lane, 1457 val); 1458 } 1459 } 1460 1461 static void mtk_dp_pattern(struct mtk_dp *mtk_dp, bool is_tps1) 1462 { 1463 int pattern; 1464 unsigned int aux_offset; 1465 1466 if (is_tps1) { 1467 pattern = 1; 1468 aux_offset = DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1; 1469 } else { 1470 aux_offset = mtk_dp->train_info.channel_eq_pattern; 1471 1472 switch (mtk_dp->train_info.channel_eq_pattern) { 1473 case DP_TRAINING_PATTERN_4: 1474 pattern = 4; 1475 break; 1476 case DP_TRAINING_PATTERN_3: 1477 pattern = 3; 1478 aux_offset |= DP_LINK_SCRAMBLING_DISABLE; 1479 break; 1480 case DP_TRAINING_PATTERN_2: 1481 default: 1482 pattern = 2; 1483 aux_offset |= DP_LINK_SCRAMBLING_DISABLE; 1484 break; 1485 } 1486 } 1487 1488 mtk_dp_train_set_pattern(mtk_dp, pattern); 1489 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, aux_offset); 1490 } 1491 1492 static int mtk_dp_train_setting(struct mtk_dp *mtk_dp, u8 target_link_rate, 1493 u8 target_lane_count) 1494 { 1495 int ret; 1496 1497 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LINK_BW_SET, target_link_rate); 1498 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LANE_COUNT_SET, 1499 target_lane_count | DP_LANE_COUNT_ENHANCED_FRAME_EN); 1500 1501 if (mtk_dp->train_info.sink_ssc) 1502 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_DOWNSPREAD_CTRL, 1503 DP_SPREAD_AMP_0_5); 1504 1505 mtk_dp_set_lanes(mtk_dp, target_lane_count / 2); 1506 ret = mtk_dp_phy_configure(mtk_dp, target_link_rate, target_lane_count); 1507 if (ret) 1508 return ret; 1509 1510 dev_dbg(mtk_dp->dev, 1511 "Link train target_link_rate = 0x%x, target_lane_count = 0x%x\n", 1512 target_link_rate, target_lane_count); 1513 1514 return 0; 1515 } 1516 1517 static int mtk_dp_train_cr(struct mtk_dp *mtk_dp, u8 target_lane_count) 1518 { 1519 u8 lane_adjust[2] = {}; 1520 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 1521 u8 prev_lane_adjust = 0xff; 1522 int train_retries = 0; 1523 int voltage_retries = 0; 1524 1525 mtk_dp_pattern(mtk_dp, true); 1526 1527 /* In DP spec 1.4, the retry count of CR is defined as 10. */ 1528 do { 1529 train_retries++; 1530 if (!mtk_dp->train_info.cable_plugged_in) { 1531 mtk_dp_train_set_pattern(mtk_dp, 0); 1532 return -ENODEV; 1533 } 1534 1535 drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1, 1536 lane_adjust, sizeof(lane_adjust)); 1537 mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count, 1538 lane_adjust); 1539 1540 drm_dp_link_train_clock_recovery_delay(&mtk_dp->aux, 1541 mtk_dp->rx_cap); 1542 1543 /* check link status from sink device */ 1544 drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status); 1545 if (drm_dp_clock_recovery_ok(link_status, 1546 target_lane_count)) { 1547 dev_dbg(mtk_dp->dev, "Link train CR pass\n"); 1548 return 0; 1549 } 1550 1551 /* 1552 * In DP spec 1.4, if current voltage level is the same 1553 * with previous voltage level, we need to retry 5 times. 1554 */ 1555 if (prev_lane_adjust == link_status[4]) { 1556 voltage_retries++; 1557 /* 1558 * Condition of CR fail: 1559 * 1. Failed to pass CR using the same voltage 1560 * level over five times. 1561 * 2. Failed to pass CR when the current voltage 1562 * level is the same with previous voltage 1563 * level and reach max voltage level (3). 1564 */ 1565 if (voltage_retries > MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY || 1566 (prev_lane_adjust & DP_ADJUST_VOLTAGE_SWING_LANE0_MASK) == 3) { 1567 dev_dbg(mtk_dp->dev, "Link train CR fail\n"); 1568 break; 1569 } 1570 } else { 1571 /* 1572 * If the voltage level is changed, we need to 1573 * re-calculate this retry count. 1574 */ 1575 voltage_retries = 0; 1576 } 1577 prev_lane_adjust = link_status[4]; 1578 } while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY); 1579 1580 /* Failed to train CR, and disable pattern. */ 1581 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1582 DP_TRAINING_PATTERN_DISABLE); 1583 mtk_dp_train_set_pattern(mtk_dp, 0); 1584 1585 return -ETIMEDOUT; 1586 } 1587 1588 static int mtk_dp_train_eq(struct mtk_dp *mtk_dp, u8 target_lane_count) 1589 { 1590 u8 lane_adjust[2] = {}; 1591 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 1592 int train_retries = 0; 1593 1594 mtk_dp_pattern(mtk_dp, false); 1595 1596 do { 1597 train_retries++; 1598 if (!mtk_dp->train_info.cable_plugged_in) { 1599 mtk_dp_train_set_pattern(mtk_dp, 0); 1600 return -ENODEV; 1601 } 1602 1603 drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1, 1604 lane_adjust, sizeof(lane_adjust)); 1605 mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count, 1606 lane_adjust); 1607 1608 drm_dp_link_train_channel_eq_delay(&mtk_dp->aux, 1609 mtk_dp->rx_cap); 1610 1611 /* check link status from sink device */ 1612 drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status); 1613 if (drm_dp_channel_eq_ok(link_status, target_lane_count)) { 1614 dev_dbg(mtk_dp->dev, "Link train EQ pass\n"); 1615 1616 /* Training done, and disable pattern. */ 1617 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1618 DP_TRAINING_PATTERN_DISABLE); 1619 mtk_dp_train_set_pattern(mtk_dp, 0); 1620 return 0; 1621 } 1622 dev_dbg(mtk_dp->dev, "Link train EQ fail\n"); 1623 } while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY); 1624 1625 /* Failed to train EQ, and disable pattern. */ 1626 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1627 DP_TRAINING_PATTERN_DISABLE); 1628 mtk_dp_train_set_pattern(mtk_dp, 0); 1629 1630 return -ETIMEDOUT; 1631 } 1632 1633 static int mtk_dp_parse_capabilities(struct mtk_dp *mtk_dp) 1634 { 1635 u8 val; 1636 ssize_t ret; 1637 1638 /* 1639 * If we're eDP and capabilities were already parsed we can skip 1640 * reading again because eDP panels aren't hotpluggable hence the 1641 * caps and training information won't ever change in a boot life 1642 */ 1643 if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP && 1644 mtk_dp->rx_cap[DP_MAX_LINK_RATE] && 1645 mtk_dp->train_info.sink_ssc) 1646 return 0; 1647 1648 ret = drm_dp_read_dpcd_caps(&mtk_dp->aux, mtk_dp->rx_cap); 1649 if (ret < 0) 1650 return ret; 1651 1652 if (drm_dp_tps4_supported(mtk_dp->rx_cap)) 1653 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_4; 1654 else if (drm_dp_tps3_supported(mtk_dp->rx_cap)) 1655 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_3; 1656 else 1657 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_2; 1658 1659 mtk_dp->train_info.sink_ssc = drm_dp_max_downspread(mtk_dp->rx_cap); 1660 1661 ret = drm_dp_dpcd_readb(&mtk_dp->aux, DP_MSTM_CAP, &val); 1662 if (ret < 1) { 1663 drm_err(mtk_dp->drm_dev, "Read mstm cap failed\n"); 1664 return ret == 0 ? -EIO : ret; 1665 } 1666 1667 if (val & DP_MST_CAP) { 1668 /* Clear DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 */ 1669 ret = drm_dp_dpcd_readb(&mtk_dp->aux, 1670 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0, 1671 &val); 1672 if (ret < 1) { 1673 drm_err(mtk_dp->drm_dev, "Read irq vector failed\n"); 1674 return ret == 0 ? -EIO : ret; 1675 } 1676 1677 if (val) { 1678 ret = drm_dp_dpcd_writeb(&mtk_dp->aux, 1679 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0, 1680 val); 1681 if (ret < 0) 1682 return ret; 1683 } 1684 } 1685 1686 return 0; 1687 } 1688 1689 static bool mtk_dp_edid_parse_audio_capabilities(struct mtk_dp *mtk_dp, 1690 struct mtk_dp_audio_cfg *cfg) 1691 { 1692 if (!mtk_dp->data->audio_supported) 1693 return false; 1694 1695 if (mtk_dp->info.audio_cur_cfg.sad_count <= 0) { 1696 drm_info(mtk_dp->drm_dev, "The SADs is NULL\n"); 1697 return false; 1698 } 1699 1700 return true; 1701 } 1702 1703 static void mtk_dp_train_change_mode(struct mtk_dp *mtk_dp) 1704 { 1705 phy_reset(mtk_dp->phy); 1706 mtk_dp_reset_swing_pre_emphasis(mtk_dp); 1707 } 1708 1709 static int mtk_dp_training(struct mtk_dp *mtk_dp) 1710 { 1711 int ret; 1712 u8 lane_count, link_rate, train_limit, max_link_rate; 1713 1714 link_rate = min_t(u8, mtk_dp->max_linkrate, 1715 mtk_dp->rx_cap[DP_MAX_LINK_RATE]); 1716 max_link_rate = link_rate; 1717 lane_count = min_t(u8, mtk_dp->max_lanes, 1718 drm_dp_max_lane_count(mtk_dp->rx_cap)); 1719 1720 /* 1721 * TPS are generated by the hardware pattern generator. From the 1722 * hardware setting we need to disable this scramble setting before 1723 * use the TPS pattern generator. 1724 */ 1725 mtk_dp_training_set_scramble(mtk_dp, false); 1726 1727 for (train_limit = 6; train_limit > 0; train_limit--) { 1728 mtk_dp_train_change_mode(mtk_dp); 1729 1730 ret = mtk_dp_train_setting(mtk_dp, link_rate, lane_count); 1731 if (ret) 1732 return ret; 1733 1734 ret = mtk_dp_train_cr(mtk_dp, lane_count); 1735 if (ret == -ENODEV) { 1736 return ret; 1737 } else if (ret) { 1738 /* reduce link rate */ 1739 switch (link_rate) { 1740 case DP_LINK_BW_1_62: 1741 lane_count = lane_count / 2; 1742 link_rate = max_link_rate; 1743 if (lane_count == 0) 1744 return -EIO; 1745 break; 1746 case DP_LINK_BW_2_7: 1747 link_rate = DP_LINK_BW_1_62; 1748 break; 1749 case DP_LINK_BW_5_4: 1750 link_rate = DP_LINK_BW_2_7; 1751 break; 1752 case DP_LINK_BW_8_1: 1753 link_rate = DP_LINK_BW_5_4; 1754 break; 1755 default: 1756 return -EINVAL; 1757 } 1758 continue; 1759 } 1760 1761 ret = mtk_dp_train_eq(mtk_dp, lane_count); 1762 if (ret == -ENODEV) { 1763 return ret; 1764 } else if (ret) { 1765 /* reduce lane count */ 1766 if (lane_count == 0) 1767 return -EIO; 1768 lane_count /= 2; 1769 continue; 1770 } 1771 1772 /* if we can run to this, training is done. */ 1773 break; 1774 } 1775 1776 if (train_limit == 0) 1777 return -ETIMEDOUT; 1778 1779 mtk_dp->train_info.link_rate = link_rate; 1780 mtk_dp->train_info.lane_count = lane_count; 1781 1782 /* 1783 * After training done, we need to output normal stream instead of TPS, 1784 * so we need to enable scramble. 1785 */ 1786 mtk_dp_training_set_scramble(mtk_dp, true); 1787 mtk_dp_set_enhanced_frame_mode(mtk_dp); 1788 1789 return 0; 1790 } 1791 1792 static void mtk_dp_video_enable(struct mtk_dp *mtk_dp, bool enable) 1793 { 1794 /* the mute sequence is different between enable and disable */ 1795 if (enable) { 1796 mtk_dp_msa_bypass_enable(mtk_dp, false); 1797 mtk_dp_pg_enable(mtk_dp, false); 1798 mtk_dp_set_tx_out(mtk_dp); 1799 mtk_dp_video_mute(mtk_dp, false); 1800 } else { 1801 mtk_dp_video_mute(mtk_dp, true); 1802 mtk_dp_pg_enable(mtk_dp, true); 1803 mtk_dp_msa_bypass_enable(mtk_dp, true); 1804 } 1805 } 1806 1807 static void mtk_dp_audio_sdp_setup(struct mtk_dp *mtk_dp, 1808 struct mtk_dp_audio_cfg *cfg) 1809 { 1810 struct dp_sdp sdp; 1811 struct hdmi_audio_infoframe frame; 1812 1813 hdmi_audio_infoframe_init(&frame); 1814 frame.coding_type = HDMI_AUDIO_CODING_TYPE_PCM; 1815 frame.channels = cfg->channels; 1816 frame.sample_frequency = cfg->sample_rate; 1817 1818 switch (cfg->word_length_bits) { 1819 case 16: 1820 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_16; 1821 break; 1822 case 20: 1823 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_20; 1824 break; 1825 case 24: 1826 default: 1827 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_24; 1828 break; 1829 } 1830 1831 hdmi_audio_infoframe_pack_for_dp(&frame, &sdp, MTK_DP_VERSION); 1832 1833 mtk_dp_audio_sdp_asp_set_channels(mtk_dp, cfg->channels); 1834 mtk_dp_setup_sdp_aui(mtk_dp, &sdp); 1835 } 1836 1837 static void mtk_dp_audio_setup(struct mtk_dp *mtk_dp, 1838 struct mtk_dp_audio_cfg *cfg) 1839 { 1840 mtk_dp_audio_sdp_setup(mtk_dp, cfg); 1841 mtk_dp_audio_channel_status_set(mtk_dp, cfg); 1842 1843 mtk_dp_audio_setup_channels(mtk_dp, cfg); 1844 mtk_dp_audio_set_divider(mtk_dp); 1845 } 1846 1847 static int mtk_dp_video_config(struct mtk_dp *mtk_dp) 1848 { 1849 mtk_dp_config_mn_mode(mtk_dp); 1850 mtk_dp_set_msa(mtk_dp); 1851 mtk_dp_set_color_depth(mtk_dp); 1852 return mtk_dp_set_color_format(mtk_dp, mtk_dp->info.format); 1853 } 1854 1855 static void mtk_dp_init_port(struct mtk_dp *mtk_dp) 1856 { 1857 mtk_dp_set_idle_pattern(mtk_dp, true); 1858 mtk_dp_initialize_priv_data(mtk_dp); 1859 1860 mtk_dp_initialize_settings(mtk_dp); 1861 mtk_dp_initialize_aux_settings(mtk_dp); 1862 mtk_dp_initialize_digital_settings(mtk_dp); 1863 mtk_dp_initialize_hpd_detect_settings(mtk_dp); 1864 1865 mtk_dp_digital_sw_reset(mtk_dp); 1866 } 1867 1868 static irqreturn_t mtk_dp_hpd_event_thread(int hpd, void *dev) 1869 { 1870 struct mtk_dp *mtk_dp = dev; 1871 unsigned long flags; 1872 u32 status; 1873 1874 if (mtk_dp->need_debounce && mtk_dp->train_info.cable_plugged_in) 1875 msleep(100); 1876 1877 spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags); 1878 status = mtk_dp->irq_thread_handle; 1879 mtk_dp->irq_thread_handle = 0; 1880 spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags); 1881 1882 if (status & MTK_DP_THREAD_CABLE_STATE_CHG) { 1883 if (mtk_dp->bridge.dev) 1884 drm_helper_hpd_irq_event(mtk_dp->bridge.dev); 1885 1886 if (!mtk_dp->train_info.cable_plugged_in) { 1887 mtk_dp_disable_sdp_aui(mtk_dp); 1888 memset(&mtk_dp->info.audio_cur_cfg, 0, 1889 sizeof(mtk_dp->info.audio_cur_cfg)); 1890 1891 mtk_dp->need_debounce = false; 1892 mod_timer(&mtk_dp->debounce_timer, 1893 jiffies + msecs_to_jiffies(100) - 1); 1894 } 1895 } 1896 1897 if (status & MTK_DP_THREAD_HPD_EVENT) 1898 dev_dbg(mtk_dp->dev, "Receive IRQ from sink devices\n"); 1899 1900 return IRQ_HANDLED; 1901 } 1902 1903 static irqreturn_t mtk_dp_hpd_event(int hpd, void *dev) 1904 { 1905 struct mtk_dp *mtk_dp = dev; 1906 bool cable_sta_chg = false; 1907 unsigned long flags; 1908 u32 irq_status = mtk_dp_swirq_get_clear(mtk_dp) | 1909 mtk_dp_hwirq_get_clear(mtk_dp); 1910 1911 if (!irq_status) 1912 return IRQ_HANDLED; 1913 1914 spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags); 1915 1916 if (irq_status & MTK_DP_HPD_INTERRUPT) 1917 mtk_dp->irq_thread_handle |= MTK_DP_THREAD_HPD_EVENT; 1918 1919 /* Cable state is changed. */ 1920 if (irq_status != MTK_DP_HPD_INTERRUPT) { 1921 mtk_dp->irq_thread_handle |= MTK_DP_THREAD_CABLE_STATE_CHG; 1922 cable_sta_chg = true; 1923 } 1924 1925 spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags); 1926 1927 if (cable_sta_chg) { 1928 if (!!(mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3414) & 1929 HPD_DB_DP_TRANS_P0_MASK)) 1930 mtk_dp->train_info.cable_plugged_in = true; 1931 else 1932 mtk_dp->train_info.cable_plugged_in = false; 1933 } 1934 1935 return IRQ_WAKE_THREAD; 1936 } 1937 1938 static int mtk_dp_wait_hpd_asserted(struct drm_dp_aux *mtk_aux, unsigned long wait_us) 1939 { 1940 struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux); 1941 u32 val; 1942 int ret; 1943 1944 ret = regmap_read_poll_timeout(mtk_dp->regs, MTK_DP_TRANS_P0_3414, 1945 val, !!(val & HPD_DB_DP_TRANS_P0_MASK), 1946 wait_us / 100, wait_us); 1947 if (ret) { 1948 mtk_dp->train_info.cable_plugged_in = false; 1949 return ret; 1950 } 1951 1952 mtk_dp->train_info.cable_plugged_in = true; 1953 1954 ret = mtk_dp_parse_capabilities(mtk_dp); 1955 if (ret) { 1956 drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n"); 1957 return ret; 1958 } 1959 1960 return 0; 1961 } 1962 1963 static int mtk_dp_dt_parse(struct mtk_dp *mtk_dp, 1964 struct platform_device *pdev) 1965 { 1966 struct device_node *endpoint; 1967 struct device *dev = &pdev->dev; 1968 int ret; 1969 void __iomem *base; 1970 u32 linkrate; 1971 int len; 1972 1973 base = devm_platform_ioremap_resource(pdev, 0); 1974 if (IS_ERR(base)) 1975 return PTR_ERR(base); 1976 1977 mtk_dp->regs = devm_regmap_init_mmio(dev, base, &mtk_dp_regmap_config); 1978 if (IS_ERR(mtk_dp->regs)) 1979 return PTR_ERR(mtk_dp->regs); 1980 1981 endpoint = of_graph_get_endpoint_by_regs(pdev->dev.of_node, 1, -1); 1982 len = of_property_count_elems_of_size(endpoint, 1983 "data-lanes", sizeof(u32)); 1984 if (len < 0 || len > 4 || len == 3) { 1985 dev_err(dev, "invalid data lane size: %d\n", len); 1986 return -EINVAL; 1987 } 1988 1989 mtk_dp->max_lanes = len; 1990 1991 ret = device_property_read_u32(dev, "max-linkrate-mhz", &linkrate); 1992 if (ret) { 1993 dev_err(dev, "failed to read max linkrate: %d\n", ret); 1994 return ret; 1995 } 1996 1997 mtk_dp->max_linkrate = drm_dp_link_rate_to_bw_code(linkrate * 100); 1998 1999 return 0; 2000 } 2001 2002 static void mtk_dp_update_plugged_status(struct mtk_dp *mtk_dp) 2003 { 2004 if (!mtk_dp->data->audio_supported || !mtk_dp->audio_enable) 2005 return; 2006 2007 mutex_lock(&mtk_dp->update_plugged_status_lock); 2008 if (mtk_dp->plugged_cb && mtk_dp->codec_dev) 2009 mtk_dp->plugged_cb(mtk_dp->codec_dev, 2010 mtk_dp->enabled & 2011 mtk_dp->info.audio_cur_cfg.detect_monitor); 2012 mutex_unlock(&mtk_dp->update_plugged_status_lock); 2013 } 2014 2015 static enum drm_connector_status mtk_dp_bdg_detect(struct drm_bridge *bridge) 2016 { 2017 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2018 enum drm_connector_status ret = connector_status_disconnected; 2019 bool enabled = mtk_dp->enabled; 2020 u8 sink_count = 0; 2021 2022 if (!mtk_dp->train_info.cable_plugged_in) 2023 return ret; 2024 2025 if (!enabled) 2026 mtk_dp_aux_panel_poweron(mtk_dp, true); 2027 2028 /* 2029 * Some dongles still source HPD when they do not connect to any 2030 * sink device. To avoid this, we need to read the sink count 2031 * to make sure we do connect to sink devices. After this detect 2032 * function, we just need to check the HPD connection to check 2033 * whether we connect to a sink device. 2034 */ 2035 drm_dp_dpcd_readb(&mtk_dp->aux, DP_SINK_COUNT, &sink_count); 2036 if (DP_GET_SINK_COUNT(sink_count)) 2037 ret = connector_status_connected; 2038 2039 if (!enabled) 2040 mtk_dp_aux_panel_poweron(mtk_dp, false); 2041 2042 return ret; 2043 } 2044 2045 static struct edid *mtk_dp_get_edid(struct drm_bridge *bridge, 2046 struct drm_connector *connector) 2047 { 2048 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2049 bool enabled = mtk_dp->enabled; 2050 struct edid *new_edid = NULL; 2051 struct mtk_dp_audio_cfg *audio_caps = &mtk_dp->info.audio_cur_cfg; 2052 2053 if (!enabled) { 2054 drm_atomic_bridge_chain_pre_enable(bridge, connector->state->state); 2055 mtk_dp_aux_panel_poweron(mtk_dp, true); 2056 } 2057 2058 new_edid = drm_get_edid(connector, &mtk_dp->aux.ddc); 2059 2060 /* 2061 * Parse capability here to let atomic_get_input_bus_fmts and 2062 * mode_valid use the capability to calculate sink bitrates. 2063 */ 2064 if (mtk_dp_parse_capabilities(mtk_dp)) { 2065 drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n"); 2066 kfree(new_edid); 2067 new_edid = NULL; 2068 } 2069 2070 if (new_edid) { 2071 struct cea_sad *sads; 2072 2073 audio_caps->sad_count = drm_edid_to_sad(new_edid, &sads); 2074 kfree(sads); 2075 2076 audio_caps->detect_monitor = drm_detect_monitor_audio(new_edid); 2077 } 2078 2079 if (!enabled) { 2080 mtk_dp_aux_panel_poweron(mtk_dp, false); 2081 drm_atomic_bridge_chain_post_disable(bridge, connector->state->state); 2082 } 2083 2084 return new_edid; 2085 } 2086 2087 static ssize_t mtk_dp_aux_transfer(struct drm_dp_aux *mtk_aux, 2088 struct drm_dp_aux_msg *msg) 2089 { 2090 struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux); 2091 bool is_read; 2092 u8 request; 2093 size_t accessed_bytes = 0; 2094 int ret; 2095 2096 if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP && 2097 !mtk_dp->train_info.cable_plugged_in) { 2098 ret = -EAGAIN; 2099 goto err; 2100 } 2101 2102 switch (msg->request) { 2103 case DP_AUX_I2C_MOT: 2104 case DP_AUX_I2C_WRITE: 2105 case DP_AUX_NATIVE_WRITE: 2106 case DP_AUX_I2C_WRITE_STATUS_UPDATE: 2107 case DP_AUX_I2C_WRITE_STATUS_UPDATE | DP_AUX_I2C_MOT: 2108 request = msg->request & ~DP_AUX_I2C_WRITE_STATUS_UPDATE; 2109 is_read = false; 2110 break; 2111 case DP_AUX_I2C_READ: 2112 case DP_AUX_NATIVE_READ: 2113 case DP_AUX_I2C_READ | DP_AUX_I2C_MOT: 2114 request = msg->request; 2115 is_read = true; 2116 break; 2117 default: 2118 dev_err(mtk_dp->dev, "invalid aux cmd = %d\n", 2119 msg->request); 2120 ret = -EINVAL; 2121 goto err; 2122 } 2123 2124 do { 2125 size_t to_access = min_t(size_t, DP_AUX_MAX_PAYLOAD_BYTES, 2126 msg->size - accessed_bytes); 2127 2128 ret = mtk_dp_aux_do_transfer(mtk_dp, is_read, request, 2129 msg->address + accessed_bytes, 2130 msg->buffer + accessed_bytes, 2131 to_access, &msg->reply); 2132 2133 if (ret) { 2134 dev_info(mtk_dp->dev, 2135 "Failed to do AUX transfer: %d\n", ret); 2136 goto err; 2137 } 2138 accessed_bytes += to_access; 2139 } while (accessed_bytes < msg->size); 2140 2141 return msg->size; 2142 err: 2143 msg->reply = DP_AUX_NATIVE_REPLY_NACK | DP_AUX_I2C_REPLY_NACK; 2144 return ret; 2145 } 2146 2147 static int mtk_dp_poweron(struct mtk_dp *mtk_dp) 2148 { 2149 int ret; 2150 2151 ret = phy_init(mtk_dp->phy); 2152 if (ret) { 2153 dev_err(mtk_dp->dev, "Failed to initialize phy: %d\n", ret); 2154 return ret; 2155 } 2156 2157 mtk_dp_init_port(mtk_dp); 2158 mtk_dp_power_enable(mtk_dp); 2159 2160 return 0; 2161 } 2162 2163 static void mtk_dp_poweroff(struct mtk_dp *mtk_dp) 2164 { 2165 mtk_dp_power_disable(mtk_dp); 2166 phy_exit(mtk_dp->phy); 2167 } 2168 2169 static int mtk_dp_bridge_attach(struct drm_bridge *bridge, 2170 enum drm_bridge_attach_flags flags) 2171 { 2172 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2173 int ret; 2174 2175 if (!(flags & DRM_BRIDGE_ATTACH_NO_CONNECTOR)) { 2176 dev_err(mtk_dp->dev, "Driver does not provide a connector!"); 2177 return -EINVAL; 2178 } 2179 2180 mtk_dp->aux.drm_dev = bridge->dev; 2181 ret = drm_dp_aux_register(&mtk_dp->aux); 2182 if (ret) { 2183 dev_err(mtk_dp->dev, 2184 "failed to register DP AUX channel: %d\n", ret); 2185 return ret; 2186 } 2187 2188 ret = mtk_dp_poweron(mtk_dp); 2189 if (ret) 2190 goto err_aux_register; 2191 2192 if (mtk_dp->next_bridge) { 2193 ret = drm_bridge_attach(bridge->encoder, mtk_dp->next_bridge, 2194 &mtk_dp->bridge, flags); 2195 if (ret) { 2196 drm_warn(mtk_dp->drm_dev, 2197 "Failed to attach external bridge: %d\n", ret); 2198 goto err_bridge_attach; 2199 } 2200 } 2201 2202 mtk_dp->drm_dev = bridge->dev; 2203 2204 if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) { 2205 irq_clear_status_flags(mtk_dp->irq, IRQ_NOAUTOEN); 2206 enable_irq(mtk_dp->irq); 2207 mtk_dp_hwirq_enable(mtk_dp, true); 2208 } 2209 2210 return 0; 2211 2212 err_bridge_attach: 2213 mtk_dp_poweroff(mtk_dp); 2214 err_aux_register: 2215 drm_dp_aux_unregister(&mtk_dp->aux); 2216 return ret; 2217 } 2218 2219 static void mtk_dp_bridge_detach(struct drm_bridge *bridge) 2220 { 2221 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2222 2223 if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) { 2224 mtk_dp_hwirq_enable(mtk_dp, false); 2225 disable_irq(mtk_dp->irq); 2226 } 2227 mtk_dp->drm_dev = NULL; 2228 mtk_dp_poweroff(mtk_dp); 2229 drm_dp_aux_unregister(&mtk_dp->aux); 2230 } 2231 2232 static void mtk_dp_bridge_atomic_enable(struct drm_bridge *bridge, 2233 struct drm_bridge_state *old_state) 2234 { 2235 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2236 int ret; 2237 2238 mtk_dp->conn = drm_atomic_get_new_connector_for_encoder(old_state->base.state, 2239 bridge->encoder); 2240 if (!mtk_dp->conn) { 2241 drm_err(mtk_dp->drm_dev, 2242 "Can't enable bridge as connector is missing\n"); 2243 return; 2244 } 2245 2246 mtk_dp_aux_panel_poweron(mtk_dp, true); 2247 2248 /* Training */ 2249 ret = mtk_dp_training(mtk_dp); 2250 if (ret) { 2251 drm_err(mtk_dp->drm_dev, "Training failed, %d\n", ret); 2252 goto power_off_aux; 2253 } 2254 2255 ret = mtk_dp_video_config(mtk_dp); 2256 if (ret) 2257 goto power_off_aux; 2258 2259 mtk_dp_video_enable(mtk_dp, true); 2260 2261 mtk_dp->audio_enable = 2262 mtk_dp_edid_parse_audio_capabilities(mtk_dp, 2263 &mtk_dp->info.audio_cur_cfg); 2264 if (mtk_dp->audio_enable) { 2265 mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg); 2266 mtk_dp_audio_mute(mtk_dp, false); 2267 } else { 2268 memset(&mtk_dp->info.audio_cur_cfg, 0, 2269 sizeof(mtk_dp->info.audio_cur_cfg)); 2270 } 2271 2272 mtk_dp->enabled = true; 2273 mtk_dp_update_plugged_status(mtk_dp); 2274 2275 return; 2276 power_off_aux: 2277 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 2278 DP_PWR_STATE_BANDGAP_TPLL, 2279 DP_PWR_STATE_MASK); 2280 } 2281 2282 static void mtk_dp_bridge_atomic_disable(struct drm_bridge *bridge, 2283 struct drm_bridge_state *old_state) 2284 { 2285 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2286 2287 mtk_dp->enabled = false; 2288 mtk_dp_update_plugged_status(mtk_dp); 2289 mtk_dp_video_enable(mtk_dp, false); 2290 mtk_dp_audio_mute(mtk_dp, true); 2291 2292 if (mtk_dp->train_info.cable_plugged_in) { 2293 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3); 2294 usleep_range(2000, 3000); 2295 } 2296 2297 /* power off aux */ 2298 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 2299 DP_PWR_STATE_BANDGAP_TPLL, 2300 DP_PWR_STATE_MASK); 2301 2302 /* Ensure the sink is muted */ 2303 msleep(20); 2304 } 2305 2306 static enum drm_mode_status 2307 mtk_dp_bridge_mode_valid(struct drm_bridge *bridge, 2308 const struct drm_display_info *info, 2309 const struct drm_display_mode *mode) 2310 { 2311 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2312 u32 bpp = info->color_formats & DRM_COLOR_FORMAT_YCBCR422 ? 16 : 24; 2313 u32 rate = min_t(u32, drm_dp_max_link_rate(mtk_dp->rx_cap) * 2314 drm_dp_max_lane_count(mtk_dp->rx_cap), 2315 drm_dp_bw_code_to_link_rate(mtk_dp->max_linkrate) * 2316 mtk_dp->max_lanes); 2317 2318 if (rate < mode->clock * bpp / 8) 2319 return MODE_CLOCK_HIGH; 2320 2321 return MODE_OK; 2322 } 2323 2324 static u32 *mtk_dp_bridge_atomic_get_output_bus_fmts(struct drm_bridge *bridge, 2325 struct drm_bridge_state *bridge_state, 2326 struct drm_crtc_state *crtc_state, 2327 struct drm_connector_state *conn_state, 2328 unsigned int *num_output_fmts) 2329 { 2330 u32 *output_fmts; 2331 2332 *num_output_fmts = 0; 2333 output_fmts = kmalloc(sizeof(*output_fmts), GFP_KERNEL); 2334 if (!output_fmts) 2335 return NULL; 2336 *num_output_fmts = 1; 2337 output_fmts[0] = MEDIA_BUS_FMT_FIXED; 2338 return output_fmts; 2339 } 2340 2341 static const u32 mt8195_input_fmts[] = { 2342 MEDIA_BUS_FMT_RGB888_1X24, 2343 MEDIA_BUS_FMT_YUV8_1X24, 2344 MEDIA_BUS_FMT_YUYV8_1X16, 2345 }; 2346 2347 static u32 *mtk_dp_bridge_atomic_get_input_bus_fmts(struct drm_bridge *bridge, 2348 struct drm_bridge_state *bridge_state, 2349 struct drm_crtc_state *crtc_state, 2350 struct drm_connector_state *conn_state, 2351 u32 output_fmt, 2352 unsigned int *num_input_fmts) 2353 { 2354 u32 *input_fmts; 2355 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2356 struct drm_display_mode *mode = &crtc_state->adjusted_mode; 2357 struct drm_display_info *display_info = 2358 &conn_state->connector->display_info; 2359 u32 rate = min_t(u32, drm_dp_max_link_rate(mtk_dp->rx_cap) * 2360 drm_dp_max_lane_count(mtk_dp->rx_cap), 2361 drm_dp_bw_code_to_link_rate(mtk_dp->max_linkrate) * 2362 mtk_dp->max_lanes); 2363 2364 *num_input_fmts = 0; 2365 2366 /* 2367 * If the linkrate is smaller than datarate of RGB888, larger than 2368 * datarate of YUV422 and sink device supports YUV422, we output YUV422 2369 * format. Use this condition, we can support more resolution. 2370 */ 2371 if ((rate < (mode->clock * 24 / 8)) && 2372 (rate > (mode->clock * 16 / 8)) && 2373 (display_info->color_formats & DRM_COLOR_FORMAT_YCBCR422)) { 2374 input_fmts = kcalloc(1, sizeof(*input_fmts), GFP_KERNEL); 2375 if (!input_fmts) 2376 return NULL; 2377 *num_input_fmts = 1; 2378 input_fmts[0] = MEDIA_BUS_FMT_YUYV8_1X16; 2379 } else { 2380 input_fmts = kcalloc(ARRAY_SIZE(mt8195_input_fmts), 2381 sizeof(*input_fmts), 2382 GFP_KERNEL); 2383 if (!input_fmts) 2384 return NULL; 2385 2386 *num_input_fmts = ARRAY_SIZE(mt8195_input_fmts); 2387 memcpy(input_fmts, mt8195_input_fmts, sizeof(mt8195_input_fmts)); 2388 } 2389 2390 return input_fmts; 2391 } 2392 2393 static int mtk_dp_bridge_atomic_check(struct drm_bridge *bridge, 2394 struct drm_bridge_state *bridge_state, 2395 struct drm_crtc_state *crtc_state, 2396 struct drm_connector_state *conn_state) 2397 { 2398 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 2399 struct drm_crtc *crtc = conn_state->crtc; 2400 unsigned int input_bus_format; 2401 2402 input_bus_format = bridge_state->input_bus_cfg.format; 2403 2404 dev_dbg(mtk_dp->dev, "input format 0x%04x, output format 0x%04x\n", 2405 bridge_state->input_bus_cfg.format, 2406 bridge_state->output_bus_cfg.format); 2407 2408 if (input_bus_format == MEDIA_BUS_FMT_YUYV8_1X16) 2409 mtk_dp->info.format = DP_PIXELFORMAT_YUV422; 2410 else 2411 mtk_dp->info.format = DP_PIXELFORMAT_RGB; 2412 2413 if (!crtc) { 2414 drm_err(mtk_dp->drm_dev, 2415 "Can't enable bridge as connector state doesn't have a crtc\n"); 2416 return -EINVAL; 2417 } 2418 2419 drm_display_mode_to_videomode(&crtc_state->adjusted_mode, &mtk_dp->info.vm); 2420 2421 return 0; 2422 } 2423 2424 static const struct drm_bridge_funcs mtk_dp_bridge_funcs = { 2425 .atomic_check = mtk_dp_bridge_atomic_check, 2426 .atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state, 2427 .atomic_destroy_state = drm_atomic_helper_bridge_destroy_state, 2428 .atomic_get_output_bus_fmts = mtk_dp_bridge_atomic_get_output_bus_fmts, 2429 .atomic_get_input_bus_fmts = mtk_dp_bridge_atomic_get_input_bus_fmts, 2430 .atomic_reset = drm_atomic_helper_bridge_reset, 2431 .attach = mtk_dp_bridge_attach, 2432 .detach = mtk_dp_bridge_detach, 2433 .atomic_enable = mtk_dp_bridge_atomic_enable, 2434 .atomic_disable = mtk_dp_bridge_atomic_disable, 2435 .mode_valid = mtk_dp_bridge_mode_valid, 2436 .get_edid = mtk_dp_get_edid, 2437 .detect = mtk_dp_bdg_detect, 2438 }; 2439 2440 static void mtk_dp_debounce_timer(struct timer_list *t) 2441 { 2442 struct mtk_dp *mtk_dp = from_timer(mtk_dp, t, debounce_timer); 2443 2444 mtk_dp->need_debounce = true; 2445 } 2446 2447 /* 2448 * HDMI audio codec callbacks 2449 */ 2450 static int mtk_dp_audio_hw_params(struct device *dev, void *data, 2451 struct hdmi_codec_daifmt *daifmt, 2452 struct hdmi_codec_params *params) 2453 { 2454 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2455 2456 if (!mtk_dp->enabled) { 2457 dev_err(mtk_dp->dev, "%s, DP is not ready!\n", __func__); 2458 return -ENODEV; 2459 } 2460 2461 mtk_dp->info.audio_cur_cfg.channels = params->cea.channels; 2462 mtk_dp->info.audio_cur_cfg.sample_rate = params->sample_rate; 2463 2464 mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg); 2465 2466 return 0; 2467 } 2468 2469 static int mtk_dp_audio_startup(struct device *dev, void *data) 2470 { 2471 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2472 2473 mtk_dp_audio_mute(mtk_dp, false); 2474 2475 return 0; 2476 } 2477 2478 static void mtk_dp_audio_shutdown(struct device *dev, void *data) 2479 { 2480 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2481 2482 mtk_dp_audio_mute(mtk_dp, true); 2483 } 2484 2485 static int mtk_dp_audio_get_eld(struct device *dev, void *data, uint8_t *buf, 2486 size_t len) 2487 { 2488 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2489 2490 if (mtk_dp->enabled) 2491 memcpy(buf, mtk_dp->conn->eld, len); 2492 else 2493 memset(buf, 0, len); 2494 2495 return 0; 2496 } 2497 2498 static int mtk_dp_audio_hook_plugged_cb(struct device *dev, void *data, 2499 hdmi_codec_plugged_cb fn, 2500 struct device *codec_dev) 2501 { 2502 struct mtk_dp *mtk_dp = data; 2503 2504 mutex_lock(&mtk_dp->update_plugged_status_lock); 2505 mtk_dp->plugged_cb = fn; 2506 mtk_dp->codec_dev = codec_dev; 2507 mutex_unlock(&mtk_dp->update_plugged_status_lock); 2508 2509 mtk_dp_update_plugged_status(mtk_dp); 2510 2511 return 0; 2512 } 2513 2514 static const struct hdmi_codec_ops mtk_dp_audio_codec_ops = { 2515 .hw_params = mtk_dp_audio_hw_params, 2516 .audio_startup = mtk_dp_audio_startup, 2517 .audio_shutdown = mtk_dp_audio_shutdown, 2518 .get_eld = mtk_dp_audio_get_eld, 2519 .hook_plugged_cb = mtk_dp_audio_hook_plugged_cb, 2520 .no_capture_mute = 1, 2521 }; 2522 2523 static int mtk_dp_register_audio_driver(struct device *dev) 2524 { 2525 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2526 struct hdmi_codec_pdata codec_data = { 2527 .ops = &mtk_dp_audio_codec_ops, 2528 .max_i2s_channels = 8, 2529 .i2s = 1, 2530 .data = mtk_dp, 2531 }; 2532 2533 mtk_dp->audio_pdev = platform_device_register_data(dev, 2534 HDMI_CODEC_DRV_NAME, 2535 PLATFORM_DEVID_AUTO, 2536 &codec_data, 2537 sizeof(codec_data)); 2538 return PTR_ERR_OR_ZERO(mtk_dp->audio_pdev); 2539 } 2540 2541 static int mtk_dp_register_phy(struct mtk_dp *mtk_dp) 2542 { 2543 struct device *dev = mtk_dp->dev; 2544 2545 mtk_dp->phy_dev = platform_device_register_data(dev, "mediatek-dp-phy", 2546 PLATFORM_DEVID_AUTO, 2547 &mtk_dp->regs, 2548 sizeof(struct regmap *)); 2549 if (IS_ERR(mtk_dp->phy_dev)) 2550 return dev_err_probe(dev, PTR_ERR(mtk_dp->phy_dev), 2551 "Failed to create device mediatek-dp-phy\n"); 2552 2553 mtk_dp_get_calibration_data(mtk_dp); 2554 2555 mtk_dp->phy = devm_phy_get(&mtk_dp->phy_dev->dev, "dp"); 2556 if (IS_ERR(mtk_dp->phy)) { 2557 platform_device_unregister(mtk_dp->phy_dev); 2558 return dev_err_probe(dev, PTR_ERR(mtk_dp->phy), "Failed to get phy\n"); 2559 } 2560 2561 return 0; 2562 } 2563 2564 static int mtk_dp_edp_link_panel(struct drm_dp_aux *mtk_aux) 2565 { 2566 struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux); 2567 struct device *dev = mtk_aux->dev; 2568 int ret; 2569 2570 mtk_dp->next_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0); 2571 2572 /* Power off the DP and AUX: either detection is done, or no panel present */ 2573 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 2574 DP_PWR_STATE_BANDGAP_TPLL, 2575 DP_PWR_STATE_MASK); 2576 mtk_dp_power_disable(mtk_dp); 2577 2578 if (IS_ERR(mtk_dp->next_bridge)) { 2579 ret = PTR_ERR(mtk_dp->next_bridge); 2580 mtk_dp->next_bridge = NULL; 2581 return ret; 2582 } 2583 2584 /* For eDP, we add the bridge only if the panel was found */ 2585 ret = devm_drm_bridge_add(dev, &mtk_dp->bridge); 2586 if (ret) 2587 return ret; 2588 2589 return 0; 2590 } 2591 2592 static int mtk_dp_probe(struct platform_device *pdev) 2593 { 2594 struct mtk_dp *mtk_dp; 2595 struct device *dev = &pdev->dev; 2596 int ret; 2597 2598 mtk_dp = devm_kzalloc(dev, sizeof(*mtk_dp), GFP_KERNEL); 2599 if (!mtk_dp) 2600 return -ENOMEM; 2601 2602 mtk_dp->dev = dev; 2603 mtk_dp->data = (struct mtk_dp_data *)of_device_get_match_data(dev); 2604 2605 ret = mtk_dp_dt_parse(mtk_dp, pdev); 2606 if (ret) 2607 return dev_err_probe(dev, ret, "Failed to parse dt\n"); 2608 2609 /* 2610 * Request the interrupt and install service routine only if we are 2611 * on full DisplayPort. 2612 * For eDP, polling the HPD instead is more convenient because we 2613 * don't expect any (un)plug events during runtime, hence we can 2614 * avoid some locking. 2615 */ 2616 if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP) { 2617 mtk_dp->irq = platform_get_irq(pdev, 0); 2618 if (mtk_dp->irq < 0) 2619 return dev_err_probe(dev, mtk_dp->irq, 2620 "failed to request dp irq resource\n"); 2621 2622 spin_lock_init(&mtk_dp->irq_thread_lock); 2623 2624 irq_set_status_flags(mtk_dp->irq, IRQ_NOAUTOEN); 2625 ret = devm_request_threaded_irq(dev, mtk_dp->irq, mtk_dp_hpd_event, 2626 mtk_dp_hpd_event_thread, 2627 IRQ_TYPE_LEVEL_HIGH, dev_name(dev), 2628 mtk_dp); 2629 if (ret) 2630 return dev_err_probe(dev, ret, 2631 "failed to request mediatek dptx irq\n"); 2632 2633 mtk_dp->need_debounce = true; 2634 timer_setup(&mtk_dp->debounce_timer, mtk_dp_debounce_timer, 0); 2635 } 2636 2637 mtk_dp->aux.name = "aux_mtk_dp"; 2638 mtk_dp->aux.dev = dev; 2639 mtk_dp->aux.transfer = mtk_dp_aux_transfer; 2640 mtk_dp->aux.wait_hpd_asserted = mtk_dp_wait_hpd_asserted; 2641 drm_dp_aux_init(&mtk_dp->aux); 2642 2643 platform_set_drvdata(pdev, mtk_dp); 2644 2645 if (mtk_dp->data->audio_supported) { 2646 mutex_init(&mtk_dp->update_plugged_status_lock); 2647 2648 ret = mtk_dp_register_audio_driver(dev); 2649 if (ret) { 2650 dev_err(dev, "Failed to register audio driver: %d\n", 2651 ret); 2652 return ret; 2653 } 2654 } 2655 2656 ret = mtk_dp_register_phy(mtk_dp); 2657 if (ret) 2658 return ret; 2659 2660 mtk_dp->bridge.funcs = &mtk_dp_bridge_funcs; 2661 mtk_dp->bridge.of_node = dev->of_node; 2662 mtk_dp->bridge.type = mtk_dp->data->bridge_type; 2663 2664 if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP) { 2665 /* 2666 * Set the data lanes to idle in case the bootloader didn't 2667 * properly close the eDP port to avoid stalls and then 2668 * reinitialize, reset and power on the AUX block. 2669 */ 2670 mtk_dp_set_idle_pattern(mtk_dp, true); 2671 mtk_dp_initialize_aux_settings(mtk_dp); 2672 mtk_dp_power_enable(mtk_dp); 2673 2674 /* Disable HW interrupts: we don't need any for eDP */ 2675 mtk_dp_hwirq_enable(mtk_dp, false); 2676 2677 /* 2678 * Power on the AUX to allow reading the EDID from aux-bus: 2679 * please note that it is necessary to call power off in the 2680 * .done_probing() callback (mtk_dp_edp_link_panel), as only 2681 * there we can safely assume that we finished reading EDID. 2682 */ 2683 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 2684 DP_PWR_STATE_BANDGAP_TPLL_LANE, 2685 DP_PWR_STATE_MASK); 2686 2687 ret = devm_of_dp_aux_populate_bus(&mtk_dp->aux, mtk_dp_edp_link_panel); 2688 if (ret) { 2689 /* -ENODEV this means that the panel is not on the aux-bus */ 2690 if (ret == -ENODEV) { 2691 ret = mtk_dp_edp_link_panel(&mtk_dp->aux); 2692 if (ret) 2693 return ret; 2694 } else { 2695 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 2696 DP_PWR_STATE_BANDGAP_TPLL, 2697 DP_PWR_STATE_MASK); 2698 mtk_dp_power_disable(mtk_dp); 2699 return ret; 2700 } 2701 } 2702 } else { 2703 mtk_dp->bridge.ops = DRM_BRIDGE_OP_DETECT | 2704 DRM_BRIDGE_OP_EDID | DRM_BRIDGE_OP_HPD; 2705 ret = devm_drm_bridge_add(dev, &mtk_dp->bridge); 2706 if (ret) 2707 return dev_err_probe(dev, ret, "Failed to add bridge\n"); 2708 } 2709 2710 pm_runtime_enable(dev); 2711 pm_runtime_get_sync(dev); 2712 2713 return 0; 2714 } 2715 2716 static void mtk_dp_remove(struct platform_device *pdev) 2717 { 2718 struct mtk_dp *mtk_dp = platform_get_drvdata(pdev); 2719 2720 pm_runtime_put(&pdev->dev); 2721 pm_runtime_disable(&pdev->dev); 2722 if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP) 2723 del_timer_sync(&mtk_dp->debounce_timer); 2724 platform_device_unregister(mtk_dp->phy_dev); 2725 if (mtk_dp->audio_pdev) 2726 platform_device_unregister(mtk_dp->audio_pdev); 2727 } 2728 2729 #ifdef CONFIG_PM_SLEEP 2730 static int mtk_dp_suspend(struct device *dev) 2731 { 2732 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2733 2734 mtk_dp_power_disable(mtk_dp); 2735 if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) 2736 mtk_dp_hwirq_enable(mtk_dp, false); 2737 pm_runtime_put_sync(dev); 2738 2739 return 0; 2740 } 2741 2742 static int mtk_dp_resume(struct device *dev) 2743 { 2744 struct mtk_dp *mtk_dp = dev_get_drvdata(dev); 2745 2746 pm_runtime_get_sync(dev); 2747 mtk_dp_init_port(mtk_dp); 2748 if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) 2749 mtk_dp_hwirq_enable(mtk_dp, true); 2750 mtk_dp_power_enable(mtk_dp); 2751 2752 return 0; 2753 } 2754 #endif 2755 2756 static SIMPLE_DEV_PM_OPS(mtk_dp_pm_ops, mtk_dp_suspend, mtk_dp_resume); 2757 2758 static const struct mtk_dp_data mt8188_dp_data = { 2759 .bridge_type = DRM_MODE_CONNECTOR_DisplayPort, 2760 .smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE, 2761 .efuse_fmt = mt8195_dp_efuse_fmt, 2762 .audio_supported = true, 2763 .audio_pkt_in_hblank_area = true, 2764 .audio_m_div2_bit = MT8188_AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2, 2765 }; 2766 2767 static const struct mtk_dp_data mt8195_edp_data = { 2768 .bridge_type = DRM_MODE_CONNECTOR_eDP, 2769 .smc_cmd = MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE, 2770 .efuse_fmt = mt8195_edp_efuse_fmt, 2771 .audio_supported = false, 2772 .audio_m_div2_bit = MT8195_AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2, 2773 }; 2774 2775 static const struct mtk_dp_data mt8195_dp_data = { 2776 .bridge_type = DRM_MODE_CONNECTOR_DisplayPort, 2777 .smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE, 2778 .efuse_fmt = mt8195_dp_efuse_fmt, 2779 .audio_supported = true, 2780 .audio_m_div2_bit = MT8195_AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2, 2781 }; 2782 2783 static const struct of_device_id mtk_dp_of_match[] = { 2784 { 2785 .compatible = "mediatek,mt8188-edp-tx", 2786 .data = &mt8195_edp_data, 2787 }, 2788 { 2789 .compatible = "mediatek,mt8188-dp-tx", 2790 .data = &mt8188_dp_data, 2791 }, 2792 { 2793 .compatible = "mediatek,mt8195-edp-tx", 2794 .data = &mt8195_edp_data, 2795 }, 2796 { 2797 .compatible = "mediatek,mt8195-dp-tx", 2798 .data = &mt8195_dp_data, 2799 }, 2800 {}, 2801 }; 2802 MODULE_DEVICE_TABLE(of, mtk_dp_of_match); 2803 2804 static struct platform_driver mtk_dp_driver = { 2805 .probe = mtk_dp_probe, 2806 .remove_new = mtk_dp_remove, 2807 .driver = { 2808 .name = "mediatek-drm-dp", 2809 .of_match_table = mtk_dp_of_match, 2810 .pm = &mtk_dp_pm_ops, 2811 }, 2812 }; 2813 2814 module_platform_driver(mtk_dp_driver); 2815 2816 MODULE_AUTHOR("Jitao Shi <jitao.shi@mediatek.com>"); 2817 MODULE_AUTHOR("Markus Schneider-Pargmann <msp@baylibre.com>"); 2818 MODULE_AUTHOR("Bo-Chen Chen <rex-bc.chen@mediatek.com>"); 2819 MODULE_DESCRIPTION("MediaTek DisplayPort Driver"); 2820 MODULE_LICENSE("GPL"); 2821