// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2019-2022 MediaTek Inc.
 * Copyright (c) 2022 BayLibre
 */

#include <drm/display/drm_dp.h>
#include <drm/display/drm_dp_helper.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_crtc.h>
#include <drm/drm_edid.h>
#include <drm/drm_of.h>
#include <drm/drm_panel.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <linux/arm-smccc.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/media-bus-format.h>
#include <linux/nvmem-consumer.h>
#include <linux/of.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/phy/phy.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/regmap.h>
#include <linux/soc/mediatek/mtk_sip_svc.h>
#include <sound/hdmi-codec.h>
#include <video/videomode.h>

#include "mtk_dp_reg.h"

#define MTK_DP_SIP_CONTROL_AARCH32	MTK_SIP_SMC_CMD(0x523)
#define MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE	(BIT(0) | BIT(5))
#define MTK_DP_SIP_ATF_VIDEO_UNMUTE	BIT(5)

#define MTK_DP_THREAD_CABLE_STATE_CHG	BIT(0)
#define MTK_DP_THREAD_HPD_EVENT		BIT(1)

#define MTK_DP_4P1T 4
#define MTK_DP_HDE 2
#define MTK_DP_PIX_PER_ADDR 2
#define MTK_DP_AUX_WAIT_REPLY_COUNT 20
#define MTK_DP_TBC_BUF_READ_START_ADDR 0x8
#define MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY 5
#define MTK_DP_TRAIN_DOWNSCALE_RETRY 10
#define MTK_DP_VERSION 0x11
#define MTK_DP_SDP_AUI 0x4

enum {
	MTK_DP_CAL_GLB_BIAS_TRIM = 0,
	MTK_DP_CAL_CLKTX_IMPSE,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3,
	MTK_DP_CAL_MAX,
};

struct mtk_dp_train_info {
	bool sink_ssc;
	bool cable_plugged_in;
	/* link_rate is in multiple of 0.27Gbps */
	int link_rate;
	int lane_count;
	unsigned int channel_eq_pattern;
};

struct mtk_dp_audio_cfg {
	bool detect_monitor;
	int sad_count;
	int sample_rate;
	int word_length_bits;
	int channels;
};

struct mtk_dp_info {
	enum dp_pixelformat format;
	struct videomode vm;
	struct mtk_dp_audio_cfg audio_cur_cfg;
};

struct mtk_dp_efuse_fmt {
	unsigned short idx;
	unsigned short shift;
	unsigned short mask;
	unsigned short min_val;
	unsigned short max_val;
	unsigned short default_val;
};

struct mtk_dp {
	bool enabled;
	bool need_debounce;
	u8 max_lanes;
	u8 max_linkrate;
	u8 rx_cap[DP_RECEIVER_CAP_SIZE];
	u32 cal_data[MTK_DP_CAL_MAX];
	u32 irq_thread_handle;
	/* irq_thread_lock is used to protect irq_thread_handle */
	spinlock_t irq_thread_lock;

	struct device *dev;
	struct drm_bridge bridge;
	struct drm_bridge *next_bridge;
	struct drm_connector *conn;
	struct drm_device *drm_dev;
	struct drm_dp_aux aux;

	const struct mtk_dp_data *data;
	struct mtk_dp_info info;
	struct mtk_dp_train_info train_info;

	struct platform_device *phy_dev;
	struct phy *phy;
	struct regmap *regs;
	struct timer_list debounce_timer;

	/* For audio */
	bool audio_enable;
	hdmi_codec_plugged_cb plugged_cb;
	struct platform_device *audio_pdev;

	struct device *codec_dev;
	/* protect
the plugged_cb as it's used in both bridge ops and audio */ 134 struct mutex update_plugged_status_lock; 135 }; 136 137 struct mtk_dp_data { 138 int bridge_type; 139 unsigned int smc_cmd; 140 const struct mtk_dp_efuse_fmt *efuse_fmt; 141 bool audio_supported; 142 }; 143 144 static const struct mtk_dp_efuse_fmt mt8195_edp_efuse_fmt[MTK_DP_CAL_MAX] = { 145 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 146 .idx = 3, 147 .shift = 27, 148 .mask = 0x1f, 149 .min_val = 1, 150 .max_val = 0x1e, 151 .default_val = 0xf, 152 }, 153 [MTK_DP_CAL_CLKTX_IMPSE] = { 154 .idx = 0, 155 .shift = 9, 156 .mask = 0xf, 157 .min_val = 1, 158 .max_val = 0xe, 159 .default_val = 0x8, 160 }, 161 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 162 .idx = 2, 163 .shift = 28, 164 .mask = 0xf, 165 .min_val = 1, 166 .max_val = 0xe, 167 .default_val = 0x8, 168 }, 169 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 170 .idx = 2, 171 .shift = 20, 172 .mask = 0xf, 173 .min_val = 1, 174 .max_val = 0xe, 175 .default_val = 0x8, 176 }, 177 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 178 .idx = 2, 179 .shift = 12, 180 .mask = 0xf, 181 .min_val = 1, 182 .max_val = 0xe, 183 .default_val = 0x8, 184 }, 185 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 186 .idx = 2, 187 .shift = 4, 188 .mask = 0xf, 189 .min_val = 1, 190 .max_val = 0xe, 191 .default_val = 0x8, 192 }, 193 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 194 .idx = 2, 195 .shift = 24, 196 .mask = 0xf, 197 .min_val = 1, 198 .max_val = 0xe, 199 .default_val = 0x8, 200 }, 201 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 202 .idx = 2, 203 .shift = 16, 204 .mask = 0xf, 205 .min_val = 1, 206 .max_val = 0xe, 207 .default_val = 0x8, 208 }, 209 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 210 .idx = 2, 211 .shift = 8, 212 .mask = 0xf, 213 .min_val = 1, 214 .max_val = 0xe, 215 .default_val = 0x8, 216 }, 217 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 218 .idx = 2, 219 .shift = 0, 220 .mask = 0xf, 221 .min_val = 1, 222 .max_val = 0xe, 223 .default_val = 0x8, 224 }, 225 }; 226 227 static const struct mtk_dp_efuse_fmt mt8195_dp_efuse_fmt[MTK_DP_CAL_MAX] = { 228 [MTK_DP_CAL_GLB_BIAS_TRIM] = { 229 .idx = 0, 230 .shift = 27, 231 .mask = 0x1f, 232 .min_val = 1, 233 .max_val = 0x1e, 234 .default_val = 0xf, 235 }, 236 [MTK_DP_CAL_CLKTX_IMPSE] = { 237 .idx = 0, 238 .shift = 13, 239 .mask = 0xf, 240 .min_val = 1, 241 .max_val = 0xe, 242 .default_val = 0x8, 243 }, 244 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = { 245 .idx = 1, 246 .shift = 28, 247 .mask = 0xf, 248 .min_val = 1, 249 .max_val = 0xe, 250 .default_val = 0x8, 251 }, 252 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = { 253 .idx = 1, 254 .shift = 20, 255 .mask = 0xf, 256 .min_val = 1, 257 .max_val = 0xe, 258 .default_val = 0x8, 259 }, 260 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = { 261 .idx = 1, 262 .shift = 12, 263 .mask = 0xf, 264 .min_val = 1, 265 .max_val = 0xe, 266 .default_val = 0x8, 267 }, 268 [MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = { 269 .idx = 1, 270 .shift = 4, 271 .mask = 0xf, 272 .min_val = 1, 273 .max_val = 0xe, 274 .default_val = 0x8, 275 }, 276 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = { 277 .idx = 1, 278 .shift = 24, 279 .mask = 0xf, 280 .min_val = 1, 281 .max_val = 0xe, 282 .default_val = 0x8, 283 }, 284 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = { 285 .idx = 1, 286 .shift = 16, 287 .mask = 0xf, 288 .min_val = 1, 289 .max_val = 0xe, 290 .default_val = 0x8, 291 }, 292 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = { 293 .idx = 1, 294 .shift = 8, 295 .mask = 0xf, 296 .min_val = 1, 297 .max_val = 0xe, 298 .default_val = 0x8, 299 }, 300 [MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = { 301 .idx = 1, 302 .shift = 0, 303 .mask = 0xf, 304 .min_val = 1, 305 .max_val = 0xe, 306 
.default_val = 0x8, 307 }, 308 }; 309 310 static struct regmap_config mtk_dp_regmap_config = { 311 .reg_bits = 32, 312 .val_bits = 32, 313 .reg_stride = 4, 314 .max_register = SEC_OFFSET + 0x90, 315 .name = "mtk-dp-registers", 316 }; 317 318 static struct mtk_dp *mtk_dp_from_bridge(struct drm_bridge *b) 319 { 320 return container_of(b, struct mtk_dp, bridge); 321 } 322 323 static u32 mtk_dp_read(struct mtk_dp *mtk_dp, u32 offset) 324 { 325 u32 read_val; 326 int ret; 327 328 ret = regmap_read(mtk_dp->regs, offset, &read_val); 329 if (ret) { 330 dev_err(mtk_dp->dev, "Failed to read register 0x%x: %d\n", 331 offset, ret); 332 return 0; 333 } 334 335 return read_val; 336 } 337 338 static int mtk_dp_write(struct mtk_dp *mtk_dp, u32 offset, u32 val) 339 { 340 int ret = regmap_write(mtk_dp->regs, offset, val); 341 342 if (ret) 343 dev_err(mtk_dp->dev, 344 "Failed to write register 0x%x with value 0x%x\n", 345 offset, val); 346 return ret; 347 } 348 349 static int mtk_dp_update_bits(struct mtk_dp *mtk_dp, u32 offset, 350 u32 val, u32 mask) 351 { 352 int ret = regmap_update_bits(mtk_dp->regs, offset, mask, val); 353 354 if (ret) 355 dev_err(mtk_dp->dev, 356 "Failed to update register 0x%x with value 0x%x, mask 0x%x\n", 357 offset, val, mask); 358 return ret; 359 } 360 361 static void mtk_dp_bulk_16bit_write(struct mtk_dp *mtk_dp, u32 offset, u8 *buf, 362 size_t length) 363 { 364 int i; 365 366 /* 2 bytes per register */ 367 for (i = 0; i < length; i += 2) { 368 u32 val = buf[i] | (i + 1 < length ? buf[i + 1] << 8 : 0); 369 370 if (mtk_dp_write(mtk_dp, offset + i * 2, val)) 371 return; 372 } 373 } 374 375 static void mtk_dp_msa_bypass_enable(struct mtk_dp *mtk_dp, bool enable) 376 { 377 u32 mask = HTOTAL_SEL_DP_ENC0_P0 | VTOTAL_SEL_DP_ENC0_P0 | 378 HSTART_SEL_DP_ENC0_P0 | VSTART_SEL_DP_ENC0_P0 | 379 HWIDTH_SEL_DP_ENC0_P0 | VHEIGHT_SEL_DP_ENC0_P0 | 380 HSP_SEL_DP_ENC0_P0 | HSW_SEL_DP_ENC0_P0 | 381 VSP_SEL_DP_ENC0_P0 | VSW_SEL_DP_ENC0_P0; 382 383 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, enable ? 
0 : mask, mask); 384 } 385 386 static void mtk_dp_set_msa(struct mtk_dp *mtk_dp) 387 { 388 struct drm_display_mode mode; 389 struct videomode *vm = &mtk_dp->info.vm; 390 391 drm_display_mode_from_videomode(vm, &mode); 392 393 /* horizontal */ 394 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3010, 395 mode.htotal, HTOTAL_SW_DP_ENC0_P0_MASK); 396 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3018, 397 vm->hsync_len + vm->hback_porch, 398 HSTART_SW_DP_ENC0_P0_MASK); 399 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028, 400 vm->hsync_len, HSW_SW_DP_ENC0_P0_MASK); 401 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028, 402 0, HSP_SW_DP_ENC0_P0_MASK); 403 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3020, 404 vm->hactive, HWIDTH_SW_DP_ENC0_P0_MASK); 405 406 /* vertical */ 407 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3014, 408 mode.vtotal, VTOTAL_SW_DP_ENC0_P0_MASK); 409 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_301C, 410 vm->vsync_len + vm->vback_porch, 411 VSTART_SW_DP_ENC0_P0_MASK); 412 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C, 413 vm->vsync_len, VSW_SW_DP_ENC0_P0_MASK); 414 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C, 415 0, VSP_SW_DP_ENC0_P0_MASK); 416 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3024, 417 vm->vactive, VHEIGHT_SW_DP_ENC0_P0_MASK); 418 419 /* horizontal */ 420 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3064, 421 vm->hactive, HDE_NUM_LAST_DP_ENC0_P0_MASK); 422 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3154, 423 mode.htotal, PGEN_HTOTAL_DP_ENC0_P0_MASK); 424 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3158, 425 vm->hfront_porch, 426 PGEN_HSYNC_RISING_DP_ENC0_P0_MASK); 427 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_315C, 428 vm->hsync_len, 429 PGEN_HSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK); 430 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3160, 431 vm->hback_porch + vm->hsync_len, 432 PGEN_HFDE_START_DP_ENC0_P0_MASK); 433 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3164, 434 vm->hactive, 435 PGEN_HFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK); 436 437 /* vertical */ 438 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3168, 439 mode.vtotal, 440 PGEN_VTOTAL_DP_ENC0_P0_MASK); 441 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_316C, 442 vm->vfront_porch, 443 PGEN_VSYNC_RISING_DP_ENC0_P0_MASK); 444 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3170, 445 vm->vsync_len, 446 PGEN_VSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK); 447 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3174, 448 vm->vback_porch + vm->vsync_len, 449 PGEN_VFDE_START_DP_ENC0_P0_MASK); 450 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3178, 451 vm->vactive, 452 PGEN_VFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK); 453 } 454 455 static int mtk_dp_set_color_format(struct mtk_dp *mtk_dp, 456 enum dp_pixelformat color_format) 457 { 458 u32 val; 459 460 /* update MISC0 */ 461 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034, 462 color_format << DP_TEST_COLOR_FORMAT_SHIFT, 463 DP_TEST_COLOR_FORMAT_MASK); 464 465 switch (color_format) { 466 case DP_PIXELFORMAT_YUV422: 467 val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_YCBCR422; 468 break; 469 case DP_PIXELFORMAT_RGB: 470 val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_RGB; 471 break; 472 default: 473 drm_warn(mtk_dp->drm_dev, "Unsupported color format: %d\n", 474 color_format); 475 return -EINVAL; 476 } 477 478 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 479 val, PIXEL_ENCODE_FORMAT_DP_ENC0_P0_MASK); 480 return 0; 481 } 482 483 static void mtk_dp_set_color_depth(struct mtk_dp *mtk_dp) 484 { 485 /* Only support 8 bits currently */ 486 /* Update MISC0 */ 487 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034, 488 DP_MSA_MISC_8_BPC, DP_TEST_BIT_DEPTH_MASK); 489 490 
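	/* Mirror the 8 bpc depth into the video encoder's color depth field */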
mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 491 VIDEO_COLOR_DEPTH_DP_ENC0_P0_8BIT, 492 VIDEO_COLOR_DEPTH_DP_ENC0_P0_MASK); 493 } 494 495 static void mtk_dp_config_mn_mode(struct mtk_dp *mtk_dp) 496 { 497 /* 0: hw mode, 1: sw mode */ 498 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 499 0, VIDEO_M_CODE_SEL_DP_ENC0_P0_MASK); 500 } 501 502 static void mtk_dp_set_sram_read_start(struct mtk_dp *mtk_dp, u32 val) 503 { 504 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 505 val, SRAM_START_READ_THRD_DP_ENC0_P0_MASK); 506 } 507 508 static void mtk_dp_setup_encoder(struct mtk_dp *mtk_dp) 509 { 510 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C, 511 VIDEO_MN_GEN_EN_DP_ENC0_P0, 512 VIDEO_MN_GEN_EN_DP_ENC0_P0); 513 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040, 514 SDP_DOWN_CNT_DP_ENC0_P0_VAL, 515 SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK); 516 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, 517 SDP_DOWN_CNT_IN_HBLANK_DP_ENC1_P0_VAL, 518 SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK); 519 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3300, 520 VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_VAL << 8, 521 VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_MASK); 522 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, 523 FIFO_READ_START_POINT_DP_ENC1_P0_VAL << 12, 524 FIFO_READ_START_POINT_DP_ENC1_P0_MASK); 525 mtk_dp_write(mtk_dp, MTK_DP_ENC1_P0_3368, DP_ENC1_P0_3368_VAL); 526 } 527 528 static void mtk_dp_pg_enable(struct mtk_dp *mtk_dp, bool enable) 529 { 530 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3038, 531 enable ? VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK : 0, 532 VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK); 533 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31B0, 534 PGEN_PATTERN_SEL_VAL << 4, PGEN_PATTERN_SEL_MASK); 535 } 536 537 static void mtk_dp_audio_setup_channels(struct mtk_dp *mtk_dp, 538 struct mtk_dp_audio_cfg *cfg) 539 { 540 u32 channel_enable_bits; 541 542 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3324, 543 AUDIO_SOURCE_MUX_DP_ENC1_P0_DPRX, 544 AUDIO_SOURCE_MUX_DP_ENC1_P0_MASK); 545 546 /* audio channel count change reset */ 547 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 548 DP_ENC_DUMMY_RW_1, DP_ENC_DUMMY_RW_1); 549 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3304, 550 AU_PRTY_REGEN_DP_ENC1_P0_MASK | 551 AU_CH_STS_REGEN_DP_ENC1_P0_MASK | 552 AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK, 553 AU_PRTY_REGEN_DP_ENC1_P0_MASK | 554 AU_CH_STS_REGEN_DP_ENC1_P0_MASK | 555 AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK); 556 557 switch (cfg->channels) { 558 case 2: 559 channel_enable_bits = AUDIO_2CH_SEL_DP_ENC0_P0_MASK | 560 AUDIO_2CH_EN_DP_ENC0_P0_MASK; 561 break; 562 case 8: 563 default: 564 channel_enable_bits = AUDIO_8CH_SEL_DP_ENC0_P0_MASK | 565 AUDIO_8CH_EN_DP_ENC0_P0_MASK; 566 break; 567 } 568 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088, 569 channel_enable_bits | AU_EN_DP_ENC0_P0, 570 AUDIO_2CH_SEL_DP_ENC0_P0_MASK | 571 AUDIO_2CH_EN_DP_ENC0_P0_MASK | 572 AUDIO_8CH_SEL_DP_ENC0_P0_MASK | 573 AUDIO_8CH_EN_DP_ENC0_P0_MASK | 574 AU_EN_DP_ENC0_P0); 575 576 /* audio channel count change reset */ 577 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 0, DP_ENC_DUMMY_RW_1); 578 579 /* enable audio reset */ 580 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 581 DP_ENC_DUMMY_RW_1_AUDIO_RST_EN, 582 DP_ENC_DUMMY_RW_1_AUDIO_RST_EN); 583 } 584 585 static void mtk_dp_audio_channel_status_set(struct mtk_dp *mtk_dp, 586 struct mtk_dp_audio_cfg *cfg) 587 { 588 struct snd_aes_iec958 iec = { 0 }; 589 590 switch (cfg->sample_rate) { 591 case 32000: 592 iec.status[3] = IEC958_AES3_CON_FS_32000; 593 break; 594 case 44100: 595 iec.status[3] = IEC958_AES3_CON_FS_44100; 596 break; 597 
case 48000: 598 iec.status[3] = IEC958_AES3_CON_FS_48000; 599 break; 600 case 88200: 601 iec.status[3] = IEC958_AES3_CON_FS_88200; 602 break; 603 case 96000: 604 iec.status[3] = IEC958_AES3_CON_FS_96000; 605 break; 606 case 192000: 607 iec.status[3] = IEC958_AES3_CON_FS_192000; 608 break; 609 default: 610 iec.status[3] = IEC958_AES3_CON_FS_NOTID; 611 break; 612 } 613 614 switch (cfg->word_length_bits) { 615 case 16: 616 iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16; 617 break; 618 case 20: 619 iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16 | 620 IEC958_AES4_CON_MAX_WORDLEN_24; 621 break; 622 case 24: 623 iec.status[4] = IEC958_AES4_CON_WORDLEN_24_20 | 624 IEC958_AES4_CON_MAX_WORDLEN_24; 625 break; 626 default: 627 iec.status[4] = IEC958_AES4_CON_WORDLEN_NOTID; 628 } 629 630 /* IEC 60958 consumer channel status bits */ 631 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_308C, 632 0, CH_STATUS_0_DP_ENC0_P0_MASK); 633 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3090, 634 iec.status[3] << 8, CH_STATUS_1_DP_ENC0_P0_MASK); 635 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3094, 636 iec.status[4], CH_STATUS_2_DP_ENC0_P0_MASK); 637 } 638 639 static void mtk_dp_audio_sdp_asp_set_channels(struct mtk_dp *mtk_dp, 640 int channels) 641 { 642 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_312C, 643 (min(8, channels) - 1) << 8, 644 ASP_HB2_DP_ENC0_P0_MASK | ASP_HB3_DP_ENC0_P0_MASK); 645 } 646 647 static void mtk_dp_audio_set_divider(struct mtk_dp *mtk_dp) 648 { 649 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30BC, 650 AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2, 651 AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_MASK); 652 } 653 654 static void mtk_dp_sdp_trigger_aui(struct mtk_dp *mtk_dp) 655 { 656 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280, 657 MTK_DP_SDP_AUI, SDP_PACKET_TYPE_DP_ENC1_P0_MASK); 658 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280, 659 SDP_PACKET_W_DP_ENC1_P0, SDP_PACKET_W_DP_ENC1_P0); 660 } 661 662 static void mtk_dp_sdp_set_data(struct mtk_dp *mtk_dp, u8 *data_bytes) 663 { 664 mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_ENC1_P0_3200, 665 data_bytes, 0x10); 666 } 667 668 static void mtk_dp_sdp_set_header_aui(struct mtk_dp *mtk_dp, 669 struct dp_sdp_header *header) 670 { 671 u32 db_addr = MTK_DP_ENC0_P0_30D8 + (MTK_DP_SDP_AUI - 1) * 8; 672 673 mtk_dp_bulk_16bit_write(mtk_dp, db_addr, (u8 *)header, 4); 674 } 675 676 static void mtk_dp_disable_sdp_aui(struct mtk_dp *mtk_dp) 677 { 678 /* Disable periodic send */ 679 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 0, 680 0xff << ((MTK_DP_ENC0_P0_30A8 & 3) * 8)); 681 } 682 683 static void mtk_dp_setup_sdp_aui(struct mtk_dp *mtk_dp, 684 struct dp_sdp *sdp) 685 { 686 u32 shift; 687 688 mtk_dp_sdp_set_data(mtk_dp, sdp->db); 689 mtk_dp_sdp_set_header_aui(mtk_dp, &sdp->sdp_header); 690 mtk_dp_disable_sdp_aui(mtk_dp); 691 692 shift = (MTK_DP_ENC0_P0_30A8 & 3) * 8; 693 694 mtk_dp_sdp_trigger_aui(mtk_dp); 695 /* Enable periodic sending */ 696 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 697 0x05 << shift, 0xff << shift); 698 } 699 700 static void mtk_dp_aux_irq_clear(struct mtk_dp *mtk_dp) 701 { 702 mtk_dp_write(mtk_dp, MTK_DP_AUX_P0_3640, DP_AUX_P0_3640_VAL); 703 } 704 705 static void mtk_dp_aux_set_cmd(struct mtk_dp *mtk_dp, u8 cmd, u32 addr) 706 { 707 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3644, 708 cmd, MCU_REQUEST_COMMAND_AUX_TX_P0_MASK); 709 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3648, 710 addr, MCU_REQUEST_ADDRESS_LSB_AUX_TX_P0_MASK); 711 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_364C, 712 addr >> 16, MCU_REQUEST_ADDRESS_MSB_AUX_TX_P0_MASK); 713 } 714 
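/*
 * Note: the helpers below each program one step of the low-level AUX
 * transaction sequence driven by mtk_dp_aux_do_transfer() further down.
 * A rough sketch of that flow (error handling omitted):
 *
 *	mtk_dp_aux_clear_fifo(mtk_dp);
 *	mtk_dp_aux_irq_clear(mtk_dp);
 *	mtk_dp_aux_set_cmd(mtk_dp, cmd, addr);
 *	mtk_dp_aux_set_length(mtk_dp, length);
 *	if (!is_read && length)
 *		mtk_dp_aux_fill_write_fifo(mtk_dp, buf, length);
 *	mtk_dp_aux_request_ready(mtk_dp);
 *	ret = mtk_dp_aux_wait_for_completion(mtk_dp, is_read);
 *	if (is_read && !ret)
 *		mtk_dp_aux_read_rx_fifo(mtk_dp, buf, length, read_delay);
 */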
715 static void mtk_dp_aux_clear_fifo(struct mtk_dp *mtk_dp) 716 { 717 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650, 718 MCU_ACK_TRAN_COMPLETE_AUX_TX_P0, 719 MCU_ACK_TRAN_COMPLETE_AUX_TX_P0 | 720 PHY_FIFO_RST_AUX_TX_P0_MASK | 721 MCU_REQ_DATA_NUM_AUX_TX_P0_MASK); 722 } 723 724 static void mtk_dp_aux_request_ready(struct mtk_dp *mtk_dp) 725 { 726 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3630, 727 AUX_TX_REQUEST_READY_AUX_TX_P0, 728 AUX_TX_REQUEST_READY_AUX_TX_P0); 729 } 730 731 static void mtk_dp_aux_fill_write_fifo(struct mtk_dp *mtk_dp, u8 *buf, 732 size_t length) 733 { 734 mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_AUX_P0_3708, buf, length); 735 } 736 737 static void mtk_dp_aux_read_rx_fifo(struct mtk_dp *mtk_dp, u8 *buf, 738 size_t length, int read_delay) 739 { 740 int read_pos; 741 742 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620, 743 0, AUX_RD_MODE_AUX_TX_P0_MASK); 744 745 for (read_pos = 0; read_pos < length; read_pos++) { 746 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620, 747 AUX_RX_FIFO_READ_PULSE_TX_P0, 748 AUX_RX_FIFO_READ_PULSE_TX_P0); 749 750 /* Hardware needs time to update the data */ 751 usleep_range(read_delay, read_delay * 2); 752 buf[read_pos] = (u8)(mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3620) & 753 AUX_RX_FIFO_READ_DATA_AUX_TX_P0_MASK); 754 } 755 } 756 757 static void mtk_dp_aux_set_length(struct mtk_dp *mtk_dp, size_t length) 758 { 759 if (length > 0) { 760 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650, 761 (length - 1) << 12, 762 MCU_REQ_DATA_NUM_AUX_TX_P0_MASK); 763 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 764 0, 765 AUX_NO_LENGTH_AUX_TX_P0 | 766 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 767 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 768 } else { 769 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 770 AUX_NO_LENGTH_AUX_TX_P0, 771 AUX_NO_LENGTH_AUX_TX_P0 | 772 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 773 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 774 } 775 } 776 777 static int mtk_dp_aux_wait_for_completion(struct mtk_dp *mtk_dp, bool is_read) 778 { 779 int wait_reply = MTK_DP_AUX_WAIT_REPLY_COUNT; 780 781 while (--wait_reply) { 782 u32 aux_irq_status; 783 784 if (is_read) { 785 u32 fifo_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3618); 786 787 if (fifo_status & 788 (AUX_RX_FIFO_WRITE_POINTER_AUX_TX_P0_MASK | 789 AUX_RX_FIFO_FULL_AUX_TX_P0_MASK)) { 790 return 0; 791 } 792 } 793 794 aux_irq_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3640); 795 if (aux_irq_status & AUX_RX_AUX_RECV_COMPLETE_IRQ_AUX_TX_P0) 796 return 0; 797 798 if (aux_irq_status & AUX_400US_TIMEOUT_IRQ_AUX_TX_P0) 799 return -ETIMEDOUT; 800 801 /* Give the hardware a chance to reach completion before retrying */ 802 usleep_range(100, 500); 803 } 804 805 return -ETIMEDOUT; 806 } 807 808 static int mtk_dp_aux_do_transfer(struct mtk_dp *mtk_dp, bool is_read, u8 cmd, 809 u32 addr, u8 *buf, size_t length) 810 { 811 int ret; 812 u32 reply_cmd; 813 814 if (is_read && (length > DP_AUX_MAX_PAYLOAD_BYTES || 815 (cmd == DP_AUX_NATIVE_READ && !length))) 816 return -EINVAL; 817 818 if (!is_read) 819 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704, 820 AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0, 821 AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0); 822 823 /* We need to clear fifo and irq before sending commands to the sink device. 
*/ 824 mtk_dp_aux_clear_fifo(mtk_dp); 825 mtk_dp_aux_irq_clear(mtk_dp); 826 827 mtk_dp_aux_set_cmd(mtk_dp, cmd, addr); 828 mtk_dp_aux_set_length(mtk_dp, length); 829 830 if (!is_read) { 831 if (length) 832 mtk_dp_aux_fill_write_fifo(mtk_dp, buf, length); 833 834 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704, 835 AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK, 836 AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK); 837 } 838 839 mtk_dp_aux_request_ready(mtk_dp); 840 841 /* Wait for feedback from sink device. */ 842 ret = mtk_dp_aux_wait_for_completion(mtk_dp, is_read); 843 844 reply_cmd = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3624) & 845 AUX_RX_REPLY_COMMAND_AUX_TX_P0_MASK; 846 847 if (ret || reply_cmd) { 848 u32 phy_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3628) & 849 AUX_RX_PHY_STATE_AUX_TX_P0_MASK; 850 if (phy_status != AUX_RX_PHY_STATE_AUX_TX_P0_RX_IDLE) { 851 drm_err(mtk_dp->drm_dev, 852 "AUX Rx Aux hang, need SW reset\n"); 853 return -EIO; 854 } 855 856 return -ETIMEDOUT; 857 } 858 859 if (!length) { 860 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C, 861 0, 862 AUX_NO_LENGTH_AUX_TX_P0 | 863 AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK | 864 AUX_RESERVED_RW_0_AUX_TX_P0_MASK); 865 } else if (is_read) { 866 int read_delay; 867 868 if (cmd == (DP_AUX_I2C_READ | DP_AUX_I2C_MOT) || 869 cmd == DP_AUX_I2C_READ) 870 read_delay = 500; 871 else 872 read_delay = 100; 873 874 mtk_dp_aux_read_rx_fifo(mtk_dp, buf, length, read_delay); 875 } 876 877 return 0; 878 } 879 880 static void mtk_dp_set_swing_pre_emphasis(struct mtk_dp *mtk_dp, int lane_num, 881 int swing_val, int preemphasis) 882 { 883 u32 lane_shift = lane_num * DP_TX1_VOLT_SWING_SHIFT; 884 885 dev_dbg(mtk_dp->dev, 886 "link training: swing_val = 0x%x, pre-emphasis = 0x%x\n", 887 swing_val, preemphasis); 888 889 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 890 swing_val << (DP_TX0_VOLT_SWING_SHIFT + lane_shift), 891 DP_TX0_VOLT_SWING_MASK << lane_shift); 892 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 893 preemphasis << (DP_TX0_PRE_EMPH_SHIFT + lane_shift), 894 DP_TX0_PRE_EMPH_MASK << lane_shift); 895 } 896 897 static void mtk_dp_reset_swing_pre_emphasis(struct mtk_dp *mtk_dp) 898 { 899 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP, 900 0, 901 DP_TX0_VOLT_SWING_MASK | 902 DP_TX1_VOLT_SWING_MASK | 903 DP_TX2_VOLT_SWING_MASK | 904 DP_TX3_VOLT_SWING_MASK | 905 DP_TX0_PRE_EMPH_MASK | 906 DP_TX1_PRE_EMPH_MASK | 907 DP_TX2_PRE_EMPH_MASK | 908 DP_TX3_PRE_EMPH_MASK); 909 } 910 911 static u32 mtk_dp_swirq_get_clear(struct mtk_dp *mtk_dp) 912 { 913 u32 irq_status = mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_35D0) & 914 SW_IRQ_FINAL_STATUS_DP_TRANS_P0_MASK; 915 916 if (irq_status) { 917 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8, 918 irq_status, SW_IRQ_CLR_DP_TRANS_P0_MASK); 919 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8, 920 0, SW_IRQ_CLR_DP_TRANS_P0_MASK); 921 } 922 923 return irq_status; 924 } 925 926 static u32 mtk_dp_hwirq_get_clear(struct mtk_dp *mtk_dp) 927 { 928 u32 irq_status = (mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3418) & 929 IRQ_STATUS_DP_TRANS_P0_MASK) >> 12; 930 931 if (irq_status) { 932 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 933 irq_status, IRQ_CLR_DP_TRANS_P0_MASK); 934 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 935 0, IRQ_CLR_DP_TRANS_P0_MASK); 936 } 937 938 return irq_status; 939 } 940 941 static void mtk_dp_hwirq_enable(struct mtk_dp *mtk_dp, bool enable) 942 { 943 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418, 944 enable ? 
0 : 945 IRQ_MASK_DP_TRANS_P0_DISC_IRQ | 946 IRQ_MASK_DP_TRANS_P0_CONN_IRQ | 947 IRQ_MASK_DP_TRANS_P0_INT_IRQ, 948 IRQ_MASK_DP_TRANS_P0_MASK); 949 } 950 951 static void mtk_dp_initialize_settings(struct mtk_dp *mtk_dp) 952 { 953 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_342C, 954 XTAL_FREQ_DP_TRANS_P0_DEFAULT, 955 XTAL_FREQ_DP_TRANS_P0_MASK); 956 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3540, 957 FEC_CLOCK_EN_MODE_DP_TRANS_P0, 958 FEC_CLOCK_EN_MODE_DP_TRANS_P0); 959 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31EC, 960 AUDIO_CH_SRC_SEL_DP_ENC0_P0, 961 AUDIO_CH_SRC_SEL_DP_ENC0_P0); 962 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C, 963 0, SDP_VSYNC_RISING_MASK_DP_ENC0_P0_MASK); 964 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_IRQ_MASK, 965 IRQ_MASK_AUX_TOP_IRQ, IRQ_MASK_AUX_TOP_IRQ); 966 } 967 968 static void mtk_dp_initialize_hpd_detect_settings(struct mtk_dp *mtk_dp) 969 { 970 u32 val; 971 /* Debounce threshold */ 972 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 973 8, HPD_DEB_THD_DP_TRANS_P0_MASK); 974 975 val = (HPD_INT_THD_DP_TRANS_P0_LOWER_500US | 976 HPD_INT_THD_DP_TRANS_P0_UPPER_1100US) << 4; 977 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 978 val, HPD_INT_THD_DP_TRANS_P0_MASK); 979 980 /* 981 * Connect threshold 1.5ms + 5 x 0.1ms = 2ms 982 * Disconnect threshold 1.5ms + 5 x 0.1ms = 2ms 983 */ 984 val = (5 << 8) | (5 << 12); 985 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410, 986 val, 987 HPD_DISC_THD_DP_TRANS_P0_MASK | 988 HPD_CONN_THD_DP_TRANS_P0_MASK); 989 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3430, 990 HPD_INT_THD_ECO_DP_TRANS_P0_HIGH_BOUND_EXT, 991 HPD_INT_THD_ECO_DP_TRANS_P0_MASK); 992 } 993 994 static void mtk_dp_initialize_aux_settings(struct mtk_dp *mtk_dp) 995 { 996 /* modify timeout threshold = 0x1595 */ 997 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_360C, 998 AUX_TIMEOUT_THR_AUX_TX_P0_VAL, 999 AUX_TIMEOUT_THR_AUX_TX_P0_MASK); 1000 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3658, 1001 0, AUX_TX_OV_EN_AUX_TX_P0_MASK); 1002 /* 25 for 26M */ 1003 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3634, 1004 AUX_TX_OVER_SAMPLE_RATE_FOR_26M << 8, 1005 AUX_TX_OVER_SAMPLE_RATE_AUX_TX_P0_MASK); 1006 /* 13 for 26M */ 1007 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3614, 1008 AUX_RX_UI_CNT_THR_AUX_FOR_26M, 1009 AUX_RX_UI_CNT_THR_AUX_TX_P0_MASK); 1010 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_37C8, 1011 MTK_ATOP_EN_AUX_TX_P0, 1012 MTK_ATOP_EN_AUX_TX_P0); 1013 } 1014 1015 static void mtk_dp_initialize_digital_settings(struct mtk_dp *mtk_dp) 1016 { 1017 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C, 1018 0, VBID_VIDEO_MUTE_DP_ENC0_P0_MASK); 1019 1020 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3368, 1021 BS2BS_MODE_DP_ENC1_P0_VAL << 12, 1022 BS2BS_MODE_DP_ENC1_P0_MASK); 1023 1024 /* dp tx encoder reset all sw */ 1025 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 1026 DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0, 1027 DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0); 1028 1029 /* Wait for sw reset to complete */ 1030 usleep_range(1000, 5000); 1031 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004, 1032 0, DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0); 1033 } 1034 1035 static void mtk_dp_digital_sw_reset(struct mtk_dp *mtk_dp) 1036 { 1037 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C, 1038 DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0, 1039 DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0); 1040 1041 /* Wait for sw reset to complete */ 1042 usleep_range(1000, 5000); 1043 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C, 1044 0, DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0); 1045 } 1046 1047 static void mtk_dp_set_lanes(struct mtk_dp 
*mtk_dp, int lanes) 1048 { 1049 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35F0, 1050 lanes == 0 ? 0 : DP_TRANS_DUMMY_RW_0, 1051 DP_TRANS_DUMMY_RW_0_MASK); 1052 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1053 lanes, LANE_NUM_DP_ENC0_P0_MASK); 1054 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_34A4, 1055 lanes << 2, LANE_NUM_DP_TRANS_P0_MASK); 1056 } 1057 1058 static void mtk_dp_get_calibration_data(struct mtk_dp *mtk_dp) 1059 { 1060 const struct mtk_dp_efuse_fmt *fmt; 1061 struct device *dev = mtk_dp->dev; 1062 struct nvmem_cell *cell; 1063 u32 *cal_data = mtk_dp->cal_data; 1064 u32 *buf; 1065 int i; 1066 size_t len; 1067 1068 cell = nvmem_cell_get(dev, "dp_calibration_data"); 1069 if (IS_ERR(cell)) { 1070 dev_warn(dev, "Failed to get nvmem cell dp_calibration_data\n"); 1071 goto use_default_val; 1072 } 1073 1074 buf = (u32 *)nvmem_cell_read(cell, &len); 1075 nvmem_cell_put(cell); 1076 1077 if (IS_ERR(buf) || ((len / sizeof(u32)) != 4)) { 1078 dev_warn(dev, "Failed to read nvmem_cell_read\n"); 1079 1080 if (!IS_ERR(buf)) 1081 kfree(buf); 1082 1083 goto use_default_val; 1084 } 1085 1086 for (i = 0; i < MTK_DP_CAL_MAX; i++) { 1087 fmt = &mtk_dp->data->efuse_fmt[i]; 1088 cal_data[i] = (buf[fmt->idx] >> fmt->shift) & fmt->mask; 1089 1090 if (cal_data[i] < fmt->min_val || cal_data[i] > fmt->max_val) { 1091 dev_warn(mtk_dp->dev, "Invalid efuse data, idx = %d\n", i); 1092 kfree(buf); 1093 goto use_default_val; 1094 } 1095 } 1096 kfree(buf); 1097 1098 return; 1099 1100 use_default_val: 1101 dev_warn(mtk_dp->dev, "Use default calibration data\n"); 1102 for (i = 0; i < MTK_DP_CAL_MAX; i++) 1103 cal_data[i] = mtk_dp->data->efuse_fmt[i].default_val; 1104 } 1105 1106 static void mtk_dp_set_calibration_data(struct mtk_dp *mtk_dp) 1107 { 1108 u32 *cal_data = mtk_dp->cal_data; 1109 1110 mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_DPAUX_TX, 1111 cal_data[MTK_DP_CAL_CLKTX_IMPSE] << 20, 1112 RG_CKM_PT0_CKTX_IMPSEL); 1113 mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_BIAS_GEN_00, 1114 cal_data[MTK_DP_CAL_GLB_BIAS_TRIM] << 16, 1115 RG_XTP_GLB_BIAS_INTR_CTRL); 1116 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0, 1117 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] << 12, 1118 RG_XTP_LN0_TX_IMPSEL_PMOS); 1119 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0, 1120 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] << 16, 1121 RG_XTP_LN0_TX_IMPSEL_NMOS); 1122 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1, 1123 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] << 12, 1124 RG_XTP_LN1_TX_IMPSEL_PMOS); 1125 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1, 1126 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] << 16, 1127 RG_XTP_LN1_TX_IMPSEL_NMOS); 1128 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2, 1129 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] << 12, 1130 RG_XTP_LN2_TX_IMPSEL_PMOS); 1131 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2, 1132 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] << 16, 1133 RG_XTP_LN2_TX_IMPSEL_NMOS); 1134 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3, 1135 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] << 12, 1136 RG_XTP_LN3_TX_IMPSEL_PMOS); 1137 mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3, 1138 cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] << 16, 1139 RG_XTP_LN3_TX_IMPSEL_NMOS); 1140 } 1141 1142 static int mtk_dp_phy_configure(struct mtk_dp *mtk_dp, 1143 u32 link_rate, int lane_count) 1144 { 1145 int ret; 1146 union phy_configure_opts phy_opts = { 1147 .dp = { 1148 .link_rate = drm_dp_bw_code_to_link_rate(link_rate) / 100, 1149 .set_rate = 1, 1150 .lanes = lane_count, 1151 .set_lanes = 1, 1152 .ssc = mtk_dp->train_info.sink_ssc, 1153 } 1154 }; 1155 1156 
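	/*
	 * Drop the TX power state back to bandgap-only while the PHY is
	 * reconfigured, then restore bandgap + TPLL + lane power once the new
	 * rate/lane settings and the calibration data have been applied below.
	 */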
mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, DP_PWR_STATE_BANDGAP, 1157 DP_PWR_STATE_MASK); 1158 1159 ret = phy_configure(mtk_dp->phy, &phy_opts); 1160 if (ret) 1161 return ret; 1162 1163 mtk_dp_set_calibration_data(mtk_dp); 1164 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1165 DP_PWR_STATE_BANDGAP_TPLL_LANE, DP_PWR_STATE_MASK); 1166 1167 return 0; 1168 } 1169 1170 static void mtk_dp_set_idle_pattern(struct mtk_dp *mtk_dp, bool enable) 1171 { 1172 u32 val = POST_MISC_DATA_LANE0_OV_DP_TRANS_P0_MASK | 1173 POST_MISC_DATA_LANE1_OV_DP_TRANS_P0_MASK | 1174 POST_MISC_DATA_LANE2_OV_DP_TRANS_P0_MASK | 1175 POST_MISC_DATA_LANE3_OV_DP_TRANS_P0_MASK; 1176 1177 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3580, 1178 enable ? val : 0, val); 1179 } 1180 1181 static void mtk_dp_train_set_pattern(struct mtk_dp *mtk_dp, int pattern) 1182 { 1183 /* TPS1 */ 1184 if (pattern == 1) 1185 mtk_dp_set_idle_pattern(mtk_dp, false); 1186 1187 mtk_dp_update_bits(mtk_dp, 1188 MTK_DP_TRANS_P0_3400, 1189 pattern ? BIT(pattern - 1) << 12 : 0, 1190 PATTERN1_EN_DP_TRANS_P0_MASK | 1191 PATTERN2_EN_DP_TRANS_P0_MASK | 1192 PATTERN3_EN_DP_TRANS_P0_MASK | 1193 PATTERN4_EN_DP_TRANS_P0_MASK); 1194 } 1195 1196 static void mtk_dp_set_enhanced_frame_mode(struct mtk_dp *mtk_dp) 1197 { 1198 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1199 ENHANCED_FRAME_EN_DP_ENC0_P0, 1200 ENHANCED_FRAME_EN_DP_ENC0_P0); 1201 } 1202 1203 static void mtk_dp_training_set_scramble(struct mtk_dp *mtk_dp, bool enable) 1204 { 1205 mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3404, 1206 enable ? DP_SCR_EN_DP_TRANS_P0_MASK : 0, 1207 DP_SCR_EN_DP_TRANS_P0_MASK); 1208 } 1209 1210 static void mtk_dp_video_mute(struct mtk_dp *mtk_dp, bool enable) 1211 { 1212 struct arm_smccc_res res; 1213 u32 val = VIDEO_MUTE_SEL_DP_ENC0_P0 | 1214 (enable ? VIDEO_MUTE_SW_DP_ENC0_P0 : 0); 1215 1216 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000, 1217 val, 1218 VIDEO_MUTE_SEL_DP_ENC0_P0 | 1219 VIDEO_MUTE_SW_DP_ENC0_P0); 1220 1221 arm_smccc_smc(MTK_DP_SIP_CONTROL_AARCH32, 1222 mtk_dp->data->smc_cmd, enable, 1223 0, 0, 0, 0, 0, &res); 1224 1225 dev_dbg(mtk_dp->dev, "smc cmd: 0x%x, p1: %s, ret: 0x%lx-0x%lx\n", 1226 mtk_dp->data->smc_cmd, enable ? 
"enable" : "disable", res.a0, res.a1); 1227 } 1228 1229 static void mtk_dp_audio_mute(struct mtk_dp *mtk_dp, bool mute) 1230 { 1231 u32 val[3]; 1232 1233 if (mute) { 1234 val[0] = VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 | 1235 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0; 1236 val[1] = 0; 1237 val[2] = 0; 1238 } else { 1239 val[0] = 0; 1240 val[1] = AU_EN_DP_ENC0_P0; 1241 /* Send one every two frames */ 1242 val[2] = 0x0F; 1243 } 1244 1245 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, 1246 val[0], 1247 VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 | 1248 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0); 1249 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088, 1250 val[1], AU_EN_DP_ENC0_P0); 1251 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A4, 1252 val[2], AU_TS_CFG_DP_ENC0_P0_MASK); 1253 } 1254 1255 static void mtk_dp_power_enable(struct mtk_dp *mtk_dp) 1256 { 1257 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE, 1258 0, SW_RST_B_PHYD); 1259 1260 /* Wait for power enable */ 1261 usleep_range(10, 200); 1262 1263 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE, 1264 SW_RST_B_PHYD, SW_RST_B_PHYD); 1265 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1266 DP_PWR_STATE_BANDGAP_TPLL, DP_PWR_STATE_MASK); 1267 mtk_dp_write(mtk_dp, MTK_DP_1040, 1268 RG_DPAUX_RX_VALID_DEGLITCH_EN | RG_XTP_GLB_CKDET_EN | 1269 RG_DPAUX_RX_EN); 1270 mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 0, DA_CKM_CKTX0_EN_FORCE_EN); 1271 } 1272 1273 static void mtk_dp_power_disable(struct mtk_dp *mtk_dp) 1274 { 1275 mtk_dp_write(mtk_dp, MTK_DP_TOP_PWR_STATE, 0); 1276 1277 mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 1278 DA_CKM_CKTX0_EN_FORCE_EN, DA_CKM_CKTX0_EN_FORCE_EN); 1279 1280 /* Disable RX */ 1281 mtk_dp_write(mtk_dp, MTK_DP_1040, 0); 1282 mtk_dp_write(mtk_dp, MTK_DP_TOP_MEM_PD, 1283 0x550 | FUSE_SEL | MEM_ISO_EN); 1284 } 1285 1286 static void mtk_dp_initialize_priv_data(struct mtk_dp *mtk_dp) 1287 { 1288 mtk_dp->train_info.link_rate = DP_LINK_BW_5_4; 1289 mtk_dp->train_info.lane_count = mtk_dp->max_lanes; 1290 mtk_dp->train_info.cable_plugged_in = false; 1291 1292 mtk_dp->info.format = DP_PIXELFORMAT_RGB; 1293 memset(&mtk_dp->info.vm, 0, sizeof(struct videomode)); 1294 mtk_dp->audio_enable = false; 1295 } 1296 1297 static void mtk_dp_sdp_set_down_cnt_init(struct mtk_dp *mtk_dp, 1298 u32 sram_read_start) 1299 { 1300 u32 sdp_down_cnt_init = 0; 1301 struct drm_display_mode mode; 1302 struct videomode *vm = &mtk_dp->info.vm; 1303 1304 drm_display_mode_from_videomode(vm, &mode); 1305 1306 if (mode.clock > 0) 1307 sdp_down_cnt_init = sram_read_start * 1308 mtk_dp->train_info.link_rate * 2700 * 8 / 1309 (mode.clock * 4); 1310 1311 switch (mtk_dp->train_info.lane_count) { 1312 case 1: 1313 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x1A); 1314 break; 1315 case 2: 1316 /* case for LowResolution && High Audio Sample Rate */ 1317 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x10); 1318 sdp_down_cnt_init += mode.vtotal <= 525 ? 4 : 0; 1319 break; 1320 case 4: 1321 default: 1322 sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 6); 1323 break; 1324 } 1325 1326 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040, 1327 sdp_down_cnt_init, 1328 SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK); 1329 } 1330 1331 static void mtk_dp_sdp_set_down_cnt_init_in_hblank(struct mtk_dp *mtk_dp) 1332 { 1333 int pix_clk_mhz; 1334 u32 dc_offset; 1335 u32 spd_down_cnt_init = 0; 1336 struct drm_display_mode mode; 1337 struct videomode *vm = &mtk_dp->info.vm; 1338 1339 drm_display_mode_from_videomode(vm, &mode); 1340 1341 pix_clk_mhz = mtk_dp->info.format == DP_PIXELFORMAT_YUV420 ? 
1342 mode.clock / 2000 : mode.clock / 1000; 1343 1344 switch (mtk_dp->train_info.lane_count) { 1345 case 1: 1346 spd_down_cnt_init = 0x20; 1347 break; 1348 case 2: 1349 dc_offset = (mode.vtotal <= 525) ? 0x14 : 0x00; 1350 spd_down_cnt_init = 0x18 + dc_offset; 1351 break; 1352 case 4: 1353 default: 1354 dc_offset = (mode.vtotal <= 525) ? 0x08 : 0x00; 1355 if (pix_clk_mhz > mtk_dp->train_info.link_rate * 27) 1356 spd_down_cnt_init = 0x8; 1357 else 1358 spd_down_cnt_init = 0x10 + dc_offset; 1359 break; 1360 } 1361 1362 mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, spd_down_cnt_init, 1363 SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK); 1364 } 1365 1366 static void mtk_dp_setup_tu(struct mtk_dp *mtk_dp) 1367 { 1368 u32 sram_read_start = min_t(u32, MTK_DP_TBC_BUF_READ_START_ADDR, 1369 mtk_dp->info.vm.hactive / 1370 mtk_dp->train_info.lane_count / 1371 MTK_DP_4P1T / MTK_DP_HDE / 1372 MTK_DP_PIX_PER_ADDR); 1373 mtk_dp_set_sram_read_start(mtk_dp, sram_read_start); 1374 mtk_dp_setup_encoder(mtk_dp); 1375 mtk_dp_sdp_set_down_cnt_init_in_hblank(mtk_dp); 1376 mtk_dp_sdp_set_down_cnt_init(mtk_dp, sram_read_start); 1377 } 1378 1379 static void mtk_dp_set_tx_out(struct mtk_dp *mtk_dp) 1380 { 1381 mtk_dp_setup_tu(mtk_dp); 1382 } 1383 1384 static void mtk_dp_train_update_swing_pre(struct mtk_dp *mtk_dp, int lanes, 1385 u8 dpcd_adjust_req[2]) 1386 { 1387 int lane; 1388 1389 for (lane = 0; lane < lanes; ++lane) { 1390 u8 val; 1391 u8 swing; 1392 u8 preemphasis; 1393 int index = lane / 2; 1394 int shift = lane % 2 ? DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 0; 1395 1396 swing = (dpcd_adjust_req[index] >> shift) & 1397 DP_ADJUST_VOLTAGE_SWING_LANE0_MASK; 1398 preemphasis = ((dpcd_adjust_req[index] >> shift) & 1399 DP_ADJUST_PRE_EMPHASIS_LANE0_MASK) >> 1400 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT; 1401 val = swing << DP_TRAIN_VOLTAGE_SWING_SHIFT | 1402 preemphasis << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1403 1404 if (swing == DP_TRAIN_VOLTAGE_SWING_LEVEL_3) 1405 val |= DP_TRAIN_MAX_SWING_REACHED; 1406 if (preemphasis == 3) 1407 val |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1408 1409 mtk_dp_set_swing_pre_emphasis(mtk_dp, lane, swing, preemphasis); 1410 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_LANE0_SET + lane, 1411 val); 1412 } 1413 } 1414 1415 static void mtk_dp_pattern(struct mtk_dp *mtk_dp, bool is_tps1) 1416 { 1417 int pattern; 1418 unsigned int aux_offset; 1419 1420 if (is_tps1) { 1421 pattern = 1; 1422 aux_offset = DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1; 1423 } else { 1424 aux_offset = mtk_dp->train_info.channel_eq_pattern; 1425 1426 switch (mtk_dp->train_info.channel_eq_pattern) { 1427 case DP_TRAINING_PATTERN_4: 1428 pattern = 4; 1429 break; 1430 case DP_TRAINING_PATTERN_3: 1431 pattern = 3; 1432 aux_offset |= DP_LINK_SCRAMBLING_DISABLE; 1433 break; 1434 case DP_TRAINING_PATTERN_2: 1435 default: 1436 pattern = 2; 1437 aux_offset |= DP_LINK_SCRAMBLING_DISABLE; 1438 break; 1439 } 1440 } 1441 1442 mtk_dp_train_set_pattern(mtk_dp, pattern); 1443 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, aux_offset); 1444 } 1445 1446 static int mtk_dp_train_setting(struct mtk_dp *mtk_dp, u8 target_link_rate, 1447 u8 target_lane_count) 1448 { 1449 int ret; 1450 1451 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LINK_BW_SET, target_link_rate); 1452 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LANE_COUNT_SET, 1453 target_lane_count | DP_LANE_COUNT_ENHANCED_FRAME_EN); 1454 1455 if (mtk_dp->train_info.sink_ssc) 1456 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_DOWNSPREAD_CTRL, 1457 DP_SPREAD_AMP_0_5); 1458 1459 mtk_dp_set_lanes(mtk_dp, 
target_lane_count / 2); 1460 ret = mtk_dp_phy_configure(mtk_dp, target_link_rate, target_lane_count); 1461 if (ret) 1462 return ret; 1463 1464 dev_dbg(mtk_dp->dev, 1465 "Link train target_link_rate = 0x%x, target_lane_count = 0x%x\n", 1466 target_link_rate, target_lane_count); 1467 1468 return 0; 1469 } 1470 1471 static int mtk_dp_train_cr(struct mtk_dp *mtk_dp, u8 target_lane_count) 1472 { 1473 u8 lane_adjust[2] = {}; 1474 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 1475 u8 prev_lane_adjust = 0xff; 1476 int train_retries = 0; 1477 int voltage_retries = 0; 1478 1479 mtk_dp_pattern(mtk_dp, true); 1480 1481 /* In DP spec 1.4, the retry count of CR is defined as 10. */ 1482 do { 1483 train_retries++; 1484 if (!mtk_dp->train_info.cable_plugged_in) { 1485 mtk_dp_train_set_pattern(mtk_dp, 0); 1486 return -ENODEV; 1487 } 1488 1489 drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1, 1490 lane_adjust, sizeof(lane_adjust)); 1491 mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count, 1492 lane_adjust); 1493 1494 drm_dp_link_train_clock_recovery_delay(&mtk_dp->aux, 1495 mtk_dp->rx_cap); 1496 1497 /* check link status from sink device */ 1498 drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status); 1499 if (drm_dp_clock_recovery_ok(link_status, 1500 target_lane_count)) { 1501 dev_dbg(mtk_dp->dev, "Link train CR pass\n"); 1502 return 0; 1503 } 1504 1505 /* 1506 * In DP spec 1.4, if current voltage level is the same 1507 * with previous voltage level, we need to retry 5 times. 1508 */ 1509 if (prev_lane_adjust == link_status[4]) { 1510 voltage_retries++; 1511 /* 1512 * Condition of CR fail: 1513 * 1. Failed to pass CR using the same voltage 1514 * level over five times. 1515 * 2. Failed to pass CR when the current voltage 1516 * level is the same with previous voltage 1517 * level and reach max voltage level (3). 1518 */ 1519 if (voltage_retries > MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY || 1520 (prev_lane_adjust & DP_ADJUST_VOLTAGE_SWING_LANE0_MASK) == 3) { 1521 dev_dbg(mtk_dp->dev, "Link train CR fail\n"); 1522 break; 1523 } 1524 } else { 1525 /* 1526 * If the voltage level is changed, we need to 1527 * re-calculate this retry count. 1528 */ 1529 voltage_retries = 0; 1530 } 1531 prev_lane_adjust = link_status[4]; 1532 } while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY); 1533 1534 /* Failed to train CR, and disable pattern. */ 1535 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1536 DP_TRAINING_PATTERN_DISABLE); 1537 mtk_dp_train_set_pattern(mtk_dp, 0); 1538 1539 return -ETIMEDOUT; 1540 } 1541 1542 static int mtk_dp_train_eq(struct mtk_dp *mtk_dp, u8 target_lane_count) 1543 { 1544 u8 lane_adjust[2] = {}; 1545 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 1546 int train_retries = 0; 1547 1548 mtk_dp_pattern(mtk_dp, false); 1549 1550 do { 1551 train_retries++; 1552 if (!mtk_dp->train_info.cable_plugged_in) { 1553 mtk_dp_train_set_pattern(mtk_dp, 0); 1554 return -ENODEV; 1555 } 1556 1557 drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1, 1558 lane_adjust, sizeof(lane_adjust)); 1559 mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count, 1560 lane_adjust); 1561 1562 drm_dp_link_train_channel_eq_delay(&mtk_dp->aux, 1563 mtk_dp->rx_cap); 1564 1565 /* check link status from sink device */ 1566 drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status); 1567 if (drm_dp_channel_eq_ok(link_status, target_lane_count)) { 1568 dev_dbg(mtk_dp->dev, "Link train EQ pass\n"); 1569 1570 /* Training done, and disable pattern. 
*/ 1571 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1572 DP_TRAINING_PATTERN_DISABLE); 1573 mtk_dp_train_set_pattern(mtk_dp, 0); 1574 return 0; 1575 } 1576 dev_dbg(mtk_dp->dev, "Link train EQ fail\n"); 1577 } while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY); 1578 1579 /* Failed to train EQ, and disable pattern. */ 1580 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, 1581 DP_TRAINING_PATTERN_DISABLE); 1582 mtk_dp_train_set_pattern(mtk_dp, 0); 1583 1584 return -ETIMEDOUT; 1585 } 1586 1587 static int mtk_dp_parse_capabilities(struct mtk_dp *mtk_dp) 1588 { 1589 u8 val; 1590 ssize_t ret; 1591 1592 drm_dp_read_dpcd_caps(&mtk_dp->aux, mtk_dp->rx_cap); 1593 1594 if (drm_dp_tps4_supported(mtk_dp->rx_cap)) 1595 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_4; 1596 else if (drm_dp_tps3_supported(mtk_dp->rx_cap)) 1597 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_3; 1598 else 1599 mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_2; 1600 1601 mtk_dp->train_info.sink_ssc = drm_dp_max_downspread(mtk_dp->rx_cap); 1602 1603 ret = drm_dp_dpcd_readb(&mtk_dp->aux, DP_MSTM_CAP, &val); 1604 if (ret < 1) { 1605 drm_err(mtk_dp->drm_dev, "Read mstm cap failed\n"); 1606 return ret == 0 ? -EIO : ret; 1607 } 1608 1609 if (val & DP_MST_CAP) { 1610 /* Clear DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 */ 1611 ret = drm_dp_dpcd_readb(&mtk_dp->aux, 1612 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0, 1613 &val); 1614 if (ret < 1) { 1615 drm_err(mtk_dp->drm_dev, "Read irq vector failed\n"); 1616 return ret == 0 ? -EIO : ret; 1617 } 1618 1619 if (val) 1620 drm_dp_dpcd_writeb(&mtk_dp->aux, 1621 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0, 1622 val); 1623 } 1624 1625 return 0; 1626 } 1627 1628 static bool mtk_dp_edid_parse_audio_capabilities(struct mtk_dp *mtk_dp, 1629 struct mtk_dp_audio_cfg *cfg) 1630 { 1631 if (!mtk_dp->data->audio_supported) 1632 return false; 1633 1634 if (mtk_dp->info.audio_cur_cfg.sad_count <= 0) { 1635 drm_info(mtk_dp->drm_dev, "The SADs is NULL\n"); 1636 return false; 1637 } 1638 1639 return true; 1640 } 1641 1642 static void mtk_dp_train_change_mode(struct mtk_dp *mtk_dp) 1643 { 1644 phy_reset(mtk_dp->phy); 1645 mtk_dp_reset_swing_pre_emphasis(mtk_dp); 1646 } 1647 1648 static int mtk_dp_training(struct mtk_dp *mtk_dp) 1649 { 1650 int ret; 1651 u8 lane_count, link_rate, train_limit, max_link_rate; 1652 1653 link_rate = min_t(u8, mtk_dp->max_linkrate, 1654 mtk_dp->rx_cap[DP_MAX_LINK_RATE]); 1655 max_link_rate = link_rate; 1656 lane_count = min_t(u8, mtk_dp->max_lanes, 1657 drm_dp_max_lane_count(mtk_dp->rx_cap)); 1658 1659 /* 1660 * TPS are generated by the hardware pattern generator. From the 1661 * hardware setting we need to disable this scramble setting before 1662 * use the TPS pattern generator. 
1663 */ 1664 mtk_dp_training_set_scramble(mtk_dp, false); 1665 1666 for (train_limit = 6; train_limit > 0; train_limit--) { 1667 mtk_dp_train_change_mode(mtk_dp); 1668 1669 ret = mtk_dp_train_setting(mtk_dp, link_rate, lane_count); 1670 if (ret) 1671 return ret; 1672 1673 ret = mtk_dp_train_cr(mtk_dp, lane_count); 1674 if (ret == -ENODEV) { 1675 return ret; 1676 } else if (ret) { 1677 /* reduce link rate */ 1678 switch (link_rate) { 1679 case DP_LINK_BW_1_62: 1680 lane_count = lane_count / 2; 1681 link_rate = max_link_rate; 1682 if (lane_count == 0) 1683 return -EIO; 1684 break; 1685 case DP_LINK_BW_2_7: 1686 link_rate = DP_LINK_BW_1_62; 1687 break; 1688 case DP_LINK_BW_5_4: 1689 link_rate = DP_LINK_BW_2_7; 1690 break; 1691 case DP_LINK_BW_8_1: 1692 link_rate = DP_LINK_BW_5_4; 1693 break; 1694 default: 1695 return -EINVAL; 1696 }; 1697 continue; 1698 } 1699 1700 ret = mtk_dp_train_eq(mtk_dp, lane_count); 1701 if (ret == -ENODEV) { 1702 return ret; 1703 } else if (ret) { 1704 /* reduce lane count */ 1705 if (lane_count == 0) 1706 return -EIO; 1707 lane_count /= 2; 1708 continue; 1709 } 1710 1711 /* if we can run to this, training is done. */ 1712 break; 1713 } 1714 1715 if (train_limit == 0) 1716 return -ETIMEDOUT; 1717 1718 mtk_dp->train_info.link_rate = link_rate; 1719 mtk_dp->train_info.lane_count = lane_count; 1720 1721 /* 1722 * After training done, we need to output normal stream instead of TPS, 1723 * so we need to enable scramble. 1724 */ 1725 mtk_dp_training_set_scramble(mtk_dp, true); 1726 mtk_dp_set_enhanced_frame_mode(mtk_dp); 1727 1728 return 0; 1729 } 1730 1731 static void mtk_dp_video_enable(struct mtk_dp *mtk_dp, bool enable) 1732 { 1733 /* the mute sequence is different between enable and disable */ 1734 if (enable) { 1735 mtk_dp_msa_bypass_enable(mtk_dp, false); 1736 mtk_dp_pg_enable(mtk_dp, false); 1737 mtk_dp_set_tx_out(mtk_dp); 1738 mtk_dp_video_mute(mtk_dp, false); 1739 } else { 1740 mtk_dp_video_mute(mtk_dp, true); 1741 mtk_dp_pg_enable(mtk_dp, true); 1742 mtk_dp_msa_bypass_enable(mtk_dp, true); 1743 } 1744 } 1745 1746 static void mtk_dp_audio_sdp_setup(struct mtk_dp *mtk_dp, 1747 struct mtk_dp_audio_cfg *cfg) 1748 { 1749 struct dp_sdp sdp; 1750 struct hdmi_audio_infoframe frame; 1751 1752 hdmi_audio_infoframe_init(&frame); 1753 frame.coding_type = HDMI_AUDIO_CODING_TYPE_PCM; 1754 frame.channels = cfg->channels; 1755 frame.sample_frequency = cfg->sample_rate; 1756 1757 switch (cfg->word_length_bits) { 1758 case 16: 1759 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_16; 1760 break; 1761 case 20: 1762 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_20; 1763 break; 1764 case 24: 1765 default: 1766 frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_24; 1767 break; 1768 } 1769 1770 hdmi_audio_infoframe_pack_for_dp(&frame, &sdp, MTK_DP_VERSION); 1771 1772 mtk_dp_audio_sdp_asp_set_channels(mtk_dp, cfg->channels); 1773 mtk_dp_setup_sdp_aui(mtk_dp, &sdp); 1774 } 1775 1776 static void mtk_dp_audio_setup(struct mtk_dp *mtk_dp, 1777 struct mtk_dp_audio_cfg *cfg) 1778 { 1779 mtk_dp_audio_sdp_setup(mtk_dp, cfg); 1780 mtk_dp_audio_channel_status_set(mtk_dp, cfg); 1781 1782 mtk_dp_audio_setup_channels(mtk_dp, cfg); 1783 mtk_dp_audio_set_divider(mtk_dp); 1784 } 1785 1786 static int mtk_dp_video_config(struct mtk_dp *mtk_dp) 1787 { 1788 mtk_dp_config_mn_mode(mtk_dp); 1789 mtk_dp_set_msa(mtk_dp); 1790 mtk_dp_set_color_depth(mtk_dp); 1791 return mtk_dp_set_color_format(mtk_dp, mtk_dp->info.format); 1792 } 1793 1794 static void mtk_dp_init_port(struct mtk_dp *mtk_dp) 1795 { 1796 
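	/*
	 * Bring-up order: park the link on the idle pattern and reset the
	 * driver's private state first, then program the general, AUX and
	 * digital settings, enable the AUX "reply complete" mode, configure
	 * the HPD detection thresholds and finish with a digital software
	 * reset.
	 */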
mtk_dp_set_idle_pattern(mtk_dp, true); 1797 mtk_dp_initialize_priv_data(mtk_dp); 1798 1799 mtk_dp_initialize_settings(mtk_dp); 1800 mtk_dp_initialize_aux_settings(mtk_dp); 1801 mtk_dp_initialize_digital_settings(mtk_dp); 1802 1803 mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3690, 1804 RX_REPLY_COMPLETE_MODE_AUX_TX_P0, 1805 RX_REPLY_COMPLETE_MODE_AUX_TX_P0); 1806 mtk_dp_initialize_hpd_detect_settings(mtk_dp); 1807 1808 mtk_dp_digital_sw_reset(mtk_dp); 1809 } 1810 1811 static irqreturn_t mtk_dp_hpd_event_thread(int hpd, void *dev) 1812 { 1813 struct mtk_dp *mtk_dp = dev; 1814 unsigned long flags; 1815 u32 status; 1816 1817 if (mtk_dp->need_debounce && mtk_dp->train_info.cable_plugged_in) 1818 msleep(100); 1819 1820 spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags); 1821 status = mtk_dp->irq_thread_handle; 1822 mtk_dp->irq_thread_handle = 0; 1823 spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags); 1824 1825 if (status & MTK_DP_THREAD_CABLE_STATE_CHG) { 1826 drm_helper_hpd_irq_event(mtk_dp->bridge.dev); 1827 1828 if (!mtk_dp->train_info.cable_plugged_in) { 1829 mtk_dp_disable_sdp_aui(mtk_dp); 1830 memset(&mtk_dp->info.audio_cur_cfg, 0, 1831 sizeof(mtk_dp->info.audio_cur_cfg)); 1832 1833 mtk_dp->need_debounce = false; 1834 mod_timer(&mtk_dp->debounce_timer, 1835 jiffies + msecs_to_jiffies(100) - 1); 1836 } 1837 } 1838 1839 if (status & MTK_DP_THREAD_HPD_EVENT) 1840 dev_dbg(mtk_dp->dev, "Receive IRQ from sink devices\n"); 1841 1842 return IRQ_HANDLED; 1843 } 1844 1845 static irqreturn_t mtk_dp_hpd_event(int hpd, void *dev) 1846 { 1847 struct mtk_dp *mtk_dp = dev; 1848 bool cable_sta_chg = false; 1849 unsigned long flags; 1850 u32 irq_status = mtk_dp_swirq_get_clear(mtk_dp) | 1851 mtk_dp_hwirq_get_clear(mtk_dp); 1852 1853 if (!irq_status) 1854 return IRQ_HANDLED; 1855 1856 spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags); 1857 1858 if (irq_status & MTK_DP_HPD_INTERRUPT) 1859 mtk_dp->irq_thread_handle |= MTK_DP_THREAD_HPD_EVENT; 1860 1861 /* Cable state is changed. 
*/ 1862 if (irq_status != MTK_DP_HPD_INTERRUPT) { 1863 mtk_dp->irq_thread_handle |= MTK_DP_THREAD_CABLE_STATE_CHG; 1864 cable_sta_chg = true; 1865 } 1866 1867 spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags); 1868 1869 if (cable_sta_chg) { 1870 if (!!(mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3414) & 1871 HPD_DB_DP_TRANS_P0_MASK)) 1872 mtk_dp->train_info.cable_plugged_in = true; 1873 else 1874 mtk_dp->train_info.cable_plugged_in = false; 1875 } 1876 1877 return IRQ_WAKE_THREAD; 1878 } 1879 1880 static int mtk_dp_dt_parse(struct mtk_dp *mtk_dp, 1881 struct platform_device *pdev) 1882 { 1883 struct device_node *endpoint; 1884 struct device *dev = &pdev->dev; 1885 int ret; 1886 void __iomem *base; 1887 u32 linkrate; 1888 int len; 1889 1890 base = devm_platform_ioremap_resource(pdev, 0); 1891 if (IS_ERR(base)) 1892 return PTR_ERR(base); 1893 1894 mtk_dp->regs = devm_regmap_init_mmio(dev, base, &mtk_dp_regmap_config); 1895 if (IS_ERR(mtk_dp->regs)) 1896 return PTR_ERR(mtk_dp->regs); 1897 1898 endpoint = of_graph_get_endpoint_by_regs(pdev->dev.of_node, 1, -1); 1899 len = of_property_count_elems_of_size(endpoint, 1900 "data-lanes", sizeof(u32)); 1901 if (len < 0 || len > 4 || len == 3) { 1902 dev_err(dev, "invalid data lane size: %d\n", len); 1903 return -EINVAL; 1904 } 1905 1906 mtk_dp->max_lanes = len; 1907 1908 ret = device_property_read_u32(dev, "max-linkrate-mhz", &linkrate); 1909 if (ret) { 1910 dev_err(dev, "failed to read max linkrate: %d\n", ret); 1911 return ret; 1912 } 1913 1914 mtk_dp->max_linkrate = drm_dp_link_rate_to_bw_code(linkrate * 100); 1915 1916 return 0; 1917 } 1918 1919 static void mtk_dp_update_plugged_status(struct mtk_dp *mtk_dp) 1920 { 1921 mutex_lock(&mtk_dp->update_plugged_status_lock); 1922 if (mtk_dp->plugged_cb && mtk_dp->codec_dev) 1923 mtk_dp->plugged_cb(mtk_dp->codec_dev, 1924 mtk_dp->enabled & 1925 mtk_dp->info.audio_cur_cfg.detect_monitor); 1926 mutex_unlock(&mtk_dp->update_plugged_status_lock); 1927 } 1928 1929 static enum drm_connector_status mtk_dp_bdg_detect(struct drm_bridge *bridge) 1930 { 1931 struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge); 1932 enum drm_connector_status ret = connector_status_disconnected; 1933 bool enabled = mtk_dp->enabled; 1934 u8 sink_count = 0; 1935 1936 if (!mtk_dp->train_info.cable_plugged_in) 1937 return ret; 1938 1939 if (!enabled) { 1940 /* power on aux */ 1941 mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, 1942 DP_PWR_STATE_BANDGAP_TPLL_LANE, 1943 DP_PWR_STATE_MASK); 1944 1945 /* power on panel */ 1946 drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0); 1947 usleep_range(2000, 5000); 1948 } 1949 /* 1950 * Some dongles still source HPD when they do not connect to any 1951 * sink device. To avoid this, we need to read the sink count 1952 * to make sure we do connect to sink devices. After this detect 1953 * function, we just need to check the HPD connection to check 1954 * whether we connect to a sink device. 
	drm_dp_dpcd_readb(&mtk_dp->aux, DP_SINK_COUNT, &sink_count);
	if (DP_GET_SINK_COUNT(sink_count))
		ret = connector_status_connected;

	if (!enabled) {
		/* power off panel */
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
		usleep_range(2000, 3000);

		/* power off aux */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL,
				   DP_PWR_STATE_MASK);
	}

	return ret;
}

static struct edid *mtk_dp_get_edid(struct drm_bridge *bridge,
				    struct drm_connector *connector)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	bool enabled = mtk_dp->enabled;
	struct edid *new_edid = NULL;
	struct mtk_dp_audio_cfg *audio_caps = &mtk_dp->info.audio_cur_cfg;
	struct cea_sad *sads = NULL;

	if (!enabled) {
		drm_bridge_chain_pre_enable(bridge);

		/* power on aux */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL_LANE,
				   DP_PWR_STATE_MASK);

		/* power on panel */
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
		usleep_range(2000, 5000);
	}

	new_edid = drm_get_edid(connector, &mtk_dp->aux.ddc);

	/*
	 * Parse the sink capabilities here so that atomic_get_input_bus_fmts
	 * and mode_valid can use them to calculate the sink bitrates.
	 */
	if (mtk_dp_parse_capabilities(mtk_dp)) {
		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
		new_edid = NULL;
	}

	if (new_edid) {
		audio_caps->sad_count = drm_edid_to_sad(new_edid, &sads);
		audio_caps->detect_monitor = drm_detect_monitor_audio(new_edid);
		/* Only the SAD count is used; free the array right away. */
		kfree(sads);
	}

	if (!enabled) {
		/* power off panel */
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
		usleep_range(2000, 3000);

		/* power off aux */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL,
				   DP_PWR_STATE_MASK);

		drm_bridge_chain_post_disable(bridge);
	}

	return new_edid;
}

static ssize_t mtk_dp_aux_transfer(struct drm_dp_aux *mtk_aux,
				   struct drm_dp_aux_msg *msg)
{
	struct mtk_dp *mtk_dp;
	bool is_read;
	u8 request;
	size_t accessed_bytes = 0;
	int ret;

	mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);

	if (!mtk_dp->train_info.cable_plugged_in) {
		ret = -EAGAIN;
		goto err;
	}

	switch (msg->request) {
	case DP_AUX_I2C_MOT:
	case DP_AUX_I2C_WRITE:
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE | DP_AUX_I2C_MOT:
		request = msg->request & ~DP_AUX_I2C_WRITE_STATUS_UPDATE;
		is_read = false;
		break;
	case DP_AUX_I2C_READ:
	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ | DP_AUX_I2C_MOT:
		request = msg->request;
		is_read = true;
		break;
	default:
		drm_err(mtk_aux->drm_dev, "invalid aux cmd = %d\n",
			msg->request);
		ret = -EINVAL;
		goto err;
	}

	do {
		size_t to_access = min_t(size_t, DP_AUX_MAX_PAYLOAD_BYTES,
					 msg->size - accessed_bytes);

		ret = mtk_dp_aux_do_transfer(mtk_dp, is_read, request,
					     msg->address + accessed_bytes,
					     msg->buffer + accessed_bytes,
					     to_access);

		if (ret) {
			drm_info(mtk_dp->drm_dev,
				 "Failed to do AUX transfer: %d\n", ret);
			goto err;
		}
		accessed_bytes += to_access;
	} while (accessed_bytes < msg->size);

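	/*
	 * All chunks completed without error: report ACK for both the
	 * native AUX and the I2C-over-AUX reply fields.
	 */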
	msg->reply = DP_AUX_NATIVE_REPLY_ACK | DP_AUX_I2C_REPLY_ACK;
	return msg->size;
err:
	msg->reply = DP_AUX_NATIVE_REPLY_NACK | DP_AUX_I2C_REPLY_NACK;
	return ret;
}

static int mtk_dp_poweron(struct mtk_dp *mtk_dp)
{
	int ret;

	ret = phy_init(mtk_dp->phy);
	if (ret) {
		dev_err(mtk_dp->dev, "Failed to initialize phy: %d\n", ret);
		return ret;
	}

	mtk_dp_init_port(mtk_dp);
	mtk_dp_power_enable(mtk_dp);

	return 0;
}

static void mtk_dp_poweroff(struct mtk_dp *mtk_dp)
{
	mtk_dp_power_disable(mtk_dp);
	phy_exit(mtk_dp->phy);
}

static int mtk_dp_bridge_attach(struct drm_bridge *bridge,
				enum drm_bridge_attach_flags flags)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	int ret;

	if (!(flags & DRM_BRIDGE_ATTACH_NO_CONNECTOR)) {
		dev_err(mtk_dp->dev, "Driver does not provide a connector!");
		return -EINVAL;
	}

	mtk_dp->aux.drm_dev = bridge->dev;
	ret = drm_dp_aux_register(&mtk_dp->aux);
	if (ret) {
		dev_err(mtk_dp->dev,
			"failed to register DP AUX channel: %d\n", ret);
		return ret;
	}

	ret = mtk_dp_poweron(mtk_dp);
	if (ret)
		goto err_aux_register;

	if (mtk_dp->next_bridge) {
		ret = drm_bridge_attach(bridge->encoder, mtk_dp->next_bridge,
					&mtk_dp->bridge, flags);
		if (ret) {
			drm_warn(mtk_dp->drm_dev,
				 "Failed to attach external bridge: %d\n", ret);
			goto err_bridge_attach;
		}
	}

	mtk_dp->drm_dev = bridge->dev;

	mtk_dp_hwirq_enable(mtk_dp, true);

	return 0;

err_bridge_attach:
	mtk_dp_poweroff(mtk_dp);
err_aux_register:
	drm_dp_aux_unregister(&mtk_dp->aux);
	return ret;
}

static void mtk_dp_bridge_detach(struct drm_bridge *bridge)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);

	mtk_dp_hwirq_enable(mtk_dp, false);
	mtk_dp->drm_dev = NULL;
	mtk_dp_poweroff(mtk_dp);
	drm_dp_aux_unregister(&mtk_dp->aux);
}

static void mtk_dp_bridge_atomic_enable(struct drm_bridge *bridge,
					struct drm_bridge_state *old_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	int ret;

	mtk_dp->conn = drm_atomic_get_new_connector_for_encoder(old_state->base.state,
								bridge->encoder);
	if (!mtk_dp->conn) {
		drm_err(mtk_dp->drm_dev,
			"Can't enable bridge as connector is missing\n");
		return;
	}

	/* power on aux */
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL_LANE,
			   DP_PWR_STATE_MASK);

	if (mtk_dp->train_info.cable_plugged_in) {
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
		usleep_range(2000, 5000);
	}

	/* Training */
	ret = mtk_dp_training(mtk_dp);
	if (ret) {
		drm_err(mtk_dp->drm_dev, "Training failed, %d\n", ret);
		goto power_off_aux;
	}

	ret = mtk_dp_video_config(mtk_dp);
	if (ret)
		goto power_off_aux;

	mtk_dp_video_enable(mtk_dp, true);

	mtk_dp->audio_enable =
		mtk_dp_edid_parse_audio_capabilities(mtk_dp,
						     &mtk_dp->info.audio_cur_cfg);
	if (mtk_dp->audio_enable) {
		mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);
		mtk_dp_audio_mute(mtk_dp, false);
	} else {
		memset(&mtk_dp->info.audio_cur_cfg, 0,
		       sizeof(mtk_dp->info.audio_cur_cfg));
	}

	mtk_dp->enabled = true;
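	/*
	 * Tell the registered HDMI audio codec (if any) whether audio can
	 * now be streamed to the sink.
	 */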
	mtk_dp_update_plugged_status(mtk_dp);

	return;
power_off_aux:
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);
}

static void mtk_dp_bridge_atomic_disable(struct drm_bridge *bridge,
					 struct drm_bridge_state *old_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);

	mtk_dp->enabled = false;
	mtk_dp_update_plugged_status(mtk_dp);
	mtk_dp_video_enable(mtk_dp, false);
	mtk_dp_audio_mute(mtk_dp, true);

	if (mtk_dp->train_info.cable_plugged_in) {
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
		usleep_range(2000, 3000);
	}

	/* power off aux */
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);

	/* Ensure the sink is muted */
	msleep(20);
}

static enum drm_mode_status
mtk_dp_bridge_mode_valid(struct drm_bridge *bridge,
			 const struct drm_display_info *info,
			 const struct drm_display_mode *mode)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	u32 bpp = info->color_formats & DRM_COLOR_FORMAT_YCBCR422 ? 16 : 24;
	u32 rate = min_t(u32, drm_dp_max_link_rate(mtk_dp->rx_cap) *
			      drm_dp_max_lane_count(mtk_dp->rx_cap),
			 drm_dp_bw_code_to_link_rate(mtk_dp->max_linkrate) *
			 mtk_dp->max_lanes);

	if (rate < mode->clock * bpp / 8)
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

static u32 *mtk_dp_bridge_atomic_get_output_bus_fmts(struct drm_bridge *bridge,
						     struct drm_bridge_state *bridge_state,
						     struct drm_crtc_state *crtc_state,
						     struct drm_connector_state *conn_state,
						     unsigned int *num_output_fmts)
{
	u32 *output_fmts;

	*num_output_fmts = 0;
	output_fmts = kmalloc(sizeof(*output_fmts), GFP_KERNEL);
	if (!output_fmts)
		return NULL;
	*num_output_fmts = 1;
	output_fmts[0] = MEDIA_BUS_FMT_FIXED;
	return output_fmts;
}

static const u32 mt8195_input_fmts[] = {
	MEDIA_BUS_FMT_RGB888_1X24,
	MEDIA_BUS_FMT_YUV8_1X24,
	MEDIA_BUS_FMT_YUYV8_1X16,
};

static u32 *mtk_dp_bridge_atomic_get_input_bus_fmts(struct drm_bridge *bridge,
						    struct drm_bridge_state *bridge_state,
						    struct drm_crtc_state *crtc_state,
						    struct drm_connector_state *conn_state,
						    u32 output_fmt,
						    unsigned int *num_input_fmts)
{
	u32 *input_fmts;
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	struct drm_display_mode *mode = &crtc_state->adjusted_mode;
	struct drm_display_info *display_info =
		&conn_state->connector->display_info;
	u32 rate = min_t(u32, drm_dp_max_link_rate(mtk_dp->rx_cap) *
			      drm_dp_max_lane_count(mtk_dp->rx_cap),
			 drm_dp_bw_code_to_link_rate(mtk_dp->max_linkrate) *
			 mtk_dp->max_lanes);

	*num_input_fmts = 0;

	/*
	 * If the link rate is lower than the data rate of RGB888 but higher
	 * than the data rate of YUV422, and the sink supports YUV422, output
	 * the YUV422 format. This lets more modes fit within the available
	 * link bandwidth.
	 */
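	/*
	 * Illustrative example (assumed numbers, not taken from the code):
	 * a 3840x2160@60 mode has a pixel clock of about 594000 kHz. On a
	 * two-lane HBR3 link, rate = 2 * 810000 = 1620000, which is below
	 * the RGB888 data rate (594000 * 24 / 8 = 1782000) but above the
	 * YUV422 data rate (594000 * 16 / 8 = 1188000). If the sink also
	 * advertises YCbCr 4:2:2 support, only MEDIA_BUS_FMT_YUYV8_1X16 is
	 * reported for that mode.
	 */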
	if ((rate < (mode->clock * 24 / 8)) &&
	    (rate > (mode->clock * 16 / 8)) &&
	    (display_info->color_formats & DRM_COLOR_FORMAT_YCBCR422)) {
		input_fmts = kcalloc(1, sizeof(*input_fmts), GFP_KERNEL);
		if (!input_fmts)
			return NULL;
		*num_input_fmts = 1;
		input_fmts[0] = MEDIA_BUS_FMT_YUYV8_1X16;
	} else {
		input_fmts = kcalloc(ARRAY_SIZE(mt8195_input_fmts),
				     sizeof(*input_fmts),
				     GFP_KERNEL);
		if (!input_fmts)
			return NULL;

		*num_input_fmts = ARRAY_SIZE(mt8195_input_fmts);
		memcpy(input_fmts, mt8195_input_fmts, sizeof(mt8195_input_fmts));
	}

	return input_fmts;
}

static int mtk_dp_bridge_atomic_check(struct drm_bridge *bridge,
				      struct drm_bridge_state *bridge_state,
				      struct drm_crtc_state *crtc_state,
				      struct drm_connector_state *conn_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	struct drm_crtc *crtc = conn_state->crtc;
	unsigned int input_bus_format;

	input_bus_format = bridge_state->input_bus_cfg.format;

	dev_dbg(mtk_dp->dev, "input format 0x%04x, output format 0x%04x\n",
		bridge_state->input_bus_cfg.format,
		bridge_state->output_bus_cfg.format);

	if (input_bus_format == MEDIA_BUS_FMT_YUYV8_1X16)
		mtk_dp->info.format = DP_PIXELFORMAT_YUV422;
	else
		mtk_dp->info.format = DP_PIXELFORMAT_RGB;

	if (!crtc) {
		drm_err(mtk_dp->drm_dev,
			"Can't enable bridge as connector state doesn't have a crtc\n");
		return -EINVAL;
	}

	drm_display_mode_to_videomode(&crtc_state->adjusted_mode, &mtk_dp->info.vm);

	return 0;
}

static const struct drm_bridge_funcs mtk_dp_bridge_funcs = {
	.atomic_check = mtk_dp_bridge_atomic_check,
	.atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_bridge_destroy_state,
	.atomic_get_output_bus_fmts = mtk_dp_bridge_atomic_get_output_bus_fmts,
	.atomic_get_input_bus_fmts = mtk_dp_bridge_atomic_get_input_bus_fmts,
	.atomic_reset = drm_atomic_helper_bridge_reset,
	.attach = mtk_dp_bridge_attach,
	.detach = mtk_dp_bridge_detach,
	.atomic_enable = mtk_dp_bridge_atomic_enable,
	.atomic_disable = mtk_dp_bridge_atomic_disable,
	.mode_valid = mtk_dp_bridge_mode_valid,
	.get_edid = mtk_dp_get_edid,
	.detect = mtk_dp_bdg_detect,
};

static void mtk_dp_debounce_timer(struct timer_list *t)
{
	struct mtk_dp *mtk_dp = from_timer(mtk_dp, t, debounce_timer);

	mtk_dp->need_debounce = true;
}

/*
 * HDMI audio codec callbacks
 */
static int mtk_dp_audio_hw_params(struct device *dev, void *data,
				  struct hdmi_codec_daifmt *daifmt,
				  struct hdmi_codec_params *params)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	if (!mtk_dp->enabled) {
		dev_err(mtk_dp->dev, "%s, DP is not ready!\n", __func__);
		return -ENODEV;
	}

	mtk_dp->info.audio_cur_cfg.channels = params->cea.channels;
	mtk_dp->info.audio_cur_cfg.sample_rate = params->sample_rate;

	mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);

	return 0;
}

static int mtk_dp_audio_startup(struct device *dev, void *data)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	mtk_dp_audio_mute(mtk_dp, false);

	return 0;
}

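/*
 * Called by the hdmi-codec driver when the PCM stream is shut down; mute
 * the audio packets on the link again.
 */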
static void mtk_dp_audio_shutdown(struct device *dev, void *data)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	mtk_dp_audio_mute(mtk_dp, true);
}

static int mtk_dp_audio_get_eld(struct device *dev, void *data, uint8_t *buf,
				size_t len)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	if (mtk_dp->enabled)
		memcpy(buf, mtk_dp->conn->eld, len);
	else
		memset(buf, 0, len);

	return 0;
}

static int mtk_dp_audio_hook_plugged_cb(struct device *dev, void *data,
					hdmi_codec_plugged_cb fn,
					struct device *codec_dev)
{
	struct mtk_dp *mtk_dp = data;

	mutex_lock(&mtk_dp->update_plugged_status_lock);
	mtk_dp->plugged_cb = fn;
	mtk_dp->codec_dev = codec_dev;
	mutex_unlock(&mtk_dp->update_plugged_status_lock);

	mtk_dp_update_plugged_status(mtk_dp);

	return 0;
}

static const struct hdmi_codec_ops mtk_dp_audio_codec_ops = {
	.hw_params = mtk_dp_audio_hw_params,
	.audio_startup = mtk_dp_audio_startup,
	.audio_shutdown = mtk_dp_audio_shutdown,
	.get_eld = mtk_dp_audio_get_eld,
	.hook_plugged_cb = mtk_dp_audio_hook_plugged_cb,
	.no_capture_mute = 1,
};

static int mtk_dp_register_audio_driver(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
	struct hdmi_codec_pdata codec_data = {
		.ops = &mtk_dp_audio_codec_ops,
		.max_i2s_channels = 8,
		.i2s = 1,
		.data = mtk_dp,
	};

	mtk_dp->audio_pdev = platform_device_register_data(dev,
							    HDMI_CODEC_DRV_NAME,
							    PLATFORM_DEVID_AUTO,
							    &codec_data,
							    sizeof(codec_data));
	return PTR_ERR_OR_ZERO(mtk_dp->audio_pdev);
}

static int mtk_dp_probe(struct platform_device *pdev)
{
	struct mtk_dp *mtk_dp;
	struct device *dev = &pdev->dev;
	int ret, irq_num;

	mtk_dp = devm_kzalloc(dev, sizeof(*mtk_dp), GFP_KERNEL);
	if (!mtk_dp)
		return -ENOMEM;

	mtk_dp->dev = dev;
	mtk_dp->data = (struct mtk_dp_data *)of_device_get_match_data(dev);

	irq_num = platform_get_irq(pdev, 0);
	if (irq_num < 0)
		return dev_err_probe(dev, irq_num,
				     "failed to request dp irq resource\n");

	mtk_dp->next_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
	if (IS_ERR(mtk_dp->next_bridge) &&
	    PTR_ERR(mtk_dp->next_bridge) == -ENODEV)
		mtk_dp->next_bridge = NULL;
	else if (IS_ERR(mtk_dp->next_bridge))
		return dev_err_probe(dev, PTR_ERR(mtk_dp->next_bridge),
				     "Failed to get bridge\n");

	ret = mtk_dp_dt_parse(mtk_dp, pdev);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to parse dt\n");

	drm_dp_aux_init(&mtk_dp->aux);
	mtk_dp->aux.name = "aux_mtk_dp";
	mtk_dp->aux.transfer = mtk_dp_aux_transfer;

	spin_lock_init(&mtk_dp->irq_thread_lock);

	ret = devm_request_threaded_irq(dev, irq_num, mtk_dp_hpd_event,
					mtk_dp_hpd_event_thread,
					IRQ_TYPE_LEVEL_HIGH, dev_name(dev),
					mtk_dp);
	if (ret)
		return dev_err_probe(dev, ret,
				     "failed to request mediatek dptx irq\n");

	mutex_init(&mtk_dp->update_plugged_status_lock);

	platform_set_drvdata(pdev, mtk_dp);

	if (mtk_dp->data->audio_supported) {
		ret = mtk_dp_register_audio_driver(dev);
		if (ret) {
			dev_err(dev, "Failed to register audio driver: %d\n",
				ret);
			return ret;
		}
	}

	mtk_dp->phy_dev = platform_device_register_data(dev, "mediatek-dp-phy",
							PLATFORM_DEVID_AUTO,
							&mtk_dp->regs,
							sizeof(struct regmap *));
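	/*
	 * The DP PHY is handled by a separate driver bound to the
	 * "mediatek-dp-phy" child device registered above; the pointer to
	 * our regmap is handed to it as platform data.
	 */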
	if (IS_ERR(mtk_dp->phy_dev))
		return dev_err_probe(dev, PTR_ERR(mtk_dp->phy_dev),
				     "Failed to create device mediatek-dp-phy\n");

	mtk_dp_get_calibration_data(mtk_dp);

	mtk_dp->phy = devm_phy_get(&mtk_dp->phy_dev->dev, "dp");

	if (IS_ERR(mtk_dp->phy)) {
		platform_device_unregister(mtk_dp->phy_dev);
		return dev_err_probe(dev, PTR_ERR(mtk_dp->phy),
				     "Failed to get phy\n");
	}

	mtk_dp->bridge.funcs = &mtk_dp_bridge_funcs;
	mtk_dp->bridge.of_node = dev->of_node;

	mtk_dp->bridge.ops =
		DRM_BRIDGE_OP_DETECT | DRM_BRIDGE_OP_EDID | DRM_BRIDGE_OP_HPD;
	mtk_dp->bridge.type = mtk_dp->data->bridge_type;

	drm_bridge_add(&mtk_dp->bridge);

	mtk_dp->need_debounce = true;
	timer_setup(&mtk_dp->debounce_timer, mtk_dp_debounce_timer, 0);

	pm_runtime_enable(dev);
	pm_runtime_get_sync(dev);

	return 0;
}

static int mtk_dp_remove(struct platform_device *pdev)
{
	struct mtk_dp *mtk_dp = platform_get_drvdata(pdev);

	pm_runtime_put(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	del_timer_sync(&mtk_dp->debounce_timer);
	drm_bridge_remove(&mtk_dp->bridge);
	platform_device_unregister(mtk_dp->phy_dev);
	if (mtk_dp->audio_pdev)
		platform_device_unregister(mtk_dp->audio_pdev);

	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int mtk_dp_suspend(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	mtk_dp_power_disable(mtk_dp);
	mtk_dp_hwirq_enable(mtk_dp, false);
	pm_runtime_put_sync(dev);

	return 0;
}

static int mtk_dp_resume(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	pm_runtime_get_sync(dev);
	mtk_dp_init_port(mtk_dp);
	mtk_dp_hwirq_enable(mtk_dp, true);
	mtk_dp_power_enable(mtk_dp);

	return 0;
}
#endif

static SIMPLE_DEV_PM_OPS(mtk_dp_pm_ops, mtk_dp_suspend, mtk_dp_resume);

static const struct mtk_dp_data mt8195_edp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_eDP,
	.smc_cmd = MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE,
	.efuse_fmt = mt8195_edp_efuse_fmt,
	.audio_supported = false,
};

static const struct mtk_dp_data mt8195_dp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_DisplayPort,
	.smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE,
	.efuse_fmt = mt8195_dp_efuse_fmt,
	.audio_supported = true,
};

static const struct of_device_id mtk_dp_of_match[] = {
	{
		.compatible = "mediatek,mt8195-edp-tx",
		.data = &mt8195_edp_data,
	},
	{
		.compatible = "mediatek,mt8195-dp-tx",
		.data = &mt8195_dp_data,
	},
	{},
};
MODULE_DEVICE_TABLE(of, mtk_dp_of_match);

static struct platform_driver mtk_dp_driver = {
	.probe = mtk_dp_probe,
	.remove = mtk_dp_remove,
	.driver = {
		.name = "mediatek-drm-dp",
		.of_match_table = mtk_dp_of_match,
		.pm = &mtk_dp_pm_ops,
	},
};

module_platform_driver(mtk_dp_driver);

MODULE_AUTHOR("Jitao Shi <jitao.shi@mediatek.com>");
MODULE_AUTHOR("Markus Schneider-Pargmann <msp@baylibre.com>");
MODULE_AUTHOR("Bo-Chen Chen <rex-bc.chen@mediatek.com>");
MODULE_DESCRIPTION("MediaTek DisplayPort Driver");
MODULE_LICENSE("GPL");