1 /* 2 * Copyright © 2018 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 21 * DEALINGS IN THE SOFTWARE. 
 *
 * Authors:
 *   Madhav Chauhan <madhav.chauhan@intel.com>
 *   Jani Nikula <jani.nikula@intel.com>
 */

#include <drm/drm_atomic_helper.h>
#include <drm/drm_mipi_dsi.h>

#include "intel_atomic.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_dsi.h"
#include "intel_panel.h"
#include "intel_vdsc.h"
#include "skl_scaler.h"
#include "skl_universal_plane.h"

/* Free header credits currently available in the transcoder's command FIFO. */
static int header_credits_available(struct drm_i915_private *dev_priv,
				    enum transcoder dsi_trans)
{
	return (intel_de_read(dev_priv, DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK)
		>> FREE_HEADER_CREDIT_SHIFT;
}

/* Free payload credits currently available in the transcoder's command FIFO. */
static int payload_credits_available(struct drm_i915_private *dev_priv,
				     enum transcoder dsi_trans)
{
	return (intel_de_read(dev_priv, DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK)
		>> FREE_PLOAD_CREDIT_SHIFT;
}

/* Poll (up to 100 us) until all header credits have been released by HW. */
static void wait_for_header_credits(struct drm_i915_private *dev_priv,
				    enum transcoder dsi_trans)
{
	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
			MAX_HEADER_CREDIT, 100))
		drm_err(&dev_priv->drm, "DSI header credits not released\n");
}

/* Poll (up to 100 us) until all payload credits have been released by HW. */
static void wait_for_payload_credits(struct drm_i915_private *dev_priv,
				     enum transcoder dsi_trans)
{
	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
			MAX_PLOAD_CREDIT, 100))
		drm_err(&dev_priv->drm, "DSI payload credits not released\n");
}

/* Each DSI port has a dedicated transcoder: PORT_A -> DSI_0, else DSI_1. */
static enum transcoder dsi_port_to_transcoder(enum port port)
{
	if (port == PORT_A)
		return TRANSCODER_DSI_0;
	else
		return TRANSCODER_DSI_1;
}

/*
 * Flush queued DSI commands to the panel: wait for the command FIFO
 * credits to drain, send a DCS NOP on each port, then wait for the
 * LP TX in progress bit to clear.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits
to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
		wait_for_payload_credits(dev_priv, dsi_trans);
	}

	/* send nop DCS command */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM;
		dsi->channel = 0;
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			drm_err(&dev_priv->drm,
				"error sending DCS NOP command\n");
	}

	/* wait for header credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
	}

	/* wait for LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			drm_err(&dev_priv->drm, "LPTX bit not cleared\n");
	}
}

/*
 * Copy @len bytes of payload into the port's TX payload FIFO, one
 * little-endian dword at a time. Returns false if a payload credit is
 * not available for a dword.
 */
static bool add_payld_to_queue(struct intel_dsi_host *host, const u8 *data,
			       u32 len)
{
	struct intel_dsi *intel_dsi = host->intel_dsi;
	struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
	enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
	int free_credits;
	int i, j;

	for (i = 0; i < len; i += 4) {
		u32 tmp = 0;

		free_credits = payload_credits_available(dev_priv, dsi_trans);
		if (free_credits < 1) {
			drm_err(&dev_priv->drm,
				"Payload credit not available\n");
			return false;
		}

		/* pack up to 4 bytes, LSB first */
		for (j = 0; j < min_t(u32, len - i, 4); j++)
			tmp |= *data++ << 8 * j;

		intel_de_write(dev_priv, DSI_CMD_TXPYLD(dsi_trans), tmp);
	}

	return true;
}

/*
 * Write the 4-byte MIPI packet header into the TX header FIFO.
 * Requires one free header credit; returns 0 on success, -1 otherwise.
 */
static int dsi_send_pkt_hdr(struct intel_dsi_host *host,
			    struct mipi_dsi_packet pkt, bool enable_lpdt)
{
	struct intel_dsi *intel_dsi = host->intel_dsi;
	struct
drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
	enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
	u32 tmp;
	int free_credits;

	/* check if header credit available */
	free_credits = header_credits_available(dev_priv, dsi_trans);
	if (free_credits < 1) {
		drm_err(&dev_priv->drm,
			"send pkt header failed, not enough hdr credits\n");
		return -1;
	}

	tmp = intel_de_read(dev_priv, DSI_CMD_TXHDR(dsi_trans));

	if (pkt.payload)
		tmp |= PAYLOAD_PRESENT;
	else
		tmp &= ~PAYLOAD_PRESENT;

	tmp &= ~VBLANK_FENCE;

	if (enable_lpdt)
		tmp |= LP_DATA_TRANSFER;

	/* header[0] carries VC and data type; header[1..2] are the word count */
	tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK);
	tmp |= ((pkt.header[0] & VC_MASK) << VC_SHIFT);
	tmp |= ((pkt.header[0] & DT_MASK) << DT_SHIFT);
	tmp |= (pkt.header[1] << PARAM_WC_LOWER_SHIFT);
	tmp |= (pkt.header[2] << PARAM_WC_UPPER_SHIFT);
	intel_de_write(dev_priv, DSI_CMD_TXHDR(dsi_trans), tmp);

	return 0;
}

/*
 * Queue a packet's payload bytes, after checking it fits within the
 * payload FIFO capacity. Returns 0 on success, -1 on failure.
 */
static int dsi_send_pkt_payld(struct intel_dsi_host *host,
			      struct mipi_dsi_packet pkt)
{
	struct intel_dsi *intel_dsi = host->intel_dsi;
	struct drm_i915_private *i915 = to_i915(intel_dsi->base.base.dev);

	/* payload queue can accept *256 bytes*, check limit */
	if (pkt.payload_length > MAX_PLOAD_CREDIT * 4) {
		drm_err(&i915->drm, "payload size exceeds max queue limit\n");
		return -1;
	}

	/* load data into command payload queue */
	if (!add_payld_to_queue(host, pkt.payload,
				pkt.payload_length)) {
		drm_err(&i915->drm, "adding payload to queue failed\n");
		return -1;
	}

	return 0;
}

/*
 * Request a manual frame update for a TE-driven command mode pipe,
 * based on which TE mode flag is set in the crtc state.
 */
void icl_dsi_frame_update(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 tmp, mode_flags;
	enum port port;

	mode_flags = crtc_state->mode_flags;

	/*
	 * case 1 also covers
dual link
	 * In case of dual link, frame update should be set on
	 * DSI_0
	 */
	if (mode_flags & I915_MODE_FLAG_DSI_USE_TE0)
		port = PORT_A;
	else if (mode_flags & I915_MODE_FLAG_DSI_USE_TE1)
		port = PORT_B;
	else
		return;

	tmp = intel_de_read(dev_priv, DSI_CMD_FRMCTL(port));
	tmp |= DSI_FRAME_UPDATE_REQUEST;
	intel_de_write(dev_priv, DSI_CMD_FRMCTL(port), tmp);
}

/* Program per-PHY voltage swing and de-emphasis values for the DSI lanes. */
static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum phy phy;
	u32 tmp;
	int lane;

	for_each_dsi_phy(phy, intel_dsi->phys) {
		/*
		 * Program voltage swing and pre-emphasis level values as per
		 * table in BSPEC under DDI buffer programing
		 */
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);

		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);

		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), tmp);

		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_AUX(phy));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
intel_de_write(dev_priv, ICL_PORT_TX_DW2_AUX(phy), tmp);

		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW4_AUX(phy));
		tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		tmp |= POST_CURSOR_1(0x0);
		tmp |= POST_CURSOR_2(0x0);
		tmp |= CURSOR_COEFF(0x3f);
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_AUX(phy), tmp);

		for (lane = 0; lane <= 3; lane++) {
			/* Bspec: must not use GRP register for write */
			tmp = intel_de_read(dev_priv,
					    ICL_PORT_TX_DW4_LN(lane, phy));
			tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
				 CURSOR_COEFF_MASK);
			tmp |= POST_CURSOR_1(0x0);
			tmp |= POST_CURSOR_2(0x0);
			tmp |= CURSOR_COEFF(0x3f);
			intel_de_write(dev_priv,
				       ICL_PORT_TX_DW4_LN(lane, phy), tmp);
		}
	}
}

/* Configure the dual link splitter: front/back split or pixel interleave. */
static void configure_dual_link_mode(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 dss_ctl1;

	dss_ctl1 = intel_de_read(dev_priv, DSS_CTL1);
	dss_ctl1 |= SPLITTER_ENABLE;
	dss_ctl1 &= ~OVERLAP_PIXELS_MASK;
	dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap);

	if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) {
		const struct drm_display_mode *adjusted_mode =
					&pipe_config->hw.adjusted_mode;
		u32 dss_ctl2;
		u16 hactive = adjusted_mode->crtc_hdisplay;
		u16 dl_buffer_depth;

		dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE;
		/* each link carries half the active pixels plus the overlap */
		dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap;

		if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH)
			drm_err(&dev_priv->drm,
				"DL buffer depth exceed max value\n");

		dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK;
		dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
		dss_ctl2 = intel_de_read(dev_priv, DSS_CTL2);
		dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK;
		dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
intel_de_write(dev_priv, DSS_CTL2, dss_ctl2);
	} else {
		/* Interleave */
		dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE;
	}

	intel_de_write(dev_priv, DSS_CTL1, dss_ctl1);
}

/* aka DSI 8X clock */
static int afe_clk(struct intel_encoder *encoder,
		   const struct intel_crtc_state *crtc_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	int bpp;

	if (crtc_state->dsc.compression_enable)
		bpp = crtc_state->dsc.compressed_bpp;
	else
		bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);

	return DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp, intel_dsi->lane_count);
}

/* Program the escape clock dividers, derived from the AFE (8X) clock. */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder,
					  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	int afe_clk_khz;
	u32 esc_clk_div_m;

	afe_clk_khz = afe_clk(encoder, crtc_state);
	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);

	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, ICL_DSI_ESC_CLK_DIV(port),
			       esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		intel_de_posting_read(dev_priv, ICL_DSI_ESC_CLK_DIV(port));
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, ICL_DPHY_ESC_CLK_DIV(port),
			       esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		intel_de_posting_read(dev_priv, ICL_DPHY_ESC_CLK_DIV(port));
	}
}

/* Take an IO power domain wakeref for each DSI port in use. */
static void get_dsi_io_power_domains(struct drm_i915_private *dev_priv,
				     struct intel_dsi *intel_dsi)
{
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		drm_WARN_ON(&dev_priv->drm, intel_dsi->io_wakeref[port]);
		intel_dsi->io_wakeref[port] =
			intel_display_power_get(dev_priv,
						port == PORT_A ?
POWER_DOMAIN_PORT_DDI_A_IO :
						POWER_DOMAIN_PORT_DDI_B_IO);
	}
}

/* Put the combo PHYs into DSI mode and take the IO power domains. */
static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, ICL_DSI_IO_MODECTL(port));
		tmp |= COMBO_PHY_MODE_DSI;
		intel_de_write(dev_priv, ICL_DSI_IO_MODECTL(port), tmp);
	}

	get_dsi_io_power_domains(dev_priv, intel_dsi);
}

/* Power up the combo PHY lanes for each DSI PHY in use. */
static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum phy phy;

	for_each_dsi_phy(phy, intel_dsi->phys)
		intel_combo_phy_power_up_lanes(dev_priv, phy, true,
					       intel_dsi->lane_count, false);
}

/* Lane PHY configuration: loadgen select and latency optimization. */
static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum phy phy;
	u32 tmp;
	int lane;

	/* Step 4b(i) set loadgen select for transmit and aux lanes */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW4_AUX(phy));
		tmp &= ~LOADGEN_SELECT;
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_AUX(phy), tmp);
		for (lane = 0; lane <= 3; lane++) {
			tmp = intel_de_read(dev_priv,
					    ICL_PORT_TX_DW4_LN(lane, phy));
			tmp &= ~LOADGEN_SELECT;
			if (lane != 2)
				tmp |= LOADGEN_SELECT;
			intel_de_write(dev_priv,
				       ICL_PORT_TX_DW4_LN(lane, phy), tmp);
		}
	}

	/* Step 4b(ii) set latency optimization for transmit and aux lanes */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_AUX(phy));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |=
FRC_LATENCY_OPTIM_VAL(0x5);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_AUX(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), tmp);

		/* For EHL, TGL, set latency optimization for PCS_DW1 lanes */
		if (IS_JSL_EHL(dev_priv) || (DISPLAY_VER(dev_priv) >= 12)) {
			tmp = intel_de_read(dev_priv,
					    ICL_PORT_PCS_DW1_AUX(phy));
			tmp &= ~LATENCY_OPTIM_MASK;
			tmp |= LATENCY_OPTIM_VAL(0);
			intel_de_write(dev_priv, ICL_PORT_PCS_DW1_AUX(phy),
				       tmp);

			tmp = intel_de_read(dev_priv,
					    ICL_PORT_PCS_DW1_LN0(phy));
			tmp &= ~LATENCY_OPTIM_MASK;
			tmp |= LATENCY_OPTIM_VAL(0x1);
			intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy),
				       tmp);
		}
	}

}

/* Voltage swing programming sequence: keeper, SUS clock, swing values. */
static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum phy phy;

	/* clear common keeper enable bit */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy));
		tmp &= ~COMMON_KEEPER_EN;
		intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_AUX(phy));
		tmp &= ~COMMON_KEEPER_EN;
		intel_de_write(dev_priv, ICL_PORT_PCS_DW1_AUX(phy), tmp);
	}

	/*
	 * Set SUS Clock Config bitfield to 11b
	 * Note: loadgen select program is done
	 * as part of lane phy sequence configuration
	 */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
		tmp |= SUS_CLOCK_CONFIG;
		intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), tmp);
	}

	/* Clear training enable to change swing values */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv,
ICL_PORT_TX_DW5_LN0(phy));
		tmp &= ~TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
		tmp &= ~TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
	}

	/* Program swing and de-emphasis */
	dsi_program_swing_and_deemphasis(encoder);

	/* Set training enable to trigger update */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
		tmp |= TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
		tmp |= TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
	}
}

/* Enable the DDI buffer on each DSI port and wait for it to leave idle. */
static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
		tmp |= DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(port), tmp);

		if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
				  DDI_BUF_IS_IDLE),
				  500))
			drm_err(&dev_priv->drm, "DDI port:%c buffer idle\n",
				port_name(port));
	}
}

/* Program D-PHY timings: T-INIT master, clock/data lane timing parameters. */
static void
gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum port port;
	enum phy phy;

	/* Program T-INIT master registers */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, ICL_DSI_T_INIT_MASTER(port));
		tmp &= ~MASTER_INIT_TIMER_MASK;
		tmp |= intel_dsi->init_count;
		intel_de_write(dev_priv,
ICL_DSI_T_INIT_MASTER(port), tmp);
	}

	/* Program DPHY clock lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, DPHY_CLK_TIMING_PARAM(port),
			       intel_dsi->dphy_reg);

		/* shadow register inside display core */
		intel_de_write(dev_priv, DSI_CLK_TIMING_PARAM(port),
			       intel_dsi->dphy_reg);
	}

	/* Program DPHY data lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, DPHY_DATA_TIMING_PARAM(port),
			       intel_dsi->dphy_data_lane_reg);

		/* shadow register inside display core */
		intel_de_write(dev_priv, DSI_DATA_TIMING_PARAM(port),
			       intel_dsi->dphy_data_lane_reg);
	}

	/*
	 * If DSI link operating at or below an 800 MHz,
	 * TA_SURE should be override and programmed to
	 * a value '0' inside TA_PARAM_REGISTERS otherwise
	 * leave all fields at HW default values.
	 */
	if (IS_DISPLAY_VER(dev_priv, 11)) {
		if (afe_clk(encoder, crtc_state) <= 800000) {
			for_each_dsi_port(port, intel_dsi->ports) {
				tmp = intel_de_read(dev_priv,
						    DPHY_TA_TIMING_PARAM(port));
				tmp &= ~TA_SURE_MASK;
				tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
				intel_de_write(dev_priv,
					       DPHY_TA_TIMING_PARAM(port),
					       tmp);

				/* shadow register inside display core */
				tmp = intel_de_read(dev_priv,
						    DSI_TA_TIMING_PARAM(port));
				tmp &= ~TA_SURE_MASK;
				tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
				intel_de_write(dev_priv,
					       DSI_TA_TIMING_PARAM(port), tmp);
			}
		}
	}

	if (IS_JSL_EHL(dev_priv)) {
		for_each_dsi_phy(phy, intel_dsi->phys) {
			tmp = intel_de_read(dev_priv, ICL_DPHY_CHKN(phy));
			tmp |= ICL_DPHY_CHKN_AFE_OVER_PPI_STRAP;
			intel_de_write(dev_priv, ICL_DPHY_CHKN(phy), tmp);
		}
	}
}

/* Gate the DDI clocks of all DSI PHYs (under the DPLL lock). */
static void gen11_dsi_gate_clocks(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32
tmp;
	enum phy phy;

	mutex_lock(&dev_priv->dpll.lock);
	tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_dsi_phy(phy, intel_dsi->phys)
		tmp |= ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);

	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, tmp);
	mutex_unlock(&dev_priv->dpll.lock);
}

/* Ungate the DDI clocks of all DSI PHYs (under the DPLL lock). */
static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum phy phy;

	mutex_lock(&dev_priv->dpll.lock);
	tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_dsi_phy(phy, intel_dsi->phys)
		tmp &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);

	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, tmp);
	mutex_unlock(&dev_priv->dpll.lock);
}

/* Report whether the DDI clock is ungated for any of the DSI PHYs. */
static bool gen11_dsi_is_clock_enabled(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	bool clock_enabled = false;
	enum phy phy;
	u32 tmp;

	tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);

	for_each_dsi_phy(phy, intel_dsi->phys) {
		if (!(tmp & ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)))
			clock_enabled = true;
	}

	return clock_enabled;
}

/* Map the shared DPLL to the DSI PHYs via DPCLKA_CFGCR0. */
static void gen11_dsi_map_pll(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy;
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_dsi_phy(phy, intel_dsi->phys) {
		val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
		val |= ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
	}
	intel_de_write(dev_priv,
ICL_DPCLKA_CFGCR0, val);

	for_each_dsi_phy(phy, intel_dsi->phys) {
		if (DISPLAY_VER(dev_priv) >= 12)
			val |= ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
		else
			val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	}
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0);

	mutex_unlock(&dev_priv->dpll.lock);
}

/*
 * Program the DSI transcoder(s): EOTP, link calibration, clock stop,
 * pixel format, operation mode, port sync and pipe selection.
 */
static void
gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
			       const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc);
	enum pipe pipe = intel_crtc->pipe;
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans));

		if (intel_dsi->eotp_pkt)
			tmp &= ~EOTP_DISABLED;
		else
			tmp |= EOTP_DISABLED;

		/* enable link calibration if freq > 1.5Gbps */
		if (afe_clk(encoder, pipe_config) >= 1500 * 1000) {
			tmp &= ~LINK_CALIBRATION_MASK;
			tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
		}

		/* configure continuous clock */
		tmp &= ~CONTINUOUS_CLK_MASK;
		if (intel_dsi->clock_stop)
			tmp |= CLK_ENTER_LP_AFTER_DATA;
		else
			tmp |= CLK_HS_CONTINUOUS;

		/* configure buffer threshold limit to minimum */
		tmp &= ~PIX_BUF_THRESHOLD_MASK;
		tmp |= PIX_BUF_THRESHOLD_1_4;

		/* set virtual channel to '0' */
		tmp &= ~PIX_VIRT_CHAN_MASK;
		tmp |= PIX_VIRT_CHAN(0);

		/* program BGR transmission */
		if (intel_dsi->bgr_enabled)
			tmp |= BGR_TRANSMISSION;

		/* select pixel format */
		tmp &= ~PIX_FMT_MASK;
		if (pipe_config->dsc.compression_enable) {
			tmp |= PIX_FMT_COMPRESSED;
		} else {
			switch (intel_dsi->pixel_format) {
			default:
				MISSING_CASE(intel_dsi->pixel_format);
				fallthrough;
			case MIPI_DSI_FMT_RGB565:
				tmp |= PIX_FMT_RGB565;
				break;
			case MIPI_DSI_FMT_RGB666_PACKED:
				tmp |= PIX_FMT_RGB666_PACKED;
				break;
			case MIPI_DSI_FMT_RGB666:
				tmp |= PIX_FMT_RGB666_LOOSE;
				break;
			case MIPI_DSI_FMT_RGB888:
				tmp |= PIX_FMT_RGB888;
				break;
			}
		}

		if (DISPLAY_VER(dev_priv) >= 12) {
			if (is_vid_mode(intel_dsi))
				tmp |= BLANKING_PACKET_ENABLE;
		}

		/* program DSI operation mode */
		if (is_vid_mode(intel_dsi)) {
			tmp &= ~OP_MODE_MASK;
			switch (intel_dsi->video_mode_format) {
			default:
				MISSING_CASE(intel_dsi->video_mode_format);
				fallthrough;
			case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS:
				tmp |= VIDEO_MODE_SYNC_EVENT;
				break;
			case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE:
				tmp |= VIDEO_MODE_SYNC_PULSE;
				break;
			}
		} else {
			/*
			 * FIXME: Retrieve this info from VBT.
			 * As per the spec when dsi transcoder is operating
			 * in TE GATE mode, TE comes from GPIO
			 * which is UTIL PIN for DSI 0.
			 * Also this GPIO would not be used for other
			 * purposes is an assumption.
			 */
			tmp &= ~OP_MODE_MASK;
			tmp |= CMD_MODE_TE_GATE;
			tmp |= TE_SOURCE_GPIO;
		}

		intel_de_write(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
	}

	/* enable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp |= PORT_SYNC_MODE_ENABLE;
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}

		/* configure stream splitting */
		configure_dual_link_mode(encoder, pipe_config);
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* select data lane width */
		tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~DDI_PORT_WIDTH_MASK;
		tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);

		/* select input pipe */
		tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
		switch (pipe) {
		default:
			MISSING_CASE(pipe);
			fallthrough;
		case PIPE_A:
			tmp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		case PIPE_D:
			tmp |= TRANS_DDI_EDP_INPUT_D_ONOFF;
			break;
		}

		/* enable DDI buffer */
		tmp |= TRANS_DDI_FUNC_ENABLE;
		intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* wait for link ready */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us((intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans)) &
				 LINK_READY), 2500))
			drm_err(&dev_priv->drm, "DSI link not ready\n");
	}
}

/* Program transcoder timings (H/V total, sync, blank) for the DSI pipe(s). */
static void
gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi
*intel_dsi = enc_to_intel_dsi(encoder);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	enum port port;
	enum transcoder dsi_trans;
	/* horizontal timings */
	u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
	u16 hback_porch;
	/* vertical timings */
	u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;
	int mul = 1, div = 1;

	/*
	 * Adjust horizontal timings (htotal, hsync_start, hsync_end) to account
	 * for slower link speed if DSC is enabled.
	 *
	 * The compression frequency ratio is the ratio between compressed and
	 * non-compressed link speeds, and simplifies down to the ratio between
	 * compressed and non-compressed bpp.
	 */
	if (crtc_state->dsc.compression_enable) {
		mul = crtc_state->dsc.compressed_bpp;
		div = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
	}

	hactive = adjusted_mode->crtc_hdisplay;

	if (is_vid_mode(intel_dsi))
		htotal = DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
	else
		htotal = DIV_ROUND_UP((hactive + 160) * mul, div);

	hsync_start = DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
	hsync_end = DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
	hsync_size = hsync_end - hsync_start;
	hback_porch = (adjusted_mode->crtc_htotal -
		       adjusted_mode->crtc_hsync_end);
	vactive = adjusted_mode->crtc_vdisplay;

	if (is_vid_mode(intel_dsi)) {
		vtotal = adjusted_mode->crtc_vtotal;
	} else {
		int bpp, line_time_us, byte_clk_period_ns;

		if (crtc_state->dsc.compression_enable)
			bpp = crtc_state->dsc.compressed_bpp;
		else
			bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);

		byte_clk_period_ns = 1000000 / afe_clk(encoder, crtc_state);
		line_time_us = (htotal * (bpp / 8) * byte_clk_period_ns) / (1000 * intel_dsi->lane_count);
		vtotal = vactive + DIV_ROUND_UP(400, line_time_us);
	}
	vsync_start =
adjusted_mode->crtc_vsync_start;
	vsync_end = adjusted_mode->crtc_vsync_end;
	vsync_shift = hsync_start - htotal / 2;

	/* dual link: each link carries half the pixels (plus any overlap) */
	if (intel_dsi->dual_link) {
		hactive /= 2;
		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
			hactive += intel_dsi->pixel_overlap;
		htotal /= 2;
	}

	/* minimum hactive as per bspec: 256 pixels */
	if (adjusted_mode->crtc_hdisplay < 256)
		drm_err(&dev_priv->drm, "hactive is less then 256 pixels\n");

	/* if RGB666 format, then hactive must be multiple of 4 pixels */
	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
		drm_err(&dev_priv->drm,
			"hactive pixels are not multiple of 4\n");

	/* program TRANS_HTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		intel_de_write(dev_priv, HTOTAL(dsi_trans),
			       (hactive - 1) | ((htotal - 1) << 16));
	}

	/* TRANS_HSYNC register to be programmed only for video mode */
	if (is_vid_mode(intel_dsi)) {
		if (intel_dsi->video_mode_format ==
		    VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) {
			/* BSPEC: hsync size should be atleast 16 pixels */
			if (hsync_size < 16)
				drm_err(&dev_priv->drm,
					"hsync size < 16 pixels\n");
		}

		if (hback_porch < 16)
			drm_err(&dev_priv->drm, "hback porch < 16 pixels\n");

		if (intel_dsi->dual_link) {
			hsync_start /= 2;
			hsync_end /= 2;
		}

		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			intel_de_write(dev_priv, HSYNC(dsi_trans),
				       (hsync_start - 1) | ((hsync_end - 1) << 16));
		}
	}

	/* program TRANS_VTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/*
		 * FIXME: Programing this by assuming progressive mode, since
		 * non-interlaced info from VBT is not saved inside
		 * struct drm_display_mode.
987 * For interlace mode: program required pixel minus 2 988 */ 989 intel_de_write(dev_priv, VTOTAL(dsi_trans), 990 (vactive - 1) | ((vtotal - 1) << 16)); 991 } 992 993 if (vsync_end < vsync_start || vsync_end > vtotal) 994 drm_err(&dev_priv->drm, "Invalid vsync_end value\n"); 995 996 if (vsync_start < vactive) 997 drm_err(&dev_priv->drm, "vsync_start less than vactive\n"); 998 999 /* program TRANS_VSYNC register for video mode only */ 1000 if (is_vid_mode(intel_dsi)) { 1001 for_each_dsi_port(port, intel_dsi->ports) { 1002 dsi_trans = dsi_port_to_transcoder(port); 1003 intel_de_write(dev_priv, VSYNC(dsi_trans), 1004 (vsync_start - 1) | ((vsync_end - 1) << 16)); 1005 } 1006 } 1007 1008 /* 1009 * FIXME: It has to be programmed only for video modes and interlaced 1010 * modes. Put the check condition here once interlaced 1011 * info available as described above. 1012 * program TRANS_VSYNCSHIFT register 1013 */ 1014 if (is_vid_mode(intel_dsi)) { 1015 for_each_dsi_port(port, intel_dsi->ports) { 1016 dsi_trans = dsi_port_to_transcoder(port); 1017 intel_de_write(dev_priv, VSYNCSHIFT(dsi_trans), 1018 vsync_shift); 1019 } 1020 } 1021 1022 /* program TRANS_VBLANK register, should be same as vtotal programmed */ 1023 if (DISPLAY_VER(dev_priv) >= 12) { 1024 for_each_dsi_port(port, intel_dsi->ports) { 1025 dsi_trans = dsi_port_to_transcoder(port); 1026 intel_de_write(dev_priv, VBLANK(dsi_trans), 1027 (vactive - 1) | ((vtotal - 1) << 16)); 1028 } 1029 } 1030 } 1031 1032 static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder) 1033 { 1034 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1035 struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder); 1036 enum port port; 1037 enum transcoder dsi_trans; 1038 u32 tmp; 1039 1040 for_each_dsi_port(port, intel_dsi->ports) { 1041 dsi_trans = dsi_port_to_transcoder(port); 1042 tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans)); 1043 tmp |= PIPECONF_ENABLE; 1044 intel_de_write(dev_priv, PIPECONF(dsi_trans), 
tmp); 1045 1046 /* wait for transcoder to be enabled */ 1047 if (intel_de_wait_for_set(dev_priv, PIPECONF(dsi_trans), 1048 I965_PIPECONF_ACTIVE, 10)) 1049 drm_err(&dev_priv->drm, 1050 "DSI transcoder not enabled\n"); 1051 } 1052 } 1053 1054 static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder, 1055 const struct intel_crtc_state *crtc_state) 1056 { 1057 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1058 struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder); 1059 enum port port; 1060 enum transcoder dsi_trans; 1061 u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul; 1062 1063 /* 1064 * escape clock count calculation: 1065 * BYTE_CLK_COUNT = TIME_NS/(8 * UI) 1066 * UI (nsec) = (10^6)/Bitrate 1067 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate 1068 * ESCAPE_CLK_COUNT = TIME_NS/ESC_CLK_NS 1069 */ 1070 divisor = intel_dsi_tlpx_ns(intel_dsi) * afe_clk(encoder, crtc_state) * 1000; 1071 mul = 8 * 1000000; 1072 hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul, 1073 divisor); 1074 lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor); 1075 ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor); 1076 1077 for_each_dsi_port(port, intel_dsi->ports) { 1078 dsi_trans = dsi_port_to_transcoder(port); 1079 1080 /* program hst_tx_timeout */ 1081 tmp = intel_de_read(dev_priv, DSI_HSTX_TO(dsi_trans)); 1082 tmp &= ~HSTX_TIMEOUT_VALUE_MASK; 1083 tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout); 1084 intel_de_write(dev_priv, DSI_HSTX_TO(dsi_trans), tmp); 1085 1086 /* FIXME: DSI_CALIB_TO */ 1087 1088 /* program lp_rx_host timeout */ 1089 tmp = intel_de_read(dev_priv, DSI_LPRX_HOST_TO(dsi_trans)); 1090 tmp &= ~LPRX_TIMEOUT_VALUE_MASK; 1091 tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout); 1092 intel_de_write(dev_priv, DSI_LPRX_HOST_TO(dsi_trans), tmp); 1093 1094 /* FIXME: DSI_PWAIT_TO */ 1095 1096 /* program turn around timeout */ 1097 tmp = intel_de_read(dev_priv, DSI_TA_TO(dsi_trans)); 1098 tmp &= 
~TA_TIMEOUT_VALUE_MASK; 1099 tmp |= TA_TIMEOUT_VALUE(ta_timeout); 1100 intel_de_write(dev_priv, DSI_TA_TO(dsi_trans), tmp); 1101 } 1102 } 1103 1104 static void gen11_dsi_config_util_pin(struct intel_encoder *encoder, 1105 bool enable) 1106 { 1107 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1108 struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder); 1109 u32 tmp; 1110 1111 /* 1112 * used as TE i/p for DSI0, 1113 * for dual link/DSI1 TE is from slave DSI1 1114 * through GPIO. 1115 */ 1116 if (is_vid_mode(intel_dsi) || (intel_dsi->ports & BIT(PORT_B))) 1117 return; 1118 1119 tmp = intel_de_read(dev_priv, UTIL_PIN_CTL); 1120 1121 if (enable) { 1122 tmp |= UTIL_PIN_DIRECTION_INPUT; 1123 tmp |= UTIL_PIN_ENABLE; 1124 } else { 1125 tmp &= ~UTIL_PIN_ENABLE; 1126 } 1127 intel_de_write(dev_priv, UTIL_PIN_CTL, tmp); 1128 } 1129 1130 static void 1131 gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder, 1132 const struct intel_crtc_state *crtc_state) 1133 { 1134 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1135 1136 /* step 4a: power up all lanes of the DDI used by DSI */ 1137 gen11_dsi_power_up_lanes(encoder); 1138 1139 /* step 4b: configure lane sequencing of the Combo-PHY transmitters */ 1140 gen11_dsi_config_phy_lanes_sequence(encoder); 1141 1142 /* step 4c: configure voltage swing and skew */ 1143 gen11_dsi_voltage_swing_program_seq(encoder); 1144 1145 /* enable DDI buffer */ 1146 gen11_dsi_enable_ddi_buffer(encoder); 1147 1148 /* setup D-PHY timings */ 1149 gen11_dsi_setup_dphy_timings(encoder, crtc_state); 1150 1151 /* Since transcoder is configured to take events from GPIO */ 1152 gen11_dsi_config_util_pin(encoder, true); 1153 1154 /* step 4h: setup DSI protocol timeouts */ 1155 gen11_dsi_setup_timeouts(encoder, crtc_state); 1156 1157 /* Step (4h, 4i, 4j, 4k): Configure transcoder */ 1158 gen11_dsi_configure_transcoder(encoder, crtc_state); 1159 1160 /* Step 4l: Gate DDI clocks */ 1161 if (IS_DISPLAY_VER(dev_priv, 11)) 
1162 gen11_dsi_gate_clocks(encoder); 1163 } 1164 1165 static void gen11_dsi_powerup_panel(struct intel_encoder *encoder) 1166 { 1167 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1168 struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder); 1169 struct mipi_dsi_device *dsi; 1170 enum port port; 1171 enum transcoder dsi_trans; 1172 u32 tmp; 1173 int ret; 1174 1175 /* set maximum return packet size */ 1176 for_each_dsi_port(port, intel_dsi->ports) { 1177 dsi_trans = dsi_port_to_transcoder(port); 1178 1179 /* 1180 * FIXME: This uses the number of DW's currently in the payload 1181 * receive queue. This is probably not what we want here. 1182 */ 1183 tmp = intel_de_read(dev_priv, DSI_CMD_RXCTL(dsi_trans)); 1184 tmp &= NUMBER_RX_PLOAD_DW_MASK; 1185 /* multiply "Number Rx Payload DW" by 4 to get max value */ 1186 tmp = tmp * 4; 1187 dsi = intel_dsi->dsi_hosts[port]->device; 1188 ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp); 1189 if (ret < 0) 1190 drm_err(&dev_priv->drm, 1191 "error setting max return pkt size%d\n", tmp); 1192 } 1193 1194 /* panel power on related mipi dsi vbt sequences */ 1195 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON); 1196 intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay); 1197 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET); 1198 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP); 1199 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON); 1200 1201 /* ensure all panel commands dispatched before enabling transcoder */ 1202 wait_for_cmds_dispatched_to_panel(encoder); 1203 } 1204 1205 static void gen11_dsi_pre_pll_enable(struct intel_atomic_state *state, 1206 struct intel_encoder *encoder, 1207 const struct intel_crtc_state *crtc_state, 1208 const struct drm_connector_state *conn_state) 1209 { 1210 /* step2: enable IO power */ 1211 gen11_dsi_enable_io_power(encoder); 1212 1213 /* step3: enable DSI PLL */ 1214 gen11_dsi_program_esc_clk_div(encoder, crtc_state); 1215 } 1216 1217 
/*
 * Pre-enable hook: map the DSI PLL, bring up the port and D-PHY, power up
 * the panel and program transcoder timings (bspec enable sequence
 * steps 3b through 6c).
 */
static void gen11_dsi_pre_enable(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config,
				 const struct drm_connector_state *conn_state)
{
	/* step3b */
	gen11_dsi_map_pll(encoder, pipe_config);

	/* step4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step5: program and powerup panel */
	gen11_dsi_powerup_panel(encoder);

	intel_dsc_enable(encoder, pipe_config);

	/* step6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);
}

/*
 * Enable hook: turn on the DSI transcoder(s), then the backlight, and
 * finally enable vblank processing for the crtc.
 */
static void gen11_dsi_enable(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);

	/* DSI never drives a PCH encoder; catch bogus state */
	drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder);

	/* step6d: enable dsi transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step7: enable backlight */
	intel_panel_enable_backlight(crtc_state, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);

	intel_crtc_vblank_on(crtc_state);
}

/*
 * Disable the DSI transcoder for each enabled port and wait for the
 * hardware to report it inactive.
 */
static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* disable transcoder */
		tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
		tmp &= ~PIPECONF_ENABLE;
		intel_de_write(dev_priv, PIPECONF(dsi_trans), tmp);

		/* wait for transcoder to be disabled */
		if (intel_de_wait_for_clear(dev_priv, PIPECONF(dsi_trans),
					    I965_PIPECONF_ACTIVE, 50))
			drm_err(&dev_priv->drm,
				"DSI trancoder not disabled\n");
	}
}

/*
 * Run the VBT panel power-off sequences and make sure all resulting DSI
 * commands have actually been dispatched to the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}

/*
 * Undo the transcoder configuration done at enable time: stop periodic
 * command-mode frame updates, put the link into ULPS, disable the DDI
 * function and, for dual link, port sync mode.
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* disable periodic update mode */
	if (is_cmd_mode(intel_dsi)) {
		for_each_dsi_port(port, intel_dsi->ports) {
			tmp = intel_de_read(dev_priv, DSI_CMD_FRMCTL(port));
			tmp &= ~DSI_PERIODIC_FRAME_UPDATE_ENABLE;
			intel_de_write(dev_priv, DSI_CMD_FRMCTL(port), tmp);
		}
	}

	/* put dsi link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		tmp &= ~LINK_ULPS_TYPE_LP11;
		intel_de_write(dev_priv, DSI_LP_MSG(dsi_trans), tmp);

		if (wait_for_us((intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans)) &
				 LINK_IN_ULPS),
				10))
			drm_err(&dev_priv->drm, "DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}

/*
 * Disable the DDI buffer for each DSI port and wait for it to go idle.
 * Clocks are ungated for the register access and gated again afterwards.
 */
static void gen11_dsi_disable_port(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum port port;

	gen11_dsi_ungate_clocks(encoder);
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
		tmp &= ~DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(port), tmp);

		if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
				 DDI_BUF_IS_IDLE),
				8))
			drm_err(&dev_priv->drm,
				"DDI port:%c buffer not idle\n",
				port_name(port));
	}
	gen11_dsi_gate_clocks(encoder);
}

/*
 * Drop the DDI IO power references taken at enable time and switch the
 * combo PHY pads back to DDI mode.
 */
static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		intel_wakeref_t wakeref;

		/* wakeref was grabbed when IO power was enabled */
		wakeref = fetch_and_zero(&intel_dsi->io_wakeref[port]);
		intel_display_power_put(dev_priv,
					port == PORT_A ?
					POWER_DOMAIN_PORT_DDI_A_IO :
					POWER_DOMAIN_PORT_DDI_B_IO,
					wakeref);
	}

	/* set mode to DDI */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, ICL_DSI_IO_MODECTL(port));
		tmp &= ~COMBO_PHY_MODE_DSI;
		intel_de_write(dev_priv, ICL_DSI_IO_MODECTL(port), tmp);
	}
}

/*
 * Disable hook: tear down in the reverse order of enable — backlight,
 * transcoder, panel power, transcoder config, port, util pin, IO power.
 */
static void gen11_dsi_disable(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_panel_disable_backlight(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	gen11_dsi_config_util_pin(encoder, false);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}

/* Post-disable hook: vblank off, then DSC and pipe scaler teardown. */
static void gen11_dsi_post_disable(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	intel_crtc_vblank_off(old_crtc_state);

	intel_dsc_disable(old_crtc_state);

	skl_scaler_disable(old_crtc_state);
}

/* Connector .mode_valid: defer to the common DSI mode validation. */
static enum drm_mode_status gen11_dsi_mode_valid(struct drm_connector *connector,
						 struct drm_display_mode *mode)
{
	/* FIXME: DSC? */
	return intel_dsi_mode_valid(connector, mode);
}

/*
 * State readout: scale the horizontal timings back up to the uncompressed
 * values when DSC was used, double them up for dual link, and derive the
 * blanking intervals from the active/total values.
 */
static void gen11_dsi_get_timings(struct intel_encoder *encoder,
				  struct intel_crtc_state *pipe_config)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;

	if (pipe_config->dsc.compressed_bpp) {
		/* inverse of the mul/div applied when programming timings */
		int div = pipe_config->dsc.compressed_bpp;
		int mul = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);

		adjusted_mode->crtc_htotal =
			DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
		adjusted_mode->crtc_hsync_start =
			DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
		adjusted_mode->crtc_hsync_end =
			DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
	}

	if (intel_dsi->dual_link) {
		adjusted_mode->crtc_hdisplay *= 2;
		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
			adjusted_mode->crtc_hdisplay -=
						intel_dsi->pixel_overlap;
		adjusted_mode->crtc_htotal *= 2;
	}
	adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hdisplay;
	adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_htotal;

	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
		if (intel_dsi->dual_link) {
			adjusted_mode->crtc_hsync_start *= 2;
			adjusted_mode->crtc_hsync_end *= 2;
		}
	}
	adjusted_mode->crtc_vblank_start = adjusted_mode->crtc_vdisplay;
	adjusted_mode->crtc_vblank_end = adjusted_mode->crtc_vtotal;
}

/*
 * Check whether periodic command-mode frame update is enabled in the
 * hardware, reading the transcoder corresponding to the active port(s).
 */
static bool gen11_dsi_is_periodic_cmd_mode(struct intel_dsi *intel_dsi)
{
	struct drm_device *dev = intel_dsi->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum transcoder dsi_trans;
	u32 val;

	if (intel_dsi->ports == BIT(PORT_B))
		dsi_trans = TRANSCODER_DSI_1;
	else
		dsi_trans = TRANSCODER_DSI_0;

	val = intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans));
	return (val & DSI_PERIODIC_FRAME_UPDATE_ENABLE);
}

/*
 * Record which TE input(s) command mode uses, based on the set of
 * enabled DSI ports.
 */
static void gen11_dsi_get_cmd_mode_config(struct intel_dsi *intel_dsi,
					  struct intel_crtc_state *pipe_config)
{
	if (intel_dsi->ports == (BIT(PORT_B) | BIT(PORT_A)))
		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE1 |
					   I915_MODE_FLAG_DSI_USE_TE0;
	else if (intel_dsi->ports == BIT(PORT_B))
		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE1;
	else
		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE0;
}

/*
 * State readout for the DSI encoder: clock, timings, output type, pipe
 * bpp and command-mode flags.
 */
static void gen11_dsi_get_config(struct intel_encoder *encoder,
				 struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);

	intel_ddi_get_clock(encoder, pipe_config, icl_ddi_combo_get_pll(encoder));

	pipe_config->hw.adjusted_mode.crtc_clock = intel_dsi->pclk;
	if (intel_dsi->dual_link)
		pipe_config->hw.adjusted_mode.crtc_clock *= 2;

	gen11_dsi_get_timings(encoder, pipe_config);
	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
	pipe_config->pipe_bpp = bdw_get_pipemisc_bpp(crtc);

	/* Get the details on which TE should be enabled */
	if (is_cmd_mode(intel_dsi))
		gen11_dsi_get_cmd_mode_config(intel_dsi, pipe_config);

	if (gen11_dsi_is_periodic_cmd_mode(intel_dsi))
		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_PERIODIC_CMD_MODE;
}

/*
 * Compute the DSC configuration from VBT parameters and the common VDSC
 * code. Returns 0 on success (including DSC simply not being used) or a
 * negative error code; only on full success is compression_enable set.
 */
static int gen11_dsi_dsc_compute_config(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	/* max DSC input bpc: 12 on display ver 12+, 10 on ver 11 */
	int dsc_max_bpc = DISPLAY_VER(dev_priv) >= 12 ? 12 : 10;
	bool use_dsc;
	int ret;

	use_dsc = intel_bios_get_dsc_params(encoder, crtc_state, dsc_max_bpc);
	if (!use_dsc)
		return 0;

	if (crtc_state->pipe_bpp < 8 * 3)
		return -EINVAL;

	/* FIXME: split only when necessary */
	if (crtc_state->dsc.slice_count > 1)
		crtc_state->dsc.dsc_split = true;

	vdsc_cfg->convert_rgb = true;

	/* FIXME: initialize from VBT */
	vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;

	ret = intel_dsc_compute_params(encoder, crtc_state);
	if (ret)
		return ret;

	/* DSI specific sanity checks on the common code */
	drm_WARN_ON(&dev_priv->drm, vdsc_cfg->vbr_enable);
	drm_WARN_ON(&dev_priv->drm, vdsc_cfg->simple_422);
	drm_WARN_ON(&dev_priv->drm,
		    vdsc_cfg->pic_width % vdsc_cfg->slice_width);
	drm_WARN_ON(&dev_priv->drm, vdsc_cfg->slice_height < 8);
	drm_WARN_ON(&dev_priv->drm,
		    vdsc_cfg->pic_height % vdsc_cfg->slice_height);

	ret = drm_dsc_compute_rc_parameters(vdsc_cfg);
	if (ret)
		return ret;

	crtc_state->dsc.compression_enable = true;

	return 0;
}

/*
 * Atomic compute_config hook: fix up the panel mode, pick the cpu
 * transcoder and pipe bpp, attempt DSC and derive the port clock.
 */
static int gen11_dsi_compute_config(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config,
				    struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
						   base);
	struct intel_connector *intel_connector = intel_dsi->attached_connector;
	const struct drm_display_mode *fixed_mode =
		intel_connector->panel.fixed_mode;
	struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	int ret;

	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	intel_fixed_panel_mode(fixed_mode, adjusted_mode);

	ret = intel_pch_panel_fitting(pipe_config, conn_state);
	if (ret)
		return ret;

	adjusted_mode->flags = 0;

	/* Dual link goes to trancoder DSI'0' */
	if (intel_dsi->ports == BIT(PORT_B))
		pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
	else
		pipe_config->cpu_transcoder = TRANSCODER_DSI_0;

	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB888)
		pipe_config->pipe_bpp = 24;
	else
		pipe_config->pipe_bpp = 18;

	pipe_config->clock_set = true;

	/* DSC failure is not fatal; we just run uncompressed */
	if (gen11_dsi_dsc_compute_config(encoder, pipe_config))
		drm_dbg_kms(&i915->drm, "Attempting to use DSC failed\n");

	pipe_config->port_clock = afe_clk(encoder, pipe_config) / 5;

	/*
	 * In case of TE GATE cmd mode, we
	 * receive TE from the slave if
	 * dual link is enabled
	 */
	if (is_cmd_mode(intel_dsi))
		gen11_dsi_get_cmd_mode_config(intel_dsi, pipe_config);

	return 0;
}

/* Report the IO power domains used by the DSI ports. */
static void gen11_dsi_get_power_domains(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);

	get_dsi_io_power_domains(i915,
				 enc_to_intel_dsi(encoder));
}

/*
 * Hardware state readout: determine whether the encoder is enabled and
 * which pipe it is driving, based on TRANS_DDI_FUNC_CTL and PIPECONF.
 */
static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
				   enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum transcoder dsi_trans;
	intel_wakeref_t wakeref;
	enum port port;
	bool ret = false;
	u32 tmp;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		case TRANS_DDI_EDP_INPUT_A_ON:
			*pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe = PIPE_C;
			break;
		case TRANS_DDI_EDP_INPUT_D_ONOFF:
			*pipe = PIPE_D;
			break;
		default:
			drm_err(&dev_priv->drm, "Invalid PIPE input\n");
			goto out;
		}

		tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
		ret = tmp & PIPECONF_ENABLE;
	}
out:
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
	return ret;
}

/*
 * Force a full modeset on initial commit when DSC is enabled, since DSC
 * state cannot be faithfully carried over by a fastset.
 */
static bool gen11_dsi_initial_fastset_check(struct intel_encoder *encoder,
					    struct intel_crtc_state *crtc_state)
{
	if (crtc_state->dsc.compression_enable) {
		drm_dbg_kms(encoder->base.dev, "Forcing full modeset due to DSC being enabled\n");
		crtc_state->uapi.mode_changed = true;

		return false;
	}

	return true;
}

/* Encoder destroy callback: defer to the common encoder teardown. */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}

static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
	.destroy = gen11_dsi_encoder_destroy,
};

static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
	.detect = intel_panel_detect,
	.late_register = intel_connector_register,
	.early_unregister = intel_connector_unregister,
	.destroy = intel_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};

static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
	.get_modes = intel_dsi_get_modes,
	.mode_valid = gen11_dsi_mode_valid,
	.atomic_check = intel_digital_connector_atomic_check,
};

/* DSI host attach: nothing to do, the host is set up at init time. */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}

/* DSI host detach: nothing to do. */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}

/*
 * DSI host transfer: packetize the message and send the header, plus the
 * payload for long packets. Returns the number of bytes sent or a
 * negative error code. Payload reads are not implemented.
 */
static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
				       const struct mipi_dsi_msg *msg)
{
	struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
	struct mipi_dsi_packet dsi_pkt;
	ssize_t ret;
	bool enable_lpdt = false;

	ret = mipi_dsi_create_packet(&dsi_pkt, msg);
	if (ret < 0)
		return ret;

	if (msg->flags & MIPI_DSI_MSG_USE_LPM)
		enable_lpdt = true;

	/* send packet header */
	ret = dsi_send_pkt_hdr(intel_dsi_host, dsi_pkt, enable_lpdt);
	if (ret < 0)
		return ret;

	/* only long packet contains payload */
	if (mipi_dsi_packet_format_is_long(msg->type)) {
		ret = dsi_send_pkt_payld(intel_dsi_host, dsi_pkt);
		if (ret < 0)
			return ret;
	}

	//TODO: add payload receive code if needed

	ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;

	return ret;
}

static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
	.attach = gen11_dsi_host_attach,
	.detach = gen11_dsi_host_detach,
	.transfer = gen11_dsi_host_transfer,
};

/* Maximum register field values for the ICL D-PHY timing counters */
#define ICL_PREPARE_CNT_MAX	0x7
#define ICL_CLK_ZERO_CNT_MAX	0xf
#define ICL_TRAIL_CNT_MAX	0x7
#define ICL_TCLK_PRE_CNT_MAX	0x3
#define ICL_TCLK_POST_CNT_MAX	0x7
#define ICL_HS_ZERO_CNT_MAX	0xf
#define ICL_EXIT_ZERO_CNT_MAX	0x7

/*
 * Convert the VBT D-PHY timing parameters (in ns) to escape-clock counts
 * and pack them into the clock-lane and data-lane override registers.
 * Each count is clamped to its hardware field maximum with a debug note.
 */
static void icl_dphy_param_init(struct intel_dsi *intel_dsi)
{
	struct drm_device *dev = intel_dsi->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct mipi_config *mipi_config = dev_priv->vbt.dsi.config;
	u32 tlpx_ns;
	u32 prepare_cnt, exit_zero_cnt, clk_zero_cnt, trail_cnt;
	u32 ths_prepare_ns, tclk_trail_ns;
	u32 hs_zero_cnt;
	u32 tclk_pre_cnt, tclk_post_cnt;

	tlpx_ns = intel_dsi_tlpx_ns(intel_dsi);

	tclk_trail_ns = max(mipi_config->tclk_trail, mipi_config->ths_trail);
	ths_prepare_ns = max(mipi_config->ths_prepare,
			     mipi_config->tclk_prepare);

	/*
	 * prepare cnt in escape clocks
	 * this field represents a hexadecimal value with a precision
	 * of 1.2 – i.e. the most significant bit is the integer
	 * and the least significant 2 bits are fraction bits.
	 * so, the field can represent a range of 0.25 to 1.75
	 */
	prepare_cnt = DIV_ROUND_UP(ths_prepare_ns * 4, tlpx_ns);
	if (prepare_cnt > ICL_PREPARE_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm, "prepare_cnt out of range (%d)\n",
			    prepare_cnt);
		prepare_cnt = ICL_PREPARE_CNT_MAX;
	}

	/* clk zero count in escape clocks */
	clk_zero_cnt = DIV_ROUND_UP(mipi_config->tclk_prepare_clkzero -
				    ths_prepare_ns, tlpx_ns);
	if (clk_zero_cnt > ICL_CLK_ZERO_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm,
			    "clk_zero_cnt out of range (%d)\n", clk_zero_cnt);
		clk_zero_cnt = ICL_CLK_ZERO_CNT_MAX;
	}

	/* trail cnt in escape clocks*/
	trail_cnt = DIV_ROUND_UP(tclk_trail_ns, tlpx_ns);
	if (trail_cnt > ICL_TRAIL_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm, "trail_cnt out of range (%d)\n",
			    trail_cnt);
		trail_cnt = ICL_TRAIL_CNT_MAX;
	}

	/* tclk pre count in escape clocks */
	tclk_pre_cnt = DIV_ROUND_UP(mipi_config->tclk_pre, tlpx_ns);
	if (tclk_pre_cnt > ICL_TCLK_PRE_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm,
			    "tclk_pre_cnt out of range (%d)\n", tclk_pre_cnt);
		tclk_pre_cnt = ICL_TCLK_PRE_CNT_MAX;
	}

	/* tclk post count in escape clocks */
	tclk_post_cnt = DIV_ROUND_UP(mipi_config->tclk_post, tlpx_ns);
	if (tclk_post_cnt > ICL_TCLK_POST_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm,
			    "tclk_post_cnt out of range (%d)\n",
			    tclk_post_cnt);
		tclk_post_cnt = ICL_TCLK_POST_CNT_MAX;
	}

	/* hs zero cnt in escape clocks */
	hs_zero_cnt = DIV_ROUND_UP(mipi_config->ths_prepare_hszero -
				   ths_prepare_ns, tlpx_ns);
	if (hs_zero_cnt > ICL_HS_ZERO_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm, "hs_zero_cnt out of range (%d)\n",
			    hs_zero_cnt);
		hs_zero_cnt = ICL_HS_ZERO_CNT_MAX;
	}

	/* hs exit zero cnt in escape clocks */
	exit_zero_cnt = DIV_ROUND_UP(mipi_config->ths_exit, tlpx_ns);
	if (exit_zero_cnt > ICL_EXIT_ZERO_CNT_MAX) {
		drm_dbg_kms(&dev_priv->drm,
			    "exit_zero_cnt out of range (%d)\n",
			    exit_zero_cnt);
		exit_zero_cnt = ICL_EXIT_ZERO_CNT_MAX;
	}

	/* clock lane dphy timings */
	intel_dsi->dphy_reg = (CLK_PREPARE_OVERRIDE |
			       CLK_PREPARE(prepare_cnt) |
			       CLK_ZERO_OVERRIDE |
			       CLK_ZERO(clk_zero_cnt) |
			       CLK_PRE_OVERRIDE |
			       CLK_PRE(tclk_pre_cnt) |
			       CLK_POST_OVERRIDE |
			       CLK_POST(tclk_post_cnt) |
			       CLK_TRAIL_OVERRIDE |
			       CLK_TRAIL(trail_cnt));

	/* data lanes dphy timings */
	intel_dsi->dphy_data_lane_reg = (HS_PREPARE_OVERRIDE |
					 HS_PREPARE(prepare_cnt) |
					 HS_ZERO_OVERRIDE |
					 HS_ZERO(hs_zero_cnt) |
					 HS_TRAIL_OVERRIDE |
					 HS_TRAIL(trail_cnt) |
					 HS_EXIT_OVERRIDE |
					 HS_EXIT(exit_zero_cnt));

	intel_dsi_log_params(intel_dsi);
}

/*
 * Attach the scaling-mode and panel-orientation properties to the DSI
 * connector; default scaling is aspect-preserving.
 */
static void icl_dsi_add_properties(struct intel_connector *connector)
{
	u32 allowed_scalers;

	allowed_scalers = BIT(DRM_MODE_SCALE_ASPECT) |
			  BIT(DRM_MODE_SCALE_FULLSCREEN) |
			  BIT(DRM_MODE_SCALE_CENTER);

	drm_connector_attach_scaling_mode_property(&connector->base,
						   allowed_scalers);

	connector->base.state->scaling_mode = DRM_MODE_SCALE_ASPECT;

	drm_connector_set_panel_orientation_with_quirk(&connector->base,
						       intel_dsi_get_panel_orientation(connector),
						       connector->panel.fixed_mode->hdisplay,
						       connector->panel.fixed_mode->vdisplay);
}

/*
 * Probe and register the Gen11 DSI encoder and its connector, based on
 * VBT data: allocates the encoder/connector, wires up the hooks, reads
 * the fixed mode, creates the per-port DSI hosts and initializes the
 * D-PHY parameters. Silently returns if VBT reports no DSI panel.
 */
void icl_dsi_init(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_dsi *intel_dsi;
	struct intel_encoder *encoder;
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	struct drm_display_mode *fixed_mode;
	enum port port;

	if (!intel_bios_is_dsi_present(dev_priv, &port))
		return;

	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
	if (!intel_dsi)
		return;

	intel_connector = intel_connector_alloc();
	if (!intel_connector) {
		kfree(intel_dsi);
		return;
	}

	encoder = &intel_dsi->base;
	intel_dsi->attached_connector = intel_connector;
	connector = &intel_connector->base;

	/* register DSI encoder with DRM subsystem */
	drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));

	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
	encoder->pre_enable = gen11_dsi_pre_enable;
	encoder->enable = gen11_dsi_enable;
	encoder->disable = gen11_dsi_disable;
	encoder->post_disable = gen11_dsi_post_disable;
	encoder->port = port;
	encoder->get_config = gen11_dsi_get_config;
	encoder->update_pipe = intel_panel_update_backlight;
	encoder->compute_config = gen11_dsi_compute_config;
	encoder->get_hw_state = gen11_dsi_get_hw_state;
	encoder->initial_fastset_check = gen11_dsi_initial_fastset_check;
	encoder->type = INTEL_OUTPUT_DSI;
	encoder->cloneable = 0;
	encoder->pipe_mask = ~0;
	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
	encoder->get_power_domains = gen11_dsi_get_power_domains;
	encoder->disable_clock = gen11_dsi_gate_clocks;
	encoder->is_clock_enabled = gen11_dsi_is_clock_enabled;

	/* register DSI connector with DRM subsystem */
	drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
			   DRM_MODE_CONNECTOR_DSI);
	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* attach connector to encoder */
	intel_connector_attach_encoder(intel_connector, encoder);

	mutex_lock(&dev->mode_config.mutex);
	fixed_mode = intel_panel_vbt_fixed_mode(intel_connector);
	mutex_unlock(&dev->mode_config.mutex);

	if (!fixed_mode) {
		drm_err(&dev_priv->drm, "DSI fixed mode info missing\n");
		goto err;
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, NULL);
	intel_panel_setup_backlight(connector, INVALID_PIPE);

	if (dev_priv->vbt.dsi.config->dual_link)
		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
	else
		intel_dsi->ports = BIT(port);

	intel_dsi->dcs_backlight_ports = dev_priv->vbt.dsi.bl_ports;
	intel_dsi->dcs_cabc_ports = dev_priv->vbt.dsi.cabc_ports;

	for_each_dsi_port(port, intel_dsi->ports) {
		struct intel_dsi_host *host;

		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
		if (!host)
			goto err;

		intel_dsi->dsi_hosts[port] = host;
	}

	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
		drm_dbg_kms(&dev_priv->drm, "no device found\n");
		goto err;
	}

	icl_dphy_param_init(intel_dsi);

	icl_dsi_add_properties(intel_connector);
	return;

err:
	drm_connector_cleanup(connector);
	drm_encoder_cleanup(&encoder->base);
	kfree(intel_dsi);
	kfree(intel_connector);
}