// SPDX-License-Identifier: MIT
/*
 * Copyright © 2018 Intel Corporation
 *
 * Author: Gaurav K Singh <gaurav.k.singh@intel.com>
 *         Manasi Navare <manasi.d.navare@intel.com>
 */
#include <linux/limits.h>

#include <drm/display/drm_dsc_helper.h>
#include <drm/drm_fixed.h>

#include "i915_drv.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dsi.h"
#include "intel_qp_tables.h"
#include "intel_vdsc.h"
#include "intel_vdsc_regs.h"

bool intel_dsc_source_support(const struct intel_crtc_state *crtc_state)
{
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_DSC(i915))
		return false;

	if (DISPLAY_VER(i915) == 11 && cpu_transcoder == TRANSCODER_A)
		return false;

	return true;
}

static bool is_pipe_dsc(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);

	if (DISPLAY_VER(i915) >= 12)
		return true;

	if (cpu_transcoder == TRANSCODER_EDP ||
	    cpu_transcoder == TRANSCODER_DSI_0 ||
	    cpu_transcoder == TRANSCODER_DSI_1)
		return false;

	/* There's no pipe A DSC engine on ICL */
	drm_WARN_ON(&i915->drm, crtc->pipe == PIPE_A);

	return true;
}

static void
intel_vdsc_set_min_max_qp(struct drm_dsc_config *vdsc_cfg, int buf,
			  int bpp)
{
	int bpc = vdsc_cfg->bits_per_component;

	/* Read range_min_qp and range_max_qp from the qp tables */
	vdsc_cfg->rc_range_params[buf].range_min_qp =
		intel_lookup_range_min_qp(bpc, buf, bpp, vdsc_cfg->native_420);
	vdsc_cfg->rc_range_params[buf].range_max_qp =
		intel_lookup_range_max_qp(bpc, buf, bpp, vdsc_cfg->native_420);
}

/*
 * We are using the method provided in the DSC 1.2a C-Model in codec_main.c.
 * That method uses a common formula to derive values for any combination of
 * DSC variables. The formula approach may yield slight differences in the
 * derived PPS parameters from the original parameter sets. These differences
 * are not consequential to the coding performance, because all parameter sets
 * have been shown to produce visually lossless quality (it provides the same
 * PPS values as the DSCParameterValuesVESA V1-2 spreadsheet).
 */
static void
calculate_rc_params(struct drm_dsc_config *vdsc_cfg)
{
	int bpp = fxp_q4_to_int(vdsc_cfg->bits_per_pixel);
	int bpc = vdsc_cfg->bits_per_component;
	int qp_bpc_modifier = (bpc - 8) * 2;
	int uncompressed_bpg_rate;
	int first_line_bpg_offset;
	u32 res, buf_i, bpp_i;

	if (vdsc_cfg->slice_height >= 8)
		first_line_bpg_offset =
			12 + (9 * min(34, vdsc_cfg->slice_height - 8)) / 100;
	else
		first_line_bpg_offset = 2 * (vdsc_cfg->slice_height - 1);

	uncompressed_bpg_rate = (3 * bpc + (vdsc_cfg->convert_rgb ? 0 : 2)) * 3;
	vdsc_cfg->first_line_bpg_offset = clamp(first_line_bpg_offset, 0,
						uncompressed_bpg_rate - 3 * bpp);

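	/*
	 * Illustrative example (hypothetical slice height, not a value taken
	 * from the spec tables): with slice_height = 108 the branch above
	 * gives first_line_bpg_offset = 12 + (9 * min(34, 100)) / 100 = 15,
	 * which is then clamped to at most uncompressed_bpg_rate - 3 * bpp.
	 */
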
	/*
	 * According to the DSC 1.2 spec, Section 4.1, if native_420 is set:
	 * - second_line_bpg_offset is 12 in general, and equal to
	 *   2 * (slice_height - 1) if slice height < 8.
	 * - second_line_offset_adj is 512, as shown by empirical values to
	 *   yield the best chroma preservation in the second line.
	 * - nsl_bpg_offset is calculated as second_line_bpg_offset /
	 *   (slice_height - 1) and rounded up to 16 fractional bits; we left
	 *   shift the second line offset by 11 to preserve 11 fractional bits.
	 */
	if (vdsc_cfg->native_420) {
		if (vdsc_cfg->slice_height >= 8)
			vdsc_cfg->second_line_bpg_offset = 12;
		else
			vdsc_cfg->second_line_bpg_offset =
				2 * (vdsc_cfg->slice_height - 1);

		vdsc_cfg->second_line_offset_adj = 512;
		vdsc_cfg->nsl_bpg_offset = DIV_ROUND_UP(vdsc_cfg->second_line_bpg_offset << 11,
							vdsc_cfg->slice_height - 1);
	}

	/* Our hw supports only 444 modes as of today */
	if (bpp >= 12)
		vdsc_cfg->initial_offset = 2048;
	else if (bpp >= 10)
		vdsc_cfg->initial_offset = 5632 - DIV_ROUND_UP(((bpp - 10) * 3584), 2);
	else if (bpp >= 8)
		vdsc_cfg->initial_offset = 6144 - DIV_ROUND_UP(((bpp - 8) * 512), 2);
	else
		vdsc_cfg->initial_offset = 6144;

	/* initial_xmit_delay = rc_model_size/2/compression_bpp */
	vdsc_cfg->initial_xmit_delay = DIV_ROUND_UP(DSC_RC_MODEL_SIZE_CONST, 2 * bpp);

	vdsc_cfg->flatness_min_qp = 3 + qp_bpc_modifier;
	vdsc_cfg->flatness_max_qp = 12 + qp_bpc_modifier;

	vdsc_cfg->rc_quant_incr_limit0 = 11 + qp_bpc_modifier;
	vdsc_cfg->rc_quant_incr_limit1 = 11 + qp_bpc_modifier;

	if (vdsc_cfg->native_420) {
		static const s8 ofs_und4[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
		};
		static const s8 ofs_und5[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und6[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und8[] = {
			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
		};
		/*
		 * For the 420 format, since bits_per_pixel (bpp) is set to
		 * target bpp * 2, QP table values for target bpp 4.0 to 4.4375
		 * (rounded to 4.0) are actually for bpp 8 to 8.875 (rounded to
		 * 4.0 * 2, i.e. 8). Similarly, values for target bpp 4.5 to
		 * 4.8375 (rounded to 4.5) are for bpp 9 to 9.875 (rounded to
		 * 4.5 * 2, i.e. 9), and so on.
		 */
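		/*
		 * For example, a 6 bpp target is stored here as
		 * bits_per_pixel == 12 (doubled for native 4:2:0), so
		 * bpp_i = 12 - 8 = 4.
		 */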
		bpp_i = bpp - 8;
		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
			u8 range_bpg_offset;

			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);

			/* Calculate range_bpg_offset */
			if (bpp <= 8) {
				range_bpg_offset = ofs_und4[buf_i];
			} else if (bpp <= 10) {
				res = DIV_ROUND_UP(((bpp - 8) *
						    (ofs_und5[buf_i] - ofs_und4[buf_i])), 2);
				range_bpg_offset = ofs_und4[buf_i] + res;
			} else if (bpp <= 12) {
				res = DIV_ROUND_UP(((bpp - 10) *
						    (ofs_und6[buf_i] - ofs_und5[buf_i])), 2);
				range_bpg_offset = ofs_und5[buf_i] + res;
			} else if (bpp <= 16) {
				res = DIV_ROUND_UP(((bpp - 12) *
						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 4);
				range_bpg_offset = ofs_und6[buf_i] + res;
			} else {
				range_bpg_offset = ofs_und8[buf_i];
			}

			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
		}
	} else {
		/* fractional bpp part * 10000 (for precision up to 4 decimal places) */
		int fractional_bits = fxp_q4_to_frac(vdsc_cfg->bits_per_pixel);

		static const s8 ofs_und6[] = {
			0, -2, -2, -4, -6, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
		};
		static const s8 ofs_und8[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und12[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und15[] = {
			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
		};

		/*
		 * QP table rows have values in increments of 0.5.
		 * So 6.0 bpp to 6.4375 will have index 0, 6.5 to 6.9375 will
		 * have index 1, and so on.
		 * A 0.5 fractional part with 4 decimal precision becomes 5000.
		 */
		bpp_i = ((bpp - 6) + (fractional_bits < 5000 ? 0 : 1));

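		/*
		 * For intermediate bpp values, range_bpg_offset below is
		 * linearly interpolated (rounded up) between the per-bpp
		 * anchor tables ofs_und6/ofs_und8/ofs_und12/ofs_und15.
		 */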
		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
			u8 range_bpg_offset;

			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);

			/* Calculate range_bpg_offset */
			if (bpp <= 6) {
				range_bpg_offset = ofs_und6[buf_i];
			} else if (bpp <= 8) {
				res = DIV_ROUND_UP(((bpp - 6) *
						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 2);
				range_bpg_offset = ofs_und6[buf_i] + res;
			} else if (bpp <= 12) {
				range_bpg_offset = ofs_und8[buf_i];
			} else if (bpp <= 15) {
				res = DIV_ROUND_UP(((bpp - 12) *
						    (ofs_und15[buf_i] - ofs_und12[buf_i])), 3);
				range_bpg_offset = ofs_und12[buf_i] + res;
			} else {
				range_bpg_offset = ofs_und15[buf_i];
			}

			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
		}
	}
}

static int intel_dsc_slice_dimensions_valid(struct intel_crtc_state *pipe_config,
					    struct drm_dsc_config *vdsc_cfg)
{
	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_RGB ||
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) {
		if (vdsc_cfg->slice_height > 4095)
			return -EINVAL;
		if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 15000)
			return -EINVAL;
	} else if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		if (vdsc_cfg->slice_width % 2)
			return -EINVAL;
		if (vdsc_cfg->slice_height % 2)
			return -EINVAL;
		if (vdsc_cfg->slice_height > 4094)
			return -EINVAL;
		if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 30000)
			return -EINVAL;
	}

	return 0;
}

int intel_dsc_compute_params(struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct drm_dsc_config *vdsc_cfg = &pipe_config->dsc.config;
	u16 compressed_bpp = fxp_q4_to_int(pipe_config->dsc.compressed_bpp_x16);
	int err;
	int ret;

	vdsc_cfg->pic_width = pipe_config->hw.adjusted_mode.crtc_hdisplay;
	vdsc_cfg->slice_width = DIV_ROUND_UP(vdsc_cfg->pic_width,
					     pipe_config->dsc.slice_count);

	err = intel_dsc_slice_dimensions_valid(pipe_config, vdsc_cfg);

	if (err) {
		drm_dbg_kms(&dev_priv->drm, "Slice dimension requirements not met\n");
		return err;
	}

	/*
	 * According to the DSC 1.2 spec, if the colorspace is YCbCr then
	 * convert_rgb is 0, else 1.
	 */
	vdsc_cfg->convert_rgb = pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR420 &&
				pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR444;

	if (DISPLAY_VER(dev_priv) >= 14 &&
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		vdsc_cfg->native_420 = true;
	/* We do not support YCbCr422 as of now */
	vdsc_cfg->native_422 = false;
	vdsc_cfg->simple_422 = false;
	/* Gen 11 does not support VBR */
	vdsc_cfg->vbr_enable = false;

	vdsc_cfg->bits_per_pixel = pipe_config->dsc.compressed_bpp_x16;

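	/*
	 * Note: compressed_bpp_x16, and hence bits_per_pixel here, is in
	 * 1/16th-of-a-bit units (4 fractional bits), matching the DSC PPS
	 * bits_per_pixel encoding, i.e. 8 bpp is stored as 128.
	 */
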
	/*
	 * According to the DSC 1.2 spec, Section 4.1, if native_420 is set
	 * we need to double the current bpp.
	 */
	if (vdsc_cfg->native_420)
		vdsc_cfg->bits_per_pixel <<= 1;

	vdsc_cfg->bits_per_component = pipe_config->pipe_bpp / 3;

	drm_dsc_set_rc_buf_thresh(vdsc_cfg);

	/*
	 * From XE_LPD onwards we support compression bpps in steps of 1
	 * up to uncompressed bpp-1, hence add calculations for all the rc
	 * parameters.
	 */
	if (DISPLAY_VER(dev_priv) >= 13) {
		calculate_rc_params(vdsc_cfg);
	} else {
		if ((compressed_bpp == 8 ||
		     compressed_bpp == 12) &&
		    (vdsc_cfg->bits_per_component == 8 ||
		     vdsc_cfg->bits_per_component == 10 ||
		     vdsc_cfg->bits_per_component == 12))
			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_1_PRE_SCR);
		else
			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_2_444);

		if (ret)
			return ret;
	}

	/*
	 * The BitsPerComponent value determines mux_word_size:
	 * when BitsPerComponent is 10bpc or less, muxWordSize will be equal
	 * to 48 bits, otherwise 64.
	 */
	if (vdsc_cfg->bits_per_component <= 10)
		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_8_10_BPC;
	else
		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_12_BPC;

	/* InitialScaleValue is a 6 bit value with 3 fractional bits (U3.3) */
	vdsc_cfg->initial_scale_value = (vdsc_cfg->rc_model_size << 3) /
		(vdsc_cfg->rc_model_size - vdsc_cfg->initial_offset);

	return 0;
}

enum intel_display_power_domain
intel_dsc_power_domain(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * VDSC/joining uses a separate power well, PW2, and requires
	 * POWER_DOMAIN_TRANSCODER_VDSC_PW2 power domain in two cases:
	 *
	 *  - ICL eDP/DSI transcoder
	 *  - Display version 12 (except RKL) pipe A
	 *
	 * For any other pipe, VDSC/joining uses the power well associated with
	 * the pipe in use. Hence another reference on the pipe power domain
	 * will suffice. (Except no VDSC/joining on ICL pipe A.)
	 */
	if (DISPLAY_VER(i915) == 12 && !IS_ROCKETLAKE(i915) && pipe == PIPE_A)
		return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
	else if (is_pipe_dsc(crtc, cpu_transcoder))
		return POWER_DOMAIN_PIPE(pipe);
	else
		return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
}

static int intel_dsc_get_vdsc_per_pipe(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->dsc.dsc_split ? 2 : 1;
}

int intel_dsc_get_num_vdsc_instances(const struct intel_crtc_state *crtc_state)
{
	int num_vdsc_instances = intel_dsc_get_vdsc_per_pipe(crtc_state);

	if (crtc_state->joiner_pipes)
		num_vdsc_instances *= 2;

	return num_vdsc_instances;
}

static void intel_dsc_get_pps_reg(const struct intel_crtc_state *crtc_state, int pps,
				  i915_reg_t *dsc_reg, int dsc_reg_num)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	bool pipe_dsc;

	pipe_dsc = is_pipe_dsc(crtc, cpu_transcoder);

	if (dsc_reg_num >= 3)
		MISSING_CASE(dsc_reg_num);
	if (dsc_reg_num >= 2)
		dsc_reg[1] = pipe_dsc ? ICL_DSC1_PPS(pipe, pps) : DSCC_PPS(pps);
	if (dsc_reg_num >= 1)
		dsc_reg[0] = pipe_dsc ? ICL_DSC0_PPS(pipe, pps) : DSCA_PPS(pps);
}

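/*
 * Each PPS parameter is mirrored to one register per VDSC engine in use:
 * DSCA/DSCC when the DSC engine is attached to the eDP/DSI transcoder (ICL),
 * or the per-pipe ICL_DSC0/ICL_DSC1 instances otherwise. Both engines of a
 * pipe are always programmed with identical values.
 */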
static void intel_dsc_pps_write(const struct intel_crtc_state *crtc_state,
				int pps, u32 pps_val)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	i915_reg_t dsc_reg[2];
	int i, vdsc_per_pipe, dsc_reg_num;

	vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
	dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);

	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);

	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);

	for (i = 0; i < dsc_reg_num; i++)
		intel_de_write(i915, dsc_reg[i], pps_val);
}

static void intel_dsc_pps_configure(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	u32 pps_val;
	u32 rc_buf_thresh_dword[4];
	u32 rc_range_params_dword[8];
	int i = 0;
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);

	/* PPS 0 */
	pps_val = DSC_PPS0_VER_MAJOR(1) |
		DSC_PPS0_VER_MINOR(vdsc_cfg->dsc_version_minor) |
		DSC_PPS0_BPC(vdsc_cfg->bits_per_component) |
		DSC_PPS0_LINE_BUF_DEPTH(vdsc_cfg->line_buf_depth);
	if (vdsc_cfg->dsc_version_minor == 2) {
		pps_val |= DSC_PPS0_ALT_ICH_SEL;
		if (vdsc_cfg->native_420)
			pps_val |= DSC_PPS0_NATIVE_420_ENABLE;
		if (vdsc_cfg->native_422)
			pps_val |= DSC_PPS0_NATIVE_422_ENABLE;
	}
	if (vdsc_cfg->block_pred_enable)
		pps_val |= DSC_PPS0_BLOCK_PREDICTION;
	if (vdsc_cfg->convert_rgb)
		pps_val |= DSC_PPS0_COLOR_SPACE_CONVERSION;
	if (vdsc_cfg->simple_422)
		pps_val |= DSC_PPS0_422_ENABLE;
	if (vdsc_cfg->vbr_enable)
		pps_val |= DSC_PPS0_VBR_ENABLE;
	intel_dsc_pps_write(crtc_state, 0, pps_val);

	/* PPS 1 */
	pps_val = DSC_PPS1_BPP(vdsc_cfg->bits_per_pixel);
	intel_dsc_pps_write(crtc_state, 1, pps_val);

	/* PPS 2 */
	pps_val = DSC_PPS2_PIC_HEIGHT(vdsc_cfg->pic_height) |
		DSC_PPS2_PIC_WIDTH(vdsc_cfg->pic_width / num_vdsc_instances);
	intel_dsc_pps_write(crtc_state, 2, pps_val);

	/* PPS 3 */
	pps_val = DSC_PPS3_SLICE_HEIGHT(vdsc_cfg->slice_height) |
		DSC_PPS3_SLICE_WIDTH(vdsc_cfg->slice_width);
	intel_dsc_pps_write(crtc_state, 3, pps_val);

	/* PPS 4 */
	pps_val = DSC_PPS4_INITIAL_XMIT_DELAY(vdsc_cfg->initial_xmit_delay) |
		DSC_PPS4_INITIAL_DEC_DELAY(vdsc_cfg->initial_dec_delay);
	intel_dsc_pps_write(crtc_state, 4, pps_val);

	/* PPS 5 */
	pps_val = DSC_PPS5_SCALE_INC_INT(vdsc_cfg->scale_increment_interval) |
		DSC_PPS5_SCALE_DEC_INT(vdsc_cfg->scale_decrement_interval);
	intel_dsc_pps_write(crtc_state, 5, pps_val);

	/* PPS 6 */
	pps_val = DSC_PPS6_INITIAL_SCALE_VALUE(vdsc_cfg->initial_scale_value) |
		DSC_PPS6_FIRST_LINE_BPG_OFFSET(vdsc_cfg->first_line_bpg_offset) |
		DSC_PPS6_FLATNESS_MIN_QP(vdsc_cfg->flatness_min_qp) |
		DSC_PPS6_FLATNESS_MAX_QP(vdsc_cfg->flatness_max_qp);
	intel_dsc_pps_write(crtc_state, 6, pps_val);

	/* PPS 7 */
	pps_val = DSC_PPS7_SLICE_BPG_OFFSET(vdsc_cfg->slice_bpg_offset) |
		DSC_PPS7_NFL_BPG_OFFSET(vdsc_cfg->nfl_bpg_offset);
	intel_dsc_pps_write(crtc_state, 7, pps_val);

	/* PPS 8 */
	pps_val = DSC_PPS8_FINAL_OFFSET(vdsc_cfg->final_offset) |
		DSC_PPS8_INITIAL_OFFSET(vdsc_cfg->initial_offset);
	intel_dsc_pps_write(crtc_state, 8, pps_val);

	/* PPS 9 */
	pps_val = DSC_PPS9_RC_MODEL_SIZE(vdsc_cfg->rc_model_size) |
		DSC_PPS9_RC_EDGE_FACTOR(DSC_RC_EDGE_FACTOR_CONST);
	intel_dsc_pps_write(crtc_state, 9, pps_val);

	/* PPS 10 */
	pps_val = DSC_PPS10_RC_QUANT_INC_LIMIT0(vdsc_cfg->rc_quant_incr_limit0) |
		DSC_PPS10_RC_QUANT_INC_LIMIT1(vdsc_cfg->rc_quant_incr_limit1) |
		DSC_PPS10_RC_TARGET_OFF_HIGH(DSC_RC_TGT_OFFSET_HI_CONST) |
		DSC_PPS10_RC_TARGET_OFF_LOW(DSC_RC_TGT_OFFSET_LO_CONST);
	intel_dsc_pps_write(crtc_state, 10, pps_val);

	/* PPS 16 */
	pps_val = DSC_PPS16_SLICE_CHUNK_SIZE(vdsc_cfg->slice_chunk_size) |
		DSC_PPS16_SLICE_PER_LINE((vdsc_cfg->pic_width / num_vdsc_instances) /
					 vdsc_cfg->slice_width) |
		DSC_PPS16_SLICE_ROW_PER_FRAME(vdsc_cfg->pic_height /
					      vdsc_cfg->slice_height);
	intel_dsc_pps_write(crtc_state, 16, pps_val);

	if (DISPLAY_VER(dev_priv) >= 14) {
		/* PPS 17 */
		pps_val = DSC_PPS17_SL_BPG_OFFSET(vdsc_cfg->second_line_bpg_offset);
		intel_dsc_pps_write(crtc_state, 17, pps_val);

		/* PPS 18 */
		pps_val = DSC_PPS18_NSL_BPG_OFFSET(vdsc_cfg->nsl_bpg_offset) |
			DSC_PPS18_SL_OFFSET_ADJ(vdsc_cfg->second_line_offset_adj);
		intel_dsc_pps_write(crtc_state, 18, pps_val);
	}

	/* Populate the RC_BUF_THRESH registers */
	memset(rc_buf_thresh_dword, 0, sizeof(rc_buf_thresh_dword));
	for (i = 0; i < DSC_NUM_BUF_RANGES - 1; i++)
		rc_buf_thresh_dword[i / 4] |=
			(u32)(vdsc_cfg->rc_buf_thresh[i] <<
			      BITS_PER_BYTE * (i % 4));
	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0,
			       rc_buf_thresh_dword[0]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0_UDW,
			       rc_buf_thresh_dword[1]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1,
			       rc_buf_thresh_dword[2]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1_UDW,
			       rc_buf_thresh_dword[3]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0,
				       rc_buf_thresh_dword[0]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0_UDW,
				       rc_buf_thresh_dword[1]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1,
				       rc_buf_thresh_dword[2]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1_UDW,
				       rc_buf_thresh_dword[3]);
		}
	} else {
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0(pipe),
			       rc_buf_thresh_dword[0]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0_UDW(pipe),
			       rc_buf_thresh_dword[1]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1(pipe),
			       rc_buf_thresh_dword[2]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1_UDW(pipe),
			       rc_buf_thresh_dword[3]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_0(pipe),
				       rc_buf_thresh_dword[0]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_0_UDW(pipe),
				       rc_buf_thresh_dword[1]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_1(pipe),
				       rc_buf_thresh_dword[2]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_1_UDW(pipe),
				       rc_buf_thresh_dword[3]);
		}
	}

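	/*
	 * Each of the 15 RC ranges packs range_min_qp, range_max_qp and
	 * range_bpg_offset into a 16-bit value, two ranges per dword, so the
	 * whole set spans the eight RC_RANGE_PARAMETERS dwords written below.
	 */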
	/* Populate the RC_RANGE_PARAMETERS registers */
	memset(rc_range_params_dword, 0, sizeof(rc_range_params_dword));
	for (i = 0; i < DSC_NUM_BUF_RANGES; i++)
		rc_range_params_dword[i / 2] |=
			(u32)(((vdsc_cfg->rc_range_params[i].range_bpg_offset <<
				RC_BPG_OFFSET_SHIFT) |
			       (vdsc_cfg->rc_range_params[i].range_max_qp <<
				RC_MAX_QP_SHIFT) |
			       (vdsc_cfg->rc_range_params[i].range_min_qp <<
				RC_MIN_QP_SHIFT)) << 16 * (i % 2));
	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0,
			       rc_range_params_dword[0]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0_UDW,
			       rc_range_params_dword[1]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1,
			       rc_range_params_dword[2]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1_UDW,
			       rc_range_params_dword[3]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2,
			       rc_range_params_dword[4]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2_UDW,
			       rc_range_params_dword[5]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3,
			       rc_range_params_dword[6]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3_UDW,
			       rc_range_params_dword[7]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_0,
				       rc_range_params_dword[0]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_0_UDW,
				       rc_range_params_dword[1]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_1,
				       rc_range_params_dword[2]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_1_UDW,
				       rc_range_params_dword[3]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_2,
				       rc_range_params_dword[4]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_2_UDW,
				       rc_range_params_dword[5]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_3,
				       rc_range_params_dword[6]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_3_UDW,
				       rc_range_params_dword[7]);
		}
	} else {
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_0(pipe),
			       rc_range_params_dword[0]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_0_UDW(pipe),
			       rc_range_params_dword[1]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_1(pipe),
			       rc_range_params_dword[2]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_1_UDW(pipe),
			       rc_range_params_dword[3]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_2(pipe),
			       rc_range_params_dword[4]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_2_UDW(pipe),
			       rc_range_params_dword[5]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_3(pipe),
			       rc_range_params_dword[6]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_3_UDW(pipe),
			       rc_range_params_dword[7]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_0(pipe),
				       rc_range_params_dword[0]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_0_UDW(pipe),
				       rc_range_params_dword[1]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_1(pipe),
				       rc_range_params_dword[2]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_1_UDW(pipe),
				       rc_range_params_dword[3]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_2(pipe),
				       rc_range_params_dword[4]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_2_UDW(pipe),
				       rc_range_params_dword[5]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_3(pipe),
				       rc_range_params_dword[6]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_3_UDW(pipe),
				       rc_range_params_dword[7]);
		}
	}
}

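/*
 * Once computed, the same drm_dsc_config is also packed into the standard
 * DSC picture parameter set and handed to the sink: over DSI below, or as a
 * DP PPS SDP infoframe in intel_dsc_dp_pps_write().
 */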
void intel_dsc_dsi_pps_write(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state)
{
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct mipi_dsi_device *dsi;
	struct drm_dsc_picture_parameter_set pps;
	enum port port;

	if (!crtc_state->dsc.compression_enable)
		return;

	drm_dsc_pps_payload_pack(&pps, vdsc_cfg);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;

		mipi_dsi_picture_parameter_set(dsi, &pps);
		mipi_dsi_compression_mode(dsi, true);
	}
}

void intel_dsc_dp_pps_write(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct drm_dsc_pps_infoframe dp_dsc_pps_sdp;

	if (!crtc_state->dsc.compression_enable)
		return;

	/* Prepare DP SDP PPS header as per DP 1.4 spec, Table 2-123 */
	drm_dsc_dp_pps_header_init(&dp_dsc_pps_sdp.pps_header);

	/* Fill the PPS payload bytes as per DSC spec 1.2 Table 4-1 */
	drm_dsc_pps_payload_pack(&dp_dsc_pps_sdp.pps_payload, vdsc_cfg);

	dig_port->write_infoframe(encoder, crtc_state,
				  DP_SDP_PPS, &dp_dsc_pps_sdp,
				  sizeof(dp_dsc_pps_sdp));
}

static i915_reg_t dss_ctl1_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	return is_pipe_dsc(crtc, cpu_transcoder) ?
		ICL_PIPE_DSS_CTL1(crtc->pipe) : DSS_CTL1;
}

static i915_reg_t dss_ctl2_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	return is_pipe_dsc(crtc, cpu_transcoder) ?
		ICL_PIPE_DSS_CTL2(crtc->pipe) : DSS_CTL2;
}

void intel_uncompressed_joiner_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dss_ctl1_val = 0;

	if (crtc_state->joiner_pipes && !crtc_state->dsc.compression_enable) {
		if (intel_crtc_is_joiner_secondary(crtc_state))
			dss_ctl1_val |= UNCOMPRESSED_JOINER_SECONDARY;
		else
			dss_ctl1_val |= UNCOMPRESSED_JOINER_PRIMARY;

		intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
	}
}

void intel_dsc_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dss_ctl1_val = 0;
	u32 dss_ctl2_val = 0;
	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);

	if (!crtc_state->dsc.compression_enable)
		return;

	intel_dsc_pps_configure(crtc_state);

	dss_ctl2_val |= LEFT_BRANCH_VDSC_ENABLE;
	if (vdsc_instances_per_pipe > 1) {
		dss_ctl2_val |= RIGHT_BRANCH_VDSC_ENABLE;
		dss_ctl1_val |= JOINER_ENABLE;
	}
	if (crtc_state->joiner_pipes) {
		dss_ctl1_val |= BIG_JOINER_ENABLE;
		if (!intel_crtc_is_joiner_secondary(crtc_state))
			dss_ctl1_val |= PRIMARY_BIG_JOINER_ENABLE;
	}
	intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
	intel_de_write(dev_priv, dss_ctl2_reg(crtc, crtc_state->cpu_transcoder), dss_ctl2_val);
}

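/*
 * Note that disabling only clears DSS_CTL1/DSS_CTL2; the PPS registers are
 * left untouched and are fully reprogrammed by intel_dsc_enable() on the
 * next enable sequence.
 */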
void intel_dsc_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	/* Disable only if either of them is enabled */
	if (old_crtc_state->dsc.compression_enable ||
	    old_crtc_state->joiner_pipes) {
		intel_de_write(dev_priv, dss_ctl1_reg(crtc, old_crtc_state->cpu_transcoder), 0);
		intel_de_write(dev_priv, dss_ctl2_reg(crtc, old_crtc_state->cpu_transcoder), 0);
	}
}

static u32 intel_dsc_pps_read(struct intel_crtc_state *crtc_state, int pps,
			      bool *all_equal)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	i915_reg_t dsc_reg[2];
	int i, vdsc_per_pipe, dsc_reg_num;
	u32 val;

	vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
	dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);

	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);

	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);

	*all_equal = true;

	val = intel_de_read(i915, dsc_reg[0]);

	for (i = 1; i < dsc_reg_num; i++) {
		if (intel_de_read(i915, dsc_reg[i]) != val) {
			*all_equal = false;
			break;
		}
	}

	return val;
}

static u32 intel_dsc_pps_read_and_verify(struct intel_crtc_state *crtc_state, int pps)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	u32 val;
	bool all_equal;

	val = intel_dsc_pps_read(crtc_state, pps, &all_equal);
	drm_WARN_ON(&i915->drm, !all_equal);

	return val;
}

static void intel_dsc_get_pps_config(struct intel_crtc_state *crtc_state)
{
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	u32 pps_temp;

	/* PPS 0 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 0);

	vdsc_cfg->bits_per_component = REG_FIELD_GET(DSC_PPS0_BPC_MASK, pps_temp);
	vdsc_cfg->line_buf_depth = REG_FIELD_GET(DSC_PPS0_LINE_BUF_DEPTH_MASK, pps_temp);
	vdsc_cfg->block_pred_enable = pps_temp & DSC_PPS0_BLOCK_PREDICTION;
	vdsc_cfg->convert_rgb = pps_temp & DSC_PPS0_COLOR_SPACE_CONVERSION;
	vdsc_cfg->simple_422 = pps_temp & DSC_PPS0_422_ENABLE;
	vdsc_cfg->native_422 = pps_temp & DSC_PPS0_NATIVE_422_ENABLE;
	vdsc_cfg->native_420 = pps_temp & DSC_PPS0_NATIVE_420_ENABLE;
	vdsc_cfg->vbr_enable = pps_temp & DSC_PPS0_VBR_ENABLE;

	/* PPS 1 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 1);

	vdsc_cfg->bits_per_pixel = REG_FIELD_GET(DSC_PPS1_BPP_MASK, pps_temp);

	if (vdsc_cfg->native_420)
		vdsc_cfg->bits_per_pixel >>= 1;

	crtc_state->dsc.compressed_bpp_x16 = vdsc_cfg->bits_per_pixel;

	/* PPS 2 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 2);

	vdsc_cfg->pic_width = REG_FIELD_GET(DSC_PPS2_PIC_WIDTH_MASK, pps_temp) * num_vdsc_instances;
	vdsc_cfg->pic_height = REG_FIELD_GET(DSC_PPS2_PIC_HEIGHT_MASK, pps_temp);

	/* PPS 3 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 3);

	vdsc_cfg->slice_width = REG_FIELD_GET(DSC_PPS3_SLICE_WIDTH_MASK, pps_temp);
	vdsc_cfg->slice_height = REG_FIELD_GET(DSC_PPS3_SLICE_HEIGHT_MASK, pps_temp);

	/* PPS 4 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 4);

	vdsc_cfg->initial_dec_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_DEC_DELAY_MASK, pps_temp);
	vdsc_cfg->initial_xmit_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_XMIT_DELAY_MASK, pps_temp);

	/* PPS 5 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 5);

	vdsc_cfg->scale_decrement_interval = REG_FIELD_GET(DSC_PPS5_SCALE_DEC_INT_MASK, pps_temp);
	vdsc_cfg->scale_increment_interval = REG_FIELD_GET(DSC_PPS5_SCALE_INC_INT_MASK, pps_temp);

	/* PPS 6 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 6);

	vdsc_cfg->initial_scale_value = REG_FIELD_GET(DSC_PPS6_INITIAL_SCALE_VALUE_MASK, pps_temp);
	vdsc_cfg->first_line_bpg_offset = REG_FIELD_GET(DSC_PPS6_FIRST_LINE_BPG_OFFSET_MASK, pps_temp);
	vdsc_cfg->flatness_min_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MIN_QP_MASK, pps_temp);
	vdsc_cfg->flatness_max_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MAX_QP_MASK, pps_temp);

	/* PPS 7 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 7);

	vdsc_cfg->nfl_bpg_offset = REG_FIELD_GET(DSC_PPS7_NFL_BPG_OFFSET_MASK, pps_temp);
	vdsc_cfg->slice_bpg_offset = REG_FIELD_GET(DSC_PPS7_SLICE_BPG_OFFSET_MASK, pps_temp);

	/* PPS 8 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 8);

	vdsc_cfg->initial_offset = REG_FIELD_GET(DSC_PPS8_INITIAL_OFFSET_MASK, pps_temp);
	vdsc_cfg->final_offset = REG_FIELD_GET(DSC_PPS8_FINAL_OFFSET_MASK, pps_temp);

	/* PPS 9 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 9);

	vdsc_cfg->rc_model_size = REG_FIELD_GET(DSC_PPS9_RC_MODEL_SIZE_MASK, pps_temp);

	/* PPS 10 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 10);

	vdsc_cfg->rc_quant_incr_limit0 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT0_MASK, pps_temp);
	vdsc_cfg->rc_quant_incr_limit1 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT1_MASK, pps_temp);

	/* PPS 16 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 16);

	vdsc_cfg->slice_chunk_size = REG_FIELD_GET(DSC_PPS16_SLICE_CHUNK_SIZE_MASK, pps_temp);

	if (DISPLAY_VER(i915) >= 14) {
		/* PPS 17 */
		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 17);

		vdsc_cfg->second_line_bpg_offset = REG_FIELD_GET(DSC_PPS17_SL_BPG_OFFSET_MASK, pps_temp);

		/* PPS 18 */
		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 18);

		vdsc_cfg->nsl_bpg_offset = REG_FIELD_GET(DSC_PPS18_NSL_BPG_OFFSET_MASK, pps_temp);
		vdsc_cfg->second_line_offset_adj = REG_FIELD_GET(DSC_PPS18_SL_OFFSET_ADJ_MASK, pps_temp);
	}
}

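/*
 * Read back the DSC/joiner hardware state for state verification. A power
 * reference on the relevant DSC power domain is taken for the duration of
 * the readout; nothing is filled in if the domain is powered down.
 */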
void intel_dsc_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	u32 dss_ctl1, dss_ctl2;

	if (!intel_dsc_source_support(crtc_state))
		return;

	power_domain = intel_dsc_power_domain(crtc, cpu_transcoder);

	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return;

	dss_ctl1 = intel_de_read(dev_priv, dss_ctl1_reg(crtc, cpu_transcoder));
	dss_ctl2 = intel_de_read(dev_priv, dss_ctl2_reg(crtc, cpu_transcoder));

	crtc_state->dsc.compression_enable = dss_ctl2 & LEFT_BRANCH_VDSC_ENABLE;
	if (!crtc_state->dsc.compression_enable)
		goto out;

	crtc_state->dsc.dsc_split = (dss_ctl2 & RIGHT_BRANCH_VDSC_ENABLE) &&
		(dss_ctl1 & JOINER_ENABLE);

	intel_dsc_get_pps_config(crtc_state);
out:
	intel_display_power_put(dev_priv, power_domain, wakeref);
}

static void intel_vdsc_dump_state(struct drm_printer *p, int indent,
				  const struct intel_crtc_state *crtc_state)
{
	drm_printf_indent(p, indent,
			  "dsc-dss: compressed-bpp:" FXP_Q4_FMT ", slice-count: %d, split: %s\n",
			  FXP_Q4_ARGS(crtc_state->dsc.compressed_bpp_x16),
			  crtc_state->dsc.slice_count,
			  str_yes_no(crtc_state->dsc.dsc_split));
}

void intel_vdsc_state_dump(struct drm_printer *p, int indent,
			   const struct intel_crtc_state *crtc_state)
{
	if (!crtc_state->dsc.compression_enable)
		return;

	intel_vdsc_dump_state(p, indent, crtc_state);
	drm_dsc_dump_config(p, indent, &crtc_state->dsc.config);
}