// SPDX-License-Identifier: MIT
/*
 * Copyright © 2018 Intel Corporation
 *
 * Author: Gaurav K Singh <gaurav.k.singh@intel.com>
 *         Manasi Navare <manasi.d.navare@intel.com>
 */
#include <linux/limits.h>

#include <drm/display/drm_dsc_helper.h>

#include "i915_drv.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dsi.h"
#include "intel_qp_tables.h"
#include "intel_vdsc.h"
#include "intel_vdsc_regs.h"

bool intel_dsc_source_support(const struct intel_crtc_state *crtc_state)
{
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_DSC(i915))
		return false;

	if (DISPLAY_VER(i915) == 11 && cpu_transcoder == TRANSCODER_A)
		return false;

	return true;
}

static bool is_pipe_dsc(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);

	if (DISPLAY_VER(i915) >= 12)
		return true;

	if (cpu_transcoder == TRANSCODER_EDP ||
	    cpu_transcoder == TRANSCODER_DSI_0 ||
	    cpu_transcoder == TRANSCODER_DSI_1)
		return false;

	/* There's no pipe A DSC engine on ICL */
	drm_WARN_ON(&i915->drm, crtc->pipe == PIPE_A);

	return true;
}

static void
intel_vdsc_set_min_max_qp(struct drm_dsc_config *vdsc_cfg, int buf,
			  int bpp)
{
	int bpc = vdsc_cfg->bits_per_component;

	/* Read range_min_qp and range_max_qp from qp tables */
	vdsc_cfg->rc_range_params[buf].range_min_qp =
		intel_lookup_range_min_qp(bpc, buf, bpp, vdsc_cfg->native_420);
	vdsc_cfg->rc_range_params[buf].range_max_qp =
		intel_lookup_range_max_qp(bpc, buf, bpp, vdsc_cfg->native_420);
}

/*
 * We are using the method provided in the DSC 1.2a C-Model in codec_main.c.
 * This method uses a common formula to derive values for any combination of
 * DSC variables. The formula approach may yield slight differences in the
 * derived PPS parameters from the original parameter sets. These differences
 * are not consequential to the coding performance because all parameter sets
 * have been shown to produce visually lossless quality (provides the same PPS
 * values as the DSCParameterValuesVESA V1-2 spreadsheet).
 */
static void
calculate_rc_params(struct drm_dsc_config *vdsc_cfg)
{
	int bpp = to_bpp_int(vdsc_cfg->bits_per_pixel);
	int bpc = vdsc_cfg->bits_per_component;
	int qp_bpc_modifier = (bpc - 8) * 2;
	int uncompressed_bpg_rate;
	int first_line_bpg_offset;
	u32 res, buf_i, bpp_i;

	if (vdsc_cfg->slice_height >= 8)
		first_line_bpg_offset =
			12 + (9 * min(34, vdsc_cfg->slice_height - 8)) / 100;
	else
		first_line_bpg_offset = 2 * (vdsc_cfg->slice_height - 1);

	uncompressed_bpg_rate = (3 * bpc + (vdsc_cfg->convert_rgb ? 0 : 2)) * 3;
	vdsc_cfg->first_line_bpg_offset = clamp(first_line_bpg_offset, 0,
						uncompressed_bpg_rate - 3 * bpp);
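
	/*
	 * For example (illustrative values): a slice height of 108 lines at
	 * 8 bpc RGB and 8 bpp gives first_line_bpg_offset = 12 + (9 * 34) / 100
	 * = 15, while the clamp limit is uncompressed_bpg_rate - 3 * bpp =
	 * 72 - 24 = 48, so the value is left as is.
	 */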

	/*
	 * According to DSC 1.2 spec in Section 4.1 if native_420 is set:
	 * -second_line_bpg_offset is 12 in general and equal to 2*(slice_height-1) if slice
	 * height < 8.
	 * -second_line_offset_adj is 512 as shown by empirical values to yield the best
	 * chroma preservation in the second line.
	 * -nsl_bpg_offset is calculated as second_line_bpg_offset / (slice_height - 1) and
	 * rounded up; we left shift second_line_bpg_offset by 11 to preserve 11 fractional
	 * bits.
	 */
	if (vdsc_cfg->native_420) {
		if (vdsc_cfg->slice_height >= 8)
			vdsc_cfg->second_line_bpg_offset = 12;
		else
			vdsc_cfg->second_line_bpg_offset =
				2 * (vdsc_cfg->slice_height - 1);

		vdsc_cfg->second_line_offset_adj = 512;
		vdsc_cfg->nsl_bpg_offset = DIV_ROUND_UP(vdsc_cfg->second_line_bpg_offset << 11,
							vdsc_cfg->slice_height - 1);
	}
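
	/*
	 * For example (illustrative values): second_line_bpg_offset = 12 and
	 * slice_height = 108 give nsl_bpg_offset = DIV_ROUND_UP(12 << 11, 107)
	 * = DIV_ROUND_UP(24576, 107) = 230, i.e. roughly 0.11 bits per group
	 * once the 11 fractional bits are taken into account.
	 */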

	/* Our hw supports only 444 modes as of today */
	if (bpp >= 12)
		vdsc_cfg->initial_offset = 2048;
	else if (bpp >= 10)
		vdsc_cfg->initial_offset = 5632 - DIV_ROUND_UP(((bpp - 10) * 3584), 2);
	else if (bpp >= 8)
		vdsc_cfg->initial_offset = 6144 - DIV_ROUND_UP(((bpp - 8) * 512), 2);
	else
		vdsc_cfg->initial_offset = 6144;

	/* initial_xmit_delay = rc_model_size/2/compression_bpp */
	vdsc_cfg->initial_xmit_delay = DIV_ROUND_UP(DSC_RC_MODEL_SIZE_CONST, 2 * bpp);
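
	/*
	 * For example (illustrative): 8 bpp gives initial_offset = 6144 and
	 * initial_xmit_delay = DIV_ROUND_UP(8192, 16) = 512, while 12 bpp
	 * gives initial_offset = 2048 and initial_xmit_delay =
	 * DIV_ROUND_UP(8192, 24) = 342.
	 */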

	vdsc_cfg->flatness_min_qp = 3 + qp_bpc_modifier;
	vdsc_cfg->flatness_max_qp = 12 + qp_bpc_modifier;

	vdsc_cfg->rc_quant_incr_limit0 = 11 + qp_bpc_modifier;
	vdsc_cfg->rc_quant_incr_limit1 = 11 + qp_bpc_modifier;

	if (vdsc_cfg->native_420) {
		static const s8 ofs_und4[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
		};
		static const s8 ofs_und5[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und6[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und8[] = {
			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
		};
		/*
		 * For 420 format since bits_per_pixel (bpp) is set to target bpp * 2,
		 * QP table values for target bpp 4.0 to 4.4375 (rounded to 4.0) are
		 * actually for bpp 8 to 8.875 (rounded to 4.0 * 2 i.e. 8).
		 * Similarly values for target bpp 4.5 to 4.9375 (rounded to 4.5)
		 * are for bpp 9 to 9.875 (rounded to 4.5 * 2 i.e. 9), and so on.
		 */
		bpp_i = bpp - 8;
		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
			u8 range_bpg_offset;

			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);

			/* Calculate range_bpg_offset */
			if (bpp <= 8) {
				range_bpg_offset = ofs_und4[buf_i];
			} else if (bpp <= 10) {
				res = DIV_ROUND_UP(((bpp - 8) *
						    (ofs_und5[buf_i] - ofs_und4[buf_i])), 2);
				range_bpg_offset = ofs_und4[buf_i] + res;
			} else if (bpp <= 12) {
				res = DIV_ROUND_UP(((bpp - 10) *
						    (ofs_und6[buf_i] - ofs_und5[buf_i])), 2);
				range_bpg_offset = ofs_und5[buf_i] + res;
			} else if (bpp <= 16) {
				res = DIV_ROUND_UP(((bpp - 12) *
						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 4);
				range_bpg_offset = ofs_und6[buf_i] + res;
			} else {
				range_bpg_offset = ofs_und8[buf_i];
			}

			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
		}
	} else {
		/* fractional bpp part * 10000 (for precision up to 4 decimal places) */
		int fractional_bits = to_bpp_frac(vdsc_cfg->bits_per_pixel);

		static const s8 ofs_und6[] = {
			0, -2, -2, -4, -6, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
		};
		static const s8 ofs_und8[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und12[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und15[] = {
			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
		};

		/*
		 * QP table rows have values in increment of 0.5.
		 * So 6.0 bpp to 6.4375 will have index 0, 6.5 to 6.9375 will have index 1,
		 * and so on.
		 * 0.5 fractional part with 4 decimal precision becomes 5000
		 */
		bpp_i = ((bpp - 6) + (fractional_bits < 5000 ? 0 : 1));

		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
			u8 range_bpg_offset;

			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);

			/* Calculate range_bpg_offset */
			if (bpp <= 6) {
				range_bpg_offset = ofs_und6[buf_i];
			} else if (bpp <= 8) {
				res = DIV_ROUND_UP(((bpp - 6) *
						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 2);
				range_bpg_offset = ofs_und6[buf_i] + res;
			} else if (bpp <= 12) {
				range_bpg_offset = ofs_und8[buf_i];
			} else if (bpp <= 15) {
				res = DIV_ROUND_UP(((bpp - 12) *
						    (ofs_und15[buf_i] - ofs_und12[buf_i])), 3);
				range_bpg_offset = ofs_und12[buf_i] + res;
			} else {
				range_bpg_offset = ofs_und15[buf_i];
			}

			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
		}
	}
}

static int intel_dsc_slice_dimensions_valid(struct intel_crtc_state *pipe_config,
					    struct drm_dsc_config *vdsc_cfg)
{
	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_RGB ||
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) {
		if (vdsc_cfg->slice_height > 4095)
			return -EINVAL;
		if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 15000)
			return -EINVAL;
	} else if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		if (vdsc_cfg->slice_width % 2)
			return -EINVAL;
		if (vdsc_cfg->slice_height % 2)
			return -EINVAL;
		if (vdsc_cfg->slice_height > 4094)
			return -EINVAL;
		if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 30000)
			return -EINVAL;
	}

	return 0;
}

int intel_dsc_compute_params(struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct drm_dsc_config *vdsc_cfg = &pipe_config->dsc.config;
	u16 compressed_bpp = to_bpp_int(pipe_config->dsc.compressed_bpp_x16);
	int err;
	int ret;

	vdsc_cfg->pic_width = pipe_config->hw.adjusted_mode.crtc_hdisplay;
	vdsc_cfg->slice_width = DIV_ROUND_UP(vdsc_cfg->pic_width,
					     pipe_config->dsc.slice_count);

	err = intel_dsc_slice_dimensions_valid(pipe_config, vdsc_cfg);

	if (err) {
		drm_dbg_kms(&dev_priv->drm, "Slice dimension requirements not met\n");
		return err;
	}

	/*
	 * According to DSC 1.2 specs if colorspace is YCbCr then convert_rgb is 0
	 * else 1
	 */
	vdsc_cfg->convert_rgb = pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR420 &&
				pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR444;

	if (DISPLAY_VER(dev_priv) >= 14 &&
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		vdsc_cfg->native_420 = true;
	/* We do not support YCbCr422 as of now */
	vdsc_cfg->native_422 = false;
	vdsc_cfg->simple_422 = false;
	/* Gen 11 does not support VBR */
	vdsc_cfg->vbr_enable = false;

	vdsc_cfg->bits_per_pixel = pipe_config->dsc.compressed_bpp_x16;

	/*
	 * According to DSC 1.2 specs in Section 4.1 if native_420 is set
	 * we need to double the current bpp.
	 */
	if (vdsc_cfg->native_420)
		vdsc_cfg->bits_per_pixel <<= 1;

	vdsc_cfg->bits_per_component = pipe_config->pipe_bpp / 3;

	drm_dsc_set_rc_buf_thresh(vdsc_cfg);

	/*
	 * From XE_LPD onwards we support compressed bpp in steps of 1 up to
	 * uncompressed bpp - 1, hence add calculations for all the rc
	 * parameters.
	 */
	if (DISPLAY_VER(dev_priv) >= 13) {
		calculate_rc_params(vdsc_cfg);
	} else {
		if ((compressed_bpp == 8 ||
		     compressed_bpp == 12) &&
		    (vdsc_cfg->bits_per_component == 8 ||
		     vdsc_cfg->bits_per_component == 10 ||
		     vdsc_cfg->bits_per_component == 12))
			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_1_PRE_SCR);
		else
			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_2_444);

		if (ret)
			return ret;
	}

	/*
	 * BitsPerComponent value determines mux_word_size:
	 * When BitsPerComponent is less than or equal to 10 bpc, muxWordSize
	 * will be equal to 48 bits, otherwise 64.
	 */
	if (vdsc_cfg->bits_per_component <= 10)
		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_8_10_BPC;
	else
		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_12_BPC;

	/* InitialScaleValue is a 6 bit value with 3 fractional bits (U3.3) */
	vdsc_cfg->initial_scale_value = (vdsc_cfg->rc_model_size << 3) /
		(vdsc_cfg->rc_model_size - vdsc_cfg->initial_offset);
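
	/*
	 * For example (illustrative, assuming the usual rc_model_size of 8192):
	 * with initial_offset = 6144 this is (8192 << 3) / (8192 - 6144) =
	 * 65536 / 2048 = 32, i.e. 4.0 in U3.3.
	 */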

	return 0;
}

enum intel_display_power_domain
intel_dsc_power_domain(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * VDSC/joining uses a separate power well, PW2, and requires
	 * POWER_DOMAIN_TRANSCODER_VDSC_PW2 power domain in two cases:
	 *
	 *  - ICL eDP/DSI transcoder
	 *  - Display version 12 (except RKL) pipe A
	 *
	 * For any other pipe, VDSC/joining uses the power well associated with
	 * the pipe in use. Hence another reference on the pipe power domain
	 * will suffice. (Except no VDSC/joining on ICL pipe A.)
	 */
	if (DISPLAY_VER(i915) == 12 && !IS_ROCKETLAKE(i915) && pipe == PIPE_A)
		return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
	else if (is_pipe_dsc(crtc, cpu_transcoder))
		return POWER_DOMAIN_PIPE(pipe);
	else
		return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
}

static int intel_dsc_get_vdsc_per_pipe(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->dsc.dsc_split ? 2 : 1;
}

int intel_dsc_get_num_vdsc_instances(const struct intel_crtc_state *crtc_state)
{
	int num_vdsc_instances = intel_dsc_get_vdsc_per_pipe(crtc_state);

	if (crtc_state->joiner_pipes)
		num_vdsc_instances *= 2;

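	/*
	 * E.g. (illustrative) a joined mode with dsc_split set on each pipe
	 * runs 2 engines per pipe * 2 pipes = 4 VDSC instances in total, and
	 * the PPS picture width programmed below is divided accordingly.
	 */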
	return num_vdsc_instances;
}

static void intel_dsc_get_pps_reg(const struct intel_crtc_state *crtc_state, int pps,
				  i915_reg_t *dsc_reg, int dsc_reg_num)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	bool pipe_dsc;

	pipe_dsc = is_pipe_dsc(crtc, cpu_transcoder);

	if (dsc_reg_num >= 3)
		MISSING_CASE(dsc_reg_num);
	if (dsc_reg_num >= 2)
		dsc_reg[1] = pipe_dsc ? ICL_DSC1_PPS(pipe, pps) : DSCC_PPS(pps);
	if (dsc_reg_num >= 1)
		dsc_reg[0] = pipe_dsc ? ICL_DSC0_PPS(pipe, pps) : DSCA_PPS(pps);
}

static void intel_dsc_pps_write(const struct intel_crtc_state *crtc_state,
				int pps, u32 pps_val)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	i915_reg_t dsc_reg[2];
	int i, vdsc_per_pipe, dsc_reg_num;

	vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
	dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);

	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);

	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);

	for (i = 0; i < dsc_reg_num; i++)
		intel_de_write(i915, dsc_reg[i], pps_val);
}

static void intel_dsc_pps_configure(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	u32 pps_val;
	u32 rc_buf_thresh_dword[4];
	u32 rc_range_params_dword[8];
	int i = 0;
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);

	/* PPS 0 */
	pps_val = DSC_PPS0_VER_MAJOR(1) |
		DSC_PPS0_VER_MINOR(vdsc_cfg->dsc_version_minor) |
		DSC_PPS0_BPC(vdsc_cfg->bits_per_component) |
		DSC_PPS0_LINE_BUF_DEPTH(vdsc_cfg->line_buf_depth);
	if (vdsc_cfg->dsc_version_minor == 2) {
		pps_val |= DSC_PPS0_ALT_ICH_SEL;
		if (vdsc_cfg->native_420)
			pps_val |= DSC_PPS0_NATIVE_420_ENABLE;
		if (vdsc_cfg->native_422)
			pps_val |= DSC_PPS0_NATIVE_422_ENABLE;
	}
	if (vdsc_cfg->block_pred_enable)
		pps_val |= DSC_PPS0_BLOCK_PREDICTION;
	if (vdsc_cfg->convert_rgb)
		pps_val |= DSC_PPS0_COLOR_SPACE_CONVERSION;
	if (vdsc_cfg->simple_422)
		pps_val |= DSC_PPS0_422_ENABLE;
	if (vdsc_cfg->vbr_enable)
		pps_val |= DSC_PPS0_VBR_ENABLE;
	drm_dbg_kms(&dev_priv->drm, "PPS0 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 0, pps_val);

	/* PPS 1 */
	pps_val = DSC_PPS1_BPP(vdsc_cfg->bits_per_pixel);
	drm_dbg_kms(&dev_priv->drm, "PPS1 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 1, pps_val);

	/* PPS 2 */
	pps_val = DSC_PPS2_PIC_HEIGHT(vdsc_cfg->pic_height) |
		DSC_PPS2_PIC_WIDTH(vdsc_cfg->pic_width / num_vdsc_instances);
	drm_dbg_kms(&dev_priv->drm, "PPS2 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 2, pps_val);

	/* PPS 3 */
	pps_val = DSC_PPS3_SLICE_HEIGHT(vdsc_cfg->slice_height) |
		DSC_PPS3_SLICE_WIDTH(vdsc_cfg->slice_width);
	drm_dbg_kms(&dev_priv->drm, "PPS3 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 3, pps_val);

	/* PPS 4 */
	pps_val = DSC_PPS4_INITIAL_XMIT_DELAY(vdsc_cfg->initial_xmit_delay) |
		DSC_PPS4_INITIAL_DEC_DELAY(vdsc_cfg->initial_dec_delay);
	drm_dbg_kms(&dev_priv->drm, "PPS4 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 4, pps_val);

	/* PPS 5 */
	pps_val = DSC_PPS5_SCALE_INC_INT(vdsc_cfg->scale_increment_interval) |
		DSC_PPS5_SCALE_DEC_INT(vdsc_cfg->scale_decrement_interval);
	drm_dbg_kms(&dev_priv->drm, "PPS5 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 5, pps_val);

	/* PPS 6 */
	pps_val = DSC_PPS6_INITIAL_SCALE_VALUE(vdsc_cfg->initial_scale_value) |
		DSC_PPS6_FIRST_LINE_BPG_OFFSET(vdsc_cfg->first_line_bpg_offset) |
		DSC_PPS6_FLATNESS_MIN_QP(vdsc_cfg->flatness_min_qp) |
		DSC_PPS6_FLATNESS_MAX_QP(vdsc_cfg->flatness_max_qp);
	drm_dbg_kms(&dev_priv->drm, "PPS6 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 6, pps_val);

	/* PPS 7 */
	pps_val = DSC_PPS7_SLICE_BPG_OFFSET(vdsc_cfg->slice_bpg_offset) |
		DSC_PPS7_NFL_BPG_OFFSET(vdsc_cfg->nfl_bpg_offset);
	drm_dbg_kms(&dev_priv->drm, "PPS7 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 7, pps_val);

	/* PPS 8 */
	pps_val = DSC_PPS8_FINAL_OFFSET(vdsc_cfg->final_offset) |
		DSC_PPS8_INITIAL_OFFSET(vdsc_cfg->initial_offset);
	drm_dbg_kms(&dev_priv->drm, "PPS8 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 8, pps_val);

	/* PPS 9 */
	pps_val = DSC_PPS9_RC_MODEL_SIZE(vdsc_cfg->rc_model_size) |
		DSC_PPS9_RC_EDGE_FACTOR(DSC_RC_EDGE_FACTOR_CONST);
	drm_dbg_kms(&dev_priv->drm, "PPS9 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 9, pps_val);

	/* PPS 10 */
	pps_val = DSC_PPS10_RC_QUANT_INC_LIMIT0(vdsc_cfg->rc_quant_incr_limit0) |
		DSC_PPS10_RC_QUANT_INC_LIMIT1(vdsc_cfg->rc_quant_incr_limit1) |
		DSC_PPS10_RC_TARGET_OFF_HIGH(DSC_RC_TGT_OFFSET_HI_CONST) |
		DSC_PPS10_RC_TARGET_OFF_LOW(DSC_RC_TGT_OFFSET_LO_CONST);
	drm_dbg_kms(&dev_priv->drm, "PPS10 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 10, pps_val);

	/* PPS 16 */
	pps_val = DSC_PPS16_SLICE_CHUNK_SIZE(vdsc_cfg->slice_chunk_size) |
		DSC_PPS16_SLICE_PER_LINE((vdsc_cfg->pic_width / num_vdsc_instances) /
					 vdsc_cfg->slice_width) |
		DSC_PPS16_SLICE_ROW_PER_FRAME(vdsc_cfg->pic_height /
					      vdsc_cfg->slice_height);
	drm_dbg_kms(&dev_priv->drm, "PPS16 = 0x%08x\n", pps_val);
	intel_dsc_pps_write(crtc_state, 16, pps_val);

	if (DISPLAY_VER(dev_priv) >= 14) {
		/* PPS 17 */
		pps_val = DSC_PPS17_SL_BPG_OFFSET(vdsc_cfg->second_line_bpg_offset);
		drm_dbg_kms(&dev_priv->drm, "PPS17 = 0x%08x\n", pps_val);
		intel_dsc_pps_write(crtc_state, 17, pps_val);

		/* PPS 18 */
		pps_val = DSC_PPS18_NSL_BPG_OFFSET(vdsc_cfg->nsl_bpg_offset) |
			DSC_PPS18_SL_OFFSET_ADJ(vdsc_cfg->second_line_offset_adj);
		drm_dbg_kms(&dev_priv->drm, "PPS18 = 0x%08x\n", pps_val);
		intel_dsc_pps_write(crtc_state, 18, pps_val);
	}

	/* Populate the RC_BUF_THRESH registers */
	memset(rc_buf_thresh_dword, 0, sizeof(rc_buf_thresh_dword));
	for (i = 0; i < DSC_NUM_BUF_RANGES - 1; i++) {
		rc_buf_thresh_dword[i / 4] |=
			(u32)(vdsc_cfg->rc_buf_thresh[i] <<
			      BITS_PER_BYTE * (i % 4));
		drm_dbg_kms(&dev_priv->drm, "RC_BUF_THRESH_%d = 0x%08x\n", i,
			    rc_buf_thresh_dword[i / 4]);
	}
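	/*
	 * Illustrative layout: the 14 rc_buf_thresh values pack four per
	 * dword, one byte each, so rc_buf_thresh[0] lands in bits 7:0 of
	 * dword 0, rc_buf_thresh[4] in bits 7:0 of dword 1, and
	 * rc_buf_thresh[13] in bits 15:8 of dword 3, leaving the top two
	 * bytes of dword 3 zero.
	 */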
	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0,
			       rc_buf_thresh_dword[0]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0_UDW,
			       rc_buf_thresh_dword[1]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1,
			       rc_buf_thresh_dword[2]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1_UDW,
			       rc_buf_thresh_dword[3]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0,
				       rc_buf_thresh_dword[0]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0_UDW,
				       rc_buf_thresh_dword[1]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1,
				       rc_buf_thresh_dword[2]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1_UDW,
				       rc_buf_thresh_dword[3]);
		}
	} else {
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0(pipe),
			       rc_buf_thresh_dword[0]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0_UDW(pipe),
			       rc_buf_thresh_dword[1]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1(pipe),
			       rc_buf_thresh_dword[2]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1_UDW(pipe),
			       rc_buf_thresh_dword[3]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_0(pipe),
				       rc_buf_thresh_dword[0]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_0_UDW(pipe),
				       rc_buf_thresh_dword[1]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_1(pipe),
				       rc_buf_thresh_dword[2]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_1_UDW(pipe),
				       rc_buf_thresh_dword[3]);
		}
	}

	/* Populate the RC_RANGE_PARAMETERS registers */
	memset(rc_range_params_dword, 0, sizeof(rc_range_params_dword));
	for (i = 0; i < DSC_NUM_BUF_RANGES; i++) {
		rc_range_params_dword[i / 2] |=
			(u32)(((vdsc_cfg->rc_range_params[i].range_bpg_offset <<
				RC_BPG_OFFSET_SHIFT) |
			       (vdsc_cfg->rc_range_params[i].range_max_qp <<
				RC_MAX_QP_SHIFT) |
			       (vdsc_cfg->rc_range_params[i].range_min_qp <<
				RC_MIN_QP_SHIFT)) << 16 * (i % 2));
		drm_dbg_kms(&dev_priv->drm, "RC_RANGE_PARAM_%d = 0x%08x\n", i,
			    rc_range_params_dword[i / 2]);
	}
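	/*
	 * Illustrative layout: each of the 15 ranges collapses into one 16-bit
	 * field (min_qp, max_qp and bpg_offset at their respective shifts),
	 * two ranges per dword, so range 14 sits in the low half of dword 7
	 * and the upper half stays zero.
	 */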
	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0,
			       rc_range_params_dword[0]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0_UDW,
			       rc_range_params_dword[1]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1,
			       rc_range_params_dword[2]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1_UDW,
			       rc_range_params_dword[3]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2,
			       rc_range_params_dword[4]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2_UDW,
			       rc_range_params_dword[5]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3,
			       rc_range_params_dword[6]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3_UDW,
			       rc_range_params_dword[7]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_0,
				       rc_range_params_dword[0]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_0_UDW,
				       rc_range_params_dword[1]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_1,
				       rc_range_params_dword[2]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_1_UDW,
				       rc_range_params_dword[3]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_2,
				       rc_range_params_dword[4]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_2_UDW,
				       rc_range_params_dword[5]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_3,
				       rc_range_params_dword[6]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_3_UDW,
				       rc_range_params_dword[7]);
		}
	} else {
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_0(pipe),
			       rc_range_params_dword[0]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_0_UDW(pipe),
			       rc_range_params_dword[1]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_1(pipe),
			       rc_range_params_dword[2]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_1_UDW(pipe),
			       rc_range_params_dword[3]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_2(pipe),
			       rc_range_params_dword[4]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_2_UDW(pipe),
			       rc_range_params_dword[5]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_3(pipe),
			       rc_range_params_dword[6]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_3_UDW(pipe),
			       rc_range_params_dword[7]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_0(pipe),
				       rc_range_params_dword[0]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_0_UDW(pipe),
				       rc_range_params_dword[1]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_1(pipe),
				       rc_range_params_dword[2]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_1_UDW(pipe),
				       rc_range_params_dword[3]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_2(pipe),
				       rc_range_params_dword[4]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_2_UDW(pipe),
				       rc_range_params_dword[5]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_3(pipe),
				       rc_range_params_dword[6]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_3_UDW(pipe),
				       rc_range_params_dword[7]);
		}
	}
}

void intel_dsc_dsi_pps_write(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state)
{
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct mipi_dsi_device *dsi;
	struct drm_dsc_picture_parameter_set pps;
	enum port port;

	if (!crtc_state->dsc.compression_enable)
		return;

	drm_dsc_pps_payload_pack(&pps, vdsc_cfg);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;

		mipi_dsi_picture_parameter_set(dsi, &pps);
		mipi_dsi_compression_mode(dsi, true);
	}
}

void intel_dsc_dp_pps_write(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct drm_dsc_pps_infoframe dp_dsc_pps_sdp;

	if (!crtc_state->dsc.compression_enable)
		return;

	/* Prepare DP SDP PPS header as per DP 1.4 spec, Table 2-123 */
	drm_dsc_dp_pps_header_init(&dp_dsc_pps_sdp.pps_header);

	/* Fill the PPS payload bytes as per DSC spec 1.2 Table 4-1 */
	drm_dsc_pps_payload_pack(&dp_dsc_pps_sdp.pps_payload, vdsc_cfg);

	dig_port->write_infoframe(encoder, crtc_state,
				  DP_SDP_PPS, &dp_dsc_pps_sdp,
				  sizeof(dp_dsc_pps_sdp));
}

static i915_reg_t dss_ctl1_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	return is_pipe_dsc(crtc, cpu_transcoder) ?
		ICL_PIPE_DSS_CTL1(crtc->pipe) : DSS_CTL1;
}

static i915_reg_t dss_ctl2_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
{
	return is_pipe_dsc(crtc, cpu_transcoder) ?
		ICL_PIPE_DSS_CTL2(crtc->pipe) : DSS_CTL2;
}

void intel_uncompressed_joiner_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dss_ctl1_val = 0;

	if (crtc_state->joiner_pipes && !crtc_state->dsc.compression_enable) {
		if (intel_crtc_is_joiner_secondary(crtc_state))
			dss_ctl1_val |= UNCOMPRESSED_JOINER_SECONDARY;
		else
			dss_ctl1_val |= UNCOMPRESSED_JOINER_PRIMARY;

		intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
	}
}

void intel_dsc_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dss_ctl1_val = 0;
	u32 dss_ctl2_val = 0;
	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);

	if (!crtc_state->dsc.compression_enable)
		return;

	intel_dsc_pps_configure(crtc_state);

	dss_ctl2_val |= LEFT_BRANCH_VDSC_ENABLE;
	if (vdsc_instances_per_pipe > 1) {
		dss_ctl2_val |= RIGHT_BRANCH_VDSC_ENABLE;
		dss_ctl1_val |= JOINER_ENABLE;
	}
	if (crtc_state->joiner_pipes) {
		dss_ctl1_val |= BIG_JOINER_ENABLE;
		if (!intel_crtc_is_joiner_secondary(crtc_state))
			dss_ctl1_val |= PRIMARY_BIG_JOINER_ENABLE;
	}
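	/*
	 * E.g. (illustrative) the primary pipe of a joined config with two
	 * VDSC engines per pipe ends up with LEFT and RIGHT branch enables in
	 * DSS_CTL2 and JOINER_ENABLE | BIG_JOINER_ENABLE |
	 * PRIMARY_BIG_JOINER_ENABLE in DSS_CTL1.
	 */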
	intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
	intel_de_write(dev_priv, dss_ctl2_reg(crtc, crtc_state->cpu_transcoder), dss_ctl2_val);
}

void intel_dsc_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	/* Disable only if either of them is enabled */
	if (old_crtc_state->dsc.compression_enable ||
	    old_crtc_state->joiner_pipes) {
		intel_de_write(dev_priv, dss_ctl1_reg(crtc, old_crtc_state->cpu_transcoder), 0);
		intel_de_write(dev_priv, dss_ctl2_reg(crtc, old_crtc_state->cpu_transcoder), 0);
	}
}

static u32 intel_dsc_pps_read(struct intel_crtc_state *crtc_state, int pps,
			      bool *all_equal)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	i915_reg_t dsc_reg[2];
	int i, vdsc_per_pipe, dsc_reg_num;
	u32 val;

	vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
	dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);

	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);

	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);

	*all_equal = true;

	val = intel_de_read(i915, dsc_reg[0]);

	for (i = 1; i < dsc_reg_num; i++) {
		if (intel_de_read(i915, dsc_reg[i]) != val) {
			*all_equal = false;
			break;
		}
	}

	return val;
}

static u32 intel_dsc_pps_read_and_verify(struct intel_crtc_state *crtc_state, int pps)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	u32 val;
	bool all_equal;

	val = intel_dsc_pps_read(crtc_state, pps, &all_equal);
	drm_WARN_ON(&i915->drm, !all_equal);

	return val;
}

static void intel_dsc_get_pps_config(struct intel_crtc_state *crtc_state)
{
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	u32 pps_temp;

	/* PPS 0 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 0);

	vdsc_cfg->bits_per_component = REG_FIELD_GET(DSC_PPS0_BPC_MASK, pps_temp);
	vdsc_cfg->line_buf_depth = REG_FIELD_GET(DSC_PPS0_LINE_BUF_DEPTH_MASK, pps_temp);
	vdsc_cfg->block_pred_enable = pps_temp & DSC_PPS0_BLOCK_PREDICTION;
	vdsc_cfg->convert_rgb = pps_temp & DSC_PPS0_COLOR_SPACE_CONVERSION;
	vdsc_cfg->simple_422 = pps_temp & DSC_PPS0_422_ENABLE;
	vdsc_cfg->native_422 = pps_temp & DSC_PPS0_NATIVE_422_ENABLE;
	vdsc_cfg->native_420 = pps_temp & DSC_PPS0_NATIVE_420_ENABLE;
	vdsc_cfg->vbr_enable = pps_temp & DSC_PPS0_VBR_ENABLE;

	/* PPS 1 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 1);

	vdsc_cfg->bits_per_pixel = REG_FIELD_GET(DSC_PPS1_BPP_MASK, pps_temp);

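	/*
	 * intel_dsc_compute_params() doubles bits_per_pixel for native 4:2:0,
	 * so halve it again here to recover the target compressed bpp.
	 */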
	if (vdsc_cfg->native_420)
		vdsc_cfg->bits_per_pixel >>= 1;

	crtc_state->dsc.compressed_bpp_x16 = vdsc_cfg->bits_per_pixel;

	/* PPS 2 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 2);

	vdsc_cfg->pic_width = REG_FIELD_GET(DSC_PPS2_PIC_WIDTH_MASK, pps_temp) * num_vdsc_instances;
	vdsc_cfg->pic_height = REG_FIELD_GET(DSC_PPS2_PIC_HEIGHT_MASK, pps_temp);

	/* PPS 3 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 3);

	vdsc_cfg->slice_width = REG_FIELD_GET(DSC_PPS3_SLICE_WIDTH_MASK, pps_temp);
	vdsc_cfg->slice_height = REG_FIELD_GET(DSC_PPS3_SLICE_HEIGHT_MASK, pps_temp);

	/* PPS 4 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 4);

	vdsc_cfg->initial_dec_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_DEC_DELAY_MASK, pps_temp);
	vdsc_cfg->initial_xmit_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_XMIT_DELAY_MASK, pps_temp);

	/* PPS 5 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 5);

	vdsc_cfg->scale_decrement_interval = REG_FIELD_GET(DSC_PPS5_SCALE_DEC_INT_MASK, pps_temp);
	vdsc_cfg->scale_increment_interval = REG_FIELD_GET(DSC_PPS5_SCALE_INC_INT_MASK, pps_temp);

	/* PPS 6 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 6);

	vdsc_cfg->initial_scale_value = REG_FIELD_GET(DSC_PPS6_INITIAL_SCALE_VALUE_MASK, pps_temp);
	vdsc_cfg->first_line_bpg_offset = REG_FIELD_GET(DSC_PPS6_FIRST_LINE_BPG_OFFSET_MASK, pps_temp);
	vdsc_cfg->flatness_min_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MIN_QP_MASK, pps_temp);
	vdsc_cfg->flatness_max_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MAX_QP_MASK, pps_temp);

	/* PPS 7 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 7);

	vdsc_cfg->nfl_bpg_offset = REG_FIELD_GET(DSC_PPS7_NFL_BPG_OFFSET_MASK, pps_temp);
	vdsc_cfg->slice_bpg_offset = REG_FIELD_GET(DSC_PPS7_SLICE_BPG_OFFSET_MASK, pps_temp);

	/* PPS 8 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 8);

	vdsc_cfg->initial_offset = REG_FIELD_GET(DSC_PPS8_INITIAL_OFFSET_MASK, pps_temp);
	vdsc_cfg->final_offset = REG_FIELD_GET(DSC_PPS8_FINAL_OFFSET_MASK, pps_temp);

	/* PPS 9 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 9);

	vdsc_cfg->rc_model_size = REG_FIELD_GET(DSC_PPS9_RC_MODEL_SIZE_MASK, pps_temp);

	/* PPS 10 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 10);

	vdsc_cfg->rc_quant_incr_limit0 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT0_MASK, pps_temp);
	vdsc_cfg->rc_quant_incr_limit1 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT1_MASK, pps_temp);

	/* PPS 16 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 16);

	vdsc_cfg->slice_chunk_size = REG_FIELD_GET(DSC_PPS16_SLICE_CHUNK_SIZE_MASK, pps_temp);

	if (DISPLAY_VER(i915) >= 14) {
		/* PPS 17 */
		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 17);

		vdsc_cfg->second_line_bpg_offset = REG_FIELD_GET(DSC_PPS17_SL_BPG_OFFSET_MASK, pps_temp);

		/* PPS 18 */
		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 18);

		vdsc_cfg->nsl_bpg_offset = REG_FIELD_GET(DSC_PPS18_NSL_BPG_OFFSET_MASK, pps_temp);
		vdsc_cfg->second_line_offset_adj = REG_FIELD_GET(DSC_PPS18_SL_OFFSET_ADJ_MASK, pps_temp);
	}
}

void intel_dsc_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	u32 dss_ctl1, dss_ctl2;

	if (!intel_dsc_source_support(crtc_state))
		return;

	power_domain = intel_dsc_power_domain(crtc, cpu_transcoder);

	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return;

	dss_ctl1 = intel_de_read(dev_priv, dss_ctl1_reg(crtc, cpu_transcoder));
	dss_ctl2 = intel_de_read(dev_priv, dss_ctl2_reg(crtc, cpu_transcoder));

	crtc_state->dsc.compression_enable = dss_ctl2 & LEFT_BRANCH_VDSC_ENABLE;
	if (!crtc_state->dsc.compression_enable)
		goto out;

	crtc_state->dsc.dsc_split = (dss_ctl2 & RIGHT_BRANCH_VDSC_ENABLE) &&
		(dss_ctl1 & JOINER_ENABLE);

	intel_dsc_get_pps_config(crtc_state);
out:
	intel_display_power_put(dev_priv, power_domain, wakeref);
}