1 // SPDX-License-Identifier: MIT
2 /*
3 * Copyright © 2018 Intel Corporation
4 *
5 * Author: Gaurav K Singh <gaurav.k.singh@intel.com>
6 * Manasi Navare <manasi.d.navare@intel.com>
7 */
8 #include <linux/limits.h>
9
10 #include <drm/display/drm_dsc_helper.h>
11 #include <drm/drm_fixed.h>
12
13 #include "i915_drv.h"
14 #include "intel_crtc.h"
15 #include "intel_de.h"
16 #include "intel_display_types.h"
17 #include "intel_dp.h"
18 #include "intel_dsi.h"
19 #include "intel_qp_tables.h"
20 #include "intel_vdsc.h"
21 #include "intel_vdsc_regs.h"
22
intel_dsc_source_support(const struct intel_crtc_state * crtc_state)23 bool intel_dsc_source_support(const struct intel_crtc_state *crtc_state)
24 {
25 const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
26 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
27 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
28
29 if (!HAS_DSC(i915))
30 return false;
31
32 if (DISPLAY_VER(i915) == 11 && cpu_transcoder == TRANSCODER_A)
33 return false;
34
35 return true;
36 }
37
is_pipe_dsc(struct intel_crtc * crtc,enum transcoder cpu_transcoder)38 static bool is_pipe_dsc(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
39 {
40 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
41
42 if (DISPLAY_VER(i915) >= 12)
43 return true;
44
45 if (cpu_transcoder == TRANSCODER_EDP ||
46 cpu_transcoder == TRANSCODER_DSI_0 ||
47 cpu_transcoder == TRANSCODER_DSI_1)
48 return false;
49
50 /* There's no pipe A DSC engine on ICL */
51 drm_WARN_ON(&i915->drm, crtc->pipe == PIPE_A);
52
53 return true;
54 }
55
56 static void
intel_vdsc_set_min_max_qp(struct drm_dsc_config * vdsc_cfg,int buf,int bpp)57 intel_vdsc_set_min_max_qp(struct drm_dsc_config *vdsc_cfg, int buf,
58 int bpp)
59 {
60 int bpc = vdsc_cfg->bits_per_component;
61
62 /* Read range_minqp and range_max_qp from qp tables */
63 vdsc_cfg->rc_range_params[buf].range_min_qp =
64 intel_lookup_range_min_qp(bpc, buf, bpp, vdsc_cfg->native_420);
65 vdsc_cfg->rc_range_params[buf].range_max_qp =
66 intel_lookup_range_max_qp(bpc, buf, bpp, vdsc_cfg->native_420);
67 }
68
69 /*
70 * We are using the method provided in DSC 1.2a C-Model in codec_main.c
71 * Above method use a common formula to derive values for any combination of DSC
72 * variables. The formula approach may yield slight differences in the derived PPS
73 * parameters from the original parameter sets. These differences are not consequential
74 * to the coding performance because all parameter sets have been shown to produce
75 * visually lossless quality (provides the same PPS values as
76 * DSCParameterValuesVESA V1-2 spreadsheet).
77 */
static void
calculate_rc_params(struct drm_dsc_config *vdsc_cfg)
{
	/* Integer part of the compressed bpp (config stores x16 fixed point) */
	int bpp = fxp_q4_to_int(vdsc_cfg->bits_per_pixel);
	int bpc = vdsc_cfg->bits_per_component;
	/* QP tables are anchored at 8bpc; each extra 2 bpc shifts QPs by 2 */
	int qp_bpc_modifier = (bpc - 8) * 2;
	int uncompressed_bpg_rate;
	int first_line_bpg_offset;
	u32 res, buf_i, bpp_i;

	/* Extra bits-per-group budget for the first line of each slice */
	if (vdsc_cfg->slice_height >= 8)
		first_line_bpg_offset =
			12 + (9 * min(34, vdsc_cfg->slice_height - 8)) / 100;
	else
		first_line_bpg_offset = 2 * (vdsc_cfg->slice_height - 1);

	/* Clamp so the first-line budget never exceeds the uncompressed rate */
	uncompressed_bpg_rate = (3 * bpc + (vdsc_cfg->convert_rgb ? 0 : 2)) * 3;
	vdsc_cfg->first_line_bpg_offset = clamp(first_line_bpg_offset, 0,
						uncompressed_bpg_rate - 3 * bpp);

	/*
	 * According to DSC 1.2 spec in Section 4.1 if native_420 is set:
	 * -second_line_bpg_offset is 12 in general and equal to 2*(slice_height-1) if slice
	 * height < 8.
	 * -second_line_offset_adj is 512 as shown by empirical values to yield best chroma
	 * preservation in second line.
	 * -nsl_bpg_offset is calculated as second_line_offset/slice_height -1 then rounded
	 * up to 16 fractional bits, we left shift second line offset by 11 to preserve 11
	 * fractional bits.
	 */
	if (vdsc_cfg->native_420) {
		if (vdsc_cfg->slice_height >= 8)
			vdsc_cfg->second_line_bpg_offset = 12;
		else
			vdsc_cfg->second_line_bpg_offset =
				2 * (vdsc_cfg->slice_height - 1);

		vdsc_cfg->second_line_offset_adj = 512;
		vdsc_cfg->nsl_bpg_offset = DIV_ROUND_UP(vdsc_cfg->second_line_bpg_offset << 11,
							vdsc_cfg->slice_height - 1);
	}

	/* Our hw supports only 444 modes as of today */
	if (bpp >= 12)
		vdsc_cfg->initial_offset = 2048;
	else if (bpp >= 10)
		vdsc_cfg->initial_offset = 5632 - DIV_ROUND_UP(((bpp - 10) * 3584), 2);
	else if (bpp >= 8)
		vdsc_cfg->initial_offset = 6144 - DIV_ROUND_UP(((bpp - 8) * 512), 2);
	else
		vdsc_cfg->initial_offset = 6144;

	/* initial_xmit_delay = rc_model_size/2/compression_bpp */
	vdsc_cfg->initial_xmit_delay = DIV_ROUND_UP(DSC_RC_MODEL_SIZE_CONST, 2 * bpp);

	/* Flatness QP window, biased by bpc as per the C-model */
	vdsc_cfg->flatness_min_qp = 3 + qp_bpc_modifier;
	vdsc_cfg->flatness_max_qp = 12 + qp_bpc_modifier;

	vdsc_cfg->rc_quant_incr_limit0 = 11 + qp_bpc_modifier;
	vdsc_cfg->rc_quant_incr_limit1 = 11 + qp_bpc_modifier;

	if (vdsc_cfg->native_420) {
		/* range_bpg_offset anchor tables; interpolated between below */
		static const s8 ofs_und4[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
		};
		static const s8 ofs_und5[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und6[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und8[] = {
			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
		};
		/*
		 * For 420 format since bits_per_pixel (bpp) is set to target bpp * 2,
		 * QP table values for target bpp 4.0 to 4.4375 (rounded to 4.0) are
		 * actually for bpp 8 to 8.875 (rounded to 4.0 * 2 i.e 8).
		 * Similarly values for target bpp 4.5 to 4.8375 (rounded to 4.5)
		 * are for bpp 9 to 9.875 (rounded to 4.5 * 2 i.e 9), and so on.
		 */
		bpp_i = bpp - 8;
		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
			u8 range_bpg_offset;

			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);

			/* Calculate range_bpg_offset by interpolating anchors */
			if (bpp <= 8) {
				range_bpg_offset = ofs_und4[buf_i];
			} else if (bpp <= 10) {
				res = DIV_ROUND_UP(((bpp - 8) *
						    (ofs_und5[buf_i] - ofs_und4[buf_i])), 2);
				range_bpg_offset = ofs_und4[buf_i] + res;
			} else if (bpp <= 12) {
				res = DIV_ROUND_UP(((bpp - 10) *
						    (ofs_und6[buf_i] - ofs_und5[buf_i])), 2);
				range_bpg_offset = ofs_und5[buf_i] + res;
			} else if (bpp <= 16) {
				res = DIV_ROUND_UP(((bpp - 12) *
						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 4);
				range_bpg_offset = ofs_und6[buf_i] + res;
			} else {
				range_bpg_offset = ofs_und8[buf_i];
			}

			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
		}
	} else {
		/* fractional bpp part * 10000 (for precision up to 4 decimal places) */
		int fractional_bits = fxp_q4_to_frac(vdsc_cfg->bits_per_pixel);

		/* range_bpg_offset anchor tables for the 444 path */
		static const s8 ofs_und6[] = {
			0, -2, -2, -4, -6, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
		};
		static const s8 ofs_und8[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und12[] = {
			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
		};
		static const s8 ofs_und15[] = {
			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
		};

		/*
		 * QP table rows have values in increment of 0.5.
		 * So 6.0 bpp to 6.4375 will have index 0, 6.5 to 6.9375 will have index 1,
		 * and so on.
		 * 0.5 fractional part with 4 decimal precision becomes 5000
		 */
		bpp_i = ((bpp - 6) + (fractional_bits < 5000 ? 0 : 1));

		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
			u8 range_bpg_offset;

			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);

			/* Calculate range_bpg_offset by interpolating anchors */
			if (bpp <= 6) {
				range_bpg_offset = ofs_und6[buf_i];
			} else if (bpp <= 8) {
				res = DIV_ROUND_UP(((bpp - 6) *
						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 2);
				range_bpg_offset = ofs_und6[buf_i] + res;
			} else if (bpp <= 12) {
				range_bpg_offset = ofs_und8[buf_i];
			} else if (bpp <= 15) {
				res = DIV_ROUND_UP(((bpp - 12) *
						    (ofs_und15[buf_i] - ofs_und12[buf_i])), 3);
				range_bpg_offset = ofs_und12[buf_i] + res;
			} else {
				range_bpg_offset = ofs_und15[buf_i];
			}

			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
		}
	}
}
239
intel_dsc_slice_dimensions_valid(struct intel_crtc_state * pipe_config,struct drm_dsc_config * vdsc_cfg)240 static int intel_dsc_slice_dimensions_valid(struct intel_crtc_state *pipe_config,
241 struct drm_dsc_config *vdsc_cfg)
242 {
243 if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_RGB ||
244 pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) {
245 if (vdsc_cfg->slice_height > 4095)
246 return -EINVAL;
247 if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 15000)
248 return -EINVAL;
249 } else if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
250 if (vdsc_cfg->slice_width % 2)
251 return -EINVAL;
252 if (vdsc_cfg->slice_height % 2)
253 return -EINVAL;
254 if (vdsc_cfg->slice_height > 4094)
255 return -EINVAL;
256 if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 30000)
257 return -EINVAL;
258 }
259
260 return 0;
261 }
262
int intel_dsc_compute_params(struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct drm_dsc_config *vdsc_cfg = &pipe_config->dsc.config;
	/* Integer part of the compressed bpp (stored as x16 fixed point) */
	u16 compressed_bpp = fxp_q4_to_int(pipe_config->dsc.compressed_bpp_x16);
	int err;
	int ret;

	vdsc_cfg->pic_width = pipe_config->hw.adjusted_mode.crtc_hdisplay;
	/* Split the picture width evenly across the requested slice count */
	vdsc_cfg->slice_width = DIV_ROUND_UP(vdsc_cfg->pic_width,
					     pipe_config->dsc.slice_count);

	err = intel_dsc_slice_dimensions_valid(pipe_config, vdsc_cfg);

	if (err) {
		drm_dbg_kms(&dev_priv->drm, "Slice dimension requirements not met\n");
		return err;
	}

	/*
	 * According to DSC 1.2 specs if colorspace is YCbCr then convert_rgb is 0
	 * else 1
	 */
	vdsc_cfg->convert_rgb = pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR420 &&
		pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR444;

	/* Native 4:2:0 path is only taken on display version 14+ */
	if (DISPLAY_VER(dev_priv) >= 14 &&
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		vdsc_cfg->native_420 = true;
	/* We do not support YCbCr422 as of now */
	vdsc_cfg->native_422 = false;
	vdsc_cfg->simple_422 = false;
	/* Gen 11 does not support VBR */
	vdsc_cfg->vbr_enable = false;

	vdsc_cfg->bits_per_pixel = pipe_config->dsc.compressed_bpp_x16;

	/*
	 * According to DSC 1.2 specs in Section 4.1 if native_420 is set
	 * we need to double the current bpp.
	 */
	if (vdsc_cfg->native_420)
		vdsc_cfg->bits_per_pixel <<= 1;

	vdsc_cfg->bits_per_component = pipe_config->pipe_bpp / 3;

	if (vdsc_cfg->bits_per_component < 8) {
		drm_dbg_kms(&dev_priv->drm, "DSC bpc requirements not met bpc: %d\n",
			    vdsc_cfg->bits_per_component);
		return -EINVAL;
	}

	drm_dsc_set_rc_buf_thresh(vdsc_cfg);

	/*
	 * From XE_LPD onwards we support compression bpps in steps of 1
	 * up to uncompressed bpp-1, hence add calculations for all the rc
	 * parameters
	 */
	if (DISPLAY_VER(dev_priv) >= 13) {
		calculate_rc_params(vdsc_cfg);
	} else {
		/* Pre-SCR RC tables only cover these bpp/bpc combinations */
		if ((compressed_bpp == 8 ||
		     compressed_bpp == 12) &&
		    (vdsc_cfg->bits_per_component == 8 ||
		     vdsc_cfg->bits_per_component == 10 ||
		     vdsc_cfg->bits_per_component == 12))
			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_1_PRE_SCR);
		else
			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_2_444);

		if (ret)
			return ret;
	}

	/*
	 * BitsPerComponent value determines mux_word_size:
	 * When BitsPerComponent is less than or 10bpc, muxWordSize will be equal to
	 * 48 bits otherwise 64
	 */
	if (vdsc_cfg->bits_per_component <= 10)
		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_8_10_BPC;
	else
		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_12_BPC;

	/* InitialScaleValue is a 6 bit value with 3 fractional bits (U3.3) */
	vdsc_cfg->initial_scale_value = (vdsc_cfg->rc_model_size << 3) /
		(vdsc_cfg->rc_model_size - vdsc_cfg->initial_offset);

	return 0;
}
355
356 enum intel_display_power_domain
intel_dsc_power_domain(struct intel_crtc * crtc,enum transcoder cpu_transcoder)357 intel_dsc_power_domain(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
358 {
359 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
360 enum pipe pipe = crtc->pipe;
361
362 /*
363 * VDSC/joining uses a separate power well, PW2, and requires
364 * POWER_DOMAIN_TRANSCODER_VDSC_PW2 power domain in two cases:
365 *
366 * - ICL eDP/DSI transcoder
367 * - Display version 12 (except RKL) pipe A
368 *
369 * For any other pipe, VDSC/joining uses the power well associated with
370 * the pipe in use. Hence another reference on the pipe power domain
371 * will suffice. (Except no VDSC/joining on ICL pipe A.)
372 */
373 if (DISPLAY_VER(i915) == 12 && !IS_ROCKETLAKE(i915) && pipe == PIPE_A)
374 return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
375 else if (is_pipe_dsc(crtc, cpu_transcoder))
376 return POWER_DOMAIN_PIPE(pipe);
377 else
378 return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
379 }
380
intel_dsc_get_vdsc_per_pipe(const struct intel_crtc_state * crtc_state)381 static int intel_dsc_get_vdsc_per_pipe(const struct intel_crtc_state *crtc_state)
382 {
383 return crtc_state->dsc.num_streams;
384 }
385
int intel_dsc_get_num_vdsc_instances(const struct intel_crtc_state *crtc_state)
{
	/* Total instances = engines per pipe times the number of joined pipes. */
	return intel_dsc_get_vdsc_per_pipe(crtc_state) *
		intel_crtc_num_joined_pipes(crtc_state);
}
395
static void intel_dsc_get_pps_reg(const struct intel_crtc_state *crtc_state, int pps,
				  i915_reg_t *dsc_reg, int dsc_reg_num)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	bool pipe_dsc = is_pipe_dsc(crtc, cpu_transcoder);

	/* Only up to three engines (DSC0/DSC1/DSC2) are known here. */
	if (dsc_reg_num >= 4)
		MISSING_CASE(dsc_reg_num);

	/* Fill the PPS register for each requested engine, lowest first. */
	if (dsc_reg_num >= 1)
		dsc_reg[0] = pipe_dsc ? ICL_DSC0_PPS(pipe, pps) : DSCA_PPS(pps);
	if (dsc_reg_num >= 2)
		dsc_reg[1] = pipe_dsc ? ICL_DSC1_PPS(pipe, pps) : DSCC_PPS(pps);
	if (dsc_reg_num >= 3)
		dsc_reg[2] = BMG_DSC2_PPS(pipe, pps);
}
415
static void intel_dsc_pps_write(const struct intel_crtc_state *crtc_state,
				int pps, u32 pps_val)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	i915_reg_t dsc_reg[3];
	int vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
	int dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);
	int i;

	/* More engines than we have PPS registers for would be a driver bug. */
	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);

	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);

	/* Program the same PPS value into every engine on this pipe. */
	for (i = 0; i < dsc_reg_num; i++)
		intel_de_write(i915, dsc_reg[i], pps_val);
}
434
/*
 * Program the full set of DSC PPS, RC buffer threshold and RC range
 * parameter registers for every VDSC engine used by this crtc state.
 */
static void intel_dsc_pps_configure(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	u32 pps_val;
	u32 rc_buf_thresh_dword[4];
	u32 rc_range_params_dword[8];
	int i = 0;
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);

	/* PPS 0: version, bpc, line buffer depth and mode flags */
	pps_val = DSC_PPS0_VER_MAJOR(1) |
		DSC_PPS0_VER_MINOR(vdsc_cfg->dsc_version_minor) |
		DSC_PPS0_BPC(vdsc_cfg->bits_per_component) |
		DSC_PPS0_LINE_BUF_DEPTH(vdsc_cfg->line_buf_depth);
	if (vdsc_cfg->dsc_version_minor == 2) {
		/* DSC 1.2 adds the alternate ICH selection and native formats */
		pps_val |= DSC_PPS0_ALT_ICH_SEL;
		if (vdsc_cfg->native_420)
			pps_val |= DSC_PPS0_NATIVE_420_ENABLE;
		if (vdsc_cfg->native_422)
			pps_val |= DSC_PPS0_NATIVE_422_ENABLE;
	}
	if (vdsc_cfg->block_pred_enable)
		pps_val |= DSC_PPS0_BLOCK_PREDICTION;
	if (vdsc_cfg->convert_rgb)
		pps_val |= DSC_PPS0_COLOR_SPACE_CONVERSION;
	if (vdsc_cfg->simple_422)
		pps_val |= DSC_PPS0_422_ENABLE;
	if (vdsc_cfg->vbr_enable)
		pps_val |= DSC_PPS0_VBR_ENABLE;
	intel_dsc_pps_write(crtc_state, 0, pps_val);

	/* PPS 1: compressed bits per pixel */
	pps_val = DSC_PPS1_BPP(vdsc_cfg->bits_per_pixel);
	intel_dsc_pps_write(crtc_state, 1, pps_val);

	/* PPS 2: picture size; width is split across the VDSC instances */
	pps_val = DSC_PPS2_PIC_HEIGHT(vdsc_cfg->pic_height) |
		DSC_PPS2_PIC_WIDTH(vdsc_cfg->pic_width / num_vdsc_instances);
	intel_dsc_pps_write(crtc_state, 2, pps_val);

	/* PPS 3: slice dimensions */
	pps_val = DSC_PPS3_SLICE_HEIGHT(vdsc_cfg->slice_height) |
		DSC_PPS3_SLICE_WIDTH(vdsc_cfg->slice_width);
	intel_dsc_pps_write(crtc_state, 3, pps_val);

	/* PPS 4: transmission/decode delays */
	pps_val = DSC_PPS4_INITIAL_XMIT_DELAY(vdsc_cfg->initial_xmit_delay) |
		DSC_PPS4_INITIAL_DEC_DELAY(vdsc_cfg->initial_dec_delay);
	intel_dsc_pps_write(crtc_state, 4, pps_val);

	/* PPS 5: scale increment/decrement intervals */
	pps_val = DSC_PPS5_SCALE_INC_INT(vdsc_cfg->scale_increment_interval) |
		DSC_PPS5_SCALE_DEC_INT(vdsc_cfg->scale_decrement_interval);
	intel_dsc_pps_write(crtc_state, 5, pps_val);

	/* PPS 6: initial scale, first-line bpg offset and flatness QP window */
	pps_val = DSC_PPS6_INITIAL_SCALE_VALUE(vdsc_cfg->initial_scale_value) |
		DSC_PPS6_FIRST_LINE_BPG_OFFSET(vdsc_cfg->first_line_bpg_offset) |
		DSC_PPS6_FLATNESS_MIN_QP(vdsc_cfg->flatness_min_qp) |
		DSC_PPS6_FLATNESS_MAX_QP(vdsc_cfg->flatness_max_qp);
	intel_dsc_pps_write(crtc_state, 6, pps_val);

	/* PPS 7: slice/non-first-line bpg offsets */
	pps_val = DSC_PPS7_SLICE_BPG_OFFSET(vdsc_cfg->slice_bpg_offset) |
		DSC_PPS7_NFL_BPG_OFFSET(vdsc_cfg->nfl_bpg_offset);
	intel_dsc_pps_write(crtc_state, 7, pps_val);

	/* PPS 8: final/initial RC offsets */
	pps_val = DSC_PPS8_FINAL_OFFSET(vdsc_cfg->final_offset) |
		DSC_PPS8_INITIAL_OFFSET(vdsc_cfg->initial_offset);
	intel_dsc_pps_write(crtc_state, 8, pps_val);

	/* PPS 9: RC model size and (constant) edge factor */
	pps_val = DSC_PPS9_RC_MODEL_SIZE(vdsc_cfg->rc_model_size) |
		DSC_PPS9_RC_EDGE_FACTOR(DSC_RC_EDGE_FACTOR_CONST);
	intel_dsc_pps_write(crtc_state, 9, pps_val);

	/* PPS 10: RC quantization increment limits and target offsets */
	pps_val = DSC_PPS10_RC_QUANT_INC_LIMIT0(vdsc_cfg->rc_quant_incr_limit0) |
		DSC_PPS10_RC_QUANT_INC_LIMIT1(vdsc_cfg->rc_quant_incr_limit1) |
		DSC_PPS10_RC_TARGET_OFF_HIGH(DSC_RC_TGT_OFFSET_HI_CONST) |
		DSC_PPS10_RC_TARGET_OFF_LOW(DSC_RC_TGT_OFFSET_LO_CONST);
	intel_dsc_pps_write(crtc_state, 10, pps_val);

	/* PPS 16: chunk size and slices per line/frame */
	pps_val = DSC_PPS16_SLICE_CHUNK_SIZE(vdsc_cfg->slice_chunk_size) |
		DSC_PPS16_SLICE_PER_LINE((vdsc_cfg->pic_width / num_vdsc_instances) /
					 vdsc_cfg->slice_width) |
		DSC_PPS16_SLICE_ROW_PER_FRAME(vdsc_cfg->pic_height /
					      vdsc_cfg->slice_height);
	intel_dsc_pps_write(crtc_state, 16, pps_val);

	if (DISPLAY_VER(dev_priv) >= 14) {
		/* PPS 17: second-line bpg offset (native 4:2:0) */
		pps_val = DSC_PPS17_SL_BPG_OFFSET(vdsc_cfg->second_line_bpg_offset);
		intel_dsc_pps_write(crtc_state, 17, pps_val);

		/* PPS 18: non-second-line bpg offset and second-line adjustment */
		pps_val = DSC_PPS18_NSL_BPG_OFFSET(vdsc_cfg->nsl_bpg_offset) |
			DSC_PPS18_SL_OFFSET_ADJ(vdsc_cfg->second_line_offset_adj);
		intel_dsc_pps_write(crtc_state, 18, pps_val);
	}

	/* Populate the RC_BUF_THRESH registers: pack 4 thresholds per dword */
	memset(rc_buf_thresh_dword, 0, sizeof(rc_buf_thresh_dword));
	for (i = 0; i < DSC_NUM_BUF_RANGES - 1; i++)
		rc_buf_thresh_dword[i / 4] |=
			(u32)(vdsc_cfg->rc_buf_thresh[i] <<
			      BITS_PER_BYTE * (i % 4));
	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0,
			       rc_buf_thresh_dword[0]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0_UDW,
			       rc_buf_thresh_dword[1]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1,
			       rc_buf_thresh_dword[2]);
		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1_UDW,
			       rc_buf_thresh_dword[3]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0,
				       rc_buf_thresh_dword[0]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0_UDW,
				       rc_buf_thresh_dword[1]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1,
				       rc_buf_thresh_dword[2]);
			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1_UDW,
				       rc_buf_thresh_dword[3]);
		}
	} else {
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0(pipe),
			       rc_buf_thresh_dword[0]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0_UDW(pipe),
			       rc_buf_thresh_dword[1]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1(pipe),
			       rc_buf_thresh_dword[2]);
		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1_UDW(pipe),
			       rc_buf_thresh_dword[3]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_0(pipe),
				       rc_buf_thresh_dword[0]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_0_UDW(pipe),
				       rc_buf_thresh_dword[1]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_1(pipe),
				       rc_buf_thresh_dword[2]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_BUF_THRESH_1_UDW(pipe),
				       rc_buf_thresh_dword[3]);
		}
	}

	/* Populate the RC_RANGE_PARAMETERS registers: 2 ranges per dword */
	memset(rc_range_params_dword, 0, sizeof(rc_range_params_dword));
	for (i = 0; i < DSC_NUM_BUF_RANGES; i++)
		rc_range_params_dword[i / 2] |=
			(u32)(((vdsc_cfg->rc_range_params[i].range_bpg_offset <<
				RC_BPG_OFFSET_SHIFT) |
			       (vdsc_cfg->rc_range_params[i].range_max_qp <<
				RC_MAX_QP_SHIFT) |
			       (vdsc_cfg->rc_range_params[i].range_min_qp <<
				RC_MIN_QP_SHIFT)) << 16 * (i % 2));
	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0,
			       rc_range_params_dword[0]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0_UDW,
			       rc_range_params_dword[1]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1,
			       rc_range_params_dword[2]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1_UDW,
			       rc_range_params_dword[3]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2,
			       rc_range_params_dword[4]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2_UDW,
			       rc_range_params_dword[5]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3,
			       rc_range_params_dword[6]);
		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3_UDW,
			       rc_range_params_dword[7]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_0,
				       rc_range_params_dword[0]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_0_UDW,
				       rc_range_params_dword[1]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_1,
				       rc_range_params_dword[2]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_1_UDW,
				       rc_range_params_dword[3]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_2,
				       rc_range_params_dword[4]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_2_UDW,
				       rc_range_params_dword[5]);
			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_3,
				       rc_range_params_dword[6]);
			intel_de_write(dev_priv,
				       DSCC_RC_RANGE_PARAMETERS_3_UDW,
				       rc_range_params_dword[7]);
		}
	} else {
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_0(pipe),
			       rc_range_params_dword[0]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_0_UDW(pipe),
			       rc_range_params_dword[1]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_1(pipe),
			       rc_range_params_dword[2]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_1_UDW(pipe),
			       rc_range_params_dword[3]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_2(pipe),
			       rc_range_params_dword[4]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_2_UDW(pipe),
			       rc_range_params_dword[5]);
		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_3(pipe),
			       rc_range_params_dword[6]);
		intel_de_write(dev_priv,
			       ICL_DSC0_RC_RANGE_PARAMETERS_3_UDW(pipe),
			       rc_range_params_dword[7]);
		if (vdsc_instances_per_pipe > 1) {
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_0(pipe),
				       rc_range_params_dword[0]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_0_UDW(pipe),
				       rc_range_params_dword[1]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_1(pipe),
				       rc_range_params_dword[2]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_1_UDW(pipe),
				       rc_range_params_dword[3]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_2(pipe),
				       rc_range_params_dword[4]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_2_UDW(pipe),
				       rc_range_params_dword[5]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_3(pipe),
				       rc_range_params_dword[6]);
			intel_de_write(dev_priv,
				       ICL_DSC1_RC_RANGE_PARAMETERS_3_UDW(pipe),
				       rc_range_params_dword[7]);
		}
	}
}
691
intel_dsc_dsi_pps_write(struct intel_encoder * encoder,const struct intel_crtc_state * crtc_state)692 void intel_dsc_dsi_pps_write(struct intel_encoder *encoder,
693 const struct intel_crtc_state *crtc_state)
694 {
695 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
696 struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
697 struct mipi_dsi_device *dsi;
698 struct drm_dsc_picture_parameter_set pps;
699 enum port port;
700
701 if (!crtc_state->dsc.compression_enable)
702 return;
703
704 drm_dsc_pps_payload_pack(&pps, vdsc_cfg);
705
706 for_each_dsi_port(port, intel_dsi->ports) {
707 dsi = intel_dsi->dsi_hosts[port]->device;
708
709 mipi_dsi_picture_parameter_set(dsi, &pps);
710 mipi_dsi_compression_mode(dsi, true);
711 }
712 }
713
intel_dsc_dp_pps_write(struct intel_encoder * encoder,const struct intel_crtc_state * crtc_state)714 void intel_dsc_dp_pps_write(struct intel_encoder *encoder,
715 const struct intel_crtc_state *crtc_state)
716 {
717 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
718 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
719 struct drm_dsc_pps_infoframe dp_dsc_pps_sdp;
720
721 if (!crtc_state->dsc.compression_enable)
722 return;
723
724 /* Prepare DP SDP PPS header as per DP 1.4 spec, Table 2-123 */
725 drm_dsc_dp_pps_header_init(&dp_dsc_pps_sdp.pps_header);
726
727 /* Fill the PPS payload bytes as per DSC spec 1.2 Table 4-1 */
728 drm_dsc_pps_payload_pack(&dp_dsc_pps_sdp.pps_payload, vdsc_cfg);
729
730 dig_port->write_infoframe(encoder, crtc_state,
731 DP_SDP_PPS, &dp_dsc_pps_sdp,
732 sizeof(dp_dsc_pps_sdp));
733 }
734
dss_ctl1_reg(struct intel_crtc * crtc,enum transcoder cpu_transcoder)735 static i915_reg_t dss_ctl1_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
736 {
737 return is_pipe_dsc(crtc, cpu_transcoder) ?
738 ICL_PIPE_DSS_CTL1(crtc->pipe) : DSS_CTL1;
739 }
740
dss_ctl2_reg(struct intel_crtc * crtc,enum transcoder cpu_transcoder)741 static i915_reg_t dss_ctl2_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
742 {
743 return is_pipe_dsc(crtc, cpu_transcoder) ?
744 ICL_PIPE_DSS_CTL2(crtc->pipe) : DSS_CTL2;
745 }
746
intel_uncompressed_joiner_enable(const struct intel_crtc_state * crtc_state)747 void intel_uncompressed_joiner_enable(const struct intel_crtc_state *crtc_state)
748 {
749 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
750 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
751 u32 dss_ctl1_val = 0;
752
753 if (crtc_state->joiner_pipes && !crtc_state->dsc.compression_enable) {
754 if (intel_crtc_is_bigjoiner_secondary(crtc_state))
755 dss_ctl1_val |= UNCOMPRESSED_JOINER_SECONDARY;
756 else
757 dss_ctl1_val |= UNCOMPRESSED_JOINER_PRIMARY;
758
759 intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
760 }
761 }
762
intel_dsc_enable(const struct intel_crtc_state * crtc_state)763 void intel_dsc_enable(const struct intel_crtc_state *crtc_state)
764 {
765 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
766 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
767 u32 dss_ctl1_val = 0;
768 u32 dss_ctl2_val = 0;
769 int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
770
771 if (!crtc_state->dsc.compression_enable)
772 return;
773
774 intel_dsc_pps_configure(crtc_state);
775
776 dss_ctl2_val |= VDSC0_ENABLE;
777 if (vdsc_instances_per_pipe > 1) {
778 dss_ctl2_val |= VDSC1_ENABLE;
779 dss_ctl1_val |= JOINER_ENABLE;
780 }
781
782 if (vdsc_instances_per_pipe > 2) {
783 dss_ctl2_val |= VDSC2_ENABLE;
784 dss_ctl2_val |= SMALL_JOINER_CONFIG_3_ENGINES;
785 }
786
787 if (crtc_state->joiner_pipes) {
788 if (intel_crtc_ultrajoiner_enable_needed(crtc_state))
789 dss_ctl1_val |= ULTRA_JOINER_ENABLE;
790
791 if (intel_crtc_is_ultrajoiner_primary(crtc_state))
792 dss_ctl1_val |= PRIMARY_ULTRA_JOINER_ENABLE;
793
794 dss_ctl1_val |= BIG_JOINER_ENABLE;
795
796 if (intel_crtc_is_bigjoiner_primary(crtc_state))
797 dss_ctl1_val |= PRIMARY_BIG_JOINER_ENABLE;
798 }
799 intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
800 intel_de_write(dev_priv, dss_ctl2_reg(crtc, crtc_state->cpu_transcoder), dss_ctl2_val);
801 }
802
intel_dsc_disable(const struct intel_crtc_state * old_crtc_state)803 void intel_dsc_disable(const struct intel_crtc_state *old_crtc_state)
804 {
805 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
806 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
807
808 /* Disable only if either of them is enabled */
809 if (old_crtc_state->dsc.compression_enable ||
810 old_crtc_state->joiner_pipes) {
811 intel_de_write(dev_priv, dss_ctl1_reg(crtc, old_crtc_state->cpu_transcoder), 0);
812 intel_de_write(dev_priv, dss_ctl2_reg(crtc, old_crtc_state->cpu_transcoder), 0);
813 }
814 }
815
intel_dsc_pps_read(struct intel_crtc_state * crtc_state,int pps,bool * all_equal)816 static u32 intel_dsc_pps_read(struct intel_crtc_state *crtc_state, int pps,
817 bool *all_equal)
818 {
819 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
820 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
821 i915_reg_t dsc_reg[3];
822 int i, vdsc_per_pipe, dsc_reg_num;
823 u32 val;
824
825 vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
826 dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);
827
828 drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);
829
830 intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);
831
832 *all_equal = true;
833
834 val = intel_de_read(i915, dsc_reg[0]);
835
836 for (i = 1; i < dsc_reg_num; i++) {
837 if (intel_de_read(i915, dsc_reg[i]) != val) {
838 *all_equal = false;
839 break;
840 }
841 }
842
843 return val;
844 }
845
intel_dsc_pps_read_and_verify(struct intel_crtc_state * crtc_state,int pps)846 static u32 intel_dsc_pps_read_and_verify(struct intel_crtc_state *crtc_state, int pps)
847 {
848 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
849 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
850 u32 val;
851 bool all_equal;
852
853 val = intel_dsc_pps_read(crtc_state, pps, &all_equal);
854 drm_WARN_ON(&i915->drm, !all_equal);
855
856 return val;
857 }
858
/*
 * intel_dsc_get_pps_config - decode the hardware PPS registers into the
 * generic DSC config for state readout
 * @crtc_state: state whose dsc.config and dsc.compressed_bpp_x16 get filled
 *
 * Each PPS word is read via intel_dsc_pps_read_and_verify(), which also
 * warns if the per-engine copies of the register disagree.
 */
static void intel_dsc_get_pps_config(struct intel_crtc_state *crtc_state)
{
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	u32 pps_temp;

	/* PPS 0 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 0);

	vdsc_cfg->bits_per_component = REG_FIELD_GET(DSC_PPS0_BPC_MASK, pps_temp);
	vdsc_cfg->line_buf_depth = REG_FIELD_GET(DSC_PPS0_LINE_BUF_DEPTH_MASK, pps_temp);
	vdsc_cfg->block_pred_enable = pps_temp & DSC_PPS0_BLOCK_PREDICTION;
	vdsc_cfg->convert_rgb = pps_temp & DSC_PPS0_COLOR_SPACE_CONVERSION;
	vdsc_cfg->simple_422 = pps_temp & DSC_PPS0_422_ENABLE;
	vdsc_cfg->native_422 = pps_temp & DSC_PPS0_NATIVE_422_ENABLE;
	vdsc_cfg->native_420 = pps_temp & DSC_PPS0_NATIVE_420_ENABLE;
	vdsc_cfg->vbr_enable = pps_temp & DSC_PPS0_VBR_ENABLE;

	/* PPS 1 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 1);

	vdsc_cfg->bits_per_pixel = REG_FIELD_GET(DSC_PPS1_BPP_MASK, pps_temp);

	/*
	 * The register holds a doubled bpp value in native 4:2:0 mode
	 * (the inverse of the doubling applied when programming it).
	 */
	if (vdsc_cfg->native_420)
		vdsc_cfg->bits_per_pixel >>= 1;

	/* bits_per_pixel is in .4 binary fixed point, i.e. already x16 */
	crtc_state->dsc.compressed_bpp_x16 = vdsc_cfg->bits_per_pixel;

	/* PPS 2 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 2);

	/* Each engine is programmed with its share of the total picture width */
	vdsc_cfg->pic_width = REG_FIELD_GET(DSC_PPS2_PIC_WIDTH_MASK, pps_temp) * num_vdsc_instances;
	vdsc_cfg->pic_height = REG_FIELD_GET(DSC_PPS2_PIC_HEIGHT_MASK, pps_temp);

	/* PPS 3 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 3);

	vdsc_cfg->slice_width = REG_FIELD_GET(DSC_PPS3_SLICE_WIDTH_MASK, pps_temp);
	vdsc_cfg->slice_height = REG_FIELD_GET(DSC_PPS3_SLICE_HEIGHT_MASK, pps_temp);

	/* PPS 4 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 4);

	vdsc_cfg->initial_dec_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_DEC_DELAY_MASK, pps_temp);
	vdsc_cfg->initial_xmit_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_XMIT_DELAY_MASK, pps_temp);

	/* PPS 5 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 5);

	vdsc_cfg->scale_decrement_interval = REG_FIELD_GET(DSC_PPS5_SCALE_DEC_INT_MASK, pps_temp);
	vdsc_cfg->scale_increment_interval = REG_FIELD_GET(DSC_PPS5_SCALE_INC_INT_MASK, pps_temp);

	/* PPS 6 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 6);

	vdsc_cfg->initial_scale_value = REG_FIELD_GET(DSC_PPS6_INITIAL_SCALE_VALUE_MASK, pps_temp);
	vdsc_cfg->first_line_bpg_offset = REG_FIELD_GET(DSC_PPS6_FIRST_LINE_BPG_OFFSET_MASK, pps_temp);
	vdsc_cfg->flatness_min_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MIN_QP_MASK, pps_temp);
	vdsc_cfg->flatness_max_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MAX_QP_MASK, pps_temp);

	/* PPS 7 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 7);

	vdsc_cfg->nfl_bpg_offset = REG_FIELD_GET(DSC_PPS7_NFL_BPG_OFFSET_MASK, pps_temp);
	vdsc_cfg->slice_bpg_offset = REG_FIELD_GET(DSC_PPS7_SLICE_BPG_OFFSET_MASK, pps_temp);

	/* PPS 8 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 8);

	vdsc_cfg->initial_offset = REG_FIELD_GET(DSC_PPS8_INITIAL_OFFSET_MASK, pps_temp);
	vdsc_cfg->final_offset = REG_FIELD_GET(DSC_PPS8_FINAL_OFFSET_MASK, pps_temp);

	/* PPS 9 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 9);

	vdsc_cfg->rc_model_size = REG_FIELD_GET(DSC_PPS9_RC_MODEL_SIZE_MASK, pps_temp);

	/* PPS 10 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 10);

	vdsc_cfg->rc_quant_incr_limit0 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT0_MASK, pps_temp);
	vdsc_cfg->rc_quant_incr_limit1 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT1_MASK, pps_temp);

	/* PPS 16 */
	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 16);

	vdsc_cfg->slice_chunk_size = REG_FIELD_GET(DSC_PPS16_SLICE_CHUNK_SIZE_MASK, pps_temp);

	/* PPS 17 and 18 are only present on display version 14+ */
	if (DISPLAY_VER(i915) >= 14) {
		/* PPS 17 */
		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 17);

		vdsc_cfg->second_line_bpg_offset = REG_FIELD_GET(DSC_PPS17_SL_BPG_OFFSET_MASK, pps_temp);

		/* PPS 18 */
		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 18);

		vdsc_cfg->nsl_bpg_offset = REG_FIELD_GET(DSC_PPS18_NSL_BPG_OFFSET_MASK, pps_temp);
		vdsc_cfg->second_line_offset_adj = REG_FIELD_GET(DSC_PPS18_SL_OFFSET_ADJ_MASK, pps_temp);
	}
}
962
intel_dsc_get_config(struct intel_crtc_state * crtc_state)963 void intel_dsc_get_config(struct intel_crtc_state *crtc_state)
964 {
965 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
966 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
967 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
968 enum intel_display_power_domain power_domain;
969 intel_wakeref_t wakeref;
970 u32 dss_ctl1, dss_ctl2;
971
972 if (!intel_dsc_source_support(crtc_state))
973 return;
974
975 power_domain = intel_dsc_power_domain(crtc, cpu_transcoder);
976
977 wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
978 if (!wakeref)
979 return;
980
981 dss_ctl1 = intel_de_read(dev_priv, dss_ctl1_reg(crtc, cpu_transcoder));
982 dss_ctl2 = intel_de_read(dev_priv, dss_ctl2_reg(crtc, cpu_transcoder));
983
984 crtc_state->dsc.compression_enable = dss_ctl2 & VDSC0_ENABLE;
985 if (!crtc_state->dsc.compression_enable)
986 goto out;
987
988 if (dss_ctl1 & JOINER_ENABLE && dss_ctl2 & (VDSC2_ENABLE | SMALL_JOINER_CONFIG_3_ENGINES))
989 crtc_state->dsc.num_streams = 3;
990 else if (dss_ctl1 & JOINER_ENABLE && dss_ctl2 & VDSC1_ENABLE)
991 crtc_state->dsc.num_streams = 2;
992 else
993 crtc_state->dsc.num_streams = 1;
994
995 intel_dsc_get_pps_config(crtc_state);
996 out:
997 intel_display_power_put(dev_priv, power_domain, wakeref);
998 }
999
intel_vdsc_dump_state(struct drm_printer * p,int indent,const struct intel_crtc_state * crtc_state)1000 static void intel_vdsc_dump_state(struct drm_printer *p, int indent,
1001 const struct intel_crtc_state *crtc_state)
1002 {
1003 drm_printf_indent(p, indent,
1004 "dsc-dss: compressed-bpp:" FXP_Q4_FMT ", slice-count: %d, num_streams: %d\n",
1005 FXP_Q4_ARGS(crtc_state->dsc.compressed_bpp_x16),
1006 crtc_state->dsc.slice_count,
1007 crtc_state->dsc.num_streams);
1008 }
1009
intel_vdsc_state_dump(struct drm_printer * p,int indent,const struct intel_crtc_state * crtc_state)1010 void intel_vdsc_state_dump(struct drm_printer *p, int indent,
1011 const struct intel_crtc_state *crtc_state)
1012 {
1013 if (!crtc_state->dsc.compression_enable)
1014 return;
1015
1016 intel_vdsc_dump_state(p, indent, crtc_state);
1017 drm_dsc_dump_config(p, indent, &crtc_state->dsc.config);
1018 }
1019
/*
 * intel_vdsc_min_cdclk - minimum CDCLK required by the DSC engines
 * @crtc_state: state to compute the requirement for
 *
 * Returns 0 when compression is disabled, otherwise the minimum CDCLK
 * (in the same units as crtc_state->pixel_rate) needed to feed the
 * enabled VDSC engines, including the joiner bandwidth constraint.
 */
int intel_vdsc_min_cdclk(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct intel_display *display = to_intel_display(crtc);
	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
	int min_cdclk;

	if (!crtc_state->dsc.compression_enable)
		return 0;

	/*
	 * When we decide to use only one VDSC engine, since
	 * each VDSC operates with 1 ppc throughput, pixel clock
	 * cannot be higher than the VDSC clock (cdclk).
	 * If there are 2 VDSC engines, then pixel clock can't be
	 * higher than VDSC clock (cdclk) * 2 and so on.
	 */
	min_cdclk = DIV_ROUND_UP(crtc_state->pixel_rate, num_vdsc_instances);

	if (crtc_state->joiner_pipes) {
		int pixel_clock = intel_dp_mode_to_fec_clock(crtc_state->hw.adjusted_mode.clock);

		/*
		 * According to Bigjoiner bw check:
		 * compressed_bpp <= PPC * CDCLK * Big joiner Interface bits / Pixel clock
		 *
		 * We have already computed compressed_bpp, so now compute the min CDCLK that
		 * is required to support this compressed_bpp.
		 *
		 * => CDCLK >= compressed_bpp * Pixel clock / (PPC * Bigjoiner Interface bits)
		 *
		 * Since PPC = 2 with bigjoiner
		 * => CDCLK >= compressed_bpp * Pixel clock / (2 * Bigjoiner Interface bits)
		 */
		int bigjoiner_interface_bits = DISPLAY_VER(display) >= 14 ? 36 : 24;
		int min_cdclk_bj =
			(fxp_q4_to_int_roundup(crtc_state->dsc.compressed_bpp_x16) *
			 pixel_clock) / (2 * bigjoiner_interface_bits);

		/* The stricter of the per-engine and joiner constraints wins */
		min_cdclk = max(min_cdclk, min_cdclk_bj);
	}

	return min_cdclk;
}
1064