1 /*
2 * Copyright © 2008 Intel Corporation
3 * 2014 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
14 * Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
22 * IN THE SOFTWARE.
23 *
24 */
25
26 #include <linux/log2.h>
27 #include <linux/math.h>
28
29 #include <drm/drm_atomic.h>
30 #include <drm/drm_atomic_helper.h>
31 #include <drm/drm_edid.h>
32 #include <drm/drm_fixed.h>
33 #include <drm/drm_print.h>
34 #include <drm/drm_probe_helper.h>
35
36 #include "i915_utils.h"
37 #include "intel_atomic.h"
38 #include "intel_audio.h"
39 #include "intel_connector.h"
40 #include "intel_crtc.h"
41 #include "intel_ddi.h"
42 #include "intel_de.h"
43 #include "intel_display_driver.h"
44 #include "intel_display_regs.h"
45 #include "intel_display_types.h"
46 #include "intel_dp.h"
47 #include "intel_dp_hdcp.h"
48 #include "intel_dp_link_training.h"
49 #include "intel_dp_mst.h"
50 #include "intel_dp_test.h"
51 #include "intel_dp_tunnel.h"
52 #include "intel_dpio_phy.h"
53 #include "intel_hdcp.h"
54 #include "intel_hotplug.h"
55 #include "intel_link_bw.h"
56 #include "intel_pfit.h"
57 #include "intel_psr.h"
58 #include "intel_step.h"
59 #include "intel_vdsc.h"
60 #include "intel_vrr.h"
61 #include "skl_scaler.h"
62
63 /*
64 * DP MST (DisplayPort Multi-Stream Transport)
65 *
66 * MST support on the source depends on the platform and port. DP initialization
67 * sets up MST for each MST capable encoder. This will become the primary
68 * encoder for the port.
69 *
70 * MST initialization of each primary encoder creates MST stream encoders, one
71 * per pipe, and initializes the MST topology manager. The MST stream encoders
72 * are sometimes called "fake encoders", because they're virtual, not
73 * physical. Thus there are (number of MST capable ports) x (number of pipes)
74 * MST stream encoders in total.
75 *
76 * Decision to use MST for a sink happens at detect on the connector attached to
77 * the primary encoder, and this will not change while the sink is connected. We
78 * always use MST when possible, including for SST sinks with sideband messaging
79 * support.
80 *
81 * The connectors for the MST streams are added and removed dynamically by the
82 * topology manager. Their connection status is also determined by the topology
83 * manager.
84 *
85 * On hardware, each transcoder may be associated with a single DDI
86 * port. Multiple transcoders may be associated with the same DDI port only if
87 * the port is in MST mode.
88 *
89 * On TGL+, all the transcoders streaming on the same DDI port will indicate a
90 * primary transcoder; the TGL_DP_TP_CTL and TGL_DP_TP_STATUS registers are
91 * relevant only on the primary transcoder. Prior to that, they are port
92 * registers.
93 */
94
95 /* From fake MST stream encoder to primary encoder */
to_primary_encoder(struct intel_encoder * encoder)96 static struct intel_encoder *to_primary_encoder(struct intel_encoder *encoder)
97 {
98 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
99 struct intel_digital_port *dig_port = intel_mst->primary;
100
101 return &dig_port->base;
102 }
103
104 /* From fake MST stream encoder to primary DP */
to_primary_dp(struct intel_encoder * encoder)105 static struct intel_dp *to_primary_dp(struct intel_encoder *encoder)
106 {
107 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
108 struct intel_digital_port *dig_port = intel_mst->primary;
109
110 return &dig_port->dp;
111 }
112
/* Number of MST streams currently enabled on this DP link. */
int intel_dp_mst_active_streams(struct intel_dp *intel_dp)
{
	return intel_dp->mst.active_streams;
}
117
intel_dp_mst_dec_active_streams(struct intel_dp * intel_dp)118 static bool intel_dp_mst_dec_active_streams(struct intel_dp *intel_dp)
119 {
120 struct intel_display *display = to_intel_display(intel_dp);
121
122 drm_dbg_kms(display->drm, "active MST streams %d -> %d\n",
123 intel_dp->mst.active_streams, intel_dp->mst.active_streams - 1);
124
125 if (drm_WARN_ON(display->drm, intel_dp->mst.active_streams == 0))
126 return true;
127
128 return --intel_dp->mst.active_streams == 0;
129 }
130
intel_dp_mst_inc_active_streams(struct intel_dp * intel_dp)131 static bool intel_dp_mst_inc_active_streams(struct intel_dp *intel_dp)
132 {
133 struct intel_display *display = to_intel_display(intel_dp);
134
135 drm_dbg_kms(display->drm, "active MST streams %d -> %d\n",
136 intel_dp->mst.active_streams, intel_dp->mst.active_streams + 1);
137
138 return intel_dp->mst.active_streams++ == 0;
139 }
140
141 /* TODO: return a bpp_x16 value */
static int intel_dp_mst_max_dpt_bpp(const struct intel_crtc_state *crtc_state,
				    bool dsc)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	u64 available_bw;

	/* The DPT limit applies only to DSC streams on pre-LNL UHBR links. */
	if (!dsc || !intel_dp_is_uhbr(crtc_state) || DISPLAY_VER(display) >= 20)
		return 0;

	/*
	 * DSC->DPT interface width:
	 *   ICL-MTL: 72 bits (each branch has 72 bits, only left branch is used)
	 *   LNL+:    144 bits (not a bottleneck in any config)
	 *
	 * Bspec/49259 suggests that the FEC overhead needs to be
	 * applied here, though HW people claim that neither this FEC
	 * or any other overhead is applicable here (that is the actual
	 * available_bw is just symbol_clock * 72). However based on
	 * testing on MTL-P the
	 * - DELL U3224KBA display
	 * - Unigraf UCD-500 CTS test sink
	 * devices the
	 * - 5120x2880/995.59Mhz
	 * - 6016x3384/1357.23Mhz
	 * - 6144x3456/1413.39Mhz
	 * modes (all the ones having a DPT limit on the above devices),
	 * both the channel coding efficiency and an additional 3%
	 * overhead needs to be accounted for.
	 */
	available_bw = mul_u32_u32(intel_dp_link_symbol_clock(crtc_state->port_clock) * 72,
				   drm_dp_bw_channel_coding_efficiency(true));

	/* The 1030000 factor folds in the pixel clock kHz scale and the 3% overhead. */
	return div64_u64(available_bw,
			 mul_u32_u32(adjusted_mode->crtc_clock, 1030000));
}
176
/*
 * Compute the link BW overhead (in 1ppm units) for this stream, including
 * the MST framing, FEC, DSC and optionally the SSC downspread overheads.
 */
static int intel_dp_mst_bw_overhead(const struct intel_crtc_state *crtc_state,
				    bool ssc, int dsc_slice_count, int bpp_x16)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	unsigned long flags = DRM_DP_BW_OVERHEAD_MST;
	int overhead;

	if (intel_dp_is_uhbr(crtc_state))
		flags |= DRM_DP_BW_OVERHEAD_UHBR;

	if (ssc)
		flags |= DRM_DP_BW_OVERHEAD_SSC_REF_CLK;

	if (crtc_state->fec_enable)
		flags |= DRM_DP_BW_OVERHEAD_FEC;

	if (dsc_slice_count)
		flags |= DRM_DP_BW_OVERHEAD_DSC;

	overhead = drm_dp_bw_overhead(crtc_state->lane_count,
				      adjusted_mode->hdisplay,
				      dsc_slice_count,
				      bpp_x16,
				      flags);

	/*
	 * TODO: clarify whether a minimum required by the fixed FEC overhead
	 * in the bspec audio programming sequence is required here.
	 */
	return max(overhead, intel_dp_bw_fec_overhead(crtc_state->fec_enable));
}
204
/*
 * Compute the data/link M/N values for the stream, applying the given BW
 * overhead, and derive the TU allocation from the resulting data M/N ratio.
 */
static void intel_dp_mst_compute_m_n(const struct intel_crtc_state *crtc_state,
				     int overhead,
				     int bpp_x16,
				     struct intel_link_m_n *m_n)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/* TODO: Check WA 14013163432 to set data M/N for full BW utilization. */
	intel_link_compute_m_n(bpp_x16, crtc_state->lane_count,
			       adjusted_mode->crtc_clock,
			       crtc_state->port_clock,
			       overhead,
			       m_n);

	/* TU count = ceil(64 * data_m / data_n), i.e. slots out of 64 per MTP. */
	m_n->tu = DIV_ROUND_UP_ULL(mul_u32_u32(m_n->data_m, 64), m_n->data_n);
}
222
/* Convert the stream's effective data rate to PBN units (54/64 MBps each). */
static int intel_dp_mst_calc_pbn(int pixel_clock, int bpp_x16, int bw_overhead)
{
	int data_rate = intel_dp_effective_data_rate(pixel_clock, bpp_x16,
						     bw_overhead);

	/*
	 * TODO: Use drm_dp_calc_pbn_mode() instead, once it's converted
	 * to calculate PBN with the BW overhead passed to it.
	 */
	return DIV_ROUND_UP(data_rate * 64, 54 * 1000);
}
234
intel_dp_mst_dsc_get_slice_count(const struct intel_connector * connector,const struct intel_crtc_state * crtc_state)235 static int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector,
236 const struct intel_crtc_state *crtc_state)
237 {
238 const struct drm_display_mode *adjusted_mode =
239 &crtc_state->hw.adjusted_mode;
240 int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);
241
242 return intel_dp_dsc_get_slice_count(connector,
243 adjusted_mode->clock,
244 adjusted_mode->hdisplay,
245 num_joined_pipes);
246 }
247
mst_stream_update_slots(const struct intel_crtc_state * crtc_state,struct drm_dp_mst_topology_state * topology_state)248 static void mst_stream_update_slots(const struct intel_crtc_state *crtc_state,
249 struct drm_dp_mst_topology_state *topology_state)
250 {
251 u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
252 DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;
253
254 drm_dp_mst_update_slots(topology_state, link_coding_cap);
255 }
256
/*
 * Find a bpp in [min_bpp_x16, max_bpp_x16] (stepping down by bpp_step_x16)
 * for which the stream's payload fits the link, computing the corresponding
 * M/N values and TU allocation. Used both for MST streams and for 128b/132b
 * SST (is_mst == false). Returns 0 on success, or a negative error code
 * (-EDEADLK must be propagated for atomic backoff).
 */
int intel_dp_mtp_tu_compute_config(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state,
				   struct drm_connector_state *conn_state,
				   int min_bpp_x16, int max_bpp_x16, int bpp_step_x16, bool dsc)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_atomic_state *state = crtc_state->uapi.state;
	struct drm_dp_mst_topology_state *mst_state = NULL;
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int bpp_x16, slots = -EINVAL;
	int dsc_slice_count = 0;
	int max_dpt_bpp_x16;

	/* shouldn't happen, sanity check: only DSC may use fractional bpp */
	drm_WARN_ON(display->drm, !dsc && (fxp_q4_to_frac(min_bpp_x16) ||
					   fxp_q4_to_frac(max_bpp_x16) ||
					   fxp_q4_to_frac(bpp_step_x16)));

	if (!bpp_step_x16) {
		/* Allow using zero step only to indicate single try for a given bpp. */
		drm_WARN_ON(display->drm, min_bpp_x16 != max_bpp_x16);
		bpp_step_x16 = 1;
	}

	if (is_mst) {
		mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst.mgr);
		if (IS_ERR(mst_state))
			return PTR_ERR(mst_state);

		/* PBN per time slot for the link rate/width being tried */
		mst_state->pbn_div = drm_dp_get_vc_payload_bw(crtc_state->port_clock,
							      crtc_state->lane_count);

		mst_stream_update_slots(crtc_state, mst_state);
	}

	if (dsc) {
		if (!intel_dp_supports_fec(intel_dp, connector, crtc_state))
			return -EINVAL;

		/* FEC is required for DSC, except on UHBR (128b/132b) links. */
		crtc_state->fec_enable = !intel_dp_is_uhbr(crtc_state);
	}

	/* Clamp the bpp range by the DSC->DPT interface BW limit, if any. */
	max_dpt_bpp_x16 = fxp_q4_from_int(intel_dp_mst_max_dpt_bpp(crtc_state, dsc));
	if (max_dpt_bpp_x16 && max_bpp_x16 > max_dpt_bpp_x16) {
		drm_dbg_kms(display->drm, "Limiting bpp to max DPT bpp (" FXP_Q4_FMT " -> " FXP_Q4_FMT ")\n",
			    FXP_Q4_ARGS(max_bpp_x16), FXP_Q4_ARGS(max_dpt_bpp_x16));
		max_bpp_x16 = max_dpt_bpp_x16;
	}

	drm_dbg_kms(display->drm, "Looking for slots in range min bpp " FXP_Q4_FMT " max bpp " FXP_Q4_FMT "\n",
		    FXP_Q4_ARGS(min_bpp_x16), FXP_Q4_ARGS(max_bpp_x16));

	if (dsc) {
		dsc_slice_count = intel_dp_mst_dsc_get_slice_count(connector, crtc_state);
		if (!dsc_slice_count) {
			drm_dbg_kms(display->drm, "Can't get valid DSC slice count\n");

			return -ENOSPC;
		}
	}

	/* The loop below relies on the bpp limits being multiples of the step. */
	drm_WARN_ON(display->drm, min_bpp_x16 % bpp_step_x16 || max_bpp_x16 % bpp_step_x16);

	/* Try bpps from highest to lowest until the stream fits the link. */
	for (bpp_x16 = max_bpp_x16; bpp_x16 >= min_bpp_x16; bpp_x16 -= bpp_step_x16) {
		int local_bw_overhead;
		int link_bpp_x16;

		drm_dbg_kms(display->drm, "Trying bpp " FXP_Q4_FMT "\n", FXP_Q4_ARGS(bpp_x16));

		if (dsc && !intel_dp_dsc_valid_compressed_bpp(intel_dp, bpp_x16)) {
			/* SST must have validated the single bpp tried here already earlier. */
			drm_WARN_ON(display->drm, !is_mst);
			continue;
		}

		/* For non-DSC, convert pipe bpp to the bpp sent on the link. */
		link_bpp_x16 = dsc ? bpp_x16 :
			fxp_q4_from_int(intel_dp_output_bpp(crtc_state->output_format,
							    fxp_q4_to_int(bpp_x16)));

		/* Local (first) link overhead: no SSC downspread accounted. */
		local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
							     false, dsc_slice_count, link_bpp_x16);

		intel_dp_mst_compute_m_n(crtc_state,
					 local_bw_overhead,
					 link_bpp_x16,
					 &crtc_state->dp_m_n);

		if (is_mst) {
			int remote_bw_overhead;
			int remote_tu;
			fixed20_12 pbn;

			/* Remote links may use SSC, so account for its overhead. */
			remote_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
								      true, dsc_slice_count, link_bpp_x16);

			/*
			 * The TU size programmed to the HW determines which slots in
			 * an MTP frame are used for this stream, which needs to match
			 * the payload size programmed to the first downstream branch
			 * device's payload table.
			 *
			 * Note that atm the payload's PBN value DRM core sends via
			 * the ALLOCATE_PAYLOAD side-band message matches the payload
			 * size (which it calculates from the PBN value) it programs
			 * to the first branch device's payload table. The allocation
			 * in the payload table could be reduced though (to
			 * crtc_state->dp_m_n.tu), provided that the driver doesn't
			 * enable SSC on the corresponding link.
			 */
			pbn.full = dfixed_const(intel_dp_mst_calc_pbn(adjusted_mode->crtc_clock,
								      link_bpp_x16,
								      remote_bw_overhead));
			remote_tu = DIV_ROUND_UP(pbn.full, mst_state->pbn_div.full);

			/*
			 * Aligning the TUs ensures that symbols consisting of multiple
			 * (4) symbol cycles don't get split between two consecutive
			 * MTPs, as required by Bspec.
			 * TODO: remove the alignment restriction for 128b/132b links
			 * on some platforms, where Bspec allows this.
			 */
			remote_tu = ALIGN(remote_tu, 4 / crtc_state->lane_count);

			/*
			 * Also align PBNs accordingly, since MST core will derive its
			 * own copy of TU from the PBN in drm_dp_atomic_find_time_slots().
			 * The above comment about the difference between the PBN
			 * allocated for the whole path and the TUs allocated for the
			 * first branch device's link also applies here.
			 */
			pbn.full = remote_tu * mst_state->pbn_div.full;

			drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu);
			crtc_state->dp_m_n.tu = remote_tu;

			slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst.mgr,
							      connector->mst.port,
							      dfixed_trunc(pbn));

			/* TODO: Check this already in drm_dp_atomic_find_time_slots(). */
			if (slots > mst_state->total_avail_slots)
				slots = -EINVAL;
		} else {
			/* Same as above for remote_tu */
			crtc_state->dp_m_n.tu = ALIGN(crtc_state->dp_m_n.tu,
						      4 / crtc_state->lane_count);

			/* 128b/132b SST: at most 64 slots in an MTP frame. */
			if (crtc_state->dp_m_n.tu <= 64)
				slots = crtc_state->dp_m_n.tu;
			else
				slots = -EINVAL;
		}

		/* Atomic backoff must be propagated immediately. */
		if (slots == -EDEADLK)
			return slots;

		if (slots >= 0) {
			drm_WARN_ON(display->drm, slots != crtc_state->dp_m_n.tu);

			break;
		}
	}

	if (slots < 0) {
		drm_dbg_kms(display->drm, "failed finding vcpi slots:%d\n",
			    slots);
		return slots;
	}

	/* Record the bpp that fit: uncompressed pipe bpp or DSC compressed bpp. */
	if (!dsc)
		crtc_state->pipe_bpp = fxp_q4_to_int(bpp_x16);
	else
		crtc_state->dsc.compressed_bpp_x16 = bpp_x16;

	drm_dbg_kms(display->drm, "Got %d slots for pipe bpp " FXP_Q4_FMT " dsc %d\n",
		    slots, FXP_Q4_ARGS(bpp_x16), dsc);

	return 0;
}
440
/*
 * Compute the non-DSC link config for an MST stream at the maximum link
 * rate/lane count allowed by @limits, finding a fitting uncompressed bpp.
 */
static int mst_stream_compute_link_config(struct intel_dp *intel_dp,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state,
					  const struct link_config_limits *limits)
{
	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	/*
	 * FIXME: allocate the BW according to link_bpp, which in the case of
	 * YUV420 is only half of the pipe bpp value.
	 */
	/* Step by 2 * 3 == 6 bpp, presumably 2 bpc steps x 3 components — TODO confirm */
	return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, conn_state,
					      limits->link.min_bpp_x16,
					      limits->link.max_bpp_x16,
					      fxp_q4_from_int(2 * 3), false);
}
458
/*
 * Compute the DSC link config for an MST stream: clamp the pipe bpp range
 * by the sink's supported DSC input bpcs, then find a fitting compressed
 * bpp within the link limits at the maximum link rate/lane count.
 */
static int mst_stream_dsc_compute_link_config(struct intel_dp *intel_dp,
					      struct intel_crtc_state *crtc_state,
					      struct drm_connector_state *conn_state,
					      const struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	int num_bpc;
	u8 dsc_bpc[3] = {};
	int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp;
	int min_compressed_bpp_x16, max_compressed_bpp_x16;
	int bpp_step_x16;

	max_bpp = limits->pipe.max_bpp;
	min_bpp = limits->pipe.min_bpp;

	/* Sink-supported DSC input bpcs from the DSC DPCD capabilities. */
	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,
						       dsc_bpc);

	drm_dbg_kms(display->drm, "DSC Source supported min bpp %d max bpp %d\n",
		    min_bpp, max_bpp);

	/* 3 components per pixel: bpp = 3 * bpc */
	sink_min_bpp = min_array(dsc_bpc, num_bpc) * 3;
	sink_max_bpp = max_array(dsc_bpc, num_bpc) * 3;

	drm_dbg_kms(display->drm, "DSC Sink supported min bpp %d max bpp %d\n",
		    sink_min_bpp, sink_max_bpp);

	/* Intersect source and sink pipe bpp ranges. */
	if (min_bpp < sink_min_bpp)
		min_bpp = sink_min_bpp;

	if (max_bpp > sink_max_bpp)
		max_bpp = sink_max_bpp;

	crtc_state->pipe_bpp = max_bpp;

	min_compressed_bpp_x16 = limits->link.min_bpp_x16;
	max_compressed_bpp_x16 = limits->link.max_bpp_x16;

	drm_dbg_kms(display->drm,
		    "DSC Sink supported compressed min bpp " FXP_Q4_FMT " compressed max bpp " FXP_Q4_FMT "\n",
		    FXP_Q4_ARGS(min_compressed_bpp_x16), FXP_Q4_ARGS(max_compressed_bpp_x16));

	bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

	/* Ensure the compressed bpp stays at least one step below the pipe bpp. */
	max_compressed_bpp_x16 = min(max_compressed_bpp_x16, fxp_q4_from_int(crtc_state->pipe_bpp) - bpp_step_x16);

	/* Round the limits to step multiples, as required by the TU computation. */
	drm_WARN_ON(display->drm, !is_power_of_2(bpp_step_x16));
	min_compressed_bpp_x16 = round_up(min_compressed_bpp_x16, bpp_step_x16);
	max_compressed_bpp_x16 = round_down(max_compressed_bpp_x16, bpp_step_x16);

	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, conn_state,
					      min_compressed_bpp_x16,
					      max_compressed_bpp_x16,
					      bpp_step_x16, true);
}
518
mode_hblank_period_ns(const struct drm_display_mode * mode)519 static int mode_hblank_period_ns(const struct drm_display_mode *mode)
520 {
521 return DIV_ROUND_CLOSEST_ULL(mul_u32_u32(mode->htotal - mode->hdisplay,
522 NSEC_PER_SEC / 1000),
523 mode->crtc_clock);
524 }
525
/*
 * Whether the sink's hblank expansion quirk requires enabling DSC (or a
 * higher link bpp) for this mode: quirky sinks need this for modes with a
 * short hblank period.
 */
static bool
hblank_expansion_quirk_needs_dsc(const struct intel_connector *connector,
				 const struct intel_crtc_state *crtc_state,
				 const struct link_config_limits *limits)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_uhbr_sink = connector->mst.dp &&
		drm_dp_128b132b_supported(connector->mst.dp->dpcd);
	/* UHBR-capable sinks are affected by longer hblank periods as well. */
	int hblank_limit = is_uhbr_sink ? 500 : 300;

	if (!connector->dp.dsc_hblank_expansion_quirk)
		return false;

	/* UHBR-capable sink driven below UHBR rates — quirk not applicable. */
	if (is_uhbr_sink && !drm_dp_is_uhbr_rate(limits->max_rate))
		return false;

	/* Only short hblank periods trigger the quirk. */
	if (mode_hblank_period_ns(adjusted_mode) > hblank_limit)
		return false;

	/* DSC can only be used if a valid slice count exists for the mode. */
	if (!intel_dp_mst_dsc_get_slice_count(connector, crtc_state))
		return false;

	return true;
}
551
/*
 * Adjust the link bpp limits for sinks with the DSC hblank expansion quirk.
 * Returns false if the limits can't satisfy the quirk's requirements (the
 * caller should then retry, e.g. with DSC enabled).
 */
static bool
adjust_limits_for_dsc_hblank_expansion_quirk(struct intel_dp *intel_dp,
					     const struct intel_connector *connector,
					     const struct intel_crtc_state *crtc_state,
					     struct link_config_limits *limits,
					     bool dsc)
{
	struct intel_display *display = to_intel_display(connector);
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	int min_bpp_x16 = limits->link.min_bpp_x16;

	if (!hblank_expansion_quirk_needs_dsc(connector, crtc_state, limits))
		return true;

	if (!dsc) {
		/* Prefer enabling DSC if it's supported: fail to force a DSC retry. */
		if (intel_dp_supports_dsc(intel_dp, connector, crtc_state)) {
			drm_dbg_kms(display->drm,
				    "[CRTC:%d:%s][CONNECTOR:%d:%s] DSC needed by hblank expansion quirk\n",
				    crtc->base.base.id, crtc->base.name,
				    connector->base.base.id, connector->base.name);
			return false;
		}

		drm_dbg_kms(display->drm,
			    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to 24 due to hblank expansion quirk\n",
			    crtc->base.base.id, crtc->base.name,
			    connector->base.base.id, connector->base.name);

		if (limits->link.max_bpp_x16 < fxp_q4_from_int(24))
			return false;

		limits->link.min_bpp_x16 = fxp_q4_from_int(24);

		return true;
	}

	/* The rate-dependent minimums below assume a fixed link rate. */
	drm_WARN_ON(display->drm, limits->min_rate != limits->max_rate);

	/* Minimum compressed bpp the quirk requires, depending on the link rate. */
	if (limits->max_rate < 540000)
		min_bpp_x16 = fxp_q4_from_int(13);
	else if (limits->max_rate < 810000)
		min_bpp_x16 = fxp_q4_from_int(10);

	if (limits->link.min_bpp_x16 >= min_bpp_x16)
		return true;

	drm_dbg_kms(display->drm,
		    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to " FXP_Q4_FMT " in DSC mode due to hblank expansion quirk\n",
		    crtc->base.base.id, crtc->base.name,
		    connector->base.base.id, connector->base.name,
		    FXP_Q4_ARGS(min_bpp_x16));

	if (limits->link.max_bpp_x16 < min_bpp_x16)
		return false;

	limits->link.min_bpp_x16 = min_bpp_x16;

	return true;
}
611
612 static bool
mst_stream_compute_config_limits(struct intel_dp * intel_dp,struct intel_connector * connector,struct intel_crtc_state * crtc_state,bool dsc,struct link_config_limits * limits)613 mst_stream_compute_config_limits(struct intel_dp *intel_dp,
614 struct intel_connector *connector,
615 struct intel_crtc_state *crtc_state,
616 bool dsc,
617 struct link_config_limits *limits)
618 {
619 if (!intel_dp_compute_config_limits(intel_dp, connector,
620 crtc_state, false, dsc,
621 limits))
622 return false;
623
624 return adjust_limits_for_dsc_hblank_expansion_quirk(intel_dp,
625 connector,
626 crtc_state,
627 limits,
628 dsc);
629 }
630
/*
 * .compute_config() hook for the MST stream encoders: compute the stream's
 * link configuration, falling back to DSC if the mode doesn't fit the link
 * uncompressed, then derive the dependent state (color range, VRR, audio,
 * PSR, tunnel BW).
 */
static int mst_stream_compute_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *pipe_config,
				     struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct link_config_limits limits;
	bool dsc_needed, joiner_needs_dsc;
	int num_joined_pipes;
	int ret = 0;

	if (pipe_config->fec_enable &&
	    !intel_dp_supports_fec(intel_dp, connector, pipe_config))
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	/* Wide/fast modes may need joining multiple pipes to one port. */
	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     adjusted_mode->crtc_hdisplay,
						     adjusted_mode->crtc_clock);
	if (num_joined_pipes > 1)
		pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);

	pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->has_pch_encoder = false;

	joiner_needs_dsc = intel_dp_joiner_needs_dsc(display, num_joined_pipes);

	/* DSC needed up front by the joiner, debugfs force, or limit failure. */
	dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
		!mst_stream_compute_config_limits(intel_dp, connector,
						  pipe_config, false, &limits);

	if (!dsc_needed) {
		ret = mst_stream_compute_link_config(intel_dp, pipe_config,
						     conn_state, &limits);

		if (ret == -EDEADLK)
			return ret;

		/* Fall back to DSC if the mode doesn't fit uncompressed. */
		if (ret)
			dsc_needed = true;
	}

	if (dsc_needed && !intel_dp_supports_dsc(intel_dp, connector, pipe_config)) {
		drm_dbg_kms(display->drm, "DSC required but not available\n");
		return -EINVAL;
	}

	/* enable compression if the mode doesn't fit available BW */
	if (dsc_needed) {
		drm_dbg_kms(display->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
			    str_yes_no(intel_dp->force_dsc_en));

		if (!mst_stream_compute_config_limits(intel_dp, connector,
						      pipe_config, true,
						      &limits))
			return -EINVAL;

		/*
		 * FIXME: As bpc is hardcoded to 8, as mentioned above,
		 * WARN and ignore the debug flag force_dsc_bpc for now.
		 */
		drm_WARN(display->drm, intel_dp->force_dsc_bpc,
			 "Cannot Force BPC for MST\n");
		/*
		 * Try to get at least some timeslots and then see, if
		 * we can fit there with DSC.
		 */
		drm_dbg_kms(display->drm, "Trying to find VCPI slots in DSC mode\n");

		ret = mst_stream_dsc_compute_link_config(intel_dp, pipe_config,
							 conn_state, &limits);
		if (ret < 0)
			return ret;

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits,
						  pipe_config->dp_m_n.tu);
	}

	if (ret)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	if (display->platform.geminilake || display->platform.broxton)
		pipe_config->lane_lat_optim_mask =
			bxt_dpio_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	ret = intel_dp_compute_min_hblank(pipe_config, conn_state);
	if (ret)
		return ret;

	intel_vrr_compute_config(pipe_config, conn_state);

	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);

	intel_ddi_compute_min_voltage_level(pipe_config);

	intel_psr_compute_config(intel_dp, pipe_config, conn_state);

	return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
							pipe_config);
}
746
747 /*
748 * Iterate over all connectors and return a mask of
749 * all CPU transcoders streaming over the same DP link.
750 */
static unsigned int
intel_dp_mst_transcoder_mask(struct intel_atomic_state *state,
			     struct intel_dp *mst_port)
{
	struct intel_display *display = to_intel_display(state);
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 transcoders = 0;
	int i;

	/* Relevant only on TGL+, where streams share a primary transcoder. */
	if (DISPLAY_VER(display) < 12)
		return 0;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		const struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		/* Only connectors on this MST port with an assigned CRTC. */
		if (connector->mst.dp != mst_port || !conn_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_state->base.crtc);
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

		if (!crtc_state->hw.active)
			continue;

		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}
782
/*
 * Return a mask of pipes driving streams through @parent_port on the given
 * topology. A NULL @parent_port matches all streams of the topology.
 */
static u8 get_pipes_downstream_of_mst_port(struct intel_atomic_state *state,
					   struct drm_dp_mst_topology_mgr *mst_mgr,
					   struct drm_dp_mst_port *parent_port)
{
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 mask = 0;
	int i;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		if (!conn_state->base.crtc)
			continue;

		/* Skip connectors on other MST topologies. */
		if (&connector->mst.dp->mst.mgr != mst_mgr)
			continue;

		/* Match the port itself or anything downstream of it. */
		if (connector->mst.port != parent_port &&
		    !drm_dp_mst_port_downstream_of_parent(mst_mgr,
							  connector->mst.port,
							  parent_port))
			continue;

		mask |= BIT(to_intel_crtc(conn_state->base.crtc)->pipe);
	}

	return mask;
}
810
/*
 * Ensure all streams of a topology agree on FEC: if only some pipes have
 * FEC enabled, force FEC on all of them and request a recompute (-EAGAIN).
 */
static int intel_dp_mst_check_fec_change(struct intel_atomic_state *state,
					 struct drm_dp_mst_topology_mgr *mst_mgr,
					 struct intel_link_bw_limits *limits)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc *crtc;
	u8 mst_pipe_mask;
	u8 fec_pipe_mask = 0;
	int ret;

	/* NULL parent port: all pipes streaming on this topology. */
	mst_pipe_mask = get_pipes_downstream_of_mst_port(state, mst_mgr, NULL);

	for_each_intel_crtc_in_pipe_mask(display->drm, crtc, mst_pipe_mask) {
		struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(state, crtc);

		/* Atomic connector check should've added all the MST CRTCs. */
		if (drm_WARN_ON(display->drm, !crtc_state))
			return -EINVAL;

		if (crtc_state->fec_enable)
			fec_pipe_mask |= BIT(crtc->pipe);
	}

	/* Consistent already: either no pipe or all pipes use FEC. */
	if (!fec_pipe_mask || mst_pipe_mask == fec_pipe_mask)
		return 0;

	limits->force_fec_pipes |= mst_pipe_mask;

	ret = intel_modeset_pipes_in_mask_early(state, "MST FEC",
						mst_pipe_mask);

	return ret ? : -EAGAIN;
}
845
/*
 * Check the topology's BW allocation; on overallocation (-ENOSPC) reduce
 * the bpp limits of the affected pipes and request a recompute (-EAGAIN).
 */
static int intel_dp_mst_check_bw(struct intel_atomic_state *state,
				 struct drm_dp_mst_topology_mgr *mst_mgr,
				 struct drm_dp_mst_topology_state *mst_state,
				 struct intel_link_bw_limits *limits)
{
	struct drm_dp_mst_port *mst_port;
	u8 mst_port_pipes;
	int ret;

	/* mst_port is set to the port whose BW limit was exceeded. */
	ret = drm_dp_mst_atomic_check_mgr(&state->base, mst_mgr, mst_state, &mst_port);
	if (ret != -ENOSPC)
		return ret;

	mst_port_pipes = get_pipes_downstream_of_mst_port(state, mst_mgr, mst_port);

	ret = intel_link_bw_reduce_bpp(state, limits,
				       mst_port_pipes, "MST link BW");

	return ret ? : -EAGAIN;
}
866
867 /**
868 * intel_dp_mst_atomic_check_link - check all modeset MST link configuration
869 * @state: intel atomic state
870 * @limits: link BW limits
871 *
872 * Check the link configuration for all modeset MST outputs. If the
873 * configuration is invalid @limits will be updated if possible to
874 * reduce the total BW, after which the configuration for all CRTCs in
875 * @state must be recomputed with the updated @limits.
876 *
877 * Returns:
878 * - 0 if the configuration is valid
879 * - %-EAGAIN, if the configuration is invalid and @limits got updated
880 * with fallback values with which the configuration of all CRTCs in
881 * @state must be recomputed
882 * - Other negative error, if the configuration is invalid without a
883 * fallback possibility, or the check failed for another reason
884 */
intel_dp_mst_atomic_check_link(struct intel_atomic_state * state,struct intel_link_bw_limits * limits)885 int intel_dp_mst_atomic_check_link(struct intel_atomic_state *state,
886 struct intel_link_bw_limits *limits)
887 {
888 struct drm_dp_mst_topology_mgr *mgr;
889 struct drm_dp_mst_topology_state *mst_state;
890 int ret;
891 int i;
892
893 for_each_new_mst_mgr_in_state(&state->base, mgr, mst_state, i) {
894 ret = intel_dp_mst_check_fec_change(state, mgr, limits);
895 if (ret)
896 return ret;
897
898 ret = intel_dp_mst_check_bw(state, mgr, mst_state,
899 limits);
900 if (ret)
901 return ret;
902 }
903
904 return 0;
905 }
906
mst_stream_compute_config_late(struct intel_encoder * encoder,struct intel_crtc_state * crtc_state,struct drm_connector_state * conn_state)907 static int mst_stream_compute_config_late(struct intel_encoder *encoder,
908 struct intel_crtc_state *crtc_state,
909 struct drm_connector_state *conn_state)
910 {
911 struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
912 struct intel_dp *intel_dp = to_primary_dp(encoder);
913
914 /* lowest numbered transcoder will be designated master */
915 crtc_state->mst_master_transcoder =
916 ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1;
917
918 return 0;
919 }
920
921 /*
922 * If one of the connectors in a MST stream needs a modeset, mark all CRTCs
923 * that shares the same MST stream as mode changed,
924 * intel_modeset_pipe_config()+intel_crtc_check_fastset() will take care to do
925 * a fastset when possible.
926 *
927 * On TGL+ this is required since each stream go through a master transcoder,
928 * so if the master transcoder needs modeset, all other streams in the
929 * topology need a modeset. All platforms need to add the atomic state
930 * for all streams in the topology, since a modeset on one may require
931 * changing the MST link BW usage of the others, which in turn needs a
932 * recomputation of the corresponding CRTC states.
933 */
static int
mst_connector_atomic_topology_check(struct intel_connector *connector,
				    struct intel_atomic_state *state)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_connector_list_iter connector_list_iter;
	struct intel_connector *connector_iter;
	int ret = 0;

	/* Nothing to do unless this connector itself needs a modeset. */
	if (!intel_connector_needs_modeset(state, &connector->base))
		return 0;

	/*
	 * Pull every other connector sharing the same MST link (same
	 * connector->mst.dp) into the atomic state and flag its CRTC as
	 * mode changed, so its configuration gets recomputed too.
	 */
	drm_connector_list_iter_begin(display->drm, &connector_list_iter);
	for_each_intel_connector_iter(connector_iter, &connector_list_iter) {
		struct intel_digital_connector_state *conn_iter_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (connector_iter->mst.dp != connector->mst.dp ||
		    connector_iter == connector)
			continue;

		conn_iter_state = intel_atomic_get_digital_connector_state(state,
									   connector_iter);
		if (IS_ERR(conn_iter_state)) {
			ret = PTR_ERR(conn_iter_state);
			break;
		}

		/* Connector not assigned to a CRTC: no pipe state to touch. */
		if (!conn_iter_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_iter_state->base.crtc);
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);
			break;
		}

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
		crtc_state->uapi.mode_changed = true;
	}
	drm_connector_list_iter_end(&connector_list_iter);

	return ret;
}
982
983 static int
mst_connector_atomic_check(struct drm_connector * _connector,struct drm_atomic_state * _state)984 mst_connector_atomic_check(struct drm_connector *_connector,
985 struct drm_atomic_state *_state)
986 {
987 struct intel_atomic_state *state = to_intel_atomic_state(_state);
988 struct intel_connector *connector = to_intel_connector(_connector);
989 int ret;
990
991 ret = intel_digital_connector_atomic_check(&connector->base, &state->base);
992 if (ret)
993 return ret;
994
995 ret = mst_connector_atomic_topology_check(connector, state);
996 if (ret)
997 return ret;
998
999 if (intel_connector_needs_modeset(state, &connector->base)) {
1000 ret = intel_dp_tunnel_atomic_check_state(state,
1001 connector->mst.dp,
1002 connector);
1003 if (ret)
1004 return ret;
1005 }
1006
1007 return drm_dp_atomic_release_time_slots(&state->base,
1008 &connector->mst.dp->mst.mgr,
1009 connector->mst.port);
1010 }
1011
/*
 * First stage of disabling one MST stream: mark the link inactive when this
 * is the last remaining active stream, disable HDCP and sink-side DSC
 * decompression for the stream's connector.
 */
static void mst_stream_disable(struct intel_atomic_state *state,
			       struct intel_encoder *encoder,
			       const struct intel_crtc_state *old_crtc_state,
			       const struct drm_connector_state *old_conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);

	/* This stream is the only one still active, the link goes down. */
	if (intel_dp_mst_active_streams(intel_dp) == 1)
		intel_dp->link.active = false;

	intel_hdcp_disable(intel_mst->connector);

	intel_dp_sink_disable_decompression(state, connector, old_crtc_state);
}
1029
/*
 * Tear down one MST stream: disable its pipes and transcoder, remove the
 * stream's payload from the MST link and, for the last active stream, hand
 * over to the primary encoder's post-disable hook.
 */
static void mst_stream_post_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	struct drm_dp_mst_topology_state *old_mst_state =
		drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	struct drm_dp_mst_topology_state *new_mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	const struct drm_dp_mst_atomic_payload *old_payload =
		drm_atomic_get_mst_payload_state(old_mst_state, connector->mst.port);
	struct drm_dp_mst_atomic_payload *new_payload =
		drm_atomic_get_mst_payload_state(new_mst_state, connector->mst.port);
	struct intel_crtc *pipe_crtc;
	bool last_mst_stream;
	int i;

	last_mst_stream = intel_dp_mst_dec_active_streams(intel_dp);

	/* On TGL+ the last stream must go through the master transcoder. */
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream &&
		    !intel_dp_mst_is_master_trans(old_crtc_state));

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_off(old_pipe_crtc_state);
	}

	intel_disable_transcoder(old_crtc_state);

	/* Step 1 of the payload removal: update the payload table. */
	drm_dp_remove_payload_part1(&intel_dp->mst.mgr, new_mst_state, new_payload);

	intel_ddi_clear_act_sent(encoder, old_crtc_state);

	/* Trigger the allocation change and wait for the sink to ACK it. */
	intel_de_rmw(display,
		     TRANS_DDI_FUNC_CTL(display, old_crtc_state->cpu_transcoder),
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);

	intel_ddi_wait_for_act_sent(encoder, old_crtc_state);
	drm_dp_check_act_status(&intel_dp->mst.mgr);

	/* Step 2: release the stream's BW now that the ACT was handled. */
	drm_dp_remove_payload_part2(&intel_dp->mst.mgr, new_mst_state,
				    old_payload, new_payload);

	intel_vrr_transcoder_disable(old_crtc_state);

	intel_ddi_disable_transcoder_func(old_crtc_state);

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_dsc_disable(old_pipe_crtc_state);

		if (DISPLAY_VER(display) >= 9)
			skl_scaler_disable(old_pipe_crtc_state);
		else
			ilk_pfit_disable(old_pipe_crtc_state);
	}

	/*
	 * Power down mst path before disabling the port, otherwise we end
	 * up getting interrupts from the sink upon detecting link loss.
	 */
	drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->mst.port,
				     false);

	/*
	 * BSpec 4287: disable DIP after the transcoder is disabled and before
	 * the transcoder clock select is set to none.
	 */
	intel_dp_set_infoframes(primary_encoder, false, old_crtc_state, NULL);
	/*
	 * From TGL spec: "If multi-stream slave transcoder: Configure
	 * Transcoder Clock Select to direct no clock to the transcoder"
	 *
	 * From older GENs spec: "Configure Transcoder Clock Select to direct
	 * no clock to the transcoder"
	 */
	if (DISPLAY_VER(display) < 12 || !last_mst_stream)
		intel_ddi_disable_transcoder_clock(old_crtc_state);


	intel_mst->connector = NULL;
	if (last_mst_stream)
		primary_encoder->post_disable(state, primary_encoder,
					      old_crtc_state, NULL);

}
1126
mst_stream_post_pll_disable(struct intel_atomic_state * state,struct intel_encoder * encoder,const struct intel_crtc_state * old_crtc_state,const struct drm_connector_state * old_conn_state)1127 static void mst_stream_post_pll_disable(struct intel_atomic_state *state,
1128 struct intel_encoder *encoder,
1129 const struct intel_crtc_state *old_crtc_state,
1130 const struct drm_connector_state *old_conn_state)
1131 {
1132 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1133 struct intel_dp *intel_dp = to_primary_dp(encoder);
1134
1135 if (intel_dp_mst_active_streams(intel_dp) == 0 &&
1136 primary_encoder->post_pll_disable)
1137 primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state);
1138 }
1139
mst_stream_pre_pll_enable(struct intel_atomic_state * state,struct intel_encoder * encoder,const struct intel_crtc_state * pipe_config,const struct drm_connector_state * conn_state)1140 static void mst_stream_pre_pll_enable(struct intel_atomic_state *state,
1141 struct intel_encoder *encoder,
1142 const struct intel_crtc_state *pipe_config,
1143 const struct drm_connector_state *conn_state)
1144 {
1145 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1146 struct intel_dp *intel_dp = to_primary_dp(encoder);
1147
1148 if (intel_dp_mst_active_streams(intel_dp) == 0)
1149 primary_encoder->pre_pll_enable(state, primary_encoder,
1150 pipe_config, NULL);
1151 else
1152 /*
1153 * The port PLL state needs to get updated for secondary
1154 * streams as for the primary stream.
1155 */
1156 intel_ddi_update_active_dpll(state, primary_encoder,
1157 to_intel_crtc(pipe_config->uapi.crtc));
1158 }
1159
intel_mst_probed_link_params_valid(struct intel_dp * intel_dp,int link_rate,int lane_count)1160 static bool intel_mst_probed_link_params_valid(struct intel_dp *intel_dp,
1161 int link_rate, int lane_count)
1162 {
1163 return intel_dp->link.mst_probed_rate == link_rate &&
1164 intel_dp->link.mst_probed_lane_count == lane_count;
1165 }
1166
intel_mst_set_probed_link_params(struct intel_dp * intel_dp,int link_rate,int lane_count)1167 static void intel_mst_set_probed_link_params(struct intel_dp *intel_dp,
1168 int link_rate, int lane_count)
1169 {
1170 intel_dp->link.mst_probed_rate = link_rate;
1171 intel_dp->link.mst_probed_lane_count = lane_count;
1172 }
1173
intel_mst_reprobe_topology(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)1174 static void intel_mst_reprobe_topology(struct intel_dp *intel_dp,
1175 const struct intel_crtc_state *crtc_state)
1176 {
1177 if (intel_mst_probed_link_params_valid(intel_dp,
1178 crtc_state->port_clock, crtc_state->lane_count))
1179 return;
1180
1181 drm_dp_mst_topology_queue_probe(&intel_dp->mst.mgr);
1182
1183 intel_mst_set_probed_link_params(intel_dp,
1184 crtc_state->port_clock, crtc_state->lane_count);
1185 }
1186
/*
 * Bring up one MST stream before enabling its transcoder: power up the
 * sink path, run the primary encoder's pre-enable for the first stream and
 * add the stream's payload to the MST link.
 */
static void mst_stream_pre_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	int ret;
	bool first_mst_stream;

	/* MST encoders are bound to a crtc, not to a connector,
	 * force the mapping here for get_hw_state.
	 */
	connector->encoder = encoder;
	intel_mst->connector = connector;

	first_mst_stream = intel_dp_mst_inc_active_streams(intel_dp);
	/* On TGL+ the first stream must go through the master transcoder. */
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream &&
		    !intel_dp_mst_is_master_trans(pipe_config));

	if (first_mst_stream)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);

	drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->mst.port, true);

	intel_dp_sink_enable_decompression(state, connector, pipe_config);

	if (first_mst_stream) {
		primary_encoder->pre_enable(state, primary_encoder,
					    pipe_config, NULL);

		intel_mst_reprobe_topology(intel_dp, pipe_config);
	}

	/* A failure here triggers a modeset retry with fallback params. */
	ret = drm_dp_add_payload_part1(&intel_dp->mst.mgr, mst_state,
				       drm_atomic_get_mst_payload_state(mst_state, connector->mst.port));
	if (ret < 0)
		intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);

	/*
	 * Before Gen 12 this is not done as part of
	 * primary_encoder->pre_enable() and should be done here. For
	 * Gen 12+ the step in which this should be done is different for the
	 * first MST stream, so it's done on the DDI for the first stream and
	 * here for the following ones.
	 */
	if (DISPLAY_VER(display) < 12 || !first_mst_stream)
		intel_ddi_enable_transcoder_clock(encoder, pipe_config);

	if (DISPLAY_VER(display) >= 13 && !first_mst_stream)
		intel_ddi_config_transcoder_func(encoder, pipe_config);

	intel_dsc_dp_pps_write(primary_encoder, pipe_config);
	intel_ddi_set_dp_msa(pipe_config, conn_state);
}
1248
/*
 * Apply ADL-P MST workarounds (BS jitter, short-HBLANK, DPT/DPTP align)
 * in CHICKEN_MISC_3, depending on stepping, FEC and UHBR use.
 */
static void enable_bs_jitter_was(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	u32 clear = 0;
	u32 set = 0;

	if (!display->platform.alderlake_p)
		return;

	if (!IS_DISPLAY_STEP(display, STEP_D0, STEP_FOREVER))
		return;

	/* Wa_14013163432:adlp */
	if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
		set |= DP_MST_FEC_BS_JITTER_WA(crtc_state->cpu_transcoder);

	/* Wa_14014143976:adlp */
	if (IS_DISPLAY_STEP(display, STEP_E0, STEP_FOREVER)) {
		if (intel_dp_is_uhbr(crtc_state))
			set |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
		else if (crtc_state->fec_enable)
			clear |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);

		if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
			set |= DP_MST_DPT_DPTP_ALIGN_WA(crtc_state->cpu_transcoder);
	}

	/* Avoid a register access when no bits need updating. */
	if (!clear && !set)
		return;

	intel_de_rmw(display, CHICKEN_MISC_3, clear, set);
}
1281
/*
 * Final enable of one MST stream: program the transcoder, allocate the
 * stream's VC payload time slots on the link and start the pipes.
 */
static void mst_stream_enable(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	enum transcoder trans = pipe_config->cpu_transcoder;
	/* The counter was already incremented in ->pre_enable. */
	bool first_mst_stream = intel_dp_mst_active_streams(intel_dp) == 1;
	struct intel_crtc *pipe_crtc;
	int ret, i;

	drm_WARN_ON(display->drm, pipe_config->has_pch_encoder);

	if (intel_dp_is_uhbr(pipe_config)) {
		/* On 128b/132b links the pixel clock is programmed in Hz. */
		const struct drm_display_mode *adjusted_mode =
			&pipe_config->hw.adjusted_mode;
		u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock);

		intel_de_write(display, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24));
		intel_de_write(display, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
	}

	enable_bs_jitter_was(pipe_config);

	intel_ddi_enable_transcoder_func(encoder, pipe_config);

	intel_vrr_transcoder_enable(pipe_config);

	intel_ddi_clear_act_sent(encoder, pipe_config);

	/* Trigger the payload allocation and wait for the sink's ACK. */
	intel_de_rmw(display, TRANS_DDI_FUNC_CTL(display, trans), 0,
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC);

	intel_ddi_wait_for_act_sent(encoder, pipe_config);
	drm_dp_check_act_status(&intel_dp->mst.mgr);

	if (first_mst_stream)
		intel_ddi_wait_for_fec_status(encoder, pipe_config, true);

	/* A failure here triggers a modeset retry with fallback params. */
	ret = drm_dp_add_payload_part2(&intel_dp->mst.mgr,
				       drm_atomic_get_mst_payload_state(mst_state,
									connector->mst.port));
	if (ret < 0)
		intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);

	if (DISPLAY_VER(display) >= 12)
		intel_de_rmw(display, CHICKEN_TRANS(display, trans),
			     FECSTALL_DIS_DPTSTREAM_DPTTG,
			     pipe_config->fec_enable ? FECSTALL_DIS_DPTSTREAM_DPTTG : 0);

	intel_enable_transcoder(pipe_config);

	for_each_pipe_crtc_modeset_enable(display, pipe_crtc, pipe_config, i) {
		const struct intel_crtc_state *pipe_crtc_state =
			intel_atomic_get_new_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_on(pipe_crtc_state);
	}

	intel_hdcp_enable(state, encoder, pipe_config, conn_state);
}
1350
mst_stream_get_hw_state(struct intel_encoder * encoder,enum pipe * pipe)1351 static bool mst_stream_get_hw_state(struct intel_encoder *encoder,
1352 enum pipe *pipe)
1353 {
1354 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1355 *pipe = intel_mst->pipe;
1356 if (intel_mst->connector)
1357 return true;
1358 return false;
1359 }
1360
mst_stream_get_config(struct intel_encoder * encoder,struct intel_crtc_state * pipe_config)1361 static void mst_stream_get_config(struct intel_encoder *encoder,
1362 struct intel_crtc_state *pipe_config)
1363 {
1364 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1365
1366 primary_encoder->get_config(primary_encoder, pipe_config);
1367 }
1368
mst_stream_initial_fastset_check(struct intel_encoder * encoder,struct intel_crtc_state * crtc_state)1369 static bool mst_stream_initial_fastset_check(struct intel_encoder *encoder,
1370 struct intel_crtc_state *crtc_state)
1371 {
1372 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1373
1374 return intel_dp_initial_fastset_check(primary_encoder, crtc_state);
1375 }
1376
mst_connector_get_ddc_modes(struct drm_connector * _connector)1377 static int mst_connector_get_ddc_modes(struct drm_connector *_connector)
1378 {
1379 struct intel_connector *connector = to_intel_connector(_connector);
1380 struct intel_display *display = to_intel_display(connector);
1381 struct intel_dp *intel_dp = connector->mst.dp;
1382 const struct drm_edid *drm_edid;
1383 int ret;
1384
1385 if (drm_connector_is_unregistered(&connector->base))
1386 return intel_connector_update_modes(&connector->base, NULL);
1387
1388 if (!intel_display_driver_check_access(display))
1389 return drm_edid_connector_add_modes(&connector->base);
1390
1391 drm_edid = drm_dp_mst_edid_read(&connector->base, &intel_dp->mst.mgr, connector->mst.port);
1392
1393 ret = intel_connector_update_modes(&connector->base, drm_edid);
1394
1395 drm_edid_free(drm_edid);
1396
1397 return ret;
1398 }
1399
1400 static int
mst_connector_late_register(struct drm_connector * _connector)1401 mst_connector_late_register(struct drm_connector *_connector)
1402 {
1403 struct intel_connector *connector = to_intel_connector(_connector);
1404 int ret;
1405
1406 ret = drm_dp_mst_connector_late_register(&connector->base, connector->mst.port);
1407 if (ret < 0)
1408 return ret;
1409
1410 ret = intel_connector_register(&connector->base);
1411 if (ret < 0)
1412 drm_dp_mst_connector_early_unregister(&connector->base, connector->mst.port);
1413
1414 return ret;
1415 }
1416
static void
mst_connector_early_unregister(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);

	/* Unregister the connector first, then its MST side. */
	intel_connector_unregister(&connector->base);
	drm_dp_mst_connector_early_unregister(&connector->base, connector->mst.port);
}
1425
/* drm_connector_funcs for MST connectors. */
static const struct drm_connector_funcs mst_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = mst_connector_late_register,
	.early_unregister = mst_connector_early_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
1436
static int mst_connector_get_modes(struct drm_connector *_connector)
{
	/* Mode probing for MST connectors goes entirely via the EDID path. */
	return mst_connector_get_ddc_modes(_connector);
}
1443
/*
 * Validate a mode against link/port BW, dotclock, PBN and (if available)
 * DSC limits. The verdict is returned via @status; the function's return
 * value is only for locking errors (e.g. -EDEADLK from drm_modeset_lock()).
 */
static int
mst_connector_mode_valid_ctx(struct drm_connector *_connector,
			     const struct drm_display_mode *mode,
			     struct drm_modeset_acquire_ctx *ctx,
			     enum drm_mode_status *status)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = connector->mst.dp;
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst.mgr;
	struct drm_dp_mst_port *port = connector->mst.port;
	const int min_bpp = 18;
	int max_dotclk = display->cdclk.max_dotclk_freq;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int ret;
	bool dsc = false;
	u16 dsc_max_compressed_bpp = 0;
	u8 dsc_slice_count = 0;
	int target_clock = mode->clock;
	int num_joined_pipes;

	if (drm_connector_is_unregistered(&connector->base)) {
		*status = MODE_ERROR;
		return 0;
	}

	*status = intel_cpu_transcoder_mode_valid(display, mode);
	if (*status != MODE_OK)
		return 0;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK) {
		*status = MODE_H_ILLEGAL;
		return 0;
	}

	if (mode->clock < 10000) {
		*status = MODE_CLOCK_LOW;
		return 0;
	}

	/* Max achievable link data rate with the current sink/source caps. */
	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_link_data_rate(intel_dp,
					       max_link_clock, max_lanes);
	/* Rate required at the minimum (uncompressed) bpp. */
	mode_rate = intel_dp_link_required(mode->clock, min_bpp);

	/*
	 * TODO:
	 * - Also check if compression would allow for the mode
	 * - Calculate the overhead using drm_dp_bw_overhead() /
	 *   drm_dp_bw_channel_coding_efficiency(), similarly to the
	 *   compute config code, as drm_dp_calc_pbn_mode() doesn't
	 *   account with all the overheads.
	 * - Check here and during compute config the BW reported by
	 *   DFP_Link_Available_Payload_Bandwidth_Number (or the
	 *   corresponding link capabilities of the sink) in case the
	 *   stream is uncompressed for it by the last branch device.
	 */
	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     mode->hdisplay, target_clock);
	max_dotclk *= num_joined_pipes;

	/* port->full_pbn is protected by the topology mgr's base lock. */
	ret = drm_modeset_lock(&mgr->base.lock, ctx);
	if (ret)
		return ret;

	if (mode_rate > max_rate || mode->clock > max_dotclk ||
	    drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (intel_dp_has_dsc(connector)) {
		/*
		 * TBD pass the connector BPC,
		 * for now U8_MAX so that max BPC on that platform would be picked
		 */
		int pipe_bpp = intel_dp_dsc_compute_max_bpp(connector, U8_MAX);

		if (drm_dp_sink_supports_fec(connector->dp.fec_capability)) {
			dsc_max_compressed_bpp =
				intel_dp_dsc_get_max_compressed_bpp(display,
								    max_link_clock,
								    max_lanes,
								    target_clock,
								    mode->hdisplay,
								    num_joined_pipes,
								    INTEL_OUTPUT_FORMAT_RGB,
								    pipe_bpp, 64);
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(connector,
							     target_clock,
							     mode->hdisplay,
							     num_joined_pipes);
		}

		/* DSC is usable only with both a valid bpp and slice count. */
		dsc = dsc_max_compressed_bpp && dsc_slice_count;
	}

	if (intel_dp_joiner_needs_dsc(display, num_joined_pipes) && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (mode_rate > max_rate && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	*status = intel_mode_valid_max_plane_size(display, mode, num_joined_pipes);
	return 0;
}
1557
1558 static struct drm_encoder *
mst_connector_atomic_best_encoder(struct drm_connector * _connector,struct drm_atomic_state * state)1559 mst_connector_atomic_best_encoder(struct drm_connector *_connector,
1560 struct drm_atomic_state *state)
1561 {
1562 struct intel_connector *connector = to_intel_connector(_connector);
1563 struct drm_connector_state *connector_state =
1564 drm_atomic_get_new_connector_state(state, &connector->base);
1565 struct intel_dp *intel_dp = connector->mst.dp;
1566 struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);
1567
1568 return &intel_dp->mst.stream_encoders[crtc->pipe]->base.base;
1569 }
1570
1571 static int
mst_connector_detect_ctx(struct drm_connector * _connector,struct drm_modeset_acquire_ctx * ctx,bool force)1572 mst_connector_detect_ctx(struct drm_connector *_connector,
1573 struct drm_modeset_acquire_ctx *ctx, bool force)
1574 {
1575 struct intel_connector *connector = to_intel_connector(_connector);
1576 struct intel_display *display = to_intel_display(connector);
1577 struct intel_dp *intel_dp = connector->mst.dp;
1578
1579 if (!intel_display_device_enabled(display))
1580 return connector_status_disconnected;
1581
1582 if (drm_connector_is_unregistered(&connector->base))
1583 return connector_status_disconnected;
1584
1585 if (!intel_display_driver_check_access(display))
1586 return connector->base.status;
1587
1588 intel_dp_flush_connector_commits(connector);
1589
1590 return drm_dp_mst_detect_port(&connector->base, ctx, &intel_dp->mst.mgr,
1591 connector->mst.port);
1592 }
1593
/* drm_connector_helper_funcs for MST connectors. */
static const struct drm_connector_helper_funcs mst_connector_helper_funcs = {
	.get_modes = mst_connector_get_modes,
	.mode_valid_ctx = mst_connector_mode_valid_ctx,
	.atomic_best_encoder = mst_connector_atomic_best_encoder,
	.atomic_check = mst_connector_atomic_check,
	.detect_ctx = mst_connector_detect_ctx,
};
1601
static void mst_stream_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst =
		enc_to_mst(to_intel_encoder(encoder));

	/* Tear down the base encoder before freeing the containing object. */
	drm_encoder_cleanup(encoder);
	kfree(intel_mst);
}
1609
/* drm_encoder_funcs for MST stream encoders. */
static const struct drm_encoder_funcs mst_stream_encoder_funcs = {
	.destroy = mst_stream_encoder_destroy,
};
1613
mst_connector_get_hw_state(struct intel_connector * connector)1614 static bool mst_connector_get_hw_state(struct intel_connector *connector)
1615 {
1616 /* This is the MST stream encoder set in ->pre_enable, if any */
1617 struct intel_encoder *encoder = intel_attached_encoder(connector);
1618 enum pipe pipe;
1619
1620 if (!encoder || !connector->base.state->crtc)
1621 return false;
1622
1623 return encoder->get_hw_state(encoder, &pipe);
1624 }
1625
/*
 * Attach the standard MST connector properties (path, tile, audio,
 * broadcast RGB, max bpc) to a newly created MST connector.
 */
static int mst_topology_add_connector_properties(struct intel_dp *intel_dp,
						 struct drm_connector *_connector,
						 const char *pathprop)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(_connector);

	drm_object_attach_property(&connector->base.base,
				   display->drm->mode_config.path_property, 0);
	drm_object_attach_property(&connector->base.base,
				   display->drm->mode_config.tile_property, 0);

	intel_attach_force_audio_property(&connector->base);
	intel_attach_broadcast_rgb_property(&connector->base);

	/*
	 * Reuse the prop from the SST connector because we're
	 * not allowed to create new props after device registration.
	 */
	connector->base.max_bpc_property =
		intel_dp->attached_connector->base.max_bpc_property;
	if (connector->base.max_bpc_property)
		drm_connector_attach_max_bpc_property(&connector->base, 6, 12);

	return drm_connector_set_path_property(&connector->base, pathprop);
}
1652
1653 static void
intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp * intel_dp,struct intel_connector * connector)1654 intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp *intel_dp,
1655 struct intel_connector *connector)
1656 {
1657 u8 dpcd_caps[DP_RECEIVER_CAP_SIZE];
1658
1659 if (!connector->dp.dsc_decompression_aux)
1660 return;
1661
1662 if (drm_dp_read_dpcd_caps(connector->dp.dsc_decompression_aux, dpcd_caps) < 0)
1663 return;
1664
1665 intel_dp_get_dsc_sink_cap(dpcd_caps[DP_DPCD_REV], connector);
1666 }
1667
/*
 * Detect sinks that require DSC to be enabled for modes with a short
 * HBLANK (DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC), identified by
 * the branch device's DPCD descriptor.
 */
static bool detect_dsc_hblank_expansion_quirk(const struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_dp_aux *aux = connector->dp.dsc_decompression_aux;
	struct drm_dp_desc desc;
	u8 dpcd[DP_RECEIVER_CAP_SIZE];

	if (!aux)
		return false;

	/*
	 * A logical port's OUI (at least for affected sinks) is all 0, so
	 * instead of that the parent port's OUI is used for identification.
	 */
	if (drm_dp_mst_port_is_logical(connector->mst.port)) {
		aux = drm_dp_mst_aux_for_parent(connector->mst.port);
		if (!aux)
			aux = &connector->mst.dp->aux;
	}

	if (drm_dp_read_dpcd_caps(aux, dpcd) < 0)
		return false;

	if (drm_dp_read_desc(aux, &desc, drm_dp_is_branch(dpcd)) < 0)
		return false;

	if (!drm_dp_has_quirk(&desc,
			      DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC))
		return false;

	/*
	 * UHBR (MST sink) devices requiring this quirk don't advertise the
	 * HBLANK expansion support. Presuming that they perform HBLANK
	 * expansion internally, or are affected by this issue on modes with a
	 * short HBLANK for other reasons.
	 */
	if (!drm_dp_128b132b_supported(dpcd) &&
	    !(dpcd[DP_RECEIVE_PORT_0_CAP_0] & DP_HBLANK_EXPANSION_CAPABLE))
		return false;

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] DSC HBLANK expansion quirk detected\n",
		    connector->base.base.id, connector->base.name);

	return true;
}
1714
/*
 * Topology manager callback: create and register the connector for a newly
 * detected MST port. Returns the new connector, or NULL on failure.
 */
static struct drm_connector *
mst_topology_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			   struct drm_dp_mst_port *port,
			   const char *pathprop)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_connector *connector;
	enum pipe pipe;
	int ret;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	connector->get_hw_state = mst_connector_get_hw_state;
	connector->sync_state = intel_dp_connector_sync_state;
	connector->mst.dp = intel_dp;
	connector->mst.port = port;
	/* Hold a malloc reference on the port for the connector's lifetime. */
	drm_dp_mst_get_port_malloc(port);

	ret = drm_connector_dynamic_init(display->drm, &connector->base, &mst_connector_funcs,
					 DRM_MODE_CONNECTOR_DisplayPort, NULL);
	if (ret)
		goto err_put_port;

	/* Cache the DSC decompression caps and quirks for this port. */
	connector->dp.dsc_decompression_aux = drm_dp_mst_dsc_aux_for_port(port);
	intel_dp_mst_read_decompression_port_dsc_caps(intel_dp, connector);
	connector->dp.dsc_hblank_expansion_quirk =
		detect_dsc_hblank_expansion_quirk(connector);

	drm_connector_helper_add(&connector->base, &mst_connector_helper_funcs);

	/* Any pipe's stream encoder may drive this connector. */
	for_each_pipe(display, pipe) {
		struct drm_encoder *enc =
			&intel_dp->mst.stream_encoders[pipe]->base.base;

		ret = drm_connector_attach_encoder(&connector->base, enc);
		if (ret)
			goto err_cleanup_connector;
	}

	ret = mst_topology_add_connector_properties(intel_dp, &connector->base, pathprop);
	if (ret)
		goto err_cleanup_connector;

	/* HDCP init failure is non-fatal: continue without HDCP. */
	ret = intel_dp_hdcp_init(dig_port, connector);
	if (ret)
		drm_dbg_kms(display->drm, "[%s:%d] HDCP MST init failed, skipping.\n",
			    connector->base.name, connector->base.base.id);

	return &connector->base;

err_cleanup_connector:
	drm_connector_cleanup(&connector->base);
err_put_port:
	drm_dp_mst_put_port_malloc(port);
	intel_connector_free(connector);

	return NULL;
}
1777
1778 static void
mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr * mgr)1779 mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
1780 {
1781 struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);
1782
1783 intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
1784 }
1785
/* Callbacks invoked by the DRM DP MST topology helper core. */
static const struct drm_dp_mst_topology_cbs mst_topology_cbs = {
	.add_connector = mst_topology_add_connector,
	.poll_hpd_irq = mst_topology_poll_hpd_irq,
};
1790
/*
 * Create a fake encoder for an individual MST stream, driven by @pipe.
 * Returns the new encoder, or NULL on allocation failure.
 */
static struct intel_dp_mst_encoder *
mst_stream_encoder_create(struct intel_digital_port *dig_port, enum pipe pipe)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_encoder *primary_encoder = &dig_port->base;
	struct intel_dp_mst_encoder *intel_mst;
	struct intel_encoder *encoder;

	intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL);

	if (!intel_mst)
		return NULL;

	intel_mst->pipe = pipe;
	encoder = &intel_mst->base;
	intel_mst->primary = dig_port;

	/*
	 * NOTE(review): drm_encoder_init() can fail but its return value is
	 * ignored here — confirm failure is impossible/benign at this point.
	 */
	drm_encoder_init(display->drm, &encoder->base, &mst_stream_encoder_funcs,
			 DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe));

	/* Inherit the power domain and port from the primary (DDI) encoder. */
	encoder->type = INTEL_OUTPUT_DP_MST;
	encoder->power_domain = primary_encoder->power_domain;
	encoder->port = primary_encoder->port;
	encoder->cloneable = 0;
	/*
	 * This is wrong, but broken userspace uses the intersection
	 * of possible_crtcs of all the encoders of a given connector
	 * to figure out which crtcs can drive said connector. What
	 * should be used instead is the union of possible_crtcs.
	 * To keep such userspace functioning we must misconfigure
	 * this to make sure the intersection is not empty :(
	 */
	encoder->pipe_mask = ~0;

	/* Modeset hooks for the fake stream encoder. */
	encoder->compute_config = mst_stream_compute_config;
	encoder->compute_config_late = mst_stream_compute_config_late;
	encoder->disable = mst_stream_disable;
	encoder->post_disable = mst_stream_post_disable;
	encoder->post_pll_disable = mst_stream_post_pll_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->pre_pll_enable = mst_stream_pre_pll_enable;
	encoder->pre_enable = mst_stream_pre_enable;
	encoder->enable = mst_stream_enable;
	encoder->audio_enable = intel_audio_codec_enable;
	encoder->audio_disable = intel_audio_codec_disable;
	encoder->get_hw_state = mst_stream_get_hw_state;
	encoder->get_config = mst_stream_get_config;
	encoder->initial_fastset_check = mst_stream_initial_fastset_check;

	return intel_mst;

}
1844
1845 /* Create the fake encoders for MST streams */
1846 static bool
mst_stream_encoders_create(struct intel_digital_port * dig_port)1847 mst_stream_encoders_create(struct intel_digital_port *dig_port)
1848 {
1849 struct intel_display *display = to_intel_display(dig_port);
1850 struct intel_dp *intel_dp = &dig_port->dp;
1851 enum pipe pipe;
1852
1853 for_each_pipe(display, pipe)
1854 intel_dp->mst.stream_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
1855 return true;
1856 }
1857
1858 int
intel_dp_mst_encoder_init(struct intel_digital_port * dig_port,int conn_base_id)1859 intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id)
1860 {
1861 struct intel_display *display = to_intel_display(dig_port);
1862 struct intel_dp *intel_dp = &dig_port->dp;
1863 enum port port = dig_port->base.port;
1864 int ret;
1865
1866 if (!HAS_DP_MST(display) || intel_dp_is_edp(intel_dp))
1867 return 0;
1868
1869 if (DISPLAY_VER(display) < 12 && port == PORT_A)
1870 return 0;
1871
1872 if (DISPLAY_VER(display) < 11 && port == PORT_E)
1873 return 0;
1874
1875 intel_dp->mst.mgr.cbs = &mst_topology_cbs;
1876
1877 /* create encoders */
1878 mst_stream_encoders_create(dig_port);
1879 ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst.mgr, display->drm,
1880 &intel_dp->aux, 16,
1881 INTEL_NUM_PIPES(display), conn_base_id);
1882 if (ret) {
1883 intel_dp->mst.mgr.cbs = NULL;
1884 return ret;
1885 }
1886
1887 return 0;
1888 }
1889
intel_dp_mst_source_support(struct intel_dp * intel_dp)1890 bool intel_dp_mst_source_support(struct intel_dp *intel_dp)
1891 {
1892 return intel_dp->mst.mgr.cbs;
1893 }
1894
1895 void
intel_dp_mst_encoder_cleanup(struct intel_digital_port * dig_port)1896 intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port)
1897 {
1898 struct intel_dp *intel_dp = &dig_port->dp;
1899
1900 if (!intel_dp_mst_source_support(intel_dp))
1901 return;
1902
1903 drm_dp_mst_topology_mgr_destroy(&intel_dp->mst.mgr);
1904 /* encoders will get killed by normal cleanup */
1905
1906 intel_dp->mst.mgr.cbs = NULL;
1907 }
1908
intel_dp_mst_is_master_trans(const struct intel_crtc_state * crtc_state)1909 bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state)
1910 {
1911 return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder;
1912 }
1913
intel_dp_mst_is_slave_trans(const struct intel_crtc_state * crtc_state)1914 bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state)
1915 {
1916 return crtc_state->mst_master_transcoder != INVALID_TRANSCODER &&
1917 crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder;
1918 }
1919
1920 /**
1921 * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector
1922 * @state: atomic state
1923 * @connector: connector to add the state for
1924 * @crtc: the CRTC @connector is attached to
1925 *
1926 * Add the MST topology state for @connector to @state.
1927 *
1928 * Returns 0 on success, negative error code on failure.
1929 */
1930 static int
intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state * state,struct intel_connector * connector,struct intel_crtc * crtc)1931 intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state,
1932 struct intel_connector *connector,
1933 struct intel_crtc *crtc)
1934 {
1935 struct drm_dp_mst_topology_state *mst_state;
1936
1937 if (!connector->mst.dp)
1938 return 0;
1939
1940 mst_state = drm_atomic_get_mst_topology_state(&state->base,
1941 &connector->mst.dp->mst.mgr);
1942 if (IS_ERR(mst_state))
1943 return PTR_ERR(mst_state);
1944
1945 mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base);
1946
1947 return 0;
1948 }
1949
1950 /**
1951 * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC
1952 * @state: atomic state
1953 * @crtc: CRTC to add the state for
1954 *
1955 * Add the MST topology state for @crtc to @state.
1956 *
1957 * Returns 0 on success, negative error code on failure.
1958 */
intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state * state,struct intel_crtc * crtc)1959 int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state,
1960 struct intel_crtc *crtc)
1961 {
1962 struct drm_connector *_connector;
1963 struct drm_connector_state *conn_state;
1964 int i;
1965
1966 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
1967 struct intel_connector *connector = to_intel_connector(_connector);
1968 int ret;
1969
1970 if (conn_state->crtc != &crtc->base)
1971 continue;
1972
1973 ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc);
1974 if (ret)
1975 return ret;
1976 }
1977
1978 return 0;
1979 }
1980
1981 static struct intel_connector *
get_connector_in_state_for_crtc(struct intel_atomic_state * state,const struct intel_crtc * crtc)1982 get_connector_in_state_for_crtc(struct intel_atomic_state *state,
1983 const struct intel_crtc *crtc)
1984 {
1985 struct drm_connector_state *old_conn_state;
1986 struct drm_connector_state *new_conn_state;
1987 struct drm_connector *_connector;
1988 int i;
1989
1990 for_each_oldnew_connector_in_state(&state->base, _connector,
1991 old_conn_state, new_conn_state, i) {
1992 struct intel_connector *connector =
1993 to_intel_connector(_connector);
1994
1995 if (old_conn_state->crtc == &crtc->base ||
1996 new_conn_state->crtc == &crtc->base)
1997 return connector;
1998 }
1999
2000 return NULL;
2001 }
2002
/**
 * intel_dp_mst_crtc_needs_modeset - check if changes in topology need to modeset the given CRTC
 * @state: atomic state
 * @crtc: CRTC for which to check the modeset requirement
 *
 * Check if any change in a MST topology requires a forced modeset on @crtc in
 * this topology. One such change is enabling/disabling the DSC decompression
 * state in the first branch device's UFP DPCD as required by one CRTC, while
 * the other @crtc in the same topology is still active, requiring a full modeset
 * on @crtc.
 *
 * Returns: %true if a change in the topology forces a full modeset on @crtc,
 * %false otherwise.
 */
bool intel_dp_mst_crtc_needs_modeset(struct intel_atomic_state *state,
				     struct intel_crtc *crtc)
{
	const struct intel_connector *crtc_connector;
	const struct drm_connector_state *conn_state;
	const struct drm_connector *_connector;
	int i;

	/* Only MST streams can be affected by topology-wide changes. */
	if (!intel_crtc_has_type(intel_atomic_get_new_crtc_state(state, crtc),
				 INTEL_OUTPUT_DP_MST))
		return false;

	crtc_connector = get_connector_in_state_for_crtc(state, crtc);

	if (!crtc_connector)
		/* None of the connectors in the topology needs modeset */
		return false;

	/* Scan every other stream in the same topology as @crtc. */
	for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
		const struct intel_connector *connector =
			to_intel_connector(_connector);
		const struct intel_crtc_state *new_crtc_state;
		const struct intel_crtc_state *old_crtc_state;
		struct intel_crtc *crtc_iter;

		/* Skip connectors outside this topology or without a CRTC. */
		if (connector->mst.dp != crtc_connector->mst.dp ||
		    !conn_state->crtc)
			continue;

		crtc_iter = to_intel_crtc(conn_state->crtc);

		new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc_iter);
		old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc_iter);

		if (!intel_crtc_needs_modeset(new_crtc_state))
			continue;

		/* Only a DSC enable/disable transition matters here. */
		if (old_crtc_state->dsc.compression_enable ==
		    new_crtc_state->dsc.compression_enable)
			continue;
		/*
		 * Toggling the decompression flag because of this stream in
		 * the first downstream branch device's UFP DPCD may reset the
		 * whole branch device. To avoid the reset while other streams
		 * are also active modeset the whole MST topology in this
		 * case.
		 */
		if (connector->dp.dsc_decompression_aux ==
		    &connector->mst.dp->aux)
			return true;
	}

	return false;
}
2068
2069 /**
2070 * intel_dp_mst_prepare_probe - Prepare an MST link for topology probing
2071 * @intel_dp: DP port object
2072 *
2073 * Prepare an MST link for topology probing, programming the target
2074 * link parameters to DPCD. This step is a requirement of the enumeration
2075 * of path resources during probing.
2076 */
intel_dp_mst_prepare_probe(struct intel_dp * intel_dp)2077 void intel_dp_mst_prepare_probe(struct intel_dp *intel_dp)
2078 {
2079 int link_rate = intel_dp_max_link_rate(intel_dp);
2080 int lane_count = intel_dp_max_lane_count(intel_dp);
2081 u8 rate_select;
2082 u8 link_bw;
2083
2084 if (intel_dp->link.active)
2085 return;
2086
2087 if (intel_mst_probed_link_params_valid(intel_dp, link_rate, lane_count))
2088 return;
2089
2090 intel_dp_compute_rate(intel_dp, link_rate, &link_bw, &rate_select);
2091
2092 intel_dp_link_training_set_mode(intel_dp, link_rate, false);
2093 intel_dp_link_training_set_bw(intel_dp, link_bw, rate_select, lane_count,
2094 drm_dp_enhanced_frame_cap(intel_dp->dpcd));
2095
2096 intel_mst_set_probed_link_params(intel_dp, link_rate, lane_count);
2097 }
2098
/**
 * intel_dp_mst_verify_dpcd_state - verify the MST SW enabled state wrt. the DPCD
 * @intel_dp: DP port object
 *
 * Verify if @intel_dp's MST enabled SW state matches the corresponding DPCD
 * state. A long HPD pulse - not long enough to be detected as a disconnected
 * state - could've reset the DPCD state, which requires tearing
 * down/recreating the MST topology.
 *
 * Returns %true if the SW MST enabled and DPCD states match, %false
 * otherwise.
 */
bool intel_dp_mst_verify_dpcd_state(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	int ret;
	u8 val;

	/* Nothing to verify if MST isn't enabled in SW. */
	if (!intel_dp->is_mst)
		return true;

	/*
	 * NOTE(review): a zero return (nothing read, no error) would leave
	 * @val uninitialized yet still compared below — presumably
	 * drm_dp_dpcd_readb() returns only 1 or a negative error; confirm.
	 */
	ret = drm_dp_dpcd_readb(intel_dp->mst.mgr.aux, DP_MSTM_CTRL, &val);

	/* Adjust the expected register value for SST + SideBand. */
	if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) {
		drm_dbg_kms(display->drm,
			    "[CONNECTOR:%d:%s][ENCODER:%d:%s] MST mode got reset, removing topology (ret=%d, ctrl=0x%02x)\n",
			    connector->base.base.id, connector->base.name,
			    encoder->base.base.id, encoder->base.name,
			    ret, val);

		return false;
	}

	return true;
}
2138