xref: /linux/drivers/gpu/drm/i915/display/intel_atomic.c (revision 95dbf14b236f3147f716cd159bd29461916c610e)
1 /*
2  * Copyright © 2015 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 /**
25  * DOC: atomic modeset support
26  *
27  * The functions here implement the state management and hardware programming
28  * dispatch required by the atomic modeset infrastructure.
29  * See intel_atomic_plane.c for the plane-specific atomic functionality.
30  */
31 
32 #include <drm/drm_atomic.h>
33 #include <drm/drm_atomic_helper.h>
34 #include <drm/drm_fourcc.h>
35 #include <drm/drm_plane_helper.h>
36 
37 #include "intel_atomic.h"
38 #include "intel_display_types.h"
39 #include "intel_hdcp.h"
40 #include "intel_psr.h"
41 #include "intel_sprite.h"
42 
43 /**
44  * intel_digital_connector_atomic_get_property - hook for connector->atomic_get_property.
45  * @connector: Connector to get the property for.
46  * @state: Connector state to retrieve the property from.
47  * @property: Property to retrieve.
48  * @val: Return value for the property.
49  *
50  * Returns the atomic property value for a digital connector.
51  */
52 int intel_digital_connector_atomic_get_property(struct drm_connector *connector,
53 						const struct drm_connector_state *state,
54 						struct drm_property *property,
55 						u64 *val)
56 {
57 	struct drm_device *dev = connector->dev;
58 	struct drm_i915_private *dev_priv = to_i915(dev);
59 	struct intel_digital_connector_state *intel_conn_state =
60 		to_intel_digital_connector_state(state);
61 
62 	if (property == dev_priv->force_audio_property)
63 		*val = intel_conn_state->force_audio;
64 	else if (property == dev_priv->broadcast_rgb_property)
65 		*val = intel_conn_state->broadcast_rgb;
66 	else {
67 		DRM_DEBUG_ATOMIC("Unknown property [PROP:%d:%s]\n",
68 				 property->base.id, property->name);
69 		return -EINVAL;
70 	}
71 
72 	return 0;
73 }
74 
75 /**
76  * intel_digital_connector_atomic_set_property - hook for connector->atomic_set_property.
77  * @connector: Connector to set the property for.
78  * @state: Connector state to set the property on.
79  * @property: Property to set.
80  * @val: New value for the property.
81  *
82  * Sets the atomic property value for a digital connector.
83  */
84 int intel_digital_connector_atomic_set_property(struct drm_connector *connector,
85 						struct drm_connector_state *state,
86 						struct drm_property *property,
87 						u64 val)
88 {
89 	struct drm_device *dev = connector->dev;
90 	struct drm_i915_private *dev_priv = to_i915(dev);
91 	struct intel_digital_connector_state *intel_conn_state =
92 		to_intel_digital_connector_state(state);
93 
94 	if (property == dev_priv->force_audio_property) {
95 		intel_conn_state->force_audio = val;
96 		return 0;
97 	}
98 
99 	if (property == dev_priv->broadcast_rgb_property) {
100 		intel_conn_state->broadcast_rgb = val;
101 		return 0;
102 	}
103 
104 	DRM_DEBUG_ATOMIC("Unknown property [PROP:%d:%s]\n",
105 			 property->base.id, property->name);
106 	return -EINVAL;
107 }
108 
109 static bool blob_equal(const struct drm_property_blob *a,
110 		       const struct drm_property_blob *b)
111 {
112 	if (a && b)
113 		return a->length == b->length &&
114 			!memcmp(a->data, b->data, a->length);
115 
116 	return !a == !b;
117 }
118 
/**
 * intel_digital_connector_atomic_check - hook for connector->atomic_check
 * @conn: Connector to check
 * @state: Atomic state holding the old and new connector states
 *
 * Gives HDCP and PSR a chance to react to the connector state transition,
 * and flags the attached crtc's mode as changed when any fastset-handled
 * connector property differs between the old and new state, so the crtc
 * state is recomputed (the change may still be applied via fastset rather
 * than a full modeset).
 *
 * Returns: Always 0.
 */
int intel_digital_connector_atomic_check(struct drm_connector *conn,
					 struct drm_atomic_state *state)
{
	struct drm_connector_state *new_state =
		drm_atomic_get_new_connector_state(state, conn);
	struct intel_digital_connector_state *new_conn_state =
		to_intel_digital_connector_state(new_state);
	struct drm_connector_state *old_state =
		drm_atomic_get_old_connector_state(state, conn);
	struct intel_digital_connector_state *old_conn_state =
		to_intel_digital_connector_state(old_state);
	struct drm_crtc_state *crtc_state;

	intel_hdcp_atomic_check(conn, old_state, new_state);
	intel_psr_atomic_check(conn, old_state, new_state);

	/* Connector not (becoming) enabled: nothing more to check. */
	if (!new_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_new_crtc_state(state, new_state->crtc);

	/*
	 * These properties are handled by fastset, and might not end
	 * up in a modeset.
	 */
	if (new_conn_state->force_audio != old_conn_state->force_audio ||
	    new_conn_state->broadcast_rgb != old_conn_state->broadcast_rgb ||
	    new_conn_state->base.colorspace != old_conn_state->base.colorspace ||
	    new_conn_state->base.picture_aspect_ratio != old_conn_state->base.picture_aspect_ratio ||
	    new_conn_state->base.content_type != old_conn_state->base.content_type ||
	    new_conn_state->base.scaling_mode != old_conn_state->base.scaling_mode ||
	    !blob_equal(new_conn_state->base.hdr_output_metadata,
			old_conn_state->base.hdr_output_metadata))
		crtc_state->mode_changed = true;

	return 0;
}
156 
157 /**
158  * intel_digital_connector_duplicate_state - duplicate connector state
159  * @connector: digital connector
160  *
161  * Allocates and returns a copy of the connector state (both common and
162  * digital connector specific) for the specified connector.
163  *
164  * Returns: The newly allocated connector state, or NULL on failure.
165  */
166 struct drm_connector_state *
167 intel_digital_connector_duplicate_state(struct drm_connector *connector)
168 {
169 	struct intel_digital_connector_state *state;
170 
171 	state = kmemdup(connector->state, sizeof(*state), GFP_KERNEL);
172 	if (!state)
173 		return NULL;
174 
175 	__drm_atomic_helper_connector_duplicate_state(connector, &state->base);
176 	return &state->base;
177 }
178 
179 /**
180  * intel_connector_needs_modeset - check if connector needs a modeset
181  */
182 bool
183 intel_connector_needs_modeset(struct intel_atomic_state *state,
184 			      struct drm_connector *connector)
185 {
186 	const struct drm_connector_state *old_conn_state, *new_conn_state;
187 
188 	old_conn_state = drm_atomic_get_old_connector_state(&state->base, connector);
189 	new_conn_state = drm_atomic_get_new_connector_state(&state->base, connector);
190 
191 	return old_conn_state->crtc != new_conn_state->crtc ||
192 	       (new_conn_state->crtc &&
193 		drm_atomic_crtc_needs_modeset(drm_atomic_get_new_crtc_state(&state->base,
194 									    new_conn_state->crtc)));
195 }
196 
197 struct intel_digital_connector_state *
198 intel_atomic_get_digital_connector_state(struct intel_atomic_state *state,
199 					 struct intel_connector *connector)
200 {
201 	struct drm_connector_state *conn_state;
202 
203 	conn_state = drm_atomic_get_connector_state(&state->base,
204 						    &connector->base);
205 	if (IS_ERR(conn_state))
206 		return ERR_CAST(conn_state);
207 
208 	return to_intel_digital_connector_state(conn_state);
209 }
210 
/**
 * intel_crtc_duplicate_state - duplicate crtc state
 * @crtc: drm crtc
 *
 * Allocates and returns a copy of the crtc state (both common and
 * Intel-specific) for the specified crtc.
 *
 * Returns: The newly allocated crtc state, or NULL on failure.
 */
struct drm_crtc_state *
intel_crtc_duplicate_state(struct drm_crtc *crtc)
{
	const struct intel_crtc_state *old_crtc_state = to_intel_crtc_state(crtc->state);
	struct intel_crtc_state *crtc_state;

	crtc_state = kmemdup(old_crtc_state, sizeof(*crtc_state), GFP_KERNEL);
	if (!crtc_state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &crtc_state->uapi);

	/*
	 * kmemdup() copied the old state's hw color blob pointers; take an
	 * extra reference on each so the duplicate owns its own references.
	 */
	if (crtc_state->hw.degamma_lut)
		drm_property_blob_get(crtc_state->hw.degamma_lut);
	if (crtc_state->hw.ctm)
		drm_property_blob_get(crtc_state->hw.ctm);
	if (crtc_state->hw.gamma_lut)
		drm_property_blob_get(crtc_state->hw.gamma_lut);

	/* Per-commit transient state must not leak into the new state. */
	crtc_state->update_pipe = false;
	crtc_state->disable_lp_wm = false;
	crtc_state->disable_cxsr = false;
	crtc_state->update_wm_pre = false;
	crtc_state->update_wm_post = false;
	crtc_state->fifo_changed = false;
	crtc_state->preload_luts = false;
	crtc_state->wm.need_postvbl_update = false;
	crtc_state->fb_bits = 0;
	crtc_state->update_planes = 0;

	return &crtc_state->uapi;
}
253 
/* Drop the hw state's references to the color management property blobs. */
static void intel_crtc_put_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_blob_put(crtc_state->hw.degamma_lut);
	drm_property_blob_put(crtc_state->hw.gamma_lut);
	drm_property_blob_put(crtc_state->hw.ctm);
}
260 
/**
 * intel_crtc_free_hw_state - release resources held by the hw crtc state
 * @crtc_state: the crtc state
 *
 * Currently this only drops the references held on the color blobs.
 */
void intel_crtc_free_hw_state(struct intel_crtc_state *crtc_state)
{
	intel_crtc_put_color_blobs(crtc_state);
}
265 
/**
 * intel_crtc_copy_color_blobs - sync hw color blobs from the uapi state
 * @crtc_state: the crtc state
 *
 * Replaces the hw degamma LUT, gamma LUT and CTM blob references with the
 * ones currently set in the uapi state, adjusting refcounts accordingly.
 */
void intel_crtc_copy_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_replace_blob(&crtc_state->hw.degamma_lut,
				  crtc_state->uapi.degamma_lut);
	drm_property_replace_blob(&crtc_state->hw.gamma_lut,
				  crtc_state->uapi.gamma_lut);
	drm_property_replace_blob(&crtc_state->hw.ctm,
				  crtc_state->uapi.ctm);
}
275 
/**
 * intel_crtc_destroy_state - destroy crtc state
 * @crtc: drm crtc
 * @state: the state to destroy
 *
 * Destroys the crtc state (both common and Intel-specific) for the
 * specified crtc.
 */
void
intel_crtc_destroy_state(struct drm_crtc *crtc,
			 struct drm_crtc_state *state)
{
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(state);

	/* Release the common state first, then our hw-state blob references. */
	__drm_atomic_helper_crtc_destroy_state(&crtc_state->uapi);
	intel_crtc_free_hw_state(crtc_state);
	kfree(crtc_state);
}
294 
/*
 * Assign a hardware scaler to *@scaler_id (unless one is already assigned)
 * and pick the scaler mode for the given user.
 *
 * @plane_state may be NULL — e.g. for the crtc (panel fitter) scaler user —
 * in which case the non-planar mode selection paths below apply.
 */
static void intel_atomic_setup_scaler(struct intel_crtc_scaler_state *scaler_state,
				      int num_scalers_need, struct intel_crtc *intel_crtc,
				      const char *name, int idx,
				      struct intel_plane_state *plane_state,
				      int *scaler_id)
{
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	int j;
	u32 mode;

	if (*scaler_id < 0) {
		/* find a free scaler */
		for (j = 0; j < intel_crtc->num_scalers; j++) {
			if (scaler_state->scalers[j].in_use)
				continue;

			*scaler_id = j;
			scaler_state->scalers[*scaler_id].in_use = 1;
			break;
		}
	}

	/* Caller should have rejected requests exceeding the scaler count. */
	if (WARN(*scaler_id < 0, "Cannot find scaler for %s:%d\n", name, idx))
		return;

	/* set scaler mode */
	if (plane_state && plane_state->hw.fb &&
	    plane_state->hw.fb->format->is_yuv &&
	    plane_state->hw.fb->format->num_planes > 1) {
		struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
		if (IS_GEN(dev_priv, 9) &&
		    !IS_GEMINILAKE(dev_priv)) {
			mode = SKL_PS_SCALER_MODE_NV12;
		} else if (icl_is_hdr_plane(dev_priv, plane->id)) {
			/*
			 * On gen11+'s HDR planes we only use the scaler for
			 * scaling. They have a dedicated chroma upsampler, so
			 * we don't need the scaler to upsample the UV plane.
			 */
			mode = PS_SCALER_MODE_NORMAL;
		} else {
			struct intel_plane *linked =
				plane_state->planar_linked_plane;

			mode = PS_SCALER_MODE_PLANAR;

			/* Tell the scaler which plane supplies the Y data. */
			if (linked)
				mode |= PS_PLANE_Y_SEL(linked->id);
		}
	} else if (INTEL_GEN(dev_priv) > 9 || IS_GEMINILAKE(dev_priv)) {
		mode = PS_SCALER_MODE_NORMAL;
	} else if (num_scalers_need == 1 && intel_crtc->num_scalers > 1) {
		/*
		 * when only 1 scaler is in use on a pipe with 2 scalers
		 * scaler 0 operates in high quality (HQ) mode.
		 * In this case use scaler 0 to take advantage of HQ mode
		 */
		scaler_state->scalers[*scaler_id].in_use = 0;
		*scaler_id = 0;
		scaler_state->scalers[0].in_use = 1;
		mode = SKL_PS_SCALER_MODE_HQ;
	} else {
		mode = SKL_PS_SCALER_MODE_DYN;
	}

	DRM_DEBUG_KMS("Attached scaler id %u.%u to %s:%d\n",
		      intel_crtc->pipe, *scaler_id, name, idx);
	scaler_state->scalers[*scaler_id].mode = mode;
}
364 
/**
 * intel_atomic_setup_scalers() - setup scalers for crtc per staged requests
 * @dev_priv: i915 device
 * @intel_crtc: intel crtc
 * @crtc_state: incoming crtc_state to validate and setup scalers
 *
 * This function sets up scalers based on staged scaling requests for
 * a @crtc and its planes. It is called from crtc level check path. If request
 * is a supportable request, it attaches scalers to requested planes and crtc.
 *
 * This function takes into account the current scaler(s) in use by any planes
 * not being part of this atomic state
 *
 *  Returns:
 *         0 - scalers were setup successfully
 *         error code - otherwise
 */
int intel_atomic_setup_scalers(struct drm_i915_private *dev_priv,
			       struct intel_crtc *intel_crtc,
			       struct intel_crtc_state *crtc_state)
{
	struct drm_plane *plane = NULL;
	struct intel_plane *intel_plane;
	struct intel_plane_state *plane_state = NULL;
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct drm_atomic_state *drm_state = crtc_state->uapi.state;
	struct intel_atomic_state *intel_state = to_intel_atomic_state(drm_state);
	int num_scalers_need;
	int i;

	/* one scaler_users bit per requested user (crtc or plane) */
	num_scalers_need = hweight32(scaler_state->scaler_users);

	/*
	 * High level flow:
	 * - staged scaler requests are already in scaler_state->scaler_users
	 * - check whether staged scaling requests can be supported
	 * - add planes using scalers that aren't in current transaction
	 * - assign scalers to requested users
	 * - as part of plane commit, scalers will be committed
	 *   (i.e., either attached or detached) to respective planes in hw
	 * - as part of crtc_commit, scaler will be either attached or detached
	 *   to crtc in hw
	 */

	/* fail if required scalers > available scalers */
	if (num_scalers_need > intel_crtc->num_scalers){
		DRM_DEBUG_KMS("Too many scaling requests %d > %d\n",
			num_scalers_need, intel_crtc->num_scalers);
		return -EINVAL;
	}

	/* walkthrough scaler_users bits and start assigning scalers */
	for (i = 0; i < sizeof(scaler_state->scaler_users) * 8; i++) {
		int *scaler_id;
		const char *name;
		int idx;

		/* skip if scaler not required */
		if (!(scaler_state->scaler_users & (1 << i)))
			continue;

		if (i == SKL_CRTC_INDEX) {
			name = "CRTC";
			idx = intel_crtc->base.base.id;

			/* panel fitter case: assign as a crtc scaler */
			scaler_id = &scaler_state->scaler_id;
		} else {
			name = "PLANE";

			/* plane scaler case: assign as a plane scaler */
			/* find the plane that set the bit as scaler_user */
			plane = drm_state->planes[i].ptr;

			/*
			 * to enable/disable hq mode, add planes that are using scaler
			 * into this transaction
			 */
			if (!plane) {
				struct drm_plane_state *state;

				/*
				 * GLK+ scalers don't have a HQ mode so it
				 * isn't necessary to change between HQ and dyn mode
				 * on those platforms.
				 */
				if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
					continue;

				plane = drm_plane_from_index(&dev_priv->drm, i);
				state = drm_atomic_get_plane_state(drm_state, plane);
				if (IS_ERR(state)) {
					DRM_DEBUG_KMS("Failed to add [PLANE:%d] to drm_state\n",
						plane->base.id);
					return PTR_ERR(state);
				}
			}

			intel_plane = to_intel_plane(plane);
			idx = plane->base.id;

			/* plane on different crtc cannot be a scaler user of this crtc */
			if (WARN_ON(intel_plane->pipe != intel_crtc->pipe))
				continue;

			plane_state = intel_atomic_get_new_plane_state(intel_state,
								       intel_plane);
			scaler_id = &plane_state->scaler_id;
		}

		intel_atomic_setup_scaler(scaler_state, num_scalers_need,
					  intel_crtc, name, idx,
					  plane_state, scaler_id);
	}

	return 0;
}
483 
484 struct drm_atomic_state *
485 intel_atomic_state_alloc(struct drm_device *dev)
486 {
487 	struct intel_atomic_state *state = kzalloc(sizeof(*state), GFP_KERNEL);
488 
489 	if (!state || drm_atomic_state_init(dev, &state->base) < 0) {
490 		kfree(state);
491 		return NULL;
492 	}
493 
494 	return &state->base;
495 }
496 
497 void intel_atomic_state_clear(struct drm_atomic_state *s)
498 {
499 	struct intel_atomic_state *state = to_intel_atomic_state(s);
500 	drm_atomic_state_default_clear(&state->base);
501 	state->dpll_set = state->modeset = false;
502 	state->global_state_changed = false;
503 	state->active_pipes = 0;
504 	memset(&state->min_cdclk, 0, sizeof(state->min_cdclk));
505 	memset(&state->min_voltage_level, 0, sizeof(state->min_voltage_level));
506 	memset(&state->cdclk.logical, 0, sizeof(state->cdclk.logical));
507 	memset(&state->cdclk.actual, 0, sizeof(state->cdclk.actual));
508 	state->cdclk.pipe = INVALID_PIPE;
509 }
510 
511 struct intel_crtc_state *
512 intel_atomic_get_crtc_state(struct drm_atomic_state *state,
513 			    struct intel_crtc *crtc)
514 {
515 	struct drm_crtc_state *crtc_state;
516 	crtc_state = drm_atomic_get_crtc_state(state, &crtc->base);
517 	if (IS_ERR(crtc_state))
518 		return ERR_CAST(crtc_state);
519 
520 	return to_intel_crtc_state(crtc_state);
521 }
522 
523 int intel_atomic_lock_global_state(struct intel_atomic_state *state)
524 {
525 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
526 	struct intel_crtc *crtc;
527 
528 	state->global_state_changed = true;
529 
530 	for_each_intel_crtc(&dev_priv->drm, crtc) {
531 		int ret;
532 
533 		ret = drm_modeset_lock(&crtc->base.mutex,
534 				       state->base.acquire_ctx);
535 		if (ret)
536 			return ret;
537 	}
538 
539 	return 0;
540 }
541 
542 int intel_atomic_serialize_global_state(struct intel_atomic_state *state)
543 {
544 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
545 	struct intel_crtc *crtc;
546 
547 	state->global_state_changed = true;
548 
549 	for_each_intel_crtc(&dev_priv->drm, crtc) {
550 		struct intel_crtc_state *crtc_state;
551 
552 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
553 		if (IS_ERR(crtc_state))
554 			return PTR_ERR(crtc_state);
555 	}
556 
557 	return 0;
558 }
559