xref: /linux/drivers/gpu/drm/drm_gem_atomic_helper.c (revision 5ea5880764cbb164afb17a62e76ca75dc371409d)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 
3 #include <linux/dma-resv.h>
4 #include <linux/dma-fence-chain.h>
5 #include <linux/export.h>
6 
7 #include <drm/drm_atomic_state_helper.h>
8 #include <drm/drm_atomic_uapi.h>
9 #include <drm/drm_framebuffer.h>
10 #include <drm/drm_gem.h>
11 #include <drm/drm_gem_atomic_helper.h>
12 #include <drm/drm_gem_framebuffer_helper.h>
13 #include <drm/drm_simple_kms_helper.h>
14 
15 #include "drm_internal.h"
16 
17 /**
18  * DOC: overview
19  *
20  * The GEM atomic helpers library implements generic atomic-commit
21  * functions for drivers that use GEM objects. Currently, it provides
22  * synchronization helpers, and plane state and framebuffer BO mappings
23  * for planes with shadow buffers.
24  *
25  * Before scanout, a plane's framebuffer needs to be synchronized with
26  * possible writers that draw into the framebuffer. All drivers should
27  * call drm_gem_plane_helper_prepare_fb() from their implementation of
28  * struct &drm_plane_helper.prepare_fb . It sets the plane's fence from
29  * the framebuffer so that the DRM core can synchronize access automatically.
30  * drm_gem_plane_helper_prepare_fb() can also be used directly as
31  * implementation of prepare_fb.
32  *
33  * .. code-block:: c
34  *
35  *	#include <drm/drm_gem_atomic_helper.h>
36  *
37  *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
38  *		...,
 *		.prepare_fb = drm_gem_plane_helper_prepare_fb,
40  *	};
41  *
42  * A driver using a shadow buffer copies the content of the shadow buffers
43  * into the HW's framebuffer memory during an atomic update. This requires
44  * a mapping of the shadow buffer into kernel address space. The mappings
45  * cannot be established by commit-tail functions, such as atomic_update,
46  * as this would violate locking rules around dma_buf_vmap().
47  *
48  * The helpers for shadow-buffered planes establish and release mappings,
49  * and provide struct drm_shadow_plane_state, which stores the plane's mapping
50  * for commit-tail functions.
51  *
52  * Shadow-buffered planes can easily be enabled by using the provided macros
53  * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
54  * These macros set up the plane and plane-helper callbacks to point to the
55  * shadow-buffer helpers.
56  *
57  * .. code-block:: c
58  *
59  *	#include <drm/drm_gem_atomic_helper.h>
60  *
61  *	struct drm_plane_funcs driver_plane_funcs = {
62  *		...,
63  *		DRM_GEM_SHADOW_PLANE_FUNCS,
64  *	};
65  *
66  *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
67  *		...,
68  *		DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
69  *	};
70  *
71  * In the driver's atomic-update function, shadow-buffer mappings are available
72  * from the plane state. Use to_drm_shadow_plane_state() to upcast from
73  * struct drm_plane_state.
74  *
75  * .. code-block:: c
76  *
77  *	void driver_plane_atomic_update(struct drm_plane *plane,
78  *					struct drm_plane_state *old_plane_state)
79  *	{
80  *		struct drm_plane_state *plane_state = plane->state;
81  *		struct drm_shadow_plane_state *shadow_plane_state =
82  *			to_drm_shadow_plane_state(plane_state);
83  *
84  *		// access shadow buffer via shadow_plane_state->map
85  *	}
86  *
 * A mapping address for each of the framebuffer's buffer objects is stored in
88  * struct &drm_shadow_plane_state.map. The mappings are valid while the state
89  * is being used.
90  */
91 
92 /*
93  * Plane Helpers
94  */
95 
96 /**
97  * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
98  * @plane: Plane
99  * @state: Plane state the fence will be attached to
100  *
101  * This function extracts the exclusive fence from &drm_gem_object.resv and
102  * attaches it to plane state for the atomic helper to wait on. This is
103  * necessary to correctly implement implicit synchronization for any buffers
104  * shared as a struct &dma_buf. This function can be used as the
105  * &drm_plane_helper_funcs.prepare_fb callback.
106  *
107  * There is no need for &drm_plane_helper_funcs.cleanup_fb hook for simple
108  * GEM based framebuffer drivers which have their buffers always pinned in
109  * memory.
110  *
111  * This function is the default implementation for GEM drivers of
112  * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
113  */
114 int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane,
115 				    struct drm_plane_state *state)
116 {
117 	struct dma_fence *fence = dma_fence_get(state->fence);
118 	enum dma_resv_usage usage;
119 	size_t i;
120 	int ret;
121 
122 	if (!state->fb)
123 		return 0;
124 
125 	/*
126 	 * Only add the kernel fences here if there is already a fence set via
127 	 * explicit fencing interfaces on the atomic ioctl.
128 	 *
129 	 * This way explicit fencing can be used to overrule implicit fencing,
130 	 * which is important to make explicit fencing use-cases work: One
131 	 * example is using one buffer for 2 screens with different refresh
132 	 * rates. Implicit fencing will clamp rendering to the refresh rate of
133 	 * the slower screen, whereas explicit fence allows 2 independent
134 	 * render and display loops on a single buffer. If a driver allows
135 	 * obeys both implicit and explicit fences for plane updates, then it
136 	 * will break all the benefits of explicit fencing.
137 	 */
138 	usage = fence ? DMA_RESV_USAGE_KERNEL : DMA_RESV_USAGE_WRITE;
139 
140 	for (i = 0; i < state->fb->format->num_planes; ++i) {
141 		struct drm_gem_object *obj = drm_gem_fb_get_obj(state->fb, i);
142 		struct dma_fence *new;
143 
144 		if (!obj) {
145 			ret = -EINVAL;
146 			goto error;
147 		}
148 
149 		ret = dma_resv_get_singleton(obj->resv, usage, &new);
150 		if (ret)
151 			goto error;
152 
153 		if (new && fence) {
154 			struct dma_fence_chain *chain = dma_fence_chain_alloc();
155 
156 			if (!chain) {
157 				ret = -ENOMEM;
158 				goto error;
159 			}
160 
161 			dma_fence_chain_init(chain, fence, new, 1);
162 			fence = &chain->base;
163 
164 		} else if (new) {
165 			fence = new;
166 		}
167 	}
168 
169 	dma_fence_put(state->fence);
170 	state->fence = fence;
171 	return 0;
172 
173 error:
174 	dma_fence_put(fence);
175 	return ret;
176 }
177 EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);
178 
179 /*
180  * Shadow-buffered Planes
181  */
182 
183 /**
184  * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
185  * @plane: the plane
186  * @new_shadow_plane_state: the new shadow-buffered plane state
187  *
188  * This function duplicates shadow-buffered plane state. This is helpful for drivers
189  * that subclass struct drm_shadow_plane_state.
190  *
191  * The function does not duplicate existing mappings of the shadow buffers.
192  * Mappings are maintained during the atomic commit by the plane's prepare_fb
193  * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
194  * for corresponding helpers.
195  */
196 void
197 __drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
198 				       struct drm_shadow_plane_state *new_shadow_plane_state)
199 {
200 	struct drm_plane_state *plane_state = plane->state;
201 	struct drm_shadow_plane_state *shadow_plane_state =
202 		to_drm_shadow_plane_state(plane_state);
203 
204 	__drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
205 
206 	drm_format_conv_state_copy(&new_shadow_plane_state->fmtcnv_state,
207 				   &shadow_plane_state->fmtcnv_state);
208 }
209 EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);
210 
211 /**
212  * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
213  * @plane: the plane
214  *
215  * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
216  * shadow-buffered planes. It assumes the existing state to be of type
217  * struct drm_shadow_plane_state and it allocates the new state to be of this
218  * type.
219  *
220  * The function does not duplicate existing mappings of the shadow buffers.
221  * Mappings are maintained during the atomic commit by the plane's prepare_fb
222  * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
223  * for corresponding helpers.
224  *
225  * Returns:
226  * A pointer to a new plane state on success, or NULL otherwise.
227  */
228 struct drm_plane_state *
229 drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
230 {
231 	struct drm_plane_state *plane_state = plane->state;
232 	struct drm_shadow_plane_state *new_shadow_plane_state;
233 
234 	if (!plane_state)
235 		return NULL;
236 
237 	new_shadow_plane_state = kzalloc_obj(*new_shadow_plane_state);
238 	if (!new_shadow_plane_state)
239 		return NULL;
240 	__drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);
241 
242 	return &new_shadow_plane_state->base;
243 }
244 EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);
245 
/**
 * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function cleans up shadow-buffered plane state. Helpful for drivers that
 * subclass struct drm_shadow_plane_state.
 */
void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
{
	/* release the format-conversion state before tearing down the base state */
	drm_format_conv_state_release(&shadow_plane_state->fmtcnv_state);
	__drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);
259 
/**
 * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_funcs.atomic_destroy_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
					struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *state = to_drm_shadow_plane_state(plane_state);

	__drm_gem_destroy_shadow_plane_state(state);
	kfree(state);
}
EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);
279 
280 /**
281  * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
282  * @plane: the plane
283  * @shadow_plane_state: the shadow-buffered plane state
284  *
285  * This function resets state for shadow-buffered planes. Helpful
286  * for drivers that subclass struct drm_shadow_plane_state.
287  */
288 void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
289 				  struct drm_shadow_plane_state *shadow_plane_state)
290 {
291 	if (shadow_plane_state) {
292 		__drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
293 		drm_format_conv_state_init(&shadow_plane_state->fmtcnv_state);
294 	} else {
295 		__drm_atomic_helper_plane_reset(plane, NULL);
296 	}
297 }
298 EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);
299 
300 /**
301  * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
302  * @plane: the plane
303  *
304  * This function implements struct &drm_plane_funcs.reset_plane for
305  * shadow-buffered planes. It assumes the current plane state to be
306  * of type struct drm_shadow_plane and it allocates the new state of
307  * this type.
308  */
309 void drm_gem_reset_shadow_plane(struct drm_plane *plane)
310 {
311 	struct drm_shadow_plane_state *shadow_plane_state;
312 
313 	if (plane->state) {
314 		drm_gem_destroy_shadow_plane_state(plane, plane->state);
315 		plane->state = NULL; /* must be set to NULL here */
316 	}
317 
318 	shadow_plane_state = kzalloc_obj(*shadow_plane_state);
319 	__drm_gem_reset_shadow_plane(plane, shadow_plane_state);
320 }
321 EXPORT_SYMBOL(drm_gem_reset_shadow_plane);
322 
323 /**
324  * drm_gem_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
325  * @plane: the plane
326  * @plane_state: the plane state of type struct drm_shadow_plane_state
327  *
328  * This function implements struct &drm_plane_helper_funcs.begin_fb_access. It
329  * maps all buffer objects of the plane's framebuffer into kernel address
330  * space and stores them in struct &drm_shadow_plane_state.map. The first data
331  * bytes are available in struct &drm_shadow_plane_state.data.
332  *
333  * See drm_gem_end_shadow_fb_access() for cleanup.
334  *
335  * Returns:
336  * 0 on success, or a negative errno code otherwise.
337  */
338 int drm_gem_begin_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
339 {
340 	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
341 	struct drm_framebuffer *fb = plane_state->fb;
342 
343 	if (!fb)
344 		return 0;
345 
346 	return drm_gem_fb_vmap(fb, shadow_plane_state->map, shadow_plane_state->data);
347 }
348 EXPORT_SYMBOL(drm_gem_begin_shadow_fb_access);
349 
350 /**
351  * drm_gem_end_shadow_fb_access - releases shadow framebuffers from CPU access
352  * @plane: the plane
353  * @plane_state: the plane state of type struct drm_shadow_plane_state
354  *
355  * This function implements struct &drm_plane_helper_funcs.end_fb_access. It
356  * undoes all effects of drm_gem_begin_shadow_fb_access() in reverse order.
357  *
358  * See drm_gem_begin_shadow_fb_access() for more information.
359  */
360 void drm_gem_end_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
361 {
362 	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
363 	struct drm_framebuffer *fb = plane_state->fb;
364 
365 	if (!fb)
366 		return;
367 
368 	drm_gem_fb_vunmap(fb, shadow_plane_state->map);
369 }
370 EXPORT_SYMBOL(drm_gem_end_shadow_fb_access);
371 
372 /**
373  * drm_gem_simple_kms_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
374  * @pipe: the simple display pipe
375  * @plane_state: the plane state of type struct drm_shadow_plane_state
376  *
377  * This function implements struct drm_simple_display_funcs.begin_fb_access.
378  *
379  * See drm_gem_begin_shadow_fb_access() for details and
380  * drm_gem_simple_kms_cleanup_shadow_fb() for cleanup.
381  *
382  * Returns:
383  * 0 on success, or a negative errno code otherwise.
384  */
385 int drm_gem_simple_kms_begin_shadow_fb_access(struct drm_simple_display_pipe *pipe,
386 					      struct drm_plane_state *plane_state)
387 {
388 	return drm_gem_begin_shadow_fb_access(&pipe->plane, plane_state);
389 }
390 EXPORT_SYMBOL(drm_gem_simple_kms_begin_shadow_fb_access);
391 
392 /**
393  * drm_gem_simple_kms_end_shadow_fb_access - releases shadow framebuffers from CPU access
394  * @pipe: the simple display pipe
395  * @plane_state: the plane state of type struct drm_shadow_plane_state
396  *
397  * This function implements struct drm_simple_display_funcs.end_fb_access.
398  * It undoes all effects of drm_gem_simple_kms_begin_shadow_fb_access() in
399  * reverse order.
400  *
401  * See drm_gem_simple_kms_begin_shadow_fb_access().
402  */
403 void drm_gem_simple_kms_end_shadow_fb_access(struct drm_simple_display_pipe *pipe,
404 					     struct drm_plane_state *plane_state)
405 {
406 	drm_gem_end_shadow_fb_access(&pipe->plane, plane_state);
407 }
408 EXPORT_SYMBOL(drm_gem_simple_kms_end_shadow_fb_access);
409 
410 /**
411  * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
412  * @pipe: the simple display pipe
413  *
414  * This function implements struct drm_simple_display_funcs.reset_plane
415  * for shadow-buffered planes.
416  */
417 void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
418 {
419 	drm_gem_reset_shadow_plane(&pipe->plane);
420 }
421 EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);
422 
423 /**
424  * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
425  * @pipe: the simple display pipe
426  *
427  * This function implements struct drm_simple_display_funcs.duplicate_plane_state
428  * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
429  * buffers. Mappings are maintained during the atomic commit by the plane's prepare_fb
430  * and cleanup_fb helpers.
431  *
432  * Returns:
433  * A pointer to a new plane state on success, or NULL otherwise.
434  */
435 struct drm_plane_state *
436 drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
437 {
438 	return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
439 }
440 EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);
441 
442 /**
443  * drm_gem_simple_kms_destroy_shadow_plane_state - resets shadow-buffered plane state
444  * @pipe: the simple display pipe
445  * @plane_state: the plane state of type struct drm_shadow_plane_state
446  *
447  * This function implements struct drm_simple_display_funcs.destroy_plane_state
448  * for shadow-buffered planes. It expects that mappings of shadow buffers
449  * have been released already.
450  */
451 void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
452 						   struct drm_plane_state *plane_state)
453 {
454 	drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
455 }
456 EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);
457