1 // SPDX-License-Identifier: GPL-2.0-or-later
2 
3 #include <linux/dma-resv.h>
4 #include <linux/dma-fence-chain.h>
5 
6 #include <drm/drm_atomic_state_helper.h>
7 #include <drm/drm_atomic_uapi.h>
8 #include <drm/drm_gem.h>
9 #include <drm/drm_gem_atomic_helper.h>
10 #include <drm/drm_gem_framebuffer_helper.h>
11 #include <drm/drm_simple_kms_helper.h>
12 
13 #include "drm_internal.h"
14 
15 /**
16  * DOC: overview
17  *
18  * The GEM atomic helpers library implements generic atomic-commit
19  * functions for drivers that use GEM objects. Currently, it provides
20  * synchronization helpers, and plane state and framebuffer BO mappings
21  * for planes with shadow buffers.
22  *
23  * Before scanout, a plane's framebuffer needs to be synchronized with
24  * possible writers that draw into the framebuffer. All drivers should
25  * call drm_gem_plane_helper_prepare_fb() from their implementation of
 * struct &drm_plane_helper_funcs.prepare_fb. It sets the plane's fence from
27  * the framebuffer so that the DRM core can synchronize access automatically.
28  *
29  * drm_gem_plane_helper_prepare_fb() can also be used directly as
30  * implementation of prepare_fb. For drivers based on
31  * struct drm_simple_display_pipe, drm_gem_simple_display_pipe_prepare_fb()
32  * provides equivalent functionality.
33  *
34  * .. code-block:: c
35  *
36  *	#include <drm/drm_gem_atomic_helper.h>
37  *
 *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *		...,
 *		.prepare_fb = drm_gem_plane_helper_prepare_fb,
 *	};
 *
 *	struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *		...,
 *		.prepare_fb = drm_gem_simple_display_pipe_prepare_fb,
 *	};
47  *
48  * A driver using a shadow buffer copies the content of the shadow buffers
49  * into the HW's framebuffer memory during an atomic update. This requires
50  * a mapping of the shadow buffer into kernel address space. The mappings
51  * cannot be established by commit-tail functions, such as atomic_update,
52  * as this would violate locking rules around dma_buf_vmap().
53  *
54  * The helpers for shadow-buffered planes establish and release mappings,
55  * and provide struct drm_shadow_plane_state, which stores the plane's mapping
56  * for commit-tail functions.
57  *
58  * Shadow-buffered planes can easily be enabled by using the provided macros
59  * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
60  * These macros set up the plane and plane-helper callbacks to point to the
61  * shadow-buffer helpers.
62  *
63  * .. code-block:: c
64  *
65  *	#include <drm/drm_gem_atomic_helper.h>
66  *
67  *	struct drm_plane_funcs driver_plane_funcs = {
68  *		...,
69  *		DRM_GEM_SHADOW_PLANE_FUNCS,
70  *	};
71  *
72  *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
73  *		...,
74  *		DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
75  *	};
76  *
77  * In the driver's atomic-update function, shadow-buffer mappings are available
78  * from the plane state. Use to_drm_shadow_plane_state() to upcast from
79  * struct drm_plane_state.
80  *
81  * .. code-block:: c
82  *
83  *	void driver_plane_atomic_update(struct drm_plane *plane,
84  *					struct drm_plane_state *old_plane_state)
85  *	{
86  *		struct drm_plane_state *plane_state = plane->state;
87  *		struct drm_shadow_plane_state *shadow_plane_state =
88  *			to_drm_shadow_plane_state(plane_state);
89  *
90  *		// access shadow buffer via shadow_plane_state->map
91  *	}
92  *
93  * A mapping address for each of the framebuffer's buffer object is stored in
94  * struct &drm_shadow_plane_state.map. The mappings are valid while the state
95  * is being used.
96  *
97  * Drivers that use struct drm_simple_display_pipe can use
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
99  * callbacks. Access to shadow-buffer mappings is similar to regular
100  * atomic_update.
101  *
102  * .. code-block:: c
103  *
104  *	struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
105  *		...,
106  *		DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
107  *	};
108  *
109  *	void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
110  *				struct drm_crtc_state *crtc_state,
111  *				struct drm_plane_state *plane_state)
112  *	{
113  *		struct drm_shadow_plane_state *shadow_plane_state =
114  *			to_drm_shadow_plane_state(plane_state);
115  *
116  *		// access shadow buffer via shadow_plane_state->map
117  *	}
118  */
119 
120 /*
121  * Plane Helpers
122  */
123 
124 /**
125  * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
126  * @plane: Plane
127  * @state: Plane state the fence will be attached to
128  *
129  * This function extracts the exclusive fence from &drm_gem_object.resv and
130  * attaches it to plane state for the atomic helper to wait on. This is
131  * necessary to correctly implement implicit synchronization for any buffers
132  * shared as a struct &dma_buf. This function can be used as the
133  * &drm_plane_helper_funcs.prepare_fb callback.
134  *
135  * There is no need for &drm_plane_helper_funcs.cleanup_fb hook for simple
136  * GEM based framebuffer drivers which have their buffers always pinned in
137  * memory.
138  *
139  * This function is the default implementation for GEM drivers of
140  * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
141  */
drm_gem_plane_helper_prepare_fb(struct drm_plane * plane,struct drm_plane_state * state)142 int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane,
143 				    struct drm_plane_state *state)
144 {
145 	struct dma_fence *fence = dma_fence_get(state->fence);
146 	enum dma_resv_usage usage;
147 	size_t i;
148 	int ret;
149 
150 	if (!state->fb)
151 		return 0;
152 
153 	/*
154 	 * Only add the kernel fences here if there is already a fence set via
155 	 * explicit fencing interfaces on the atomic ioctl.
156 	 *
157 	 * This way explicit fencing can be used to overrule implicit fencing,
158 	 * which is important to make explicit fencing use-cases work: One
159 	 * example is using one buffer for 2 screens with different refresh
160 	 * rates. Implicit fencing will clamp rendering to the refresh rate of
161 	 * the slower screen, whereas explicit fence allows 2 independent
162 	 * render and display loops on a single buffer. If a driver allows
163 	 * obeys both implicit and explicit fences for plane updates, then it
164 	 * will break all the benefits of explicit fencing.
165 	 */
166 	usage = fence ? DMA_RESV_USAGE_KERNEL : DMA_RESV_USAGE_WRITE;
167 
168 	for (i = 0; i < state->fb->format->num_planes; ++i) {
169 		struct drm_gem_object *obj = drm_gem_fb_get_obj(state->fb, i);
170 		struct dma_fence *new;
171 
172 		if (WARN_ON_ONCE(!obj))
173 			continue;
174 
175 		ret = dma_resv_get_singleton(obj->resv, usage, &new);
176 		if (ret)
177 			goto error;
178 
179 		if (new && fence) {
180 			struct dma_fence_chain *chain = dma_fence_chain_alloc();
181 
182 			if (!chain) {
183 				ret = -ENOMEM;
184 				goto error;
185 			}
186 
187 			dma_fence_chain_init(chain, fence, new, 1);
188 			fence = &chain->base;
189 
190 		} else if (new) {
191 			fence = new;
192 		}
193 	}
194 
195 	dma_fence_put(state->fence);
196 	state->fence = fence;
197 	return 0;
198 
199 error:
200 	dma_fence_put(fence);
201 	return ret;
202 }
203 EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);
204 
205 /**
206  * drm_gem_simple_display_pipe_prepare_fb - prepare_fb helper for &drm_simple_display_pipe
207  * @pipe: Simple display pipe
208  * @plane_state: Plane state
209  *
210  * This function uses drm_gem_plane_helper_prepare_fb() to extract the fences
211  * from &drm_gem_object.resv and attaches them to the plane state for the atomic
212  * helper to wait on. This is necessary to correctly implement implicit
213  * synchronization for any buffers shared as a struct &dma_buf. Drivers can use
214  * this as their &drm_simple_display_pipe_funcs.prepare_fb callback.
215  *
216  * See drm_gem_plane_helper_prepare_fb() for a discussion of implicit and
217  * explicit fencing in atomic modeset updates.
218  */
drm_gem_simple_display_pipe_prepare_fb(struct drm_simple_display_pipe * pipe,struct drm_plane_state * plane_state)219 int drm_gem_simple_display_pipe_prepare_fb(struct drm_simple_display_pipe *pipe,
220 					   struct drm_plane_state *plane_state)
221 {
222 	return drm_gem_plane_helper_prepare_fb(&pipe->plane, plane_state);
223 }
224 EXPORT_SYMBOL(drm_gem_simple_display_pipe_prepare_fb);
225 
226 /*
227  * Shadow-buffered Planes
228  */
229 
230 /**
231  * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
232  * @plane: the plane
233  * @new_shadow_plane_state: the new shadow-buffered plane state
234  *
235  * This function duplicates shadow-buffered plane state. This is helpful for drivers
236  * that subclass struct drm_shadow_plane_state.
237  *
238  * The function does not duplicate existing mappings of the shadow buffers.
239  * Mappings are maintained during the atomic commit by the plane's prepare_fb
240  * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
241  * for corresponding helpers.
242  */
243 void
__drm_gem_duplicate_shadow_plane_state(struct drm_plane * plane,struct drm_shadow_plane_state * new_shadow_plane_state)244 __drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
245 				       struct drm_shadow_plane_state *new_shadow_plane_state)
246 {
247 	__drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
248 }
249 EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);
250 
251 /**
252  * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
253  * @plane: the plane
254  *
255  * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
256  * shadow-buffered planes. It assumes the existing state to be of type
257  * struct drm_shadow_plane_state and it allocates the new state to be of this
258  * type.
259  *
260  * The function does not duplicate existing mappings of the shadow buffers.
261  * Mappings are maintained during the atomic commit by the plane's prepare_fb
262  * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
263  * for corresponding helpers.
264  *
265  * Returns:
266  * A pointer to a new plane state on success, or NULL otherwise.
267  */
268 struct drm_plane_state *
drm_gem_duplicate_shadow_plane_state(struct drm_plane * plane)269 drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
270 {
271 	struct drm_plane_state *plane_state = plane->state;
272 	struct drm_shadow_plane_state *new_shadow_plane_state;
273 
274 	if (!plane_state)
275 		return NULL;
276 
277 	new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
278 	if (!new_shadow_plane_state)
279 		return NULL;
280 	__drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);
281 
282 	return &new_shadow_plane_state->base;
283 }
284 EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);
285 
286 /**
287  * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
288  * @shadow_plane_state: the shadow-buffered plane state
289  *
290  * This function cleans up shadow-buffered plane state. Helpful for drivers that
291  * subclass struct drm_shadow_plane_state.
292  */
__drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state * shadow_plane_state)293 void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
294 {
295 	__drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
296 }
297 EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);
298 
299 /**
300  * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
301  * @plane: the plane
302  * @plane_state: the plane state of type struct drm_shadow_plane_state
303  *
304  * This function implements struct &drm_plane_funcs.atomic_destroy_state
305  * for shadow-buffered planes. It expects that mappings of shadow buffers
306  * have been released already.
307  */
drm_gem_destroy_shadow_plane_state(struct drm_plane * plane,struct drm_plane_state * plane_state)308 void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
309 					struct drm_plane_state *plane_state)
310 {
311 	struct drm_shadow_plane_state *shadow_plane_state =
312 		to_drm_shadow_plane_state(plane_state);
313 
314 	__drm_gem_destroy_shadow_plane_state(shadow_plane_state);
315 	kfree(shadow_plane_state);
316 }
317 EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);
318 
319 /**
320  * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
321  * @plane: the plane
322  * @shadow_plane_state: the shadow-buffered plane state
323  *
324  * This function resets state for shadow-buffered planes. Helpful
325  * for drivers that subclass struct drm_shadow_plane_state.
326  */
__drm_gem_reset_shadow_plane(struct drm_plane * plane,struct drm_shadow_plane_state * shadow_plane_state)327 void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
328 				  struct drm_shadow_plane_state *shadow_plane_state)
329 {
330 	__drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
331 }
332 EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);
333 
334 /**
335  * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
336  * @plane: the plane
337  *
338  * This function implements struct &drm_plane_funcs.reset_plane for
339  * shadow-buffered planes. It assumes the current plane state to be
340  * of type struct drm_shadow_plane and it allocates the new state of
341  * this type.
342  */
drm_gem_reset_shadow_plane(struct drm_plane * plane)343 void drm_gem_reset_shadow_plane(struct drm_plane *plane)
344 {
345 	struct drm_shadow_plane_state *shadow_plane_state;
346 
347 	if (plane->state) {
348 		drm_gem_destroy_shadow_plane_state(plane, plane->state);
349 		plane->state = NULL; /* must be set to NULL here */
350 	}
351 
352 	shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
353 	if (!shadow_plane_state)
354 		return;
355 	__drm_gem_reset_shadow_plane(plane, shadow_plane_state);
356 }
357 EXPORT_SYMBOL(drm_gem_reset_shadow_plane);
358 
359 /**
360  * drm_gem_prepare_shadow_fb - prepares shadow framebuffers
361  * @plane: the plane
362  * @plane_state: the plane state of type struct drm_shadow_plane_state
363  *
364  * This function implements struct &drm_plane_helper_funcs.prepare_fb. It
365  * maps all buffer objects of the plane's framebuffer into kernel address
366  * space and stores them in &struct drm_shadow_plane_state.map. The
367  * framebuffer will be synchronized as part of the atomic commit.
368  *
369  * See drm_gem_cleanup_shadow_fb() for cleanup.
370  *
371  * Returns:
372  * 0 on success, or a negative errno code otherwise.
373  */
drm_gem_prepare_shadow_fb(struct drm_plane * plane,struct drm_plane_state * plane_state)374 int drm_gem_prepare_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
375 {
376 	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
377 	struct drm_framebuffer *fb = plane_state->fb;
378 	int ret;
379 
380 	if (!fb)
381 		return 0;
382 
383 	ret = drm_gem_plane_helper_prepare_fb(plane, plane_state);
384 	if (ret)
385 		return ret;
386 
387 	return drm_gem_fb_vmap(fb, shadow_plane_state->map, shadow_plane_state->data);
388 }
389 EXPORT_SYMBOL(drm_gem_prepare_shadow_fb);
390 
391 /**
392  * drm_gem_cleanup_shadow_fb - releases shadow framebuffers
393  * @plane: the plane
394  * @plane_state: the plane state of type struct drm_shadow_plane_state
395  *
396  * This function implements struct &drm_plane_helper_funcs.cleanup_fb.
397  * This function unmaps all buffer objects of the plane's framebuffer.
398  *
399  * See drm_gem_prepare_shadow_fb() for more information.
400  */
drm_gem_cleanup_shadow_fb(struct drm_plane * plane,struct drm_plane_state * plane_state)401 void drm_gem_cleanup_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
402 {
403 	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
404 	struct drm_framebuffer *fb = plane_state->fb;
405 
406 	if (!fb)
407 		return;
408 
409 	drm_gem_fb_vunmap(fb, shadow_plane_state->map);
410 }
411 EXPORT_SYMBOL(drm_gem_cleanup_shadow_fb);
412 
413 /**
414  * drm_gem_simple_kms_prepare_shadow_fb - prepares shadow framebuffers
415  * @pipe: the simple display pipe
416  * @plane_state: the plane state of type struct drm_shadow_plane_state
417  *
418  * This function implements struct drm_simple_display_funcs.prepare_fb. It
419  * maps all buffer objects of the plane's framebuffer into kernel address
420  * space and stores them in struct drm_shadow_plane_state.map. The
421  * framebuffer will be synchronized as part of the atomic commit.
422  *
423  * See drm_gem_simple_kms_cleanup_shadow_fb() for cleanup.
424  *
425  * Returns:
426  * 0 on success, or a negative errno code otherwise.
427  */
drm_gem_simple_kms_prepare_shadow_fb(struct drm_simple_display_pipe * pipe,struct drm_plane_state * plane_state)428 int drm_gem_simple_kms_prepare_shadow_fb(struct drm_simple_display_pipe *pipe,
429 					 struct drm_plane_state *plane_state)
430 {
431 	return drm_gem_prepare_shadow_fb(&pipe->plane, plane_state);
432 }
433 EXPORT_SYMBOL(drm_gem_simple_kms_prepare_shadow_fb);
434 
435 /**
436  * drm_gem_simple_kms_cleanup_shadow_fb - releases shadow framebuffers
437  * @pipe: the simple display pipe
438  * @plane_state: the plane state of type struct drm_shadow_plane_state
439  *
440  * This function implements struct drm_simple_display_funcs.cleanup_fb.
441  * This function unmaps all buffer objects of the plane's framebuffer.
442  *
443  * See drm_gem_simple_kms_prepare_shadow_fb().
444  */
drm_gem_simple_kms_cleanup_shadow_fb(struct drm_simple_display_pipe * pipe,struct drm_plane_state * plane_state)445 void drm_gem_simple_kms_cleanup_shadow_fb(struct drm_simple_display_pipe *pipe,
446 					  struct drm_plane_state *plane_state)
447 {
448 	drm_gem_cleanup_shadow_fb(&pipe->plane, plane_state);
449 }
450 EXPORT_SYMBOL(drm_gem_simple_kms_cleanup_shadow_fb);
451 
452 /**
453  * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
454  * @pipe: the simple display pipe
455  *
456  * This function implements struct drm_simple_display_funcs.reset_plane
457  * for shadow-buffered planes.
458  */
drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe * pipe)459 void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
460 {
461 	drm_gem_reset_shadow_plane(&pipe->plane);
462 }
463 EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);
464 
465 /**
466  * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
467  * @pipe: the simple display pipe
468  *
469  * This function implements struct drm_simple_display_funcs.duplicate_plane_state
470  * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
471  * buffers. Mappings are maintained during the atomic commit by the plane's prepare_fb
472  * and cleanup_fb helpers.
473  *
474  * Returns:
475  * A pointer to a new plane state on success, or NULL otherwise.
476  */
477 struct drm_plane_state *
drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe * pipe)478 drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
479 {
480 	return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
481 }
482 EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);
483 
484 /**
485  * drm_gem_simple_kms_destroy_shadow_plane_state - resets shadow-buffered plane state
486  * @pipe: the simple display pipe
487  * @plane_state: the plane state of type struct drm_shadow_plane_state
488  *
489  * This function implements struct drm_simple_display_funcs.destroy_plane_state
490  * for shadow-buffered planes. It expects that mappings of shadow buffers
491  * have been released already.
492  */
drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe * pipe,struct drm_plane_state * plane_state)493 void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
494 						   struct drm_plane_state *plane_state)
495 {
496 	drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
497 }
498 EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);
499