drm_gem_atomic_helper.c 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475
  1. // SPDX-License-Identifier: GPL-2.0-or-later
  2. #include <linux/dma-resv.h>
  3. #include <linux/dma-fence-chain.h>
  4. #include <drm/drm_atomic_state_helper.h>
  5. #include <drm/drm_atomic_uapi.h>
  6. #include <drm/drm_framebuffer.h>
  7. #include <drm/drm_gem.h>
  8. #include <drm/drm_gem_atomic_helper.h>
  9. #include <drm/drm_gem_framebuffer_helper.h>
  10. #include <drm/drm_simple_kms_helper.h>
  11. #include "drm_internal.h"
  12. /**
  13. * DOC: overview
  14. *
  15. * The GEM atomic helpers library implements generic atomic-commit
  16. * functions for drivers that use GEM objects. Currently, it provides
  17. * synchronization helpers, and plane state and framebuffer BO mappings
  18. * for planes with shadow buffers.
  19. *
  20. * Before scanout, a plane's framebuffer needs to be synchronized with
  21. * possible writers that draw into the framebuffer. All drivers should
  22. * call drm_gem_plane_helper_prepare_fb() from their implementation of
* call drm_gem_plane_helper_prepare_fb() from their implementation of
* struct &drm_plane_helper_funcs.prepare_fb . It sets the plane's fence from
  24. * the framebuffer so that the DRM core can synchronize access automatically.
  25. * drm_gem_plane_helper_prepare_fb() can also be used directly as
  26. * implementation of prepare_fb.
  27. *
  28. * .. code-block:: c
  29. *
  30. * #include <drm/drm_gem_atomic_helper.h>
  31. *
  32. * struct drm_plane_helper_funcs driver_plane_helper_funcs = {
  33. * ...,
  34. * . prepare_fb = drm_gem_plane_helper_prepare_fb,
  35. * };
  36. *
  37. * A driver using a shadow buffer copies the content of the shadow buffers
  38. * into the HW's framebuffer memory during an atomic update. This requires
  39. * a mapping of the shadow buffer into kernel address space. The mappings
  40. * cannot be established by commit-tail functions, such as atomic_update,
  41. * as this would violate locking rules around dma_buf_vmap().
  42. *
  43. * The helpers for shadow-buffered planes establish and release mappings,
  44. * and provide struct drm_shadow_plane_state, which stores the plane's mapping
  45. * for commit-tail functions.
  46. *
  47. * Shadow-buffered planes can easily be enabled by using the provided macros
  48. * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
  49. * These macros set up the plane and plane-helper callbacks to point to the
  50. * shadow-buffer helpers.
  51. *
  52. * .. code-block:: c
  53. *
  54. * #include <drm/drm_gem_atomic_helper.h>
  55. *
  56. * struct drm_plane_funcs driver_plane_funcs = {
  57. * ...,
  58. * DRM_GEM_SHADOW_PLANE_FUNCS,
  59. * };
  60. *
  61. * struct drm_plane_helper_funcs driver_plane_helper_funcs = {
  62. * ...,
  63. * DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
  64. * };
  65. *
  66. * In the driver's atomic-update function, shadow-buffer mappings are available
  67. * from the plane state. Use to_drm_shadow_plane_state() to upcast from
  68. * struct drm_plane_state.
  69. *
  70. * .. code-block:: c
  71. *
  72. * void driver_plane_atomic_update(struct drm_plane *plane,
  73. * struct drm_plane_state *old_plane_state)
  74. * {
  75. * struct drm_plane_state *plane_state = plane->state;
  76. * struct drm_shadow_plane_state *shadow_plane_state =
  77. * to_drm_shadow_plane_state(plane_state);
  78. *
  79. * // access shadow buffer via shadow_plane_state->map
  80. * }
  81. *
  82. * A mapping address for each of the framebuffer's buffer object is stored in
  83. * struct &drm_shadow_plane_state.map. The mappings are valid while the state
  84. * is being used.
  85. *
  86. * Drivers that use struct drm_simple_display_pipe can use
* %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
  88. * callbacks. Access to shadow-buffer mappings is similar to regular
  89. * atomic_update.
  90. *
  91. * .. code-block:: c
  92. *
  93. * struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
  94. * ...,
  95. * DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
  96. * };
  97. *
  98. * void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
  99. * struct drm_crtc_state *crtc_state,
  100. * struct drm_plane_state *plane_state)
  101. * {
  102. * struct drm_shadow_plane_state *shadow_plane_state =
  103. * to_drm_shadow_plane_state(plane_state);
  104. *
  105. * // access shadow buffer via shadow_plane_state->map
  106. * }
  107. */
  108. /*
  109. * Plane Helpers
  110. */
/**
 * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
 * @plane: Plane
 * @state: Plane state the fence will be attached to
 *
 * This function extracts the exclusive fence from &drm_gem_object.resv and
 * attaches it to plane state for the atomic helper to wait on. This is
 * necessary to correctly implement implicit synchronization for any buffers
 * shared as a struct &dma_buf. This function can be used as the
 * &drm_plane_helper_funcs.prepare_fb callback.
 *
 * There is no need for &drm_plane_helper_funcs.cleanup_fb hook for simple
 * GEM based framebuffer drivers which have their buffers always pinned in
 * memory.
 *
 * This function is the default implementation for GEM drivers of
 * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane,
				    struct drm_plane_state *state)
{
	/* Take a reference so 'fence' owns its own count; released on error. */
	struct dma_fence *fence = dma_fence_get(state->fence);
	enum dma_resv_usage usage;
	size_t i;
	int ret;

	/*
	 * NOTE(review): state->fence is assumed to only be set together with
	 * an fb, so the reference taken above is not leaked here — confirm.
	 */
	if (!state->fb)
		return 0;

	/*
	 * Only add the kernel fences here if there is already a fence set via
	 * explicit fencing interfaces on the atomic ioctl.
	 *
	 * This way explicit fencing can be used to overrule implicit fencing,
	 * which is important to make explicit fencing use-cases work: One
	 * example is using one buffer for 2 screens with different refresh
	 * rates. Implicit fencing will clamp rendering to the refresh rate of
	 * the slower screen, whereas explicit fence allows 2 independent
	 * render and display loops on a single buffer. If a driver obeys
	 * both implicit and explicit fences for plane updates, then it
	 * will break all the benefits of explicit fencing.
	 */
	usage = fence ? DMA_RESV_USAGE_KERNEL : DMA_RESV_USAGE_WRITE;

	/* Collect one fence per framebuffer plane and merge them into 'fence'. */
	for (i = 0; i < state->fb->format->num_planes; ++i) {
		struct drm_gem_object *obj = drm_gem_fb_get_obj(state->fb, i);
		struct dma_fence *new;

		if (!obj) {
			ret = -EINVAL;
			goto error;
		}

		ret = dma_resv_get_singleton(obj->resv, usage, &new);
		if (ret)
			goto error;

		if (new && fence) {
			/*
			 * Already have a fence: link the new one behind it in
			 * a fence chain so both are waited on. The chain takes
			 * over the references to 'fence' and 'new'.
			 */
			struct dma_fence_chain *chain = dma_fence_chain_alloc();

			if (!chain) {
				ret = -ENOMEM;
				goto error;
			}

			dma_fence_chain_init(chain, fence, new, 1);
			fence = &chain->base;
		} else if (new) {
			/* First fence found; take it as-is. */
			fence = new;
		}
	}

	/* Replace the plane state's fence; drop the reference to the old one. */
	dma_fence_put(state->fence);
	state->fence = fence;

	return 0;

error:
	/* Drops the whole chain (or single fence) accumulated so far. */
	dma_fence_put(fence);
	return ret;
}
EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);
  182. /*
  183. * Shadow-buffered Planes
  184. */
  185. /**
  186. * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
  187. * @plane: the plane
  188. * @new_shadow_plane_state: the new shadow-buffered plane state
  189. *
  190. * This function duplicates shadow-buffered plane state. This is helpful for drivers
  191. * that subclass struct drm_shadow_plane_state.
  192. *
  193. * The function does not duplicate existing mappings of the shadow buffers.
  194. * Mappings are maintained during the atomic commit by the plane's prepare_fb
  195. * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
  196. * for corresponding helpers.
  197. */
  198. void
  199. __drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
  200. struct drm_shadow_plane_state *new_shadow_plane_state)
  201. {
  202. struct drm_plane_state *plane_state = plane->state;
  203. struct drm_shadow_plane_state *shadow_plane_state =
  204. to_drm_shadow_plane_state(plane_state);
  205. __drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
  206. drm_format_conv_state_copy(&new_shadow_plane_state->fmtcnv_state,
  207. &shadow_plane_state->fmtcnv_state);
  208. }
  209. EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);
  210. /**
  211. * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
  212. * @plane: the plane
  213. *
  214. * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
  215. * shadow-buffered planes. It assumes the existing state to be of type
  216. * struct drm_shadow_plane_state and it allocates the new state to be of this
  217. * type.
  218. *
  219. * The function does not duplicate existing mappings of the shadow buffers.
  220. * Mappings are maintained during the atomic commit by the plane's prepare_fb
  221. * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
  222. * for corresponding helpers.
  223. *
  224. * Returns:
  225. * A pointer to a new plane state on success, or NULL otherwise.
  226. */
  227. struct drm_plane_state *
  228. drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
  229. {
  230. struct drm_plane_state *plane_state = plane->state;
  231. struct drm_shadow_plane_state *new_shadow_plane_state;
  232. if (!plane_state)
  233. return NULL;
  234. new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
  235. if (!new_shadow_plane_state)
  236. return NULL;
  237. __drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);
  238. return &new_shadow_plane_state->base;
  239. }
  240. EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);
/**
 * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function cleans up shadow-buffered plane state. Helpful for drivers that
 * subclass struct drm_shadow_plane_state. It does not free @shadow_plane_state
 * itself; the caller remains responsible for that.
 */
void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
{
	/* Release the format-conversion state before tearing down the base state. */
	drm_format_conv_state_release(&shadow_plane_state->fmtcnv_state);
	__drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);
  254. /**
  255. * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
  256. * @plane: the plane
  257. * @plane_state: the plane state of type struct drm_shadow_plane_state
  258. *
  259. * This function implements struct &drm_plane_funcs.atomic_destroy_state
  260. * for shadow-buffered planes. It expects that mappings of shadow buffers
  261. * have been released already.
  262. */
  263. void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
  264. struct drm_plane_state *plane_state)
  265. {
  266. struct drm_shadow_plane_state *shadow_plane_state =
  267. to_drm_shadow_plane_state(plane_state);
  268. __drm_gem_destroy_shadow_plane_state(shadow_plane_state);
  269. kfree(shadow_plane_state);
  270. }
  271. EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);
/**
 * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function resets state for shadow-buffered planes. Helpful
 * for drivers that subclass struct drm_shadow_plane_state. It installs
 * @shadow_plane_state as the plane's current state via the base-state
 * reset helper and initializes the format-conversion state.
 */
void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
				  struct drm_shadow_plane_state *shadow_plane_state)
{
	/* Reset the embedded base state first, then the conversion state. */
	__drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
	drm_format_conv_state_init(&shadow_plane_state->fmtcnv_state);
}
EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);
  287. /**
  288. * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
  289. * @plane: the plane
  290. *
  291. * This function implements struct &drm_plane_funcs.reset_plane for
  292. * shadow-buffered planes. It assumes the current plane state to be
  293. * of type struct drm_shadow_plane and it allocates the new state of
  294. * this type.
  295. */
  296. void drm_gem_reset_shadow_plane(struct drm_plane *plane)
  297. {
  298. struct drm_shadow_plane_state *shadow_plane_state;
  299. if (plane->state) {
  300. drm_gem_destroy_shadow_plane_state(plane, plane->state);
  301. plane->state = NULL; /* must be set to NULL here */
  302. }
  303. shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
  304. if (!shadow_plane_state)
  305. return;
  306. __drm_gem_reset_shadow_plane(plane, shadow_plane_state);
  307. }
  308. EXPORT_SYMBOL(drm_gem_reset_shadow_plane);
  309. /**
  310. * drm_gem_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
  311. * @plane: the plane
  312. * @plane_state: the plane state of type struct drm_shadow_plane_state
  313. *
  314. * This function implements struct &drm_plane_helper_funcs.begin_fb_access. It
  315. * maps all buffer objects of the plane's framebuffer into kernel address
  316. * space and stores them in struct &drm_shadow_plane_state.map. The first data
  317. * bytes are available in struct &drm_shadow_plane_state.data.
  318. *
  319. * See drm_gem_end_shadow_fb_access() for cleanup.
  320. *
  321. * Returns:
  322. * 0 on success, or a negative errno code otherwise.
  323. */
  324. int drm_gem_begin_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
  325. {
  326. struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
  327. struct drm_framebuffer *fb = plane_state->fb;
  328. if (!fb)
  329. return 0;
  330. return drm_gem_fb_vmap(fb, shadow_plane_state->map, shadow_plane_state->data);
  331. }
  332. EXPORT_SYMBOL(drm_gem_begin_shadow_fb_access);
  333. /**
  334. * drm_gem_end_shadow_fb_access - releases shadow framebuffers from CPU access
  335. * @plane: the plane
  336. * @plane_state: the plane state of type struct drm_shadow_plane_state
  337. *
  338. * This function implements struct &drm_plane_helper_funcs.end_fb_access. It
  339. * undoes all effects of drm_gem_begin_shadow_fb_access() in reverse order.
  340. *
  341. * See drm_gem_begin_shadow_fb_access() for more information.
  342. */
  343. void drm_gem_end_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
  344. {
  345. struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
  346. struct drm_framebuffer *fb = plane_state->fb;
  347. if (!fb)
  348. return;
  349. drm_gem_fb_vunmap(fb, shadow_plane_state->map);
  350. }
  351. EXPORT_SYMBOL(drm_gem_end_shadow_fb_access);
/**
 * drm_gem_simple_kms_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.begin_fb_access.
 * It forwards to drm_gem_begin_shadow_fb_access() for the pipe's plane.
 *
 * See drm_gem_begin_shadow_fb_access() for details and
 * drm_gem_simple_kms_end_shadow_fb_access() for cleanup.
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_simple_kms_begin_shadow_fb_access(struct drm_simple_display_pipe *pipe,
					      struct drm_plane_state *plane_state)
{
	return drm_gem_begin_shadow_fb_access(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_begin_shadow_fb_access);
/**
 * drm_gem_simple_kms_end_shadow_fb_access - releases shadow framebuffers from CPU access
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.end_fb_access.
 * It undoes all effects of drm_gem_simple_kms_begin_shadow_fb_access() in
 * reverse order by forwarding to drm_gem_end_shadow_fb_access() for the
 * pipe's plane.
 *
 * See drm_gem_simple_kms_begin_shadow_fb_access().
 */
void drm_gem_simple_kms_end_shadow_fb_access(struct drm_simple_display_pipe *pipe,
					     struct drm_plane_state *plane_state)
{
	drm_gem_end_shadow_fb_access(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_end_shadow_fb_access);
/**
 * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.reset_plane
 * for shadow-buffered planes by forwarding to drm_gem_reset_shadow_plane()
 * for the pipe's plane.
 */
void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
{
	drm_gem_reset_shadow_plane(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);
/**
 * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.duplicate_plane_state
 * for shadow-buffered planes by forwarding to drm_gem_duplicate_shadow_plane_state()
 * for the pipe's plane. It does not duplicate existing mappings of the shadow
 * buffers. Mappings are maintained during the atomic commit by the plane's
 * begin_fb_access and end_fb_access helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
{
	return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);
/**
 * drm_gem_simple_kms_destroy_shadow_plane_state - deletes shadow-buffered plane state
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.destroy_plane_state
 * for shadow-buffered planes by forwarding to drm_gem_destroy_shadow_plane_state()
 * for the pipe's plane. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
						   struct drm_plane_state *plane_state)
{
	drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);