linux/drivers/gpu/drm/drm_gem_atomic_helper.c
// SPDX-License-Identifier: GPL-2.0-or-later

#include <linux/dma-resv.h>

#include <drm/drm_atomic_state_helper.h>
#include <drm/drm_atomic_uapi.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "drm_internal.h"

/**
 * DOC: overview
 *
 * The GEM atomic helpers library implements generic atomic-commit
 * functions for drivers that use GEM objects. Currently, it provides
 * synchronization helpers, and plane state and framebuffer BO mappings
 * for planes with shadow buffers.
 *
 * Before scanout, a plane's framebuffer needs to be synchronized with
 * possible writers that draw into the framebuffer. All drivers should
 * call drm_gem_plane_helper_prepare_fb() from their implementation of
 * struct &drm_plane_helper_funcs.prepare_fb. It sets the plane's fence from
 * the framebuffer so that the DRM core can synchronize access automatically.
 *
 * drm_gem_plane_helper_prepare_fb() can also be used directly as the
 * implementation of prepare_fb. For drivers based on
 * struct drm_simple_display_pipe, drm_gem_simple_display_pipe_prepare_fb()
 * provides equivalent functionality.
 *
 * .. code-block:: c
 *
 *      #include <drm/drm_gem_atomic_helper.h>
 *
 *      struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_plane_helper_prepare_fb,
 *      };
 *
 *      struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_simple_display_pipe_prepare_fb,
 *      };
 *
 * A driver using a shadow buffer copies the content of the shadow buffers
 * into the HW's framebuffer memory during an atomic update. This requires
 * a mapping of the shadow buffer into kernel address space. The mappings
 * cannot be established by commit-tail functions, such as atomic_update,
 * as this would violate locking rules around dma_buf_vmap().
 *
 * The helpers for shadow-buffered planes establish and release mappings,
 * and provide struct drm_shadow_plane_state, which stores the plane's mapping
 * for commit-tail functions.
 *
 * Shadow-buffered planes can easily be enabled by using the provided macros
 * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
 * These macros set up the plane and plane-helper callbacks to point to the
 * shadow-buffer helpers.
 *
 * .. code-block:: c
 *
 *      #include <drm/drm_gem_atomic_helper.h>
 *
 *      struct drm_plane_funcs driver_plane_funcs = {
 *              ...,
 *              DRM_GEM_SHADOW_PLANE_FUNCS,
 *      };
 *
 *      struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *              ...,
 *              DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
 *      };
 *
 * In the driver's atomic-update function, shadow-buffer mappings are available
 * from the plane state. Use to_drm_shadow_plane_state() to upcast from
 * struct drm_plane_state.
 *
 * .. code-block:: c
 *
 *      void driver_plane_atomic_update(struct drm_plane *plane,
 *                                      struct drm_plane_state *old_plane_state)
 *      {
 *              struct drm_plane_state *plane_state = plane->state;
 *              struct drm_shadow_plane_state *shadow_plane_state =
 *                      to_drm_shadow_plane_state(plane_state);
 *
 *              // access shadow buffer via shadow_plane_state->map
 *      }
 *
 * A mapping address for each of the framebuffer's buffer objects is stored in
 * struct &drm_shadow_plane_state.map. The mappings are valid while the state
 * is being used.
 *
 * Drivers that use struct drm_simple_display_pipe can use
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
 * callbacks. Access to the shadow-buffer mappings works the same as in a
 * regular atomic_update.
 *
 * .. code-block:: c
 *
 *      struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *              ...,
 *              DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
 *      };
 *
 *      void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
 *                              struct drm_crtc_state *crtc_state,
 *                              struct drm_plane_state *plane_state)
 *      {
 *              struct drm_shadow_plane_state *shadow_plane_state =
 *                      to_drm_shadow_plane_state(plane_state);
 *
 *              // access shadow buffer via shadow_plane_state->map
 *      }
 */

/*
 * Plane Helpers
 */

/**
 * drm_gem_plane_helper_prepare_fb - Prepare a GEM-backed framebuffer
 * @plane: Plane
 * @state: Plane state the fence will be attached to
 *
 * This function extracts the exclusive fence from &drm_gem_object.resv and
 * attaches it to plane state for the atomic helper to wait on. This is
 * necessary to correctly implement implicit synchronization for any buffers
 * shared as a struct &dma_buf. This function can be used as the
 * &drm_plane_helper_funcs.prepare_fb callback.
 *
 * There is no need for a &drm_plane_helper_funcs.cleanup_fb hook for simple
 * GEM-based framebuffer drivers whose buffers are always pinned in memory.
 *
 * This function is the default implementation for GEM drivers of
 * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
 *
 * See drm_atomic_set_fence_for_plane() for a discussion of implicit and
 * explicit fencing in atomic modeset updates.
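 *
 * As a rough sketch, a driver that has to do its own buffer pinning can
 * still delegate the fence setup to this helper from within its own
 * prepare_fb implementation. The function names below are hypothetical
 * and only illustrate the call order.
 *
 * .. code-block:: c
 *
 *      static int driver_plane_prepare_fb(struct drm_plane *plane,
 *                                         struct drm_plane_state *state)
 *      {
 *              int ret;
 *
 *              ret = driver_pin_fb(plane, state->fb); // driver-specific pinning, assumed
 *              if (ret)
 *                      return ret;
 *
 *              // attach the framebuffer's implicit fence to the plane state
 *              return drm_gem_plane_helper_prepare_fb(plane, state);
 *      }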
 */
int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct drm_gem_object *obj;
	struct dma_fence *fence;

	if (!state->fb)
		return 0;

	obj = drm_gem_fb_get_obj(state->fb, 0);
	fence = dma_resv_get_excl_unlocked(obj->resv);
	drm_atomic_set_fence_for_plane(state, fence);

	return 0;
}
EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);

/**
 * drm_gem_simple_display_pipe_prepare_fb - prepare_fb helper for &drm_simple_display_pipe
 * @pipe: Simple display pipe
 * @plane_state: Plane state
 *
 * This function uses drm_gem_plane_helper_prepare_fb() to extract the exclusive fence
 * from &drm_gem_object.resv and attaches it to plane state for the atomic
 * helper to wait on. This is necessary to correctly implement implicit
 * synchronization for any buffers shared as a struct &dma_buf. Drivers can use
 * this as their &drm_simple_display_pipe_funcs.prepare_fb callback.
 *
 * See drm_atomic_set_fence_for_plane() for a discussion of implicit and
 * explicit fencing in atomic modeset updates.
 */
int drm_gem_simple_display_pipe_prepare_fb(struct drm_simple_display_pipe *pipe,
					   struct drm_plane_state *plane_state)
{
	return drm_gem_plane_helper_prepare_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_display_pipe_prepare_fb);

/*
 * Shadow-buffered Planes
 */

/**
 * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 * @new_shadow_plane_state: the new shadow-buffered plane state
 *
 * This function duplicates shadow-buffered plane state. This is helpful for drivers
 * that subclass struct drm_shadow_plane_state.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
 * for corresponding helpers.
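 *
 * A minimal sketch of a driver that subclasses struct drm_shadow_plane_state
 * might duplicate its state as shown below. The struct and function names
 * are hypothetical; only the call to __drm_gem_duplicate_shadow_plane_state()
 * comes from this library.
 *
 * .. code-block:: c
 *
 *      struct driver_plane_state {
 *              struct drm_shadow_plane_state base;
 *              int driver_field;
 *      };
 *
 *      static struct drm_plane_state *
 *      driver_plane_atomic_duplicate_state(struct drm_plane *plane)
 *      {
 *              struct driver_plane_state *new_state;
 *
 *              if (!plane->state)
 *                      return NULL;
 *
 *              new_state = kzalloc(sizeof(*new_state), GFP_KERNEL);
 *              if (!new_state)
 *                      return NULL;
 *              __drm_gem_duplicate_shadow_plane_state(plane, &new_state->base);
 *              // copy driver_field and other driver-specific members here
 *
 *              return &new_state->base.base;
 *      }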
 */
void
__drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
				       struct drm_shadow_plane_state *new_shadow_plane_state)
{
	__drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);

/**
 * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
 * shadow-buffered planes. It assumes the existing state to be of type
 * struct drm_shadow_plane_state and it allocates the new state to be of this
 * type.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
 * for corresponding helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
{
	struct drm_plane_state *plane_state = plane->state;
	struct drm_shadow_plane_state *new_shadow_plane_state;

	if (!plane_state)
		return NULL;

	new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
	if (!new_shadow_plane_state)
		return NULL;
	__drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);

	return &new_shadow_plane_state->base;
}
EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);

/**
 * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function cleans up shadow-buffered plane state. This is helpful for
 * drivers that subclass struct drm_shadow_plane_state.
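 *
 * Continuing the hypothetical subclassing sketch from
 * __drm_gem_duplicate_shadow_plane_state(), a driver's destroy callback
 * might look as follows; driver_plane_state is an assumed driver type.
 *
 * .. code-block:: c
 *
 *      static void driver_plane_atomic_destroy_state(struct drm_plane *plane,
 *                                                    struct drm_plane_state *plane_state)
 *      {
 *              struct driver_plane_state *driver_state =
 *                      container_of(to_drm_shadow_plane_state(plane_state),
 *                                   struct driver_plane_state, base);
 *
 *              __drm_gem_destroy_shadow_plane_state(&driver_state->base);
 *              kfree(driver_state);
 *      }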
 */
void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
{
	__drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);

/**
 * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_funcs.atomic_destroy_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
					struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state =
		to_drm_shadow_plane_state(plane_state);

	__drm_gem_destroy_shadow_plane_state(shadow_plane_state);
	kfree(shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);

/**
 * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function resets state for shadow-buffered planes. This is helpful
 * for drivers that subclass struct drm_shadow_plane_state.
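 *
 * A sketch of a reset callback for the hypothetical driver_plane_state
 * subclass from the examples above could look like this:
 *
 * .. code-block:: c
 *
 *      static void driver_plane_reset(struct drm_plane *plane)
 *      {
 *              struct driver_plane_state *driver_state;
 *
 *              if (plane->state) {
 *                      driver_plane_atomic_destroy_state(plane, plane->state);
 *                      plane->state = NULL;
 *              }
 *
 *              driver_state = kzalloc(sizeof(*driver_state), GFP_KERNEL);
 *              if (!driver_state)
 *                      return;
 *              __drm_gem_reset_shadow_plane(plane, &driver_state->base);
 *      }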
 */
void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
				  struct drm_shadow_plane_state *shadow_plane_state)
{
	__drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);

/**
 * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.reset for
 * shadow-buffered planes. It assumes the current plane state to be
 * of type struct drm_shadow_plane_state and allocates the new state
 * of this type.
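 *
 * Together with drm_gem_duplicate_shadow_plane_state() and
 * drm_gem_destroy_shadow_plane_state(), this is roughly what the
 * %DRM_GEM_SHADOW_PLANE_FUNCS macro is expected to wire up. The sketch
 * below shows the assumed equivalent explicit setup for drivers that
 * cannot use the macro.
 *
 * .. code-block:: c
 *
 *      static const struct drm_plane_funcs driver_plane_funcs = {
 *              ...,
 *              .reset = drm_gem_reset_shadow_plane,
 *              .atomic_duplicate_state = drm_gem_duplicate_shadow_plane_state,
 *              .atomic_destroy_state = drm_gem_destroy_shadow_plane_state,
 *      };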
 */
void drm_gem_reset_shadow_plane(struct drm_plane *plane)
{
	struct drm_shadow_plane_state *shadow_plane_state;

	if (plane->state) {
		drm_gem_destroy_shadow_plane_state(plane, plane->state);
		plane->state = NULL; /* must be set to NULL here */
	}

	shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
	if (!shadow_plane_state)
		return;
	__drm_gem_reset_shadow_plane(plane, shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_reset_shadow_plane);

/**
 * drm_gem_prepare_shadow_fb - prepares shadow framebuffers
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.prepare_fb. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in &struct drm_shadow_plane_state.map. The
 * framebuffer will be synchronized as part of the atomic commit.
 *
 * See drm_gem_cleanup_shadow_fb() for cleanup.
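 *
 * Together with drm_gem_cleanup_shadow_fb(), this is roughly what the
 * %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS macro is expected to set up; the
 * sketch below shows the assumed equivalent explicit wiring for drivers
 * that cannot use the macro.
 *
 * .. code-block:: c
 *
 *      static const struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_prepare_shadow_fb,
 *              .cleanup_fb = drm_gem_cleanup_shadow_fb,
 *      };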
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_prepare_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
	struct drm_framebuffer *fb = plane_state->fb;
	int ret;

	if (!fb)
		return 0;

	ret = drm_gem_plane_helper_prepare_fb(plane, plane_state);
	if (ret)
		return ret;

	return drm_gem_fb_vmap(fb, shadow_plane_state->map, shadow_plane_state->data);
}
EXPORT_SYMBOL(drm_gem_prepare_shadow_fb);

/**
 * drm_gem_cleanup_shadow_fb - releases shadow framebuffers
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.cleanup_fb. It
 * unmaps all buffer objects of the plane's framebuffer.
 *
 * See drm_gem_prepare_shadow_fb() for more information.
 */
void drm_gem_cleanup_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
	struct drm_framebuffer *fb = plane_state->fb;

	if (!fb)
		return;

	drm_gem_fb_vunmap(fb, shadow_plane_state->map);
}
EXPORT_SYMBOL(drm_gem_cleanup_shadow_fb);

/**
 * drm_gem_simple_kms_prepare_shadow_fb - prepares shadow framebuffers
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.prepare_fb. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in struct drm_shadow_plane_state.map. The
 * framebuffer will be synchronized as part of the atomic commit.
 *
 * See drm_gem_simple_kms_cleanup_shadow_fb() for cleanup.
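 *
 * As an assumed, explicit alternative to the
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS macro, a driver could
 * wire up the simple-KMS shadow-plane helpers individually:
 *
 * .. code-block:: c
 *
 *      static const struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_simple_kms_prepare_shadow_fb,
 *              .cleanup_fb = drm_gem_simple_kms_cleanup_shadow_fb,
 *              .reset_plane = drm_gem_simple_kms_reset_shadow_plane,
 *              .duplicate_plane_state = drm_gem_simple_kms_duplicate_shadow_plane_state,
 *              .destroy_plane_state = drm_gem_simple_kms_destroy_shadow_plane_state,
 *      };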
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_simple_kms_prepare_shadow_fb(struct drm_simple_display_pipe *pipe,
					 struct drm_plane_state *plane_state)
{
	return drm_gem_prepare_shadow_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_prepare_shadow_fb);

/**
 * drm_gem_simple_kms_cleanup_shadow_fb - releases shadow framebuffers
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.cleanup_fb. It
 * unmaps all buffer objects of the plane's framebuffer.
 *
 * See drm_gem_simple_kms_prepare_shadow_fb().
 */
void drm_gem_simple_kms_cleanup_shadow_fb(struct drm_simple_display_pipe *pipe,
					  struct drm_plane_state *plane_state)
{
	drm_gem_cleanup_shadow_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_cleanup_shadow_fb);

/**
 * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.reset_plane
 * for shadow-buffered planes.
 */
void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
{
	drm_gem_reset_shadow_plane(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);

/**
 * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.duplicate_plane_state
 * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
 * buffers. Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
{
	return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);

/**
 * drm_gem_simple_kms_destroy_shadow_plane_state - destroys shadow-buffered plane state
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.destroy_plane_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
						   struct drm_plane_state *plane_state)
{
	drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);
