linux/drivers/gpu/drm/virtio/virtgpu_display.c
/*
 * Copyright (C) 2015 Red Hat, Inc.
 * All Rights Reserved.
 *
 * Authors:
 *    Dave Airlie
 *    Alon Levy
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "virtgpu_drv.h"
#include <drm/drm_crtc_helper.h>
#include <drm/drm_atomic_helper.h>

#define XRES_MIN   320
#define YRES_MIN   200

#define XRES_DEF  1024
#define YRES_DEF   768

#define XRES_MAX  8192
#define YRES_MAX  8192

static void virtio_gpu_crtc_gamma_set(struct drm_crtc *crtc,
                                      u16 *red, u16 *green, u16 *blue,
                                      uint32_t start, uint32_t size)
{
        /* TODO */
}

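/*
 * Hide the cursor: an update-cursor command with resource id 0 detaches
 * any cursor image from this output.
 */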
static void
virtio_gpu_hide_cursor(struct virtio_gpu_device *vgdev,
                       struct virtio_gpu_output *output)
{
        output->cursor.hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_UPDATE_CURSOR);
        output->cursor.resource_id = 0;
        virtio_gpu_cursor_ping(vgdev, output);
}

static int virtio_gpu_crtc_cursor_set(struct drm_crtc *crtc,
                                      struct drm_file *file_priv,
                                      uint32_t handle,
                                      uint32_t width,
                                      uint32_t height,
                                      int32_t hot_x, int32_t hot_y)
{
        struct virtio_gpu_device *vgdev = crtc->dev->dev_private;
        struct virtio_gpu_output *output =
                container_of(crtc, struct virtio_gpu_output, crtc);
        struct drm_gem_object *gobj = NULL;
        struct virtio_gpu_object *qobj = NULL;
        struct virtio_gpu_fence *fence = NULL;
        int ret = 0;

        if (handle == 0) {
                virtio_gpu_hide_cursor(vgdev, output);
                return 0;
        }

        /* lookup the cursor */
        gobj = drm_gem_object_lookup(crtc->dev, file_priv, handle);
        if (gobj == NULL)
                return -ENOENT;

        qobj = gem_to_virtio_gpu_obj(gobj);

        if (!qobj->hw_res_handle) {
                ret = -EINVAL;
                goto out;
        }

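        /*
         * Upload the 64x64 cursor image to the host resource and wait for
         * the transfer to complete (via its fence) before pointing the
         * cursor at it.
         */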
        virtio_gpu_cmd_transfer_to_host_2d(vgdev, qobj->hw_res_handle, 0,
                                           cpu_to_le32(64),
                                           cpu_to_le32(64),
                                           0, 0, &fence);
        ret = virtio_gpu_object_reserve(qobj, false);
        if (!ret) {
                reservation_object_add_excl_fence(qobj->tbo.resv,
                                                  &fence->f);
                fence_put(&fence->f);
                virtio_gpu_object_unreserve(qobj);
                virtio_gpu_object_wait(qobj, false);
        }

        output->cursor.hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_UPDATE_CURSOR);
        output->cursor.resource_id = cpu_to_le32(qobj->hw_res_handle);
        output->cursor.hot_x = cpu_to_le32(hot_x);
        output->cursor.hot_y = cpu_to_le32(hot_y);
        virtio_gpu_cursor_ping(vgdev, output);
        ret = 0;

out:
        drm_gem_object_unreference_unlocked(gobj);
        return ret;
}

static int virtio_gpu_crtc_cursor_move(struct drm_crtc *crtc,
                                       int x, int y)
{
        struct virtio_gpu_device *vgdev = crtc->dev->dev_private;
        struct virtio_gpu_output *output =
                container_of(crtc, struct virtio_gpu_output, crtc);

        output->cursor.hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_MOVE_CURSOR);
        output->cursor.pos.x = cpu_to_le32(x);
        output->cursor.pos.y = cpu_to_le32(y);
        virtio_gpu_cursor_ping(vgdev, output);
        return 0;
}

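/*
 * Page flip: for dumb buffers the new contents are first copied to the host
 * resource, then the scanout is pointed at the resource and flushed.  Any
 * pending event is completed immediately rather than from a vblank handler.
 */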
static int virtio_gpu_page_flip(struct drm_crtc *crtc,
                                struct drm_framebuffer *fb,
                                struct drm_pending_vblank_event *event,
                                uint32_t flags)
{
        struct virtio_gpu_device *vgdev = crtc->dev->dev_private;
        struct virtio_gpu_output *output =
                container_of(crtc, struct virtio_gpu_output, crtc);
        struct drm_plane *plane = crtc->primary;
        struct virtio_gpu_framebuffer *vgfb;
        struct virtio_gpu_object *bo;
        unsigned long irqflags;
        uint32_t handle;

        plane->fb = fb;
        vgfb = to_virtio_gpu_framebuffer(plane->fb);
        bo = gem_to_virtio_gpu_obj(vgfb->obj);
        handle = bo->hw_res_handle;

        DRM_DEBUG("handle 0x%x%s, crtc %dx%d\n", handle,
                  bo->dumb ? ", dumb" : "",
                  crtc->mode.hdisplay, crtc->mode.vdisplay);
        if (bo->dumb) {
                virtio_gpu_cmd_transfer_to_host_2d
                        (vgdev, handle, 0,
                         cpu_to_le32(crtc->mode.hdisplay),
                         cpu_to_le32(crtc->mode.vdisplay),
                         0, 0, NULL);
        }
        virtio_gpu_cmd_set_scanout(vgdev, output->index, handle,
                                   crtc->mode.hdisplay,
                                   crtc->mode.vdisplay, 0, 0);
        virtio_gpu_cmd_resource_flush(vgdev, handle, 0, 0,
                                      crtc->mode.hdisplay,
                                      crtc->mode.vdisplay);

        if (event) {
                spin_lock_irqsave(&crtc->dev->event_lock, irqflags);
                drm_send_vblank_event(crtc->dev, -1, event);
                spin_unlock_irqrestore(&crtc->dev->event_lock, irqflags);
        }

        return 0;
}

static const struct drm_crtc_funcs virtio_gpu_crtc_funcs = {
        .cursor_set2            = virtio_gpu_crtc_cursor_set,
        .cursor_move            = virtio_gpu_crtc_cursor_move,
        .gamma_set              = virtio_gpu_crtc_gamma_set,
        .set_config             = drm_atomic_helper_set_config,
        .destroy                = drm_crtc_cleanup,

        .page_flip              = virtio_gpu_page_flip,
        .reset                  = drm_atomic_helper_crtc_reset,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state   = drm_atomic_helper_crtc_destroy_state,
};

static void virtio_gpu_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
        struct virtio_gpu_framebuffer *virtio_gpu_fb
                = to_virtio_gpu_framebuffer(fb);

        if (virtio_gpu_fb->obj)
                drm_gem_object_unreference_unlocked(virtio_gpu_fb->obj);
        drm_framebuffer_cleanup(fb);
        kfree(virtio_gpu_fb);
}

static int
virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb,
                                     struct drm_file *file_priv,
                                     unsigned flags, unsigned color,
                                     struct drm_clip_rect *clips,
                                     unsigned num_clips)
{
        struct virtio_gpu_framebuffer *virtio_gpu_fb
                = to_virtio_gpu_framebuffer(fb);

        return virtio_gpu_surface_dirty(virtio_gpu_fb, clips, num_clips);
}

static const struct drm_framebuffer_funcs virtio_gpu_fb_funcs = {
        .destroy = virtio_gpu_user_framebuffer_destroy,
        .dirty = virtio_gpu_framebuffer_surface_dirty,
};

int
virtio_gpu_framebuffer_init(struct drm_device *dev,
                            struct virtio_gpu_framebuffer *vgfb,
                            const struct drm_mode_fb_cmd2 *mode_cmd,
                            struct drm_gem_object *obj)
{
        int ret;
        struct virtio_gpu_object *bo;
        vgfb->obj = obj;

        bo = gem_to_virtio_gpu_obj(obj);

        ret = drm_framebuffer_init(dev, &vgfb->base, &virtio_gpu_fb_funcs);
        if (ret) {
                vgfb->obj = NULL;
                return ret;
        }
        drm_helper_mode_fill_fb_struct(&vgfb->base, mode_cmd);

        spin_lock_init(&vgfb->dirty_lock);
        vgfb->x1 = vgfb->y1 = INT_MAX;
        vgfb->x2 = vgfb->y2 = 0;
        return 0;
}

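/*
 * Tell the host about the new mode size.  Resource id 0 is passed here; the
 * framebuffer resource is attached later (plane update / page flip).
 */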
static void virtio_gpu_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct virtio_gpu_device *vgdev = dev->dev_private;
        struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);

        virtio_gpu_cmd_set_scanout(vgdev, output->index, 0,
                                   crtc->mode.hdisplay,
                                   crtc->mode.vdisplay, 0, 0);
}

static void virtio_gpu_crtc_enable(struct drm_crtc *crtc)
{
}

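/* Turn the scanout off on the host: resource id 0 and a zero-sized rect. */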
static void virtio_gpu_crtc_disable(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct virtio_gpu_device *vgdev = dev->dev_private;
        struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);

        virtio_gpu_cmd_set_scanout(vgdev, output->index, 0, 0, 0, 0, 0);
}

static int virtio_gpu_crtc_atomic_check(struct drm_crtc *crtc,
                                        struct drm_crtc_state *state)
{
        return 0;
}

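/*
 * Complete any pending event right away; there is no vblank interrupt to
 * defer it to.
 */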
static void virtio_gpu_crtc_atomic_flush(struct drm_crtc *crtc,
                                         struct drm_crtc_state *old_state)
{
        unsigned long flags;

        spin_lock_irqsave(&crtc->dev->event_lock, flags);
        if (crtc->state->event)
                drm_crtc_send_vblank_event(crtc, crtc->state->event);
        spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
}

static const struct drm_crtc_helper_funcs virtio_gpu_crtc_helper_funcs = {
        .enable        = virtio_gpu_crtc_enable,
        .disable       = virtio_gpu_crtc_disable,
        .mode_set_nofb = virtio_gpu_crtc_mode_set_nofb,
        .atomic_check  = virtio_gpu_crtc_atomic_check,
        .atomic_flush  = virtio_gpu_crtc_atomic_flush,
};

static void virtio_gpu_enc_mode_set(struct drm_encoder *encoder,
                                    struct drm_display_mode *mode,
                                    struct drm_display_mode *adjusted_mode)
{
}

static void virtio_gpu_enc_enable(struct drm_encoder *encoder)
{
}

static void virtio_gpu_enc_disable(struct drm_encoder *encoder)
{
}

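/*
 * Build the mode list: the standard EDID-less modes up to XRES_MAX/YRES_MAX
 * plus the size the host reported for this output, marked as preferred
 * (falling back to XRES_DEF/YRES_DEF if the host reported none).
 */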
static int virtio_gpu_conn_get_modes(struct drm_connector *connector)
{
        struct virtio_gpu_output *output =
                drm_connector_to_virtio_gpu_output(connector);
        struct drm_display_mode *mode = NULL;
        int count, width, height;

        width  = le32_to_cpu(output->info.r.width);
        height = le32_to_cpu(output->info.r.height);
        count = drm_add_modes_noedid(connector, XRES_MAX, YRES_MAX);

        if (width == 0 || height == 0) {
                width = XRES_DEF;
                height = YRES_DEF;
                drm_set_preferred_mode(connector, XRES_DEF, YRES_DEF);
        } else {
                DRM_DEBUG("add mode: %dx%d\n", width, height);
                mode = drm_cvt_mode(connector->dev, width, height, 60,
                                    false, false, false);
                mode->type |= DRM_MODE_TYPE_PREFERRED;
                drm_mode_probed_add(connector, mode);
                count++;
        }

        return count;
}

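/*
 * Non-preferred modes always pass.  A preferred mode is kept only if it is
 * the XRES_DEF/YRES_DEF default or within 16 pixels of the host-reported
 * size; anything else is rejected.
 */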
static int virtio_gpu_conn_mode_valid(struct drm_connector *connector,
                                      struct drm_display_mode *mode)
{
        struct virtio_gpu_output *output =
                drm_connector_to_virtio_gpu_output(connector);
        int width, height;

        width  = le32_to_cpu(output->info.r.width);
        height = le32_to_cpu(output->info.r.height);

        if (!(mode->type & DRM_MODE_TYPE_PREFERRED))
                return MODE_OK;
        if (mode->hdisplay == XRES_DEF && mode->vdisplay == YRES_DEF)
                return MODE_OK;
        if (mode->hdisplay <= width  && mode->hdisplay >= width - 16 &&
            mode->vdisplay <= height && mode->vdisplay >= height - 16)
                return MODE_OK;

        DRM_DEBUG("del mode: %dx%d\n", mode->hdisplay, mode->vdisplay);
        return MODE_BAD;
}

static struct drm_encoder*
virtio_gpu_best_encoder(struct drm_connector *connector)
{
        struct virtio_gpu_output *virtio_gpu_output =
                drm_connector_to_virtio_gpu_output(connector);

        return &virtio_gpu_output->enc;
}

static const struct drm_encoder_helper_funcs virtio_gpu_enc_helper_funcs = {
        .mode_set   = virtio_gpu_enc_mode_set,
        .enable     = virtio_gpu_enc_enable,
        .disable    = virtio_gpu_enc_disable,
};

static const struct drm_connector_helper_funcs virtio_gpu_conn_helper_funcs = {
        .get_modes    = virtio_gpu_conn_get_modes,
        .mode_valid   = virtio_gpu_conn_mode_valid,
        .best_encoder = virtio_gpu_best_encoder,
};

static enum drm_connector_status virtio_gpu_conn_detect(
                        struct drm_connector *connector,
                        bool force)
{
        struct virtio_gpu_output *output =
                drm_connector_to_virtio_gpu_output(connector);

        if (output->info.enabled)
                return connector_status_connected;
        else
                return connector_status_disconnected;
}

static void virtio_gpu_conn_destroy(struct drm_connector *connector)
{
        struct virtio_gpu_output *virtio_gpu_output =
                drm_connector_to_virtio_gpu_output(connector);

        drm_connector_unregister(connector);
        drm_connector_cleanup(connector);
        kfree(virtio_gpu_output);
}

static const struct drm_connector_funcs virtio_gpu_connector_funcs = {
        .dpms = drm_atomic_helper_connector_dpms,
        .detect = virtio_gpu_conn_detect,
        .fill_modes = drm_helper_probe_single_connector_modes,
        .destroy = virtio_gpu_conn_destroy,
        .reset = drm_atomic_helper_connector_reset,
        .atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_encoder_funcs virtio_gpu_enc_funcs = {
        .destroy = drm_encoder_cleanup,
};

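/*
 * Create the plane, crtc, encoder and connector for one scanout.  Scanout 0
 * defaults to enabled at XRES_DEF x YRES_DEF, so its connector reports as
 * connected from the start.
 */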
static int vgdev_output_init(struct virtio_gpu_device *vgdev, int index)
{
        struct drm_device *dev = vgdev->ddev;
        struct virtio_gpu_output *output = vgdev->outputs + index;
        struct drm_connector *connector = &output->conn;
        struct drm_encoder *encoder = &output->enc;
        struct drm_crtc *crtc = &output->crtc;
        struct drm_plane *plane;

        output->index = index;
        if (index == 0) {
                output->info.enabled = cpu_to_le32(true);
                output->info.r.width = cpu_to_le32(XRES_DEF);
                output->info.r.height = cpu_to_le32(YRES_DEF);
        }

        plane = virtio_gpu_plane_init(vgdev, index);
        if (IS_ERR(plane))
                return PTR_ERR(plane);
        drm_crtc_init_with_planes(dev, crtc, plane, NULL,
                                  &virtio_gpu_crtc_funcs, NULL);
        drm_mode_crtc_set_gamma_size(crtc, 256);
        drm_crtc_helper_add(crtc, &virtio_gpu_crtc_helper_funcs);
        plane->crtc = crtc;

        drm_connector_init(dev, connector, &virtio_gpu_connector_funcs,
                           DRM_MODE_CONNECTOR_VIRTUAL);
        drm_connector_helper_add(connector, &virtio_gpu_conn_helper_funcs);

        drm_encoder_init(dev, encoder, &virtio_gpu_enc_funcs,
                         DRM_MODE_ENCODER_VIRTUAL, NULL);
        drm_encoder_helper_add(encoder, &virtio_gpu_enc_helper_funcs);
        encoder->possible_crtcs = 1 << index;

        drm_mode_connector_attach_encoder(connector, encoder);
        drm_connector_register(connector);
        return 0;
}

static struct drm_framebuffer *
virtio_gpu_user_framebuffer_create(struct drm_device *dev,
                                   struct drm_file *file_priv,
                                   const struct drm_mode_fb_cmd2 *mode_cmd)
{
        struct drm_gem_object *obj = NULL;
        struct virtio_gpu_framebuffer *virtio_gpu_fb;
        int ret;

        /* lookup object associated with res handle */
        obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
        if (!obj)
                return ERR_PTR(-EINVAL);

        virtio_gpu_fb = kzalloc(sizeof(*virtio_gpu_fb), GFP_KERNEL);
        if (virtio_gpu_fb == NULL)
                return ERR_PTR(-ENOMEM);

        ret = virtio_gpu_framebuffer_init(dev, virtio_gpu_fb, mode_cmd, obj);
        if (ret) {
                kfree(virtio_gpu_fb);
                if (obj)
                        drm_gem_object_unreference_unlocked(obj);
                return NULL;
        }

        return &virtio_gpu_fb->base;
}

static const struct drm_mode_config_funcs virtio_gpu_mode_funcs = {
        .fb_create = virtio_gpu_user_framebuffer_create,
        .atomic_check = drm_atomic_helper_check,
        .atomic_commit = drm_atomic_helper_commit,
};

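/*
 * Initialize KMS: set the mode config limits and create one output per
 * scanout advertised by the host (vgdev->num_scanouts).
 */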
int virtio_gpu_modeset_init(struct virtio_gpu_device *vgdev)
{
        int i;

        drm_mode_config_init(vgdev->ddev);
        vgdev->ddev->mode_config.funcs = (void *)&virtio_gpu_mode_funcs;

        /* modes will be validated against the framebuffer size */
        vgdev->ddev->mode_config.min_width = XRES_MIN;
        vgdev->ddev->mode_config.min_height = YRES_MIN;
        vgdev->ddev->mode_config.max_width = XRES_MAX;
        vgdev->ddev->mode_config.max_height = YRES_MAX;

        for (i = 0 ; i < vgdev->num_scanouts; ++i)
                vgdev_output_init(vgdev, i);

        drm_mode_config_reset(vgdev->ddev);
        return 0;
}

void virtio_gpu_modeset_fini(struct virtio_gpu_device *vgdev)
{
        virtio_gpu_fbdev_fini(vgdev);
        drm_mode_config_cleanup(vgdev->ddev);
}