linux/drivers/gpu/drm/virtio/virtgpu_display.c
/*
 * Copyright (C) 2015 Red Hat, Inc.
 * All Rights Reserved.
 *
 * Authors:
 *    Dave Airlie
 *    Alon Levy
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "virtgpu_drv.h"
#include <drm/drm_crtc_helper.h>
#include <drm/drm_atomic_helper.h>

#define XRES_MIN    32
#define YRES_MIN    32

#define XRES_DEF  1024
#define YRES_DEF   768

#define XRES_MAX  8192
#define YRES_MAX  8192

static const struct drm_crtc_funcs virtio_gpu_crtc_funcs = {
        .set_config             = drm_atomic_helper_set_config,
        .destroy                = drm_crtc_cleanup,

        .page_flip              = drm_atomic_helper_page_flip,
        .reset                  = drm_atomic_helper_crtc_reset,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state   = drm_atomic_helper_crtc_destroy_state,
};

static void virtio_gpu_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
        struct virtio_gpu_framebuffer *virtio_gpu_fb
                = to_virtio_gpu_framebuffer(fb);

        drm_gem_object_put_unlocked(virtio_gpu_fb->obj);
        drm_framebuffer_cleanup(fb);
        kfree(virtio_gpu_fb);
}

static int
virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb,
                                     struct drm_file *file_priv,
                                     unsigned int flags, unsigned int color,
                                     struct drm_clip_rect *clips,
                                     unsigned int num_clips)
{
        struct virtio_gpu_framebuffer *virtio_gpu_fb
                = to_virtio_gpu_framebuffer(fb);

        return virtio_gpu_surface_dirty(virtio_gpu_fb, clips, num_clips);
}

static int
virtio_gpu_framebuffer_create_handle(struct drm_framebuffer *fb,
                                     struct drm_file *file_priv,
                                     unsigned int *handle)
{
        struct virtio_gpu_framebuffer *virtio_gpu_fb =
                to_virtio_gpu_framebuffer(fb);

        return drm_gem_handle_create(file_priv, virtio_gpu_fb->obj, handle);
}

static const struct drm_framebuffer_funcs virtio_gpu_fb_funcs = {
        .create_handle = virtio_gpu_framebuffer_create_handle,
        .destroy = virtio_gpu_user_framebuffer_destroy,
        .dirty = virtio_gpu_framebuffer_surface_dirty,
};

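/*
 * Wrap a GEM object in a virtio_gpu_framebuffer and register it with the
 * DRM core; the dirty rectangle starts out empty (x1/y1 at INT_MAX).
 */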
int
virtio_gpu_framebuffer_init(struct drm_device *dev,
                            struct virtio_gpu_framebuffer *vgfb,
                            const struct drm_mode_fb_cmd2 *mode_cmd,
                            struct drm_gem_object *obj)
{
        int ret;
        struct virtio_gpu_object *bo;

        vgfb->obj = obj;

        bo = gem_to_virtio_gpu_obj(obj);

        drm_helper_mode_fill_fb_struct(dev, &vgfb->base, mode_cmd);

        ret = drm_framebuffer_init(dev, &vgfb->base, &virtio_gpu_fb_funcs);
        if (ret) {
                vgfb->obj = NULL;
                return ret;
        }

        spin_lock_init(&vgfb->dirty_lock);
        vgfb->x1 = vgfb->y1 = INT_MAX;
        vgfb->x2 = vgfb->y2 = 0;
        return 0;
}

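/* Tell the host to scan out the full width x height of the new mode. */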
static void virtio_gpu_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct virtio_gpu_device *vgdev = dev->dev_private;
        struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);

        virtio_gpu_cmd_set_scanout(vgdev, output->index, 0,
                                   crtc->mode.hdisplay,
                                   crtc->mode.vdisplay, 0, 0);
}

static void virtio_gpu_crtc_atomic_enable(struct drm_crtc *crtc,
                                          struct drm_crtc_state *old_state)
{
}

static void virtio_gpu_crtc_atomic_disable(struct drm_crtc *crtc,
                                           struct drm_crtc_state *old_state)
{
        struct drm_device *dev = crtc->dev;
        struct virtio_gpu_device *vgdev = dev->dev_private;
        struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);

        virtio_gpu_cmd_set_scanout(vgdev, output->index, 0, 0, 0, 0, 0);
}

static int virtio_gpu_crtc_atomic_check(struct drm_crtc *crtc,
                                        struct drm_crtc_state *state)
{
        return 0;
}

static void virtio_gpu_crtc_atomic_flush(struct drm_crtc *crtc,
                                         struct drm_crtc_state *old_state)
{
        unsigned long flags;

        spin_lock_irqsave(&crtc->dev->event_lock, flags);
        if (crtc->state->event)
                drm_crtc_send_vblank_event(crtc, crtc->state->event);
        crtc->state->event = NULL;
        spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
}

static const struct drm_crtc_helper_funcs virtio_gpu_crtc_helper_funcs = {
        .mode_set_nofb = virtio_gpu_crtc_mode_set_nofb,
        .atomic_check  = virtio_gpu_crtc_atomic_check,
        .atomic_flush  = virtio_gpu_crtc_atomic_flush,
        .atomic_enable = virtio_gpu_crtc_atomic_enable,
        .atomic_disable = virtio_gpu_crtc_atomic_disable,
};

static void virtio_gpu_enc_mode_set(struct drm_encoder *encoder,
                                    struct drm_display_mode *mode,
                                    struct drm_display_mode *adjusted_mode)
{
}

static void virtio_gpu_enc_enable(struct drm_encoder *encoder)
{
}

static void virtio_gpu_enc_disable(struct drm_encoder *encoder)
{
}

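/*
 * Advertise the standard no-EDID modes and mark the host-requested size
 * (or the 1024x768 default if the host did not specify one) as preferred.
 */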
static int virtio_gpu_conn_get_modes(struct drm_connector *connector)
{
        struct virtio_gpu_output *output =
                drm_connector_to_virtio_gpu_output(connector);
        struct drm_display_mode *mode = NULL;
        int count, width, height;

        width  = le32_to_cpu(output->info.r.width);
        height = le32_to_cpu(output->info.r.height);
        count = drm_add_modes_noedid(connector, XRES_MAX, YRES_MAX);

        if (width == 0 || height == 0) {
                width = XRES_DEF;
                height = YRES_DEF;
                drm_set_preferred_mode(connector, XRES_DEF, YRES_DEF);
        } else {
                DRM_DEBUG("add mode: %dx%d\n", width, height);
                mode = drm_cvt_mode(connector->dev, width, height, 60,
                                    false, false, false);
                mode->type |= DRM_MODE_TYPE_PREFERRED;
                drm_mode_probed_add(connector, mode);
                count++;
        }

        return count;
}

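/*
 * Non-preferred modes always pass; a preferred mode is only valid if it is
 * the 1024x768 default or within 16 pixels of the host-requested size.
 */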
static int virtio_gpu_conn_mode_valid(struct drm_connector *connector,
                                      struct drm_display_mode *mode)
{
        struct virtio_gpu_output *output =
                drm_connector_to_virtio_gpu_output(connector);
        int width, height;

        width  = le32_to_cpu(output->info.r.width);
        height = le32_to_cpu(output->info.r.height);

        if (!(mode->type & DRM_MODE_TYPE_PREFERRED))
                return MODE_OK;
        if (mode->hdisplay == XRES_DEF && mode->vdisplay == YRES_DEF)
                return MODE_OK;
        if (mode->hdisplay <= width  && mode->hdisplay >= width - 16 &&
            mode->vdisplay <= height && mode->vdisplay >= height - 16)
                return MODE_OK;

        DRM_DEBUG("del mode: %dx%d\n", mode->hdisplay, mode->vdisplay);
        return MODE_BAD;
}

static const struct drm_encoder_helper_funcs virtio_gpu_enc_helper_funcs = {
        .mode_set   = virtio_gpu_enc_mode_set,
        .enable     = virtio_gpu_enc_enable,
        .disable    = virtio_gpu_enc_disable,
};

static const struct drm_connector_helper_funcs virtio_gpu_conn_helper_funcs = {
        .get_modes    = virtio_gpu_conn_get_modes,
        .mode_valid   = virtio_gpu_conn_mode_valid,
};

static enum drm_connector_status virtio_gpu_conn_detect(
                        struct drm_connector *connector,
                        bool force)
{
        struct virtio_gpu_output *output =
                drm_connector_to_virtio_gpu_output(connector);

        if (output->info.enabled)
                return connector_status_connected;
        else
                return connector_status_disconnected;
}

static void virtio_gpu_conn_destroy(struct drm_connector *connector)
{
        struct virtio_gpu_output *virtio_gpu_output =
                drm_connector_to_virtio_gpu_output(connector);

        drm_connector_unregister(connector);
        drm_connector_cleanup(connector);
        kfree(virtio_gpu_output);
}

static const struct drm_connector_funcs virtio_gpu_connector_funcs = {
        .detect = virtio_gpu_conn_detect,
        .fill_modes = drm_helper_probe_single_connector_modes,
        .destroy = virtio_gpu_conn_destroy,
        .reset = drm_atomic_helper_connector_reset,
        .atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_encoder_funcs virtio_gpu_enc_funcs = {
        .destroy = drm_encoder_cleanup,
};

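/*
 * Create the primary and cursor planes, CRTC, encoder and connector for
 * one scanout; scanout 0 starts out enabled at 1024x768.
 */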
static int vgdev_output_init(struct virtio_gpu_device *vgdev, int index)
{
        struct drm_device *dev = vgdev->ddev;
        struct virtio_gpu_output *output = vgdev->outputs + index;
        struct drm_connector *connector = &output->conn;
        struct drm_encoder *encoder = &output->enc;
        struct drm_crtc *crtc = &output->crtc;
        struct drm_plane *primary, *cursor;

        output->index = index;
        if (index == 0) {
                output->info.enabled = cpu_to_le32(true);
                output->info.r.width = cpu_to_le32(XRES_DEF);
                output->info.r.height = cpu_to_le32(YRES_DEF);
        }

        primary = virtio_gpu_plane_init(vgdev, DRM_PLANE_TYPE_PRIMARY, index);
        if (IS_ERR(primary))
                return PTR_ERR(primary);
        cursor = virtio_gpu_plane_init(vgdev, DRM_PLANE_TYPE_CURSOR, index);
        if (IS_ERR(cursor))
                return PTR_ERR(cursor);
        drm_crtc_init_with_planes(dev, crtc, primary, cursor,
                                  &virtio_gpu_crtc_funcs, NULL);
        drm_crtc_helper_add(crtc, &virtio_gpu_crtc_helper_funcs);
        primary->crtc = crtc;
        cursor->crtc = crtc;

        drm_connector_init(dev, connector, &virtio_gpu_connector_funcs,
                           DRM_MODE_CONNECTOR_VIRTUAL);
        drm_connector_helper_add(connector, &virtio_gpu_conn_helper_funcs);

        drm_encoder_init(dev, encoder, &virtio_gpu_enc_funcs,
                         DRM_MODE_ENCODER_VIRTUAL, NULL);
        drm_encoder_helper_add(encoder, &virtio_gpu_enc_helper_funcs);
        encoder->possible_crtcs = 1 << index;

        drm_mode_connector_attach_encoder(connector, encoder);
        drm_connector_register(connector);
        return 0;
}

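/*
 * .fb_create callback: look up the GEM object behind the userspace handle
 * and wrap it in a virtio_gpu_framebuffer.
 */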
static struct drm_framebuffer *
virtio_gpu_user_framebuffer_create(struct drm_device *dev,
                                   struct drm_file *file_priv,
                                   const struct drm_mode_fb_cmd2 *mode_cmd)
{
        struct drm_gem_object *obj = NULL;
        struct virtio_gpu_framebuffer *virtio_gpu_fb;
        int ret;

        /* lookup object associated with res handle */
        obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
        if (!obj)
                return ERR_PTR(-EINVAL);

        virtio_gpu_fb = kzalloc(sizeof(*virtio_gpu_fb), GFP_KERNEL);
        if (virtio_gpu_fb == NULL) {
                /* drop the reference taken by the lookup above */
                drm_gem_object_put_unlocked(obj);
                return ERR_PTR(-ENOMEM);
        }

        ret = virtio_gpu_framebuffer_init(dev, virtio_gpu_fb, mode_cmd, obj);
        if (ret) {
                kfree(virtio_gpu_fb);
                drm_gem_object_put_unlocked(obj);
                /* .fb_create must return an ERR_PTR, not NULL, on failure */
                return ERR_PTR(ret);
        }

        return &virtio_gpu_fb->base;
}

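/*
 * Custom commit_tail: unlike drm_atomic_helper_commit_tail(), CRTC enables
 * are committed before the plane updates.
 */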
static void vgdev_atomic_commit_tail(struct drm_atomic_state *state)
{
        struct drm_device *dev = state->dev;

        drm_atomic_helper_commit_modeset_disables(dev, state);
        drm_atomic_helper_commit_modeset_enables(dev, state);
        drm_atomic_helper_commit_planes(dev, state, 0);

        drm_atomic_helper_commit_hw_done(state);

        drm_atomic_helper_wait_for_vblanks(dev, state);
        drm_atomic_helper_cleanup_planes(dev, state);
}

static const struct drm_mode_config_helper_funcs virtio_mode_config_helpers = {
        .atomic_commit_tail = vgdev_atomic_commit_tail,
};

static const struct drm_mode_config_funcs virtio_gpu_mode_funcs = {
        .fb_create = virtio_gpu_user_framebuffer_create,
        .atomic_check = drm_atomic_helper_check,
        .atomic_commit = drm_atomic_helper_commit,
};

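/* Set up the mode config limits and create one output per host scanout. */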
int virtio_gpu_modeset_init(struct virtio_gpu_device *vgdev)
{
        int i;

        drm_mode_config_init(vgdev->ddev);
        vgdev->ddev->mode_config.funcs = &virtio_gpu_mode_funcs;
        vgdev->ddev->mode_config.helper_private = &virtio_mode_config_helpers;

        /* modes will be validated against the framebuffer size */
        vgdev->ddev->mode_config.min_width = XRES_MIN;
        vgdev->ddev->mode_config.min_height = YRES_MIN;
        vgdev->ddev->mode_config.max_width = XRES_MAX;
        vgdev->ddev->mode_config.max_height = YRES_MAX;

        for (i = 0; i < vgdev->num_scanouts; ++i)
                vgdev_output_init(vgdev, i);

        drm_mode_config_reset(vgdev->ddev);
        return 0;
}

void virtio_gpu_modeset_fini(struct virtio_gpu_device *vgdev)
{
        virtio_gpu_fbdev_fini(vgdev);
        drm_mode_config_cleanup(vgdev->ddev);
}