linux/drivers/gpu/drm/nouveau/nouveau_display.c
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/ttm/ttm_execbuf_util.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <subdev/bios/gpio.h>
#include <subdev/gpio.h>
#include <engine/disp.h>

#include <core/class.h>

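/*
 * drm_framebuffer_funcs for userspace-created framebuffers: destroying the
 * framebuffer drops the reference held on the backing GEM object, and
 * create_handle hands a GEM handle for that object back to userspace.
 */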
static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        if (fb->nvbo)
                drm_gem_object_unreference_unlocked(fb->nvbo->gem);

        drm_framebuffer_cleanup(drm_fb);
        kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
                                       struct drm_file *file_priv,
                                       unsigned int *handle)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        return drm_gem_handle_create(file_priv, fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
        .destroy = nouveau_user_framebuffer_destroy,
        .create_handle = nouveau_user_framebuffer_create_handle,
};

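/*
 * Fill in the DRM framebuffer from the mode_cmd and remember the backing
 * nouveau_bo.  On NV50 and newer boards this also pre-computes the values
 * the display engine needs to scan out of the buffer: which EVO DMA object
 * to use, the format word derived from the framebuffer depth, and the
 * pitch/tiling word.
 */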
int
nouveau_framebuffer_init(struct drm_device *dev,
                         struct nouveau_framebuffer *nv_fb,
                         struct drm_mode_fb_cmd2 *mode_cmd,
                         struct nouveau_bo *nvbo)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct drm_framebuffer *fb = &nv_fb->base;
        int ret;

        drm_helper_mode_fill_fb_struct(fb, mode_cmd);
        nv_fb->nvbo = nvbo;

        if (nv_device(drm->device)->card_type >= NV_50) {
                u32 tile_flags = nouveau_bo_tile_layout(nvbo);
                if (tile_flags == 0x7a00 ||
                    tile_flags == 0xfe00)
                        nv_fb->r_dma = NvEvoFB32;
                else
                if (tile_flags == 0x7000)
                        nv_fb->r_dma = NvEvoFB16;
                else
                        nv_fb->r_dma = NvEvoVRAM_LP;

                switch (fb->depth) {
                case  8: nv_fb->r_format = 0x1e00; break;
                case 15: nv_fb->r_format = 0xe900; break;
                case 16: nv_fb->r_format = 0xe800; break;
                case 24:
                case 32: nv_fb->r_format = 0xcf00; break;
                case 30: nv_fb->r_format = 0xd100; break;
                default:
                         NV_ERROR(drm, "unknown depth %d\n", fb->depth);
                         return -EINVAL;
                }

                if (nv_device(drm->device)->chipset == 0x50)
                        nv_fb->r_format |= (tile_flags << 8);

                if (!tile_flags) {
                        if (nv_device(drm->device)->card_type < NV_D0)
                                nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
                        else
                                nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
                } else {
                        u32 mode = nvbo->tile_mode;
                        if (nv_device(drm->device)->card_type >= NV_C0)
                                mode >>= 4;
                        nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
                }
        }

        ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
        if (ret) {
                return ret;
        }

        return 0;
}

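/*
 * .fb_create hook: look up the GEM object named by the first handle in the
 * request and wrap it in a nouveau_framebuffer.  The reference taken by the
 * lookup is dropped again on every failure path.
 */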
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
                                struct drm_file *file_priv,
                                struct drm_mode_fb_cmd2 *mode_cmd)
{
        struct nouveau_framebuffer *nouveau_fb;
        struct drm_gem_object *gem;
        int ret = -ENOMEM;

        gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
        if (!gem)
                return ERR_PTR(-ENOENT);

        nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
        if (!nouveau_fb)
                goto err_unref;

        ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
        if (ret)
                goto err;

        return &nouveau_fb->base;

err:
        kfree(nouveau_fb);
err_unref:
        drm_gem_object_unreference(gem);
        return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
        .fb_create = nouveau_user_framebuffer_create,
        .output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
        u8 gen_mask;
        int type;
        char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
        { 6, UNDERSCAN_AUTO, "auto" },
        { 6, UNDERSCAN_OFF, "off" },
        { 6, UNDERSCAN_ON, "on" },
        {}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
        { 7, DITHERING_MODE_AUTO, "auto" },
        { 7, DITHERING_MODE_OFF, "off" },
        { 1, DITHERING_MODE_ON, "on" },
        { 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
        { 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
        { 4, DITHERING_MODE_TEMPORAL, "temporal" },
        {}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
        { 6, DITHERING_DEPTH_AUTO, "auto" },
        { 6, DITHERING_DEPTH_6BPC, "6 bpc" },
        { 6, DITHERING_DEPTH_8BPC, "8 bpc" },
        {}
};

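/*
 * Create a DRM enum property from a nouveau_drm_prop_enum_list.  Only the
 * entries whose gen_mask includes bit (1 << gen) are exposed, where gen is
 * the display generation index computed in nouveau_display_create() (0 for
 * pre-NV50, 1 for NV50 up to NVD0, 2 for NVD0 and newer).  The list is
 * walked twice: once to count the matching entries, then to add them.
 */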
#define PROP_ENUM(p,gen,n,list) do {                                           \
        struct nouveau_drm_prop_enum_list *l = (list);                         \
        int c = 0;                                                             \
        while (l->gen_mask) {                                                  \
                if (l->gen_mask & (1 << (gen)))                                \
                        c++;                                                   \
                l++;                                                           \
        }                                                                      \
        if (c) {                                                               \
                p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);        \
                l = (list);                                                    \
                c = 0;                                                         \
                while (p && l->gen_mask) {                                     \
                        if (l->gen_mask & (1 << (gen))) {                      \
                                drm_property_add_enum(p, c, l->type, l->name); \
                                c++;                                           \
                        }                                                      \
                        l++;                                                   \
                }                                                              \
        }                                                                      \
} while(0)

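/*
 * Bring the display engine up: run the hardware-specific init hook,
 * re-enable output polling, and request hotplug events for every connector
 * that has a hotplug GPIO assigned.
 */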
int
nouveau_display_init(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_display *disp = nouveau_display(dev);
        struct nouveau_gpio *gpio = nouveau_gpio(drm->device);
        struct drm_connector *connector;
        int ret;

        ret = disp->init(dev);
        if (ret)
                return ret;

        /* enable polling for external displays */
        drm_kms_helper_poll_enable(dev);

        /* enable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                if (gpio && conn->hpd.func != DCB_GPIO_UNUSED) {
                        nouveau_event_get(gpio->events, conn->hpd.line,
                                         &conn->hpd_func);
                }
        }

        return ret;
}

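/*
 * Counterpart to nouveau_display_init(): drop the hotplug event requests,
 * stop output polling and run the hardware-specific fini hook.
 */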
void
nouveau_display_fini(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_display *disp = nouveau_display(dev);
        struct nouveau_gpio *gpio = nouveau_gpio(drm->device);
        struct drm_connector *connector;

        /* disable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                if (gpio && conn->hpd.func != DCB_GPIO_UNUSED) {
                        nouveau_event_put(gpio->events, conn->hpd.line,
                                         &conn->hpd_func);
                }
        }

        drm_kms_helper_poll_disable(dev);
        disp->fini(dev);
}

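/*
 * One-time display setup: allocate the nouveau_display structure, set up the
 * KMS mode config (properties, resolution limits, fb_base) and, when
 * modesetting is enabled for this device, create the generation-specific
 * display implementation (nv04 or nv50), vblank support and backlight.
 */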
int
nouveau_display_create(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_display *disp;
        u32 pclass = dev->pdev->class >> 8;
        int ret, gen;

        disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
        if (!disp)
                return -ENOMEM;

        drm_mode_config_init(dev);
        drm_mode_create_scaling_mode_property(dev);
        drm_mode_create_dvi_i_properties(dev);

        if (nv_device(drm->device)->card_type < NV_50)
                gen = 0;
        else
        if (nv_device(drm->device)->card_type < NV_D0)
                gen = 1;
        else
                gen = 2;

        PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
        PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
        PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

        disp->underscan_hborder_property =
                drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

        disp->underscan_vborder_property =
                drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

        if (gen >= 1) {
                /* -90..+90 */
                disp->vibrant_hue_property =
                        drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

                /* -100..+100 */
                disp->color_vibrance_property =
                        drm_property_create_range(dev, 0, "color vibrance", 0, 200);
        }

        dev->mode_config.funcs = &nouveau_mode_config_funcs;
        dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

        dev->mode_config.min_width = 0;
        dev->mode_config.min_height = 0;
        if (nv_device(drm->device)->card_type < NV_10) {
                dev->mode_config.max_width = 2048;
                dev->mode_config.max_height = 2048;
        } else
        if (nv_device(drm->device)->card_type < NV_50) {
                dev->mode_config.max_width = 4096;
                dev->mode_config.max_height = 4096;
        } else {
                dev->mode_config.max_width = 8192;
                dev->mode_config.max_height = 8192;
        }

        dev->mode_config.preferred_depth = 24;
        dev->mode_config.prefer_shadow = 1;

        drm_kms_helper_poll_init(dev);
        drm_kms_helper_poll_disable(dev);

        if (nouveau_modeset == 1 ||
            (nouveau_modeset < 0 && pclass == PCI_CLASS_DISPLAY_VGA)) {
                if (drm->vbios.dcb.entries) {
                        if (nv_device(drm->device)->card_type < NV_50)
                                ret = nv04_display_create(dev);
                        else
                                ret = nv50_display_create(dev);
                } else {
                        ret = 0;
                }

                if (ret)
                        goto disp_create_err;

                if (dev->mode_config.num_crtc) {
                        ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
                        if (ret)
                                goto vblank_err;
                }

                nouveau_backlight_init(dev);
        }

        return 0;

vblank_err:
        disp->dtor(dev);
disp_create_err:
        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);
        return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
        struct nouveau_display *disp = nouveau_display(dev);

        nouveau_backlight_exit(dev);
        drm_vblank_cleanup(dev);

        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);

        if (disp->dtor)
                disp->dtor(dev);

        nouveau_drm(dev)->display = NULL;
        kfree(disp);
}

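/*
 * Suspend path: shut the display down, then unpin every framebuffer that is
 * currently bound to a CRTC as well as the cursor buffers, so the VRAM they
 * occupy can be evicted while the device is asleep.
 */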
int
nouveau_display_suspend(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct drm_crtc *crtc;

        nouveau_display_fini(dev);

        NV_INFO(drm, "unpinning framebuffer(s)...\n");
        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_framebuffer *nouveau_fb;

                nouveau_fb = nouveau_framebuffer(crtc->fb);
                if (!nouveau_fb || !nouveau_fb->nvbo)
                        continue;

                nouveau_bo_unpin(nouveau_fb->nvbo);
        }

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

                nouveau_bo_unmap(nv_crtc->cursor.nvbo);
                nouveau_bo_unpin(nv_crtc->cursor.nvbo);
        }

        return 0;
}

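/*
 * Resume path: pin the scanout and cursor buffers back into VRAM, wake up
 * fbcon, reinitialize the display engine and force a full modeset.  The CLUT
 * depth is cleared first so the gamma tables get reloaded, and each CRTC's
 * saved cursor offset and position are restored afterwards.
 */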
void
nouveau_display_resume(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct drm_crtc *crtc;
        int ret;

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_framebuffer *nouveau_fb;

                nouveau_fb = nouveau_framebuffer(crtc->fb);
                if (!nouveau_fb || !nouveau_fb->nvbo)
                        continue;

                nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM);
        }

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

                ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
                if (!ret)
                        ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
                if (ret)
                        NV_ERROR(drm, "Could not pin/map cursor.\n");
        }

        nouveau_fbcon_set_suspend(dev, 0);
        nouveau_fbcon_zfill_all(dev);

        nouveau_display_init(dev);

        /* Force CLUT to get re-loaded during modeset */
        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

                nv_crtc->lut.depth = 0;
        }

        drm_helper_resume_force_mode(dev);

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
                u32 offset = nv_crtc->cursor.nvbo->bo.offset;

                nv_crtc->cursor.set_offset(nv_crtc, offset);
                nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
                                                 nv_crtc->cursor_saved_y);
        }
}

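/*
 * Queue the flip state on the channel's pending-flip list, synchronize the
 * channel with whatever fence is still attached to the old framebuffer, then
 * emit the page-flip software method and a fence that signals once the
 * channel has executed it.
 */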
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
                       struct nouveau_bo *old_bo,
                       struct nouveau_bo *new_bo,
                       struct nouveau_page_flip_state *s,
                       struct nouveau_fence **pfence)
{
        struct nouveau_fence_chan *fctx = chan->fence;
        struct nouveau_drm *drm = chan->drm;
        struct drm_device *dev = drm->dev;
        unsigned long flags;
        int ret;

        /* Queue it to the pending list */
        spin_lock_irqsave(&dev->event_lock, flags);
        list_add_tail(&s->head, &fctx->flip);
        spin_unlock_irqrestore(&dev->event_lock, flags);

        /* Synchronize with the old framebuffer */
        ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
        if (ret)
                goto fail;

        /* Emit the pageflip */
        ret = RING_SPACE(chan, 3);
        if (ret)
                goto fail;

        if (nv_device(drm->device)->card_type < NV_C0) {
                BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
                OUT_RING  (chan, 0x00000000);
                OUT_RING  (chan, 0x00000000);
        } else {
                BEGIN_NVC0(chan, 0, NV10_SUBCHAN_REF_CNT, 1);
                OUT_RING  (chan, 0);
                BEGIN_IMC0(chan, 0, NVSW_SUBCHAN_PAGE_FLIP, 0x0000);
        }
        FIRE_RING (chan);

        ret = nouveau_fence_new(chan, false, pfence);
        if (ret)
                goto fail;

        return 0;
fail:
        spin_lock_irqsave(&dev->event_lock, flags);
        list_del(&s->head);
        spin_unlock_irqrestore(&dev->event_lock, flags);
        return ret;
}

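/*
 * .page_flip hook.  Picks the channel the flip will be emitted on (the one
 * the new buffer was last used on, falling back to the kernel channel),
 * pins the new framebuffer into VRAM, reserves both buffer objects, and
 * hands the flip state to nouveau_page_flip_emit().  On NV50+ the display
 * engine is also programmed here via nv50_display_flip_next().
 */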
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
                       struct drm_pending_vblank_event *event)
{
        struct drm_device *dev = crtc->dev;
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
        struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
        struct nouveau_page_flip_state *s;
        struct nouveau_channel *chan = NULL;
        struct nouveau_fence *fence;
        struct ttm_validate_buffer resv[2] = {
                { .bo = &old_bo->bo },
                { .bo = &new_bo->bo },
        };
        struct ww_acquire_ctx ticket;
        LIST_HEAD(res);
        int ret;

        if (!drm->channel)
                return -ENODEV;

        s = kzalloc(sizeof(*s), GFP_KERNEL);
        if (!s)
                return -ENOMEM;

        /* Choose the channel the flip will be handled in */
        spin_lock(&old_bo->bo.bdev->fence_lock);
        fence = new_bo->bo.sync_obj;
        if (fence)
                chan = fence->channel;
        if (!chan)
                chan = drm->channel;
        spin_unlock(&old_bo->bo.bdev->fence_lock);

        if (new_bo != old_bo) {
                ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
                if (ret)
                        goto fail_free;

                list_add(&resv[1].head, &res);
        }
        list_add(&resv[0].head, &res);

        mutex_lock(&chan->cli->mutex);
        ret = ttm_eu_reserve_buffers(&ticket, &res);
        if (ret)
                goto fail_unpin;

        /* Initialize a page flip struct */
        *s = (struct nouveau_page_flip_state)
                { { }, event, nouveau_crtc(crtc)->index,
                  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
                  new_bo->bo.offset };

        /* Emit a page flip */
        if (nv_device(drm->device)->card_type >= NV_50) {
                ret = nv50_display_flip_next(crtc, fb, chan, 0);
                if (ret)
                        goto fail_unreserve;
        } else {
                struct nv04_display *dispnv04 = nv04_display(dev);
                nouveau_bo_ref(new_bo, &dispnv04->image[nouveau_crtc(crtc)->index]);
        }

        ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
        if (ret)
                goto fail_unreserve;
        mutex_unlock(&chan->cli->mutex);

        /* Update the crtc struct and cleanup */
        crtc->fb = fb;

        ttm_eu_fence_buffer_objects(&ticket, &res, fence);
        if (old_bo != new_bo)
                nouveau_bo_unpin(old_bo);
        nouveau_fence_unref(&fence);
        return 0;

fail_unreserve:
        ttm_eu_backoff_reservation(&ticket, &res);
fail_unpin:
        mutex_unlock(&chan->cli->mutex);
        if (old_bo != new_bo)
                nouveau_bo_unpin(new_bo);
fail_free:
        kfree(s);
        return ret;
}

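/*
 * Called once a queued flip has executed on its channel: deliver the vblank
 * event (if any), pop the flip state off the channel's pending list and hand
 * a copy of it back to the caller.
 */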
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
                         struct nouveau_page_flip_state *ps)
{
        struct nouveau_fence_chan *fctx = chan->fence;
        struct nouveau_drm *drm = chan->drm;
        struct drm_device *dev = drm->dev;
        struct nouveau_page_flip_state *s;
        unsigned long flags;

        spin_lock_irqsave(&dev->event_lock, flags);

        if (list_empty(&fctx->flip)) {
                NV_ERROR(drm, "unexpected pageflip\n");
                spin_unlock_irqrestore(&dev->event_lock, flags);
                return -EINVAL;
        }

        s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
        if (s->event)
                drm_send_vblank_event(dev, -1, s->event);

        list_del(&s->head);
        if (ps)
                *ps = *s;
        kfree(s);

        spin_unlock_irqrestore(&dev->event_lock, flags);
        return 0;
}

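/*
 * Invoked once the page-flip software method emitted by
 * nouveau_page_flip_emit() executes on a channel.  On pre-NV50 hardware the
 * new scanout address still has to be written to the CRTC here; NV50+
 * display engines handle that themselves.
 */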
int
nouveau_flip_complete(void *data)
{
        struct nouveau_channel *chan = data;
        struct nouveau_drm *drm = chan->drm;
        struct nouveau_page_flip_state state;

        if (!nouveau_finish_page_flip(chan, &state)) {
                if (nv_device(drm->device)->card_type < NV_50) {
                        nv_set_crtc_base(drm->dev, state.crtc, state.offset +
                                         state.y * state.pitch +
                                         state.x * state.bpp / 8);
                }
        }

        return 0;
}

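/*
 * Dumb buffer support: allocate a linear VRAM buffer with the pitch rounded
 * up to 256 bytes and the size rounded up to a whole page, then return a GEM
 * handle for it.
 */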
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
                            struct drm_mode_create_dumb *args)
{
        struct nouveau_bo *bo;
        int ret;

        args->pitch = roundup(args->width * (args->bpp / 8), 256);
        args->size = args->pitch * args->height;
        args->size = roundup(args->size, PAGE_SIZE);

        ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
        if (ret)
                return ret;

        ret = drm_gem_handle_create(file_priv, bo->gem, &args->handle);
        drm_gem_object_unreference_unlocked(bo->gem);
        return ret;
}

int
nouveau_display_dumb_destroy(struct drm_file *file_priv, struct drm_device *dev,
                             uint32_t handle)
{
        return drm_gem_handle_delete(file_priv, handle);
}

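/*
 * Look up a dumb buffer by handle and report the fake offset userspace
 * should pass to mmap() on the DRM device node.
 */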
int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
                                struct drm_device *dev,
                                uint32_t handle, uint64_t *poffset)
{
        struct drm_gem_object *gem;

        gem = drm_gem_object_lookup(dev, file_priv, handle);
        if (gem) {
                struct nouveau_bo *bo = gem->driver_private;
                *poffset = bo->bo.addr_space_offset;
                drm_gem_object_unreference_unlocked(gem);
                return 0;
        }

        return -ENOENT;
}