linux/drivers/gpu/drm/nouveau/nouveau_display.c
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <subdev/bios/gpio.h>
#include <subdev/gpio.h>
#include <engine/disp.h>

#include <core/class.h>

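/*
 * DRM framebuffer hooks for userspace-created framebuffers: destroying one
 * drops the reference on the backing GEM object and frees the nouveau
 * wrapper; create_handle returns a GEM handle for the backing buffer.
 */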
static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        if (fb->nvbo)
                drm_gem_object_unreference_unlocked(fb->nvbo->gem);

        drm_framebuffer_cleanup(drm_fb);
        kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
                                       struct drm_file *file_priv,
                                       unsigned int *handle)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        return drm_gem_handle_create(file_priv, fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
        .destroy = nouveau_user_framebuffer_destroy,
        .create_handle = nouveau_user_framebuffer_create_handle,
};

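/*
 * Fill in the generic framebuffer state and, on NV50 and later boards, the
 * hardware-specific values (EVO DMA object, surface format and pitch/tiling
 * words) derived from the buffer's tile flags and the framebuffer depth,
 * then register the framebuffer with the DRM core.
 */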
int
nouveau_framebuffer_init(struct drm_device *dev,
                         struct nouveau_framebuffer *nv_fb,
                         struct drm_mode_fb_cmd2 *mode_cmd,
                         struct nouveau_bo *nvbo)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct drm_framebuffer *fb = &nv_fb->base;
        int ret;

        drm_helper_mode_fill_fb_struct(fb, mode_cmd);
        nv_fb->nvbo = nvbo;

        if (nv_device(drm->device)->card_type >= NV_50) {
                u32 tile_flags = nouveau_bo_tile_layout(nvbo);
                if (tile_flags == 0x7a00 ||
                    tile_flags == 0xfe00)
                        nv_fb->r_dma = NvEvoFB32;
                else
                if (tile_flags == 0x7000)
                        nv_fb->r_dma = NvEvoFB16;
                else
                        nv_fb->r_dma = NvEvoVRAM_LP;

                switch (fb->depth) {
                case  8: nv_fb->r_format = 0x1e00; break;
                case 15: nv_fb->r_format = 0xe900; break;
                case 16: nv_fb->r_format = 0xe800; break;
                case 24:
                case 32: nv_fb->r_format = 0xcf00; break;
                case 30: nv_fb->r_format = 0xd100; break;
                default:
                         NV_ERROR(drm, "unknown depth %d\n", fb->depth);
                         return -EINVAL;
                }

                if (nv_device(drm->device)->chipset == 0x50)
                        nv_fb->r_format |= (tile_flags << 8);

                if (!tile_flags) {
                        if (nv_device(drm->device)->card_type < NV_D0)
                                nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
                        else
                                nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
                } else {
                        u32 mode = nvbo->tile_mode;
                        if (nv_device(drm->device)->card_type >= NV_C0)
                                mode >>= 4;
                        nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
                }
        }

        ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
        if (ret)
                return ret;

        return 0;
}

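/*
 * .fb_create hook: look up the GEM object named by userspace and wrap it in
 * a nouveau_framebuffer.
 */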
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
                                struct drm_file *file_priv,
                                struct drm_mode_fb_cmd2 *mode_cmd)
{
        struct nouveau_framebuffer *nouveau_fb;
        struct drm_gem_object *gem;
        int ret;

        gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
        if (!gem)
                return ERR_PTR(-ENOENT);

        nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
        if (!nouveau_fb) {
                drm_gem_object_unreference_unlocked(gem);
                return ERR_PTR(-ENOMEM);
        }

        ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
        if (ret) {
                kfree(nouveau_fb);
                drm_gem_object_unreference_unlocked(gem);
                return ERR_PTR(ret);
        }

        return &nouveau_fb->base;
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
        .fb_create = nouveau_user_framebuffer_create,
        .output_poll_changed = nouveau_fbcon_output_poll_changed,
};

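/*
 * Connector property tables.  gen_mask is a bitmask of display generations
 * (bit 0: pre-NV50, bit 1: NV50 family, bit 2: NVD0 and newer) for which an
 * entry is exposed; see the gen selection in nouveau_display_create().
 */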
struct nouveau_drm_prop_enum_list {
        u8 gen_mask;
        int type;
        char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
        { 6, UNDERSCAN_AUTO, "auto" },
        { 6, UNDERSCAN_OFF, "off" },
        { 6, UNDERSCAN_ON, "on" },
        {}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
        { 7, DITHERING_MODE_AUTO, "auto" },
        { 7, DITHERING_MODE_OFF, "off" },
        { 1, DITHERING_MODE_ON, "on" },
        { 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
        { 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
        { 4, DITHERING_MODE_TEMPORAL, "temporal" },
        {}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
        { 6, DITHERING_DEPTH_AUTO, "auto" },
        { 6, DITHERING_DEPTH_6BPC, "6 bpc" },
        { 6, DITHERING_DEPTH_8BPC, "8 bpc" },
        {}
};

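/*
 * Create a DRM enum property named n containing only the entries from list
 * whose gen_mask includes the given display generation.  The property is
 * skipped entirely when no entry matches.
 */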
#define PROP_ENUM(p,gen,n,list) do {                                           \
        struct nouveau_drm_prop_enum_list *l = (list);                         \
        int c = 0;                                                             \
        while (l->gen_mask) {                                                  \
                if (l->gen_mask & (1 << (gen)))                                \
                        c++;                                                   \
                l++;                                                           \
        }                                                                      \
        if (c) {                                                               \
                p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);        \
                l = (list);                                                    \
                c = 0;                                                         \
                while (p && l->gen_mask) {                                     \
                        if (l->gen_mask & (1 << (gen))) {                      \
                                drm_property_add_enum(p, c, l->type, l->name); \
                                c++;                                           \
                        }                                                      \
                        l++;                                                   \
                }                                                              \
        }                                                                      \
} while(0)

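/*
 * Bring the display up: run the hardware-specific init routine, start
 * output polling and enable hotplug interrupts for connectors that have an
 * HPD GPIO assigned.
 */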
int
nouveau_display_init(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_display *disp = nouveau_display(dev);
        struct nouveau_gpio *gpio = nouveau_gpio(drm->device);
        struct drm_connector *connector;
        int ret;

        ret = disp->init(dev);
        if (ret)
                return ret;

        /* enable polling for external displays */
        drm_kms_helper_poll_enable(dev);

        /* enable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                if (gpio && conn->hpd.func != DCB_GPIO_UNUSED) {
                        nouveau_event_get(gpio->events, conn->hpd.line,
                                         &conn->hpd_func);
                }
        }

        return ret;
}

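/*
 * Reverse of nouveau_display_init(): disable hotplug interrupts, stop output
 * polling and run the hardware-specific fini routine.
 */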
void
nouveau_display_fini(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_display *disp = nouveau_display(dev);
        struct nouveau_gpio *gpio = nouveau_gpio(drm->device);
        struct drm_connector *connector;

        /* disable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                if (gpio && conn->hpd.func != DCB_GPIO_UNUSED) {
                        nouveau_event_put(gpio->events, conn->hpd.line,
                                         &conn->hpd_func);
                }
        }

        drm_kms_helper_poll_disable(dev);
        disp->fini(dev);
}

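/*
 * One-time KMS setup: allocate the nouveau_display structure, register the
 * connector properties, set the mode_config limits for the chipset and, if
 * modesetting is enabled for this device, construct the NV04 or NV50 display
 * implementation along with vblank and backlight support.
 */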
int
nouveau_display_create(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_display *disp;
        u32 pclass = dev->pdev->class >> 8;
        int ret, gen;

        disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
        if (!disp)
                return -ENOMEM;

        drm_mode_config_init(dev);
        drm_mode_create_scaling_mode_property(dev);
        drm_mode_create_dvi_i_properties(dev);

        if (nv_device(drm->device)->card_type < NV_50)
                gen = 0;
        else
        if (nv_device(drm->device)->card_type < NV_D0)
                gen = 1;
        else
                gen = 2;

        PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
        PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
        PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

        disp->underscan_hborder_property =
                drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

        disp->underscan_vborder_property =
                drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

        if (gen >= 1) {
                /* -90..+90 */
                disp->vibrant_hue_property =
                        drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

                /* -100..+100 */
                disp->color_vibrance_property =
                        drm_property_create_range(dev, 0, "color vibrance", 0, 200);
        }

        dev->mode_config.funcs = &nouveau_mode_config_funcs;
        dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

        dev->mode_config.min_width = 0;
        dev->mode_config.min_height = 0;
        if (nv_device(drm->device)->card_type < NV_10) {
                dev->mode_config.max_width = 2048;
                dev->mode_config.max_height = 2048;
        } else
        if (nv_device(drm->device)->card_type < NV_50) {
                dev->mode_config.max_width = 4096;
                dev->mode_config.max_height = 4096;
        } else {
                dev->mode_config.max_width = 8192;
                dev->mode_config.max_height = 8192;
        }

        dev->mode_config.preferred_depth = 24;
        dev->mode_config.prefer_shadow = 1;

        drm_kms_helper_poll_init(dev);
        drm_kms_helper_poll_disable(dev);

        if (nouveau_modeset == 1 ||
            (nouveau_modeset < 0 && pclass == PCI_CLASS_DISPLAY_VGA)) {
                if (nv_device(drm->device)->card_type < NV_50)
                        ret = nv04_display_create(dev);
                else
                        ret = nv50_display_create(dev);
                if (ret)
                        goto disp_create_err;

                if (dev->mode_config.num_crtc) {
                        ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
                        if (ret)
                                goto vblank_err;
                }

                nouveau_backlight_init(dev);
        }

        return 0;

vblank_err:
        disp->dtor(dev);
disp_create_err:
        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);
        return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
        struct nouveau_display *disp = nouveau_display(dev);

        nouveau_backlight_exit(dev);
        drm_vblank_cleanup(dev);

        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);

        if (disp->dtor)
                disp->dtor(dev);

        nouveau_drm(dev)->display = NULL;
        kfree(disp);
}

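/*
 * Suspend path: shut the display down, then unpin every scanout framebuffer
 * and cursor buffer so their VRAM backing can be evicted before the device
 * sleeps.
 */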
int
nouveau_display_suspend(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct drm_crtc *crtc;

        nouveau_display_fini(dev);

        NV_INFO(drm, "unpinning framebuffer(s)...\n");
        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_framebuffer *nouveau_fb;

                nouveau_fb = nouveau_framebuffer(crtc->fb);
                if (!nouveau_fb || !nouveau_fb->nvbo)
                        continue;

                nouveau_bo_unpin(nouveau_fb->nvbo);
        }

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

                nouveau_bo_unmap(nv_crtc->cursor.nvbo);
                nouveau_bo_unpin(nv_crtc->cursor.nvbo);
        }

        return 0;
}

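/*
 * Resume path: re-pin the scanout and cursor buffers, restart the display,
 * force a full modeset (with the CLUT marked dirty so it gets reloaded) and
 * restore the saved cursor offsets and positions.
 */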
void
nouveau_display_resume(struct drm_device *dev)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct drm_crtc *crtc;
        int ret;

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_framebuffer *nouveau_fb;

                nouveau_fb = nouveau_framebuffer(crtc->fb);
                if (!nouveau_fb || !nouveau_fb->nvbo)
                        continue;

                nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM);
        }

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

                ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
                if (!ret)
                        ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
                if (ret)
                        NV_ERROR(drm, "Could not pin/map cursor.\n");
        }

        nouveau_fbcon_set_suspend(dev, 0);
        nouveau_fbcon_zfill_all(dev);

        nouveau_display_init(dev);

        /* Force CLUT to get re-loaded during modeset */
        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

                nv_crtc->lut.depth = 0;
        }

        drm_helper_resume_force_mode(dev);

        list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
                struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
                u32 offset = nv_crtc->cursor.nvbo->bo.offset;

                nv_crtc->cursor.set_offset(nv_crtc, offset);
                nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
                                                 nv_crtc->cursor_saved_y);
        }
}

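/*
 * Pin the new scanout buffer into VRAM and reserve both buffer objects so
 * neither can be moved or fenced elsewhere while the flip is emitted.
 */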
static int
nouveau_page_flip_reserve(struct nouveau_bo *old_bo,
                          struct nouveau_bo *new_bo)
{
        int ret;

        ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
        if (ret)
                return ret;

        ret = ttm_bo_reserve(&new_bo->bo, false, false, false, 0);
        if (ret)
                goto fail;

        if (likely(old_bo != new_bo)) {
                ret = ttm_bo_reserve(&old_bo->bo, false, false, false, 0);
                if (ret)
                        goto fail_unreserve;
        }

        return 0;

fail_unreserve:
        ttm_bo_unreserve(&new_bo->bo);
fail:
        nouveau_bo_unpin(new_bo);
        return ret;
}

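/*
 * Counterpart of nouveau_page_flip_reserve(): attach the flip fence (if any)
 * to both buffers, drop the reservations and unpin the old scanout buffer.
 */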
static void
nouveau_page_flip_unreserve(struct nouveau_bo *old_bo,
                            struct nouveau_bo *new_bo,
                            struct nouveau_fence *fence)
{
        nouveau_bo_fence(new_bo, fence);
        ttm_bo_unreserve(&new_bo->bo);

        if (likely(old_bo != new_bo)) {
                nouveau_bo_fence(old_bo, fence);
                ttm_bo_unreserve(&old_bo->bo);
        }

        nouveau_bo_unpin(old_bo);
}

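/*
 * Queue the flip state on the channel's pending-flip list, synchronize with
 * rendering still outstanding on the old buffer, then emit the pageflip
 * methods and a fence that signals once the flip has been processed.
 */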
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
                       struct nouveau_bo *old_bo,
                       struct nouveau_bo *new_bo,
                       struct nouveau_page_flip_state *s,
                       struct nouveau_fence **pfence)
{
        struct nouveau_fence_chan *fctx = chan->fence;
        struct nouveau_drm *drm = chan->drm;
        struct drm_device *dev = drm->dev;
        unsigned long flags;
        int ret;

        /* Queue it to the pending list */
        spin_lock_irqsave(&dev->event_lock, flags);
        list_add_tail(&s->head, &fctx->flip);
        spin_unlock_irqrestore(&dev->event_lock, flags);

        /* Synchronize with the old framebuffer */
        ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
        if (ret)
                goto fail;

        /* Emit the pageflip */
        ret = RING_SPACE(chan, 3);
        if (ret)
                goto fail;

        if (nv_device(drm->device)->card_type < NV_C0) {
                BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
                OUT_RING  (chan, 0x00000000);
                OUT_RING  (chan, 0x00000000);
        } else {
                BEGIN_NVC0(chan, 0, NV10_SUBCHAN_REF_CNT, 1);
                OUT_RING  (chan, 0);
                BEGIN_IMC0(chan, 0, NVSW_SUBCHAN_PAGE_FLIP, 0x0000);
        }
        FIRE_RING (chan);

        ret = nouveau_fence_new(chan, false, pfence);
        if (ret)
                goto fail;

        return 0;
fail:
        spin_lock_irqsave(&dev->event_lock, flags);
        list_del(&s->head);
        spin_unlock_irqrestore(&dev->event_lock, flags);
        return ret;
}

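/*
 * Implementation of the .page_flip CRTC hook: pin and reserve the buffers,
 * record the flip state, pick the channel the flip will be emitted on
 * (preferring the channel that last touched the new buffer) and hand the
 * rest off to nouveau_page_flip_emit().
 */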
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
                       struct drm_pending_vblank_event *event)
{
        struct drm_device *dev = crtc->dev;
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
        struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
        struct nouveau_page_flip_state *s;
        struct nouveau_channel *chan = NULL;
        struct nouveau_fence *fence;
        int ret;

        if (!drm->channel)
                return -ENODEV;

        s = kzalloc(sizeof(*s), GFP_KERNEL);
        if (!s)
                return -ENOMEM;

        /* Don't let the buffers go away while we flip */
        ret = nouveau_page_flip_reserve(old_bo, new_bo);
        if (ret)
                goto fail_free;

        /* Initialize a page flip struct */
        *s = (struct nouveau_page_flip_state)
                { { }, event, nouveau_crtc(crtc)->index,
                  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
                  new_bo->bo.offset };

        /* Choose the channel the flip will be handled in */
        fence = new_bo->bo.sync_obj;
        if (fence)
                chan = fence->channel;
        if (!chan)
                chan = drm->channel;
        mutex_lock(&chan->cli->mutex);

        /* Emit a page flip */
        if (nv_device(drm->device)->card_type >= NV_50) {
                ret = nv50_display_flip_next(crtc, fb, chan, 0);
                if (ret) {
                        mutex_unlock(&chan->cli->mutex);
                        goto fail_unreserve;
                }
        }

        ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
        mutex_unlock(&chan->cli->mutex);
        if (ret)
                goto fail_unreserve;

        /* Update the crtc struct and cleanup */
        crtc->fb = fb;

        nouveau_page_flip_unreserve(old_bo, new_bo, fence);
        nouveau_fence_unref(&fence);
        return 0;

fail_unreserve:
        nouveau_page_flip_unreserve(old_bo, new_bo, NULL);
fail_free:
        kfree(s);
        return ret;
}

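/*
 * Completion handler: pop the oldest pending flip state off the channel's
 * list, deliver the userspace vblank event if one was requested, and hand
 * the state back to the caller.
 */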
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
                         struct nouveau_page_flip_state *ps)
{
        struct nouveau_fence_chan *fctx = chan->fence;
        struct nouveau_drm *drm = chan->drm;
        struct drm_device *dev = drm->dev;
        struct nouveau_page_flip_state *s;
        unsigned long flags;

        spin_lock_irqsave(&dev->event_lock, flags);

        if (list_empty(&fctx->flip)) {
                NV_ERROR(drm, "unexpected pageflip\n");
                spin_unlock_irqrestore(&dev->event_lock, flags);
                return -EINVAL;
        }

        s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
        if (s->event)
                drm_send_vblank_event(dev, -1, s->event);

        list_del(&s->head);
        if (ps)
                *ps = *s;
        kfree(s);

        spin_unlock_irqrestore(&dev->event_lock, flags);
        return 0;
}

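/*
 * Called from the channel's flip-completion path once the GPU has executed
 * the flip.  On pre-NV50 hardware the CRTC scanout base is updated from the
 * CPU here; NV50 and later handle it in the display code.
 */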
int
nouveau_flip_complete(void *data)
{
        struct nouveau_channel *chan = data;
        struct nouveau_drm *drm = chan->drm;
        struct nouveau_page_flip_state state;

        if (!nouveau_finish_page_flip(chan, &state)) {
                if (nv_device(drm->device)->card_type < NV_50) {
                        nv_set_crtc_base(drm->dev, state.crtc, state.offset +
                                         state.y * state.pitch +
                                         state.x * state.bpp / 8);
                }
        }

        return 0;
}

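/*
 * Dumb buffer support: allocate a linear VRAM buffer with the pitch rounded
 * up to 256 bytes and the size rounded up to a whole page, and return a GEM
 * handle for it.
 */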
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
                            struct drm_mode_create_dumb *args)
{
        struct nouveau_bo *bo;
        int ret;

        args->pitch = roundup(args->width * (args->bpp / 8), 256);
        args->size = args->pitch * args->height;
        args->size = roundup(args->size, PAGE_SIZE);

        ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
        if (ret)
                return ret;

        ret = drm_gem_handle_create(file_priv, bo->gem, &args->handle);
        drm_gem_object_unreference_unlocked(bo->gem);
        return ret;
}

int
nouveau_display_dumb_destroy(struct drm_file *file_priv, struct drm_device *dev,
                             uint32_t handle)
{
        return drm_gem_handle_delete(file_priv, handle);
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
                                struct drm_device *dev,
                                uint32_t handle, uint64_t *poffset)
{
        struct drm_gem_object *gem;

        gem = drm_gem_object_lookup(dev, file_priv, handle);
        if (gem) {
                struct nouveau_bo *bo = gem->driver_private;
                *poffset = bo->bo.addr_space_offset;
                drm_gem_object_unreference_unlocked(gem);
                return 0;
        }

        return -ENOENT;
}