/* linux/drivers/gpu/drm/nouveau/nv50_display.c */
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */
  24
  25#include <linux/dma-mapping.h>
  26
  27#include <drm/drmP.h>
  28#include <drm/drm_atomic.h>
  29#include <drm/drm_atomic_helper.h>
  30#include <drm/drm_crtc_helper.h>
  31#include <drm/drm_dp_helper.h>
  32#include <drm/drm_fb_helper.h>
  33#include <drm/drm_plane_helper.h>
  34
  35#include <nvif/class.h>
  36#include <nvif/cl0002.h>
  37#include <nvif/cl5070.h>
  38#include <nvif/cl507a.h>
  39#include <nvif/cl507b.h>
  40#include <nvif/cl507c.h>
  41#include <nvif/cl507d.h>
  42#include <nvif/cl507e.h>
  43#include <nvif/event.h>
  44
  45#include "nouveau_drv.h"
  46#include "nouveau_dma.h"
  47#include "nouveau_gem.h"
  48#include "nouveau_connector.h"
  49#include "nouveau_encoder.h"
  50#include "nouveau_crtc.h"
  51#include "nouveau_fence.h"
  52#include "nouveau_fbcon.h"
  53#include "nv50_display.h"
  54
/* Number of EVO DMA channels used by the display engine. */
#define EVO_DMA_NR 9

/* EVO channel indices: the master (core) channel, then per-head base
 * (flip), overlay, overlay-immediate and cursor channels.  The stride
 * of 4 between groups presumably matches the maximum head count. */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))	/* 256 bytes per channel */
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
  70
  71/******************************************************************************
  72 * Atomic state
  73 *****************************************************************************/
/* Cast a drm_atomic_state back to its containing nv50_atom. */
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

/* Driver-private atomic state wrapping the DRM core state. */
struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;	/* NOTE(review): presumably nv50_outp_atom
				 * entries (their ::head field) -- confirm
				 * against the commit code */
	bool lock_core;
	bool flush_disable;
};
  83
/* Per-encoder (output path) state tracked across an atomic commit. */
struct nv50_outp_atom {
	struct list_head head;		/* list linkage (see nv50_atom::outp) */

	struct drm_encoder *encoder;
	bool flush_disable;

	/* Flags describing what must be disabled for this encoder;
	 * 'mask' aliases the individual bits for bulk tests. */
	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} clr;

	/* Flags describing what must be programmed for this encoder. */
	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} set;
};
 104
/* Cast a drm_crtc_state back to its containing nv50_head_atom. */
#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

/* Per-head (CRTC) hardware state computed during atomic check and
 * applied during commit. */
struct nv50_head_atom {
	struct drm_crtc_state state;

	/* Viewport scaler: input (i) and output (o) dimensions. */
	struct {
		u16 iW;
		u16 iH;
		u16 oW;
		u16 oH;
	} view;

	/* Raster timings derived from the DRM display mode. */
	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;
		struct {
			u32 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;	/* second-field blanking (interlace) */
			u16 blank2e;
			u16 blankus;	/* NOTE(review): presumably blanking
					 * period in microseconds -- confirm */
		} v;
	} mode;

	/* Gamma LUT buffer: ctxdma handle and 40-bit VRAM offset. */
	struct {
		u32 handle;
		u64 offset:40;
	} lut;

	/* Core channel framebuffer parameters. */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  format;
		u8  kind:7;	/* memory kind (tiling type) */
		u8  layout:1;	/* tiling layout selector */
		u8  block:4;
		u32 pitch:20;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} core;

	/* Hardware cursor parameters. */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  layout:1;
		u8  format:1;
	} curs;

	/* Base (primary plane) channel configuration. */
	struct {
		u8  depth;
		u8  cpp;	/* bytes per pixel */
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} base;

	/* Overlay channel configuration. */
	struct {
		u8 cpp;
	} ovly;

	/* Dithering configuration. */
	struct {
		bool enable:1;
		u8 bits:2;
		u8 mode:4;
	} dither;

	/* Procamp (color processing): saturation as a cos/sin pair. */
	struct {
		struct {
			u16 cos:12;
			u16 sin:12;
		} sat;
	} procamp;

	/* What must be disabled during this commit... */
	union {
		struct {
			bool core:1;
			bool curs:1;
		};
		u8 mask;
	} clr;

	/* ...and what must be (re)programmed. */
	union {
		struct {
			bool core:1;
			bool curs:1;
			bool view:1;
			bool mode:1;
			bool base:1;
			bool ovly:1;
			bool dither:1;
			bool procamp:1;
		};
		u16 mask;
	} set;
};
 213
 214static inline struct nv50_head_atom *
 215nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
 216{
 217        struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
 218        if (IS_ERR(statec))
 219                return (void *)statec;
 220        return nv50_head_atom(statec);
 221}
 222
/* Cast a drm_plane_state back to its containing nv50_wndw_atom. */
#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

/* Per-window (plane) state for an atomic update. */
struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;		/* requested flip interval in vblanks */

	struct drm_rect clip;	/* set to the head's active area in check */

	/* Completion notifier configuration. */
	struct {
		u32  handle;
		u16  offset:12;
		bool awaken:1;
	} ntfy;

	/* Flip-synchronisation semaphore configuration. */
	struct {
		u32 handle;
		u16 offset:12;
		u32 acquire;
		u32 release;
	} sema;

	struct {
		u8 enable:2;
	} lut;

	/* Framebuffer parameters pushed to the window's channel. */
	struct {
		u8  mode:2;	/* 0 = timed flip, 1 = immediate
				 * (see nv50_wndw_atomic_check_acquire) */
		u8  interval:4;

		u8  format;
		u8  kind:7;	/* memory kind from the bo's tile_flags */
		u8  layout:1;	/* 0 = tiled, 1 = linear (pitch) */
		u8  block:4;	/* block size from the bo's tile_mode */
		u32 pitch:20;
		u16 w;
		u16 h;

		u32 handle;
		u64 offset;
	} image;

	/* Window position on the head. */
	struct {
		u16 x;
		u16 y;
	} point;

	/* Resources to tear down during this commit... */
	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
		};
		u8 mask;
	} clr;

	/* ...and state to program. */
	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
			bool lut:1;
			bool point:1;
		};
		u8 mask;
	} set;
};
 289
 290/******************************************************************************
 291 * EVO channel
 292 *****************************************************************************/
 293
/* Common base of every EVO display channel: the channel object itself
 * and the device it belongs to. */
struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
 298
 299static int
 300nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
 301                 const s32 *oclass, u8 head, void *data, u32 size,
 302                 struct nv50_chan *chan)
 303{
 304        struct nvif_sclass *sclass;
 305        int ret, i, n;
 306
 307        chan->device = device;
 308
 309        ret = n = nvif_object_sclass_get(disp, &sclass);
 310        if (ret < 0)
 311                return ret;
 312
 313        while (oclass[0]) {
 314                for (i = 0; i < n; i++) {
 315                        if (sclass[i].oclass == oclass[0]) {
 316                                ret = nvif_object_init(disp, 0, oclass[0],
 317                                                       data, size, &chan->user);
 318                                if (ret == 0)
 319                                        nvif_object_map(&chan->user);
 320                                nvif_object_sclass_put(&sclass);
 321                                return ret;
 322                        }
 323                }
 324                oclass++;
 325        }
 326
 327        nvif_object_sclass_put(&sclass);
 328        return -ENOSYS;
 329}
 330
/* Tear down a channel object created by nv50_chan_create(). */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
 336
 337/******************************************************************************
 338 * PIO EVO channel
 339 *****************************************************************************/
 340
/* A PIO-mode EVO channel (no pushbuf; methods written directly). */
struct nv50_pioc {
	struct nv50_chan base;
};

/* Destroy a PIO channel. */
static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

/* Create a PIO channel; thin wrapper around nv50_chan_create(). */
static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
 359
 360/******************************************************************************
 361 * Overlay Immediate
 362 *****************************************************************************/
 363
/* Overlay-immediate channel (PIO). */
struct nv50_oimm {
	struct nv50_pioc base;
};

/* Create the overlay-immediate channel for @head. */
static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	/* Candidate classes, newest hardware first; zero-terminated. */
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
 387
 388/******************************************************************************
 389 * DMA EVO channel
 390 *****************************************************************************/
 391
/* A DMA context object (ctxdma) owned by a DMA channel; kept on the
 * channel's list so it can be reused for framebuffers of the same
 * memory kind and torn down with the channel. */
struct nv50_dmac_ctxdma {
	struct list_head head;		/* entry in nv50_dmac::ctxdma */
	struct nvif_object object;
};

/* A DMA-mode EVO channel: pushbuf plus standard ctxdmas. */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* DMA address of the pushbuf page */
	u32 *ptr;		/* CPU mapping of the pushbuf */

	struct nvif_object sync;	/* ctxdma for the shared sync bo */
	struct nvif_object vram;	/* ctxdma covering all of VRAM */
	struct list_head ctxdma;	/* framebuffer ctxdmas */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
 411
/* Unlink and free a ctxdma; safe for error unwinding because
 * nvif_object_fini() tolerates an object whose init failed. */
static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
 419
/* Look up (or create) the ctxdma describing @fb's memory kind for this
 * channel.  The handle encodes the kind, so one ctxdma is shared by
 * all framebuffers of the same kind.  Returns an ERR_PTR on failure.
 */
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	/* Reuse an existing ctxdma for this memory kind, if any. */
	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	/* Linked up-front; nv50_dmac_ctxdma_del() unlinks it again on
	 * failure below. */
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->device.info.ram_user - 1;

	/* Chipset-generation-specific encoding of page/kind info. */
	if (drm->device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
 479
/* Release everything created by nv50_dmac_create(): framebuffer
 * ctxdmas, the standard sync/vram ctxdmas, the channel itself, and
 * the coherent pushbuf allocation. */
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	/* _safe iteration: each del unlinks the current entry. */
	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
 500
 501static int
 502nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
 503                 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
 504                 struct nv50_dmac *dmac)
 505{
 506        struct nv50_disp_core_channel_dma_v0 *args = data;
 507        struct nvif_object pushbuf;
 508        int ret;
 509
 510        mutex_init(&dmac->lock);
 511
 512        dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
 513                                       &dmac->handle, GFP_KERNEL);
 514        if (!dmac->ptr)
 515                return -ENOMEM;
 516
 517        ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
 518                               &(struct nv_dma_v0) {
 519                                        .target = NV_DMA_V0_TARGET_PCI_US,
 520                                        .access = NV_DMA_V0_ACCESS_RD,
 521                                        .start = dmac->handle + 0x0000,
 522                                        .limit = dmac->handle + 0x0fff,
 523                               }, sizeof(struct nv_dma_v0), &pushbuf);
 524        if (ret)
 525                return ret;
 526
 527        args->pushbuf = nvif_handle(&pushbuf);
 528
 529        ret = nv50_chan_create(device, disp, oclass, head, data, size,
 530                               &dmac->base);
 531        nvif_object_fini(&pushbuf);
 532        if (ret)
 533                return ret;
 534
 535        ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
 536                               &(struct nv_dma_v0) {
 537                                        .target = NV_DMA_V0_TARGET_VRAM,
 538                                        .access = NV_DMA_V0_ACCESS_RDWR,
 539                                        .start = syncbuf + 0x0000,
 540                                        .limit = syncbuf + 0x0fff,
 541                               }, sizeof(struct nv_dma_v0),
 542                               &dmac->sync);
 543        if (ret)
 544                return ret;
 545
 546        ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
 547                               &(struct nv_dma_v0) {
 548                                        .target = NV_DMA_V0_TARGET_VRAM,
 549                                        .access = NV_DMA_V0_ACCESS_RDWR,
 550                                        .start = 0,
 551                                        .limit = device->info.ram_user - 1,
 552                               }, sizeof(struct nv_dma_v0),
 553                               &dmac->vram);
 554        if (ret)
 555                return ret;
 556
 557        INIT_LIST_HEAD(&dmac->ctxdma);
 558        return ret;
 559}
 560
 561/******************************************************************************
 562 * Core
 563 *****************************************************************************/
 564
/* The core (master) EVO channel. */
struct nv50_mast {
	struct nv50_dmac base;
};

/* Create the core channel; class list is newest-first, zero-terminated. */
static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,	/* fixed pushbuf handle for the core */
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
 595
 596/******************************************************************************
 597 * Base
 598 *****************************************************************************/
 599
/* A base (primary plane) channel.  addr/data are NOTE(review):
 * presumably the flip semaphore location/value -- confirm against the
 * page-flip code, which is outside this chunk. */
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

/* Create the base channel for @head. */
static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,	/* per-head pushbuf handle */
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
 628
 629/******************************************************************************
 630 * Overlay
 631 *****************************************************************************/
 632
/* An overlay (DMA) channel. */
struct nv50_ovly {
	struct nv50_dmac base;
};

/* Create the overlay channel for @head. */
static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,	/* per-head pushbuf handle */
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
 658
/* A display head: the generic CRTC plus its overlay channels. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;	/* overlay DMA channel */
	struct nv50_oimm oimm;	/* overlay immediate (PIO) channel */
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
/* Underlying nv50_chan of any channel wrapper type. */
#define nv50_chan(c) (&(c)->base.base)
/* Object class of a channel -- used for hardware-generation checks. */
#define nv50_vers(c) nv50_chan(c)->user.oclass
 670
/* Per-device display state. */
struct nv50_disp {
	struct nvif_object *disp;	/* the display object */
	struct nv50_mast mast;		/* core EVO channel */

	struct nouveau_bo *sync;	/* shared sync bo (see EVO_SYNC) */

	struct mutex mutex;
};

/* Retrieve the nv50_disp hanging off the generic nouveau display. */
static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

/* Shorthand for the device's core channel. */
#define nv50_mast(d) (&nv50_disp(d)->mast)
 687
 688/******************************************************************************
 689 * EVO channel helpers
 690 *****************************************************************************/
 691static u32 *
 692evo_wait(void *evoc, int nr)
 693{
 694        struct nv50_dmac *dmac = evoc;
 695        struct nvif_device *device = dmac->base.device;
 696        u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
 697
 698        mutex_lock(&dmac->lock);
 699        if (put + nr >= (PAGE_SIZE / 4) - 8) {
 700                dmac->ptr[put] = 0x20000000;
 701
 702                nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
 703                if (nvif_msec(device, 2000,
 704                        if (!nvif_rd32(&dmac->base.user, 0x0004))
 705                                break;
 706                ) < 0) {
 707                        mutex_unlock(&dmac->lock);
 708                        printk(KERN_ERR "nouveau: evo channel stalled\n");
 709                        return NULL;
 710                }
 711
 712                put = 0;
 713        }
 714
 715        return dmac->ptr + put;
 716}
 717
/* Publish words written since evo_wait() by advancing the channel's
 * PUT pointer, then release the pushbuf lock evo_wait() took. */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
 725
/* Emit a method header (size in data words, method address) into the
 * pushbuf; traces the stream when KMS debugging is enabled. */
#define evo_mthd(p,m,s) do {                                                   \
	const u32 _m = (m), _s = (s);                                          \
	if (drm_debug & DRM_UT_KMS)                                            \
		printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);             \
	*((p)++) = ((_s << 18) | _m);                                          \
} while(0)

/* Emit one data word for the preceding evo_mthd(). */
#define evo_data(p,d) do {                                                     \
	const u32 _d = (d);                                                    \
	if (drm_debug & DRM_UT_KMS)                                            \
		printk(KERN_ERR "\t%08x\n", _d);                               \
	*((p)++) = _d;                                                         \
} while(0)
 739
 740/******************************************************************************
 741 * Plane
 742 *****************************************************************************/
/* Cast a drm_plane to its containing nv50_wndw. */
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

/* A hardware window (plane) and the channel that drives it. */
struct nv50_wndw {
	const struct nv50_wndw_func *func;	/* hw-specific hooks below */
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;	/* NOTE(review): presumably notifier offset in the
			 * sync bo -- confirm against the hw backends */
	u16 sema;	/* likewise for the semaphore */
	u32 data;
};

/* Hardware-specific operations for a window type. */
struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	/* Validate/record state when the window becomes visible. */
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	/* Undo state when the window is hidden. */
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	/* Program / disable the individual hardware resources. */
	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	/* Kick the queued state; returns an interlock value. */
	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
 778
 779static int
 780nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
 781{
 782        if (asyw->set.ntfy)
 783                return wndw->func->ntfy_wait_begun(wndw, asyw);
 784        return 0;
 785}
 786
 787static u32
 788nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
 789                    struct nv50_wndw_atom *asyw)
 790{
 791        if (asyw->clr.sema && (!asyw->set.sema || flush))
 792                wndw->func->sema_clr(wndw);
 793        if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
 794                wndw->func->ntfy_clr(wndw);
 795        if (asyw->clr.image && (!asyw->set.image || flush))
 796                wndw->func->image_clr(wndw);
 797
 798        return flush ? wndw->func->update(wndw, interlock) : 0;
 799}
 800
 801static u32
 802nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
 803                    struct nv50_wndw_atom *asyw)
 804{
 805        if (interlock) {
 806                asyw->image.mode = 0;
 807                asyw->image.interval = 1;
 808        }
 809
 810        if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
 811        if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
 812        if (asyw->set.image) wndw->func->image_set(wndw, asyw);
 813        if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
 814        if (asyw->set.point) wndw->func->point    (wndw, asyw);
 815
 816        return wndw->func->update(wndw, interlock);
 817}
 818
/* Release a window that is being hidden: run the hw-specific release
 * hook and zero the notifier/semaphore handles so later checks treat
 * them as gone. */
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}
 830
/* Validate the new window state against its head and fill in the
 * hardware image parameters from the framebuffer.  Returns 0, or a
 * negative error from the hw-specific acquire hook. */
static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	/* Clip to the head's active display area. */
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	if (asyw->image.kind) {
		/* Tiled bo: the tile_mode encoding moved up four bits on
		 * chipset 0xc0 (Fermi) and later. */
		asyw->image.layout = 0;
		if (drm->device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		/* Linear bo: pitch layout, pitch taken as-is in bytes. */
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		/* interval != 0 selects a timed flip (mode 0) with that
		 * interval; interval == 0 selects an immediate flip
		 * (mode 1) with no interval. */
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
 875
/* Plane .atomic_check: decide which window resources must be acquired,
 * released, cleared or programmed for this commit.
 *
 * Naming convention: "armw"/"harm" refer to the currently-armed
 * (hardware) window/head states, "asyw"/"asyh" to the pending states
 * being checked. */
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	/* New CRTC: does it modeset (asym) and will it be active (asyv)? */
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	/* Old CRTC: is the window currently live on an active head? */
	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		/* Window will be visible: update position and acquire. */
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
		if (ret)
			return ret;
	} else
	if (varm) {
		/* Visible before but not after: release the window. */
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		/* Not visible before or after: nothing to do. */
		return 0;
	}

	/* On disable or full modeset, schedule teardown of the armed
	 * resources; reprogram the LUT if the window stays visible. */
	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}
 929
 930static void
 931nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
 932{
 933        struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
 934        struct nouveau_drm *drm = nouveau_drm(plane->dev);
 935
 936        NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
 937        if (!old_state->fb)
 938                return;
 939
 940        nouveau_bo_unpin(fb->nvbo);
 941}
 942
 943static int
 944nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
 945{
 946        struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
 947        struct nouveau_drm *drm = nouveau_drm(plane->dev);
 948        struct nv50_wndw *wndw = nv50_wndw(plane);
 949        struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
 950        struct nv50_head_atom *asyh;
 951        struct nv50_dmac_ctxdma *ctxdma;
 952        int ret;
 953
 954        NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
 955        if (!asyw->state.fb)
 956                return 0;
 957
 958        ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
 959        if (ret)
 960                return ret;
 961
 962        ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
 963        if (IS_ERR(ctxdma)) {
 964                nouveau_bo_unpin(fb->nvbo);
 965                return PTR_ERR(ctxdma);
 966        }
 967
 968        asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
 969        asyw->image.handle = ctxdma->object.handle;
 970        asyw->image.offset = fb->nvbo->bo.offset;
 971
 972        if (wndw->func->prepare) {
 973                asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
 974                if (IS_ERR(asyh))
 975                        return PTR_ERR(asyh);
 976
 977                wndw->func->prepare(wndw, asyh, asyw);
 978        }
 979
 980        return 0;
 981}
 982
/* Atomic plane helper vtable shared by all nv50 window (plane) types. */
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};
 989
 990static void
 991nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
 992                               struct drm_plane_state *state)
 993{
 994        struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
 995        __drm_atomic_helper_plane_destroy_state(&asyw->state);
 996        kfree(asyw);
 997}
 998
 999static struct drm_plane_state *
1000nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
1001{
1002        struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
1003        struct nv50_wndw_atom *asyw;
1004        if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
1005                return NULL;
1006        __drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
1007        asyw->interval = 1;
1008        asyw->sema = armw->sema;
1009        asyw->ntfy = armw->ntfy;
1010        asyw->image = armw->image;
1011        asyw->point = armw->point;
1012        asyw->lut = armw->lut;
1013        asyw->clr.mask = 0;
1014        asyw->set.mask = 0;
1015        return &asyw->state;
1016}
1017
1018static void
1019nv50_wndw_reset(struct drm_plane *plane)
1020{
1021        struct nv50_wndw_atom *asyw;
1022
1023        if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
1024                return;
1025
1026        if (plane->state)
1027                plane->funcs->atomic_destroy_state(plane, plane->state);
1028        plane->state = &asyw->state;
1029        plane->state->plane = plane;
1030        plane->state->rotation = DRM_ROTATE_0;
1031}
1032
1033static void
1034nv50_wndw_destroy(struct drm_plane *plane)
1035{
1036        struct nv50_wndw *wndw = nv50_wndw(plane);
1037        void *data;
1038        nvif_notify_fini(&wndw->notify);
1039        data = wndw->func->dtor(wndw);
1040        drm_plane_cleanup(&wndw->plane);
1041        kfree(data);
1042}
1043
/* Plane vtable shared by all nv50 window types; atomic helpers do the
 * legacy entry-point plumbing.
 */
static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.set_property = drm_atomic_helper_plane_set_property,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};
1054
/* Disarm the window's event notifier (see nv50_wndw_init for the
 * matching get).
 */
static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}
1060
/* Arm the window's event notifier. */
static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}
1066
1067static int
1068nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
1069               enum drm_plane_type type, const char *name, int index,
1070               struct nv50_dmac *dmac, const u32 *format, int nformat,
1071               struct nv50_wndw *wndw)
1072{
1073        int ret;
1074
1075        wndw->func = func;
1076        wndw->dmac = dmac;
1077
1078        ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
1079                                       nformat, type, "%s-%d", name, index);
1080        if (ret)
1081                return ret;
1082
1083        drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
1084        return 0;
1085}
1086
1087/******************************************************************************
1088 * Cursor plane
1089 *****************************************************************************/
1090#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)
1091
1092struct nv50_curs {
1093        struct nv50_wndw wndw;
1094        struct nvif_object chan;
1095};
1096
/* Kick an update on the cursor channel.  Returns 0: cursor channels
 * contribute no interlock bits to the core channel update.
 */
static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}
1104
/* Move the cursor: y in the high 16 bits, x in the low 16 bits. */
static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}
1111
1112static void
1113nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
1114                  struct nv50_wndw_atom *asyw)
1115{
1116        u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
1117        u32 offset = asyw->image.offset;
1118        if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
1119                asyh->curs.handle = handle;
1120                asyh->curs.offset = offset;
1121                asyh->set.curs = asyh->curs.visible;
1122        }
1123}
1124
/* Cursor is being removed from this head. */
static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}
1131
/* Validate cursor plane state: unscaled, square, 32x32 or 64x64,
 * ARGB8888 only.  Records layout/format in the head state.
 */
static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	/* Hardware layout selector for the two supported cursor sizes. */
	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	/* Cursor images must be square. */
	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->pixel_format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		/* Should be unreachable: the format list only advertises
		 * ARGB8888.
		 */
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}
1165
1166static void *
1167nv50_curs_dtor(struct nv50_wndw *wndw)
1168{
1169        struct nv50_curs *curs = nv50_curs(wndw);
1170        nvif_object_fini(&curs->chan);
1171        return curs;
1172}
1173
/* Pixel formats advertised for the cursor plane. */
static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};
1178
/* Window vtable for cursor planes; no sema/ntfy/image hooks as cursor
 * channels are programmed immediately rather than via push buffer.
 */
static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};
1188
/* Create a cursor plane for the given head.
 *
 * Probes for the newest cursor immediate class supported by the display
 * object (list is ordered newest first), registers the DRM plane, then
 * allocates the per-head cursor channel.
 */
static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{   G82_DISP_CURSOR, 0 },
		{  NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		/* NOTE(review): curs isn't freed here; presumably it's
		 * reclaimed via plane destroy during mode_config teardown
		 * on the caller's error path — confirm.
		 */
		return ret;
	}

	return 0;
}
1236
1237/******************************************************************************
1238 * Primary plane
1239 *****************************************************************************/
1240#define nv50_base(p) container_of((p), struct nv50_base, wndw)
1241
1242struct nv50_base {
1243        struct nv50_wndw wndw;
1244        struct nv50_sync chan;
1245        int id;
1246};
1247
/* Base channel event handler; keep the notifier armed. */
static int
nv50_base_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}
1253
1254static void
1255nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1256{
1257        struct nv50_base *base = nv50_base(wndw);
1258        u32 *push;
1259        if ((push = evo_wait(&base->chan, 2))) {
1260                evo_mthd(push, 0x00e0, 1);
1261                evo_data(push, asyw->lut.enable << 30);
1262                evo_kick(push, &base->chan);
1263        }
1264}
1265
/* Detach the current image: zero the flip-control word (0x0084) and the
 * image ctxdma handle (0x00c0) — the counterparts of what
 * nv50_base_image_set() programs.
 */
static void
nv50_base_image_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 4))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1279
/* Program the base image: flip mode/interval, image ctxdma handle, and
 * the surface description (offset, size, layout/pitch/block, format).
 * The surface method layout differs across the three class generations,
 * hence the three branches keyed on the channel's object class.
 */
static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			/* Original NV50: surface kind included with format. */
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			/* G82..GT21x: same methods, no kind field. */
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		} else {
			/* GF110+: surface methods moved to 0x0400, layout
			 * bit position changed.
			 */
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}
1325
1326static void
1327nv50_base_ntfy_clr(struct nv50_wndw *wndw)
1328{
1329        struct nv50_base *base = nv50_base(wndw);
1330        u32 *push;
1331        if ((push = evo_wait(&base->chan, 2))) {
1332                evo_mthd(push, 0x00a4, 1);
1333                evo_data(push, 0x00000000);
1334                evo_kick(push, &base->chan);
1335        }
1336}
1337
/* Attach the completion notifier: control word (awaken flag + offset)
 * at 0x00a0, ctxdma handle at 0x00a4.
 */
static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}
1350
1351static void
1352nv50_base_sema_clr(struct nv50_wndw *wndw)
1353{
1354        struct nv50_base *base = nv50_base(wndw);
1355        u32 *push;
1356        if ((push = evo_wait(&base->chan, 2))) {
1357                evo_mthd(push, 0x0094, 1);
1358                evo_data(push, 0x00000000);
1359                evo_kick(push, &base->chan);
1360        }
1361}
1362
/* Attach the flip semaphore: offset, acquire value, release value and
 * ctxdma handle (methods 0x0088..0x0094).
 */
static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
1377
1378static u32
1379nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
1380{
1381        struct nv50_base *base = nv50_base(wndw);
1382        u32 *push;
1383
1384        if (!(push = evo_wait(&base->chan, 2)))
1385                return 0;
1386        evo_mthd(push, 0x0080, 1);
1387        evo_data(push, interlock);
1388        evo_kick(push, &base->chan);
1389
1390        if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
1391                return interlock ? 2 << (base->id * 8) : 0;
1392        return interlock ? 2 << (base->id * 4) : 0;
1393}
1394
/* Poll the notifier dword in the sync BO until the channel marks the
 * flip as begun ((data & 0xc0000000) == 0x40000000), with a 2 second
 * timeout.  Returns 0 or -ETIMEDOUT.
 */
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}
1409
/* Base plane is being disabled on this head: zero cpp so the head's
 * base/ovly usage bounds get cleared (see nv50_head_base/ovly).
 */
static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
1416
/* Validate base plane state (no scaling supported) and translate the
 * DRM pixel format into the hardware format code, recording geometry
 * in the head state for the usage-bounds methods.
 */
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const u32 format = asyw->state.fb->pixel_format;
	const struct drm_format_info *info;
	int ret;

	/* Reject formats with no depth (e.g. YUV). */
	info = drm_format_info(format);
	if (!info || !info->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	/* Geometry consumed by nv50_head_base() bounds programming. */
	asyh->base.depth = info->depth;
	asyh->base.cpp = info->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	/* DRM fourcc -> hardware surface format code. */
	switch (format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		/* Unreachable: format list only advertises the above. */
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
1463
1464static void *
1465nv50_base_dtor(struct nv50_wndw *wndw)
1466{
1467        struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
1468        struct nv50_base *base = nv50_base(wndw);
1469        nv50_dmac_destroy(&base->chan.base, disp->disp);
1470        return base;
1471}
1472
/* Pixel formats advertised for the base (primary) plane; must stay in
 * sync with the switch in nv50_base_acquire().
 */
static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};
1486
/* Window vtable for base planes: full push-buffer programming including
 * semaphore/notifier/image/LUT methods.
 */
static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};
1502
/* Create the base (primary) plane for a head: register the DRM plane,
 * create its DMA channel, and hook up the flip-completion notifier.
 */
static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	/* Per-head notifier/semaphore slots in the shared sync buffer. */
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		/* NOTE(review): base isn't freed here; presumably it's
		 * reclaimed via plane destroy during mode_config teardown
		 * on the caller's error path — confirm.
		 */
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
1540
1541/******************************************************************************
1542 * Head
1543 *****************************************************************************/
1544static void
1545nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
1546{
1547        struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1548        u32 *push;
1549        if ((push = evo_wait(core, 2))) {
1550                if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
1551                        evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
1552                else
1553                        evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
1554                evo_data(push, (asyh->procamp.sat.sin << 20) |
1555                               (asyh->procamp.sat.cos << 8));
1556                evo_kick(push, core);
1557        }
1558}
1559
/* Program the head's dithering control (mode, bit depth, enable).  The
 * method offset moved across the three core channel class generations.
 */
static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
				asyh->dither.enable);
		evo_kick(push, core);
	}
}
1579
/* Program the head's overlay usage bounds from the base cpp.  Unlike
 * nv50_head_base() there is no 1-byte case here — presumably 8bpp isn't
 * supported for overlays.  bounds == 0 disables.
 */
static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;	/* usage bound valid */
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
1608
/* Program the head's base usage bounds from the base plane cpp
 * (bytes per pixel).  bounds == 0 disables.
 */
static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;	/* usage bound valid */
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
1638
/* Hide the head's cursor via the core channel, and (on G82+ classes)
 * also clear the cursor ctxdma handle.
 */
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
1663
1664static void
1665nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1666{
1667        struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1668        u32 *push;
1669        if ((push = evo_wait(core, 5))) {
1670                if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
1671                        evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1672                        evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1673                                                    (asyh->curs.format << 24));
1674                        evo_data(push, asyh->curs.offset >> 8);
1675                } else
1676                if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
1677                        evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1678                        evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1679                                                    (asyh->curs.format << 24));
1680                        evo_data(push, asyh->curs.offset >> 8);
1681                        evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
1682                        evo_data(push, asyh->curs.handle);
1683                } else {
1684                        evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
1685                        evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1686                                                    (asyh->curs.format << 24));
1687                        evo_data(push, asyh->curs.offset >> 8);
1688                        evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
1689                        evo_data(push, asyh->curs.handle);
1690                }
1691                evo_kick(push, core);
1692        }
1693}
1694
/* Clear the head's core (ISO) surface ctxdma handle. */
static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
1709
/* Program the core channel's scanout surface for this head: offset,
 * size, layout/pitch/block mode, format, context DMA handle, and the
 * scanout position.  Three method layouts exist: original NV50,
 * G82..GF110 (drops the 'kind' field), and GF110+ (0x300 stride,
 * layout bit moved to bit 24).
 */
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
1763
/* Disable the head's LUT.  On G82..GF110 and GF110+ cores the LUT's
 * context DMA handle is also cleared (second method); the data words
 * are the per-class "LUT disabled" mode values.
 */
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
1788
/* Enable the head's LUT, pointing it at the buffer described by
 * asyh->lut (offset within the VRAM context DMA, plus the handle on
 * G82+).  The mode words mirror the "enabled" counterparts of the
 * values used in nv50_head_lut_clr().
 */
static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
1818
/* Program the head's raster timings from the values pre-computed in
 * nv50_head_atomic_check_mode().  Pre-GF110 cores take the pixel
 * clock (kHz) and an interlace flag up front; GF110+ cores take the
 * timing words first and the clock in Hz (hence the * 1000) at the
 * end, programmed twice (target and actual — exact meaning of the
 * second word unconfirmed, see the ??? markers).
 */
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
1859
/* Program the head's scaler with the input size (iW/iH, the user
 * mode) and output size (oW/oH, the scanout region computed by
 * nv50_head_atomic_check_view()).  The output size is written to
 * multiple viewport methods (two on pre-GF110, three on GF110+).
 */
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
1887
/* Tear down head channels flagged for clearing, unless the same
 * channel is also flagged for (re)setting in this flush — the set
 * will reprogram the state anyway — or @y forces an unconditional
 * teardown.
 *
 * NOTE(review): the LUT is keyed off clr.core rather than its own
 * flag, matching the set path where nv50_head_lut_set() is also
 * driven by set.core — presumably because LUT state is only valid
 * alongside a core surface; confirm before changing.
 */
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}
1898
/* Program every piece of head state whose 'set' flag was raised
 * during atomic check.  The order is significant at least for
 * core-before-curs: nv50_head_core_set() may raise set.curs (cursor
 * must be re-specified alongside HeadSetContextDmaIso), which is
 * only honoured because curs is evaluated afterwards.
 */
static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
1912
1913static void
1914nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
1915                               struct nv50_head_atom *asyh,
1916                               struct nouveau_conn_atom *asyc)
1917{
1918        const int vib = asyc->procamp.color_vibrance - 100;
1919        const int hue = asyc->procamp.vibrant_hue - 90;
1920        const int adj = (vib > 0) ? 50 : 0;
1921        asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
1922        asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
1923        asyh->set.procamp = true;
1924}
1925
1926static void
1927nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
1928                              struct nv50_head_atom *asyh,
1929                              struct nouveau_conn_atom *asyc)
1930{
1931        struct drm_connector *connector = asyc->state.connector;
1932        u32 mode = 0x00;
1933
1934        if (asyc->dither.mode == DITHERING_MODE_AUTO) {
1935                if (asyh->base.depth > connector->display_info.bpc * 3)
1936                        mode = DITHERING_MODE_DYNAMIC2X2;
1937        } else {
1938                mode = asyc->dither.mode;
1939        }
1940
1941        if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
1942                if (connector->display_info.bpc >= 8)
1943                        mode |= DITHERING_DEPTH_8BPC;
1944        } else {
1945                mode |= asyc->dither.depth;
1946        }
1947
1948        asyh->dither.enable = mode;
1949        asyh->dither.bits = mode >> 1;
1950        asyh->dither.mode = mode >> 3;
1951        asyh->set.dither = true;
1952}
1953
/* Compute the scaler input/output rectangles (asyh->view) from the
 * user mode, the adjusted (native) mode, the scaling property, and
 * optional underscan compensation.  Ratios use 13.19 fixed point
 * (the << 19 / >> 19 arithmetic below).
 */
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		/* SCALE_NONE: scan out the user mode 1:1. */
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode->vdisplay;
	asyh->view.oW = omode->hdisplay;
	asyh->view.oH = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		asyh->view.oH *= 2;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW; /* 13.19 aspect */

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			/* No explicit border: shrink width ~6% + 32 pixels. */
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		/* Fit the input aspect ratio inside the output rect. */
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
2030
/* Translate the DRM adjusted mode into the hardware raster timings
 * (asyh->mode) consumed by nv50_head_mode().  Vertical values are
 * scaled for doublescan/interlace; all blank/sync positions are
 * expressed as end-of-period counters relative to the sync start.
 */
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	u32 ilace   = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan   = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hbackp  =  mode->htotal - mode->hsync_end;
	u32 vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	u32 hfrontp =  mode->hsync_start - mode->hdisplay;
	u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	u32 blankus;
	struct nv50_head_mode *m = &asyh->mode;

	m->h.active = mode->htotal;
	m->h.synce  = mode->hsync_end - mode->hsync_start - 1;
	m->h.blanke = m->h.synce + hbackp;
	m->h.blanks = mode->htotal - hfrontp - 1;

	m->v.active = mode->vtotal * vscan / ilace;
	m->v.synce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	m->v.blanke = m->v.synce + vbackp;
	m->v.blanks = m->v.active - vfrontp - 1;

	/*XXX: Safe underestimate, even "0" works */
	/* Vertical blanking period in microseconds. */
	blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* Second-field blanking window for interlaced modes. */
		m->v.blank2e =  m->v.active + m->v.synce + vbackp;
		m->v.blank2s =  m->v.blank2e + (mode->vdisplay * vscan / ilace);
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->clock;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
	asyh->set.mode = true;
}
2075
2076static int
2077nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
2078{
2079        struct nouveau_drm *drm = nouveau_drm(crtc->dev);
2080        struct nv50_disp *disp = nv50_disp(crtc->dev);
2081        struct nv50_head *head = nv50_head(crtc);
2082        struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
2083        struct nv50_head_atom *asyh = nv50_head_atom(state);
2084        struct nouveau_conn_atom *asyc = NULL;
2085        struct drm_connector_state *conns;
2086        struct drm_connector *conn;
2087        int i;
2088
2089        NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
2090        if (asyh->state.active) {
2091                for_each_connector_in_state(asyh->state.state, conn, conns, i) {
2092                        if (conns->crtc == crtc) {
2093                                asyc = nouveau_conn_atom(conns);
2094                                break;
2095                        }
2096                }
2097
2098                if (armh->state.active) {
2099                        if (asyc) {
2100                                if (asyh->state.mode_changed)
2101                                        asyc->set.scaler = true;
2102                                if (armh->base.depth != asyh->base.depth)
2103                                        asyc->set.dither = true;
2104                        }
2105                } else {
2106                        asyc->set.mask = ~0;
2107                        asyh->set.mask = ~0;
2108                }
2109
2110                if (asyh->state.mode_changed)
2111                        nv50_head_atomic_check_mode(head, asyh);
2112
2113                if (asyc) {
2114                        if (asyc->set.scaler)
2115                                nv50_head_atomic_check_view(armh, asyh, asyc);
2116                        if (asyc->set.dither)
2117                                nv50_head_atomic_check_dither(armh, asyh, asyc);
2118                        if (asyc->set.procamp)
2119                                nv50_head_atomic_check_procamp(armh, asyh, asyc);
2120                }
2121
2122                if ((asyh->core.visible = (asyh->base.cpp != 0))) {
2123                        asyh->core.x = asyh->base.x;
2124                        asyh->core.y = asyh->base.y;
2125                        asyh->core.w = asyh->base.w;
2126                        asyh->core.h = asyh->base.h;
2127                } else
2128                if ((asyh->core.visible = asyh->curs.visible)) {
2129                        /*XXX: We need to either find some way of having the
2130                         *     primary base layer appear black, while still
2131                         *     being able to display the other layers, or we
2132                         *     need to allocate a dummy black surface here.
2133                         */
2134                        asyh->core.x = 0;
2135                        asyh->core.y = 0;
2136                        asyh->core.w = asyh->state.mode.hdisplay;
2137                        asyh->core.h = asyh->state.mode.vdisplay;
2138                }
2139                asyh->core.handle = disp->mast.base.vram.handle;
2140                asyh->core.offset = 0;
2141                asyh->core.format = 0xcf;
2142                asyh->core.kind = 0;
2143                asyh->core.layout = 1;
2144                asyh->core.block = 0;
2145                asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
2146                asyh->lut.handle = disp->mast.base.vram.handle;
2147                asyh->lut.offset = head->base.lut.nvbo->bo.offset;
2148                asyh->set.base = armh->base.cpp != asyh->base.cpp;
2149                asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
2150        } else {
2151                asyh->core.visible = false;
2152                asyh->curs.visible = false;
2153                asyh->base.cpp = 0;
2154                asyh->ovly.cpp = 0;
2155        }
2156
2157        if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
2158                if (asyh->core.visible) {
2159                        if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
2160                                asyh->set.core = true;
2161                } else
2162                if (armh->core.visible) {
2163                        asyh->clr.core = true;
2164                }
2165
2166                if (asyh->curs.visible) {
2167                        if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
2168                                asyh->set.curs = true;
2169                } else
2170                if (armh->curs.visible) {
2171                        asyh->clr.curs = true;
2172                }
2173        } else {
2174                asyh->clr.core = armh->core.visible;
2175                asyh->clr.curs = armh->curs.visible;
2176                asyh->set.core = asyh->core.visible;
2177                asyh->set.curs = asyh->curs.visible;
2178        }
2179
2180        if (asyh->clr.mask || asyh->set.mask)
2181                nv50_atom(asyh->state.state)->lock_core = true;
2182        return 0;
2183}
2184
2185static void
2186nv50_head_lut_load(struct drm_crtc *crtc)
2187{
2188        struct nv50_disp *disp = nv50_disp(crtc->dev);
2189        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
2190        void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
2191        int i;
2192
2193        for (i = 0; i < 256; i++) {
2194                u16 r = nv_crtc->lut.r[i] >> 2;
2195                u16 g = nv_crtc->lut.g[i] >> 2;
2196                u16 b = nv_crtc->lut.b[i] >> 2;
2197
2198                if (disp->disp->oclass < GF110_DISP) {
2199                        writew(r + 0x0000, lut + (i * 0x08) + 0);
2200                        writew(g + 0x0000, lut + (i * 0x08) + 2);
2201                        writew(b + 0x0000, lut + (i * 0x08) + 4);
2202                } else {
2203                        writew(r + 0x6000, lut + (i * 0x20) + 0);
2204                        writew(g + 0x6000, lut + (i * 0x20) + 2);
2205                        writew(b + 0x6000, lut + (i * 0x20) + 4);
2206                }
2207        }
2208}
2209
/* .mode_set_base_atomic hook — presumably only reachable from legacy
 * debug paths (kgdb-style entry; TODO confirm); not supported by this
 * atomic implementation, so any call indicates a bug.
 */
static int
nv50_head_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	WARN_ON(1);
	return 0;
}
2218
/* CRTC helper hooks: atomic_check does the real validation work; the
 * other two exist to satisfy legacy/fbdev helper entry points.
 */
static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.mode_set_base_atomic = nv50_head_mode_set_base_atomic,
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};
2225
/* This is identical to the version in the atomic helpers, except that
 * it supports non-vblanked ("async") page flips.
 *
 * Builds a one-plane atomic update (new fb on the primary plane, the
 * flip event on the crtc), rejects flips on disabled crtcs, and
 * commits non-blocking.  On lock contention (-EDEADLK) it drops all
 * locks via the legacy backoff path and retries from scratch.
 */
static int
nv50_head_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		    struct drm_pending_vblank_event *event, u32 flags)
{
	struct drm_plane *plane = crtc->primary;
	struct drm_atomic_state *state;
	struct drm_plane_state *plane_state;
	struct drm_crtc_state *crtc_state;
	int ret = 0;

	state = drm_atomic_state_alloc(plane->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = drm_modeset_legacy_acquire_ctx(crtc);
retry:
	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto fail;
	}
	crtc_state->event = event;

	plane_state = drm_atomic_get_plane_state(state, plane);
	if (IS_ERR(plane_state)) {
		ret = PTR_ERR(plane_state);
		goto fail;
	}

	ret = drm_atomic_set_crtc_for_plane(plane_state, crtc);
	if (ret != 0)
		goto fail;
	drm_atomic_set_fb_for_plane(plane_state, fb);

	/* Make sure we don't accidentally do a full modeset. */
	state->allow_modeset = false;
	if (!crtc_state->active) {
		DRM_DEBUG_ATOMIC("[CRTC:%d] disabled, rejecting legacy flip\n",
				 crtc->base.id);
		ret = -EINVAL;
		goto fail;
	}

	/* Async flip: interval 0 makes the window update not wait for
	 * vblank.
	 */
	if (flags & DRM_MODE_PAGE_FLIP_ASYNC)
		nv50_wndw_atom(plane_state)->interval = 0;

	ret = drm_atomic_nonblocking_commit(state);
fail:
	if (ret == -EDEADLK)
		goto backoff;

	drm_atomic_state_put(state);
	return ret;

backoff:
	drm_atomic_state_clear(state);
	drm_atomic_legacy_backoff(state);

	/*
	 * Someone might have exchanged the framebuffer while we dropped locks
	 * in the backoff code. We need to fix up the fb refcount tracking the
	 * core does for us.
	 */
	plane->old_fb = plane->fb;

	goto retry;
}
2296
2297static int
2298nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
2299                    uint32_t size)
2300{
2301        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
2302        u32 i;
2303
2304        for (i = 0; i < size; i++) {
2305                nv_crtc->lut.r[i] = r[i];
2306                nv_crtc->lut.g[i] = g[i];
2307                nv_crtc->lut.b[i] = b[i];
2308        }
2309
2310        nv50_head_lut_load(crtc);
2311        return 0;
2312}
2313
2314static void
2315nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
2316                               struct drm_crtc_state *state)
2317{
2318        struct nv50_head_atom *asyh = nv50_head_atom(state);
2319        __drm_atomic_helper_crtc_destroy_state(&asyh->state);
2320        kfree(asyh);
2321}
2322
2323static struct drm_crtc_state *
2324nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
2325{
2326        struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
2327        struct nv50_head_atom *asyh;
2328        if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
2329                return NULL;
2330        __drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
2331        asyh->view = armh->view;
2332        asyh->mode = armh->mode;
2333        asyh->lut  = armh->lut;
2334        asyh->core = armh->core;
2335        asyh->curs = armh->curs;
2336        asyh->base = armh->base;
2337        asyh->ovly = armh->ovly;
2338        asyh->dither = armh->dither;
2339        asyh->procamp = armh->procamp;
2340        asyh->clr.mask = 0;
2341        asyh->set.mask = 0;
2342        return &asyh->state;
2343}
2344
/* Install @state as the crtc's current state, destroying whatever was
 * there before.
 *
 * NOTE(review): the double-underscore name mimics DRM core helper
 * naming; it is a local static here, but leading-underscore names are
 * reserved — presumably chosen to match a helper this predates.
 */
static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}
2354
2355static void
2356nv50_head_reset(struct drm_crtc *crtc)
2357{
2358        struct nv50_head_atom *asyh;
2359
2360        if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
2361                return;
2362
2363        __drm_atomic_helper_crtc_reset(crtc, &asyh->state);
2364}
2365
/* Tear down a head created by nv50_head_create(): overlay channels,
 * the LUT buffer object (unmap/unpin/unref), and the DRM crtc itself.
 * Also serves as the error-unwind path of nv50_head_create(), so it
 * must tolerate partially-initialised heads.
 */
static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	/* NOTE(review): unmap is called without the NULL guard that
	 * protects unpin — presumably nouveau_bo_unmap(NULL) is a
	 * no-op; confirm in nouveau_bo.c.
	 */
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
2384
/* CRTC vtable: atomic helpers for set_config/set_property, a local
 * page_flip that additionally supports async flips, and nouveau
 * state duplicate/destroy/reset implementations.
 */
static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = nv50_head_page_flip,
	.set_property = drm_atomic_helper_crtc_set_property,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};
2396
/* Create head @index: base and cursor planes, the DRM crtc wrapping
 * them, the 8KiB gamma LUT buffer (VRAM, pinned and CPU-mapped), and
 * the overlay immediate/channel resources.  On any failure after crtc
 * init, nv50_head_destroy() unwinds everything.
 *
 * Returns 0 on success or a negative errno.
 */
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	/* Start with an identity (linear) gamma ramp. */
	head->base.index = index;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	/* NOTE(review): drm_crtc_init_with_planes() return value is not
	 * checked here — TODO confirm this is acceptable for this kernel
	 * version's API contract.
	 */
	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* Gamma LUT buffer: VRAM, 256-byte aligned, pinned and mapped
	 * for CPU uploads from nv50_head_lut_load().
	 */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}
2466
2467/******************************************************************************
2468 * Output path helpers
2469 *****************************************************************************/
2470static int
2471nv50_outp_atomic_check_view(struct drm_encoder *encoder,
2472                            struct drm_crtc_state *crtc_state,
2473                            struct drm_connector_state *conn_state,
2474                            struct drm_display_mode *native_mode)
2475{
2476        struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
2477        struct drm_display_mode *mode = &crtc_state->mode;
2478        struct drm_connector *connector = conn_state->connector;
2479        struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
2480        struct nouveau_drm *drm = nouveau_drm(encoder->dev);
2481
2482        NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
2483        asyc->scaler.full = false;
2484        if (!native_mode)
2485                return 0;
2486
2487        if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
2488                switch (connector->connector_type) {
2489                case DRM_MODE_CONNECTOR_LVDS:
2490                case DRM_MODE_CONNECTOR_eDP:
2491                        /* Force use of scaler for non-EDID modes. */
2492                        if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
2493                                break;
2494                        mode = native_mode;
2495                        asyc->scaler.full = true;
2496                        break;
2497                default:
2498                        break;
2499                }
2500        } else {
2501                mode = native_mode;
2502        }
2503
2504        if (!drm_mode_equal(adjusted_mode, mode)) {
2505                drm_mode_copy(adjusted_mode, mode);
2506                crtc_state->mode_changed = true;
2507        }
2508
2509        return 0;
2510}
2511
2512static int
2513nv50_outp_atomic_check(struct drm_encoder *encoder,
2514                       struct drm_crtc_state *crtc_state,
2515                       struct drm_connector_state *conn_state)
2516{
2517        struct nouveau_connector *nv_connector =
2518                nouveau_connector(conn_state->connector);
2519        return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2520                                           nv_connector->native_mode);
2521}
2522
2523/******************************************************************************
2524 * DAC
2525 *****************************************************************************/
/* Legacy DPMS hook for analog (DAC) outputs: forwards the requested power
 * level to the display firmware via NV50_DISP_MTHD_V1_DAC_PWR.  hsync and
 * vsync are kept running or stopped according to the standby/suspend
 * semantics of the DPMS mode.
 */
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_dac_pwr_v0 pwr;
        } args = {
                .base.version = 1,
                .base.method = NV50_DISP_MTHD_V1_DAC_PWR,
                .base.hasht  = nv_encoder->dcb->hasht,
                .base.hashm  = nv_encoder->dcb->hashm,
                .pwr.state = 1,
                .pwr.data  = 1,
                /* STANDBY stops vsync, SUSPEND stops hsync, OFF stops both. */
                .pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
                              mode != DRM_MODE_DPMS_OFF),
                .pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
                              mode != DRM_MODE_DPMS_OFF),
        };

        nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2549
/* Detach the DAC from its head by writing a zero owner mask into the core
 * EVO channel.  The method offset differs between the pre-GF110 core
 * channel layout and the GF110+ one.
 */
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nv50_mast *mast = nv50_mast(encoder->dev);
        const int or = nv_encoder->or;
        u32 *push;

        if (nv_encoder->crtc) {
                push = evo_wait(mast, 4);
                if (push) {
                        if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
                                evo_mthd(push, 0x0400 + (or * 0x080), 1);
                                evo_data(push, 0x00000000);
                        } else {
                                evo_mthd(push, 0x0180 + (or * 0x020), 1);
                                evo_data(push, 0x00000000);
                        }
                        evo_kick(push, mast);
                }
        }

        /* Mark the output as unbound even if the push buffer was busy. */
        nv_encoder->crtc = NULL;
}
2574
/* Bind the DAC to its CRTC in the core EVO channel, programming sync
 * polarity (and, on GF110+, an interlace flag inside an opaque "magic"
 * value) from the adjusted mode.  Layout of methods and sync bits differs
 * between pre-GF110 and GF110+ core channels.
 */
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
        struct nv50_mast *mast = nv50_mast(encoder->dev);
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
        u32 *push;

        push = evo_wait(mast, 8);
        if (push) {
                if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
                        u32 syncs = 0x00000000;

                        /* Bits 0/1: negative hsync/vsync polarity. */
                        if (mode->flags & DRM_MODE_FLAG_NHSYNC)
                                syncs |= 0x00000001;
                        if (mode->flags & DRM_MODE_FLAG_NVSYNC)
                                syncs |= 0x00000002;

                        evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
                        evo_data(push, 1 << nv_crtc->index);
                        evo_data(push, syncs);
                } else {
                        /* NOTE(review): 0x31ec6000 matches the proprietary
                         * driver's value for this method; exact bit meanings
                         * are undocumented here.
                         */
                        u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
                        u32 syncs = 0x00000001;

                        if (mode->flags & DRM_MODE_FLAG_NHSYNC)
                                syncs |= 0x00000008;
                        if (mode->flags & DRM_MODE_FLAG_NVSYNC)
                                syncs |= 0x00000010;

                        if (mode->flags & DRM_MODE_FLAG_INTERLACE)
                                magic |= 0x00000001;

                        evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
                        evo_data(push, syncs);
                        evo_data(push, magic);
                        evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
                        evo_data(push, 1 << nv_crtc->index);
                }

                evo_kick(push, mast);
        }

        /* Remember the binding so nv50_dac_disable() knows what to undo. */
        nv_encoder->crtc = encoder->crtc;
}
2621
/* Load-detect whether an analog monitor is attached to the DAC.  Uses the
 * VBIOS-provided test value when available, otherwise falls back to a
 * default of 340.  Firmware reports the sensed load in args.load.load.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_dac_load_v0 load;
        } args = {
                .base.version = 1,
                .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
                .base.hasht  = nv_encoder->dcb->hasht,
                .base.hashm  = nv_encoder->dcb->hashm,
        };
        int ret;

        args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
        if (args.load.data == 0)
                args.load.data = 340;

        ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
        if (ret || !args.load.load)
                return connector_status_disconnected;

        return connector_status_connected;
}
2648
/* Encoder helper vtable for analog (DAC) outputs. */
static const struct drm_encoder_helper_funcs
nv50_dac_help = {
        .dpms = nv50_dac_dpms,
        .atomic_check = nv50_outp_atomic_check,
        .enable = nv50_dac_enable,
        .disable = nv50_dac_disable,
        .detect = nv50_dac_detect
};
2657
/* Tear down the DRM core state for a DAC encoder and free its memory. */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
        drm_encoder_cleanup(encoder);

        kfree(encoder);
}
2664
/* Base encoder vtable for analog (DAC) outputs. */
static const struct drm_encoder_funcs
nv50_dac_func = {
        .destroy = nv50_dac_destroy,
};
2669
/* Create a DRM encoder for a DCB DAC output entry and attach it to
 * @connector.  Looks up the DDC bus for the output so EDID can be read.
 * Returns 0 on success or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
        struct nouveau_drm *drm = nouveau_drm(connector->dev);
        struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
        struct nvkm_i2c_bus *bus;
        struct nouveau_encoder *nv_encoder;
        struct drm_encoder *encoder;
        int type = DRM_MODE_ENCODER_DAC;

        nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
        if (!nv_encoder)
                return -ENOMEM;
        nv_encoder->dcb = dcbe;
        /* dcbe->or is a bitmask; convert to a 0-based output index. */
        nv_encoder->or = ffs(dcbe->or) - 1;

        bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
        if (bus)
                nv_encoder->i2c = &bus->i2c;

        encoder = to_drm_encoder(nv_encoder);
        encoder->possible_crtcs = dcbe->heads;
        encoder->possible_clones = 0;
        drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
                         "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
        drm_encoder_helper_add(encoder, &nv50_dac_help);

        drm_mode_connector_attach_encoder(connector, encoder);
        return 0;
}
2700
2701/******************************************************************************
2702 * Audio
2703 *****************************************************************************/
/* Disable HDA audio on the SOR driving @nv_crtc by sending an HDA_ELD
 * method with no ELD payload.  The head is encoded into the hashm field
 * (0x0100 << head index).
 */
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_sor_hda_eld_v0 eld;
        } args = {
                .base.version = 1,
                .base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
                .base.hasht   = nv_encoder->dcb->hasht,
                .base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
                                (0x0100 << nv_crtc->index),
        };

        nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2722
/* Enable HDA audio for the mode being set: builds an ELD from the
 * connector's EDID and passes it to firmware via HDA_ELD.  Does nothing
 * when the monitor does not advertise audio support.
 */
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        struct nouveau_connector *nv_connector;
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        /* __packed so the ELD bytes follow the method header contiguously. */
        struct __packed {
                struct {
                        struct nv50_disp_mthd_v1 mthd;
                        struct nv50_disp_sor_hda_eld_v0 eld;
                } base;
                u8 data[sizeof(nv_connector->base.eld)];
        } args = {
                .base.mthd.version = 1,
                .base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
                .base.mthd.hasht   = nv_encoder->dcb->hasht,
                .base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
                                     (0x0100 << nv_crtc->index),
        };

        nv_connector = nouveau_encoder_connector_get(nv_encoder);
        if (!drm_detect_monitor_audio(nv_connector->edid))
                return;

        drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
        memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

        /* Only transmit the populated portion of the ELD. */
        nvif_mthd(disp->disp, 0, &args,
                  sizeof(args.base) + drm_eld_size(args.data));
}
2754
2755/******************************************************************************
2756 * HDMI
2757 *****************************************************************************/
/* Disable HDMI infoframe/audio processing on the SOR driving @nv_crtc by
 * sending HDMI_PWR with pwr.state left at zero.
 */
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_sor_hdmi_pwr_v0 pwr;
        } args = {
                .base.version = 1,
                .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
                .base.hasht  = nv_encoder->dcb->hasht,
                .base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
                               (0x0100 << nv_crtc->index),
        };

        nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2776
/* Enable HDMI processing for @mode and then enable audio.  Does nothing
 * if the attached monitor is not HDMI-capable.  max_ac_packet is derived
 * from the horizontal blanking interval minus the rekey value and a
 * constant taken from the tegra driver, in units of 32 pixels.
 */
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_sor_hdmi_pwr_v0 pwr;
        } args = {
                .base.version = 1,
                .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
                .base.hasht  = nv_encoder->dcb->hasht,
                .base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
                               (0x0100 << nv_crtc->index),
                .pwr.state = 1,
                .pwr.rekey = 56, /* binary driver, and tegra, constant */
        };
        struct nouveau_connector *nv_connector;
        u32 max_ac_packet;

        nv_connector = nouveau_encoder_connector_get(nv_encoder);
        if (!drm_detect_hdmi_monitor(nv_connector->edid))
                return;

        /* Audio packets must fit into the horizontal blanking interval. */
        max_ac_packet  = mode->htotal - mode->hdisplay;
        max_ac_packet -= args.pwr.rekey;
        max_ac_packet -= 18; /* constant from tegra */
        args.pwr.max_ac_packet = max_ac_packet / 32;

        nvif_mthd(disp->disp, 0, &args, sizeof(args));
        nv50_audio_enable(encoder, mode);
}
2810
2811/******************************************************************************
2812 * MST
2813 *****************************************************************************/
/* container_of() helpers to go from the embedded DRM object back to the
 * nouveau MST wrapper structures below.
 */
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

/* MST master: per-physical-output DP MST topology state. */
struct nv50_mstm {
        struct nouveau_encoder *outp;        /* encoder driving the topology */

        struct drm_dp_mst_topology_mgr mgr;  /* DRM MST helper state */
        struct nv50_msto *msto[4];           /* stream encoders, one per head */

        bool modified;                       /* streams changed this commit */
};

/* MST connector: one per discovered downstream port. */
struct nv50_mstc {
        struct nv50_mstm *mstm;
        struct drm_dp_mst_port *port;        /* NULL once the port is gone */
        struct drm_connector connector;

        struct drm_display_mode *native;     /* cached native mode */
        struct edid *edid;

        int pbn;                             /* bandwidth for current mode */
};

/* MST stream output: fake encoder representing one stream on a head. */
struct nv50_msto {
        struct drm_encoder encoder;

        struct nv50_head *head;
        struct nv50_mstc *mstc;
        bool disabled;                       /* pending cleanup after disable */
};
2845
/* Look up the MST payload belonging to @msto's VCPI in the topology
 * manager's payload table.  Returns NULL when no payload carries that
 * VCPI.  Also logs the full payload table for atomic debugging.
 */
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
        struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
        struct nv50_mstc *mstc = msto->mstc;
        struct nv50_mstm *mstm = mstc->mstm;
        int vcpi = mstc->port->vcpi.vcpi, i;

        NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
        for (i = 0; i < mstm->mgr.max_payloads; i++) {
                struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
                NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
                          mstm->outp->base.base.name, i, payload->vcpi,
                          payload->start_slot, payload->num_slots);
        }

        for (i = 0; i < mstm->mgr.max_payloads; i++) {
                struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
                if (payload->vcpi == vcpi)
                        return payload;
        }

        return NULL;
}
2870
/* Post-commit cleanup for one MST stream: deallocates the VCPI once no
 * payload references it any more, and severs the head/connector links for
 * streams that were disabled this commit.
 */
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
        struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
        struct nv50_mstc *mstc = msto->mstc;
        struct nv50_mstm *mstm = mstc->mstm;

        NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
        if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
                drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
        if (msto->disabled) {
                msto->mstc = NULL;
                msto->head = NULL;
                msto->disabled = false;
        }
}
2887
/* Program the hardware VCPI allocation for one MST stream via the
 * SOR_DP_MST_VCPI method.  When the stream has no active VCPI the args
 * stay zeroed, which clears the allocation.
 */
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
        struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
        struct nv50_mstc *mstc = msto->mstc;
        struct nv50_mstm *mstm = mstc->mstm;
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
        } args = {
                .base.version = 1,
                .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
                .base.hasht  = mstm->outp->dcb->hasht,
                .base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
                               (0x0100 << msto->head->base.index),
        };

        NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
        if (mstc->port && mstc->port->vcpi.vcpi > 0) {
                struct drm_dp_payload *payload = nv50_msto_payload(msto);
                if (payload) {
                        args.vcpi.start_slot = payload->start_slot;
                        args.vcpi.num_slots = payload->num_slots;
                        args.vcpi.pbn = mstc->port->vcpi.pbn;
                        args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
                }
        }

        NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
                  msto->encoder.name, msto->head->base.base.name,
                  args.vcpi.start_slot, args.vcpi.num_slots,
                  args.vcpi.pbn, args.vcpi.aligned_pbn);
        nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
2922
/* Atomic check for an MST stream encoder: computes the PBN required by
 * the adjusted mode (bpp = 3 components * bpc) and verifies enough VCPI
 * slots are available before doing the common view check.
 */
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
                       struct drm_crtc_state *crtc_state,
                       struct drm_connector_state *conn_state)
{
        struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
        struct nv50_mstm *mstm = mstc->mstm;
        int bpp = conn_state->connector->display_info.bpc * 3;
        int slots;

        mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

        slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
        if (slots < 0)
                return slots;

        return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
                                           mstc->native);
}
2942
/* Enable an MST stream: finds the connector currently routed to this
 * encoder, allocates its VCPI, and asks the parent SOR to start driving
 * the head with the DP protocol and colour depth for the stream.
 */
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
        struct nv50_head *head = nv50_head(encoder->crtc);
        struct nv50_msto *msto = nv50_msto(encoder);
        struct nv50_mstc *mstc = NULL;
        struct nv50_mstm *mstm = NULL;
        struct drm_connector *connector;
        u8 proto, depth;
        int slots;
        bool r;

        /* Locate the connector whose state points at this encoder. */
        drm_for_each_connector(connector, encoder->dev) {
                if (connector->state->best_encoder == &msto->encoder) {
                        mstc = nv50_mstc(connector);
                        mstm = mstc->mstm;
                        break;
                }
        }

        if (WARN_ON(!mstc))
                return;

        r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, &slots);
        WARN_ON(!r);

        /* Protocol selected by which SOR sublink the output uses.  The
         * 0x8/0x9 values are the hardware's DP-A/DP-B protocol encodings.
         */
        if (mstm->outp->dcb->sorconf.link & 1)
                proto = 0x8;
        else
                proto = 0x9;

        switch (mstc->connector.display_info.bpc) {
        case  6: depth = 0x2; break;
        case  8: depth = 0x5; break;
        case 10:
        default: depth = 0x6; break;
        }

        mstm->outp->update(mstm->outp, head->base.index,
                           &head->base.base.state->adjusted_mode, proto, depth);

        msto->head = head;
        msto->mstc = mstc;
        mstm->modified = true;
}
2988
/* Disable an MST stream: returns its VCPI slots to the pool and asks the
 * parent SOR to stop driving the head.  Final state teardown happens
 * later in nv50_msto_cleanup() via the 'disabled' flag.
 */
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
        struct nv50_msto *msto = nv50_msto(encoder);
        struct nv50_mstc *mstc = msto->mstc;
        struct nv50_mstm *mstm = mstc->mstm;

        if (mstc->port)
                drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

        mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
        mstm->modified = true;
        msto->disabled = true;
}
3003
/* Encoder helper vtable for MST stream encoders. */
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
        .disable = nv50_msto_disable,
        .enable = nv50_msto_enable,
        .atomic_check = nv50_msto_atomic_check,
};
3010
3011static void
3012nv50_msto_destroy(struct drm_encoder *encoder)
3013{
3014        struct nv50_msto *msto = nv50_msto(encoder);
3015        drm_encoder_cleanup(&msto->encoder);
3016        kfree(msto);
3017}
3018
/* Base encoder vtable for MST stream encoders. */
static const struct drm_encoder_funcs
nv50_msto = {
        .destroy = nv50_msto_destroy,
};
3023
3024static int
3025nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
3026              struct nv50_msto **pmsto)
3027{
3028        struct nv50_msto *msto;
3029        int ret;
3030
3031        if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
3032                return -ENOMEM;
3033
3034        ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
3035                               DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
3036        if (ret) {
3037                kfree(*pmsto);
3038                *pmsto = NULL;
3039                return ret;
3040        }
3041
3042        drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
3043        msto->encoder.possible_crtcs = heads;
3044        return 0;
3045}
3046
3047static struct drm_encoder *
3048nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
3049                              struct drm_connector_state *connector_state)
3050{
3051        struct nv50_head *head = nv50_head(connector_state->crtc);
3052        struct nv50_mstc *mstc = nv50_mstc(connector);
3053        if (mstc->port) {
3054                struct nv50_mstm *mstm = mstc->mstm;
3055                return &mstm->msto[head->base.index]->encoder;
3056        }
3057        return NULL;
3058}
3059
3060static struct drm_encoder *
3061nv50_mstc_best_encoder(struct drm_connector *connector)
3062{
3063        struct nv50_mstc *mstc = nv50_mstc(connector);
3064        if (mstc->port) {
3065                struct nv50_mstm *mstm = mstc->mstm;
3066                return &mstm->msto[0]->encoder;
3067        }
3068        return NULL;
3069}
3070
3071static enum drm_mode_status
3072nv50_mstc_mode_valid(struct drm_connector *connector,
3073                     struct drm_display_mode *mode)
3074{
3075        return MODE_OK;
3076}
3077
/* Fetch EDID over the MST topology, publish it on the connector, add its
 * modes, and refresh the cached native mode.  Defaults display bpc to 8
 * when the EDID does not specify one.  Returns the number of modes added.
 */
static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
        struct nv50_mstc *mstc = nv50_mstc(connector);
        int ret = 0;

        mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
        drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
        if (mstc->edid) {
                ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
                drm_edid_to_eld(&mstc->connector, mstc->edid);
        }

        if (!mstc->connector.display_info.bpc)
                mstc->connector.display_info.bpc = 8;

        /* Replace any previously cached native mode. */
        if (mstc->native)
                drm_mode_destroy(mstc->connector.dev, mstc->native);
        mstc->native = nouveau_conn_native_mode(&mstc->connector);
        return ret;
}
3099
/* Connector helper vtable for MST connectors. */
static const struct drm_connector_helper_funcs
nv50_mstc_help = {
        .get_modes = nv50_mstc_get_modes,
        .mode_valid = nv50_mstc_mode_valid,
        .best_encoder = nv50_mstc_best_encoder,
        .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};
3107
3108static enum drm_connector_status
3109nv50_mstc_detect(struct drm_connector *connector, bool force)
3110{
3111        struct nv50_mstc *mstc = nv50_mstc(connector);
3112        if (!mstc->port)
3113                return connector_status_disconnected;
3114        return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
3115}
3116
3117static void
3118nv50_mstc_destroy(struct drm_connector *connector)
3119{
3120        struct nv50_mstc *mstc = nv50_mstc(connector);
3121        drm_connector_cleanup(&mstc->connector);
3122        kfree(mstc);
3123}
3124
/* Connector vtable for MST connectors; atomic state handling is shared
 * with the regular nouveau connectors.
 */
static const struct drm_connector_funcs
nv50_mstc = {
        .dpms = drm_atomic_helper_connector_dpms,
        .reset = nouveau_conn_reset,
        .detect = nv50_mstc_detect,
        .fill_modes = drm_helper_probe_single_connector_modes,
        .set_property = drm_atomic_helper_connector_set_property,
        .destroy = nv50_mstc_destroy,
        .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
        .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
        .atomic_set_property = nouveau_conn_atomic_set_property,
        .atomic_get_property = nouveau_conn_atomic_get_property,
};
3138
3139static int
3140nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
3141              const char *path, struct nv50_mstc **pmstc)
3142{
3143        struct drm_device *dev = mstm->outp->base.base.dev;
3144        struct nv50_mstc *mstc;
3145        int ret, i;
3146
3147        if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
3148                return -ENOMEM;
3149        mstc->mstm = mstm;
3150        mstc->port = port;
3151
3152        ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
3153                                 DRM_MODE_CONNECTOR_DisplayPort);
3154        if (ret) {
3155                kfree(*pmstc);
3156                *pmstc = NULL;
3157                return ret;
3158        }
3159
3160        drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
3161
3162        mstc->connector.funcs->reset(&mstc->connector);
3163        nouveau_conn_attach_properties(&mstc->connector);
3164
3165        for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto; i++)
3166                drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);
3167
3168        drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
3169        drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
3170        drm_mode_connector_set_path_property(&mstc->connector, path);
3171        return 0;
3172}
3173
/* Post-commit MST topology cleanup: waits for the allocation-change ACT
 * handshake, sends the second part of the payload update, then cleans up
 * every stream belonging to this topology.
 */
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
        struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
        struct drm_encoder *encoder;
        int ret;

        NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
        /* NOTE(review): both return values are currently ignored. */
        ret = drm_dp_check_act_status(&mstm->mgr);

        ret = drm_dp_update_payload_part2(&mstm->mgr);

        drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
                if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
                        struct nv50_msto *msto = nv50_msto(encoder);
                        struct nv50_mstc *mstc = msto->mstc;
                        if (mstc && mstc->mstm == mstm)
                                nv50_msto_cleanup(msto);
                }
        }

        mstm->modified = false;
}
3197
/* Pre-commit MST topology programming: sends the first part of the
 * payload table update, then programs the hardware VCPI for every stream
 * belonging to this topology.
 */
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
        struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
        struct drm_encoder *encoder;
        int ret;

        NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
        /* NOTE(review): the return value is currently ignored. */
        ret = drm_dp_update_payload_part1(&mstm->mgr);

        drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
                if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
                        struct nv50_msto *msto = nv50_msto(encoder);
                        struct nv50_mstc *mstc = msto->mstc;
                        if (mstc && mstc->mstm == mstm)
                                nv50_msto_prepare(msto);
                }
        }
}
3217
3218static void
3219nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
3220{
3221        struct nv50_mstm *mstm = nv50_mstm(mgr);
3222        drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
3223}
3224
/* MST topology callback for a removed port: unregister the connector,
 * detach it from fbcon, clear its port pointer under the modeset locks so
 * detect/best_encoder see it as gone, and drop the final reference.
 */
static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
                            struct drm_connector *connector)
{
        struct nouveau_drm *drm = nouveau_drm(connector->dev);
        struct nv50_mstc *mstc = nv50_mstc(connector);

        drm_connector_unregister(&mstc->connector);

        drm_modeset_lock_all(drm->dev);
        drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
        mstc->port = NULL;
        drm_modeset_unlock_all(drm->dev);

        drm_connector_unreference(&mstc->connector);
}
3241
3242static void
3243nv50_mstm_register_connector(struct drm_connector *connector)
3244{
3245        struct nouveau_drm *drm = nouveau_drm(connector->dev);
3246
3247        drm_modeset_lock_all(drm->dev);
3248        drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
3249        drm_modeset_unlock_all(drm->dev);
3250
3251        drm_connector_register(connector);
3252}
3253
3254static struct drm_connector *
3255nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
3256                        struct drm_dp_mst_port *port, const char *path)
3257{
3258        struct nv50_mstm *mstm = nv50_mstm(mgr);
3259        struct nv50_mstc *mstc;
3260        int ret;
3261
3262        ret = nv50_mstc_new(mstm, port, path, &mstc);
3263        if (ret) {
3264                if (mstc)
3265                        mstc->connector.funcs->destroy(&mstc->connector);
3266                return NULL;
3267        }
3268
3269        return &mstc->connector;
3270}
3271
/* Callbacks the DRM MST helper invokes on topology changes. */
static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
        .add_connector = nv50_mstm_add_connector,
        .register_connector = nv50_mstm_register_connector,
        .destroy_connector = nv50_mstm_destroy_connector,
        .hotplug = nv50_mstm_hotplug,
};
3279
/* Service an MST sink interrupt.
 *
 * Reads the DP_SINK_COUNT_ESI block over AUX, hands it to the topology
 * manager, and acks the serviced events back to the sink; loops while
 * the manager reports it handled something.  If the ESI read fails the
 * link is considered dead and MST is torn down.
 *
 * NOTE(review): the return value of the acking drm_dp_dpcd_write() is
 * ignored — presumably a failed ack is recovered by a subsequent IRQ;
 * confirm.
 */
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			/* AUX failure: assume the sink is gone. */
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		/* Ack the serviced events (DEVICE_SERVICE_IRQ..LINK_SERVICE_IRQ). */
		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}
3302
3303void
3304nv50_mstm_remove(struct nv50_mstm *mstm)
3305{
3306        if (mstm)
3307                drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
3308}
3309
/* Switch MST on/off on both the sink and the hardware.
 *
 * @dpcd:  the sink's DPCD revision byte; the DP_MSTM_CTRL write is only
 *         attempted on DP 1.2+ sinks (rev >= 0x12).  The parameter is
 *         reused as scratch for the control-register readback.
 * @state: nonzero to enable MST, zero to disable.
 *
 * Returns 0 on success or a negative error code from the AUX
 * transactions / NVIF method call.
 */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		/* Read-modify-write DP_MSTM_CTRL to flip only DP_MST_EN. */
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	/* Tell the display core to (un)configure the SOR for MST. */
	return nvif_mthd(disp, 0, &args, sizeof(args));
}
3344
/* Probe for MST capability and bring the topology up/down.
 *
 * @dpcd:  DPCD receiver-capability bytes; dpcd[0] is the revision.
 *         dpcd[1] is (re)read here with DP_MSTM_CAP, and dpcd[0] is
 *         demoted to 0x11 when the sink lacks MST support, so callers
 *         afterwards treat the sink as a plain DP 1.1 device.
 * @allow: nonzero if the caller permits MST on this connector.
 *
 * Returns the resulting mst_state (nonzero if MST is now active),
 * 0 for non-MST encoders, or a negative error code.
 */
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		/* Topology bring-up failed: undo the hardware/sink enable. */
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}
3374
3375static void
3376nv50_mstm_fini(struct nv50_mstm *mstm)
3377{
3378        if (mstm && mstm->mgr.mst_state)
3379                drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
3380}
3381
3382static void
3383nv50_mstm_init(struct nv50_mstm *mstm)
3384{
3385        if (mstm && mstm->mgr.mst_state)
3386                drm_dp_mst_topology_mgr_resume(&mstm->mgr);
3387}
3388
3389static void
3390nv50_mstm_del(struct nv50_mstm **pmstm)
3391{
3392        struct nv50_mstm *mstm = *pmstm;
3393        if (mstm) {
3394                kfree(*pmstm);
3395                *pmstm = NULL;
3396        }
3397}
3398
/* Allocate and initialize MST state for a DP encoder.
 *
 * Stores the new object through @pmstm before any fallible step so the
 * caller's nv50_mstm_del() can clean up on error.  One MST stream
 * encoder (msto) is created per head the DCB says this output can
 * drive.
 *
 * NOTE(review): the error returns after allocation leave *pmstm set;
 * presumably the caller's destroy path frees it — confirm against
 * nv50_sor_destroy()/nv50_mstm_del().
 */
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev->dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	/* One stream encoder per potential head/payload. */
	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
3438
3439/******************************************************************************
3440 * SOR
3441 *****************************************************************************/
/* Legacy DPMS for SOR outputs: forward an on/off power request to the
 * display core via the SOR_PWR method.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		/* Anything other than DPMS_ON is treated as off. */
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
3460
/* Program the SOR control word for @head via the core EVO channel.
 *
 * @mode == NULL detaches the head: its owner bit is cleared and, once
 * no head owns the SOR (low nibble empty), the whole control word is
 * zeroed.  Otherwise the protocol and owner bits are OR'd in.  Sync
 * polarity and color depth live in the control word itself on pre-
 * GF110 cores, and in a separate per-head method (0x0404) on GF110+.
 */
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		/* Low nibble = head-owner mask; SOR fully idle when empty. */
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			/* Pre-GF110: sync flags and depth are part of the
			 * SOR control word.
			 */
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			/* GF110+: per-head sync/depth method, separate from
			 * the SOR control method.
			 */
			if (mode) {
				/* NOTE(review): 0x31ec6000 is an undocumented
				 * per-head configuration constant here.
				 */
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}
3508
/* Atomic disable for SOR outputs.
 *
 * If the encoder was driving a CRTC: put a DP sink into power-down
 * (D3) over AUX (best effort — read/modify/write of DP_SET_POWER),
 * detach the head from the SOR, and shut down audio/HDMI infoframes.
 */
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		/* mode == NULL detaches the head (see nv50_sor_update). */
		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
	}
}
3535
/* Atomic enable for SOR outputs (TMDS/LVDS/DP).
 *
 * Chooses the SOR protocol and color depth from the DCB output type
 * and the connector's display info, runs the VBIOS LVDS script where
 * applicable, and finally attaches the head via nv_encoder->update()
 * (nv50_sor_update).
 */
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* proto 0x1 = single-link on link A, 0x2 = link B,
		 * |= 0x4 upgrades link A to dual-link.
		 */
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Script flags: 0x0100 = dual-link panel, 0x0200 = 24-bit. */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* NOTE(review): EDID byte 121 presumably holds
				 * the SPWG link-count field — confirm.
				 */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			/* Strap bits select 24-bit per link configuration. */
			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* Hardware depth codes: 0x2=18bpp, 0x5=24bpp, 0x6=30bpp. */
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		/* proto 0x8 = DP on link A, 0x9 = link B. */
		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		/* DCB should never hand a non-SOR type to this encoder. */
		BUG_ON(1);
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}
3636
/* Helper vtable for SOR encoders (atomic enable/disable + legacy DPMS). */
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.dpms = nv50_sor_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};
3644
3645static void
3646nv50_sor_destroy(struct drm_encoder *encoder)
3647{
3648        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3649        nv50_mstm_del(&nv_encoder->dp.mstm);
3650        drm_encoder_cleanup(encoder);
3651        kfree(encoder);
3652}
3653
/* Core vtable for SOR encoders. */
static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
3658
/* Create a SOR encoder for @dcbe and attach it to @connector.
 *
 * Looks up the AUX channel (DP) or I2C bus (TMDS/LVDS) named by the
 * DCB entry, and for DP outputs on GF110+ display classes also sets up
 * MST support.  Returns 0 or a negative error code.
 *
 * NOTE(review): on nv50_mstm_new() failure the encoder has already
 * been initialized/attached — presumably nv50_sor_destroy() frees it
 * during mode-config teardown; confirm.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a one-hot mask; convert to an OR index. */
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
3719
3720/******************************************************************************
3721 * PIOR
3722 *****************************************************************************/
/* Legacy DPMS for PIOR (external encoder) outputs: forward an on/off
 * power request to the display core via the PIOR_PWR method.
 */
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		/* Anything other than DPMS_ON is treated as off. */
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
3742
3743static int
3744nv50_pior_atomic_check(struct drm_encoder *encoder,
3745                       struct drm_crtc_state *crtc_state,
3746                       struct drm_connector_state *conn_state)
3747{
3748        int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
3749        if (ret)
3750                return ret;
3751        crtc_state->adjusted_mode.clock *= 2;
3752        return 0;
3753}
3754
/* Atomic disable for PIOR outputs: zero the PIOR control method on the
 * core channel (pre-GF110 only — GF110+ has no PIORs programmed here)
 * and mark the encoder as detached.
 */
static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
3776
3777static void
3778nv50_pior_enable(struct drm_encoder *encoder)
3779{
3780        struct nv50_mast *mast = nv50_mast(encoder->dev);
3781        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3782        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
3783        struct nouveau_connector *nv_connector;
3784        struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
3785        u8 owner = 1 << nv_crtc->index;
3786        u8 proto, depth;
3787        u32 *push;
3788
3789        nv_connector = nouveau_encoder_connector_get(nv_encoder);
3790        switch (nv_connector->base.display_info.bpc) {
3791        case 10: depth = 0x6; break;
3792        case  8: depth = 0x5; break;
3793        case  6: depth = 0x2; break;
3794        default: depth = 0x0; break;
3795        }
3796
3797        switch (nv_encoder->dcb->type) {
3798        case DCB_OUTPUT_TMDS:
3799        case DCB_OUTPUT_DP:
3800                proto = 0x0;
3801                break;
3802        default:
3803                BUG_ON(1);
3804                break;
3805        }
3806
3807        push = evo_wait(mast, 8);
3808        if (push) {
3809                if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
3810                        u32 ctrl = (depth << 16) | (proto << 8) | owner;
3811                        if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3812                                ctrl |= 0x00001000;
3813                        if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3814                                ctrl |= 0x00002000;
3815                        evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
3816                        evo_data(push, ctrl);
3817                }
3818
3819                evo_kick(push, mast);
3820        }
3821
3822        nv_encoder->crtc = encoder->crtc;
3823}
3824
/* Helper vtable for PIOR encoders (atomic enable/disable + legacy DPMS). */
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.dpms = nv50_pior_dpms,
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};
3832
/* Encoder destroy: release DRM state, then the nouveau_encoder the
 * drm_encoder is embedded in.
 */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
3839
/* Core vtable for PIOR encoders. */
static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
3844
/* Create a PIOR encoder for @dcbe and attach it to @connector.
 *
 * External encoders sit on an external I2C bus (TMDS) or AUX channel
 * (DP) identified by dcbe->extdev.  Returns 0, -ENOMEM, or -ENODEV
 * for DCB types a PIOR cannot drive.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		/* DP-over-PIOR still reports as a TMDS encoder to DRM. */
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a one-hot mask; convert to an OR index. */
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
3890
3891/******************************************************************************
3892 * Atomic
3893 *****************************************************************************/
3894
/* Submit a core-channel UPDATE and wait for the hardware to complete it.
 *
 * @interlock: channel-interlock mask passed to the 0x0080 UPDATE method
 * so satellite channels flip atomically with the core update.
 *
 * MST payload allocations must be prepared before, and cleaned up
 * after, the core update — hence the two encoder walks bracketing the
 * push.  Completion is detected by the core channel writing a nonzero
 * value into the sync buffer notifier (polled for up to 2s).
 */
static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		/* 0x0084 = notifier control, 0x0080 = UPDATE + interlock. */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		/* Clear the completion notifier before kicking. */
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
3938
/* Commit-tail: apply an atomic state to the hardware.
 *
 * Sequence: disable heads -> disable planes -> disable output paths
 * (flushing early if an output requires it) -> flush disables ->
 * enable output paths -> program heads -> program planes -> flush the
 * update -> wait for completion -> deliver vblank events.  The order
 * is hardware-mandated; do not reorder the phases.
 *
 * interlock_core/interlock_chan accumulate "something changed" flags:
 * core nonzero means a core-channel UPDATE is needed, chan is the
 * satellite-channel interlock mask handed to the core update.
 */
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			/* Some outputs need their disable flushed to the
			 * hardware before anything else can proceed.
			 */
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		/* outp state is single-use; free it as we go. */
		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}
	}

	/* Hold a vblank reference for each CRTC with a pending event,
	 * released when the event is sent below.
	 */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event)
			drm_crtc_vblank_get(crtc);
	}

	/* Update plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			/* Cursor-only update: a bare UPDATE without the full
			 * core commit/wait keeps cursor motion cheap.
			 */
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			drm_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			crtc->state->event = NULL;
			drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
4120
/* Worker for nonblocking atomic commits: recovers the atomic state from
 * its embedded work item and runs the commit tail outside the caller's
 * context (queued on system_unbound_wq by nv50_disp_atomic_commit()).
 */
static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}
4128
/* Atomic commit entry point (drm_mode_config_funcs.atomic_commit).
 *
 * Prepares planes, arms a flip-completion notifier per updated plane,
 * swaps in the new state, then either queues the commit tail to a
 * worker (nonblock) or runs it synchronously.  Also maintains a
 * long-lived runtime PM reference that is held for as long as any head
 * is enabled.
 *
 * Returns 0 on success, negative errno on failure.
 */
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	/* Wake the device for the duration of the commit; -EACCES
	 * (runtime PM disabled) is tolerated here.
	 */
	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto done;
	}

	/* For each plane whose image is changing, point its notifier at a
	 * slot in the shared sync buffer and clear that slot; the commit
	 * tail waits on it to know the HW latched the flip.  wndw->ntfy
	 * toggles between two slots (offset ^ 0x10) so the next flip never
	 * reuses a possibly-pending notifier.
	 */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	/* If a head just became (or remains) enabled, hold a runtime PM
	 * reference for as long as the display is active.  The early
	 * return deliberately skips the put at "done" so the reference
	 * taken above is kept until the last head is disabled.
	 */
	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return ret;
			}
			active = true;
			break;
		}
	}

	/* All heads now off: drop the long-lived display reference. */
	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
4202
4203static struct nv50_outp_atom *
4204nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
4205{
4206        struct nv50_outp_atom *outp;
4207
4208        list_for_each_entry(outp, &atom->outp, head) {
4209                if (outp->encoder == encoder)
4210                        return outp;
4211        }
4212
4213        outp = kzalloc(sizeof(*outp), GFP_KERNEL);
4214        if (!outp)
4215                return ERR_PTR(-ENOMEM);
4216
4217        list_add(&outp->head, &atom->outp);
4218        outp->encoder = encoder;
4219        return outp;
4220}
4221
/* Record that @connector's currently-programmed encoder must be torn
 * down (its ctrl method cleared) as part of this atomic update.
 *
 * NOTE: this deliberately reads connector->state / crtc->state — i.e.
 * the state currently applied to hardware, since this runs during the
 * check phase, before the state swap — while needs_modeset() is
 * evaluated on the new CRTC state fetched from @atom.
 *
 * Returns 0 on success, negative errno on allocation failure.
 */
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector *connector)
{
	struct drm_encoder *encoder = connector->state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	/* Nothing to tear down if the connector isn't driving a head. */
	if (!(crtc = connector->state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		/* MST encoder disables must be flushed ahead of the core
		 * channel update (honoured via atom->flush_disable in the
		 * commit tail).
		 */
		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
4250
4251static int
4252nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
4253                                struct drm_connector_state *connector_state)
4254{
4255        struct drm_encoder *encoder = connector_state->best_encoder;
4256        struct drm_crtc_state *crtc_state;
4257        struct drm_crtc *crtc;
4258        struct nv50_outp_atom *outp;
4259
4260        if (!(crtc = connector_state->crtc))
4261                return 0;
4262
4263        crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
4264        if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
4265                outp = nv50_disp_outp_atomic_add(atom, encoder);
4266                if (IS_ERR(outp))
4267                        return PTR_ERR(outp);
4268
4269                outp->set.ctrl = true;
4270                atom->lock_core = true;
4271        }
4272
4273        return 0;
4274}
4275
4276static int
4277nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
4278{
4279        struct nv50_atom *atom = nv50_atom(state);
4280        struct drm_connector_state *connector_state;
4281        struct drm_connector *connector;
4282        int ret, i;
4283
4284        ret = drm_atomic_helper_check(dev, state);
4285        if (ret)
4286                return ret;
4287
4288        for_each_connector_in_state(state, connector, connector_state, i) {
4289                ret = nv50_disp_outp_atomic_check_clr(atom, connector);
4290                if (ret)
4291                        return ret;
4292
4293                ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
4294                if (ret)
4295                        return ret;
4296        }
4297
4298        return 0;
4299}
4300
4301static void
4302nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
4303{
4304        struct nv50_atom *atom = nv50_atom(state);
4305        struct nv50_outp_atom *outp, *outt;
4306
4307        list_for_each_entry_safe(outp, outt, &atom->outp, head) {
4308                list_del(&outp->head);
4309                kfree(outp);
4310        }
4311
4312        drm_atomic_state_default_clear(state);
4313}
4314
/* Atomic state-free hook: release the base DRM state, then the
 * enclosing nv50_atom wrapper it is embedded in.
 */
static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);

	drm_atomic_state_default_release(state);
	kfree(atom);
}
4322
4323static struct drm_atomic_state *
4324nv50_disp_atomic_state_alloc(struct drm_device *dev)
4325{
4326        struct nv50_atom *atom;
4327        if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
4328            drm_atomic_state_init(dev, &atom->state) < 0) {
4329                kfree(atom);
4330                return NULL;
4331        }
4332        INIT_LIST_HEAD(&atom->outp);
4333        return &atom->state;
4334}
4335
/* Mode-config vtable: routes DRM core framebuffer and atomic operations
 * to the NV50+ implementations above.
 */
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
4346
4347/******************************************************************************
4348 * Init
4349 *****************************************************************************/
4350
4351void
4352nv50_display_fini(struct drm_device *dev)
4353{
4354        struct nouveau_encoder *nv_encoder;
4355        struct drm_encoder *encoder;
4356        struct drm_plane *plane;
4357
4358        drm_for_each_plane(plane, dev) {
4359                struct nv50_wndw *wndw = nv50_wndw(plane);
4360                if (plane->funcs != &nv50_wndw)
4361                        continue;
4362                nv50_wndw_fini(wndw);
4363        }
4364
4365        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4366                if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4367                        nv_encoder = nouveau_encoder(encoder);
4368                        nv50_mstm_fini(nv_encoder->dp.mstm);
4369                }
4370        }
4371}
4372
/* Bring the display up (driver load / resume): rebind the core EVO
 * channel's sync buffer, re-enable encoders and DP MST, reload head
 * LUTs, and re-initialize every window channel.
 *
 * Returns 0 on success, -EBUSY if the core channel has no push space.
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	/* Point the core channel at its sync buffer handle (method 0x0088). */
	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* DPMST encoders are virtual; only real encoders carry
		 * nouveau_encoder state and an MST manager.
		 */
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			const struct drm_encoder_helper_funcs *help;
			struct nouveau_encoder *nv_encoder;

			nv_encoder = nouveau_encoder(encoder);
			help = encoder->helper_private;
			if (help && help->dpms)
				help->dpms(encoder, DRM_MODE_DPMS_ON);

			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	/* Restore each head's gamma/lookup table. */
	drm_for_each_crtc(crtc, dev) {
		nv50_head_lut_load(crtc);
	}

	/* Restart only planes backed by our window implementation. */
	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}
4416
/* Tear down the display state created by nv50_display_create(): destroy
 * the master EVO channel, release the shared sync buffer, and free the
 * nv50_disp container.
 */
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	/* unmap is called unconditionally (assumed NULL-safe); unpin is
	 * explicitly guarded since disp->sync may have failed creation.
	 */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
4432
/* Atomic modesetting ioctl is opt-in: only exposed with nouveau.atomic=1
 * (read-only parameter, perms 0400 — set at module load).
 */
MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
4436
/* Construct the NV50+ display: allocate nv50_disp, create the shared
 * sync buffer and master EVO channel, instantiate one CRTC per hardware
 * head, and build encoders/connectors from the VBIOS DCB table.
 *
 * Returns 0 on success, negative errno on failure (partially-created
 * state is unwound via nv50_display_destroy()).
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	/* Hook this implementation into the generic nouveau display layer. */
	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			      &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			/* Off-chip outputs go through a PIOR. */
			ret = nv50_pior_create(connector, dcbe);
		}

		/* Encoder creation failures are non-fatal: warn and keep
		 * bringing up the rest of the outputs.
		 */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				     dcbe->location, dcbe->type,
				     ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}
4545