linux/drivers/gpu/drm/nouveau/dispnv50/head.c
/*
 * Copyright 2018 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include "head.h"
#include "base.h"
#include "core.h"
#include "curs.h"
#include "ovly.h"

#include <nvif/class.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include "nouveau_connector.h"

void
nv50_head_flush_clr(struct nv50_head *head,
                    struct nv50_head_atom *asyh, bool flush)
{
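        /* Unless a full flush is being forced, don't bother clearing state
         * that nv50_head_flush_set() is about to program again anyway.
         */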
        union nv50_head_atom_mask clr = {
                .mask = asyh->clr.mask & ~(flush ? 0 : asyh->set.mask),
        };
        if (clr.olut) head->func->olut_clr(head);
        if (clr.core) head->func->core_clr(head);
        if (clr.curs) head->func->curs_clr(head);
}

void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
        if (asyh->set.view   ) head->func->view    (head, asyh);
        if (asyh->set.mode   ) head->func->mode    (head, asyh);
        if (asyh->set.core   ) head->func->core_set(head, asyh);
        if (asyh->set.olut   ) {
                asyh->olut.offset = nv50_lut_load(&head->olut,
                                                  asyh->olut.buffer,
                                                  asyh->state.gamma_lut,
                                                  asyh->olut.load);
                head->func->olut_set(head, asyh);
        }
        if (asyh->set.curs   ) head->func->curs_set(head, asyh);
        if (asyh->set.base   ) head->func->base    (head, asyh);
        if (asyh->set.ovly   ) head->func->ovly    (head, asyh);
        if (asyh->set.dither ) head->func->dither  (head, asyh);
        if (asyh->set.procamp) head->func->procamp (head, asyh);
        if (asyh->set.or     ) head->func->or      (head, asyh);
}

static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
                               struct nv50_head_atom *asyh,
                               struct nouveau_conn_atom *asyc)
{
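        /* Map the connector's vibrance/hue properties (neutral at 100 and 90
         * respectively) onto the hardware's signed 12-bit saturation
         * cosine/sine coefficients.
         */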
        const int vib = asyc->procamp.color_vibrance - 100;
        const int hue = asyc->procamp.vibrant_hue - 90;
        const int adj = (vib > 0) ? 50 : 0;
        asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
        asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
        asyh->set.procamp = true;
}

static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
                              struct nv50_head_atom *asyh,
                              struct nouveau_conn_atom *asyc)
{
        struct drm_connector *connector = asyc->state.connector;
        u32 mode = 0x00;

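        /* With dithering set to AUTO, only enable it when the framebuffer
         * carries more bits per pixel than the sink can actually display.
         */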
        if (asyc->dither.mode == DITHERING_MODE_AUTO) {
                if (asyh->base.depth > connector->display_info.bpc * 3)
                        mode = DITHERING_MODE_DYNAMIC2X2;
        } else {
                mode = asyc->dither.mode;
        }

        if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
                if (connector->display_info.bpc >= 8)
                        mode |= DITHERING_DEPTH_8BPC;
        } else {
                mode |= asyc->dither.depth;
        }

        asyh->dither.enable = mode;
        asyh->dither.bits = mode >> 1;
        asyh->dither.mode = mode >> 3;
        asyh->set.dither = true;
}

static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
                            struct nv50_head_atom *asyh,
                            struct nouveau_conn_atom *asyc)
{
        struct drm_connector *connector = asyc->state.connector;
        struct drm_display_mode *omode = &asyh->state.adjusted_mode;
        struct drm_display_mode *umode = &asyh->state.mode;
        int mode = asyc->scaler.mode;
        struct edid *edid;
        int umode_vdisplay, omode_hdisplay, omode_vdisplay;

        if (connector->edid_blob_ptr)
                edid = (struct edid *)connector->edid_blob_ptr->data;
        else
                edid = NULL;

        if (!asyc->scaler.full) {
                if (mode == DRM_MODE_SCALE_NONE)
                        omode = umode;
        } else {
                /* Non-EDID LVDS/eDP mode. */
                mode = DRM_MODE_SCALE_FULLSCREEN;
        }

        /* For the user-specified mode, we must ignore doublescan and
         * the like, but honor frame packing.
         */
        umode_vdisplay = umode->vdisplay;
        if ((umode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
                umode_vdisplay += umode->vtotal;
        asyh->view.iW = umode->hdisplay;
        asyh->view.iH = umode_vdisplay;
        /* For the output mode, we can just use the stock helper. */
        drm_mode_get_hv_timing(omode, &omode_hdisplay, &omode_vdisplay);
        asyh->view.oW = omode_hdisplay;
        asyh->view.oH = omode_vdisplay;

        /* Add overscan compensation if necessary, will keep the aspect
         * ratio the same as the backend mode unless overridden by the
         * user setting both hborder and vborder properties.
         */
        if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
            (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
             drm_detect_hdmi_monitor(edid)))) {
                u32 bX = asyc->scaler.underscan.hborder;
                u32 bY = asyc->scaler.underscan.vborder;
                u32 r = (asyh->view.oH << 19) / asyh->view.oW;

                if (bX) {
                        asyh->view.oW -= (bX * 2);
                        if (bY) asyh->view.oH -= (bY * 2);
                        else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
                } else {
                        asyh->view.oW -= (asyh->view.oW >> 4) + 32;
                        if (bY) asyh->view.oH -= (bY * 2);
                        else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
                }
        }

        /* Handle CENTER/ASPECT scaling, taking into account the areas
         * removed already for overscan compensation.
         */
        switch (mode) {
        case DRM_MODE_SCALE_CENTER:
                asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
                asyh->view.oH = min((u16)umode_vdisplay, asyh->view.oH);
                /* fall-through */
        case DRM_MODE_SCALE_ASPECT:
                if (asyh->view.oH < asyh->view.oW) {
                        u32 r = (asyh->view.iW << 19) / asyh->view.iH;
                        asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
                } else {
                        u32 r = (asyh->view.iH << 19) / asyh->view.iW;
                        asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
                }
                break;
        default:
                break;
        }

        asyh->set.view = true;
}

static int
nv50_head_atomic_check_lut(struct nv50_head *head,
                           struct nv50_head_atom *asyh)
{
        struct nv50_disp *disp = nv50_disp(head->base.base.dev);
        struct drm_property_blob *olut = asyh->state.gamma_lut;

        /* Determine whether core output LUT should be enabled. */
        if (olut) {
                /* Check if any window(s) have stolen the core output LUT
                 * to use as an input LUT for legacy gamma + I8 colour format.
                 */
                if (asyh->wndw.olut) {
                        /* If any window has stolen the core output LUT,
                         * all of them must.
                         */
                        if (asyh->wndw.olut != asyh->wndw.mask)
                                return -EINVAL;
                        olut = NULL;
                }
        }

        if (!olut && !head->func->olut_identity) {
                asyh->olut.handle = 0;
                return 0;
        }

        asyh->olut.handle = disp->core->chan.vram.handle;
        asyh->olut.buffer = !asyh->olut.buffer;
        head->func->olut(head, asyh);
        return 0;
}

static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
        struct drm_display_mode *mode = &asyh->state.adjusted_mode;
        struct nv50_head_mode *m = &asyh->mode;
        u32 blankus;

        drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V | CRTC_STEREO_DOUBLE);

        /*
         * DRM modes are defined in terms of a repeating interval
         * starting with the active display area.  The hardware modes
         * are defined in terms of a repeating interval starting one
         * unit (pixel or line) into the sync pulse.  So, add bias.
         */

        m->h.active = mode->crtc_htotal;
        m->h.synce  = mode->crtc_hsync_end - mode->crtc_hsync_start - 1;
        m->h.blanke = mode->crtc_hblank_end - mode->crtc_hsync_start - 1;
        m->h.blanks = m->h.blanke + mode->crtc_hdisplay;

        m->v.active = mode->crtc_vtotal;
        m->v.synce  = mode->crtc_vsync_end - mode->crtc_vsync_start - 1;
        m->v.blanke = mode->crtc_vblank_end - mode->crtc_vsync_start - 1;
        m->v.blanks = m->v.blanke + mode->crtc_vdisplay;

        /*XXX: Safe underestimate, even "0" works */
        blankus = (m->v.active - mode->crtc_vdisplay - 2) * m->h.active;
        blankus *= 1000;
        blankus /= mode->crtc_clock;
        m->v.blankus = blankus;

        if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
                m->v.blank2e =  m->v.active + m->v.blanke;
                m->v.blank2s =  m->v.blank2e + mode->crtc_vdisplay;
                m->v.active  = (m->v.active * 2) + 1;
                m->interlace = true;
        } else {
                m->v.blank2e = 0;
                m->v.blank2s = 1;
                m->interlace = false;
        }
        m->clock = mode->crtc_clock;

        asyh->or.nhsync = !!(mode->flags & DRM_MODE_FLAG_NHSYNC);
        asyh->or.nvsync = !!(mode->flags & DRM_MODE_FLAG_NVSYNC);
        asyh->set.or = head->func->or != NULL;
        asyh->set.mode = true;
}

static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
        struct nouveau_drm *drm = nouveau_drm(crtc->dev);
        struct nv50_head *head = nv50_head(crtc);
        struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
        struct nv50_head_atom *asyh = nv50_head_atom(state);
        struct nouveau_conn_atom *asyc = NULL;
        struct drm_connector_state *conns;
        struct drm_connector *conn;
        int i;

        NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
        if (asyh->state.active) {
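                /* Find the new connector state (if any) driving this CRTC. */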
                for_each_new_connector_in_state(asyh->state.state, conn, conns, i) {
                        if (conns->crtc == crtc) {
                                asyc = nouveau_conn_atom(conns);
                                break;
                        }
                }

                if (armh->state.active) {
                        if (asyc) {
                                if (asyh->state.mode_changed)
                                        asyc->set.scaler = true;
                                if (armh->base.depth != asyh->base.depth)
                                        asyc->set.dither = true;
                        }
                } else {
                        if (asyc)
                                asyc->set.mask = ~0;
                        asyh->set.mask = ~0;
                        asyh->set.or = head->func->or != NULL;
                }

                if (asyh->state.mode_changed || asyh->state.connectors_changed)
                        nv50_head_atomic_check_mode(head, asyh);

                if (asyh->state.color_mgmt_changed ||
                    memcmp(&armh->wndw, &asyh->wndw, sizeof(asyh->wndw))) {
                        int ret = nv50_head_atomic_check_lut(head, asyh);
                        if (ret)
                                return ret;

                        asyh->olut.visible = asyh->olut.handle != 0;
                }

                if (asyc) {
                        if (asyc->set.scaler)
                                nv50_head_atomic_check_view(armh, asyh, asyc);
                        if (asyc->set.dither)
                                nv50_head_atomic_check_dither(armh, asyh, asyc);
                        if (asyc->set.procamp)
                                nv50_head_atomic_check_procamp(armh, asyh, asyc);
                }

                if (head->func->core_calc) {
                        head->func->core_calc(head, asyh);
                        if (!asyh->core.visible)
                                asyh->olut.visible = false;
                }

                asyh->set.base = armh->base.cpp != asyh->base.cpp;
                asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
        } else {
                asyh->olut.visible = false;
                asyh->core.visible = false;
                asyh->curs.visible = false;
                asyh->base.cpp = 0;
                asyh->ovly.cpp = 0;
        }

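        /* Without a full modeset, only flush the state that actually changed;
         * a modeset clears whatever was visible and re-programs whatever
         * will be.
         */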
        if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
                if (asyh->core.visible) {
                        if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
                                asyh->set.core = true;
                } else
                if (armh->core.visible) {
                        asyh->clr.core = true;
                }

                if (asyh->curs.visible) {
                        if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
                                asyh->set.curs = true;
                } else
                if (armh->curs.visible) {
                        asyh->clr.curs = true;
                }

                if (asyh->olut.visible) {
                        if (memcmp(&armh->olut, &asyh->olut, sizeof(asyh->olut)))
                                asyh->set.olut = true;
                } else
                if (armh->olut.visible) {
                        asyh->clr.olut = true;
                }
        } else {
                asyh->clr.olut = armh->olut.visible;
                asyh->clr.core = armh->core.visible;
                asyh->clr.curs = armh->curs.visible;
                asyh->set.olut = asyh->olut.visible;
                asyh->set.core = asyh->core.visible;
                asyh->set.curs = asyh->curs.visible;
        }

        if (asyh->clr.mask || asyh->set.mask)
                nv50_atom(asyh->state.state)->lock_core = true;
        return 0;
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
        .atomic_check = nv50_head_atomic_check,
};

static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
                               struct drm_crtc_state *state)
{
        struct nv50_head_atom *asyh = nv50_head_atom(state);
        __drm_atomic_helper_crtc_destroy_state(&asyh->state);
        kfree(asyh);
}

static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
        struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
        struct nv50_head_atom *asyh;
        if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
                return NULL;
        __drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
        asyh->wndw = armh->wndw;
        asyh->view = armh->view;
        asyh->mode = armh->mode;
        asyh->olut = armh->olut;
        asyh->core = armh->core;
        asyh->curs = armh->curs;
        asyh->base = armh->base;
        asyh->ovly = armh->ovly;
        asyh->dither = armh->dither;
        asyh->procamp = armh->procamp;
        asyh->or = armh->or;
        asyh->dp = armh->dp;
        asyh->clr.mask = 0;
        asyh->set.mask = 0;
        return &asyh->state;
}

static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
                               struct drm_crtc_state *state)
{
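        /* Free any existing software CRTC state before installing the new one. */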
        if (crtc->state)
                crtc->funcs->atomic_destroy_state(crtc, crtc->state);
        crtc->state = state;
        crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
        struct nv50_head_atom *asyh;

        if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
                return;

        __drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}

static void
nv50_head_destroy(struct drm_crtc *crtc)
{
        struct nv50_head *head = nv50_head(crtc);
        nv50_lut_fini(&head->olut);
        drm_crtc_cleanup(crtc);
        kfree(head);
}

static const struct drm_crtc_funcs
nv50_head_func = {
        .reset = nv50_head_reset,
        .gamma_set = drm_atomic_helper_legacy_gamma_set,
        .destroy = nv50_head_destroy,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .atomic_duplicate_state = nv50_head_atomic_duplicate_state,
        .atomic_destroy_state = nv50_head_atomic_destroy_state,
};

int
nv50_head_create(struct drm_device *dev, int index)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nv50_disp *disp = nv50_disp(dev);
        struct nv50_head *head;
        struct nv50_wndw *curs, *wndw;
        struct drm_crtc *crtc;
        int ret;

        head = kzalloc(sizeof(*head), GFP_KERNEL);
        if (!head)
                return -ENOMEM;

        head->func = disp->core->func->head;
        head->base.index = index;

        if (disp->disp->object.oclass < GV100_DISP) {
                ret = nv50_ovly_new(drm, head->base.index, &wndw);
                if (ret == 0)
                        ret = nv50_base_new(drm, head->base.index, &wndw);
        } else {
                ret = nv50_wndw_new(drm, DRM_PLANE_TYPE_OVERLAY,
                                    head->base.index * 2 + 1, &wndw);
                if (ret == 0)
                        ret = nv50_wndw_new(drm, DRM_PLANE_TYPE_PRIMARY,
                                            head->base.index * 2 + 0, &wndw);
        }
        if (ret == 0)
                ret = nv50_curs_new(drm, head->base.index, &curs);
        if (ret) {
                kfree(head);
                return ret;
        }

        crtc = &head->base.base;
        drm_crtc_init_with_planes(dev, crtc, &wndw->plane, &curs->plane,
                                  &nv50_head_func, "head-%d", head->base.index);
        drm_crtc_helper_add(crtc, &nv50_head_help);
        drm_mode_crtc_set_gamma_size(crtc, 256);

        if (head->func->olut_set) {
                ret = nv50_lut_init(disp, &drm->client.mmu, &head->olut);
                if (ret)
                        goto out;
        }

out:
        if (ret)
                nv50_head_destroy(crtc);
        return ret;
}