linux/drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2014-2018, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 */

#define pr_fmt(fmt)     "[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/debugfs.h>
#include <linux/kthread.h>
#include <linux/seq_file.h>

#include <drm/drm_crtc.h>
#include <drm/drm_file.h>
#include <drm/drm_probe_helper.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"

#define DPU_DEBUG_ENC(e, fmt, ...) DPU_DEBUG("enc%d " fmt,\
                (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
                (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_DEBUG_PHYS(p, fmt, ...) DPU_DEBUG("enc%d intf%d pp%d " fmt,\
                (p) ? (p)->parent->base.id : -1, \
                (p) ? (p)->intf_idx - INTF_0 : -1, \
                (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
                ##__VA_ARGS__)

#define DPU_ERROR_PHYS(p, fmt, ...) DPU_ERROR("enc%d intf%d pp%d " fmt,\
                (p) ? (p)->parent->base.id : -1, \
                (p) ? (p)->intf_idx - INTF_0 : -1, \
                (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
                ##__VA_ARGS__)

/*
 * Two to anticipate panels that can do cmd/vid dynamic switching;
 * the plan is to create all possible physical encoder types and
 * switch between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
        (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define IDLE_SHORT_TIMEOUT      1

#define MAX_VDISPLAY_SPLIT 1080

/* timeout in frames waiting for frame done */
#define DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES 5

/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *      This event happens at NORMAL priority.
 *      Event that signals the start of the transfer. When this event is
 *      received, enable MDP/DSI core clocks. Regardless of the previous
 *      state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *      This event happens at INTERRUPT level.
 *      Event signals the end of the data transfer after the PP FRAME_DONE
 *      event. At the end of this event, a delayed work is scheduled to go to
 *      IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *      This event happens at NORMAL priority.
 *      This event, when received during the ON state, leaves the RC state
 *      in the PRE_OFF state. It should be followed by the STOP event as
 *      part of encoder disable.
 *      If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *      This event happens at NORMAL priority.
 *      When this event is received, disable all the MDP/DSI core clocks, and
 *      disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *      IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *      PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *      Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *      This event happens at NORMAL priority from a work item.
 *      Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *      This would disable MDP/DSI core clocks and change the resource state
 *      to IDLE.
 */
enum dpu_enc_rc_events {
        DPU_ENC_RC_EVENT_KICKOFF = 1,
        DPU_ENC_RC_EVENT_FRAME_DONE,
        DPU_ENC_RC_EVENT_PRE_STOP,
        DPU_ENC_RC_EVENT_STOP,
        DPU_ENC_RC_EVENT_ENTER_IDLE
};

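/*
 * Illustrative sequence for one frame on a command-mode panel, assuming
 * idle_pc is supported (a sketch of the intended flow derived from the
 * event documentation above, not literal driver code;
 * dpu_encoder_resource_control() is the static handler further below):
 *
 *      dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_KICKOFF);
 *      ...frame is transferred, PP FRAME_DONE interrupt fires...
 *      dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_FRAME_DONE);
 *      ...no new frame for idle_timeout ms, delayed work runs...
 *      dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_ENTER_IDLE);
 */
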
/**
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
 113enum dpu_enc_rc_states {
 114        DPU_ENC_RC_STATE_OFF,
 115        DPU_ENC_RC_STATE_PRE_OFF,
 116        DPU_ENC_RC_STATE_ON,
 117        DPU_ENC_RC_STATE_IDLE
 118};
 119
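/*
 * State transitions implied by the event documentation above (a sketch;
 * see dpu_encoder_resource_control() for the authoritative logic):
 *
 *      OFF/IDLE --KICKOFF--> ON
 *      ON --ENTER_IDLE--> IDLE
 *      ON --PRE_STOP--> PRE_OFF
 *      PRE_OFF/IDLE --STOP--> OFF
 */
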
/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *      encoders. Virtual encoder manages one "logical" display. Physical
 *      encoders manage one intf block, tied to a specific panel/sub-panel.
 *      Virtual encoder defers as much as possible to the physical encoders.
 *      Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:               drm_encoder base class for registration with DRM
 * @enc_spinlock:       Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client: Client handle to the bus scaling interface
 * @enabled:            True if the encoder is active, protected by enc_lock
 * @num_phys_encs:      Actual number of physical encoders contained.
 * @phys_encs:          Container of physical encoders managed.
 * @cur_master:         Pointer to the current master in this mode; an
 *                      optimization. Only valid after enable; cleared at
 *                      disable.
 * @cur_slave:          Pointer to the current slave in this mode, if any.
 * @hw_pp:              Handles to the pingpong blocks used for the display.
 *                      The number of pingpong blocks can differ from
 *                      num_phys_encs.
 * @intfs_swapped:      Whether or not the phys_enc interfaces have been swapped
 *                      for partial update right-only cases, such as pingpong
 *                      split where virtual pingpong does not generate IRQs
 * @crtc:               Pointer to the currently assigned crtc. Normally you
 *                      would use crtc->state->encoder_mask to determine the
 *                      link between encoder/crtc. However in this case we need
 *                      to track crtc in the disable() hook which is called
 *                      _after_ encoder_mask is cleared.
 * @crtc_kickoff_cb:            Callback into CRTC that will flush & start
 *                              all CTL paths
 * @crtc_kickoff_cb_data:       Opaque user data given to crtc_kickoff_cb
 * @debugfs_root:               Debug file system root file node
 * @enc_lock:                   Lock around physical encoder
 *                              create/destroy/enable/disable
 * @frame_busy_mask:            Bitmask tracking which phys_enc we are still
 *                              busy processing current command.
 *                              Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:        callback handler for frame event
 * @crtc_frame_event_cb_data:   callback handler private data
 * @frame_done_timeout_ms:      frame done timeout in ms
 * @frame_done_timer:           watchdog timer for frame done event
 * @vsync_event_timer:          vsync timer
 * @disp_info:                  local copy of msm_display_info struct
 * @idle_pc_supported:          indicates if idle power collapse is supported
 * @rc_lock:                    resource control mutex lock to protect
 *                              virt encoder over various state changes
 * @rc_state:                   resource controller state
 * @delayed_off_work:           delayed worker to schedule disabling of
 *                              clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:           worker to handle vsync event for autorefresh
 * @topology:                   topology of the display
 * @mode_set_complete:          flag to indicate modeset completion
 * @idle_timeout:               idle timeout duration in milliseconds
 */
struct dpu_encoder_virt {
        struct drm_encoder base;
        spinlock_t enc_spinlock;
        uint32_t bus_scaling_client;

        bool enabled;

        unsigned int num_phys_encs;
        struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
        struct dpu_encoder_phys *cur_master;
        struct dpu_encoder_phys *cur_slave;
        struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

        bool intfs_swapped;

        struct drm_crtc *crtc;

        struct dentry *debugfs_root;
        struct mutex enc_lock;
        DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
        void (*crtc_frame_event_cb)(void *, u32 event);
        void *crtc_frame_event_cb_data;

        atomic_t frame_done_timeout_ms;
        struct timer_list frame_done_timer;
        struct timer_list vsync_event_timer;

        struct msm_display_info disp_info;

        bool idle_pc_supported;
        struct mutex rc_lock;
        enum dpu_enc_rc_states rc_state;
        struct delayed_work delayed_off_work;
        struct kthread_work vsync_event_work;
        struct msm_display_topology topology;
        bool mode_set_complete;

        u32 idle_timeout;
};

#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
                enum dpu_intr_idx intr_idx)
{
        DRM_ERROR("irq timeout id=%u, intf=%d, pp=%d, intr=%d\n",
                  DRMID(phys_enc->parent), phys_enc->intf_idx - INTF_0,
                  phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

        if (phys_enc->parent_ops->handle_frame_done)
                phys_enc->parent_ops->handle_frame_done(
                                phys_enc->parent, phys_enc,
                                DPU_ENCODER_FRAME_EVENT_ERROR);
}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
                int32_t hw_id, struct dpu_encoder_wait_info *info);

int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
                enum dpu_intr_idx intr_idx,
                struct dpu_encoder_wait_info *wait_info)
{
        struct dpu_encoder_irq *irq;
        u32 irq_status;
        int ret;

        if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
                DPU_ERROR("invalid params\n");
                return -EINVAL;
        }
        irq = &phys_enc->irq[intr_idx];

        /* note: do master / slave checking outside */

        /* return EWOULDBLOCK since we know the wait isn't necessary */
        if (phys_enc->enable_state == DPU_ENC_DISABLED) {
                DRM_ERROR("encoder is disabled id=%u, intr=%d, hw=%d, irq=%d",
                          DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                          irq->irq_idx);
                return -EWOULDBLOCK;
        }

        if (irq->irq_idx < 0) {
                DRM_DEBUG_KMS("skip irq wait id=%u, intr=%d, hw=%d, irq=%s",
                              DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                              irq->name);
                return 0;
        }

        DRM_DEBUG_KMS("id=%u, intr=%d, hw=%d, irq=%d, pp=%d, pending_cnt=%d",
                      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                      irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
                      atomic_read(wait_info->atomic_cnt));

        ret = dpu_encoder_helper_wait_event_timeout(
                        DRMID(phys_enc->parent),
                        irq->hw_idx,
                        wait_info);

        if (ret <= 0) {
                irq_status = dpu_core_irq_read(phys_enc->dpu_kms,
                                irq->irq_idx, true);
                if (irq_status) {
                        unsigned long flags;

                        DRM_DEBUG_KMS("irq not triggered id=%u, intr=%d, "
                                      "hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
                                      DRMID(phys_enc->parent), intr_idx,
                                      irq->hw_idx, irq->irq_idx,
                                      phys_enc->hw_pp->idx - PINGPONG_0,
                                      atomic_read(wait_info->atomic_cnt));
                        local_irq_save(flags);
                        irq->cb.func(phys_enc, irq->irq_idx);
                        local_irq_restore(flags);
                        ret = 0;
                } else {
                        ret = -ETIMEDOUT;
                        DRM_DEBUG_KMS("irq timeout id=%u, intr=%d, "
                                      "hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
                                      DRMID(phys_enc->parent), intr_idx,
                                      irq->hw_idx, irq->irq_idx,
                                      phys_enc->hw_pp->idx - PINGPONG_0,
                                      atomic_read(wait_info->atomic_cnt));
                }
        } else {
                ret = 0;
                trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
                        intr_idx, irq->hw_idx, irq->irq_idx,
                        phys_enc->hw_pp->idx - PINGPONG_0,
                        atomic_read(wait_info->atomic_cnt));
        }

        return ret;
}

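/*
 * Illustrative caller-side sketch for dpu_encoder_helper_wait_for_irq()
 * (hypothetical usage from a physical encoder; the field and constant
 * names below are assumptions based on dpu_encoder_phys.h, not code from
 * this file):
 *
 *      struct dpu_encoder_wait_info wait = {
 *              .wq = &phys_enc->pending_kickoff_wq,
 *              .atomic_cnt = &phys_enc->pending_kickoff_cnt,
 *              .timeout_ms = KICKOFF_TIMEOUT_MS,
 *      };
 *      ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
 *                                            &wait);
 */
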
int dpu_encoder_helper_register_irq(struct dpu_encoder_phys *phys_enc,
                enum dpu_intr_idx intr_idx)
{
        struct dpu_encoder_irq *irq;
        int ret = 0;

        if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
                DPU_ERROR("invalid params\n");
                return -EINVAL;
        }
        irq = &phys_enc->irq[intr_idx];

        if (irq->irq_idx >= 0) {
                DPU_DEBUG_PHYS(phys_enc,
                                "skipping already registered irq %s type %d\n",
                                irq->name, irq->intr_type);
                return 0;
        }

        irq->irq_idx = dpu_core_irq_idx_lookup(phys_enc->dpu_kms,
                        irq->intr_type, irq->hw_idx);
        if (irq->irq_idx < 0) {
                DPU_ERROR_PHYS(phys_enc,
                        "failed to lookup IRQ index for %s type:%d\n",
                        irq->name, irq->intr_type);
                return -EINVAL;
        }

        ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,
                        &irq->cb);
        if (ret) {
                DPU_ERROR_PHYS(phys_enc,
                        "failed to register IRQ callback for %s\n",
                        irq->name);
                irq->irq_idx = -EINVAL;
                return ret;
        }

        ret = dpu_core_irq_enable(phys_enc->dpu_kms, &irq->irq_idx, 1);
        if (ret) {
                DRM_ERROR("enable failed id=%u, intr=%d, hw=%d, irq=%d",
                          DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                          irq->irq_idx);
                dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
                                irq->irq_idx, &irq->cb);
                irq->irq_idx = -EINVAL;
                return ret;
        }

        trace_dpu_enc_irq_register_success(DRMID(phys_enc->parent), intr_idx,
                                irq->hw_idx, irq->irq_idx);

        return ret;
}

int dpu_encoder_helper_unregister_irq(struct dpu_encoder_phys *phys_enc,
                enum dpu_intr_idx intr_idx)
{
        struct dpu_encoder_irq *irq;
        int ret;

        if (!phys_enc) {
                DPU_ERROR("invalid encoder\n");
                return -EINVAL;
        }
        irq = &phys_enc->irq[intr_idx];

        /* skip irqs that weren't registered, but flag the duplicate unregister */
        if (irq->irq_idx < 0) {
                DRM_ERROR("duplicate unregister id=%u, intr=%d, hw=%d, irq=%d",
                          DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                          irq->irq_idx);
                return 0;
        }

        ret = dpu_core_irq_disable(phys_enc->dpu_kms, &irq->irq_idx, 1);
        if (ret) {
                DRM_ERROR("disable failed id=%u, intr=%d, hw=%d, irq=%d ret=%d",
                          DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                          irq->irq_idx, ret);
        }

        ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx,
                        &irq->cb);
        if (ret) {
                DRM_ERROR("unreg cb fail id=%u, intr=%d, hw=%d, irq=%d ret=%d",
                          DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
                          irq->irq_idx, ret);
        }

        trace_dpu_enc_irq_unregister_success(DRMID(phys_enc->parent), intr_idx,
                                             irq->hw_idx, irq->irq_idx);

        irq->irq_idx = -EINVAL;

        return 0;
}

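/*
 * Typical IRQ lifecycle in a physical encoder (illustrative sketch only;
 * INTR_IDX_VSYNC is one of the dpu_intr_idx values from dpu_encoder_phys.h):
 *
 *      dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_VSYNC);
 *      ...interrupts are delivered to irq->cb.func...
 *      dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_VSYNC);
 */
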
void dpu_encoder_get_hw_resources(struct drm_encoder *drm_enc,
                                  struct dpu_encoder_hw_resources *hw_res)
{
        struct dpu_encoder_virt *dpu_enc = NULL;
        int i = 0;

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        DPU_DEBUG_ENC(dpu_enc, "\n");

        /* Query resources used by phys encs, expected to be without overlap */
        memset(hw_res, 0, sizeof(*hw_res));

        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys && phys->ops.get_hw_resources)
                        phys->ops.get_hw_resources(phys, hw_res);
        }
}

static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
        struct dpu_encoder_virt *dpu_enc = NULL;
        int i = 0;

        if (!drm_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        DPU_DEBUG_ENC(dpu_enc, "\n");

        mutex_lock(&dpu_enc->enc_lock);

        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys && phys->ops.destroy) {
                        phys->ops.destroy(phys);
                        --dpu_enc->num_phys_encs;
                        dpu_enc->phys_encs[i] = NULL;
                }
        }

        if (dpu_enc->num_phys_encs)
                DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
                                dpu_enc->num_phys_encs);
        dpu_enc->num_phys_encs = 0;
        mutex_unlock(&dpu_enc->enc_lock);

        drm_encoder_cleanup(drm_enc);
        mutex_destroy(&dpu_enc->enc_lock);
}

void dpu_encoder_helper_split_config(
                struct dpu_encoder_phys *phys_enc,
                enum dpu_intf interface)
{
        struct dpu_encoder_virt *dpu_enc;
        struct split_pipe_cfg cfg = { 0 };
        struct dpu_hw_mdp *hw_mdptop;
        struct msm_display_info *disp_info;

        if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
                DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
                return;
        }

        dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
        hw_mdptop = phys_enc->hw_mdptop;
        disp_info = &dpu_enc->disp_info;

        if (disp_info->intf_type != DRM_MODE_ENCODER_DSI)
                return;

        /*
         * Disable split modes since the encoder will be operating as the only
         * encoder, either for the entire use case in the case of, for example,
         * single DSI, or for this frame in the case of left/right only partial
         * update.
         */
        if (phys_enc->split_role == ENC_ROLE_SOLO) {
                if (hw_mdptop->ops.setup_split_pipe)
                        hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
                return;
        }

        cfg.en = true;
        cfg.mode = phys_enc->intf_mode;
        cfg.intf = interface;

        if (cfg.en && phys_enc->ops.needs_single_flush &&
                        phys_enc->ops.needs_single_flush(phys_enc))
                cfg.split_flush_en = true;

        if (phys_enc->split_role == ENC_ROLE_MASTER) {
                DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

                if (hw_mdptop->ops.setup_split_pipe)
                        hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
        }
}

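/*
 * For example (a sketch, not literal configuration): on a dual-DSI panel
 * the master physical encoder ends up programming roughly
 *
 *      cfg.en = true;
 *      cfg.mode = phys_enc->intf_mode;   // video or command mode
 *      cfg.intf = interface;             // the master's interface id
 *
 * while a solo encoder programs an all-zero cfg, disabling split pipe.
 */
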
static void _dpu_encoder_adjust_mode(struct drm_connector *connector,
                struct drm_display_mode *adj_mode)
{
        struct drm_display_mode *cur_mode;

        if (!connector || !adj_mode)
                return;

        list_for_each_entry(cur_mode, &connector->modes, head) {
                if (cur_mode->vdisplay == adj_mode->vdisplay &&
                    cur_mode->hdisplay == adj_mode->hdisplay &&
                    drm_mode_vrefresh(cur_mode) == drm_mode_vrefresh(adj_mode)) {
                        adj_mode->private = cur_mode->private;
                        adj_mode->private_flags |= cur_mode->private_flags;
                }
        }
}

static struct msm_display_topology dpu_encoder_get_topology(
                        struct dpu_encoder_virt *dpu_enc,
                        struct dpu_kms *dpu_kms,
                        struct drm_display_mode *mode)
{
        struct msm_display_topology topology;
        int i, intf_count = 0;

        for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
                if (dpu_enc->phys_encs[i])
                        intf_count++;

        /* Use split topology when vdisplay is greater than MAX_VDISPLAY_SPLIT */
        topology.num_lm = (mode->vdisplay > MAX_VDISPLAY_SPLIT) ? 2 : 1;
        topology.num_enc = 0;
        topology.num_intf = intf_count;

        return topology;
}
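
/*
 * Worked example (illustrative): a single-DSI 1920x1080 mode yields
 * num_lm = 1 and num_intf = 1, while a 2560x1440 mode (vdisplay 1440 >
 * MAX_VDISPLAY_SPLIT) yields num_lm = 2 with the same interface count.
 */
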
static int dpu_encoder_virt_atomic_check(
                struct drm_encoder *drm_enc,
                struct drm_crtc_state *crtc_state,
                struct drm_connector_state *conn_state)
{
        struct dpu_encoder_virt *dpu_enc;
        struct msm_drm_private *priv;
        struct dpu_kms *dpu_kms;
        const struct drm_display_mode *mode;
        struct drm_display_mode *adj_mode;
        struct msm_display_topology topology;
        int i = 0;
        int ret = 0;

        if (!drm_enc || !crtc_state || !conn_state) {
                DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
                                drm_enc != 0, crtc_state != 0, conn_state != 0);
                return -EINVAL;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        DPU_DEBUG_ENC(dpu_enc, "\n");

        priv = drm_enc->dev->dev_private;
        dpu_kms = to_dpu_kms(priv->kms);
        mode = &crtc_state->mode;
        adj_mode = &crtc_state->adjusted_mode;
        trace_dpu_enc_atomic_check(DRMID(drm_enc));

        /*
         * display drivers may populate private fields of the drm display mode
         * structure while registering possible modes of a connector with DRM.
         * These private fields are not populated back while DRM invokes
         * the mode_set callbacks. This module retrieves and populates the
         * private fields of the given mode.
         */
        _dpu_encoder_adjust_mode(conn_state->connector, adj_mode);

        /* perform atomic check on the physical encoders */
        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys && phys->ops.atomic_check)
                        ret = phys->ops.atomic_check(phys, crtc_state,
                                        conn_state);
                else if (phys && phys->ops.mode_fixup)
                        if (!phys->ops.mode_fixup(phys, mode, adj_mode))
                                ret = -EINVAL;

                if (ret) {
                        DPU_ERROR_ENC(dpu_enc,
                                        "mode unsupported, phys idx %d\n", i);
                        break;
                }
        }

        topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

        /* Reserve dynamic resources now, indicating the AtomicTest phase */
        if (!ret) {
                /*
                 * Avoid reserving resources when mode set is pending. Topology
                 * info may not be available to complete reservation.
                 */
                if (drm_atomic_crtc_needs_modeset(crtc_state)
                                && dpu_enc->mode_set_complete) {
                        ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, crtc_state,
                                             topology, true);
                        dpu_enc->mode_set_complete = false;
                }
        }

        trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags,
                        adj_mode->private_flags);

        return ret;
}

static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
                        struct msm_display_info *disp_info)
{
        struct dpu_vsync_source_cfg vsync_cfg = { 0 };
        struct msm_drm_private *priv;
        struct dpu_kms *dpu_kms;
        struct dpu_hw_mdp *hw_mdptop;
        struct drm_encoder *drm_enc;
        int i;

        if (!dpu_enc || !disp_info) {
                DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
                                        dpu_enc != NULL, disp_info != NULL);
                return;
        } else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
                DPU_ERROR("invalid num phys enc %d/%d\n",
                                dpu_enc->num_phys_encs,
                                (int) ARRAY_SIZE(dpu_enc->hw_pp));
                return;
        }

        drm_enc = &dpu_enc->base;
        /* these pointers are checked in virt_enable_helper */
        priv = drm_enc->dev->dev_private;

        dpu_kms = to_dpu_kms(priv->kms);
        if (!dpu_kms) {
                DPU_ERROR("invalid dpu_kms\n");
                return;
        }

        hw_mdptop = dpu_kms->hw_mdp;
        if (!hw_mdptop) {
                DPU_ERROR("invalid mdptop\n");
                return;
        }

        if (hw_mdptop->ops.setup_vsync_source &&
                        disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
                for (i = 0; i < dpu_enc->num_phys_encs; i++)
                        vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

                vsync_cfg.pp_count = dpu_enc->num_phys_encs;
                if (disp_info->is_te_using_watchdog_timer)
                        vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
                else
                        vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

                hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
        }
}

static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
        struct dpu_encoder_virt *dpu_enc;
        int i;

        if (!drm_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);

        DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys && phys->ops.irq_control)
                        phys->ops.irq_control(phys, enable);
        }
}

static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
                bool enable)
{
        struct msm_drm_private *priv;
        struct dpu_kms *dpu_kms;
        struct dpu_encoder_virt *dpu_enc;

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        priv = drm_enc->dev->dev_private;
        dpu_kms = to_dpu_kms(priv->kms);

        trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

        if (!dpu_enc->cur_master) {
                DPU_ERROR("encoder master not set\n");
                return;
        }

        if (enable) {
                /* enable DPU core clks */
                pm_runtime_get_sync(&dpu_kms->pdev->dev);

                /* enable all the irq */
                _dpu_encoder_irq_control(drm_enc, true);

        } else {
                /* disable all the irq */
                _dpu_encoder_irq_control(drm_enc, false);

                /* disable DPU core clks */
                pm_runtime_put_sync(&dpu_kms->pdev->dev);
        }
}

static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
                u32 sw_event)
{
        struct dpu_encoder_virt *dpu_enc;
        struct msm_drm_private *priv;
        bool is_vid_mode = false;

        if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
                        !drm_enc->crtc) {
                DPU_ERROR("invalid parameters\n");
                return -EINVAL;
        }
        dpu_enc = to_dpu_encoder_virt(drm_enc);
        priv = drm_enc->dev->dev_private;
        is_vid_mode = dpu_enc->disp_info.capabilities &
                                                MSM_DISPLAY_CAP_VID_MODE;

        /*
         * when idle_pc is not supported, process only KICKOFF, PRE_STOP and
         * STOP events and return early for other events (i.e. writeback
         * display).
         */
        if (!dpu_enc->idle_pc_supported &&
                        (sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
                        sw_event != DPU_ENC_RC_EVENT_STOP &&
                        sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
                return 0;

        trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
                         dpu_enc->rc_state, "begin");

        switch (sw_event) {
        case DPU_ENC_RC_EVENT_KICKOFF:
                /* cancel delayed off work, if any */
                if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
                        DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
                                        sw_event);

                mutex_lock(&dpu_enc->rc_lock);

                /* return if the resource control is already in ON state */
                if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
                        DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in ON state\n",
                                      DRMID(drm_enc), sw_event);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return 0;
                } else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
                                dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
                        DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in state %d\n",
                                      DRMID(drm_enc), sw_event,
                                      dpu_enc->rc_state);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return -EINVAL;
                }

                if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
                        _dpu_encoder_irq_control(drm_enc, true);
                else
                        _dpu_encoder_resource_control_helper(drm_enc, true);

                dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

                trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                                 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                                 "kickoff");

                mutex_unlock(&dpu_enc->rc_lock);
                break;

        case DPU_ENC_RC_EVENT_FRAME_DONE:
                /*
                 * The mutex lock is not used here because this event happens
                 * at interrupt context, and locking is not required because
                 * the other events, like KICKOFF and STOP, do a wait-for-idle
                 * before executing the resource control.
                 */
                if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
                        DRM_DEBUG_KMS("id:%u, sw_event:%d, rc:%d - unexpected\n",
                                      DRMID(drm_enc), sw_event,
                                      dpu_enc->rc_state);
                        return -EINVAL;
                }

                /*
                 * schedule off work item only when there are no
                 * frames pending
                 */
                if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
                        DRM_DEBUG_KMS("id:%d skip schedule work\n",
                                      DRMID(drm_enc));
                        return 0;
                }

                queue_delayed_work(priv->wq, &dpu_enc->delayed_off_work,
                                   msecs_to_jiffies(dpu_enc->idle_timeout));

                trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                                 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                                 "frame done");
                break;

        case DPU_ENC_RC_EVENT_PRE_STOP:
                /* cancel delayed off work, if any */
                if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
                        DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
                                        sw_event);

                mutex_lock(&dpu_enc->rc_lock);

                if (is_vid_mode &&
                          dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
                        _dpu_encoder_irq_control(drm_enc, true);
                } else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
                                dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
                        /* skip if already OFF or IDLE; resources are off already */
                        DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
                                      DRMID(drm_enc), sw_event,
                                      dpu_enc->rc_state);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return 0;
                }

                dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

                trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                                 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                                 "pre stop");

                mutex_unlock(&dpu_enc->rc_lock);
                break;

        case DPU_ENC_RC_EVENT_STOP:
                mutex_lock(&dpu_enc->rc_lock);

                /* return if the resource control is already in OFF state */
                if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
                        DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
                                      DRMID(drm_enc), sw_event);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return 0;
                } else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
                        DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
                                  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return -EINVAL;
                }

                /*
                 * expect to arrive here only from the PRE_OFF or IDLE states;
                 * in the IDLE state the resources are already disabled
                 */
                if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
                        _dpu_encoder_resource_control_helper(drm_enc, false);

                dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

                trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                                 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                                 "stop");

                mutex_unlock(&dpu_enc->rc_lock);
                break;

        case DPU_ENC_RC_EVENT_ENTER_IDLE:
                mutex_lock(&dpu_enc->rc_lock);

                if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
                        DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
                                  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return 0;
                }

                /*
                 * if we are in ON but a frame was just kicked off,
                 * ignore the IDLE event, it's probably a stale timer event
                 */
                if (dpu_enc->frame_busy_mask[0]) {
                        DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
                                  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
                        mutex_unlock(&dpu_enc->rc_lock);
                        return 0;
                }

                if (is_vid_mode)
                        _dpu_encoder_irq_control(drm_enc, false);
                else
                        _dpu_encoder_resource_control_helper(drm_enc, false);

                dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

                trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                                 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                                 "idle");

                mutex_unlock(&dpu_enc->rc_lock);
                break;

        default:
                DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
                          sw_event);
                trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                                 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                                 "error");
                break;
        }

        trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
                         dpu_enc->idle_pc_supported, dpu_enc->rc_state,
                         "end");
        return 0;
}

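/*
 * For reference, the call sites wired up later in this file (a summary of
 * existing behavior, not new logic): dpu_encoder_virt_enable() sends
 * KICKOFF, dpu_encoder_virt_disable() sends PRE_STOP and then STOP,
 * dpu_encoder_frame_done_callback() sends FRAME_DONE, and
 * dpu_encoder_off_work() sends ENTER_IDLE.
 */
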
static void dpu_encoder_virt_mode_set(struct drm_encoder *drm_enc,
                                      struct drm_display_mode *mode,
                                      struct drm_display_mode *adj_mode)
{
        struct dpu_encoder_virt *dpu_enc;
        struct msm_drm_private *priv;
        struct dpu_kms *dpu_kms;
        struct list_head *connector_list;
        struct drm_connector *conn = NULL, *conn_iter;
        struct drm_crtc *drm_crtc;
        struct dpu_crtc_state *cstate;
        struct dpu_rm_hw_iter hw_iter;
        struct msm_display_topology topology;
        struct dpu_hw_ctl *hw_ctl[MAX_CHANNELS_PER_ENC] = { NULL };
        struct dpu_hw_mixer *hw_lm[MAX_CHANNELS_PER_ENC] = { NULL };
        int num_lm = 0, num_ctl = 0;
        int i, j, ret;

        if (!drm_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        DPU_DEBUG_ENC(dpu_enc, "\n");

        priv = drm_enc->dev->dev_private;
        dpu_kms = to_dpu_kms(priv->kms);
        connector_list = &dpu_kms->dev->mode_config.connector_list;

        trace_dpu_enc_mode_set(DRMID(drm_enc));

        list_for_each_entry(conn_iter, connector_list, head)
                if (conn_iter->encoder == drm_enc)
                        conn = conn_iter;

        if (!conn) {
                DPU_ERROR_ENC(dpu_enc, "failed to find attached connector\n");
                return;
        } else if (!conn->state) {
                DPU_ERROR_ENC(dpu_enc, "invalid connector state\n");
                return;
        }

        drm_for_each_crtc(drm_crtc, drm_enc->dev)
                if (drm_crtc->state->encoder_mask & drm_encoder_mask(drm_enc))
                        break;

        topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

        /* Reserve dynamic resources now. Indicating non-AtomicTest phase */
        ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, drm_crtc->state,
                             topology, false);
        if (ret) {
                DPU_ERROR_ENC(dpu_enc,
                                "failed to reserve hw resources, %d\n", ret);
                return;
        }

        dpu_rm_init_hw_iter(&hw_iter, drm_enc->base.id, DPU_HW_BLK_PINGPONG);
        for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
                dpu_enc->hw_pp[i] = NULL;
                if (!dpu_rm_get_hw(&dpu_kms->rm, &hw_iter))
                        break;
                dpu_enc->hw_pp[i] = (struct dpu_hw_pingpong *) hw_iter.hw;
        }

        dpu_rm_init_hw_iter(&hw_iter, drm_enc->base.id, DPU_HW_BLK_CTL);
        for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
                if (!dpu_rm_get_hw(&dpu_kms->rm, &hw_iter))
                        break;
                hw_ctl[i] = (struct dpu_hw_ctl *)hw_iter.hw;
                num_ctl++;
        }

        dpu_rm_init_hw_iter(&hw_iter, drm_enc->base.id, DPU_HW_BLK_LM);
        for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
                if (!dpu_rm_get_hw(&dpu_kms->rm, &hw_iter))
                        break;
                hw_lm[i] = (struct dpu_hw_mixer *)hw_iter.hw;
                num_lm++;
        }

        cstate = to_dpu_crtc_state(drm_crtc->state);

        for (i = 0; i < num_lm; i++) {
                int ctl_idx = (i < num_ctl) ? i : (num_ctl-1);

                cstate->mixers[i].hw_lm = hw_lm[i];
                cstate->mixers[i].lm_ctl = hw_ctl[ctl_idx];
        }

        cstate->num_mixers = num_lm;

        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys) {
                        if (!dpu_enc->hw_pp[i]) {
                                DPU_ERROR_ENC(dpu_enc,
                                              "no pp block assigned at idx: %d\n", i);
                                goto error;
                        }

                        if (!hw_ctl[i]) {
                                DPU_ERROR_ENC(dpu_enc,
                                              "no ctl block assigned at idx: %d\n", i);
                                goto error;
                        }

                        phys->hw_pp = dpu_enc->hw_pp[i];
                        phys->hw_ctl = hw_ctl[i];

                        dpu_rm_init_hw_iter(&hw_iter, drm_enc->base.id,
                                            DPU_HW_BLK_INTF);
                        for (j = 0; j < MAX_CHANNELS_PER_ENC; j++) {
                                struct dpu_hw_intf *hw_intf;

                                if (!dpu_rm_get_hw(&dpu_kms->rm, &hw_iter))
                                        break;

                                hw_intf = (struct dpu_hw_intf *)hw_iter.hw;
                                if (hw_intf->idx == phys->intf_idx)
                                        phys->hw_intf = hw_intf;
                        }

                        if (!phys->hw_intf) {
                                DPU_ERROR_ENC(dpu_enc,
                                              "no intf block assigned at idx: %d\n",
                                              i);
                                goto error;
                        }

                        phys->connector = conn->state->connector;
                        if (phys->ops.mode_set)
                                phys->ops.mode_set(phys, mode, adj_mode);
                }
        }

        dpu_enc->mode_set_complete = true;

error:
        dpu_rm_release(&dpu_kms->rm, drm_enc);
}

static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
        struct dpu_encoder_virt *dpu_enc = NULL;
        struct msm_drm_private *priv;
        struct dpu_kms *dpu_kms;

        if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
                DPU_ERROR("invalid parameters\n");
                return;
        }

        priv = drm_enc->dev->dev_private;
        dpu_kms = to_dpu_kms(priv->kms);
        if (!dpu_kms) {
                DPU_ERROR("invalid dpu_kms\n");
                return;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        if (!dpu_enc || !dpu_enc->cur_master) {
                DPU_ERROR("invalid dpu encoder/master\n");
                return;
        }

        if (dpu_enc->cur_master->hw_mdptop &&
                        dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc)
                dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc(
                                dpu_enc->cur_master->hw_mdptop,
                                dpu_kms->catalog);

        _dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);
}

void dpu_encoder_virt_runtime_resume(struct drm_encoder *drm_enc)
{
        struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);

        mutex_lock(&dpu_enc->enc_lock);

        if (!dpu_enc->enabled)
                goto out;

        if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.restore)
                dpu_enc->cur_slave->ops.restore(dpu_enc->cur_slave);
        if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
                dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);

        _dpu_encoder_virt_enable_helper(drm_enc);

out:
        mutex_unlock(&dpu_enc->enc_lock);
}

static void dpu_encoder_virt_enable(struct drm_encoder *drm_enc)
{
        struct dpu_encoder_virt *dpu_enc = NULL;
        int ret = 0;
        struct drm_display_mode *cur_mode = NULL;

        if (!drm_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        }
        dpu_enc = to_dpu_encoder_virt(drm_enc);

        mutex_lock(&dpu_enc->enc_lock);
        cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;

        trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
                             cur_mode->vdisplay);

        /* always enable slave encoder before master */
        if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.enable)
                dpu_enc->cur_slave->ops.enable(dpu_enc->cur_slave);

        if (dpu_enc->cur_master && dpu_enc->cur_master->ops.enable)
                dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);

        ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
        if (ret) {
                DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
                                ret);
                goto out;
        }

        _dpu_encoder_virt_enable_helper(drm_enc);

        dpu_enc->enabled = true;

out:
        mutex_unlock(&dpu_enc->enc_lock);
}

static void dpu_encoder_virt_disable(struct drm_encoder *drm_enc)
{
        struct dpu_encoder_virt *dpu_enc = NULL;
        struct msm_drm_private *priv;
        struct dpu_kms *dpu_kms;
        struct drm_display_mode *mode;
        int i = 0;

        if (!drm_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        } else if (!drm_enc->dev) {
                DPU_ERROR("invalid dev\n");
                return;
        } else if (!drm_enc->dev->dev_private) {
                DPU_ERROR("invalid dev_private\n");
                return;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        DPU_DEBUG_ENC(dpu_enc, "\n");

        mutex_lock(&dpu_enc->enc_lock);
        dpu_enc->enabled = false;

        mode = &drm_enc->crtc->state->adjusted_mode;

        priv = drm_enc->dev->dev_private;
        dpu_kms = to_dpu_kms(priv->kms);

        trace_dpu_enc_disable(DRMID(drm_enc));

        /* wait for idle */
        dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

        dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys && phys->ops.disable)
                        phys->ops.disable(phys);
        }

        /* after phys waits for frame-done, should be no more frames pending */
        if (atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
                DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
                del_timer_sync(&dpu_enc->frame_done_timer);
        }

        dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);

        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                if (dpu_enc->phys_encs[i])
                        dpu_enc->phys_encs[i]->connector = NULL;
        }

        DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");

        dpu_rm_release(&dpu_kms->rm, drm_enc);

        mutex_unlock(&dpu_enc->enc_lock);
}

static enum dpu_intf dpu_encoder_get_intf(struct dpu_mdss_cfg *catalog,
                enum dpu_intf_type type, u32 controller_id)
{
        int i = 0;

        for (i = 0; i < catalog->intf_count; i++) {
                if (catalog->intf[i].type == type
                    && catalog->intf[i].controller_id == controller_id) {
                        return catalog->intf[i].id;
                }
        }

        return INTF_MAX;
}

static void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
                struct dpu_encoder_phys *phy_enc)
{
        struct dpu_encoder_virt *dpu_enc = NULL;
        unsigned long lock_flags;

        if (!drm_enc || !phy_enc)
                return;

        DPU_ATRACE_BEGIN("encoder_vblank_callback");
        dpu_enc = to_dpu_encoder_virt(drm_enc);

        spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
        if (dpu_enc->crtc)
                dpu_crtc_vblank_callback(dpu_enc->crtc);
        spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

        atomic_inc(&phy_enc->vsync_cnt);
        DPU_ATRACE_END("encoder_vblank_callback");
}

static void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
                struct dpu_encoder_phys *phy_enc)
{
        if (!phy_enc)
                return;

        DPU_ATRACE_BEGIN("encoder_underrun_callback");
        atomic_inc(&phy_enc->underrun_cnt);
        trace_dpu_enc_underrun_cb(DRMID(drm_enc),
                                  atomic_read(&phy_enc->underrun_cnt));
        DPU_ATRACE_END("encoder_underrun_callback");
}

void dpu_encoder_assign_crtc(struct drm_encoder *drm_enc, struct drm_crtc *crtc)
{
        struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
        unsigned long lock_flags;

        spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
        /* crtc should always be cleared before re-assigning */
        WARN_ON(crtc && dpu_enc->crtc);
        dpu_enc->crtc = crtc;
        spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

void dpu_encoder_toggle_vblank_for_crtc(struct drm_encoder *drm_enc,
                                        struct drm_crtc *crtc, bool enable)
{
        struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
        unsigned long lock_flags;
        int i;

        trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);

        spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
        if (dpu_enc->crtc != crtc) {
                spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
                return;
        }
        spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

                if (phys && phys->ops.control_vblank_irq)
                        phys->ops.control_vblank_irq(phys, enable);
        }
}

void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
                void (*frame_event_cb)(void *, u32 event),
                void *frame_event_cb_data)
{
        struct dpu_encoder_virt *dpu_enc;
        unsigned long lock_flags;
        bool enable;

        if (!drm_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        }

        dpu_enc = to_dpu_encoder_virt(drm_enc);
        enable = frame_event_cb ? true : false;
        trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);

        spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
        dpu_enc->crtc_frame_event_cb = frame_event_cb;
        dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
        spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

1353static void dpu_encoder_frame_done_callback(
1354                struct drm_encoder *drm_enc,
1355                struct dpu_encoder_phys *ready_phys, u32 event)
1356{
1357        struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1358        unsigned int i;
1359
1360        if (event & (DPU_ENCODER_FRAME_EVENT_DONE
1361                        | DPU_ENCODER_FRAME_EVENT_ERROR
1362                        | DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
1363
1364                if (!dpu_enc->frame_busy_mask[0]) {
1365                        /*
1366                         * suppress frame_done without waiter,
1367                         * likely autorefresh
1368                         */
1369                        trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc),
1370                                        event, ready_phys->intf_idx);
1371                        return;
1372                }
1373
1374                /* One of the physical encoders has become idle */
1375                for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1376                        if (dpu_enc->phys_encs[i] == ready_phys) {
1377                                trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
1378                                                dpu_enc->frame_busy_mask[0]);
1379                                clear_bit(i, dpu_enc->frame_busy_mask);
1380                        }
1381                }
1382
1383                if (!dpu_enc->frame_busy_mask[0]) {
1384                        atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
1385                        del_timer(&dpu_enc->frame_done_timer);
1386
1387                        dpu_encoder_resource_control(drm_enc,
1388                                        DPU_ENC_RC_EVENT_FRAME_DONE);
1389
1390                        if (dpu_enc->crtc_frame_event_cb)
1391                                dpu_enc->crtc_frame_event_cb(
1392                                        dpu_enc->crtc_frame_event_cb_data,
1393                                        event);
1394                }
1395        } else {
1396                if (dpu_enc->crtc_frame_event_cb)
1397                        dpu_enc->crtc_frame_event_cb(
1398                                dpu_enc->crtc_frame_event_cb_data, event);
1399        }
1400}
1401
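    /**
     * dpu_encoder_off_work - delayed work fired once the encoder has been
     *      idle for idle_timeout; feeds ENTER_IDLE into the resource control
     *      state machine and signals an IDLE frame event to the crtc.
     * @work: Pointer to the embedded delayed_off_work structure
     */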
1402static void dpu_encoder_off_work(struct work_struct *work)
1403{
1404        struct dpu_encoder_virt *dpu_enc = container_of(work,
1405                        struct dpu_encoder_virt, delayed_off_work.work);
1406
1412        dpu_encoder_resource_control(&dpu_enc->base,
1413                                                DPU_ENC_RC_EVENT_ENTER_IDLE);
1414
1415        dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
1416                                DPU_ENCODER_FRAME_EVENT_IDLE);
1417}
1418
1419/**
1420 * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
1421 * @drm_enc: Pointer to drm encoder structure
1422 * @phys: Pointer to physical encoder structure
1423 * @extra_flush_bits: Additional bit mask to include in flush trigger
1424 */
1425static void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
1426                struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
1427{
1428        struct dpu_hw_ctl *ctl;
1429        int pending_kickoff_cnt;
1430        u32 ret = UINT_MAX;
1431
1432        if (!phys->hw_pp) {
1433                DPU_ERROR("invalid pingpong hw\n");
1434                return;
1435        }
1436
1437        ctl = phys->hw_ctl;
1438        if (!ctl || !ctl->ops.trigger_flush) {
1439                DPU_ERROR("missing trigger cb\n");
1440                return;
1441        }
1442
1443        pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);
1444
1445        if (extra_flush_bits && ctl->ops.update_pending_flush)
1446                ctl->ops.update_pending_flush(ctl, extra_flush_bits);
1447
1448        ctl->ops.trigger_flush(ctl);
1449
1450        if (ctl->ops.get_pending_flush)
1451                ret = ctl->ops.get_pending_flush(ctl);
1452
1453        trace_dpu_enc_trigger_flush(DRMID(drm_enc), phys->intf_idx,
1454                                    pending_kickoff_cnt, ctl->idx,
1455                                    extra_flush_bits, ret);
1456}
1457
1458/**
1459 * _dpu_encoder_trigger_start - trigger start for a physical encoder
1460 * @phys: Pointer to physical encoder structure
1461 */
1462static void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
1463{
1464        if (!phys) {
1465                DPU_ERROR("invalid argument(s)\n");
1466                return;
1467        }
1468
1469        if (!phys->hw_pp) {
1470                DPU_ERROR("invalid pingpong hw\n");
1471                return;
1472        }
1473
1474        if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
1475                phys->ops.trigger_start(phys);
1476}
1477
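    /**
     * dpu_encoder_helper_trigger_start - control start helper exposed to the
     *      physical encoders; writes the CTL start trigger if the hardware
     *      block provides one.
     * @phys_enc: Pointer to physical encoder structure
     */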
1478void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
1479{
1480        struct dpu_hw_ctl *ctl;
1481
1482        if (!phys_enc) {
1483                DPU_ERROR("invalid encoder\n");
1484                return;
1485        }
1486
1487        ctl = phys_enc->hw_ctl;
1488        if (ctl && ctl->ops.trigger_start) {
1489                ctl->ops.trigger_start(ctl);
1490                trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
1491        }
1492}
1493
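    /**
     * dpu_encoder_helper_wait_event_timeout - wait on @info->wq for the
     *      atomic counter to reach zero, retrying premature timeouts until
     *      the wall-clock deadline derived from @info->timeout_ms has passed.
     * @drm_id: drm object id, only used for tracing
     * @hw_id: hardware block id, only used for tracing
     * @info: Wait queue, atomic counter and timeout to wait on
     *
     * Return: 0 on timeout, remaining jiffies otherwise
     */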
1494static int dpu_encoder_helper_wait_event_timeout(
1495                int32_t drm_id,
1496                int32_t hw_id,
1497                struct dpu_encoder_wait_info *info)
1498{
1499        int rc = 0;
1500        s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
1501        s64 wait_jiffies = msecs_to_jiffies(info->timeout_ms);
1502        s64 time;
1503
1504        do {
1505                rc = wait_event_timeout(*(info->wq),
1506                                atomic_read(info->atomic_cnt) == 0, wait_jiffies);
1507                time = ktime_to_ms(ktime_get());
1508
1509                trace_dpu_enc_wait_event_timeout(drm_id, hw_id, rc, time,
1510                                                 expected_time,
1511                                                 atomic_read(info->atomic_cnt));
1512        /* on timeout (rc == 0), retry while the counter is set and time remains */
1513        } while (atomic_read(info->atomic_cnt) && (rc == 0) &&
1514                        (time < expected_time));
1515
1516        return rc;
1517}
1518
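    /**
     * dpu_encoder_helper_hw_reset - reset the CTL block driving a physical
     *      encoder after a fault and mark the phys as enabled again; a reset
     *      failure is logged but not propagated.
     * @phys_enc: Pointer to physical encoder structure
     */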
1519static void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
1520{
1521        struct dpu_encoder_virt *dpu_enc;
1522        struct dpu_hw_ctl *ctl;
1523        int rc;
1524
1525        if (!phys_enc) {
1526                DPU_ERROR("invalid encoder\n");
1527                return;
1528        }
1529        dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
1530        ctl = phys_enc->hw_ctl;
1531
1532        if (!ctl || !ctl->ops.reset)
1533                return;
1534
1535        DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(phys_enc->parent),
1536                      ctl->idx);
1537
1538        rc = ctl->ops.reset(ctl);
1539        if (rc)
1540                DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n", ctl->idx);
1541
1542        phys_enc->enable_state = DPU_ENC_ENABLED;
1543}
1544
1545/**
1546 * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
1547 *      Iterate through the physical encoders and perform consolidated flush
1548 *      and/or control start triggering as needed. This is done in the virtual
1549 *      encoder rather than the individual physical ones in order to handle
1550 *      use cases that require visibility into multiple physical encoders at
1551 *      a time.
1552 * @dpu_enc: Pointer to virtual encoder structure
1553 */
1554static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
1555{
1556        struct dpu_hw_ctl *ctl;
1557        uint32_t i, pending_flush;
1558        unsigned long lock_flags;
1559
1560        pending_flush = 0x0;
1561
1562        /* update pending counts and trigger kickoff ctl flush atomically */
1563        spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1564
1565        /* don't perform flush/start operations for slave encoders */
1566        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1567                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1568
1569                if (!phys || phys->enable_state == DPU_ENC_DISABLED)
1570                        continue;
1571
1572                ctl = phys->hw_ctl;
1573                if (!ctl)
1574                        continue;
1575
1576                /*
1577                 * This is cleared in frame_done worker, which isn't invoked
1578                 * for async commits. So don't set this for async, since it'll
1579                 * roll over to the next commit.
1580                 */
1581                if (phys->split_role != ENC_ROLE_SLAVE)
1582                        set_bit(i, dpu_enc->frame_busy_mask);
1583
1584                if (!phys->ops.needs_single_flush ||
1585                                !phys->ops.needs_single_flush(phys))
1586                        _dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
1587                else if (ctl->ops.get_pending_flush)
1588                        pending_flush |= ctl->ops.get_pending_flush(ctl);
1589        }
1590
1591        /* for split flush, combine pending flush masks and send to master */
1592        if (pending_flush && dpu_enc->cur_master) {
1593                _dpu_encoder_trigger_flush(
1594                                &dpu_enc->base,
1595                                dpu_enc->cur_master,
1596                                pending_flush);
1597        }
1598
1599        _dpu_encoder_trigger_start(dpu_enc->cur_master);
1600
1601        spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
1602}
1603
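    /**
     * dpu_encoder_trigger_kickoff_pending - clear stale pending-flush bits on
     *      every hardware CTL and, for the command mode master CTL only, arm
     *      the block so the pending configuration is taken at the next kickoff.
     * @drm_enc: Pointer to drm encoder structure
     */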
1604void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
1605{
1606        struct dpu_encoder_virt *dpu_enc;
1607        struct dpu_encoder_phys *phys;
1608        unsigned int i;
1609        struct dpu_hw_ctl *ctl;
1610        struct msm_display_info *disp_info;
1611
1612        if (!drm_enc) {
1613                DPU_ERROR("invalid encoder\n");
1614                return;
1615        }
1616        dpu_enc = to_dpu_encoder_virt(drm_enc);
1617        disp_info = &dpu_enc->disp_info;
1618
1619        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1620                phys = dpu_enc->phys_encs[i];
1621
1622                if (phys && phys->hw_ctl) {
1623                        ctl = phys->hw_ctl;
1624                        if (ctl->ops.clear_pending_flush)
1625                                ctl->ops.clear_pending_flush(ctl);
1626
1627                        /* update only for command mode primary ctl */
1628                        if ((phys == dpu_enc->cur_master) &&
1629                           (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
1630                            && ctl->ops.trigger_pending)
1631                                ctl->ops.trigger_pending(ctl);
1632                }
1633        }
1634}
1635
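    /**
     * _dpu_encoder_calculate_linetime - derive the scanout time of a single
     *      line, in nanoseconds, from the mode's pixel clock and horizontal
     *      total. Only valid on the master encoder, which must implement
     *      get_line_count.
     * @dpu_enc: Pointer to virtual encoder structure
     * @mode: Display mode to compute the line time for
     *
     * Return: line time in ns, or 0 on error
     */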
1636static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
1637                struct drm_display_mode *mode)
1638{
1639        u64 pclk_rate;
1640        u32 pclk_period;
1641        u32 line_time;
1642
1643        /*
1644         * For linetime calculation, only operate on master encoder.
1645         */
1646        if (!dpu_enc->cur_master)
1647                return 0;
1648
1649        if (!dpu_enc->cur_master->ops.get_line_count) {
1650                DPU_ERROR("get_line_count function not defined\n");
1651                return 0;
1652        }
1653
1654        pclk_rate = mode->clock; /* pixel clock in kHz */
1655        if (pclk_rate == 0) {
1656                DPU_ERROR("pclk is 0, cannot calculate line time\n");
1657                return 0;
1658        }
1659
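            /* 10^9 / kHz gives the pixel clock period in picoseconds */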
1660        pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
1661        if (pclk_period == 0) {
1662                DPU_ERROR("pclk period is 0\n");
1663                return 0;
1664        }
1665
1666        /*
1667         * Line time calculation based on Pixel clock and HTOTAL.
1668         * Final unit is in ns.
1669         */
1670        line_time = (pclk_period * mode->htotal) / 1000;
1671        if (line_time == 0) {
1672                DPU_ERROR("line time calculation is 0\n");
1673                return 0;
1674        }
1675
1676        DPU_DEBUG_ENC(dpu_enc,
1677                        "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
1678                        pclk_rate, pclk_period, line_time);
1679
1680        return line_time;
1681}
1682
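    /**
     * dpu_encoder_vsync_time - predict the timestamp of the next vsync from
     *      the current line counter, the per-line scanout time and the
     *      mode's vertical total.
     * @drm_enc: Pointer to drm encoder structure
     * @wakeup_time: Filled with the predicted vsync timestamp
     *
     * Return: 0 on success, -EINVAL on error
     */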
1683int dpu_encoder_vsync_time(struct drm_encoder *drm_enc, ktime_t *wakeup_time)
1684{
1685        struct drm_display_mode *mode;
1686        struct dpu_encoder_virt *dpu_enc;
1687        u32 cur_line;
1688        u32 line_time;
1689        u32 vtotal, time_to_vsync;
1690        ktime_t cur_time;
1691
1692        dpu_enc = to_dpu_encoder_virt(drm_enc);
1693
1694        if (!drm_enc->crtc || !drm_enc->crtc->state) {
1695                DPU_ERROR("crtc/crtc state object is NULL\n");
1696                return -EINVAL;
1697        }
1698        mode = &drm_enc->crtc->state->adjusted_mode;
1699
1700        line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
1701        if (!line_time)
1702                return -EINVAL;
1703
1704        cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);
1705
1706        vtotal = mode->vtotal;
1707        if (cur_line >= vtotal)
1708                time_to_vsync = line_time * vtotal;
1709        else
1710                time_to_vsync = line_time * (vtotal - cur_line);
1711
1712        if (time_to_vsync == 0) {
1713                DPU_ERROR("time to vsync should not be zero, vtotal=%d\n",
1714                                vtotal);
1715                return -EINVAL;
1716        }
1717
1718        cur_time = ktime_get();
1719        *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
1720
1721        DPU_DEBUG_ENC(dpu_enc,
1722                        "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
1723                        cur_line, vtotal, time_to_vsync,
1724                        ktime_to_ms(cur_time),
1725                        ktime_to_ms(*wakeup_time));
1726        return 0;
1727}
1728
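    /**
     * dpu_encoder_vsync_event_handler - timer callback fired at the predicted
     *      vsync time; defers to the crtc's event thread, which computes the
     *      next wakeup time and re-arms the timer from worker context.
     * @t: Pointer to the timer embedded in the virtual encoder
     */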
1729static void dpu_encoder_vsync_event_handler(struct timer_list *t)
1730{
1731        struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
1732                        vsync_event_timer);
1733        struct drm_encoder *drm_enc = &dpu_enc->base;
1734        struct msm_drm_private *priv;
1735        struct msm_drm_thread *event_thread;
1736
1737        if (!drm_enc->dev || !drm_enc->dev->dev_private ||
1738                        !drm_enc->crtc) {
1739                DPU_ERROR("invalid parameters\n");
1740                return;
1741        }
1742
1743        priv = drm_enc->dev->dev_private;
1744
1745        if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
1746                DPU_ERROR("invalid crtc index\n");
1747                return;
1748        }
1749        event_thread = &priv->event_thread[drm_enc->crtc->index];
1750        if (!event_thread) {
1751                DPU_ERROR("event_thread not found for crtc:%d\n",
1752                                drm_enc->crtc->index);
1753                return;
1754        }
1755
1756        del_timer(&dpu_enc->vsync_event_timer);
            /*
             * hand off to the event thread; the work handler recomputes the
             * next wakeup time and re-arms this timer
             */
            kthread_queue_work(&event_thread->worker,
                            &dpu_enc->vsync_event_work);
1757}
1758
1759static void dpu_encoder_vsync_event_work_handler(struct kthread_work *work)
1760{
1761        struct dpu_encoder_virt *dpu_enc = container_of(work,
1762                        struct dpu_encoder_virt, vsync_event_work);
1763        ktime_t wakeup_time;
1764
1770        if (dpu_encoder_vsync_time(&dpu_enc->base, &wakeup_time))
1771                return;
1772
1773        trace_dpu_enc_vsync_event_work(DRMID(&dpu_enc->base), wakeup_time);
1774        mod_timer(&dpu_enc->vsync_event_timer,
1775                        nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1776}
1777
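    /**
     * dpu_encoder_prepare_for_kickoff - prepare every physical encoder for
     *      the next kickoff (which may include waiting out the previous one),
     *      move resource control to the ON state, and reset all phys encoders
     *      in-order if any of them flagged the need for a hardware reset.
     * @drm_enc: Pointer to drm encoder structure
     */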
1778void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
1779{
1780        struct dpu_encoder_virt *dpu_enc;
1781        struct dpu_encoder_phys *phys;
1782        bool needs_hw_reset = false;
1783        unsigned int i;
1784
1785        dpu_enc = to_dpu_encoder_virt(drm_enc);
1786
1787        trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));
1788
1789        /* prepare for next kickoff, may include waiting on previous kickoff */
1790        DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
1791        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1792                phys = dpu_enc->phys_encs[i];
1793                if (phys) {
1794                        if (phys->ops.prepare_for_kickoff)
1795                                phys->ops.prepare_for_kickoff(phys);
1796                        if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
1797                                needs_hw_reset = true;
1798                }
1799        }
1800        DPU_ATRACE_END("enc_prepare_for_kickoff");
1801
1802        dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
1803
1804        /* if any phys needs reset, reset all phys, in-order */
1805        if (needs_hw_reset) {
1806                trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
1807                for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1808                        dpu_encoder_helper_hw_reset(dpu_enc->phys_encs[i]);
1809                }
1810        }
1811}
1812
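    /**
     * dpu_encoder_kickoff - trigger a flush and start of the ctl path. Arms
     *      the frame done timeout first, then performs the consolidated phys
     *      kickoff, runs each phys encoder's post-kickoff hook and, for DSI,
     *      schedules the vsync event timer for the next predicted vsync.
     * @drm_enc: Pointer to drm encoder structure
     */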
1813void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
1814{
1815        struct dpu_encoder_virt *dpu_enc;
1816        struct dpu_encoder_phys *phys;
1817        ktime_t wakeup_time;
1818        unsigned long timeout_ms;
1819        unsigned int i;
1820
1821        DPU_ATRACE_BEGIN("encoder_kickoff");
1822        dpu_enc = to_dpu_encoder_virt(drm_enc);
1823
1824        trace_dpu_enc_kickoff(DRMID(drm_enc));
1825
1826        timeout_ms = DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES * 1000 /
1827                        drm_mode_vrefresh(&drm_enc->crtc->state->adjusted_mode);
1828
1829        atomic_set(&dpu_enc->frame_done_timeout_ms, timeout_ms);
1830        mod_timer(&dpu_enc->frame_done_timer,
1831                        jiffies + msecs_to_jiffies(timeout_ms));
1832
1833        /* All phys encs are ready to go, trigger the kickoff */
1834        _dpu_encoder_kickoff_phys(dpu_enc);
1835
1836        /* allow phys encs to handle any post-kickoff business */
1837        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1838                phys = dpu_enc->phys_encs[i];
1839                if (phys && phys->ops.handle_post_kickoff)
1840                        phys->ops.handle_post_kickoff(phys);
1841        }
1842
1843        if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
1844                        !dpu_encoder_vsync_time(drm_enc, &wakeup_time)) {
1845                trace_dpu_enc_early_kickoff(DRMID(drm_enc),
1846                                            ktime_to_ms(wakeup_time));
1847                mod_timer(&dpu_enc->vsync_event_timer,
1848                                nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1849        }
1850
1851        DPU_ATRACE_END("encoder_kickoff");
1852}
1853
1854void dpu_encoder_prepare_commit(struct drm_encoder *drm_enc)
1855{
1856        struct dpu_encoder_virt *dpu_enc;
1857        struct dpu_encoder_phys *phys;
1858        int i;
1859
1860        if (!drm_enc) {
1861                DPU_ERROR("invalid encoder\n");
1862                return;
1863        }
1864        dpu_enc = to_dpu_encoder_virt(drm_enc);
1865
1866        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1867                phys = dpu_enc->phys_encs[i];
1868                if (phys && phys->ops.prepare_commit)
1869                        phys->ops.prepare_commit(phys);
1870        }
1871}
1872
1873#ifdef CONFIG_DEBUG_FS
1874static int _dpu_encoder_status_show(struct seq_file *s, void *data)
1875{
1876        struct dpu_encoder_virt *dpu_enc = s->private;
1877        int i;
1878
1879        mutex_lock(&dpu_enc->enc_lock);
1880        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1881                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1882
1883                if (!phys)
1884                        continue;
1885
1886                seq_printf(s, "intf:%d    vsync:%8d     underrun:%8d    ",
1887                                phys->intf_idx - INTF_0,
1888                                atomic_read(&phys->vsync_cnt),
1889                                atomic_read(&phys->underrun_cnt));
1890
1891                switch (phys->intf_mode) {
1892                case INTF_MODE_VIDEO:
1893                        seq_puts(s, "mode: video\n");
1894                        break;
1895                case INTF_MODE_CMD:
1896                        seq_puts(s, "mode: command\n");
1897                        break;
1898                default:
1899                        seq_puts(s, "mode: ???\n");
1900                        break;
1901                }
1902        }
1903        mutex_unlock(&dpu_enc->enc_lock);
1904
1905        return 0;
1906}
1907
1908static int _dpu_encoder_debugfs_status_open(struct inode *inode,
1909                struct file *file)
1910{
1911        return single_open(file, _dpu_encoder_status_show, inode->i_private);
1912}
1913
1914static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
1915{
1916        struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1919        int i;
1920
1921        static const struct file_operations debugfs_status_fops = {
1922                .open =         _dpu_encoder_debugfs_status_open,
1923                .read =         seq_read,
1924                .llseek =       seq_lseek,
1925                .release =      single_release,
1926        };
1927
1928        char name[DPU_NAME_SIZE];
1929
1930        if (!drm_enc->dev || !drm_enc->dev->dev_private) {
1931                DPU_ERROR("invalid encoder or kms\n");
1932                return -EINVAL;
1933        }
1934
1938        snprintf(name, DPU_NAME_SIZE, "encoder%u", drm_enc->base.id);
1939
1940        /* create overall sub-directory for the encoder */
1941        dpu_enc->debugfs_root = debugfs_create_dir(name,
1942                        drm_enc->dev->primary->debugfs_root);
1943
1944        /* don't error check these */
1945        debugfs_create_file("status", 0600,
1946                dpu_enc->debugfs_root, dpu_enc, &debugfs_status_fops);
1947
1948        for (i = 0; i < dpu_enc->num_phys_encs; i++)
1949                if (dpu_enc->phys_encs[i] &&
1950                                dpu_enc->phys_encs[i]->ops.late_register)
1951                        dpu_enc->phys_encs[i]->ops.late_register(
1952                                        dpu_enc->phys_encs[i],
1953                                        dpu_enc->debugfs_root);
1954
1955        return 0;
1956}
1957#else
1958static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
1959{
1960        return 0;
1961}
1962#endif
1963
1964static int dpu_encoder_late_register(struct drm_encoder *encoder)
1965{
1966        return _dpu_encoder_init_debugfs(encoder);
1967}
1968
1969static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
1970{
1971        struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(encoder);
1972
1973        debugfs_remove_recursive(dpu_enc->debugfs_root);
1974}
1975
1976static int dpu_encoder_virt_add_phys_encs(
1977                u32 display_caps,
1978                struct dpu_encoder_virt *dpu_enc,
1979                struct dpu_enc_phys_init_params *params)
1980{
1981        struct dpu_encoder_phys *enc = NULL;
1982
1983        DPU_DEBUG_ENC(dpu_enc, "\n");
1984
1985        /*
1986         * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
1987         * in this function, check up-front.
1988         */
1989        if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
1990                        ARRAY_SIZE(dpu_enc->phys_encs)) {
1991                DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
1992                          dpu_enc->num_phys_encs);
1993                return -EINVAL;
1994        }
1995
1996        if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
1997                enc = dpu_encoder_phys_vid_init(params);
1998
1999                if (IS_ERR_OR_NULL(enc)) {
2000                        DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
2001                                PTR_ERR(enc));
2002                        return !enc ? -EINVAL : PTR_ERR(enc);
2003                }
2004
2005                dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2006                ++dpu_enc->num_phys_encs;
2007        }
2008
2009        if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
2010                enc = dpu_encoder_phys_cmd_init(params);
2011
2012                if (IS_ERR_OR_NULL(enc)) {
2013                        DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
2014                                PTR_ERR(enc));
2015                        return !enc ? -EINVAL : PTR_ERR(enc);
2016                }
2017
2018                dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2019                ++dpu_enc->num_phys_encs;
2020        }
2021
2022        if (params->split_role == ENC_ROLE_SLAVE)
2023                dpu_enc->cur_slave = enc;
2024        else
2025                dpu_enc->cur_master = enc;
2026
2027        return 0;
2028}
2029
2030static const struct dpu_encoder_virt_ops dpu_encoder_parent_ops = {
2031        .handle_vblank_virt = dpu_encoder_vblank_callback,
2032        .handle_underrun_virt = dpu_encoder_underrun_callback,
2033        .handle_frame_done = dpu_encoder_frame_done_callback,
2034};
2035
2036static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
2037                                 struct dpu_kms *dpu_kms,
2038                                 struct msm_display_info *disp_info)
2039{
2040        int ret = 0;
2041        int i = 0;
2042        enum dpu_intf_type intf_type;
2043        struct dpu_enc_phys_init_params phys_params;
2044
2045        if (!dpu_enc || !dpu_kms) {
2046                DPU_ERROR("invalid arg(s), enc %d kms %d\n",
2047                                dpu_enc != NULL, dpu_kms != NULL);
2048                return -EINVAL;
2049        }
2050
2051        dpu_enc->cur_master = NULL;
2052
2053        memset(&phys_params, 0, sizeof(phys_params));
2054        phys_params.dpu_kms = dpu_kms;
2055        phys_params.parent = &dpu_enc->base;
2056        phys_params.parent_ops = &dpu_encoder_parent_ops;
2057        phys_params.enc_spinlock = &dpu_enc->enc_spinlock;
2058
2059        DPU_DEBUG("\n");
2060
2061        switch (disp_info->intf_type) {
2062        case DRM_MODE_ENCODER_DSI:
2063                intf_type = INTF_DSI;
2064                break;
2065        default:
2066                DPU_ERROR_ENC(dpu_enc, "unsupported display interface type\n");
2067                return -EINVAL;
2068        }
2069
2070        WARN_ON(disp_info->num_of_h_tiles < 1);
2071
2072        DPU_DEBUG("disp_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
2073
2074        if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
2075            (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
2076                dpu_enc->idle_pc_supported =
2077                                dpu_kms->catalog->caps->has_idle_pc;
2078
2079        mutex_lock(&dpu_enc->enc_lock);
2080        for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
2081                /*
2082                 * Left-most tile is at index 0, content is controller id
2083                 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
2084                 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
2085                 */
2086                u32 controller_id = disp_info->h_tile_instance[i];
2087
2088                if (disp_info->num_of_h_tiles > 1) {
2089                        if (i == 0)
2090                                phys_params.split_role = ENC_ROLE_MASTER;
2091                        else
2092                                phys_params.split_role = ENC_ROLE_SLAVE;
2093                } else {
2094                        phys_params.split_role = ENC_ROLE_SOLO;
2095                }
2096
2097                DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
2098                                i, controller_id, phys_params.split_role);
2099
2100                phys_params.intf_idx = dpu_encoder_get_intf(dpu_kms->catalog,
2101                                intf_type,
2102                                controller_id);
2103                if (phys_params.intf_idx == INTF_MAX) {
2104                        DPU_ERROR_ENC(dpu_enc, "could not get intf: type %d, id %d\n",
2105                                                  intf_type, controller_id);
2106                        ret = -EINVAL;
2107                }
2108
2109                if (!ret) {
2110                        ret = dpu_encoder_virt_add_phys_encs(disp_info->capabilities,
2111                                        dpu_enc,
2112                                        &phys_params);
2113                        if (ret)
2114                                DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
2115                }
2116        }
2117
2118        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2119                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2120
2121                if (phys) {
2122                        atomic_set(&phys->vsync_cnt, 0);
2123                        atomic_set(&phys->underrun_cnt, 0);
2124                }
2125        }
2126        mutex_unlock(&dpu_enc->enc_lock);
2127
2128        return ret;
2129}
2130
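    /**
     * dpu_encoder_frame_done_timeout - timer callback fired when a kicked-off
     *      frame failed to signal frame done in time; reports a
     *      DPU_ENCODER_FRAME_EVENT_ERROR to the registered crtc callback.
     * @t: Pointer to the timer embedded in the virtual encoder
     */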
2131static void dpu_encoder_frame_done_timeout(struct timer_list *t)
2132{
2133        struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
2134                        frame_done_timer);
2135        struct drm_encoder *drm_enc = &dpu_enc->base;
2136        struct msm_drm_private *priv;
2137        u32 event;
2138
2139        if (!drm_enc->dev || !drm_enc->dev->dev_private) {
2140                DPU_ERROR("invalid parameters\n");
2141                return;
2142        }
2143        priv = drm_enc->dev->dev_private;
2144
2145        if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
2146                DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
2147                              DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
2148                return;
2149        } else if (!atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
2150                DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
2151                return;
2152        }
2153
2154        DPU_ERROR_ENC(dpu_enc, "frame done timeout\n");
2155
2156        event = DPU_ENCODER_FRAME_EVENT_ERROR;
2157        trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
2158        dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
2159}
2160
2161static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
2162        .mode_set = dpu_encoder_virt_mode_set,
2163        .disable = dpu_encoder_virt_disable,
2164        .enable = dpu_kms_encoder_enable,
2165        .atomic_check = dpu_encoder_virt_atomic_check,
2166
2167        /* This is called by dpu_kms_encoder_enable */
2168        .commit = dpu_encoder_virt_enable,
2169};
2170
2171static const struct drm_encoder_funcs dpu_encoder_funcs = {
2172                .destroy = dpu_encoder_destroy,
2173                .late_register = dpu_encoder_late_register,
2174                .early_unregister = dpu_encoder_early_unregister,
2175};
2176
2177int dpu_encoder_setup(struct drm_device *dev, struct drm_encoder *enc,
2178                struct msm_display_info *disp_info)
2179{
2180        struct msm_drm_private *priv = dev->dev_private;
2181        struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
2182        struct drm_encoder *drm_enc = NULL;
2183        struct dpu_encoder_virt *dpu_enc = NULL;
2184        int ret = 0;
2185
2186        dpu_enc = to_dpu_encoder_virt(enc);
2187
2188        mutex_init(&dpu_enc->enc_lock);
2189        ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info);
2190        if (ret)
2191                goto fail;
2192
2193        atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
2194        timer_setup(&dpu_enc->frame_done_timer,
2195                        dpu_encoder_frame_done_timeout, 0);
2196
2197        if (disp_info->intf_type == DRM_MODE_ENCODER_DSI)
2198                timer_setup(&dpu_enc->vsync_event_timer,
2199                                dpu_encoder_vsync_event_handler,
2200                                0);
2201
2203        mutex_init(&dpu_enc->rc_lock);
2204        INIT_DELAYED_WORK(&dpu_enc->delayed_off_work,
2205                        dpu_encoder_off_work);
2206        dpu_enc->idle_timeout = IDLE_TIMEOUT;
2207
2208        kthread_init_work(&dpu_enc->vsync_event_work,
2209                        dpu_encoder_vsync_event_work_handler);
2210
2211        memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));
2212
2213        DPU_DEBUG_ENC(dpu_enc, "created\n");
2214
2215        return ret;
2216
2217fail:
2218        DPU_ERROR("failed to create encoder\n");
2219        if (drm_enc)
2220                dpu_encoder_destroy(drm_enc);
2221
2222        return ret;
2223
2225}
2226
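    /**
     * dpu_encoder_init - allocate the virtual encoder and register it with
     *      the drm core; hardware resources are wired up later via
     *      dpu_encoder_setup.
     * @dev: Pointer to drm device
     * @drm_enc_mode: DRM_MODE_ENCODER_* mode to register with
     *
     * Return: Pointer to the new drm encoder, or an ERR_PTR on failure
     */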
2227struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
2228                int drm_enc_mode)
2229{
2230        struct dpu_encoder_virt *dpu_enc = NULL;
2231        int rc = 0;
2232
2233        dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
2234        if (!dpu_enc)
2235                return ERR_PTR(-ENOMEM);
2236
2237        rc = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
2238                        drm_enc_mode, NULL);
2239        if (rc) {
2240                devm_kfree(dev->dev, dpu_enc);
2241                return ERR_PTR(rc);
2242        }
2243
2244        drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);
2245
2246        spin_lock_init(&dpu_enc->enc_spinlock);
2247        dpu_enc->enabled = false;
2248
2249        return &dpu_enc->base;
2250}
2251
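    /**
     * dpu_encoder_wait_for_event - poll each physical encoder's wait hook for
     *      the requested event (commit done, tx complete or vblank) and
     *      return the first non-zero status.
     * @drm_enc: Pointer to drm encoder structure
     * @event: Which msm_event_wait condition to wait for
     *
     * Return: 0 on success, negative error code on failure or timeout
     */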
2252int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
2253        enum msm_event_wait event)
2254{
2255        int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
2256        struct dpu_encoder_virt *dpu_enc = NULL;
2257        int i, ret = 0;
2258
2259        if (!drm_enc) {
2260                DPU_ERROR("invalid encoder\n");
2261                return -EINVAL;
2262        }
2263        dpu_enc = to_dpu_encoder_virt(drm_enc);
2264        DPU_DEBUG_ENC(dpu_enc, "\n");
2265
2266        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2267                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2268                if (!phys)
2269                        continue;
2270
2271                switch (event) {
2272                case MSM_ENC_COMMIT_DONE:
2273                        fn_wait = phys->ops.wait_for_commit_done;
2274                        break;
2275                case MSM_ENC_TX_COMPLETE:
2276                        fn_wait = phys->ops.wait_for_tx_complete;
2277                        break;
2278                case MSM_ENC_VBLANK:
2279                        fn_wait = phys->ops.wait_for_vblank;
2280                        break;
2281                default:
2282                        DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
2283                                        event);
2284                        return -EINVAL;
2285                }
2286
2287                if (fn_wait) {
2288                        DPU_ATRACE_BEGIN("wait_for_completion_event");
2289                        ret = fn_wait(phys);
2290                        DPU_ATRACE_END("wait_for_completion_event");
2291                        if (ret)
2292                                return ret;
2293                }
2294        }
2295
2296        return ret;
2297}
2298
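    /**
     * dpu_encoder_get_intf_mode - report the interface mode (video or
     *      command) of the current master, falling back to the first
     *      physical encoder when no master has been assigned yet.
     * @encoder: Pointer to drm encoder structure
     *
     * Return: the current dpu_intf_mode, or INTF_MODE_NONE on error
     */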
2299enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
2300{
2301        struct dpu_encoder_virt *dpu_enc = NULL;
2302        int i;
2303
2304        if (!encoder) {
2305                DPU_ERROR("invalid encoder\n");
2306                return INTF_MODE_NONE;
2307        }
2308        dpu_enc = to_dpu_encoder_virt(encoder);
2309
2310        if (dpu_enc->cur_master)
2311                return dpu_enc->cur_master->intf_mode;
2312
2313        for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2314                struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2315
2316                if (phys)
2317                        return phys->intf_mode;
2318        }
2319
2320        return INTF_MODE_NONE;
2321}
2322