1
2
3
4
5
6#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
7#include <linux/delay.h>
8#include "dpu_encoder_phys.h"
9#include "dpu_hw_interrupts.h"
10#include "dpu_hw_pingpong.h"
11#include "dpu_core_irq.h"
12#include "dpu_formats.h"
13#include "dpu_trace.h"
14#include "disp/msm_disp_snapshot.h"
15
16#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
17 (e) && (e)->base.parent ? \
18 (e)->base.parent->base.id : -1, \
19 (e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)
20
21#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
22 (e) && (e)->base.parent ? \
23 (e)->base.parent->base.id : -1, \
24 (e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)
25
26#define to_dpu_encoder_phys_cmd(x) \
27 container_of(x, struct dpu_encoder_phys_cmd, base)
28
29#define PP_TIMEOUT_MAX_TRIALS 10
30
31
32
33
34
35
36#define DEFAULT_TEARCHECK_SYNC_THRESH_START 4
37#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE 4
38
39#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000
40
41#define DPU_ENC_MAX_POLL_TIMEOUT_US 2000
42
43static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
44{
45 return (phys_enc->split_role != ENC_ROLE_SLAVE) ? true : false;
46}
47
48static bool dpu_encoder_phys_cmd_mode_fixup(
49 struct dpu_encoder_phys *phys_enc,
50 const struct drm_display_mode *mode,
51 struct drm_display_mode *adj_mode)
52{
53 DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
54 return true;
55}
56
57static void _dpu_encoder_phys_cmd_update_intf_cfg(
58 struct dpu_encoder_phys *phys_enc)
59{
60 struct dpu_encoder_phys_cmd *cmd_enc =
61 to_dpu_encoder_phys_cmd(phys_enc);
62 struct dpu_hw_ctl *ctl;
63 struct dpu_hw_intf_cfg intf_cfg = { 0 };
64
65 ctl = phys_enc->hw_ctl;
66 if (!ctl->ops.setup_intf_cfg)
67 return;
68
69 intf_cfg.intf = phys_enc->intf_idx;
70 intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
71 intf_cfg.stream_sel = cmd_enc->stream_sel;
72 intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
73 ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
74}
75
/*
 * dpu_encoder_phys_cmd_pp_tx_done_irq - PINGPONG "frame done" interrupt
 * handler. Reports the FRAME_EVENT_DONE to the parent encoder, decrements
 * the pending kickoff count and wakes anyone blocked on the kickoff wq.
 * @arg:     the struct dpu_encoder_phys registered as the irq callback arg
 * @irq_idx: hardware irq index (unused here)
 */
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");

	/* notify the parent encoder before touching the pending count */
	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
				phys_enc, event);

	/*
	 * Decrement under the encoder spinlock; atomic_add_unless never
	 * takes the count below zero even on a spurious interrupt.
	 */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			new_cnt, event);

	/* Signal any waiter in _dpu_encoder_phys_cmd_wait_for_idle() */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}
104
/*
 * dpu_encoder_phys_cmd_pp_rd_ptr_irq - PINGPONG read-pointer interrupt
 * handler; serves as the vblank event for command mode. Forwards the
 * vblank to the parent encoder and wakes pending vblank waiters.
 * @arg:     the struct dpu_encoder_phys registered as the irq callback arg
 * @irq_idx: hardware irq index (unused here)
 */
static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	/* release one waiter from dpu_encoder_phys_cmd_wait_for_vblank() */
	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}
124
/*
 * dpu_encoder_phys_cmd_ctl_start_irq - CTL "start" interrupt handler.
 * Decrements the pending ctl-start count and wakes waiters blocked in
 * _dpu_encoder_phys_cmd_wait_for_ctl_start().
 * @arg:     the struct dpu_encoder_phys registered as the irq callback arg
 * @irq_idx: hardware irq index (unused here)
 */
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	/* never drop below zero on a spurious interrupt */
	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiter for control start; shares the kickoff wq */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}
137
/*
 * dpu_encoder_phys_cmd_underrun_irq - interface underrun interrupt
 * handler; simply forwards the event to the parent encoder.
 * @arg:     the struct dpu_encoder_phys registered as the irq callback arg
 * @irq_idx: hardware irq index (unused here)
 */
static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
			phys_enc);
}
146
/*
 * dpu_encoder_phys_cmd_mode_set - cache the adjusted display mode and
 * resolve the hardware interrupt indices for the CTL, pingpong and
 * interface blocks assigned to this encoder.
 * @phys_enc: physical encoder
 * @mode:     requested mode (validated but otherwise unused)
 * @adj_mode: adjusted mode, cached for tearcheck/kickoff programming
 */
static void dpu_encoder_phys_cmd_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_irq *irq;

	if (!mode || !adj_mode) {
		DPU_ERROR("invalid args\n");
		return;
	}
	phys_enc->cached_mode = *adj_mode;
	DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
	drm_mode_debug_printmodeline(adj_mode);

	/* map each logical interrupt to its block-specific hw irq index */
	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->irq_idx = phys_enc->hw_ctl->caps->intr_start;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->irq_idx = phys_enc->hw_pp->caps->intr_done;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->irq_idx = phys_enc->hw_pp->caps->intr_rdptr;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->irq_idx = phys_enc->hw_intf->cap->intr_underrun;
}
176
/*
 * _dpu_encoder_phys_cmd_handle_ppdone_timeout - recovery path run when
 * the PINGPONG done interrupt never arrived for a kickoff.
 * @phys_enc: physical encoder that timed out
 *
 * Logs on the first timeout and again when PP_TIMEOUT_MAX_TRIALS is
 * reached (at which point the panel is reported dead), snapshots the
 * hardware state, flags the encoder for HW reset and notifies the parent
 * with a FRAME_EVENT_ERROR.
 *
 * Return: -ETIMEDOUT always, or -EINVAL if no pingpong block is bound.
 */
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	/* log only on the first occurrence and when declaring panel dead */
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		/* capture register state for debugging, stop vblank irqs */
		msm_disp_snapshot_state(drm_enc->dev);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				drm_enc, phys_enc, frame_event);

	return -ETIMEDOUT;
}
228
/*
 * _dpu_encoder_phys_cmd_wait_for_idle - block until the pending kickoff
 * count drops to zero (signalled by the PINGPONG done interrupt), with a
 * KICKOFF_TIMEOUT_MS deadline.
 * @phys_enc: physical encoder to wait on
 *
 * On timeout the ppdone recovery handler is run; on success the timeout
 * report counter is cleared.
 *
 * Return: 0 on success, -ETIMEDOUT (or other helper error) on failure.
 */
static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}
250
/*
 * dpu_encoder_phys_cmd_control_vblank_irq - reference-counted enable /
 * disable of the read-pointer (vblank) interrupt.
 * @phys_enc: physical encoder
 * @enable:   true to take a reference, false to drop one
 *
 * Only the master encoder registers the rd_ptr irq; slaves return early
 * with success. The irq is registered on the 0 -> 1 transition and
 * unregistered on the 1 -> 0 transition of vblank_refcount.
 *
 * Return: 0 on success, -EINVAL on refcount underflow or missing
 * pingpong block, or the register/unregister helper's error code.
 */
static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative refcount: a disable with no enable */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_RDPTR);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}
295
/*
 * dpu_encoder_phys_cmd_irq_control - register or unregister all of this
 * encoder's interrupts (pingpong done, underrun, vblank and, for the
 * master only, ctl-start). Disable mirrors the enable order in reverse.
 * @phys_enc: physical encoder
 * @enable:   true to register the irqs, false to unregister
 */
static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		/* only the master waits on ctl-start */
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_register_irq(phys_enc,
					INTR_IDX_CTL_START);
	} else {
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_unregister_irq(phys_enc,
					INTR_IDX_CTL_START);

		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
	}
}
321
322static void dpu_encoder_phys_cmd_tearcheck_config(
323 struct dpu_encoder_phys *phys_enc)
324{
325 struct dpu_encoder_phys_cmd *cmd_enc =
326 to_dpu_encoder_phys_cmd(phys_enc);
327 struct dpu_hw_tear_check tc_cfg = { 0 };
328 struct drm_display_mode *mode;
329 bool tc_enable = true;
330 u32 vsync_hz;
331 struct dpu_kms *dpu_kms;
332
333 if (!phys_enc->hw_pp) {
334 DPU_ERROR("invalid encoder\n");
335 return;
336 }
337 mode = &phys_enc->cached_mode;
338
339 DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
340
341 if (!phys_enc->hw_pp->ops.setup_tearcheck ||
342 !phys_enc->hw_pp->ops.enable_tearcheck) {
343 DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
344 return;
345 }
346
347 dpu_kms = phys_enc->dpu_kms;
348
349
350
351
352
353
354
355
356
357
358 vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
359 if (vsync_hz <= 0) {
360 DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
361 vsync_hz);
362 return;
363 }
364
365 tc_cfg.vsync_count = vsync_hz /
366 (mode->vtotal * drm_mode_vrefresh(mode));
367
368
369
370
371
372 tc_cfg.hw_vsync_mode = 1;
373 tc_cfg.sync_cfg_height = mode->vtotal * 2;
374 tc_cfg.vsync_init_val = mode->vdisplay;
375 tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
376 tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
377 tc_cfg.start_pos = mode->vdisplay;
378 tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
379
380 DPU_DEBUG_CMDENC(cmd_enc,
381 "tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
382 phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
383 mode->vtotal, drm_mode_vrefresh(mode));
384 DPU_DEBUG_CMDENC(cmd_enc,
385 "tc %d enable %u start_pos %u rd_ptr_irq %u\n",
386 phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
387 tc_cfg.rd_ptr_irq);
388 DPU_DEBUG_CMDENC(cmd_enc,
389 "tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
390 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
391 tc_cfg.vsync_count, tc_cfg.vsync_init_val);
392 DPU_DEBUG_CMDENC(cmd_enc,
393 "tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
394 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
395 tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);
396
397 phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
398 phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
399}
400
401static void _dpu_encoder_phys_cmd_pingpong_config(
402 struct dpu_encoder_phys *phys_enc)
403{
404 struct dpu_encoder_phys_cmd *cmd_enc =
405 to_dpu_encoder_phys_cmd(phys_enc);
406
407 if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
408 DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
409 return;
410 }
411
412 DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
413 phys_enc->hw_pp->idx - PINGPONG_0);
414 drm_mode_debug_printmodeline(&phys_enc->cached_mode);
415
416 _dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
417 dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
418}
419
420static bool dpu_encoder_phys_cmd_needs_single_flush(
421 struct dpu_encoder_phys *phys_enc)
422{
423
424
425
426
427 return false;
428}
429
/*
 * dpu_encoder_phys_cmd_enable_helper - program split config, interface
 * routing and tearcheck; the master additionally marks its interface
 * flush pending on the CTL. Also installed as the ->restore op, so it
 * must be safe to run again after idle power collapse.
 * @phys_enc: physical encoder
 */
static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	/* slaves stop here; only the master updates the pending flush */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->intf_idx);
}
450
/*
 * dpu_encoder_phys_cmd_enable - enable the physical encoder: run the
 * shared enable helper and flip the state to DPU_ENC_ENABLED.
 * @phys_enc: physical encoder
 *
 * A second enable while already enabled is rejected with an error log.
 */
static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}
471
472static void _dpu_encoder_phys_cmd_connect_te(
473 struct dpu_encoder_phys *phys_enc, bool enable)
474{
475 if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
476 return;
477
478 trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
479 phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
480}
481
/*
 * dpu_encoder_phys_cmd_prepare_idle_pc - prepare for idle power collapse
 * by disconnecting the external TE signal; it is reconnected in the
 * post-kickoff handler.
 * @phys_enc: physical encoder
 */
static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}
487
488static int dpu_encoder_phys_cmd_get_line_count(
489 struct dpu_encoder_phys *phys_enc)
490{
491 struct dpu_hw_pingpong *hw_pp;
492
493 if (!phys_enc->hw_pp)
494 return -EINVAL;
495
496 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
497 return -EINVAL;
498
499 hw_pp = phys_enc->hw_pp;
500 if (!hw_pp->ops.get_line_count)
501 return -EINVAL;
502
503 return hw_pp->ops.get_line_count(hw_pp);
504}
505
/*
 * dpu_encoder_phys_cmd_disable - disable the physical encoder: turn off
 * tearcheck and flip the state to DPU_ENC_DISABLED.
 * @phys_enc: physical encoder
 *
 * A second disable while already disabled is rejected with an error log.
 */
static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      phys_enc->enable_state);

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->hw_pp->ops.enable_tearcheck)
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
	phys_enc->enable_state = DPU_ENC_DISABLED;
}
528
/*
 * dpu_encoder_phys_cmd_destroy - free the encoder allocated by
 * dpu_encoder_phys_cmd_init(); the phys encoder is embedded in the
 * command encoder, so the container is what gets freed.
 * @phys_enc: physical encoder to destroy
 */
static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	kfree(to_dpu_encoder_phys_cmd(phys_enc));
}
536
/*
 * dpu_encoder_phys_cmd_get_hw_resources - report this encoder's hardware
 * usage: its interface slot operates in command mode.
 * @phys_enc: physical encoder
 * @hw_res:   resource table to fill in
 */
static void dpu_encoder_phys_cmd_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res)
{
	hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
}
543
/*
 * dpu_encoder_phys_cmd_prepare_for_kickoff - make sure the previous
 * frame has fully left the hardware before the next kickoff.
 * @phys_enc: physical encoder
 *
 * Waits for the pending kickoff count to drain; on timeout the count is
 * force-cleared so the upcoming kickoff is not blocked forever.
 */
static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark kickoff request as outstanding. If there are more than one,
	 * outstanding, then we have to wait for the previous one to complete
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}
576
577static bool dpu_encoder_phys_cmd_is_ongoing_pptx(
578 struct dpu_encoder_phys *phys_enc)
579{
580 struct dpu_hw_pp_vsync_info info;
581
582 if (!phys_enc)
583 return false;
584
585 phys_enc->hw_pp->ops.get_vsync_info(phys_enc->hw_pp, &info);
586 if (info.wr_ptr_line_count > 0 &&
587 info.wr_ptr_line_count < phys_enc->cached_mode.vdisplay)
588 return true;
589
590 return false;
591}
592
/*
 * dpu_encoder_phys_cmd_prepare_commit - turn hardware autorefresh off
 * before a commit so the driver owns frame kickoff.
 * @phys_enc: physical encoder (master only; slaves return immediately)
 *
 * After disabling autorefresh, polls until any in-flight pingpong
 * transfer finishes (bounded by KICKOFF_TIMEOUT_MS), then reconnects
 * the external TE signal.
 */
static void dpu_encoder_phys_cmd_prepare_commit(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	int trial = 0;

	if (!phys_enc)
		return;
	if (!phys_enc->hw_pp)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	/* If autorefresh is already disabled there is nothing to do */
	if (!phys_enc->hw_pp->ops.get_autorefresh(phys_enc->hw_pp, NULL))
		return;

	/*
	 * If autorefresh is enabled, disable it and make sure it is safe to
	 * proceed with current frame commit/push. Sequence followed is,
	 * i)   Disconnect TE so the panel stops triggering frames
	 * ii)  Disable autorefresh config
	 * iii) Poll for frame transfer ongoing to be false
	 * iv)  Enable TE back
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
	phys_enc->hw_pp->ops.setup_autorefresh(phys_enc->hw_pp, 0, false);

	do {
		udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
		/* give up after KICKOFF_TIMEOUT_MS worth of polling */
		if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
				> (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
			DPU_ERROR_CMDENC(cmd_enc,
					"disable autorefresh failed\n");
			break;
		}

		trial++;
	} while (dpu_encoder_phys_cmd_is_ongoing_pptx(phys_enc));

	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);

	DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc),
			 "disabled autorefresh\n");
}
639
640static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
641 struct dpu_encoder_phys *phys_enc)
642{
643 struct dpu_encoder_phys_cmd *cmd_enc =
644 to_dpu_encoder_phys_cmd(phys_enc);
645 struct dpu_encoder_wait_info wait_info;
646 int ret;
647
648 wait_info.wq = &phys_enc->pending_kickoff_wq;
649 wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
650 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
651
652 ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
653 &wait_info);
654 if (ret == -ETIMEDOUT) {
655 DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
656 ret = -EINVAL;
657 } else if (!ret)
658 ret = 0;
659
660 return ret;
661}
662
663static int dpu_encoder_phys_cmd_wait_for_tx_complete(
664 struct dpu_encoder_phys *phys_enc)
665{
666 int rc;
667
668 rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
669 if (rc) {
670 DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
671 DRMID(phys_enc->parent), rc,
672 phys_enc->intf_idx - INTF_0);
673 }
674
675 return rc;
676}
677
/*
 * dpu_encoder_phys_cmd_wait_for_commit_done - wait for the committed
 * configuration to take effect (CTL start).
 * @phys_enc: physical encoder
 *
 * Return: 0 for slave encoders (nothing to wait on), otherwise the
 * result of the ctl-start wait.
 */
static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only the master encoder synchronizes on ctl start */
	if (dpu_encoder_phys_cmd_is_master(phys_enc))
		return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);

	return 0;
}
687
/*
 * dpu_encoder_phys_cmd_wait_for_vblank - block until the next rd_ptr
 * (vblank) interrupt, with a KICKOFF_TIMEOUT_MS deadline.
 * @phys_enc: physical encoder (slaves return 0 immediately)
 *
 * Increments pending_vblank_cnt so the rd_ptr irq handler has something
 * to decrement, then waits on the vblank wait queue.
 *
 * Return: 0 on success, negative error code from the irq-wait helper.
 */
static int dpu_encoder_phys_cmd_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;
	struct dpu_encoder_wait_info wait_info;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only master deals with vblank; slaves have nothing to wait on */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return rc;

	wait_info.wq = &cmd_enc->pending_vblank_wq;
	wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	atomic_inc(&cmd_enc->pending_vblank_cnt);

	rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
			&wait_info);

	return rc;
}
712
/*
 * dpu_encoder_phys_cmd_handle_post_kickoff - post-kickoff hook.
 *
 * Reconnect the external TE signal: it may have been disconnected by
 * prepare_idle_pc or the autorefresh-disable sequence in prepare_commit.
 * @phys_enc: physical encoder
 */
static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/**
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}
722
/*
 * dpu_encoder_phys_cmd_trigger_start - kick the frame transfer by
 * delegating to the shared trigger-start helper.
 * @phys_enc: physical encoder
 */
static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}
728
/*
 * dpu_encoder_phys_cmd_init_ops - populate the physical-encoder ops
 * table with the command-mode implementations. Note that ->restore
 * reuses the enable helper to reprogram hardware after power collapse.
 * @ops: ops table to fill in
 */
static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->prepare_commit = dpu_encoder_phys_cmd_prepare_commit;
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->mode_set = dpu_encoder_phys_cmd_mode_set;
	ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->destroy = dpu_encoder_phys_cmd_destroy;
	ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}
753
/*
 * dpu_encoder_phys_cmd_init - allocate and initialize a command-mode
 * physical encoder.
 * @p: initialization parameters (interface index, parent, kms, role, lock)
 *
 * Sets up the ops table, per-interrupt callback descriptors (hw irq
 * indices are resolved later in mode_set), atomic counters and wait
 * queues. The caller owns the returned object and releases it through
 * the ->destroy op.
 *
 * Return: the embedded struct dpu_encoder_phys, or ERR_PTR(-ENOMEM).
 */
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;
	struct dpu_encoder_irq *irq;
	int i, ret = 0;

	DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);

	cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		ret = -ENOMEM;
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(ret);
	}
	phys_enc = &cmd_enc->base;
	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_CMD;
	phys_enc->enc_spinlock = p->enc_spinlock;
	cmd_enc->stream_sel = 0;
	phys_enc->enable_state = DPU_ENC_DISABLED;
	/* common irq callback setup; hw indices filled in at mode_set */
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->name = "ctl_start";
	irq->intr_idx = INTR_IDX_CTL_START;
	irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->name = "pp_done";
	irq->intr_idx = INTR_IDX_PINGPONG;
	irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->name = "pp_rd_ptr";
	irq->intr_idx = INTR_IDX_RDPTR;
	irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}
821