#include <linux/delay.h>
#include "dpu_hwio.h"
#include "dpu_hw_ctl.h"
#include "dpu_kms.h"
#include "dpu_trace.h"

#define   CTL_LAYER(lm)                 \
	(((lm) == LM_5) ? (0x024) : (((lm) - LM_0) * 0x004))
#define   CTL_LAYER_EXT(lm)             \
	(0x40 + (((lm) - LM_0) * 0x004))
#define   CTL_LAYER_EXT2(lm)            \
	(0x70 + (((lm) - LM_0) * 0x004))
#define   CTL_LAYER_EXT3(lm)            \
	(0xA0 + (((lm) - LM_0) * 0x004))
#define   CTL_TOP                       0x014
#define   CTL_FLUSH                     0x018
#define   CTL_START                     0x01C
#define   CTL_PREPARE                   0x0d0
#define   CTL_SW_RESET                  0x030
#define   CTL_LAYER_EXTN_OFFSET         0x40
#define   CTL_MERGE_3D_ACTIVE           0x0E4
#define   CTL_WB_ACTIVE                 0x0EC
#define   CTL_INTF_ACTIVE               0x0F4
#define   CTL_MERGE_3D_FLUSH            0x100
#define   CTL_DSC_ACTIVE                0x0E8
#define   CTL_DSC_FLUSH                 0x104
#define   CTL_WB_FLUSH                  0x108
#define   CTL_INTF_FLUSH                0x110
#define   CTL_INTF_MASTER               0x134
#define   CTL_FETCH_PIPE_ACTIVE         0x0FC

#define CTL_MIXER_BORDER_OUT            BIT(24)
#define CTL_FLUSH_MASK_CTL              BIT(17)

#define DPU_REG_RESET_TIMEOUT_US        2000
#define MERGE_3D_IDX                    23
#define DSC_IDX                         22
#define INTF_IDX                        31
#define WB_IDX                          16
#define CTL_INVALID_BIT                 0xffff
#define CTL_DEFAULT_GROUP_ID            0xf

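/*
 * Map each SSPP id to its bit position in CTL_FETCH_PIPE_ACTIVE;
 * entries marked CTL_INVALID_BIT have no fetch-active control bit.
 */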
static const u32 fetch_tbl[SSPP_MAX] = {CTL_INVALID_BIT, 16, 17, 18, 19,
	CTL_INVALID_BIT, CTL_INVALID_BIT, CTL_INVALID_BIT, CTL_INVALID_BIT, 0,
	1, 2, 3, CTL_INVALID_BIT, CTL_INVALID_BIT};

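/*
 * Look up the catalog entry for @ctl and fill in the register map @b;
 * returns an ERR_PTR if the CTL block is not described in the catalog.
 */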
static const struct dpu_ctl_cfg *_ctl_offset(enum dpu_ctl ctl,
		const struct dpu_mdss_cfg *m,
		void __iomem *addr,
		struct dpu_hw_blk_reg_map *b)
{
	int i;

	for (i = 0; i < m->ctl_count; i++) {
		if (ctl == m->ctl[i].id) {
			b->base_off = addr;
			b->blk_off = m->ctl[i].base;
			b->length = m->ctl[i].len;
			b->hwversion = m->hwversion;
			b->log_mask = DPU_DBG_MASK_CTL;
			return &m->ctl[i];
		}
	}
	return ERR_PTR(-ENOMEM);
}

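/* Return the max blend stages of mixer @lm, or -EINVAL if it is unknown. */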
static int _mixer_stages(const struct dpu_lm_cfg *mixer, int count,
		enum dpu_lm lm)
{
	int i;
	int stages = -EINVAL;

	for (i = 0; i < count; i++) {
		if (lm == mixer[i].id) {
			stages = mixer[i].sblk->maxblendstages;
			break;
		}
	}

	return stages;
}

static inline u32 dpu_hw_ctl_get_flush_register(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;

	return DPU_REG_READ(c, CTL_FLUSH);
}

static inline void dpu_hw_ctl_trigger_start(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_trigger_start(ctx->pending_flush_mask,
				       dpu_hw_ctl_get_flush_register(ctx));
	DPU_REG_WRITE(&ctx->hw, CTL_START, 0x1);
}

static inline bool dpu_hw_ctl_is_started(struct dpu_hw_ctl *ctx)
{
	return !!(DPU_REG_READ(&ctx->hw, CTL_START) & BIT(0));
}

static inline void dpu_hw_ctl_trigger_pending(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_trigger_prepare(ctx->pending_flush_mask,
					 dpu_hw_ctl_get_flush_register(ctx));
	DPU_REG_WRITE(&ctx->hw, CTL_PREPARE, 0x1);
}

static inline void dpu_hw_ctl_clear_pending_flush(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_clear_pending_flush(ctx->pending_flush_mask,
					     dpu_hw_ctl_get_flush_register(ctx));
	ctx->pending_flush_mask = 0x0;
}

static inline void dpu_hw_ctl_update_pending_flush(struct dpu_hw_ctl *ctx,
		u32 flushbits)
{
	trace_dpu_hw_ctl_update_pending_flush(flushbits,
					      ctx->pending_flush_mask);
	ctx->pending_flush_mask |= flushbits;
}

static u32 dpu_hw_ctl_get_pending_flush(struct dpu_hw_ctl *ctx)
{
	return ctx->pending_flush_mask;
}

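/*
 * Active-CTL (v1) flush: program the sub-block flush registers first
 * (merge_3d, interface, writeback), then kick the top-level CTL_FLUSH;
 * the aggregate MERGE_3D/INTF/WB bits in the pending mask select which
 * of the sub-block masks written above take effect.
 */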
static inline void dpu_hw_ctl_trigger_flush_v1(struct dpu_hw_ctl *ctx)
{
	if (ctx->pending_flush_mask & BIT(MERGE_3D_IDX))
		DPU_REG_WRITE(&ctx->hw, CTL_MERGE_3D_FLUSH,
			      ctx->pending_merge_3d_flush_mask);
	if (ctx->pending_flush_mask & BIT(INTF_IDX))
		DPU_REG_WRITE(&ctx->hw, CTL_INTF_FLUSH,
			      ctx->pending_intf_flush_mask);
	if (ctx->pending_flush_mask & BIT(WB_IDX))
		DPU_REG_WRITE(&ctx->hw, CTL_WB_FLUSH,
			      ctx->pending_wb_flush_mask);

	DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}

static inline void dpu_hw_ctl_trigger_flush(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_trigger_pending_flush(ctx->pending_flush_mask,
					       dpu_hw_ctl_get_flush_register(ctx));
	DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}

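/*
 * CTL_FLUSH bit for each source pipe; note the mapping is not
 * contiguous (e.g. VIG3/RGB3 use the high bits 18/19).
 */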
static uint32_t dpu_hw_ctl_get_bitmask_sspp(struct dpu_hw_ctl *ctx,
	enum dpu_sspp sspp)
{
	uint32_t flushbits = 0;

	switch (sspp) {
	case SSPP_VIG0:
		flushbits = BIT(0);
		break;
	case SSPP_VIG1:
		flushbits = BIT(1);
		break;
	case SSPP_VIG2:
		flushbits = BIT(2);
		break;
	case SSPP_VIG3:
		flushbits = BIT(18);
		break;
	case SSPP_RGB0:
		flushbits = BIT(3);
		break;
	case SSPP_RGB1:
		flushbits = BIT(4);
		break;
	case SSPP_RGB2:
		flushbits = BIT(5);
		break;
	case SSPP_RGB3:
		flushbits = BIT(19);
		break;
	case SSPP_DMA0:
		flushbits = BIT(11);
		break;
	case SSPP_DMA1:
		flushbits = BIT(12);
		break;
	case SSPP_DMA2:
		flushbits = BIT(24);
		break;
	case SSPP_DMA3:
		flushbits = BIT(25);
		break;
	case SSPP_CURSOR0:
		flushbits = BIT(22);
		break;
	case SSPP_CURSOR1:
		flushbits = BIT(23);
		break;
	default:
		break;
	}

	return flushbits;
}

static uint32_t dpu_hw_ctl_get_bitmask_mixer(struct dpu_hw_ctl *ctx,
	enum dpu_lm lm)
{
	uint32_t flushbits = 0;

	switch (lm) {
	case LM_0:
		flushbits = BIT(6);
		break;
	case LM_1:
		flushbits = BIT(7);
		break;
	case LM_2:
		flushbits = BIT(8);
		break;
	case LM_3:
		flushbits = BIT(9);
		break;
	case LM_4:
		flushbits = BIT(10);
		break;
	case LM_5:
		flushbits = BIT(20);
		break;
	default:
		return -EINVAL;
	}

	flushbits |= CTL_FLUSH_MASK_CTL;

	return flushbits;
}

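/*
 * On hardware without DPU_CTL_ACTIVE_CFG the interface flush bits live
 * directly in CTL_FLUSH, with INTF_0..INTF_3 at bits 31..28.
 */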
static void dpu_hw_ctl_update_pending_flush_intf(struct dpu_hw_ctl *ctx,
		enum dpu_intf intf)
{
	switch (intf) {
	case INTF_0:
		ctx->pending_flush_mask |= BIT(31);
		break;
	case INTF_1:
		ctx->pending_flush_mask |= BIT(30);
		break;
	case INTF_2:
		ctx->pending_flush_mask |= BIT(29);
		break;
	case INTF_3:
		ctx->pending_flush_mask |= BIT(28);
		break;
	default:
		break;
	}
}

static void dpu_hw_ctl_update_pending_flush_wb(struct dpu_hw_ctl *ctx,
		enum dpu_wb wb)
{
	switch (wb) {
	case WB_0:
	case WB_1:
	case WB_2:
		ctx->pending_flush_mask |= BIT(WB_IDX);
		break;
	default:
		break;
	}
}

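/*
 * The v1 variants below accumulate the per-block flush masks separately
 * and only set the aggregate WB/INTF/MERGE_3D bit in the main pending
 * mask; dpu_hw_ctl_trigger_flush_v1() then writes both.
 */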
static void dpu_hw_ctl_update_pending_flush_wb_v1(struct dpu_hw_ctl *ctx,
		enum dpu_wb wb)
{
	ctx->pending_wb_flush_mask |= BIT(wb - WB_0);
	ctx->pending_flush_mask |= BIT(WB_IDX);
}

static void dpu_hw_ctl_update_pending_flush_intf_v1(struct dpu_hw_ctl *ctx,
		enum dpu_intf intf)
{
	ctx->pending_intf_flush_mask |= BIT(intf - INTF_0);
	ctx->pending_flush_mask |= BIT(INTF_IDX);
}

static void dpu_hw_ctl_update_pending_flush_merge_3d_v1(struct dpu_hw_ctl *ctx,
		enum dpu_merge_3d merge_3d)
{
	ctx->pending_merge_3d_flush_mask |= BIT(merge_3d - MERGE_3D_0);
	ctx->pending_flush_mask |= BIT(MERGE_3D_IDX);
}

static uint32_t dpu_hw_ctl_get_bitmask_dspp(struct dpu_hw_ctl *ctx,
	enum dpu_dspp dspp)
{
	uint32_t flushbits = 0;

	switch (dspp) {
	case DSPP_0:
		flushbits = BIT(13);
		break;
	case DSPP_1:
		flushbits = BIT(14);
		break;
	case DSPP_2:
		flushbits = BIT(15);
		break;
	case DSPP_3:
		flushbits = BIT(21);
		break;
	default:
		return 0;
	}

	return flushbits;
}

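/*
 * Poll CTL_SW_RESET until the self-clearing reset bit drops or
 * @timeout_us elapses; returns the last status read (non-zero on timeout).
 */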
static u32 dpu_hw_ctl_poll_reset_status(struct dpu_hw_ctl *ctx, u32 timeout_us)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	ktime_t timeout;
	u32 status;

	timeout = ktime_add_us(ktime_get(), timeout_us);

	/*
	 * it takes around 30us to have mdp finish resetting its ctl path
	 * poll every 50us so that reset should be completed at 1st poll
	 */
	do {
		status = DPU_REG_READ(c, CTL_SW_RESET);
		status &= 0x1;
		if (status)
			usleep_range(20, 50);
	} while (status && ktime_compare_safe(ktime_get(), timeout) < 0);

	return status;
}

static int dpu_hw_ctl_reset_control(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;

	pr_debug("issuing hw ctl reset for ctl:%d\n", ctx->idx);
	DPU_REG_WRITE(c, CTL_SW_RESET, 0x1);
	if (dpu_hw_ctl_poll_reset_status(ctx, DPU_REG_RESET_TIMEOUT_US))
		return -EINVAL;

	return 0;
}

static int dpu_hw_ctl_wait_reset_status(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 status;

	status = DPU_REG_READ(c, CTL_SW_RESET);
	status &= 0x01;
	if (!status)
		return 0;

	pr_debug("hw ctl reset is set for ctl:%d\n", ctx->idx);
	if (dpu_hw_ctl_poll_reset_status(ctx, DPU_REG_RESET_TIMEOUT_US)) {
		pr_err("hw recovery is not complete for ctl:%d\n", ctx->idx);
		return -EINVAL;
	}

	return 0;
}

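/*
 * Clear the layer-mixer staging registers (base and EXT/EXT2/EXT3)
 * for every mixer known to this CTL, and the fetch-active pipes.
 */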
static void dpu_hw_ctl_clear_all_blendstages(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	int i;

	for (i = 0; i < ctx->mixer_count; i++) {
		enum dpu_lm mixer_id = ctx->mixer_hw_caps[i].id;

		DPU_REG_WRITE(c, CTL_LAYER(mixer_id), 0);
		DPU_REG_WRITE(c, CTL_LAYER_EXT(mixer_id), 0);
		DPU_REG_WRITE(c, CTL_LAYER_EXT2(mixer_id), 0);
		DPU_REG_WRITE(c, CTL_LAYER_EXT3(mixer_id), 0);
	}

	DPU_REG_WRITE(c, CTL_FETCH_PIPE_ACTIVE, 0);
}

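/*
 * Program the staging registers for mixer @lm. Each pipe's blend stage
 * is encoded as a 3-bit field in CTL_LAYER with an overflow bit in
 * CTL_LAYER_EXT for stages above 7, while DMA2/DMA3, the cursor pipes
 * and all rect-1 (multirect) pipes use 4-bit fields in EXT2/EXT3.
 */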
static void dpu_hw_ctl_setup_blendstage(struct dpu_hw_ctl *ctx,
	enum dpu_lm lm, struct dpu_hw_stage_cfg *stage_cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 mixercfg = 0, mixercfg_ext = 0, mix, ext;
	u32 mixercfg_ext2 = 0, mixercfg_ext3 = 0;
	int i, j;
	int stages;
	int pipes_per_stage;

	stages = _mixer_stages(ctx->mixer_hw_caps, ctx->mixer_count, lm);
	if (stages < 0)
		return;

	if (test_bit(DPU_MIXER_SOURCESPLIT,
		     &ctx->mixer_hw_caps->features))
		pipes_per_stage = PIPES_PER_STAGE;
	else
		pipes_per_stage = 1;

	mixercfg = CTL_MIXER_BORDER_OUT; /* always set BORDER_OUT */

	if (!stage_cfg)
		goto exit;

	for (i = 0; i <= stages; i++) {
		/* overflow to ext register if 'i + 1 > 7' */
		mix = (i + 1) & 0x7;
		ext = i >= 7;

		for (j = 0 ; j < pipes_per_stage; j++) {
			enum dpu_sspp_multirect_index rect_index =
				stage_cfg->multirect_index[i][j];

			switch (stage_cfg->stage[i][j]) {
			case SSPP_VIG0:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 0;
				} else {
					mixercfg |= mix << 0;
					mixercfg_ext |= ext << 0;
				}
				break;
			case SSPP_VIG1:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 4;
				} else {
					mixercfg |= mix << 3;
					mixercfg_ext |= ext << 2;
				}
				break;
			case SSPP_VIG2:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 8;
				} else {
					mixercfg |= mix << 6;
					mixercfg_ext |= ext << 4;
				}
				break;
			case SSPP_VIG3:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 12;
				} else {
					mixercfg |= mix << 26;
					mixercfg_ext |= ext << 6;
				}
				break;
			case SSPP_RGB0:
				mixercfg |= mix << 9;
				mixercfg_ext |= ext << 8;
				break;
			case SSPP_RGB1:
				mixercfg |= mix << 12;
				mixercfg_ext |= ext << 10;
				break;
			case SSPP_RGB2:
				mixercfg |= mix << 15;
				mixercfg_ext |= ext << 12;
				break;
			case SSPP_RGB3:
				mixercfg |= mix << 29;
				mixercfg_ext |= ext << 14;
				break;
			case SSPP_DMA0:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 8;
				} else {
					mixercfg |= mix << 18;
					mixercfg_ext |= ext << 16;
				}
				break;
			case SSPP_DMA1:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 12;
				} else {
					mixercfg |= mix << 21;
					mixercfg_ext |= ext << 18;
				}
				break;
			case SSPP_DMA2:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 16;
				} else {
					mix |= (i + 1) & 0xF;
					mixercfg_ext2 |= mix << 0;
				}
				break;
			case SSPP_DMA3:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 20;
				} else {
					mix |= (i + 1) & 0xF;
					mixercfg_ext2 |= mix << 4;
				}
				break;
			case SSPP_CURSOR0:
				mixercfg_ext |= ((i + 1) & 0xF) << 20;
				break;
			case SSPP_CURSOR1:
				mixercfg_ext |= ((i + 1) & 0xF) << 26;
				break;
			default:
				break;
			}
		}
	}

exit:
	DPU_REG_WRITE(c, CTL_LAYER(lm), mixercfg);
	DPU_REG_WRITE(c, CTL_LAYER_EXT(lm), mixercfg_ext);
	DPU_REG_WRITE(c, CTL_LAYER_EXT2(lm), mixercfg_ext2);
	DPU_REG_WRITE(c, CTL_LAYER_EXT3(lm), mixercfg_ext3);
}

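/*
 * Program the active-CTL interface configuration: the mode select in
 * CTL_TOP plus the INTF/WB/MERGE_3D/DSC active blocks for this path.
 */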
static void dpu_hw_ctl_intf_cfg_v1(struct dpu_hw_ctl *ctx,
		struct dpu_hw_intf_cfg *cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_active = 0;
	u32 wb_active = 0;
	u32 mode_sel = 0;

	/* CTL_TOP[31:28] carries group_id to collate CTL paths
	 * per VM. Explicitly disable it until VM support is
	 * added in SW.
	 */
	if (test_bit(DPU_CTL_VM_CFG, &ctx->caps->features))
		mode_sel = CTL_DEFAULT_GROUP_ID << 28;

	if (cfg->dsc)
		DPU_REG_WRITE(&ctx->hw, CTL_DSC_FLUSH, cfg->dsc);

	if (cfg->intf_mode_sel == DPU_CTL_MODE_SEL_CMD)
		mode_sel |= BIT(17);

	intf_active = DPU_REG_READ(c, CTL_INTF_ACTIVE);
	wb_active = DPU_REG_READ(c, CTL_WB_ACTIVE);

	if (cfg->intf)
		intf_active |= BIT(cfg->intf - INTF_0);

	if (cfg->wb)
		wb_active |= BIT(cfg->wb - WB_0);

	DPU_REG_WRITE(c, CTL_TOP, mode_sel);
	DPU_REG_WRITE(c, CTL_INTF_ACTIVE, intf_active);
	DPU_REG_WRITE(c, CTL_WB_ACTIVE, wb_active);

	if (cfg->merge_3d)
		DPU_REG_WRITE(c, CTL_MERGE_3D_ACTIVE,
			      BIT(cfg->merge_3d - MERGE_3D_0));
	if (cfg->dsc) {
		DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, DSC_IDX);
		DPU_REG_WRITE(c, CTL_DSC_ACTIVE, cfg->dsc);
	}
}

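/*
 * Legacy (pre-active-CTL) interface configuration: interface id, 3D mux
 * mode, writeback select and vid/cmd mode are all packed into CTL_TOP.
 */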
static void dpu_hw_ctl_intf_cfg(struct dpu_hw_ctl *ctx,
		struct dpu_hw_intf_cfg *cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_cfg = 0;

	intf_cfg |= (cfg->intf & 0xF) << 4;

	if (cfg->mode_3d) {
		intf_cfg |= BIT(19);
		intf_cfg |= (cfg->mode_3d - 0x1) << 20;
	}

	if (cfg->wb)
		intf_cfg |= (cfg->wb & 0x3) + 2;

	switch (cfg->intf_mode_sel) {
	case DPU_CTL_MODE_SEL_VID:
		intf_cfg &= ~BIT(17);
		intf_cfg &= ~(0x3 << 15);
		break;
	case DPU_CTL_MODE_SEL_CMD:
		intf_cfg |= BIT(17);
		intf_cfg |= ((cfg->stream_sel & 0x3) << 15);
		break;
	default:
		pr_err("unknown interface type %d\n", cfg->intf_mode_sel);
		return;
	}

	DPU_REG_WRITE(c, CTL_TOP, intf_cfg);
}

static void dpu_hw_ctl_reset_intf_cfg_v1(struct dpu_hw_ctl *ctx,
		struct dpu_hw_intf_cfg *cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_active = 0;
	u32 wb_active = 0;
	u32 merge3d_active = 0;

	/*
	 * This function resets each portion of the CTL path: the sspps
	 * staged on the lm, the merge_3d block, interfaces, writeback etc.,
	 * to ensure a clean teardown of the pipeline. It is used for
	 * writeback to begin with, but upon further validation can be
	 * extended to all interfaces.
	 */
	if (cfg->merge_3d) {
		merge3d_active = DPU_REG_READ(c, CTL_MERGE_3D_ACTIVE);
		merge3d_active &= ~BIT(cfg->merge_3d - MERGE_3D_0);
		DPU_REG_WRITE(c, CTL_MERGE_3D_ACTIVE,
			      merge3d_active);
	}

	dpu_hw_ctl_clear_all_blendstages(ctx);

	if (cfg->intf) {
		intf_active = DPU_REG_READ(c, CTL_INTF_ACTIVE);
		intf_active &= ~BIT(cfg->intf - INTF_0);
		DPU_REG_WRITE(c, CTL_INTF_ACTIVE, intf_active);
	}

	if (cfg->wb) {
		wb_active = DPU_REG_READ(c, CTL_WB_ACTIVE);
		wb_active &= ~BIT(cfg->wb - WB_0);
		DPU_REG_WRITE(c, CTL_WB_ACTIVE, wb_active);
	}
}

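/*
 * Translate the @fetch_active SSPP bitmap into CTL_FETCH_PIPE_ACTIVE
 * bits via fetch_tbl; a NULL bitmap disables all fetch pipes.
 */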
static void dpu_hw_ctl_set_fetch_pipe_active(struct dpu_hw_ctl *ctx,
	unsigned long *fetch_active)
{
	int i;
	u32 val = 0;

	if (fetch_active) {
		for (i = 0; i < SSPP_MAX; i++) {
			if (test_bit(i, fetch_active) &&
			    fetch_tbl[i] != CTL_INVALID_BIT)
				val |= BIT(fetch_tbl[i]);
		}
	}

	DPU_REG_WRITE(&ctx->hw, CTL_FETCH_PIPE_ACTIVE, val);
}

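/*
 * Wire up the ops table: hardware with DPU_CTL_ACTIVE_CFG gets the v1
 * (active-CTL) handlers, everything else the legacy ones.
 */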
static void _setup_ctl_ops(struct dpu_hw_ctl_ops *ops,
		unsigned long cap)
{
	if (cap & BIT(DPU_CTL_ACTIVE_CFG)) {
		ops->trigger_flush = dpu_hw_ctl_trigger_flush_v1;
		ops->setup_intf_cfg = dpu_hw_ctl_intf_cfg_v1;
		ops->reset_intf_cfg = dpu_hw_ctl_reset_intf_cfg_v1;
		ops->update_pending_flush_intf =
			dpu_hw_ctl_update_pending_flush_intf_v1;
		ops->update_pending_flush_merge_3d =
			dpu_hw_ctl_update_pending_flush_merge_3d_v1;
		ops->update_pending_flush_wb = dpu_hw_ctl_update_pending_flush_wb_v1;
	} else {
		ops->trigger_flush = dpu_hw_ctl_trigger_flush;
		ops->setup_intf_cfg = dpu_hw_ctl_intf_cfg;
		ops->update_pending_flush_intf =
			dpu_hw_ctl_update_pending_flush_intf;
		ops->update_pending_flush_wb = dpu_hw_ctl_update_pending_flush_wb;
	}
	ops->clear_pending_flush = dpu_hw_ctl_clear_pending_flush;
	ops->update_pending_flush = dpu_hw_ctl_update_pending_flush;
	ops->get_pending_flush = dpu_hw_ctl_get_pending_flush;
	ops->get_flush_register = dpu_hw_ctl_get_flush_register;
	ops->trigger_start = dpu_hw_ctl_trigger_start;
	ops->is_started = dpu_hw_ctl_is_started;
	ops->trigger_pending = dpu_hw_ctl_trigger_pending;
	ops->reset = dpu_hw_ctl_reset_control;
	ops->wait_reset_status = dpu_hw_ctl_wait_reset_status;
	ops->clear_all_blendstages = dpu_hw_ctl_clear_all_blendstages;
	ops->setup_blendstage = dpu_hw_ctl_setup_blendstage;
	ops->get_bitmask_sspp = dpu_hw_ctl_get_bitmask_sspp;
	ops->get_bitmask_mixer = dpu_hw_ctl_get_bitmask_mixer;
	ops->get_bitmask_dspp = dpu_hw_ctl_get_bitmask_dspp;
	if (cap & BIT(DPU_CTL_FETCH_ACTIVE))
		ops->set_active_pipes = dpu_hw_ctl_set_fetch_pipe_active;
}

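/*
 * Allocate a CTL context for @idx, resolve its register block from the
 * catalog @m and set up the feature-dependent ops.
 */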
struct dpu_hw_ctl *dpu_hw_ctl_init(enum dpu_ctl idx,
		void __iomem *addr,
		const struct dpu_mdss_cfg *m)
{
	struct dpu_hw_ctl *c;
	const struct dpu_ctl_cfg *cfg;

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	cfg = _ctl_offset(idx, m, addr, &c->hw);
	if (IS_ERR_OR_NULL(cfg)) {
		kfree(c);
		pr_err("failed to create dpu_hw_ctl %d\n", idx);
		return ERR_PTR(-EINVAL);
	}

	c->caps = cfg;
	_setup_ctl_ops(&c->ops, c->caps->features);
	c->idx = idx;
	c->mixer_count = m->mixer_count;
	c->mixer_hw_caps = m->mixer;

	return c;
}

void dpu_hw_ctl_destroy(struct dpu_hw_ctl *ctx)
{
	kfree(ctx);
}