#include <linux/delay.h>

#include "bdisp.h"
#include "bdisp-filter.h"
#include "bdisp-reg.h"

/* Max width of the source frame handled by a single node */
#define MAX_SRC_WIDTH		2048

/* Reset poll configuration */
#define POLL_RST_MAX		50
#define POLL_RST_DELAY_MS	20

enum bdisp_target_plan {
	BDISP_RGB,
	BDISP_Y,
	BDISP_CBCR
};

struct bdisp_op_cfg {
	bool cconv;		/* RGB <-> YUV color conversion needed */
	bool hflip;		/* horizontal flip */
	bool vflip;		/* vertical flip */
	bool wide;		/* source wider than MAX_SRC_WIDTH */
	bool scale;		/* scaling (resize) needed */
	u16 h_inc;		/* horizontal increment (6.10 fixed point) */
	u16 v_inc;		/* vertical increment (6.10 fixed point) */
	bool src_interlaced;	/* source is interlaced */
	u8 src_nbp;		/* number of source planes */
	bool src_yuv;		/* source is a YUV format */
	bool src_420;		/* source is 4:2:0 subsampled */
	u8 dst_nbp;		/* number of destination planes */
	bool dst_yuv;		/* destination is a YUV format */
	bool dst_420;		/* destination is 4:2:0 subsampled */
};

struct bdisp_filter_addr {
	u16 min;		/* min scale factor for this filter */
	u16 max;		/* max scale factor for this filter */
	void *virt;		/* filter table virtual address */
	dma_addr_t paddr;	/* filter table DMA address */
};

static struct bdisp_filter_addr bdisp_h_filter[NB_H_FILTER];
static struct bdisp_filter_addr bdisp_v_filter[NB_V_FILTER];
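
/**
 * bdisp_hw_reset - software reset of the BDISP IP
 * @bdisp:	bdisp entity
 *
 * Masks the interrupts, toggles the reset bit in BLT_CTL and polls BLT_STA1
 * until the IP reports idle.
 *
 * Return: 0 on success, -EAGAIN if the reset did not complete in time.
 */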
int bdisp_hw_reset(struct bdisp_dev *bdisp)
{
	unsigned int i;

	dev_dbg(bdisp->dev, "%s\n", __func__);

	/* Mask interrupts */
	writel(0, bdisp->regs + BLT_ITM0);

	/* Reset */
	writel(readl(bdisp->regs + BLT_CTL) | BLT_CTL_RESET,
	       bdisp->regs + BLT_CTL);
	writel(0, bdisp->regs + BLT_CTL);

	/* Wait for reset done */
	for (i = 0; i < POLL_RST_MAX; i++) {
		if (readl(bdisp->regs + BLT_STA1) & BLT_STA1_IDLE)
			break;
		msleep(POLL_RST_DELAY_MS);
	}
	if (i == POLL_RST_MAX)
		dev_err(bdisp->dev, "Reset timeout\n");

	return (i == POLL_RST_MAX) ? -EAGAIN : 0;
}
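
/**
 * bdisp_hw_get_and_clear_irq - read, check and acknowledge the interrupt
 * @bdisp:	bdisp entity
 *
 * Return: 0 if the expected queue-1 'last node' interrupt (BLT_ITS_AQ1_LNA)
 * was pending, -1 otherwise.
 */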
int bdisp_hw_get_and_clear_irq(struct bdisp_dev *bdisp)
{
	u32 its;

	its = readl(bdisp->regs + BLT_ITS);

	/* Check for the only expected interrupt: AQ1 last node reached */
	if (!(its & BLT_ITS_AQ1_LNA)) {
		dev_dbg(bdisp->dev, "Unexpected IT status: 0x%08X\n", its);
		writel(its, bdisp->regs + BLT_ITS);
		return -1;
	}

	/* Acknowledge and mask the interrupt */
	writel(its, bdisp->regs + BLT_ITS);
	writel(0, bdisp->regs + BLT_ITM0);

	return 0;
}
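
/**
 * bdisp_hw_free_nodes - release the nodes allocated by bdisp_hw_alloc_nodes
 * @ctx:	device context
 *
 * All the nodes live in a single DMA area, so freeing the first one releases
 * them all.
 */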
void bdisp_hw_free_nodes(struct bdisp_ctx *ctx)
{
	if (ctx && ctx->node[0]) {
		DEFINE_DMA_ATTRS(attrs);

		dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
		dma_free_attrs(ctx->bdisp_dev->dev,
			       sizeof(struct bdisp_node) * MAX_NB_NODE,
			       ctx->node[0], ctx->node_paddr[0], &attrs);
	}
}
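
/**
 * bdisp_hw_alloc_nodes - allocate the nodes of the processing chain
 * @ctx:	device context
 *
 * Allocates a single write-combined DMA area holding MAX_NB_NODE nodes and
 * records the virtual and DMA address of each node in the context.
 *
 * Return: 0 on success, -ENOMEM on allocation failure.
 */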
int bdisp_hw_alloc_nodes(struct bdisp_ctx *ctx)
{
	struct device *dev = ctx->bdisp_dev->dev;
	unsigned int i, node_size = sizeof(struct bdisp_node);
	void *base;
	dma_addr_t paddr;
	DEFINE_DMA_ATTRS(attrs);

	/* Allocate all the nodes within a single memory area */
	dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
	base = dma_alloc_attrs(dev, node_size * MAX_NB_NODE, &paddr,
			       GFP_KERNEL | GFP_DMA, &attrs);
	if (!base) {
		dev_err(dev, "%s no mem\n", __func__);
		return -ENOMEM;
	}

	memset(base, 0, node_size * MAX_NB_NODE);

	for (i = 0; i < MAX_NB_NODE; i++) {
		ctx->node[i] = base;
		ctx->node_paddr[i] = paddr;
		dev_dbg(dev, "node[%d]=0x%p (paddr=%pad)\n", i, ctx->node[i],
			&paddr);
		base += node_size;
		paddr += node_size;
	}

	return 0;
}
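
/**
 * bdisp_hw_free_filters - release the shared filter coefficient tables
 * @dev:	device
 *
 * The horizontal and vertical filter tables share a single DMA allocation,
 * anchored at the first horizontal filter entry.
 */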
void bdisp_hw_free_filters(struct device *dev)
{
	int size = (BDISP_HF_NB * NB_H_FILTER) + (BDISP_VF_NB * NB_V_FILTER);

	if (bdisp_h_filter[0].virt) {
		DEFINE_DMA_ATTRS(attrs);

		dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
		dma_free_attrs(dev, size, bdisp_h_filter[0].virt,
			       bdisp_h_filter[0].paddr, &attrs);
	}
}
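
/**
 * bdisp_hw_alloc_filters - allocate and populate the filter coefficient tables
 * @dev:	device
 *
 * Copies the horizontal and vertical filter coefficients into a single
 * write-combined DMA area and records, for each scaling range, the DMA
 * address of its coefficients.
 *
 * Return: 0 on success, -ENOMEM on allocation failure.
 */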
int bdisp_hw_alloc_filters(struct device *dev)
{
	unsigned int i, size;
	void *base;
	dma_addr_t paddr;
	DEFINE_DMA_ATTRS(attrs);

	/* Allocate all the filters within a single memory area */
	size = (BDISP_HF_NB * NB_H_FILTER) + (BDISP_VF_NB * NB_V_FILTER);
	dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
	base = dma_alloc_attrs(dev, size, &paddr, GFP_KERNEL | GFP_DMA, &attrs);
	if (!base)
		return -ENOMEM;

	/* Setup the horizontal filter addresses and coefficients */
	for (i = 0; i < NB_H_FILTER; i++) {
		bdisp_h_filter[i].min = bdisp_h_spec[i].min;
		bdisp_h_filter[i].max = bdisp_h_spec[i].max;
		memcpy(base, bdisp_h_spec[i].coef, BDISP_HF_NB);
		bdisp_h_filter[i].virt = base;
		bdisp_h_filter[i].paddr = paddr;
		base += BDISP_HF_NB;
		paddr += BDISP_HF_NB;
	}

	/* Setup the vertical filter addresses and coefficients */
	for (i = 0; i < NB_V_FILTER; i++) {
		bdisp_v_filter[i].min = bdisp_v_spec[i].min;
		bdisp_v_filter[i].max = bdisp_v_spec[i].max;
		memcpy(base, bdisp_v_spec[i].coef, BDISP_VF_NB);
		bdisp_v_filter[i].virt = base;
		bdisp_v_filter[i].paddr = paddr;
		base += BDISP_VF_NB;
		paddr += BDISP_VF_NB;
	}

	return 0;
}
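
/**
 * bdisp_hw_get_hf_addr - find the horizontal filter for a resize increment
 * @inc:	resizing increment (6.10 fixed point)
 *
 * Return: DMA address of the horizontal filter coefficients whose
 * ]min, max] range contains the given increment.
 */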
static dma_addr_t bdisp_hw_get_hf_addr(u16 inc)
{
	unsigned int i;

	for (i = NB_H_FILTER - 1; i > 0; i--)
		if ((bdisp_h_filter[i].min < inc) &&
		    (inc <= bdisp_h_filter[i].max))
			break;

	return bdisp_h_filter[i].paddr;
}
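
/**
 * bdisp_hw_get_vf_addr - find the vertical filter for a resize increment
 * @inc:	resizing increment (6.10 fixed point)
 *
 * Return: DMA address of the vertical filter coefficients whose
 * ]min, max] range contains the given increment.
 */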
static dma_addr_t bdisp_hw_get_vf_addr(u16 inc)
{
	unsigned int i;

	for (i = NB_V_FILTER - 1; i > 0; i--)
		if ((bdisp_v_filter[i].min < inc) &&
		    (inc <= bdisp_v_filter[i].max))
			break;

	return bdisp_v_filter[i].paddr;
}
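
/**
 * bdisp_hw_get_inc - compute the resizing increment in 6.10 fixed point
 * @from:	input size
 * @to:		output size
 * @inc:	computed increment
 *
 * A 1:1 ratio gives an increment of 1 << 10. The increment must fit in
 * 16 bits and must not be null.
 *
 * Return: 0 on success, -EINVAL if the ratio is out of the supported range.
 */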
static int bdisp_hw_get_inc(u32 from, u32 to, u16 *inc)
{
	u32 tmp;

	if (!to)
		return -EINVAL;

	if (to == from) {
		*inc = 1 << 10;
		return 0;
	}

	tmp = (from << 10) / to;
	if ((tmp > 0xFFFF) || (!tmp))
		/* increment overflow (downscale) or null (upscale) */
		return -EINVAL;

	*inc = (u16)tmp;

	return 0;
}
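
/**
 * bdisp_hw_get_hv_inc - compute the horizontal and vertical increments
 * @ctx:	device context
 * @h_inc:	horizontal increment
 * @v_inc:	vertical increment
 *
 * Return: 0 on success, -EINVAL if one of the scale factors is out of range.
 */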
static int bdisp_hw_get_hv_inc(struct bdisp_ctx *ctx, u16 *h_inc, u16 *v_inc)
{
	u32 src_w, src_h, dst_w, dst_h;

	src_w = ctx->src.crop.width;
	src_h = ctx->src.crop.height;
	dst_w = ctx->dst.crop.width;
	dst_h = ctx->dst.crop.height;

	if (bdisp_hw_get_inc(src_w, dst_w, h_inc) ||
	    bdisp_hw_get_inc(src_h, dst_h, v_inc)) {
		dev_err(ctx->bdisp_dev->dev,
			"scale factors failed (%dx%d)->(%dx%d)\n",
			src_w, src_h, dst_w, dst_h);
		return -EINVAL;
	}

	return 0;
}
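
/**
 * bdisp_hw_get_op_cfg - check the request and fill the operation config
 * @ctx:	device context
 * @c:		operation configuration
 *
 * Derives the flip, color conversion, plane and scaling parameters from the
 * source and destination frames, and rejects requests that exceed the
 * hardware capabilities.
 *
 * Return: 0 on success, -EINVAL if the request is not supported.
 */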
static int bdisp_hw_get_op_cfg(struct bdisp_ctx *ctx, struct bdisp_op_cfg *c)
{
	struct device *dev = ctx->bdisp_dev->dev;
	struct bdisp_frame *src = &ctx->src;
	struct bdisp_frame *dst = &ctx->dst;

	if (src->width > MAX_SRC_WIDTH * MAX_VERTICAL_STRIDES) {
		dev_err(dev, "Image width out of HW caps\n");
		return -EINVAL;
	}

	c->wide = src->width > MAX_SRC_WIDTH;

	c->hflip = ctx->hflip;
	c->vflip = ctx->vflip;

	c->src_interlaced = (src->field == V4L2_FIELD_INTERLACED);

	c->src_nbp = src->fmt->nb_planes;
	c->src_yuv = (src->fmt->pixelformat == V4L2_PIX_FMT_NV12) ||
			(src->fmt->pixelformat == V4L2_PIX_FMT_YUV420);
	c->src_420 = c->src_yuv;

	c->dst_nbp = dst->fmt->nb_planes;
	c->dst_yuv = (dst->fmt->pixelformat == V4L2_PIX_FMT_NV12) ||
			(dst->fmt->pixelformat == V4L2_PIX_FMT_YUV420);
	c->dst_420 = c->dst_yuv;

	c->cconv = (c->src_yuv != c->dst_yuv);

	if (bdisp_hw_get_hv_inc(ctx, &c->h_inc, &c->v_inc)) {
		dev_err(dev, "Scale factor out of HW caps\n");
		return -EINVAL;
	}

	/* Deinterlacing adjustment: stretch a field to a frame */
	if (c->src_interlaced)
		c->v_inc /= 2;

	if ((c->h_inc != (1 << 10)) || (c->v_inc != (1 << 10)))
		c->scale = true;
	else
		c->scale = false;

	return 0;
}
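
/**
 * bdisp_hw_color_format - translate a V4L2 pixel format into the BDISP format
 * @pixelformat:	V4L2 pixel format
 *
 * Return: the BDISP color format, shifted and combined with the relevant
 * endianness and alpha flags.
 */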
static u32 bdisp_hw_color_format(u32 pixelformat)
{
	u32 ret;

	switch (pixelformat) {
	case V4L2_PIX_FMT_YUV420:
		ret = (BDISP_YUV_3B << BLT_TTY_COL_SHIFT);
		break;
	case V4L2_PIX_FMT_NV12:
		ret = (BDISP_NV12 << BLT_TTY_COL_SHIFT) | BLT_TTY_BIG_END;
		break;
	case V4L2_PIX_FMT_RGB565:
		ret = (BDISP_RGB565 << BLT_TTY_COL_SHIFT);
		break;
	case V4L2_PIX_FMT_XBGR32:
		ret = (BDISP_XRGB8888 << BLT_TTY_COL_SHIFT);
		break;
	case V4L2_PIX_FMT_RGB24:
		ret = (BDISP_RGB888 << BLT_TTY_COL_SHIFT) | BLT_TTY_BIG_END;
		break;
	case V4L2_PIX_FMT_ABGR32:
		/* fall through */
	default:
		ret = (BDISP_ARGB8888 << BLT_TTY_COL_SHIFT) | BLT_TTY_ALPHA_R;
		break;
	}

	return ret;
}
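
/**
 * bdisp_hw_build_node - build a single processing node
 * @ctx:	device context
 * @cfg:	operation configuration
 * @node:	node to be filled
 * @t_plan:	whether the node outputs an RGB, a luma or a chroma plane
 * @src_x_offset:	x offset in the source frame for wide pictures
 *
 * Fills in the node registers: instruction, target, sources, resize filters
 * and color conversion matrix, according to the requested operation.
 */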
static void bdisp_hw_build_node(struct bdisp_ctx *ctx,
				struct bdisp_op_cfg *cfg,
				struct bdisp_node *node,
				enum bdisp_target_plan t_plan, int src_x_offset)
{
	struct bdisp_frame *src = &ctx->src;
	struct bdisp_frame *dst = &ctx->dst;
	u16 h_inc, v_inc, yh_inc, yv_inc;
	struct v4l2_rect src_rect = src->crop;
	struct v4l2_rect dst_rect = dst->crop;
	int dst_x_offset;
	s32 dst_width = dst->crop.width;
	u32 src_fmt, dst_fmt;
	const u32 *ivmx;

	dev_dbg(ctx->bdisp_dev->dev, "%s\n", __func__);

	memset(node, 0, sizeof(*node));

	/* Adjust the source and destination areas wrt the x offset */
	src_rect.left += src_x_offset;
	src_rect.width -= src_x_offset;
	src_rect.width = min_t(__s32, MAX_SRC_WIDTH, src_rect.width);

	dst_x_offset = (src_x_offset * dst_width) / ctx->src.crop.width;
	dst_rect.left += dst_x_offset;
	dst_rect.width = (src_rect.width * dst_width) / ctx->src.crop.width;

	src_fmt = src->fmt->pixelformat;
	dst_fmt = dst->fmt->pixelformat;

	node->nip = 0;
	node->cic = BLT_CIC_ALL_GRP;
	node->ack = BLT_ACK_BYPASS_S2S3;

	switch (cfg->src_nbp) {
	case 1:
		/* Single plane: source 2 fetches the pixels, 1 and 3 are off */
		node->ins = BLT_INS_S1_OFF | BLT_INS_S2_MEM | BLT_INS_S3_OFF;
		break;
	case 2:
		/*
		 * Semi-planar: source 3 fetches the luma; source 2 fetches
		 * the chroma, or is a color fill when building the luma
		 * plane; source 1 is off.
		 */
		node->ins = BLT_INS_S1_OFF | BLT_INS_S3_MEM;
		if (t_plan == BDISP_Y)
			node->ins |= BLT_INS_S2_CF;
		else
			node->ins |= BLT_INS_S2_MEM;
		break;
	case 3:
	default:
		/*
		 * Planar: source 3 fetches the luma; sources 2 and 1 fetch
		 * the chroma planes, or are color fills when building the
		 * luma plane.
		 */
		node->ins = BLT_INS_S3_MEM;
		if (t_plan == BDISP_Y)
			node->ins |= BLT_INS_S2_CF | BLT_INS_S1_CF;
		else
			node->ins |= BLT_INS_S2_MEM | BLT_INS_S1_MEM;
		break;
	}

	/* Color conversion */
	node->ins |= cfg->cconv ? BLT_INS_IVMX : 0;
	/* Scaler is also needed for 4:2:0 chroma up/downsampling */
	node->ins |= (cfg->scale || cfg->src_420 || cfg->dst_420) ?
			BLT_INS_SCALE : 0;

	/* Target */
	node->tba = (t_plan == BDISP_CBCR) ? dst->paddr[1] : dst->paddr[0];

	node->tty = dst->bytesperline;
	node->tty |= bdisp_hw_color_format(dst_fmt);
	node->tty |= BLT_TTY_DITHER;
	node->tty |= (t_plan == BDISP_CBCR) ? BLT_TTY_CHROMA : 0;
	node->tty |= cfg->hflip ? BLT_TTY_HSO : 0;
	node->tty |= cfg->vflip ? BLT_TTY_VSO : 0;

	if (cfg->dst_420 && (t_plan == BDISP_CBCR)) {
		/* 4:2:0: the chroma plane is half the luma size */
		dst_rect.height /= 2;
		dst_rect.width /= 2;
		dst_rect.left /= 2;
		dst_rect.top /= 2;
		dst_x_offset /= 2;
		dst_width /= 2;
	}

	node->txy = cfg->vflip ? (dst_rect.height - 1) : dst_rect.top;
	node->txy <<= 16;
	node->txy |= cfg->hflip ? (dst_width - dst_x_offset - 1) :
			dst_rect.left;

	node->tsz = dst_rect.height << 16 | dst_rect.width;

	if (cfg->src_interlaced) {
		/* Interlaced: a field only holds half of the lines */
		src_rect.top /= 2;
		src_rect.height /= 2;
	}

	if (cfg->src_nbp == 1) {
		/* Source 2: single plane */
		node->s2ba = src->paddr[0];

		node->s2ty = src->bytesperline;
		if (cfg->src_interlaced)
			node->s2ty *= 2;

		node->s2ty |= bdisp_hw_color_format(src_fmt);

		node->s2xy = src_rect.top << 16 | src_rect.left;
		node->s2sz = src_rect.height << 16 | src_rect.width;
	} else {
		/* Source 2: chroma (Cb or CbCr) */
		if (cfg->src_420) {
			/* 4:2:0: the chroma is half the luma size */
			src_rect.top /= 2;
			src_rect.left /= 2;
			src_rect.width /= 2;
			src_rect.height /= 2;
		}

		node->s2ba = src->paddr[1];

		node->s2ty = src->bytesperline;
		if (cfg->src_nbp == 3)
			node->s2ty /= 2;
		if (cfg->src_interlaced)
			node->s2ty *= 2;

		node->s2ty |= bdisp_hw_color_format(src_fmt);

		node->s2xy = src_rect.top << 16 | src_rect.left;
		node->s2sz = src_rect.height << 16 | src_rect.width;

		if (cfg->src_nbp == 3) {
			/* Source 1: Cr */
			node->s1ba = src->paddr[2];

			node->s1ty = node->s2ty;
			node->s1xy = node->s2xy;
		}

		/* Source 3: luma */
		node->s3ba = src->paddr[0];

		node->s3ty = src->bytesperline;
		if (cfg->src_interlaced)
			node->s3ty *= 2;
		node->s3ty |= bdisp_hw_color_format(src_fmt);

		if ((t_plan != BDISP_CBCR) && cfg->src_420) {
			/* Luma area is twice the chroma area */
			node->s3xy = node->s2xy * 2;
			node->s3sz = node->s2sz * 2;
		} else {
			/* Same coordinates as the chroma, blank accesses */
			node->s3ty |= BLT_S3TY_BLANK_ACC;
			node->s3xy = node->s2xy;
			node->s3sz = node->s2sz;
		}
	}

	/* Resize (scaling or 4:2:0 chroma up/downsampling) */
	if (node->ins & BLT_INS_SCALE) {
		/* No luma filtering when writing the chroma of an RGB source */
		bool skip_y = (t_plan == BDISP_CBCR) && !cfg->src_yuv;

		/* Resize operation */
		if (cfg->scale) {
			node->fctl = BLT_FCTL_HV_SCALE;
			if (!skip_y)
				node->fctl |= BLT_FCTL_Y_HV_SCALE;
		} else {
			node->fctl = BLT_FCTL_HV_SAMPLE;
			if (!skip_y)
				node->fctl |= BLT_FCTL_Y_HV_SAMPLE;
		}

		/* Adjust the increments for 4:2:0 chroma up/downsampling */
		h_inc = cfg->h_inc;
		v_inc = cfg->v_inc;
		if (!cfg->src_420 && cfg->dst_420 && (t_plan == BDISP_CBCR)) {
			/* Destination chroma is half the luma size */
			h_inc *= 2;
			v_inc *= 2;
		} else if (cfg->src_420 && !cfg->dst_420) {
			/* Source chroma is half the luma size */
			h_inc /= 2;
			v_inc /= 2;
		}
		node->rsf = v_inc << 16 | h_inc;

		/* Default resizer initialization */
		node->rzi = BLT_RZI_DEFAULT;

		/* Filter coefficients matching the increments */
		node->hfp = bdisp_hw_get_hf_addr(h_inc);
		node->vfp = bdisp_hw_get_vf_addr(v_inc);

		/* Luma filter configuration */
		if (!skip_y) {
			yh_inc = cfg->h_inc;
			yv_inc = cfg->v_inc;

			node->y_rsf = yv_inc << 16 | yh_inc;
			node->y_rzi = BLT_RZI_DEFAULT;
			node->y_hfp = bdisp_hw_get_hf_addr(yh_inc);
			node->y_vfp = bdisp_hw_get_vf_addr(yv_inc);
		}
	}

	/* Versatile matrix coefficients for color conversion */
	if (cfg->cconv) {
		ivmx = cfg->src_yuv ? bdisp_yuv_to_rgb : bdisp_rgb_to_yuv;

		node->ivmx0 = ivmx[0];
		node->ivmx1 = ivmx[1];
		node->ivmx2 = ivmx[2];
		node->ivmx3 = ivmx[3];
	}
}
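
/**
 * bdisp_hw_build_all_nodes - build all the nodes for a given operation
 * @ctx:	device context
 *
 * Builds one RGB/luma node plus, for multi-plane destinations, a chroma node
 * per vertical stride, and chains them through their 'next node' pointers.
 *
 * Return: 0 on success, -EINVAL if the nodes are missing or the operation
 * is not supported.
 */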
static int bdisp_hw_build_all_nodes(struct bdisp_ctx *ctx)
{
	struct bdisp_op_cfg cfg;
	unsigned int i, nid = 0;
	int src_x_offset = 0;

	for (i = 0; i < MAX_NB_NODE; i++)
		if (!ctx->node[i]) {
			dev_err(ctx->bdisp_dev->dev, "node %d is null\n", i);
			return -EINVAL;
		}

	/* Get the operation configuration (scale, flip, planes, ...) */
	if (bdisp_hw_get_op_cfg(ctx, &cfg))
		return -EINVAL;

	/* Split the source into vertical strides if it is too wide */
	for (i = 0; i < MAX_VERTICAL_STRIDES; i++) {
		/* Build the RGB / luma node and chain it to the previous one */
		bdisp_hw_build_node(ctx, &cfg, ctx->node[nid],
				    cfg.dst_nbp == 1 ? BDISP_RGB : BDISP_Y,
				    src_x_offset);
		if (nid)
			ctx->node[nid - 1]->nip = ctx->node_paddr[nid];
		nid++;

		/* Build an additional chroma node for multi-plane destinations */
		if (cfg.dst_nbp > 1) {
			bdisp_hw_build_node(ctx, &cfg, ctx->node[nid],
					    BDISP_CBCR, src_x_offset);
			ctx->node[nid - 1]->nip = ctx->node_paddr[nid];
			nid++;
		}

		/* Next vertical stride, stop once the full width is covered */
		src_x_offset += MAX_SRC_WIDTH;
		if (src_x_offset >= ctx->src.crop.width)
			break;
	}

	/* Mark the last node of the chain: no next node */
	ctx->node[nid - 1]->nip = 0;

	return 0;
}
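
/**
 * bdisp_hw_save_request - save a copy of the request and of the built nodes
 * @ctx:	device context
 *
 * Keeps a copy of the last request and of its nodes for debug purposes.
 */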
static void bdisp_hw_save_request(struct bdisp_ctx *ctx)
{
	struct bdisp_node **copy_node = ctx->bdisp_dev->dbg.copy_node;
	struct bdisp_request *request = &ctx->bdisp_dev->dbg.copy_request;
	struct bdisp_node **node = ctx->node;
	int i;

	/* Request copy */
	request->src = ctx->src;
	request->dst = ctx->dst;
	request->hflip = ctx->hflip;
	request->vflip = ctx->vflip;
	request->nb_req++;

	/* Nodes copy */
	for (i = 0; i < MAX_NB_NODE; i++) {
		/* Allocate the copy buffer if not done yet */
		if (!copy_node[i]) {
			copy_node[i] = devm_kzalloc(ctx->bdisp_dev->dev,
						    sizeof(*copy_node[i]),
						    GFP_KERNEL);
			if (!copy_node[i])
				return;
		}
		*copy_node[i] = *node[i];
	}
}
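
/**
 * bdisp_hw_update - send the request to the hardware
 * @ctx:	device context
 *
 * Builds and saves the nodes, then programs the application queue (AQ1)
 * interrupt, first node address and last node address.
 *
 * Return: 0 on success, an error code if the nodes cannot be built.
 */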
int bdisp_hw_update(struct bdisp_ctx *ctx)
{
	int ret;
	struct bdisp_dev *bdisp = ctx->bdisp_dev;
	struct device *dev = bdisp->dev;
	unsigned int node_id;

	dev_dbg(dev, "%s\n", __func__);

	/* Build the nodes */
	ret = bdisp_hw_build_all_nodes(ctx);
	if (ret) {
		dev_err(dev, "cannot build nodes (%d)\n", ret);
		return ret;
	}

	/* Save a copy of the request for debug */
	bdisp_hw_save_request(ctx);

	/* Configure the 'last node reached' interrupt for AQ1 */
	writel(BLT_AQ1_CTL_CFG, bdisp->regs + BLT_AQ1_CTL);
	writel(BLT_ITS_AQ1_LNA, bdisp->regs + BLT_ITM0);

	/* Write the first node address */
	writel(ctx->node_paddr[0], bdisp->regs + BLT_AQ1_IP);

	/* Find and write the last node address of the chain */
	for (node_id = 0; node_id < MAX_NB_NODE - 1; node_id++) {
		if (!ctx->node[node_id]->nip)
			break;
	}
	writel(ctx->node_paddr[node_id], bdisp->regs + BLT_AQ1_LNA);

	return 0;
}