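/**
 * DOC: VC4 plane module
 *
 * Each DRM plane is a layer of pixels being scanned out by the HVS
 * (Hardware Video Scaler).  At atomic check time we build a display
 * list ("dlist") for the plane in system memory; at CRTC flush time
 * the dlists of all active planes are copied into the HVS's display
 * list memory for scanout.
 */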
#include "vc4_drv.h"
#include "vc4_regs.h"
#include "drm_atomic.h"
#include "drm_atomic_helper.h"
#include "drm_fb_cma_helper.h"
#include "drm_plane_helper.h"

enum vc4_scaling_mode {
	VC4_SCALING_NONE,
	VC4_SCALING_TPZ,
	VC4_SCALING_PPF,
};

struct vc4_plane_state {
	struct drm_plane_state base;

	/* System memory copy of the display list for this element,
	 * computed at atomic_check time.
	 */
	u32 *dlist;
	u32 dlist_size; /* Number of dwords allocated for the display list */
	u32 dlist_count; /* Number of used dwords in the display list. */

	/* Offsets in the dlist of the words that get rewritten for
	 * asynchronous (cursor) updates.
	 */
	u32 pos0_offset;
	u32 pos2_offset;
	u32 ptr0_offset;

	/* Location in the hardware dlist memory where our dlist was
	 * last written, so async updates can patch it in place.
	 */
	u32 __iomem *hw_dlist;

	/* Clipped coordinates of the plane on the display. */
	int crtc_x, crtc_y, crtc_w, crtc_h;
	/* Clipped source area being scanned from in the FB. */
	u32 src_x, src_y;

	u32 src_w[2], src_h[2];

	/* Scaling selection for the RGB/Y plane and the Cb/Cr planes. */
	enum vc4_scaling_mode x_scaling[2], y_scaling[2];
	bool is_unity;
	bool is_yuv;

	/* Offset to start scanning out from within each plane's BO. */
	u32 offsets[3];

	/* Our allocation in LBM for temporary storage during scaling. */
	struct drm_mm_node lbm;
};

static inline struct vc4_plane_state *
to_vc4_plane_state(struct drm_plane_state *state)
{
	return (struct vc4_plane_state *)state;
}

static const struct hvs_format {
	u32 drm; /* DRM_FORMAT_* */
	u32 hvs; /* HVS_PIXEL_FORMAT_* */
	u32 pixel_order; /* HVS_PIXEL_ORDER_* */
	bool has_alpha;
	bool flip_cbcr;
} hvs_formats[] = {
	{
		.drm = DRM_FORMAT_XRGB8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
		.pixel_order = HVS_PIXEL_ORDER_ABGR, .has_alpha = false,
	},
	{
		.drm = DRM_FORMAT_ARGB8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
		.pixel_order = HVS_PIXEL_ORDER_ABGR, .has_alpha = true,
	},
	{
		.drm = DRM_FORMAT_ABGR8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
		.pixel_order = HVS_PIXEL_ORDER_ARGB, .has_alpha = true,
	},
	{
		.drm = DRM_FORMAT_XBGR8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
		.pixel_order = HVS_PIXEL_ORDER_ARGB, .has_alpha = false,
	},
	{
		.drm = DRM_FORMAT_RGB565, .hvs = HVS_PIXEL_FORMAT_RGB565,
		.pixel_order = HVS_PIXEL_ORDER_XRGB, .has_alpha = false,
	},
	{
		.drm = DRM_FORMAT_BGR565, .hvs = HVS_PIXEL_FORMAT_RGB565,
		.pixel_order = HVS_PIXEL_ORDER_XBGR, .has_alpha = false,
	},
	{
		.drm = DRM_FORMAT_ARGB1555, .hvs = HVS_PIXEL_FORMAT_RGBA5551,
		.pixel_order = HVS_PIXEL_ORDER_ABGR, .has_alpha = true,
	},
	{
		.drm = DRM_FORMAT_XRGB1555, .hvs = HVS_PIXEL_FORMAT_RGBA5551,
		.pixel_order = HVS_PIXEL_ORDER_ABGR, .has_alpha = false,
	},
	{
		.drm = DRM_FORMAT_YUV422,
		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_3PLANE,
	},
	{
		.drm = DRM_FORMAT_YVU422,
		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_3PLANE,
		.flip_cbcr = true,
	},
	{
		.drm = DRM_FORMAT_YUV420,
		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_3PLANE,
	},
	{
		.drm = DRM_FORMAT_YVU420,
		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_3PLANE,
		.flip_cbcr = true,
	},
	{
		.drm = DRM_FORMAT_NV12,
		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_2PLANE,
	},
	{
		.drm = DRM_FORMAT_NV16,
		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_2PLANE,
	},
};

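/* Returns the HVS format descriptor for a DRM fourcc, or NULL if the
 * format isn't supported by this driver.
 */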
static const struct hvs_format *vc4_get_hvs_format(u32 drm_format)
{
	unsigned i;

	for (i = 0; i < ARRAY_SIZE(hvs_formats); i++) {
		if (hvs_formats[i].drm == drm_format)
			return &hvs_formats[i];
	}

	return NULL;
}

/* PPF is used when upscaling, TPZ when downscaling. */
static enum vc4_scaling_mode vc4_get_scaling_mode(u32 src, u32 dst)
{
	if (dst > src)
		return VC4_SCALING_PPF;
	else if (dst < src)
		return VC4_SCALING_TPZ;
	else
		return VC4_SCALING_NONE;
}

static bool plane_enabled(struct drm_plane_state *state)
{
	return state->fb && state->crtc;
}

static struct drm_plane_state *vc4_plane_duplicate_state(struct drm_plane *plane)
{
	struct vc4_plane_state *vc4_state;

	if (WARN_ON(!plane->state))
		return NULL;

	vc4_state = kmemdup(plane->state, sizeof(*vc4_state), GFP_KERNEL);
	if (!vc4_state)
		return NULL;

	memset(&vc4_state->lbm, 0, sizeof(vc4_state->lbm));

	__drm_atomic_helper_plane_duplicate_state(plane, &vc4_state->base);

	if (vc4_state->dlist) {
		vc4_state->dlist = kmemdup(vc4_state->dlist,
					   vc4_state->dlist_count * 4,
					   GFP_KERNEL);
		if (!vc4_state->dlist) {
			kfree(vc4_state);
			return NULL;
		}
		vc4_state->dlist_size = vc4_state->dlist_count;
	}

	return &vc4_state->base;
}

static void vc4_plane_destroy_state(struct drm_plane *plane,
				    struct drm_plane_state *state)
{
	struct vc4_dev *vc4 = to_vc4_dev(plane->dev);
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	if (vc4_state->lbm.allocated) {
		unsigned long irqflags;

		spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);
		drm_mm_remove_node(&vc4_state->lbm);
		spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);
	}

	kfree(vc4_state->dlist);
	__drm_atomic_helper_plane_destroy_state(&vc4_state->base);
	kfree(state);
}

/* Called during init to allocate the plane's atomic state. */
static void vc4_plane_reset(struct drm_plane *plane)
{
	struct vc4_plane_state *vc4_state;

	WARN_ON(plane->state);

	vc4_state = kzalloc(sizeof(*vc4_state), GFP_KERNEL);
	if (!vc4_state)
		return;

	plane->state = &vc4_state->base;
	vc4_state->base.plane = plane;
}

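/* Appends one word to the plane's dlist, growing the system-memory
 * copy of the dlist as needed.
 */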
static void vc4_dlist_write(struct vc4_plane_state *vc4_state, u32 val)
{
	if (vc4_state->dlist_count == vc4_state->dlist_size) {
		u32 new_size = max(4u, vc4_state->dlist_count * 2);
		u32 *new_dlist = kmalloc(new_size * 4, GFP_KERNEL);

		if (!new_dlist)
			return;
		memcpy(new_dlist, vc4_state->dlist, vc4_state->dlist_count * 4);

		kfree(vc4_state->dlist);
		vc4_state->dlist = new_dlist;
		vc4_state->dlist_size = new_size;
	}

	vc4_state->dlist[vc4_state->dlist_count++] = val;
}

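/* Returns the SCALER_CTL0 SCL field for a plane, based on whether
 * each axis needs up-scaling (PPF), down-scaling (TPZ), or no
 * scaling.  This replicates a table from the HVS documentation.
 */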
static u32 vc4_get_scl_field(struct drm_plane_state *state, int plane)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	switch (vc4_state->x_scaling[plane] << 2 | vc4_state->y_scaling[plane]) {
	case VC4_SCALING_PPF << 2 | VC4_SCALING_PPF:
		return SCALER_CTL0_SCL_H_PPF_V_PPF;
	case VC4_SCALING_TPZ << 2 | VC4_SCALING_PPF:
		return SCALER_CTL0_SCL_H_TPZ_V_PPF;
	case VC4_SCALING_PPF << 2 | VC4_SCALING_TPZ:
		return SCALER_CTL0_SCL_H_PPF_V_TPZ;
	case VC4_SCALING_TPZ << 2 | VC4_SCALING_TPZ:
		return SCALER_CTL0_SCL_H_TPZ_V_TPZ;
	case VC4_SCALING_PPF << 2 | VC4_SCALING_NONE:
		return SCALER_CTL0_SCL_H_PPF_V_NONE;
	case VC4_SCALING_NONE << 2 | VC4_SCALING_PPF:
		return SCALER_CTL0_SCL_H_NONE_V_PPF;
	case VC4_SCALING_NONE << 2 | VC4_SCALING_TPZ:
		return SCALER_CTL0_SCL_H_NONE_V_TPZ;
	case VC4_SCALING_TPZ << 2 | VC4_SCALING_NONE:
		return SCALER_CTL0_SCL_H_TPZ_V_NONE;
	default:
	case VC4_SCALING_NONE << 2 | VC4_SCALING_NONE:
		/* The unity case is independently handled by
		 * SCALER_CTL0_UNITY.
		 */
		return 0;
	}
}

static int vc4_plane_setup_clipping_and_scaling(struct drm_plane_state *state)
{
	struct drm_plane *plane = state->plane;
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);
	struct drm_framebuffer *fb = state->fb;
	struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0);
	u32 subpixel_src_mask = (1 << 16) - 1;
	u32 format = fb->format->format;
	int num_planes = fb->format->num_planes;
	u32 h_subsample = 1;
	u32 v_subsample = 1;
	int i;

	for (i = 0; i < num_planes; i++)
		vc4_state->offsets[i] = bo->paddr + fb->offsets[i];

	/* We don't support subpixel source positioning. */
	if ((state->src_x & subpixel_src_mask) ||
	    (state->src_y & subpixel_src_mask) ||
	    (state->src_w & subpixel_src_mask) ||
	    (state->src_h & subpixel_src_mask)) {
		return -EINVAL;
	}

	vc4_state->src_x = state->src_x >> 16;
	vc4_state->src_y = state->src_y >> 16;
	vc4_state->src_w[0] = state->src_w >> 16;
	vc4_state->src_h[0] = state->src_h >> 16;

	vc4_state->crtc_x = state->crtc_x;
	vc4_state->crtc_y = state->crtc_y;
	vc4_state->crtc_w = state->crtc_w;
	vc4_state->crtc_h = state->crtc_h;

	vc4_state->x_scaling[0] = vc4_get_scaling_mode(vc4_state->src_w[0],
						       vc4_state->crtc_w);
	vc4_state->y_scaling[0] = vc4_get_scaling_mode(vc4_state->src_h[0],
						       vc4_state->crtc_h);

	if (num_planes > 1) {
		vc4_state->is_yuv = true;

		h_subsample = drm_format_horz_chroma_subsampling(format);
		v_subsample = drm_format_vert_chroma_subsampling(format);
		vc4_state->src_w[1] = vc4_state->src_w[0] / h_subsample;
		vc4_state->src_h[1] = vc4_state->src_h[0] / v_subsample;

		vc4_state->x_scaling[1] =
			vc4_get_scaling_mode(vc4_state->src_w[1],
					     vc4_state->crtc_w);
		vc4_state->y_scaling[1] =
			vc4_get_scaling_mode(vc4_state->src_h[1],
					     vc4_state->crtc_h);

		/* YUV conversion requires that scaling be enabled,
		 * even on a plane that's otherwise 1:1.  Choose TPZ
		 * for simplicity.
		 */
		if (vc4_state->x_scaling[0] == VC4_SCALING_NONE)
			vc4_state->x_scaling[0] = VC4_SCALING_TPZ;
		if (vc4_state->y_scaling[0] == VC4_SCALING_NONE)
			vc4_state->y_scaling[0] = VC4_SCALING_TPZ;
	}

	vc4_state->is_unity = (vc4_state->x_scaling[0] == VC4_SCALING_NONE &&
			       vc4_state->y_scaling[0] == VC4_SCALING_NONE &&
			       vc4_state->x_scaling[1] == VC4_SCALING_NONE &&
			       vc4_state->y_scaling[1] == VC4_SCALING_NONE);

	/* No configuring scaling on the cursor plane, since it gets
	 * non-vblank-synced updates, and scaling requires an LBM
	 * allocation.
	 */
	if (plane->type == DRM_PLANE_TYPE_CURSOR && !vc4_state->is_unity)
		return -EINVAL;

	/* If the plane extends off the left or top edge of the
	 * screen, clip it ourselves by advancing the scanout offsets
	 * and shrinking the source size, since the dlist position
	 * words can't express negative coordinates.
	 */
	if (vc4_state->crtc_x < 0) {
		for (i = 0; i < num_planes; i++) {
			u32 cpp = fb->format->cpp[i];
			u32 subs = ((i == 0) ? 1 : h_subsample);

			vc4_state->offsets[i] += (cpp *
						  (-vc4_state->crtc_x) / subs);
		}
		vc4_state->src_w[0] += vc4_state->crtc_x;
		vc4_state->src_w[1] += vc4_state->crtc_x / h_subsample;
		vc4_state->crtc_x = 0;
	}

	if (vc4_state->crtc_y < 0) {
		for (i = 0; i < num_planes; i++) {
			u32 subs = ((i == 0) ? 1 : v_subsample);

			vc4_state->offsets[i] += (fb->pitches[i] *
						  (-vc4_state->crtc_y) / subs);
		}
		vc4_state->src_h[0] += vc4_state->crtc_y;
		vc4_state->src_h[1] += vc4_state->crtc_y / v_subsample;
		vc4_state->crtc_y = 0;
	}

	return 0;
}

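/* Writes the two TPZ scaler words: the src/dst ratio as a 16.16
 * scale factor, and its reciprocal.
 */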
static void vc4_write_tpz(struct vc4_plane_state *vc4_state, u32 src, u32 dst)
{
	u32 scale, recip;

	scale = (1 << 16) * src / dst;

	/* The specs note that while the reciprocal would be defined
	 * as (1<<32)/scale, ~0 is close enough.
	 */
	recip = ~0 / scale;

	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(scale, SCALER_TPZ0_SCALE) |
			VC4_SET_FIELD(0, SCALER_TPZ0_IPHASE));
	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(recip, SCALER_TPZ1_RECIP));
}

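/* Writes the single PPF (polyphase filter) scaler word with the
 * src/dst scale factor.
 */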
static void vc4_write_ppf(struct vc4_plane_state *vc4_state, u32 src, u32 dst)
{
	u32 scale = (1 << 16) * src / dst;

	vc4_dlist_write(vc4_state,
			SCALER_PPF_AGC |
			VC4_SET_FIELD(scale, SCALER_PPF_SCALE) |
			VC4_SET_FIELD(0, SCALER_PPF_IPHASE));
}

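/* Returns how much LBM (line buffer memory) the HVS needs as
 * temporary storage for this plane's scaling configuration.
 */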
static u32 vc4_lbm_size(struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);
	/* This is the worst case number.  One of the two sizes will
	 * be used depending on the scaling configuration.
	 */
	u32 pix_per_line = max(vc4_state->src_w[0], (u32)vc4_state->crtc_w);
	u32 lbm;

	if (!vc4_state->is_yuv) {
		if (vc4_state->is_unity)
			return 0;
		else if (vc4_state->y_scaling[0] == VC4_SCALING_TPZ)
			lbm = pix_per_line * 8;
		else {
			/* In special cases, this multiplier might be 12. */
			lbm = pix_per_line * 16;
		}
	} else {
		/* There are cases for this going down to a multiplier
		 * of 2, but according to the firmware source, the
		 * table in the docs is somewhat wrong.
		 */
		lbm = pix_per_line * 16;
	}

	lbm = roundup(lbm, 32);

	return lbm;
}

static void vc4_write_scaling_parameters(struct drm_plane_state *state,
					 int channel)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	/* Ch0 H-PPF Word 0: Scaling Parameters */
	if (vc4_state->x_scaling[channel] == VC4_SCALING_PPF) {
		vc4_write_ppf(vc4_state,
			      vc4_state->src_w[channel], vc4_state->crtc_w);
	}

	/* Ch0 V-PPF Words 0-1: Scaling Parameters, Context */
	if (vc4_state->y_scaling[channel] == VC4_SCALING_PPF) {
		vc4_write_ppf(vc4_state,
			      vc4_state->src_h[channel], vc4_state->crtc_h);
		vc4_dlist_write(vc4_state, 0xc0c0c0c0);
	}

	/* Ch0 H-TPZ Words 0-1: Scaling Parameters, Recip */
	if (vc4_state->x_scaling[channel] == VC4_SCALING_TPZ) {
		vc4_write_tpz(vc4_state,
			      vc4_state->src_w[channel], vc4_state->crtc_w);
	}

	/* Ch0 V-TPZ Words 0-2: Scaling Parameters, Recip, Context */
	if (vc4_state->y_scaling[channel] == VC4_SCALING_TPZ) {
		vc4_write_tpz(vc4_state,
			      vc4_state->src_h[channel], vc4_state->crtc_h);
		vc4_dlist_write(vc4_state, 0xc0c0c0c0);
	}
}

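/* Writes out a full display list for an active plane to the plane's
 * private dlist state.
 */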
static int vc4_plane_mode_set(struct drm_plane *plane,
			      struct drm_plane_state *state)
{
	struct vc4_dev *vc4 = to_vc4_dev(plane->dev);
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);
	struct drm_framebuffer *fb = state->fb;
	u32 ctl0_offset = vc4_state->dlist_count;
	const struct hvs_format *format = vc4_get_hvs_format(fb->format->format);
	int num_planes = drm_format_num_planes(format->drm);
	u32 scl0, scl1;
	u32 lbm_size;
	unsigned long irqflags;
	int ret, i;

	ret = vc4_plane_setup_clipping_and_scaling(state);
	if (ret)
		return ret;

	/* Allocate the LBM memory that the HVS needs for temporary
	 * storage due to our configuration.
	 */
	lbm_size = vc4_lbm_size(state);
	if (lbm_size) {
		if (!vc4_state->lbm.allocated) {
			spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);
			ret = drm_mm_insert_node_generic(&vc4->hvs->lbm_mm,
							 &vc4_state->lbm,
							 lbm_size, 32, 0, 0);
			spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);
		} else {
			WARN_ON_ONCE(lbm_size != vc4_state->lbm.size);
		}
	}

	if (ret)
		return ret;

	/* SCL1 is used for Cb/Cr scaling of planar formats.  Even if
	 * we're fully RGB, we set it to the same as SCL0 so the HW
	 * picks the same scaling mode.  Note that uni-planar formats
	 * keep their scaling parameters in slot 0.
	 */
	if (num_planes == 1) {
		scl0 = vc4_get_scl_field(state, 0);
		scl1 = scl0;
	} else {
		scl0 = vc4_get_scl_field(state, 1);
		scl1 = vc4_get_scl_field(state, 0);
	}

	/* Control word */
	vc4_dlist_write(vc4_state,
			SCALER_CTL0_VALID |
			(format->pixel_order << SCALER_CTL0_ORDER_SHIFT) |
			(format->hvs << SCALER_CTL0_PIXEL_FORMAT_SHIFT) |
			(vc4_state->is_unity ? SCALER_CTL0_UNITY : 0) |
			VC4_SET_FIELD(scl0, SCALER_CTL0_SCL0) |
			VC4_SET_FIELD(scl1, SCALER_CTL0_SCL1));

	/* Position Word 0: Image Positions and Alpha Value */
	vc4_state->pos0_offset = vc4_state->dlist_count;
	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(0xff, SCALER_POS0_FIXED_ALPHA) |
			VC4_SET_FIELD(vc4_state->crtc_x, SCALER_POS0_START_X) |
			VC4_SET_FIELD(vc4_state->crtc_y, SCALER_POS0_START_Y));

	/* Position Word 1: Scaled Image Dimensions.  Skipped if the
	 * plane is being scanned out at 1:1.
	 */
	if (!vc4_state->is_unity) {
		vc4_dlist_write(vc4_state,
				VC4_SET_FIELD(vc4_state->crtc_w,
					      SCALER_POS1_SCL_WIDTH) |
				VC4_SET_FIELD(vc4_state->crtc_h,
					      SCALER_POS1_SCL_HEIGHT));
	}

	/* Position Word 2: Source Image Size, Alpha Mode */
	vc4_state->pos2_offset = vc4_state->dlist_count;
	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(format->has_alpha ?
				      SCALER_POS2_ALPHA_MODE_PIPELINE :
				      SCALER_POS2_ALPHA_MODE_FIXED,
				      SCALER_POS2_ALPHA_MODE) |
			VC4_SET_FIELD(vc4_state->src_w[0], SCALER_POS2_WIDTH) |
			VC4_SET_FIELD(vc4_state->src_h[0], SCALER_POS2_HEIGHT));

	/* Position Word 3: Context.  Written by the HVS. */
	vc4_dlist_write(vc4_state, 0xc0c0c0c0);

	/* Pointer Word 0/1/2: RGB / Y / Cb / Cr Pointers
	 *
	 * The pointers may be any byte address.
	 */
	vc4_state->ptr0_offset = vc4_state->dlist_count;
	if (!format->flip_cbcr) {
		for (i = 0; i < num_planes; i++)
			vc4_dlist_write(vc4_state, vc4_state->offsets[i]);
	} else {
		/* The YVU formats are laid out like their YUV
		 * counterparts with the Cb and Cr planes swapped.
		 */
		WARN_ON_ONCE(num_planes != 3);
		vc4_dlist_write(vc4_state, vc4_state->offsets[0]);
		vc4_dlist_write(vc4_state, vc4_state->offsets[2]);
		vc4_dlist_write(vc4_state, vc4_state->offsets[1]);
	}

	/* Pointer Context Word 0/1/2: Written by the HVS */
	for (i = 0; i < num_planes; i++)
		vc4_dlist_write(vc4_state, 0xc0c0c0c0);

	/* Pitch word 0/1/2 */
	for (i = 0; i < num_planes; i++) {
		vc4_dlist_write(vc4_state,
				VC4_SET_FIELD(fb->pitches[i], SCALER_SRC_PITCH));
	}

	/* Colorspace conversion words 0/1/2 */
	if (vc4_state->is_yuv) {
		vc4_dlist_write(vc4_state, SCALER_CSC0_ITR_R_601_5);
		vc4_dlist_write(vc4_state, SCALER_CSC1_ITR_R_601_5);
		vc4_dlist_write(vc4_state, SCALER_CSC2_ITR_R_601_5);
	}

	if (!vc4_state->is_unity) {
		/* LBM Base Address. */
		if (vc4_state->y_scaling[0] != VC4_SCALING_NONE ||
		    vc4_state->y_scaling[1] != VC4_SCALING_NONE) {
			vc4_dlist_write(vc4_state, vc4_state->lbm.start);
		}

		if (num_planes > 1) {
			/* Emit Cb/Cr as channel 0 and Y as channel
			 * 1.  This matches how we set up scl0/scl1
			 * above.
			 */
			vc4_write_scaling_parameters(state, 1);
		}
		vc4_write_scaling_parameters(state, 0);

		/* If any PPF setup was done, then all the kernel
		 * pointers get uploaded.
		 */
		if (vc4_state->x_scaling[0] == VC4_SCALING_PPF ||
		    vc4_state->y_scaling[0] == VC4_SCALING_PPF ||
		    vc4_state->x_scaling[1] == VC4_SCALING_PPF ||
		    vc4_state->y_scaling[1] == VC4_SCALING_PPF) {
			u32 kernel = VC4_SET_FIELD(vc4->hvs->mitchell_netravali_filter.start,
						   SCALER_PPF_KERNEL_OFFSET);

			/* HPPF plane 0 */
			vc4_dlist_write(vc4_state, kernel);
			/* VPPF plane 0 */
			vc4_dlist_write(vc4_state, kernel);
			/* HPPF plane 1 */
			vc4_dlist_write(vc4_state, kernel);
			/* VPPF plane 1 */
			vc4_dlist_write(vc4_state, kernel);
		}
	}

	vc4_state->dlist[ctl0_offset] |=
		VC4_SET_FIELD(vc4_state->dlist_count, SCALER_CTL0_SIZE);

	return 0;
}

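/* If a modeset involves changing the setup of a plane, the atomic
 * infrastructure will call this to validate a proposed plane setup.
 * However, if a plane isn't getting updated, this (and the
 * corresponding vc4_plane_atomic_update) won't get called.  Thus, we
 * compute the dlist here and have all active plane dlists get
 * uploaded by the CRTC during its flush.
 */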
static int vc4_plane_atomic_check(struct drm_plane *plane,
				  struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	vc4_state->dlist_count = 0;

	if (plane_enabled(state))
		return vc4_plane_mode_set(plane, state);
	else
		return 0;
}

static void vc4_plane_atomic_update(struct drm_plane *plane,
				    struct drm_plane_state *old_state)
{
	/* No contents here.  Since we don't know where in the CRTC's
	 * dlist we should be stored, our dlist is uploaded to the
	 * hardware with vc4_plane_write_dlist() at CRTC atomic_flush
	 * time.
	 */
}

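/* Called by the CRTC at atomic_flush time to copy this plane's dlist
 * into the hardware display list memory it was assigned.
 */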
u32 vc4_plane_write_dlist(struct drm_plane *plane, u32 __iomem *dlist)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(plane->state);
	int i;

	vc4_state->hw_dlist = dlist;

	/* Can't memcpy_toio() because it needs to be 32-bit writes. */
	for (i = 0; i < vc4_state->dlist_count; i++)
		writel(vc4_state->dlist[i], &dlist[i]);

	return vc4_state->dlist_count;
}

u32 vc4_plane_dlist_size(const struct drm_plane_state *state)
{
	const struct vc4_plane_state *vc4_state =
		container_of(state, typeof(*vc4_state), base);

	return vc4_state->dlist_count;
}

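/* Updates the plane to immediately (well, once the FIFO needs
 * refilling) scan out from a new framebuffer.
 */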
void vc4_plane_async_set_fb(struct drm_plane *plane, struct drm_framebuffer *fb)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(plane->state);
	struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0);
	uint32_t addr;

	/* We're skipping the address adjustment for negative origin,
	 * because this is only called on the primary plane.
	 */
	WARN_ON_ONCE(plane->state->crtc_x < 0 || plane->state->crtc_y < 0);
	addr = bo->paddr + fb->offsets[0];

	/* Write the new address into the hardware immediately.  The
	 * scanout will start from this address as soon as the FIFO
	 * needs to refill with pixels.
	 */
	writel(addr, &vc4_state->hw_dlist[vc4_state->ptr0_offset]);

	/* Also update the CPU-side dlist copy, so that any later
	 * atomic updates that skip a full modeset of this plane still
	 * use the new address.
	 */
	vc4_state->dlist[vc4_state->ptr0_offset] = addr;
}

static const struct drm_plane_helper_funcs vc4_plane_helper_funcs = {
	.atomic_check = vc4_plane_atomic_check,
	.atomic_update = vc4_plane_atomic_update,
};

static void vc4_plane_destroy(struct drm_plane *plane)
{
	drm_plane_helper_disable(plane);
	drm_plane_cleanup(plane);
}

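/* Implements immediate (non-vblank-synced) updates of the cursor
 * position coming from the legacy cursor path, falling back to the
 * atomic helper for anything else.
 */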
static int
vc4_update_plane(struct drm_plane *plane,
		 struct drm_crtc *crtc,
		 struct drm_framebuffer *fb,
		 int crtc_x, int crtc_y,
		 unsigned int crtc_w, unsigned int crtc_h,
		 uint32_t src_x, uint32_t src_y,
		 uint32_t src_w, uint32_t src_h,
		 struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_plane_state *plane_state;
	struct vc4_plane_state *vc4_state;

	if (plane != crtc->cursor)
		goto out;

	plane_state = plane->state;
	vc4_state = to_vc4_plane_state(plane_state);

	if (!plane_state)
		goto out;

	/* No configuring new scaling in the fast path. */
	if (crtc_w != plane_state->crtc_w ||
	    crtc_h != plane_state->crtc_h ||
	    src_w != plane_state->src_w ||
	    src_h != plane_state->src_h) {
		goto out;
	}

	if (fb != plane_state->fb) {
		drm_atomic_set_fb_for_plane(plane->state, fb);
		vc4_plane_async_set_fb(plane, fb);
	}

	/* Set the cursor's position on the screen.  This is the
	 * expected change from the drm_mode_cursor_universal()
	 * helper.
	 */
	plane_state->crtc_x = crtc_x;
	plane_state->crtc_y = crtc_y;

	/* Allow changing the start position within the cursor BO, if
	 * that matters.
	 */
	plane_state->src_x = src_x;
	plane_state->src_y = src_y;

	/* Update the display list based on the new crtc_x/y. */
	vc4_plane_atomic_check(plane, plane_state);

	/* Note that we can't just call vc4_plane_write_dlist()
	 * because that would smash the context data that the HVS is
	 * currently using.
	 */
	writel(vc4_state->dlist[vc4_state->pos0_offset],
	       &vc4_state->hw_dlist[vc4_state->pos0_offset]);
	writel(vc4_state->dlist[vc4_state->pos2_offset],
	       &vc4_state->hw_dlist[vc4_state->pos2_offset]);
	writel(vc4_state->dlist[vc4_state->ptr0_offset],
	       &vc4_state->hw_dlist[vc4_state->ptr0_offset]);

	return 0;

out:
	return drm_atomic_helper_update_plane(plane, crtc, fb,
					      crtc_x, crtc_y,
					      crtc_w, crtc_h,
					      src_x, src_y,
					      src_w, src_h,
					      ctx);
}

static const struct drm_plane_funcs vc4_plane_funcs = {
	.update_plane = vc4_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = vc4_plane_destroy,
	.set_property = NULL,
	.reset = vc4_plane_reset,
	.atomic_duplicate_state = vc4_plane_duplicate_state,
	.atomic_destroy_state = vc4_plane_destroy_state,
};

struct drm_plane *vc4_plane_init(struct drm_device *dev,
				 enum drm_plane_type type)
{
	struct drm_plane *plane = NULL;
	struct vc4_plane *vc4_plane;
	u32 formats[ARRAY_SIZE(hvs_formats)];
	u32 num_formats = 0;
	int ret = 0;
	unsigned i;

	vc4_plane = devm_kzalloc(dev->dev, sizeof(*vc4_plane),
				 GFP_KERNEL);
	if (!vc4_plane)
		return ERR_PTR(-ENOMEM);

	for (i = 0; i < ARRAY_SIZE(hvs_formats); i++) {
		/* Don't allow YUV formats on the cursor plane, since
		 * that would mean tuning the HVS scaler for YUV, which
		 * we don't have code to do yet.
		 */
		if (type != DRM_PLANE_TYPE_CURSOR ||
		    hvs_formats[i].hvs < HVS_PIXEL_FORMAT_YCBCR_YUV420_3PLANE) {
			formats[num_formats++] = hvs_formats[i].drm;
		}
	}
	plane = &vc4_plane->base;
	ret = drm_universal_plane_init(dev, plane, 0,
				       &vc4_plane_funcs,
				       formats, num_formats,
				       type, NULL);
	if (ret)
		return ERR_PTR(ret);

	drm_plane_helper_add(plane, &vc4_plane_helper_funcs);

	return plane;
}