/*
 * Armada DRM CRTC support.
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"
#include "armada_plane.h"
#include "armada_trace.h"

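/*
 * Note on interlacing: the controller timings must be reprogrammed on
 * every field, as LCD_SPU_V_PORCH, LCD_SPU_ADV_REG and LCD_SPUT_V_H_TOTAL
 * differ between the two fields.  The per-field values are precomputed in
 * armada_drm_crtc_mode_set_nofb() and written from the frame interrupt in
 * armada_drm_crtc_irq().  Roughly, from that code:
 *
 *   v[1].spu_v_h_total = crtc_vtotal << 16 | crtc_htotal;
 *   v[0].spu_v_h_total = v[1].spu_v_h_total + (1 << 16);  (one extra line)
 *   v[0] vsync offset  = crtc_hsync_start - crtc_htotal / 2;
 */
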
/* Write a queued register set, terminated by an entry with offset ~0. */
void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
	while (regs->offset != ~0) {
		void __iomem *reg = dcrtc->base + regs->offset;
		uint32_t val;

		val = regs->mask;
		if (val != 0)
			val &= readl_relaxed(reg);
		writel_relaxed(val | regs->val, reg);
		++regs;
	}
}

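/*
 * Enable or disable the parallel (DUMB) interface.  When disabling a
 * 24-bit RGB888 interface, switch the pads to output the blank colour.
 */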
static void armada_drm_crtc_update(struct armada_crtc *dcrtc, bool enable)
{
	uint32_t dumb_ctrl;

	dumb_ctrl = dcrtc->cfg_dumb_ctrl;

	if (enable)
		dumb_ctrl |= CFG_DUMB_ENA;

	/*
	 * Only a 24-bit RGB888 interface is switched to the blank colour
	 * while disabled; other DUMB modes may be using the LCD_D pads
	 * for other purposes, so leave those untouched.
	 */
	if (!enable && (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
		dumb_ctrl &= ~DUMB_MASK;
		dumb_ctrl |= DUMB_BLANK;
	}

	armada_updatel(dumb_ctrl,
		       ~(CFG_INV_CSYNC | CFG_INV_HSYNC | CFG_INV_VSYNC),
		       dcrtc->base + LCD_SPU_DUMB_CTRL);
}

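/*
 * Claim any pending flip event so that it can be completed from the
 * vblank interrupt once the new state has taken effect.
 */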
static void armada_drm_crtc_queue_state_event(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_pending_vblank_event *event;

	/* If we have an event, we need vblank events enabled */
	event = xchg(&crtc->state->event, NULL);
	if (event) {
		WARN_ON(drm_crtc_vblank_get(crtc) != 0);
		dcrtc->event = event;
	}
}

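/*
 * Load the 256-entry gamma LUT into the controller SRAM and enable gamma
 * correction, or disable gamma and power the gamma SRAM back down when no
 * LUT is set.
 */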
static void armada_drm_update_gamma(struct drm_crtc *crtc)
{
	struct drm_property_blob *blob = crtc->state->gamma_lut;
	void __iomem *base = drm_to_armada_crtc(crtc)->base;
	int i;

	if (blob) {
		struct drm_color_lut *lut = blob->data;

		armada_updatel(CFG_CSB_256x8, CFG_CSB_256x8 | CFG_PDWN256x8,
			       base + LCD_SPU_SRAM_PARA1);

		for (i = 0; i < 256; i++) {
			writel_relaxed(drm_color_lut_extract(lut[i].red, 8),
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(i | SRAM_WRITE | SRAM_GAMMA_YR,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			writel_relaxed(drm_color_lut_extract(lut[i].green, 8),
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(i | SRAM_WRITE | SRAM_GAMMA_UG,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			writel_relaxed(drm_color_lut_extract(lut[i].blue, 8),
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(i | SRAM_WRITE | SRAM_GAMMA_VB,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		}
		armada_updatel(CFG_GAMMA_ENA, CFG_GAMMA_ENA,
			       base + LCD_SPU_DMA_CTRL0);
	} else {
		armada_updatel(0, CFG_GAMMA_ENA, base + LCD_SPU_DMA_CTRL0);
		armada_updatel(CFG_PDWN256x8, CFG_CSB_256x8 | CFG_PDWN256x8,
			       base + LCD_SPU_SRAM_PARA1);
	}
}

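/* Filter out display modes that the LCD controller cannot generate. */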
static enum drm_mode_status armada_drm_crtc_mode_valid(struct drm_crtc *crtc,
	const struct drm_display_mode *mode)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (mode->vscan > 1)
		return MODE_NO_VSCAN;

	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return MODE_NO_DBLESCAN;

	if (mode->flags & DRM_MODE_FLAG_HSKEW)
		return MODE_H_ILLEGAL;

	/* Interlaced modes require SPU_ADV_REG support in the variant */
	if (!dcrtc->variant->has_spu_adv_reg &&
	    mode->flags & DRM_MODE_FLAG_INTERLACE)
		return MODE_NO_INTERLACE;

	if (mode->flags & (DRM_MODE_FLAG_BCAST | DRM_MODE_FLAG_PIXMUX |
			   DRM_MODE_FLAG_CLKDIV2))
		return MODE_BAD;

	return MODE_OK;
}

static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
	const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/*
	 * Set the CRTC timing parameters for the adjusted mode, halving
	 * the vertical timings for interlaced modes.
	 */
	drm_mode_set_crtcinfo(adj, CRTC_INTERLACE_HALVE_V);

	/*
	 * Re-validate the adjusted mode in case an encoder or bridge has
	 * changed it into something we cannot support.
	 */
	if (armada_drm_crtc_mode_valid(crtc, adj) != MODE_OK)
		return false;

	/* Check whether we can generate the required pixel clock */
	ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
	if (ret)
		return false;

	return true;
}

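/* Both IRQ mask helpers below require dcrtc->irq_lock to be held. */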
static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if (dcrtc->irq_ena & mask) {
		dcrtc->irq_ena &= ~mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	}
}

static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if ((dcrtc->irq_ena & mask) != mask) {
		dcrtc->irq_ena |= mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
		if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
			writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
	}
}

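/*
 * Handle the decoded interrupt status: report FIFO underflows, pass
 * vblanks to DRM, reprogram the per-field timings for interlaced modes,
 * apply deferred register and cursor updates on frame done, and complete
 * any pending flip event.
 */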
static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	struct drm_pending_vblank_event *event;
	void __iomem *base = dcrtc->base;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_crtc_handle_vblank(&dcrtc->crtc);

	spin_lock(&dcrtc->irq_lock);
	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & dcrtc->irq_ena & DUMB_FRAMEDONE) {
		if (dcrtc->update_pending) {
			armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
			dcrtc->update_pending = false;
		}
		if (dcrtc->cursor_update) {
			writel_relaxed(dcrtc->cursor_hw_pos,
				       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			writel_relaxed(dcrtc->cursor_hw_sz,
				       base + LCD_SPU_HWC_HPXL_VLN);
			armada_updatel(CFG_HWC_ENA,
				       CFG_HWC_ENA | CFG_HWC_1BITMOD |
				       CFG_HWC_1BITENA,
				       base + LCD_SPU_DMA_CTRL0);
			dcrtc->cursor_update = false;
		}
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}
	spin_unlock(&dcrtc->irq_lock);

	if (stat & VSYNC_IRQ && !dcrtc->update_pending) {
		event = xchg(&dcrtc->event, NULL);
		if (event) {
			spin_lock(&dcrtc->crtc.dev->event_lock);
			drm_crtc_send_vblank_event(&dcrtc->crtc, event);
			spin_unlock(&dcrtc->crtc.dev->event_lock);
			drm_crtc_vblank_put(&dcrtc->crtc);
		}
	}
}

static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * Writing zero to the ISR appears to be required to acknowledge
	 * the interrupt; without it, only a single interrupt is delivered.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	trace_armada_drm_irq(&dcrtc->crtc, stat);

	/* Mask out the interrupts we haven't enabled */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}

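/*
 * Program the CRTC timing registers for the adjusted mode, including the
 * per-field values used by the interrupt handler for interlaced modes.
 */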
static void armada_drm_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct drm_display_mode *adj = &crtc->state->adjusted_mode;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	i = 0;
	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_KMS("[CRTC:%d:%s] mode " DRM_MODE_FMT "\n",
		      crtc->base.id, crtc->name, DRM_MODE_ARG(adj));
	DRM_DEBUG_KMS("lm %d rm %d tm %d bm %d\n", lm, rm, tm, bm);

	/* Now compute the divider for real */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	dcrtc->interlaced = interlaced;

	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;

	if (interlaced) {
		/* Other field: VSYNC moves by half a line, one extra line */
		val -= adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
					    (1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg)
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	/*
	 * The CFG_INV_* bits invert the corresponding sync outputs, so
	 * set them for the negative-polarity mode flags.
	 */
	val = 0;
	if (adj->flags & DRM_MODE_FLAG_NCSYNC)
		val |= CFG_INV_CSYNC;
	if (adj->flags & DRM_MODE_FLAG_NHSYNC)
		val |= CFG_INV_HSYNC;
	if (adj->flags & DRM_MODE_FLAG_NVSYNC)
		val |= CFG_INV_VSYNC;
	armada_reg_queue_mod(regs, i, val, CFG_INV_CSYNC | CFG_INV_HSYNC |
			     CFG_INV_VSYNC, LCD_SPU_DUMB_CTRL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static int armada_drm_crtc_atomic_check(struct drm_crtc *crtc,
	struct drm_crtc_state *state)
{
	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	if (state->gamma_lut && drm_color_lut_size(state->gamma_lut) != 256)
		return -EINVAL;

	if (state->color_mgmt_changed)
		state->planes_changed = true;

	return 0;
}

static void armada_drm_crtc_atomic_begin(struct drm_crtc *crtc,
	struct drm_crtc_state *old_crtc_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	if (crtc->state->color_mgmt_changed)
		armada_drm_update_gamma(crtc);

	dcrtc->regs_idx = 0;
	dcrtc->regs = dcrtc->atomic_regs;
}

static void armada_drm_crtc_atomic_flush(struct drm_crtc *crtc,
	struct drm_crtc_state *old_crtc_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	armada_reg_queue_end(dcrtc->regs, dcrtc->regs_idx);

	/*
	 * If we aren't doing a full modeset, defer the register update to
	 * the frame-done interrupt and queue the flip event; otherwise
	 * write the queued registers immediately.
	 */
	if (!drm_atomic_crtc_needs_modeset(crtc->state)) {
		dcrtc->update_pending = true;
		armada_drm_crtc_queue_state_event(crtc);
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		spin_unlock_irq(&dcrtc->irq_lock);
	} else {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
		spin_unlock_irq(&dcrtc->irq_lock);
	}
}

static void armada_drm_crtc_atomic_disable(struct drm_crtc *crtc,
	struct drm_crtc_state *old_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_pending_vblank_event *event;

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	if (old_state->adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
		drm_crtc_vblank_put(crtc);

	drm_crtc_vblank_off(crtc);
	armada_drm_crtc_update(dcrtc, false);

	if (!crtc->state->active) {
		/*
		 * This modeset leaves the CRTC disabled, so let the
		 * variant shut down its clocks etc.
		 */
		if (dcrtc->variant->disable)
			dcrtc->variant->disable(dcrtc);

		/*
		 * No further vblank interrupts will arrive, so complete
		 * any pending flip event here.
		 */
		event = crtc->state->event;
		crtc->state->event = NULL;
		if (event) {
			spin_lock_irq(&crtc->dev->event_lock);
			drm_crtc_send_vblank_event(crtc, event);
			spin_unlock_irq(&crtc->dev->event_lock);
		}
	}
}

static void armada_drm_crtc_atomic_enable(struct drm_crtc *crtc,
	struct drm_crtc_state *old_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	if (!old_state->active) {
		/*
		 * The CRTC is being enabled from the disabled state, so
		 * let the variant bring its clocks etc. back up.
		 */
		if (dcrtc->variant->enable)
			dcrtc->variant->enable(dcrtc, &crtc->state->adjusted_mode);
	}
	armada_drm_crtc_update(dcrtc, true);
	drm_crtc_vblank_on(crtc);

	if (crtc->state->adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
		WARN_ON(drm_crtc_vblank_get(crtc));

	armada_drm_crtc_queue_state_event(crtc);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.mode_valid = armada_drm_crtc_mode_valid,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set_nofb = armada_drm_crtc_mode_set_nofb,
	.atomic_check = armada_drm_crtc_atomic_check,
	.atomic_begin = armada_drm_crtc_atomic_begin,
	.atomic_flush = armada_drm_crtc_atomic_flush,
	.atomic_disable = armada_drm_crtc_atomic_disable,
	.atomic_enable = armada_drm_crtc_atomic_enable,
};

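/*
 * Load an ARGB cursor image into the hardware cursor SRAM, swapping the
 * red and blue channels into the byte order the SRAM expects.
 */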
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			/*
			 * Swap the red and blue channels; the alpha and
			 * green bytes stay in place.
			 */
			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}

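/* Fill the hardware cursor transparency SRAM with its default pattern. */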
static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
	unsigned addr;

	for (addr = 0; addr < 256; addr++) {
		writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
		writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
			       base + LCD_SPU_SRAM_CTRL);
	}
}

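/*
 * Recalculate the cursor's on-screen position and visible size, reload
 * the cursor image when required, and arm the frame-done interrupt to
 * apply the new position, size and enable bits.
 */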
static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/*
	 * Calculate the on-screen position and the offset into the cursor
	 * image, clipping the cursor against the display edges.
	 */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}

	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/*
	 * For interlaced modes, skip alternate lines: double the stride
	 * and halve the visible height and vertical position.
	 */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	spin_lock_irq(&dcrtc->irq_lock);
	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);
	spin_unlock_irq(&dcrtc->irq_lock);

	/*
	 * If the cursor SRAM was powered down, initialise the transparency
	 * entries and reload the cursor image.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;

		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Let the IRQ handler apply the position, size and enable bits */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}

static void cursor_update(void *data)
{
	armada_drm_crtc_cursor_update(data, true);
}

static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
	struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_gem_object *obj = NULL;
	int ret;

	/* No hardware cursor support */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -ENXIO;

	if (handle && w > 0 && h > 0) {
		/* Maximum cursor size is 64x32 or 32x64 */
		if (w > 64 || h > 64 || (w > 32 && h > 32))
			return -ENOMEM;

		obj = armada_gem_object_lookup(file, handle);
		if (!obj)
			return -ENOENT;

		/* The cursor object must be kernel-mapped */
		if (!obj->addr) {
			drm_gem_object_put_unlocked(&obj->obj);
			return -EINVAL;
		}

		if (obj->obj.size < w * h * 4) {
			DRM_ERROR("buffer is too small\n");
			drm_gem_object_put_unlocked(&obj->obj);
			return -ENOMEM;
		}
	}

	if (dcrtc->cursor_obj) {
		dcrtc->cursor_obj->update = NULL;
		dcrtc->cursor_obj->update_data = NULL;
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
	}
	dcrtc->cursor_obj = obj;
	dcrtc->cursor_w = w;
	dcrtc->cursor_h = h;
	ret = armada_drm_crtc_cursor_update(dcrtc, true);
	if (obj) {
		obj->update_data = dcrtc;
		obj->update = cursor_update;
	}

	return ret;
}

static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* No hardware cursor support */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -EFAULT;

	dcrtc->cursor_x = x;
	dcrtc->cursor_y = y;
	ret = armada_drm_crtc_cursor_update(dcrtc, false);

	return ret;
}

static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_private *priv = crtc->dev->dev_private;

	if (dcrtc->cursor_obj)
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);

	priv->dcrtc[dcrtc->num] = NULL;
	drm_crtc_cleanup(&dcrtc->crtc);

	if (dcrtc->variant->disable)
		dcrtc->variant->disable(dcrtc);

	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

	of_node_put(dcrtc->crtc.port);

	kfree(dcrtc);
}

static int armada_drm_crtc_late_register(struct drm_crtc *crtc)
{
	if (IS_ENABLED(CONFIG_DEBUG_FS))
		armada_drm_crtc_debugfs_init(drm_to_armada_crtc(crtc));

	return 0;
}

static int armada_drm_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
	return 0;
}

static void armada_drm_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static const struct drm_crtc_funcs armada_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.cursor_set = armada_drm_crtc_cursor_set,
	.cursor_move = armada_drm_crtc_cursor_move,
	.destroy = armada_drm_crtc_destroy,
	.gamma_set = drm_atomic_helper_legacy_gamma_set,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.late_register = armada_drm_crtc_late_register,
	.enable_vblank = armada_drm_crtc_enable_vblank,
	.disable_vblank = armada_drm_crtc_disable_vblank,
};

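/*
 * Select a clock and divider able to produce the desired pixel clock
 * within the tolerances given in @params.  Returns the index of the
 * selected clock or -ERANGE if none is suitable; the chosen clock,
 * divider and the rate to request are returned via @res.
 */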
int armada_crtc_select_clock(struct armada_crtc *dcrtc,
			     struct armada_clk_result *res,
			     const struct armada_clocking_params *params,
			     struct clk *clks[], size_t num_clks,
			     unsigned long desired_khz)
{
	unsigned long desired_hz = desired_khz * 1000;
	unsigned long desired_clk_hz;
	unsigned long real_clk_hz;
	unsigned long real_hz;
	unsigned long permillage;
	struct clk *clk;
	u32 div;
	int i;

	DRM_DEBUG_KMS("[CRTC:%u:%s] desired clock=%luHz\n",
		      dcrtc->crtc.base.id, dcrtc->crtc.name, desired_hz);

	for (i = 0; i < num_clks; i++) {
		clk = clks[i];
		if (!clk)
			continue;

		if (params->settable & BIT(i)) {
			real_clk_hz = clk_round_rate(clk, desired_hz);
			desired_clk_hz = desired_hz;
		} else {
			real_clk_hz = clk_get_rate(clk);
			desired_clk_hz = real_clk_hz;
		}

		/* If the clock can do exactly the desired rate, we're done */
		if (real_clk_hz == desired_hz) {
			real_hz = real_clk_hz;
			div = 1;
			goto found;
		}

		/* Calculate the closest divider */
		div = DIV_ROUND_CLOSEST(real_clk_hz, desired_hz);
		if (div == 0 || div > params->div_max)
			continue;

		/* Calculate the rate we will actually produce */
		real_hz = DIV_ROUND_CLOSEST(real_clk_hz, div);

		DRM_DEBUG_KMS("[CRTC:%u:%s] clk=%u %luHz div=%u real=%luHz\n",
			      dcrtc->crtc.base.id, dcrtc->crtc.name,
			      i, real_clk_hz, div, real_hz);

		/* Reject rates outside the permitted per-mille tolerance */
		if (real_hz < desired_hz) {
			permillage = real_hz / desired_khz;
			if (permillage < params->permillage_min)
				continue;
		} else {
			permillage = DIV_ROUND_UP(real_hz, desired_khz);
			if (permillage > params->permillage_max)
				continue;
		}
		goto found;
	}

	return -ERANGE;

found:
	DRM_DEBUG_KMS("[CRTC:%u:%s] selected clk=%u %luHz div=%u real=%luHz\n",
		      dcrtc->crtc.base.id, dcrtc->crtc.name,
		      i, real_clk_hz, div, real_hz);

	res->desired_clk_hz = desired_clk_hz;
	res->clk = clk;
	res->div = div;

	return i;
}

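/*
 * Create and initialise a CRTC: map the registers, program safe initial
 * register values, request the interrupt, and register the primary and
 * overlay planes with DRM.
 */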
static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
	struct resource *res, int irq, const struct armada_variant *variant,
	struct device_node *port)
{
	struct armada_private *priv = drm->dev_private;
	struct armada_crtc *dcrtc;
	struct drm_plane *primary;
	void __iomem *base;
	int ret;

	base = devm_ioremap_resource(dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
	if (!dcrtc) {
		DRM_ERROR("failed to allocate Armada crtc\n");
		return -ENOMEM;
	}

	if (dev != drm->dev)
		dev_set_drvdata(dev, dcrtc);

	dcrtc->variant = variant;
	dcrtc->base = base;
	dcrtc->num = drm->mode_config.num_crtc;
	dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
	dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
	spin_lock_init(&dcrtc->irq_lock);
	dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;

	/* Initialize some registers which we don't otherwise set */
	writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
	writel_relaxed(dcrtc->spu_iopad_ctrl,
		       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
	writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
	writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
			       dcrtc);
	if (ret < 0)
		goto err_crtc;

	if (dcrtc->variant->init) {
		ret = dcrtc->variant->init(dcrtc, dev);
		if (ret)
			goto err_crtc;
	}

	/* Enable the fast-arbitration configuration bit */
	armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

	priv->dcrtc[dcrtc->num] = dcrtc;

	dcrtc->crtc.port = port;

	primary = kzalloc(sizeof(*primary), GFP_KERNEL);
	if (!primary) {
		ret = -ENOMEM;
		goto err_crtc;
	}

	ret = armada_drm_primary_plane_init(drm, primary);
	if (ret) {
		kfree(primary);
		goto err_crtc;
	}

	ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, primary, NULL,
					&armada_crtc_funcs, NULL);
	if (ret)
		goto err_crtc_init;

	drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);

	ret = drm_mode_crtc_set_gamma_size(&dcrtc->crtc, 256);
	if (ret)
		return ret;

	drm_crtc_enable_color_mgmt(&dcrtc->crtc, 0, false, 256);

	return armada_overlay_plane_create(drm, 1 << dcrtc->num);

err_crtc_init:
	primary->funcs->destroy(primary);
err_crtc:
	kfree(dcrtc);

	return ret;
}

static int
armada_lcd_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = data;
	struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	int irq = platform_get_irq(pdev, 0);
	const struct armada_variant *variant;
	struct device_node *port = NULL;

	if (irq < 0)
		return irq;

	if (!dev->of_node) {
		const struct platform_device_id *id;

		id = platform_get_device_id(pdev);
		if (!id)
			return -ENXIO;

		variant = (const struct armada_variant *)id->driver_data;
	} else {
		const struct of_device_id *match;
		struct device_node *np, *parent = dev->of_node;

		match = of_match_device(dev->driver->of_match_table, dev);
		if (!match)
			return -ENXIO;

		np = of_get_child_by_name(parent, "ports");
		if (np)
			parent = np;
		port = of_get_child_by_name(parent, "port");
		of_node_put(np);
		if (!port) {
			dev_err(dev, "no port node found in %pOF\n", parent);
			return -ENXIO;
		}

		variant = match->data;
	}

	return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
}

static void
armada_lcd_unbind(struct device *dev, struct device *master, void *data)
{
	struct armada_crtc *dcrtc = dev_get_drvdata(dev);

	armada_drm_crtc_destroy(&dcrtc->crtc);
}

static const struct component_ops armada_lcd_ops = {
	.bind = armada_lcd_bind,
	.unbind = armada_lcd_unbind,
};

static int armada_lcd_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &armada_lcd_ops);
}

static int armada_lcd_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &armada_lcd_ops);
	return 0;
}

static const struct of_device_id armada_lcd_of_match[] = {
	{
		.compatible = "marvell,dove-lcd",
		.data = &armada510_ops,
	},
	{}
};
MODULE_DEVICE_TABLE(of, armada_lcd_of_match);

static const struct platform_device_id armada_lcd_platform_ids[] = {
	{
		.name = "armada-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	}, {
		.name = "armada-510-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	},
	{ },
};
MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);

struct platform_driver armada_lcd_platform_driver = {
	.probe = armada_lcd_probe,
	.remove = armada_lcd_remove,
	.driver = {
		.name = "armada-lcd",
		.owner = THIS_MODULE,
		.of_match_table = armada_lcd_of_match,
	},
	.id_table = armada_lcd_platform_ids,
};