1
2
3
4
5
6#include <linux/clk.h>
7#include <linux/component.h>
8#include <linux/of_device.h>
9#include <linux/platform_device.h>
10#include <drm/drmP.h>
11#include <drm/drm_atomic.h>
12#include <drm/drm_probe_helper.h>
13#include <drm/drm_plane_helper.h>
14#include <drm/drm_atomic_helper.h>
15#include "armada_crtc.h"
16#include "armada_drm.h"
17#include "armada_fb.h"
18#include "armada_gem.h"
19#include "armada_hw.h"
20#include "armada_plane.h"
21#include "armada_trace.h"
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77void
78armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
79{
80 while (regs->offset != ~0) {
81 void __iomem *reg = dcrtc->base + regs->offset;
82 uint32_t val;
83
84 val = regs->mask;
85 if (val != 0)
86 val &= readl_relaxed(reg);
87 writel_relaxed(val | regs->val, reg);
88 ++regs;
89 }
90}
91
92static void armada_drm_crtc_update(struct armada_crtc *dcrtc, bool enable)
93{
94 uint32_t dumb_ctrl;
95
96 dumb_ctrl = dcrtc->cfg_dumb_ctrl;
97
98 if (enable)
99 dumb_ctrl |= CFG_DUMB_ENA;
100
101
102
103
104
105
106
107 if (!enable && (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
108 dumb_ctrl &= ~DUMB_MASK;
109 dumb_ctrl |= DUMB_BLANK;
110 }
111
112 armada_updatel(dumb_ctrl,
113 ~(CFG_INV_CSYNC | CFG_INV_HSYNC | CFG_INV_VSYNC),
114 dcrtc->base + LCD_SPU_DUMB_CTRL);
115}
116
117static void armada_drm_crtc_queue_state_event(struct drm_crtc *crtc)
118{
119 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
120 struct drm_pending_vblank_event *event;
121
122
123 event = xchg(&crtc->state->event, NULL);
124 if (event) {
125 WARN_ON(drm_crtc_vblank_get(crtc) != 0);
126 dcrtc->event = event;
127 }
128}
129
/*
 * Load the CRTC state's 256-entry gamma LUT into the SPU's 256x8 gamma
 * SRAM (separate Y/R, U/G and V/B banks) and enable gamma correction,
 * or disable gamma and power the SRAM down when no LUT is set.
 */
static void armada_drm_update_gamma(struct drm_crtc *crtc)
{
	struct drm_property_blob *blob = crtc->state->gamma_lut;
	void __iomem *base = drm_to_armada_crtc(crtc)->base;
	int i;

	if (blob) {
		struct drm_color_lut *lut = blob->data;

		/* Power up the 256x8 gamma SRAM before writing to it. */
		armada_updatel(CFG_CSB_256x8, CFG_CSB_256x8 | CFG_PDWN256x8,
			       base + LCD_SPU_SRAM_PARA1);

		/*
		 * Each entry: write the 8-bit component to WRDAT, then the
		 * index plus bank-select to SRAM_CTRL to commit it.  The
		 * readback in between is presumably a posting/delay read —
		 * NOTE(review): confirm against the SPU SRAM access spec.
		 */
		for (i = 0; i < 256; i++) {
			writel_relaxed(drm_color_lut_extract(lut[i].red, 8),
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(i | SRAM_WRITE | SRAM_GAMMA_YR,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			writel_relaxed(drm_color_lut_extract(lut[i].green, 8),
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(i | SRAM_WRITE | SRAM_GAMMA_UG,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			writel_relaxed(drm_color_lut_extract(lut[i].blue, 8),
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(i | SRAM_WRITE | SRAM_GAMMA_VB,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		}
		armada_updatel(CFG_GAMMA_ENA, CFG_GAMMA_ENA,
			       base + LCD_SPU_DMA_CTRL0);
	} else {
		/* Disable gamma first, then power the SRAM back down. */
		armada_updatel(0, CFG_GAMMA_ENA, base + LCD_SPU_DMA_CTRL0);
		armada_updatel(CFG_PDWN256x8, CFG_CSB_256x8 | CFG_PDWN256x8,
			       base + LCD_SPU_SRAM_PARA1);
	}
}
167
168static enum drm_mode_status armada_drm_crtc_mode_valid(struct drm_crtc *crtc,
169 const struct drm_display_mode *mode)
170{
171 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
172
173 if (mode->vscan > 1)
174 return MODE_NO_VSCAN;
175
176 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
177 return MODE_NO_DBLESCAN;
178
179 if (mode->flags & DRM_MODE_FLAG_HSKEW)
180 return MODE_H_ILLEGAL;
181
182
183 if (!dcrtc->variant->has_spu_adv_reg &&
184 mode->flags & DRM_MODE_FLAG_INTERLACE)
185 return MODE_NO_INTERLACE;
186
187 if (mode->flags & (DRM_MODE_FLAG_BCAST | DRM_MODE_FLAG_PIXMUX |
188 DRM_MODE_FLAG_CLKDIV2))
189 return MODE_BAD;
190
191 return MODE_OK;
192}
193
194
195static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
196 const struct drm_display_mode *mode, struct drm_display_mode *adj)
197{
198 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
199 int ret;
200
201
202
203
204
205
206 drm_mode_set_crtcinfo(adj, CRTC_INTERLACE_HALVE_V);
207
208
209
210
211
212 if (armada_drm_crtc_mode_valid(crtc, adj) != MODE_OK)
213 return false;
214
215
216 ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
217 if (ret)
218 return false;
219
220 return true;
221}
222
223
224static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
225{
226 if (dcrtc->irq_ena & mask) {
227 dcrtc->irq_ena &= ~mask;
228 writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
229 }
230}
231
232static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
233{
234 if ((dcrtc->irq_ena & mask) != mask) {
235 dcrtc->irq_ena |= mask;
236 writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
237 if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
238 writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
239 }
240}
241
/*
 * Service this CRTC's interrupts.  @stat is the raw ISR value; it is
 * only filtered by irq_ena where explicitly tested below.  Called from
 * hard-IRQ context.
 */
static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	struct drm_pending_vblank_event *event;
	void __iomem *base = dcrtc->base;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_crtc_handle_vblank(&dcrtc->crtc);

	spin_lock(&dcrtc->irq_lock);
	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		/*
		 * Interlace: switch between the two per-field timing sets
		 * (v[0]/v[1]) each frame.  GRA_FRAME_IRQ0 selects which set
		 * to program next — NOTE(review): field/index mapping
		 * presumed from the IRQ bit; confirm against the SPU spec.
		 */
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		/* Only touch the vsync-offset bits of the ADV register. */
		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & dcrtc->irq_ena & DUMB_FRAMEDONE) {
		/* Frame done: apply deferred register and cursor updates. */
		if (dcrtc->update_pending) {
			armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
			dcrtc->update_pending = false;
		}
		if (dcrtc->cursor_update) {
			/* Latch new cursor position/size, then enable it. */
			writel_relaxed(dcrtc->cursor_hw_pos,
				       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			writel_relaxed(dcrtc->cursor_hw_sz,
				       base + LCD_SPU_HWC_HPXL_VLN);
			armada_updatel(CFG_HWC_ENA,
				       CFG_HWC_ENA | CFG_HWC_1BITMOD |
				       CFG_HWC_1BITENA,
				       base + LCD_SPU_DMA_CTRL0);
			dcrtc->cursor_update = false;
		}
		/* One-shot interrupt: disarm until the next update. */
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}
	spin_unlock(&dcrtc->irq_lock);

	if (stat & VSYNC_IRQ && !dcrtc->update_pending) {
		/* Deliver the queued event once all updates have landed. */
		event = xchg(&dcrtc->event, NULL);
		if (event) {
			spin_lock(&dcrtc->crtc.dev->event_lock);
			drm_crtc_send_vblank_event(&dcrtc->crtc, event);
			spin_unlock(&dcrtc->crtc.dev->event_lock);
			/* Drop the ref taken in queue_state_event(). */
			drm_crtc_vblank_put(&dcrtc->crtc);
		}
	}
}
300
/* Top-level interrupt handler for one LCD controller instance. */
static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * Acknowledge all interrupts by writing zero to the ISR before
	 * handling them, so edges arriving during processing are not lost.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	trace_armada_drm_irq(&dcrtc->crtc, stat);

	/* Only bits we have enabled count towards claiming the IRQ. */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		/* Note: the full (unmasked) status is passed down. */
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}
324
325
/*
 * Program the CRTC timing registers for the new adjusted mode.  The
 * register writes happen under irq_lock because, for interlaced modes,
 * the IRQ handler rewrites the same per-field timing registers.
 */
static void armada_drm_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct drm_display_mode *adj = &crtc->state->adjusted_mode;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	i = 0;
	/* Blanking margins derived from the sync positions. */
	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_KMS("[CRTC:%d:%s] mode " DRM_MODE_FMT "\n",
		      crtc->base.id, crtc->name, DRM_MODE_ARG(adj));
	DRM_DEBUG_KMS("lm %d rm %d tm %d bm %d\n", lm, rm, tm, bm);

	/* Compute the pixel clock and get the divider to program. */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	dcrtc->interlaced = interlaced;

	/*
	 * v[1] holds the baseline timing set; for interlace, v[0] holds
	 * the alternate field's set and the two are swapped per frame by
	 * the IRQ handler.
	 */
	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;

	if (interlaced) {
		/*
		 * Alternate field: vsync shifted by half a line, one extra
		 * line in the total, porch advanced by one line.
		 */
		val -= adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
					    (1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg)
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	/* Sync polarity inversion bits for the dumb interface. */
	val = 0;
	if (adj->flags & DRM_MODE_FLAG_NCSYNC)
		val |= CFG_INV_CSYNC;
	if (adj->flags & DRM_MODE_FLAG_NHSYNC)
		val |= CFG_INV_HSYNC;
	if (adj->flags & DRM_MODE_FLAG_NVSYNC)
		val |= CFG_INV_VSYNC;
	armada_reg_queue_mod(regs, i, val, CFG_INV_CSYNC | CFG_INV_HSYNC |
			     CFG_INV_VSYNC, LCD_SPU_DUMB_CTRL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}
410
411static int armada_drm_crtc_atomic_check(struct drm_crtc *crtc,
412 struct drm_crtc_state *state)
413{
414 DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);
415
416 if (state->gamma_lut && drm_color_lut_size(state->gamma_lut) != 256)
417 return -EINVAL;
418
419 if (state->color_mgmt_changed)
420 state->planes_changed = true;
421
422 return 0;
423}
424
425static void armada_drm_crtc_atomic_begin(struct drm_crtc *crtc,
426 struct drm_crtc_state *old_crtc_state)
427{
428 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
429
430 DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);
431
432 if (crtc->state->color_mgmt_changed)
433 armada_drm_update_gamma(crtc);
434
435 dcrtc->regs_idx = 0;
436 dcrtc->regs = dcrtc->atomic_regs;
437}
438
/*
 * Finish an atomic commit: terminate the register list built up by the
 * plane hooks and either defer its application to the frame-done IRQ
 * (normal page-flip path) or apply it immediately (modeset path).
 */
static void armada_drm_crtc_atomic_flush(struct drm_crtc *crtc,
	struct drm_crtc_state *old_crtc_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	armada_reg_queue_end(dcrtc->regs, dcrtc->regs_idx);

	if (!drm_atomic_crtc_needs_modeset(crtc->state)) {
		/*
		 * No modeset: let the IRQ handler write the registers at
		 * frame-done so the update latches cleanly; the queued
		 * event is completed once update_pending clears.
		 */
		dcrtc->update_pending = true;
		armada_drm_crtc_queue_state_event(crtc);
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		spin_unlock_irq(&dcrtc->irq_lock);
	} else {
		/* Modeset: apply immediately under the IRQ lock. */
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
		spin_unlock_irq(&dcrtc->irq_lock);
	}
}
464
/*
 * Disable the CRTC.  Releases the extra vblank reference held for
 * interlaced modes, blanks the output, and — when the CRTC is going
 * fully inactive — powers down the variant hardware and completes any
 * pending event by hand, since no further vblanks will arrive.
 */
static void armada_drm_crtc_atomic_disable(struct drm_crtc *crtc,
	struct drm_crtc_state *old_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_pending_vblank_event *event;

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	/* Drop the reference taken in atomic_enable for interlace. */
	if (old_state->adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
		drm_crtc_vblank_put(crtc);

	drm_crtc_vblank_off(crtc);
	armada_drm_crtc_update(dcrtc, false);

	if (!crtc->state->active) {
		/* Fully off: power down variant-specific hardware. */
		if (dcrtc->variant->disable)
			dcrtc->variant->disable(dcrtc);

		/*
		 * Complete the event now — with the CRTC inactive there
		 * will be no vblank interrupt to deliver it.
		 */
		event = crtc->state->event;
		crtc->state->event = NULL;
		if (event) {
			spin_lock_irq(&crtc->dev->event_lock);
			drm_crtc_send_vblank_event(crtc, event);
			spin_unlock_irq(&crtc->dev->event_lock);
		}
	}
}
500
/*
 * Enable the CRTC: power up variant hardware if coming from inactive,
 * un-blank the output and turn vblank handling back on.  Interlaced
 * modes hold a vblank reference for the whole time they are active so
 * the per-field timing updates in the IRQ handler keep running.
 */
static void armada_drm_crtc_atomic_enable(struct drm_crtc *crtc,
	struct drm_crtc_state *old_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	if (!old_state->active) {
		/* Coming from fully-off: bring up the variant hardware. */
		if (dcrtc->variant->enable)
			dcrtc->variant->enable(dcrtc, &crtc->state->adjusted_mode);
	}
	armada_drm_crtc_update(dcrtc, true);
	drm_crtc_vblank_on(crtc);

	/* Matched by the vblank_put in atomic_disable. */
	if (crtc->state->adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
		WARN_ON(drm_crtc_vblank_get(crtc));

	armada_drm_crtc_queue_state_event(crtc);
}
525
/* Atomic modeset pipeline hooks for the Armada CRTC. */
static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.mode_valid = armada_drm_crtc_mode_valid,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set_nofb = armada_drm_crtc_mode_set_nofb,
	.atomic_check = armada_drm_crtc_atomic_check,
	.atomic_begin = armada_drm_crtc_atomic_begin,
	.atomic_flush = armada_drm_crtc_atomic_flush,
	.atomic_disable = armada_drm_crtc_atomic_disable,
	.atomic_enable = armada_drm_crtc_atomic_enable,
};
536
/*
 * Load an ARGB cursor image into the hardware cursor SRAM.  @pix points
 * at the first visible pixel, @stride is in 32-bit pixels.  Each pixel's
 * red and blue channels are swapped on the way in (the SRAM expects the
 * opposite byte order for those channels).
 */
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			/* Swap R and B, keep A and G in place. */
			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			/* Presumably a posting/delay read — TODO confirm. */
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);

			/*
			 * SRAM addressing is non-linear: skip ahead at each
			 * 256-entry boundary, and wrap into the second RAM
			 * bank when the first is exhausted.
			 * NOTE(review): constants presumed from the SRAM
			 * layout; verify against the SPU documentation.
			 */
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}
575
576static void armada_drm_crtc_cursor_tran(void __iomem *base)
577{
578 unsigned addr;
579
580 for (addr = 0; addr < 256; addr++) {
581
582 writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
583 writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
584 base + LCD_SPU_SRAM_CTRL);
585 }
586}
587
/*
 * (Re)program the hardware cursor.  Clips the cursor image against the
 * active display area, halves vertical coordinates for interlace,
 * powers up and (when needed) reloads the cursor SRAM, then defers the
 * final position/size/enable latch to the frame-done interrupt.
 * @reload forces the image data to be rewritten into the SRAM.
 */
static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/* Clip horizontally against the left and right screen edges. */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}

	/* And vertically against the top and bottom edges. */
	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/* Interlace: each field carries every other line. */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	/* No buffer or fully clipped away: just turn the cursor off. */
	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	/* Power up the 256x32 cursor SRAM, remembering its prior state. */
	spin_lock_irq(&dcrtc->irq_lock);
	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);
	spin_unlock_irq(&dcrtc->irq_lock);

	/*
	 * The SRAM was powered down: its contents (including the
	 * transparency table) are gone, so reinitialise and force a
	 * full image reload.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	/* Size changed: disable the cursor while the image is rewritten. */
	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;

		/* Skip the clipped-off rows/columns of the source image. */
		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Defer position/size latch and enable to the frame-done IRQ. */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}
681
682static void cursor_update(void *data)
683{
684 armada_drm_crtc_cursor_update(data, true);
685}
686
/*
 * Legacy cursor_set entry point: attach, replace or remove the cursor
 * buffer.  On success the CRTC holds the GEM reference obtained from
 * the lookup; the previous object's reference is dropped here.
 */
static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
	struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_gem_object *obj = NULL;
	int ret;

	/* The hardware cursor needs the SPU advanced register variant. */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -ENXIO;

	if (handle && w > 0 && h > 0) {
		/* The cursor SRAM can hold at most 64x32 or 32x64 pixels. */
		if (w > 64 || h > 64 || (w > 32 && h > 32))
			return -ENOMEM;

		obj = armada_gem_object_lookup(file, handle);
		if (!obj)
			return -ENOENT;

		/* The buffer must be CPU-mapped so we can copy it to SRAM. */
		if (!obj->addr) {
			drm_gem_object_put_unlocked(&obj->obj);
			return -EINVAL;
		}

		if (obj->obj.size < w * h * 4) {
			DRM_ERROR("buffer is too small\n");
			drm_gem_object_put_unlocked(&obj->obj);
			return -ENOMEM;
		}
	}

	/* Detach and release any previously-set cursor object. */
	if (dcrtc->cursor_obj) {
		dcrtc->cursor_obj->update = NULL;
		dcrtc->cursor_obj->update_data = NULL;
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
	}
	dcrtc->cursor_obj = obj;
	dcrtc->cursor_w = w;
	dcrtc->cursor_h = h;
	ret = armada_drm_crtc_cursor_update(dcrtc, true);
	/* Hook the object so later content changes reload the cursor. */
	if (obj) {
		obj->update_data = dcrtc;
		obj->update = cursor_update;
	}

	return ret;
}
736
737static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
738{
739 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
740 int ret;
741
742
743 if (!dcrtc->variant->has_spu_adv_reg)
744 return -EFAULT;
745
746 dcrtc->cursor_x = x;
747 dcrtc->cursor_y = y;
748 ret = armada_drm_crtc_cursor_update(dcrtc, false);
749
750 return ret;
751}
752
/*
 * Tear down the CRTC: release the cursor buffer, unregister from the
 * driver-private table and DRM core, power down variant hardware,
 * silence the interrupt enable register and free the structure.
 */
static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_private *priv = crtc->dev->dev_private;

	if (dcrtc->cursor_obj)
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);

	priv->dcrtc[dcrtc->num] = NULL;
	drm_crtc_cleanup(&dcrtc->crtc);

	if (dcrtc->variant->disable)
		dcrtc->variant->disable(dcrtc);

	/* Mask all interrupts from this controller. */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

	/* Balances the of_get_child_by_name() in armada_lcd_bind(). */
	of_node_put(dcrtc->crtc.port);

	kfree(dcrtc);
}
773
774static int armada_drm_crtc_late_register(struct drm_crtc *crtc)
775{
776 if (IS_ENABLED(CONFIG_DEBUG_FS))
777 armada_drm_crtc_debugfs_init(drm_to_armada_crtc(crtc));
778
779 return 0;
780}
781
782
783static int armada_drm_crtc_enable_vblank(struct drm_crtc *crtc)
784{
785 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
786 unsigned long flags;
787
788 spin_lock_irqsave(&dcrtc->irq_lock, flags);
789 armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
790 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
791 return 0;
792}
793
794static void armada_drm_crtc_disable_vblank(struct drm_crtc *crtc)
795{
796 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
797 unsigned long flags;
798
799 spin_lock_irqsave(&dcrtc->irq_lock, flags);
800 armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
801 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
802}
803
/* Core CRTC operations; atomic helpers plus legacy cursor ioctls. */
static const struct drm_crtc_funcs armada_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.cursor_set = armada_drm_crtc_cursor_set,
	.cursor_move = armada_drm_crtc_cursor_move,
	.destroy = armada_drm_crtc_destroy,
	.gamma_set = drm_atomic_helper_legacy_gamma_set,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.late_register = armada_drm_crtc_late_register,
	.enable_vblank = armada_drm_crtc_enable_vblank,
	.disable_vblank = armada_drm_crtc_disable_vblank,
};
818
/*
 * Select a parent clock and integer divider giving a rate close to
 * @desired_khz.  Clocks flagged "settable" in @params are rounded to
 * the desired rate; others are used at their current rate.  The
 * divided result must fall within the permillage (parts-per-thousand)
 * tolerance window in @params.
 *
 * Returns the index of the chosen clock in @clks (with @res filled in),
 * or -ERANGE when no clock/divider combination is acceptable.
 */
int armada_crtc_select_clock(struct armada_crtc *dcrtc,
			     struct armada_clk_result *res,
			     const struct armada_clocking_params *params,
			     struct clk *clks[], size_t num_clks,
			     unsigned long desired_khz)
{
	unsigned long desired_hz = desired_khz * 1000;
	unsigned long desired_clk_hz;	/* rate to request from the clk */
	unsigned long real_clk_hz;	/* rate the clk can provide */
	unsigned long real_hz;		/* rate after our divider */
	unsigned long permillage;
	struct clk *clk;
	u32 div;
	int i;

	DRM_DEBUG_KMS("[CRTC:%u:%s] desired clock=%luHz\n",
		      dcrtc->crtc.base.id, dcrtc->crtc.name, desired_hz);

	for (i = 0; i < num_clks; i++) {
		clk = clks[i];
		if (!clk)
			continue;

		if (params->settable & BIT(i)) {
			real_clk_hz = clk_round_rate(clk, desired_hz);
			desired_clk_hz = desired_hz;
		} else {
			real_clk_hz = clk_get_rate(clk);
			desired_clk_hz = real_clk_hz;
		}

		/* Exact match needs no divider. */
		if (real_clk_hz == desired_hz) {
			real_hz = real_clk_hz;
			div = 1;
			goto found;
		}

		/* Pick the closest integer divider; reject out-of-range. */
		div = DIV_ROUND_CLOSEST(real_clk_hz, desired_hz);
		if (div == 0 || div > params->div_max)
			continue;

		real_hz = DIV_ROUND_CLOSEST(real_clk_hz, div);

		DRM_DEBUG_KMS("[CRTC:%u:%s] clk=%u %luHz div=%u real=%luHz\n",
			      dcrtc->crtc.base.id, dcrtc->crtc.name,
			      i, real_clk_hz, div, real_hz);

		/* Check the achieved rate against the tolerance window. */
		if (real_hz < desired_hz) {
			permillage = real_hz / desired_khz;
			if (permillage < params->permillage_min)
				continue;
		} else {
			permillage = DIV_ROUND_UP(real_hz, desired_khz);
			if (permillage > params->permillage_max)
				continue;
		}
		goto found;
	}

	return -ERANGE;

found:
	DRM_DEBUG_KMS("[CRTC:%u:%s] selected clk=%u %luHz div=%u real=%luHz\n",
		      dcrtc->crtc.base.id, dcrtc->crtc.name,
		      i, real_clk_hz, div, real_hz);

	res->desired_clk_hz = desired_clk_hz;
	res->clk = clk;
	res->div = div;

	return i;
}
895
/*
 * Create and register one CRTC: map its registers, put the hardware
 * into a sane initial state, hook up the interrupt, create the primary
 * plane and register the CRTC (with gamma support) plus an overlay
 * plane with the DRM core.
 */
static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
	struct resource *res, int irq, const struct armada_variant *variant,
	struct device_node *port)
{
	struct armada_private *priv = drm->dev_private;
	struct armada_crtc *dcrtc;
	struct drm_plane *primary;
	void __iomem *base;
	int ret;

	base = devm_ioremap_resource(dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
	if (!dcrtc) {
		DRM_ERROR("failed to allocate Armada crtc\n");
		return -ENOMEM;
	}

	/* Component case: stash the crtc for armada_lcd_unbind(). */
	if (dev != drm->dev)
		dev_set_drvdata(dev, dcrtc);

	dcrtc->variant = variant;
	dcrtc->base = base;
	dcrtc->num = drm->mode_config.num_crtc;
	dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
	dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
	spin_lock_init(&dcrtc->irq_lock);
	dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;

	/*
	 * Base hardware state: unity clock divider, black blank colour,
	 * dumb-24 pad config, all optional SRAM blocks powered down.
	 * NOTE(review): 0x2032ff81 for DMA_CTRL1 is an undocumented
	 * magic value here — confirm against the SPU register spec.
	 */
	writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
	writel_relaxed(dcrtc->spu_iopad_ctrl,
		       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
	writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
	writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	/* Read-then-write-zero acknowledges any stale interrupt status. */
	readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
			       dcrtc);
	if (ret < 0)
		goto err_crtc;

	if (dcrtc->variant->init) {
		ret = dcrtc->variant->init(dcrtc, dev);
		if (ret)
			goto err_crtc;
	}

	/* Enable fast AXI arbitration for the display DMA. */
	armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

	priv->dcrtc[dcrtc->num] = dcrtc;

	/* Takes ownership of the port node ref (put in crtc_destroy). */
	dcrtc->crtc.port = port;

	primary = kzalloc(sizeof(*primary), GFP_KERNEL);
	if (!primary) {
		ret = -ENOMEM;
		goto err_crtc;
	}

	ret = armada_drm_primary_plane_init(drm, primary);
	if (ret) {
		kfree(primary);
		goto err_crtc;
	}

	ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, primary, NULL,
					&armada_crtc_funcs, NULL);
	if (ret)
		goto err_crtc_init;

	drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);

	/*
	 * From here the CRTC is registered; failures return directly and
	 * cleanup happens via the registered .destroy path on mode-config
	 * teardown — NOTE(review): confirm this is the intended lifetime.
	 */
	ret = drm_mode_crtc_set_gamma_size(&dcrtc->crtc, 256);
	if (ret)
		return ret;

	drm_crtc_enable_color_mgmt(&dcrtc->crtc, 0, false, 256);

	return armada_overlay_plane_create(drm, 1 << dcrtc->num);

err_crtc_init:
	primary->funcs->destroy(primary);
err_crtc:
	kfree(dcrtc);

	return ret;
}
993
/*
 * Component bind: resolve the hardware variant and the OF graph port,
 * then create the CRTC.  Supports both legacy platform-device-ID probing
 * and device-tree probing (where the port may live under a "ports"
 * container node).
 */
static int
armada_lcd_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = data;
	struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	int irq = platform_get_irq(pdev, 0);
	const struct armada_variant *variant;
	struct device_node *port = NULL;

	if (irq < 0)
		return irq;

	if (!dev->of_node) {
		/* Legacy probe: variant comes from the platform-device ID. */
		const struct platform_device_id *id;

		id = platform_get_device_id(pdev);
		if (!id)
			return -ENXIO;

		variant = (const struct armada_variant *)id->driver_data;
	} else {
		const struct of_device_id *match;
		struct device_node *np, *parent = dev->of_node;

		match = of_match_device(dev->driver->of_match_table, dev);
		if (!match)
			return -ENXIO;

		/* The port node may sit inside an optional "ports" node. */
		np = of_get_child_by_name(parent, "ports");
		if (np)
			parent = np;
		port = of_get_child_by_name(parent, "port");
		of_node_put(np);
		if (!port) {
			dev_err(dev, "no port node found in %pOF\n", parent);
			return -ENXIO;
		}

		variant = match->data;
	}

	/* The port reference is handed over to the CRTC (crtc.port). */
	return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
}
1038
/* Component unbind: destroy the CRTC stashed in drvdata at bind time. */
static void
armada_lcd_unbind(struct device *dev, struct device *master, void *data)
{
	struct armada_crtc *dcrtc = dev_get_drvdata(dev);

	armada_drm_crtc_destroy(&dcrtc->crtc);
}
1046
/* Component framework hooks for one LCD controller instance. */
static const struct component_ops armada_lcd_ops = {
	.bind = armada_lcd_bind,
	.unbind = armada_lcd_unbind,
};
1051
/* Platform probe: register this LCD device with the component master. */
static int armada_lcd_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &armada_lcd_ops);
}
1056
/* Platform remove: detach from the component master. */
static int armada_lcd_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &armada_lcd_ops);
	return 0;
}
1062
/* Device-tree match table; .data supplies the hardware variant ops. */
static const struct of_device_id armada_lcd_of_match[] = {
	{
		.compatible = "marvell,dove-lcd",
		.data = &armada510_ops,
	},
	{}
};
MODULE_DEVICE_TABLE(of, armada_lcd_of_match);
1071
/* Legacy (non-DT) platform IDs; driver_data is the variant ops. */
static const struct platform_device_id armada_lcd_platform_ids[] = {
	{
		.name = "armada-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	}, {
		.name = "armada-510-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	},
	{ },
};
MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);
1083
/* Platform driver binding both DT and legacy platform-ID devices. */
struct platform_driver armada_lcd_platform_driver = {
	.probe = armada_lcd_probe,
	.remove = armada_lcd_remove,
	.driver = {
		.name = "armada-lcd",
		.owner = THIS_MODULE,
		.of_match_table = armada_lcd_of_match,
	},
	.id_table = armada_lcd_platform_ids,
};
1094