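/*
 * DRM CRTC support for the Marvell Armada LCD controller: register
 * programming, interrupt handling, hardware cursor and the atomic
 * mode-setting callbacks.
 */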
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_atomic_helper.h>
#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"
#include "armada_plane.h"
#include "armada_trace.h"
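
/*
 * Apply a queued list of register updates.  The list is terminated by an
 * entry whose offset is ~0.  For each entry, the bits selected by ->mask
 * are preserved from the current register value and ->val is OR'd in.
 */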
void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
        while (regs->offset != ~0) {
                void __iomem *reg = dcrtc->base + regs->offset;
                uint32_t val;

                val = regs->mask;
                if (val != 0)
                        val &= readl_relaxed(reg);
                writel_relaxed(val | regs->val, reg);
                ++regs;
        }
}

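/*
 * Enable or disable the dumb panel interface.  The sync inversion bits in
 * LCD_SPU_DUMB_CTRL are preserved here; they are programmed from
 * mode_set_nofb.
 */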
static void armada_drm_crtc_update(struct armada_crtc *dcrtc, bool enable)
{
        uint32_t dumb_ctrl;

        dumb_ctrl = dcrtc->cfg_dumb_ctrl;

        if (enable)
                dumb_ctrl |= CFG_DUMB_ENA;

        /*
         * Only switch the interface to its blanking state when disabling
         * from DUMB24_RGB888_0 mode; other pin configurations are left
         * unchanged.
         */
        if (!enable && (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
                dumb_ctrl &= ~DUMB_MASK;
                dumb_ctrl |= DUMB_BLANK;
        }

        armada_updatel(dumb_ctrl,
                       ~(CFG_INV_CSYNC | CFG_INV_HSYNC | CFG_INV_VSYNC),
                       dcrtc->base + LCD_SPU_DUMB_CTRL);
}

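/*
 * Take ownership of any pending vblank event from the new CRTC state so
 * that the interrupt handler can complete it.  A vblank reference is held
 * until the event has been sent.
 */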
static void armada_drm_crtc_queue_state_event(struct drm_crtc *crtc)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        struct drm_pending_vblank_event *event;

        /* If there is an event, vblank interrupts must be enabled to send it. */
        event = xchg(&crtc->state->event, NULL);
        if (event) {
                WARN_ON(drm_crtc_vblank_get(crtc) != 0);
                dcrtc->event = event;
        }
}

static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
        const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        int ret;

        /* Interlaced modes need the extended sync register (SPU_ADV_REG). */
        if (!dcrtc->variant->has_spu_adv_reg &&
            adj->flags & DRM_MODE_FLAG_INTERLACE)
                return false;

        /* Reject the mode if its pixel clock cannot be generated. */
        ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
        if (ret)
                return false;

        return true;
}

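/*
 * Interrupt enable/disable helpers.  Callers must hold dcrtc->irq_lock.
 */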
static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
        if (dcrtc->irq_ena & mask) {
                dcrtc->irq_ena &= ~mask;
                writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
        }
}

static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
        if ((dcrtc->irq_ena & mask) != mask) {
                dcrtc->irq_ena |= mask;
                writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
                /* Clear any status already latched for this source. */
                if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
                        writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
        }
}

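/*
 * Handle the display-side interrupt sources: report FIFO underflows, pass
 * vblank on to DRM, switch between the two per-field timing sets when
 * running an interlaced mode, and apply any deferred register or cursor
 * updates once the frame has completed.
 */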
static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
        struct drm_pending_vblank_event *event;
        void __iomem *base = dcrtc->base;

        if (stat & DMA_FF_UNDERFLOW)
                DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
        if (stat & GRA_FF_UNDERFLOW)
                DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

        if (stat & VSYNC_IRQ)
                drm_crtc_handle_vblank(&dcrtc->crtc);

        spin_lock(&dcrtc->irq_lock);
        if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
                /* Alternate the per-field timings on each graphics frame IRQ. */
                int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
                uint32_t val;

                writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
                writel_relaxed(dcrtc->v[i].spu_v_h_total,
                               base + LCD_SPUT_V_H_TOTAL);

                val = readl_relaxed(base + LCD_SPU_ADV_REG);
                val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
                val |= dcrtc->v[i].spu_adv_reg;
                writel_relaxed(val, base + LCD_SPU_ADV_REG);
        }

        if (stat & dcrtc->irq_ena & DUMB_FRAMEDONE) {
                /* Frame complete: apply deferred plane and cursor updates. */
                if (dcrtc->update_pending) {
                        armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
                        dcrtc->update_pending = false;
                }
                if (dcrtc->cursor_update) {
                        writel_relaxed(dcrtc->cursor_hw_pos,
                                       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
                        writel_relaxed(dcrtc->cursor_hw_sz,
                                       base + LCD_SPU_HWC_HPXL_VLN);
                        armada_updatel(CFG_HWC_ENA,
                                       CFG_HWC_ENA | CFG_HWC_1BITMOD |
                                       CFG_HWC_1BITENA,
                                       base + LCD_SPU_DMA_CTRL0);
                        dcrtc->cursor_update = false;
                }
                armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
        }
        spin_unlock(&dcrtc->irq_lock);

        if (stat & VSYNC_IRQ && !dcrtc->update_pending) {
                event = xchg(&dcrtc->event, NULL);
                if (event) {
                        spin_lock(&dcrtc->crtc.dev->event_lock);
                        drm_crtc_send_vblank_event(&dcrtc->crtc, event);
                        spin_unlock(&dcrtc->crtc.dev->event_lock);
                        drm_crtc_vblank_put(&dcrtc->crtc);
                }
        }
}

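/*
 * Primary interrupt handler: latch and acknowledge the interrupt status,
 * then service whichever sources are currently enabled.
 */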
static irqreturn_t armada_drm_irq(int irq, void *arg)
{
        struct armada_crtc *dcrtc = arg;
        u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

        /*
         * Acknowledge the interrupt by writing the status register; the
         * value latched above is used for processing below.
         */
        writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

        trace_armada_drm_irq(&dcrtc->crtc, stat);

        /* Only handle the sources we are currently interested in. */
        v = stat & dcrtc->irq_ena;

        if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
                armada_drm_crtc_irq(dcrtc, stat);
                return IRQ_HANDLED;
        }
        return IRQ_NONE;
}

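/*
 * Program the display timings for the adjusted mode: pixel clock divider,
 * active area, porches, totals and sync polarities.  For interlaced modes
 * two sets of vertical timings are prepared, which the interrupt handler
 * alternates between on each field.
 */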
static void armada_drm_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
        struct drm_display_mode *adj = &crtc->state->adjusted_mode;
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        struct armada_regs regs[17];
        uint32_t lm, rm, tm, bm, val, sclk;
        unsigned long flags;
        unsigned i;
        bool interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

        i = 0;
        rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
        lm = adj->crtc_htotal - adj->crtc_hsync_end;
        bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
        tm = adj->crtc_vtotal - adj->crtc_vsync_end;

        DRM_DEBUG_KMS("[CRTC:%d:%s] mode " DRM_MODE_FMT "\n",
                      crtc->base.id, crtc->name,
                      adj->base.id, adj->name, adj->vrefresh, adj->clock,
                      adj->crtc_hdisplay, adj->crtc_hsync_start,
                      adj->crtc_hsync_end, adj->crtc_htotal,
                      adj->crtc_vdisplay, adj->crtc_vsync_start,
                      adj->crtc_vsync_end, adj->crtc_vtotal,
                      adj->type, adj->flags);
        DRM_DEBUG_KMS("lm %d rm %d tm %d bm %d\n", lm, rm, tm, bm);

        /* Compute the pixel clock divider for the adjusted mode. */
        dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

        armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

        if (interlaced ^ dcrtc->interlaced) {
                if (adj->flags & DRM_MODE_FLAG_INTERLACE)
                        drm_crtc_vblank_get(&dcrtc->crtc);
                else
                        drm_crtc_vblank_put(&dcrtc->crtc);
                dcrtc->interlaced = interlaced;
        }

        spin_lock_irqsave(&dcrtc->irq_lock, flags);

        /* Vertical timings for one field (also used for progressive scan). */
        dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
                                    adj->crtc_htotal;
        dcrtc->v[1].spu_v_porch = tm << 16 | bm;
        val = adj->crtc_hsync_start;
        dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;

        if (interlaced) {
                /*
                 * The other field is one line longer and has its vsync
                 * offset by half a line.
                 */
                val -= adj->crtc_htotal / 2;
                dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
                dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
                                            (1 << 16);
                dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
        } else {
                dcrtc->v[0] = dcrtc->v[1];
        }

        val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

        armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
        armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
        armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
        armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
                             LCD_SPUT_V_H_TOTAL);

        if (dcrtc->variant->has_spu_adv_reg)
                armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
                                     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
                                     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);

        val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
        armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

        /*
         * Configure the sync polarities on the dumb interface according to
         * the mode flags.
         */
        val = 0;
        if (adj->flags & DRM_MODE_FLAG_NCSYNC)
                val |= CFG_INV_CSYNC;
        if (adj->flags & DRM_MODE_FLAG_NHSYNC)
                val |= CFG_INV_HSYNC;
        if (adj->flags & DRM_MODE_FLAG_NVSYNC)
                val |= CFG_INV_VSYNC;
        armada_reg_queue_mod(regs, i, val, CFG_INV_CSYNC | CFG_INV_HSYNC |
                             CFG_INV_VSYNC, LCD_SPU_DUMB_CTRL);
        armada_reg_queue_end(regs, i);

        armada_drm_crtc_update_regs(dcrtc, regs);
        spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static void armada_drm_crtc_atomic_begin(struct drm_crtc *crtc,
        struct drm_crtc_state *old_crtc_state)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

        DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

        dcrtc->regs_idx = 0;
        dcrtc->regs = dcrtc->atomic_regs;
}

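/*
 * Commit the register updates queued by the plane and CRTC hooks.  For
 * plane-only updates the writes are deferred to the frame-done interrupt;
 * a full modeset applies them immediately.
 */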
static void armada_drm_crtc_atomic_flush(struct drm_crtc *crtc,
        struct drm_crtc_state *old_crtc_state)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

        DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

        armada_reg_queue_end(dcrtc->regs, dcrtc->regs_idx);

        /*
         * If this is not a full modeset, defer the update to the frame-done
         * interrupt and queue the completion event; otherwise apply the
         * queued registers now.
         */
        if (!drm_atomic_crtc_needs_modeset(crtc->state)) {
                dcrtc->update_pending = true;
                armada_drm_crtc_queue_state_event(crtc);
                spin_lock_irq(&dcrtc->irq_lock);
                armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
                spin_unlock_irq(&dcrtc->irq_lock);
        } else {
                spin_lock_irq(&dcrtc->irq_lock);
                armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
                spin_unlock_irq(&dcrtc->irq_lock);
        }
}

static void armada_drm_crtc_atomic_disable(struct drm_crtc *crtc,
        struct drm_crtc_state *old_state)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        struct drm_pending_vblank_event *event;

        DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

        drm_crtc_vblank_off(crtc);
        armada_drm_crtc_update(dcrtc, false);

        if (!crtc->state->active) {
                /*
                 * The CRTC is being left inactive, so allow the variant to
                 * shut down its clocks and other resources.
                 */
                if (dcrtc->variant->disable)
                        dcrtc->variant->disable(dcrtc);

                /*
                 * No further vblank interrupts will arrive, so complete any
                 * pending event here rather than from the interrupt handler.
                 */
                event = crtc->state->event;
                crtc->state->event = NULL;
                if (event) {
                        spin_lock_irq(&crtc->dev->event_lock);
                        drm_crtc_send_vblank_event(crtc, event);
                        spin_unlock_irq(&crtc->dev->event_lock);
                }
        }
}

static void armada_drm_crtc_atomic_enable(struct drm_crtc *crtc,
        struct drm_crtc_state *old_state)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

        DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

        if (!old_state->active) {
                /*
                 * The CRTC was previously inactive: bring the variant back
                 * up (clocks etc.) before enabling the output.
                 */
                if (dcrtc->variant->enable)
                        dcrtc->variant->enable(dcrtc, &crtc->state->adjusted_mode);
        }
        armada_drm_crtc_update(dcrtc, true);
        drm_crtc_vblank_on(crtc);

        armada_drm_crtc_queue_state_event(crtc);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
        .mode_fixup = armada_drm_crtc_mode_fixup,
        .mode_set_nofb = armada_drm_crtc_mode_set_nofb,
        .atomic_begin = armada_drm_crtc_atomic_begin,
        .atomic_flush = armada_drm_crtc_atomic_flush,
        .atomic_disable = armada_drm_crtc_atomic_disable,
        .atomic_enable = armada_drm_crtc_atomic_enable,
};

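/*
 * Copy an ARGB cursor image into the hardware cursor SRAM, swapping the
 * red and blue channels and stepping the SRAM address through the two
 * cursor RAM banks as it advances.
 */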
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
        unsigned stride, unsigned width, unsigned height)
{
        uint32_t addr;
        unsigned y;

        addr = SRAM_HWC32_RAM1;
        for (y = 0; y < height; y++) {
                uint32_t *p = &pix[y * stride];
                unsigned x;

                for (x = 0; x < width; x++, p++) {
                        uint32_t val = *p;

                        /* Swap the red and blue channels for the hardware. */
                        val = (val & 0xff00ff00) |
                              (val & 0x000000ff) << 16 |
                              (val & 0x00ff0000) >> 16;

                        writel_relaxed(val,
                                       base + LCD_SPU_SRAM_WRDAT);
                        writel_relaxed(addr | SRAM_WRITE,
                                       base + LCD_SPU_SRAM_CTRL);
                        readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
                        addr += 1;
                        if ((addr & 0x00ff) == 0)
                                addr += 0xf00;
                        if ((addr & 0x30ff) == 0)
                                addr = SRAM_HWC32_RAM2;
                }
        }
}

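/*
 * Program all 256 entries of the hardware cursor transparency SRAM
 * (SRAM_HWC32_TRAN); called when the cursor SRAM is first brought up.
 */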
static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
        unsigned addr;

        for (addr = 0; addr < 256; addr++) {
                /* Fill each transparency entry with the 0x55555555 pattern. */
                writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
                writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
                               base + LCD_SPU_SRAM_CTRL);
        }
}

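/*
 * Recalculate the hardware cursor position and visible size, clipping it
 * to the active display area, and reload the cursor image if requested.
 * The new position and size are applied from the frame-done interrupt.
 */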
static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
        uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
        uint32_t yoff, yscr, h = dcrtc->cursor_h;
        uint32_t para1;

        /*
         * Calculate the visible width and height of the cursor, its
         * position on the screen, and the offset into the cursor image.
         */
        if (dcrtc->cursor_x < 0) {
                xoff = -dcrtc->cursor_x;
                xscr = 0;
                w -= min(xoff, w);
        } else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
                xoff = 0;
                xscr = dcrtc->cursor_x;
                w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
        } else {
                xoff = 0;
                xscr = dcrtc->cursor_x;
        }

        if (dcrtc->cursor_y < 0) {
                yoff = -dcrtc->cursor_y;
                yscr = 0;
                h -= min(yoff, h);
        } else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
                yoff = 0;
                yscr = dcrtc->cursor_y;
                h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
        } else {
                yoff = 0;
                yscr = dcrtc->cursor_y;
        }

        /* For interlaced modes, only display every other source line. */
        s = dcrtc->cursor_w;
        if (dcrtc->interlaced) {
                s *= 2;
                yscr /= 2;
                h /= 2;
        }

        if (!dcrtc->cursor_obj || !h || !w) {
                spin_lock_irq(&dcrtc->irq_lock);
                dcrtc->cursor_update = false;
                armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
                spin_unlock_irq(&dcrtc->irq_lock);
                return 0;
        }

        spin_lock_irq(&dcrtc->irq_lock);
        para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
        armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
                       dcrtc->base + LCD_SPU_SRAM_PARA1);
        spin_unlock_irq(&dcrtc->irq_lock);

        /*
         * If the cursor SRAM was not previously enabled, initialise the
         * transparency table and force the image to be reloaded.
         */
        if (!(para1 & CFG_CSB_256x32)) {
                armada_drm_crtc_cursor_tran(dcrtc->base);
                reload = true;
        }

        if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
                spin_lock_irq(&dcrtc->irq_lock);
                dcrtc->cursor_update = false;
                armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
                spin_unlock_irq(&dcrtc->irq_lock);
                reload = true;
        }
        if (reload) {
                struct armada_gem_object *obj = dcrtc->cursor_obj;
                uint32_t *pix;

                /* Copy the visible part of the cursor image into SRAM. */
                pix = obj->addr;
                pix += yoff * s + xoff;
                armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
        }

        /* Let the frame-done interrupt apply the new position and size. */
        spin_lock_irq(&dcrtc->irq_lock);
        dcrtc->cursor_hw_pos = yscr << 16 | xscr;
        dcrtc->cursor_hw_sz = h << 16 | w;
        dcrtc->cursor_update = true;
        armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
        spin_unlock_irq(&dcrtc->irq_lock);

        return 0;
}

/* Reload the cursor image; used as the GEM object's update callback. */
static void cursor_update(void *data)
{
        armada_drm_crtc_cursor_update(data, true);
}

static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
        struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        struct armada_gem_object *obj = NULL;
        int ret;

        /* No hardware cursor on variants without the extended SPU registers. */
        if (!dcrtc->variant->has_spu_adv_reg)
                return -ENXIO;

        if (handle && w > 0 && h > 0) {
                /* The hardware limits the cursor to 64x32 or 32x64 pixels. */
                if (w > 64 || h > 64 || (w > 32 && h > 32))
                        return -ENOMEM;

                obj = armada_gem_object_lookup(file, handle);
                if (!obj)
                        return -ENOENT;

                /* The cursor object must have a kernel mapping. */
                if (!obj->addr) {
                        drm_gem_object_put_unlocked(&obj->obj);
                        return -EINVAL;
                }

                if (obj->obj.size < w * h * 4) {
                        DRM_ERROR("buffer is too small\n");
                        drm_gem_object_put_unlocked(&obj->obj);
                        return -ENOMEM;
                }
        }

        if (dcrtc->cursor_obj) {
                dcrtc->cursor_obj->update = NULL;
                dcrtc->cursor_obj->update_data = NULL;
                drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
        }
        dcrtc->cursor_obj = obj;
        dcrtc->cursor_w = w;
        dcrtc->cursor_h = h;
        ret = armada_drm_crtc_cursor_update(dcrtc, true);
        if (obj) {
                obj->update_data = dcrtc;
                obj->update = cursor_update;
        }

        return ret;
}

static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        int ret;

        /* No hardware cursor on variants without the extended SPU registers. */
        if (!dcrtc->variant->has_spu_adv_reg)
                return -EFAULT;

        dcrtc->cursor_x = x;
        dcrtc->cursor_y = y;
        ret = armada_drm_crtc_cursor_update(dcrtc, false);

        return ret;
}

static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        struct armada_private *priv = crtc->dev->dev_private;

        if (dcrtc->cursor_obj)
                drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);

        priv->dcrtc[dcrtc->num] = NULL;
        drm_crtc_cleanup(&dcrtc->crtc);

        if (dcrtc->variant->disable)
                dcrtc->variant->disable(dcrtc);

        writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

        of_node_put(dcrtc->crtc.port);

        kfree(dcrtc);
}

static int armada_drm_crtc_enable_vblank(struct drm_crtc *crtc)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        unsigned long flags;

        spin_lock_irqsave(&dcrtc->irq_lock, flags);
        armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
        spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
        return 0;
}

static void armada_drm_crtc_disable_vblank(struct drm_crtc *crtc)
{
        struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
        unsigned long flags;

        spin_lock_irqsave(&dcrtc->irq_lock, flags);
        armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
        spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static const struct drm_crtc_funcs armada_crtc_funcs = {
        .reset = drm_atomic_helper_crtc_reset,
        .cursor_set = armada_drm_crtc_cursor_set,
        .cursor_move = armada_drm_crtc_cursor_move,
        .destroy = armada_drm_crtc_destroy,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
        .enable_vblank = armada_drm_crtc_enable_vblank,
        .disable_vblank = armada_drm_crtc_disable_vblank,
};

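/*
 * Create and register a CRTC for one LCD controller instance: map the
 * registers, set safe initial values, request the interrupt, initialise
 * the variant, and register the primary and overlay planes.
 */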
static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
        struct resource *res, int irq, const struct armada_variant *variant,
        struct device_node *port)
{
        struct armada_private *priv = drm->dev_private;
        struct armada_crtc *dcrtc;
        struct drm_plane *primary;
        void __iomem *base;
        int ret;

        base = devm_ioremap_resource(dev, res);
        if (IS_ERR(base))
                return PTR_ERR(base);

        dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
        if (!dcrtc) {
                DRM_ERROR("failed to allocate Armada crtc\n");
                return -ENOMEM;
        }

        if (dev != drm->dev)
                dev_set_drvdata(dev, dcrtc);

        dcrtc->variant = variant;
        dcrtc->base = base;
        dcrtc->num = drm->mode_config.num_crtc;
        dcrtc->clk = ERR_PTR(-EINVAL);
        dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
        dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
        spin_lock_init(&dcrtc->irq_lock);
        dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;

        /* Put the controller into a known state before enabling interrupts. */
        writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
        writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
        writel_relaxed(dcrtc->spu_iopad_ctrl,
                       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
        writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
        writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
                       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
                       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
        writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
        writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
        readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
        writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

        ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
                               dcrtc);
        if (ret < 0)
                goto err_crtc;

        if (dcrtc->variant->init) {
                ret = dcrtc->variant->init(dcrtc, dev);
                if (ret)
                        goto err_crtc;
        }

        /* Enable fast arbitration (CFG_ARBFAST_ENA) for the display DMA. */
        armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

        priv->dcrtc[dcrtc->num] = dcrtc;

        dcrtc->crtc.port = port;

        primary = kzalloc(sizeof(*primary), GFP_KERNEL);
        if (!primary) {
                ret = -ENOMEM;
                goto err_crtc;
        }

        ret = armada_drm_primary_plane_init(drm, primary);
        if (ret) {
                kfree(primary);
                goto err_crtc;
        }

        ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, primary, NULL,
                                        &armada_crtc_funcs, NULL);
        if (ret)
                goto err_crtc_init;

        drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);

        return armada_overlay_plane_create(drm, 1 << dcrtc->num);

err_crtc_init:
        primary->funcs->destroy(primary);
err_crtc:
        kfree(dcrtc);

        return ret;
}

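/*
 * Component bind callback: look up the variant (from device tree or the
 * platform device ID), find the output port node, and create the CRTC.
 */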
static int
armada_lcd_bind(struct device *dev, struct device *master, void *data)
{
        struct platform_device *pdev = to_platform_device(dev);
        struct drm_device *drm = data;
        struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        int irq = platform_get_irq(pdev, 0);
        const struct armada_variant *variant;
        struct device_node *port = NULL;

        if (irq < 0)
                return irq;

        if (!dev->of_node) {
                const struct platform_device_id *id;

                id = platform_get_device_id(pdev);
                if (!id)
                        return -ENXIO;

                variant = (const struct armada_variant *)id->driver_data;
        } else {
                const struct of_device_id *match;
                struct device_node *np, *parent = dev->of_node;

                match = of_match_device(dev->driver->of_match_table, dev);
                if (!match)
                        return -ENXIO;

                np = of_get_child_by_name(parent, "ports");
                if (np)
                        parent = np;
                port = of_get_child_by_name(parent, "port");
                of_node_put(np);
                if (!port) {
                        dev_err(dev, "no port node found in %pOF\n", parent);
                        return -ENXIO;
                }

                variant = match->data;
        }

        return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
}

static void
armada_lcd_unbind(struct device *dev, struct device *master, void *data)
{
        struct armada_crtc *dcrtc = dev_get_drvdata(dev);

        armada_drm_crtc_destroy(&dcrtc->crtc);
}

static const struct component_ops armada_lcd_ops = {
        .bind = armada_lcd_bind,
        .unbind = armada_lcd_unbind,
};

static int armada_lcd_probe(struct platform_device *pdev)
{
        return component_add(&pdev->dev, &armada_lcd_ops);
}

static int armada_lcd_remove(struct platform_device *pdev)
{
        component_del(&pdev->dev, &armada_lcd_ops);
        return 0;
}

static const struct of_device_id armada_lcd_of_match[] = {
        {
                .compatible = "marvell,dove-lcd",
                .data = &armada510_ops,
        },
        {}
};
MODULE_DEVICE_TABLE(of, armada_lcd_of_match);

static const struct platform_device_id armada_lcd_platform_ids[] = {
        {
                .name = "armada-lcd",
                .driver_data = (unsigned long)&armada510_ops,
        }, {
                .name = "armada-510-lcd",
                .driver_data = (unsigned long)&armada510_ops,
        },
        { },
};
MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);

struct platform_driver armada_lcd_platform_driver = {
        .probe = armada_lcd_probe,
        .remove = armada_lcd_remove,
        .driver = {
                .name = "armada-lcd",
                .owner = THIS_MODULE,
                .of_match_table = armada_lcd_of_match,
        },
        .id_table = armada_lcd_platform_ids,
};