1
2
3
4
5
6
7
8
9
10
11
12
13
14
15#include <common.h>
16#include <nand.h>
17#include <linux/mtd/nand_ecc.h>
18#include <linux/errno.h>
19#include <asm/io.h>
20#include <asm/arch/config.h>
21#include <asm/arch/clk.h>
22#include <asm/arch/sys_proto.h>
23#include <asm/arch/dma.h>
24#include <asm/arch/cpu.h>
25
26#if defined(CONFIG_DMA_LPC32XX) && defined(CONFIG_SPL_BUILD)
27#warning "DMA support in SPL image is not tested"
28#endif
29
/*
 * SLC NAND controller register map: consecutive 32-bit registers
 * starting at SLC_NAND_BASE.
 */
struct lpc32xx_nand_slc_regs {
	u32 data;	/* 0x00: data port (byte-wide PIO access) */
	u32 addr;	/* 0x04: address latch */
	u32 cmd;	/* 0x08: command latch */
	u32 stop;	/* 0x0C */
	u32 ctrl;	/* 0x10: control — see CTRL_* bits */
	u32 cfg;	/* 0x14: configuration — see CFG_* bits */
	u32 stat;	/* 0x18: status — see STAT_* bits */
	u32 int_stat;	/* 0x1C: interrupt status — see INT_STAT_* bits */
	u32 ien;	/* 0x20: interrupt enable mask */
	u32 isr;	/* 0x24 */
	u32 icr;	/* 0x28: interrupt clear (write INT_STAT_* bits) */
	u32 tac;	/* 0x2C: timing arcs — see TAC_* fields */
	u32 tc;		/* 0x30: transfer byte count */
	u32 ecc;	/* 0x34: accumulated ECC word */
	u32 dma_data;	/* 0x38: DMA data port */
};
47
48
/* cfg register bits */
#define CFG_CE_LOW	(1 << 5)	/* Drive chip-enable active (low) */
#define CFG_DMA_ECC	(1 << 4)	/* Route ECC words out via DMA */
#define CFG_ECC_EN	(1 << 3)	/* Enable ECC accumulation */
#define CFG_DMA_BURST	(1 << 2)	/* Enable DMA burst transfers */
#define CFG_DMA_DIR	(1 << 1)	/* DMA direction: set = flash-to-memory */

/* ctrl register bits */
#define CTRL_SW_RESET	(1 << 2)	/* Software reset of the controller */
#define CTRL_ECC_CLEAR	(1 << 1)	/* Clear the ECC accumulator */
#define CTRL_DMA_START	(1 << 0)	/* Start the SLC DMA interface */

/* stat register bits */
#define STAT_DMA_FIFO	(1 << 2)	/* Data pending in the DMA FIFO */
#define STAT_NAND_READY	(1 << 0)	/* NAND device ready */

/* int_stat / icr register bits */
#define INT_STAT_TC	(1 << 1)	/* Transfer count reached */
#define INT_STAT_RDY	(1 << 0)	/* Device-ready event */

/*
 * tac (timing arcs) register fields, values in HCLK cycles.
 *
 * NOTE(review): max_t() forces each 4-bit field to *at least* 0xF (the
 * largest/slowest value, and values > 0xF would spill into the adjacent
 * field). Clamping with min_t(..., 0xF) looks like the intent — confirm
 * against the LPC32x0 user manual before changing hardware timings.
 */
#define TAC_W_RDY(n)	(max_t(uint32_t, (n), 0xF) << 28)	/* Write ready delay */
#define TAC_W_WIDTH(n)	(max_t(uint32_t, (n), 0xF) << 24)	/* Write pulse width */
#define TAC_W_HOLD(n)	(max_t(uint32_t, (n), 0xF) << 20)	/* Write hold time */
#define TAC_W_SETUP(n)	(max_t(uint32_t, (n), 0xF) << 16)	/* Write setup time */
#define TAC_R_RDY(n)	(max_t(uint32_t, (n), 0xF) << 12)	/* Read ready delay */
#define TAC_R_WIDTH(n)	(max_t(uint32_t, (n), 0xF) << 8)	/* Read pulse width */
#define TAC_R_HOLD(n)	(max_t(uint32_t, (n), 0xF) << 4)	/* Read hold time */
#define TAC_R_SETUP(n)	(max_t(uint32_t, (n), 0xF) << 0)	/* Read setup time */
77
78
79
/*
 * OOB layout for devices with a 16-byte spare area: 6 ECC bytes at
 * offsets 10-15, free bytes at 0-3 and 6-9. Bytes 4-5 are deliberately
 * excluded from the free list (presumably reserved for the bad-block
 * marker — NOTE(review): confirm against the flash datasheet).
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = {10, 11, 12, 13, 14, 15},
	.oobfree = {
		{.offset = 0,
		 . length = 4},
		{.offset = 6,
		 . length = 4}
	}
};
90
#if defined(CONFIG_DMA_LPC32XX)
/* Number of ECC chunks per NAND page */
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA linked list: one data descriptor plus one ECC-fetch descriptor
 * per ECC step, and one extra slot used for OOB-only transfers
 * (see lpc32xx_nand_dma_configure()).
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
/* Hardware ECC words collected by DMA, one 32-bit word per ECC step */
static u32 ecc_buffer[8];
/* DMA channel claimed in board_nand_init(); (unsigned)-1 = unallocated */
static unsigned int dmachan = (unsigned int)-1;

/*
 * Address helpers for building DMA descriptors.
 * NOTE(review): these cast virtual addresses straight to u32 — assumes
 * a 1:1 physical/virtual mapping; confirm for this platform.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif
114
/* Memory-mapped SLC NAND controller registers */
static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;
117
/*
 * One-time SLC controller bring-up: software reset, all configuration
 * and interrupts cleared, then NAND bus timings programmed from the
 * current HCLK rate and the board's CONFIG_LPC32XX_NAND_SLC_* timing
 * constraints.
 */
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset the SLC controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* Start from a clean configuration (ECC off, DMA off, CE inactive) */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Mask all interrupts and clear any pending TC/RDY events */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/*
	 * Program read/write timing arcs. The *_CLKS values are used as
	 * HCLK cycle counts directly; the remaining values are rates the
	 * HCLK frequency is divided by to get a cycle count.
	 */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}
144
145static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
146 int cmd, unsigned int ctrl)
147{
148 debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);
149
150 if (ctrl & NAND_NCE)
151 setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
152 else
153 clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
154
155 if (cmd == NAND_CMD_NONE)
156 return;
157
158 if (ctrl & NAND_CLE)
159 writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
160 else if (ctrl & NAND_ALE)
161 writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
162}
163
164static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
165{
166 return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
167}
168
169#if defined(CONFIG_DMA_LPC32XX)
170
171
172
173
174
/*
 * Build the DMA linked list for one SLC transfer.
 *
 * For a main-data transfer (@size is at least one ECC chunk) each ECC
 * step i gets a pair of descriptors: dmalist[2*i] moves one
 * CONFIG_SYS_NAND_ECCSIZE chunk between @buffer and the SLC data FIFO,
 * and dmalist[2*i+1] copies the accumulated ECC word into
 * ecc_buffer[i]. The last ECC descriptor terminates the chain and
 * raises the terminal-count interrupt.
 *
 * For an OOB-only transfer (@size smaller than one ECC chunk, so the
 * loop below runs zero times) a single descriptor moves the whole
 * spare area.
 *
 * @chip:   NAND chip (unused here; kept for the call signature)
 * @buffer: memory-side buffer of the transfer
 * @size:   number of bytes to transfer
 * @read:   non-zero = flash-to-memory, zero = memory-to-flash
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * ECC-fetch descriptor control word: single 32-bit copy from the
	 * SLC ECC register. NOTE(review): transfer-size field is 0x5 —
	 * confirm against the LPC32x0 DMAC transfer-size encoding.
	 */
	ecc_ctrl = 0x5 |
		   DMAC_CHAN_SRC_BURST_1 |
		   DMAC_CHAN_DEST_BURST_1 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* Data descriptor control word: one ECC chunk as 32-bit words */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
	       DMAC_CHAN_SRC_BURST_4 |
	       DMAC_CHAN_DEST_BURST_4 |
	       DMAC_CHAN_SRC_WIDTH_32 |
	       DMAC_CHAN_DEST_WIDTH_32 |
	       DMAC_CHAN_DEST_AHB1;

	/* OOB descriptor control word: whole spare area as 32-bit words */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		   DMAC_CHAN_SRC_BURST_4 |
		   DMAC_CHAN_DEST_BURST_4 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* Memory side auto-increments; the FIFO side address is fixed */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Chain data/ECC descriptor pairs, one pair per ECC chunk.
	 * NOTE(review): the memory address advances by i*256 per step
	 * regardless of CONFIG_SYS_NAND_ECCSIZE — verify for chunk
	 * sizes other than 256 bytes.
	 */
	for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		/* ECC word for this chunk lands in ecc_buffer[i] */
		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	/* Main-data transfer: terminate the chain on the last ECC fetch */
	if (i) {
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB-only transfer: pick direction for the single descriptor */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Single spare-area descriptor, interrupt on completion */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}
290
/*
 * Run one DMA transfer of @len bytes between @buf and the NAND chip.
 *
 * Builds the descriptor chain, points the SLC DMA interface in the
 * right direction, preloads the transfer counter, starts the DMA
 * channel and busy-waits for both the NAND device and the DMA channel
 * to finish. DMA errors are reported but not propagated to the caller.
 *
 * @read: non-zero = flash-to-memory, zero = memory-to-flash
 */
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* Peripheral flow control; NAND1 is the flash-side endpoint */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Build the descriptor chain for this transfer */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Set SLC DMA direction and enable burst transfers */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/*
	 * Preload the transfer counter, but only when the FIFO is empty
	 * and no count is pending. A main-data transfer also counts the
	 * OOB bytes (the OOB is transferred by a follow-up call while
	 * this count keeps running); an OOB-only transfer counts just
	 * its own length.
	 */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Kick off the DMA channel; failure here is unrecoverable */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for the NAND device to go ready... */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* ...then for the DMA channel to complete */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA and drop the per-transfer configuration bits */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}
342
343static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
344{
345 int i;
346 for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
347 i += CONFIG_SYS_NAND_ECCBYTES) {
348 u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
349 ce = ~(ce << 2) & 0xFFFFFF;
350 spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
351 spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
352 spare[i] = (u8)(ce & 0xFF);
353 }
354 return 0;
355}
356
/*
 * ecc.calculate hook: the ECC was already computed by the SLC engine
 * and DMA'd into ecc_buffer[]; just repack it into @ecc_code.
 * @dat is unused. Always returns 0.
 */
static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}
362
363
364
365
366
/*
 * ecc.hwctl hook: arm the hardware ECC engine for the next transfer.
 * @mode (NAND_ECC_READ/NAND_ECC_WRITE) is ignored — the setup is
 * identical for both directions.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Reset the ECC accumulator */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Enable ECC accumulation and routing of ECC words to DMA */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}
375
376
377
378
379
380
381
382
383
384
385int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
386 u_char *read_ecc, u_char *calc_ecc)
387{
388 unsigned int i;
389 int ret1, ret2 = 0;
390 u_char *r = read_ecc;
391 u_char *c = calc_ecc;
392 u16 data_offset = 0;
393
394 for (i = 0 ; i < ECCSTEPS ; i++) {
395 r += CONFIG_SYS_NAND_ECCBYTES;
396 c += CONFIG_SYS_NAND_ECCBYTES;
397 data_offset += CONFIG_SYS_NAND_ECCSIZE;
398
399 ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
400 if (ret1 < 0)
401 return -EBADMSG;
402 else
403 ret2 += ret1;
404 }
405
406 return ret2;
407}
408#endif
409
410#if defined(CONFIG_DMA_LPC32XX)
/* read_buf hook (DMA build): read @len bytes from flash into @buf */
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
415#else
416static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
417{
418 while (len-- > 0)
419 *buf++ = readl(&lpc32xx_nand_slc_regs->data);
420}
421#endif
422
423static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
424{
425 return readl(&lpc32xx_nand_slc_regs->data);
426}
427
428#if defined(CONFIG_DMA_LPC32XX)
/* write_buf hook (DMA build): write @len bytes from @buf to flash */
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
434#else
435static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
436{
437 while (len-- > 0)
438 writel(*buf++, &lpc32xx_nand_slc_regs->data);
439}
440#endif
441
442static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
443{
444 writel(byte, &lpc32xx_nand_slc_regs->data);
445}
446
447#if defined(CONFIG_DMA_LPC32XX)
448
/*
 * ecc.read_page hook (hardware ECC): DMA the main data while the SLC
 * engine accumulates ECC, DMA the OOB area, then correct the data
 * against the ECC bytes stored in the OOB.
 *
 * @oob_required is ignored — the OOB is always read because it carries
 * the ECC bytes. Returns the maximum number of bitflips observed.
 */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * Enable hardware ECC, read the main data (hardware ECC words
	 * arrive in ecc_buffer[] via DMA), repack them into ecc_calc,
	 * then read the spare area.
	 */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	/* Extract the on-flash ECC bytes from the OOB image */
	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	/* Correct; a negative result marks the page as failed */
	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}
485
486
/*
 * ecc.write_page hook (hardware ECC): DMA the main data while the SLC
 * engine accumulates ECC, insert the ECC bytes into the OOB image, and
 * DMA the OOB area out.
 *
 * @oob_required is ignored — the OOB is always written because it
 * carries the ECC bytes. Always returns 0.
 */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * Enable hardware ECC, stream out the main data, then repack
	 * the accumulated ECC words from ecc_buffer[] into ecc_calc.
	 */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	/* Place the computed ECC bytes at their OOB positions */
	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
515#endif
516
517
518
519
520
521
/*
 * Board-level NAND setup: install the SLC accessors and ECC callbacks
 * into @lpc32xx_chip and bring up the controller.
 *
 * With CONFIG_DMA_LPC32XX the driver uses the SLC hardware ECC engine
 * and DMA transfers; otherwise it falls back to PIO with software ECC.
 *
 * Returns 0 on success, -1 if no DMA channel could be claimed.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX)
	int ret;

	/* Claim one DMA channel, used for all NAND transfers */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/* Single-byte accessors are common to both build variants */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX)
	/* Hardware ECC via the SLC engine, buffers moved by DMA */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/* PIO transfers with software ECC */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/* ECC geometry comes from the board configuration */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	/* Small-page devices use the private 16-byte OOB layout */
	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Bring up the SLC controller itself */
	lpc32xx_nand_init();

	return 0;
}
599