1
2
3
4
5
6
7
8
9
10
11
12
13#include <common.h>
14#include <log.h>
15#include <nand.h>
16#include <linux/bug.h>
17#include <linux/mtd/nand_ecc.h>
18#include <linux/errno.h>
19#include <asm/io.h>
20#include <asm/arch/config.h>
21#include <asm/arch/clk.h>
22#include <asm/arch/sys_proto.h>
23#include <asm/arch/dma.h>
24#include <asm/arch/cpu.h>
25
/*
 * SLC NAND controller register map: sequential 32-bit registers starting
 * at SLC_NAND_BASE. Meanings below are grounded in how this driver uses
 * each register; see UM10326 for the authoritative descriptions.
 */
struct lpc32xx_nand_slc_regs {
	u32 data;	/* data port: byte-wide flash read/write in bits 7:0 */
	u32 addr;	/* write here issues an address cycle (ALE) */
	u32 cmd;	/* write here issues a command cycle (CLE) */
	u32 stop;	/* not used by this driver */
	u32 ctrl;	/* control: SW reset, ECC clear, DMA start */
	u32 cfg;	/* config: CE, DMA and ECC enables, DMA direction */
	u32 stat;	/* status: DMA FIFO state, NAND ready */
	u32 int_stat;	/* raw interrupt status (unused here) */
	u32 ien;	/* interrupt enable (cleared at init) */
	u32 isr;	/* interrupt set (unused here) */
	u32 icr;	/* interrupt clear (written at init) */
	u32 tac;	/* timing arcs, programmed from HCLK */
	u32 tc;		/* transfer count for DMA transfers */
	u32 ecc;	/* hardware ECC result (DMA source) */
	u32 dma_data;	/* DMA data port (DMA-side equivalent of 'data') */
};
43
44
/* Config register (cfg) bits */
#define CFG_CE_LOW (1 << 5)	/* Force nCE asserted */
#define CFG_DMA_ECC (1 << 4)	/* Enable ECC readback via DMA */
#define CFG_ECC_EN (1 << 3)	/* Enable the hardware ECC engine */
#define CFG_DMA_BURST (1 << 2)	/* Enable DMA burst mode */
#define CFG_DMA_DIR (1 << 1)	/* DMA direction: set = read from flash */

/* Control register (ctrl) bits */
#define CTRL_SW_RESET (1 << 2)	/* Software reset of the controller */
#define CTRL_ECC_CLEAR (1 << 1)	/* Clear the current ECC result */
#define CTRL_DMA_START (1 << 0)	/* Start SLC-side DMA operation */

/* Status register (stat) bits */
#define STAT_DMA_FIFO (1 << 2)	/* DMA FIFO holds data (checked before new tc) */
#define STAT_NAND_READY (1 << 0)	/* NAND device ready (R/nB) */

/* Interrupt bits, shared by int_stat/ien/isr/icr */
#define INT_STAT_TC (1 << 1)	/* Transfer count reached zero */
#define INT_STAT_RDY (1 << 0)	/* NAND ready transition */
63
64
/*
 * NAND timing arc (tac) register fields. Each field is 4 bits wide, so
 * the computed clock count must be clamped to the maximum encodable
 * value 0xF with min_t. (max_t would saturate every field at its
 * slowest setting — or overflow into the neighbouring field — and make
 * the HCLK-derived computation in lpc32xx_nand_init() pointless.)
 */
#define TAC_W_RDY(n) (min_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n) (min_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n) (min_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n) (min_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n) (min_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n) (min_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n) (min_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n) (min_t(uint32_t, (n), 0xF) << 0)
73
74
75
/*
 * ECC layout for small-page (16-byte OOB) devices: 6 ECC bytes at
 * offsets 10-15, free bytes at 0-3 and 6-9. Bytes 4-5 are deliberately
 * left out of oobfree — presumably reserved for the bad-block marker;
 * confirm against the flash datasheet. Large-page devices keep the
 * framework's default layout (see board_nand_init()).
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};
84
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/* Number of hardware-ECC steps (chunks) per NAND page */
#define ECCSTEPS (CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA linked list: one data descriptor plus one ECC-readback descriptor
 * per ECC step, and one trailing descriptor for the OOB area.
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
/*
 * One 32-bit hardware ECC word per ECC step.
 * NOTE(review): sized for at most 8 steps (e.g. 2048/256) — confirm
 * ECCSTEPS <= 8 holds for this board's configuration.
 */
static u32 ecc_buffer[8];
static unsigned int dmachan = (unsigned int)-1; /* no channel acquired yet */

/*
 * Descriptor-chain helpers. The argument is fully parenthesized so the
 * macro stays safe for arbitrary expression arguments (CERT PRE01-C);
 * the original '((u32)x)' relied on the cast binding tightly enough.
 */
#define lpc32xx_dmac_next_lli(x) ((u32)(x))
#define lpc32xx_dmac_set_dma_data() ((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc() ((u32)&lpc32xx_nand_slc_regs->ecc)
#endif
108
109static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
110 = (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;
111
/*
 * Reset the SLC NAND controller, mask and clear its interrupts, and
 * program the bus timing register from the current HCLK rate.
 * The write order matters: reset first, then configuration.
 */
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset the SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* Start from a clean config: no DMA, no ECC, normal CE */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Disable all interrupts and clear any pending ones */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/*
	 * NAND timings: *_CLKS values are used as clock counts directly;
	 * the width/hold/setup values are rates divided into HCLK.
	 */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}
138
139static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
140 int cmd, unsigned int ctrl)
141{
142 debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);
143
144 if (ctrl & NAND_NCE)
145 setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
146 else
147 clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
148
149 if (cmd == NAND_CMD_NONE)
150 return;
151
152 if (ctrl & NAND_CLE)
153 writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
154 else if (ctrl & NAND_ALE)
155 writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
156}
157
158static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
159{
160 return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
161}
162
163#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
164
165
166
167
168
/*
 * Build the DMA linked list used by lpc32xx_nand_xfer().
 *
 * For a data transfer the list holds, per ECC step, one descriptor that
 * moves CONFIG_SYS_NAND_ECCSIZE bytes between 'buffer' and the SLC data
 * FIFO, chained to a second descriptor that copies the hardware ECC
 * word for that step into ecc_buffer[]. When 'size' is smaller than one
 * ECC step (an OOB-only transfer), a single descriptor moving
 * CONFIG_SYS_NAND_OOBSIZE bytes is generated instead.
 *
 * @chip:   NAND chip (unused here)
 * @buffer: memory-side source/destination buffer
 * @size:   number of data bytes to transfer
 * @read:   non-zero for flash-to-memory, zero for memory-to-flash
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * Control word for the ECC-readback descriptors: 32-bit single
	 * transfers, destination on AHB1.
	 * NOTE(review): transfer size 0x5 — presumably repeated reads to
	 * stay in sync with the flash controller; confirm against the
	 * LPC32x0 user manual.
	 */
	ecc_ctrl = 0x5 |
		DMAC_CHAN_SRC_BURST_1 |
		DMAC_CHAN_DEST_BURST_1 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* Control word for one ECC step of page data (word transfers) */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
		DMAC_CHAN_SRC_BURST_4 |
		DMAC_CHAN_DEST_BURST_4 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* Control word for moving the whole OOB area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		DMAC_CHAN_SRC_BURST_4 |
		DMAC_CHAN_DEST_BURST_4 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* Only the memory side of the transfer auto-increments */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Emit a (data, ecc) descriptor pair per ECC step. The data
	 * descriptor chains to its ECC descriptor, which chains to the
	 * next pair; the over-shooting link of the final ECC descriptor
	 * is terminated in the 'if (i)' branch below.
	 * NOTE(review): the per-chunk buffer offset is i*256, which
	 * assumes CONFIG_SYS_NAND_ECCSIZE == 256 — confirm for this
	 * configuration.
	 */
	for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	/* Data transfer: terminate the chain and interrupt on completion */
	if (i) {
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB-only transfer: re-derive addresses for the OOB control word */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Single descriptor moving the OOB area, interrupt on completion */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}
284
/*
 * Run one DMA transfer of 'len' bytes between 'buf' and the SLC NAND
 * data FIFO ('read' selects the direction). Blocks until the NAND chip
 * is ready and the DMA channel reports completion, then disables the
 * DMA and hardware-ECC configuration bits again.
 */
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA channel config: NAND peripheral on the flash-side end */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		(read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		(read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		DMAC_CHAN_ENABLE;

	/* Build the descriptor list for this transfer */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Set SLC DMA direction and enable burst mode */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/*
	 * Program the transfer counter only when starting a fresh
	 * transfer (FIFO empty and counter at zero). A data transfer
	 * also accounts for the OOB bytes that follow the page.
	 */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Kick off the DMA; a failure here is unrecoverable */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Busy-wait until the NAND chip is ready again */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Then wait for the DMA channel itself to finish */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA and drop DMA/ECC modes until the next transfer */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}
335
336static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
337{
338 int i;
339 for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
340 i += CONFIG_SYS_NAND_ECCBYTES) {
341 u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
342 ce = ~(ce << 2) & 0xFFFFFF;
343 spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
344 spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
345 spare[i] = (u8)(ce & 0xFF);
346 }
347 return 0;
348}
349
/*
 * NAND framework ecc.calculate hook: pack the hardware ECC words that
 * the DMA deposited in ecc_buffer[] into 'ecc_code'. 'dat' is unused —
 * the ECC was already computed by the SLC engine during the transfer.
 */
static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}
355
356
357
358
359
/*
 * NAND framework ecc.hwctl hook: discard any previous ECC result, then
 * enable the ECC engine and ECC DMA readback. 'mode' (read/write) is
 * not needed by the SLC engine. Order matters: clear before enable.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear the current ECC result */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Enable ECC calculation and ECC DMA transfers */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}
368
369
370
371
372
373
374
375
376
377
378int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
379 u_char *read_ecc, u_char *calc_ecc)
380{
381 unsigned int i;
382 int ret1, ret2 = 0;
383 u_char *r = read_ecc;
384 u_char *c = calc_ecc;
385 u16 data_offset = 0;
386
387 for (i = 0 ; i < ECCSTEPS ; i++) {
388 r += CONFIG_SYS_NAND_ECCBYTES;
389 c += CONFIG_SYS_NAND_ECCBYTES;
390 data_offset += CONFIG_SYS_NAND_ECCSIZE;
391
392 ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
393 if (ret1 < 0)
394 return -EBADMSG;
395 else
396 ret2 += ret1;
397 }
398
399 return ret2;
400}
401
/* NAND framework read_buf hook: DMA transfer from flash to 'buf' */
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
406
/* NAND framework write_buf hook: DMA transfer from 'buf' to flash */
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
412
413
/*
 * Read one page with hardware ECC.
 *
 * The data area is read first with the ECC engine enabled (the SLC
 * engine fills ecc_buffer[] via DMA), then the OOB area is read and the
 * ECC bytes stored there are used to correct the data. 'oob_required'
 * is ignored — the OOB is always read since the ECC lives there.
 *
 * Returns the maximum bitflip count handed back by the corrector;
 * uncorrectable pages are accounted in mtd->ecc_stats.failed.
 */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/* Read data with H/W ECC enabled, snapshot the computed ECC,
	 * then read the OOB area (without disturbing the snapshot). */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	/* Gather the stored ECC bytes according to the OOB layout */
	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}
450
451
/*
 * Write one page with hardware ECC.
 *
 * The data area is written with the ECC engine enabled, the computed
 * ECC words are packed into the OOB buffer at the layout's positions,
 * and the OOB area is written out afterwards. 'oob_required' is
 * ignored — the OOB is always written since it carries the ECC.
 * Always returns 0.
 */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/* Write data with H/W ECC enabled and collect the computed ECC */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	/* Place the ECC bytes in the OOB buffer per the layout */
	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
480#else
481static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
482{
483 while (len-- > 0)
484 *buf++ = readl(&lpc32xx_nand_slc_regs->data);
485}
486
487static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
488{
489 while (len-- > 0)
490 writel(*buf++, &lpc32xx_nand_slc_regs->data);
491}
492#endif
493
494static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
495{
496 return readl(&lpc32xx_nand_slc_regs->data);
497}
498
499static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
500{
501 writel(byte, &lpc32xx_nand_slc_regs->data);
502}
503
504
505
506
507
508
/*
 * Board-level NAND setup: install the SLC-specific callbacks on the
 * nand_chip, select hardware ECC (DMA builds) or software ECC (SPL /
 * no-DMA builds), and initialize the controller.
 *
 * Returns 0 on success, -1 when no DMA channel could be acquired.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a DMA channel used for all NAND transfers */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	/* Command/address latching and ready/busy polling */
	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/* Byte-wide accessors through the SLC data register */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC, computed by the SLC engine during DMA transfers */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/* Without the DMA driver, fall back to software ECC */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/* PIO transfers through the SLC data register */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/* ECC geometry shared by both modes */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	/* Custom OOB layout applies only to small-page devices */
	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Bring up the SLC NAND controller itself */
	lpc32xx_nand_init();

	return 0;
}
586