/*
 * Freescale i.MX28 (MXS) NAND flash driver
 *
 * Drives NAND chips through the GPMI interface with hardware BCH ECC,
 * using the APBH DMA engine for command, data and ECC transfers.
 */

#include <linux/mtd/mtd.h>
#include <linux/mtd/nand.h>
#include <linux/types.h>
#include <common.h>
#include <malloc.h>
#include <asm/errno.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/imx-regs.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>

#define MXS_NAND_DMA_DESCRIPTOR_COUNT 4

#define MXS_NAND_CHUNK_DATA_CHUNK_SIZE 512
#define MXS_NAND_METADATA_SIZE 10

#define MXS_NAND_COMMAND_BUFFER_SIZE 32

#define MXS_NAND_BCH_TIMEOUT 10000

struct mxs_nand_info {
	int cur_chip;

	uint32_t cmd_queue_len;
	uint32_t data_buf_size;

	uint8_t *cmd_buf;
	uint8_t *data_buf;
	uint8_t *oob_buf;

	uint8_t marking_block_bad;
	uint8_t raw_oob_mode;

	/* Functions with altered behaviour */
	int (*hooked_read_oob)(struct mtd_info *mtd,
			loff_t from, struct mtd_oob_ops *ops);
	int (*hooked_write_oob)(struct mtd_info *mtd,
			loff_t to, struct mtd_oob_ops *ops);
	int (*hooked_block_markbad)(struct mtd_info *mtd,
			loff_t ofs);

	/* DMA descriptors */
	struct mxs_dma_desc **desc;
	uint32_t desc_index;
};

struct nand_ecclayout fake_ecc_layout;

/*
 * Cache maintenance helpers for the DMA buffers. When the data cache is
 * disabled they compile to no-ops.
 */
#ifndef CONFIG_SYS_DCACHE_OFF
static void mxs_nand_flush_data_buf(struct mxs_nand_info *info)
{
	uint32_t addr = (uint32_t)info->data_buf;

	flush_dcache_range(addr, addr + info->data_buf_size);
}

static void mxs_nand_inval_data_buf(struct mxs_nand_info *info)
{
	uint32_t addr = (uint32_t)info->data_buf;

	invalidate_dcache_range(addr, addr + info->data_buf_size);
}

static void mxs_nand_flush_cmd_buf(struct mxs_nand_info *info)
{
	uint32_t addr = (uint32_t)info->cmd_buf;

	flush_dcache_range(addr, addr + MXS_NAND_COMMAND_BUFFER_SIZE);
}
#else
static inline void mxs_nand_flush_data_buf(struct mxs_nand_info *info) {}
static inline void mxs_nand_inval_data_buf(struct mxs_nand_info *info) {}
static inline void mxs_nand_flush_cmd_buf(struct mxs_nand_info *info) {}
#endif

static struct mxs_dma_desc *mxs_nand_get_dma_desc(struct mxs_nand_info *info)
{
	struct mxs_dma_desc *desc;

	if (info->desc_index >= MXS_NAND_DMA_DESCRIPTOR_COUNT) {
		printf("MXS NAND: Too many DMA descriptors requested\n");
		return NULL;
	}

	desc = info->desc[info->desc_index];
	info->desc_index++;

	return desc;
}

static void mxs_nand_return_dma_descs(struct mxs_nand_info *info)
{
	int i;
	struct mxs_dma_desc *desc;

	for (i = 0; i < info->desc_index; i++) {
		desc = info->desc[i];
		memset(desc, 0, sizeof(struct mxs_dma_desc));
		desc->address = (dma_addr_t)desc;
	}

	info->desc_index = 0;
}

static uint32_t mxs_nand_ecc_chunk_cnt(uint32_t page_data_size)
{
	return page_data_size / MXS_NAND_CHUNK_DATA_CHUNK_SIZE;
}

static uint32_t mxs_nand_ecc_size_in_bits(uint32_t ecc_strength)
{
	return ecc_strength * 13;
}

static uint32_t mxs_nand_aux_status_offset(void)
{
	return (MXS_NAND_METADATA_SIZE + 0x3) & ~0x3;
}

/*
 * Pick the BCH correction strength from the page geometry: 8 bits per
 * 512-byte chunk for 2K pages and for 4K pages with 128 bytes of OOB,
 * 16 bits for 4K pages with 218 bytes of OOB.
 */
static inline uint32_t mxs_nand_get_ecc_strength(uint32_t page_data_size,
						uint32_t page_oob_size)
{
	if (page_data_size == 2048)
		return 8;

	if (page_data_size == 4096) {
		if (page_oob_size == 128)
			return 8;

		if (page_oob_size == 218)
			return 16;
	}

	return 0;
}
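
/*
 * For reference, with 13 parity bits per bit of correction strength the
 * per-page overhead works out to:
 *
 *   2048+64:  4 chunks x 13 bytes + 10 bytes metadata = 62 bytes
 *   4096+128: 8 chunks x 13 bytes + 10 bytes metadata = 114 bytes
 *   4096+218: 8 chunks x 26 bytes + 10 bytes metadata = 218 bytes
 */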

static inline uint32_t mxs_nand_get_mark_offset(uint32_t page_data_size,
						uint32_t ecc_strength)
{
	uint32_t chunk_data_size_in_bits;
	uint32_t chunk_ecc_size_in_bits;
	uint32_t chunk_total_size_in_bits;
	uint32_t block_mark_chunk_number;
	uint32_t block_mark_chunk_bit_offset;
	uint32_t block_mark_bit_offset;

	chunk_data_size_in_bits = MXS_NAND_CHUNK_DATA_CHUNK_SIZE * 8;
	chunk_ecc_size_in_bits = mxs_nand_ecc_size_in_bits(ecc_strength);

	chunk_total_size_in_bits =
			chunk_data_size_in_bits + chunk_ecc_size_in_bits;

	/* Compute the bit offset of the block mark within the physical page. */
	block_mark_bit_offset = page_data_size * 8;

	/* Subtract the metadata bits. */
	block_mark_bit_offset -= MXS_NAND_METADATA_SIZE * 8;

	/*
	 * Compute the chunk number (starting at zero) in which the block mark
	 * appears.
	 */
	block_mark_chunk_number =
			block_mark_bit_offset / chunk_total_size_in_bits;

	/*
	 * Compute the bit offset of the block mark within its chunk, and
	 * validate it.
	 */
	block_mark_chunk_bit_offset = block_mark_bit_offset -
			(block_mark_chunk_number * chunk_total_size_in_bits);

	if (block_mark_chunk_bit_offset > chunk_data_size_in_bits)
		return 1;

	/*
	 * Now that we know the chunk number in which the block mark appears,
	 * we can subtract all the ECC bits that come before it.
	 */
	block_mark_bit_offset -=
		block_mark_chunk_number * chunk_ecc_size_in_bits;

	return block_mark_bit_offset;
}
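
/*
 * Worked example for a 2048+64 page with 8-bit ECC: each chunk spans
 * 512 * 8 + 8 * 13 = 4200 bits.  The block mark sits at physical bit
 * 2048 * 8 = 16384, i.e. 16304 bits past the metadata, which falls in chunk 3
 * at chunk bit offset 3704 (inside the data portion, so the layout is valid).
 * Dropping the 3 * 104 preceding ECC bits maps the mark to bit offset 15992
 * of the data-only view: byte 1999, bit 0.
 */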

static uint32_t mxs_nand_mark_byte_offset(struct mtd_info *mtd)
{
	uint32_t ecc_strength;
	ecc_strength = mxs_nand_get_ecc_strength(mtd->writesize, mtd->oobsize);
	return mxs_nand_get_mark_offset(mtd->writesize, ecc_strength) >> 3;
}

static uint32_t mxs_nand_mark_bit_offset(struct mtd_info *mtd)
{
	uint32_t ecc_strength;
	ecc_strength = mxs_nand_get_ecc_strength(mtd->writesize, mtd->oobsize);
	return mxs_nand_get_mark_offset(mtd->writesize, ecc_strength) & 0x7;
}

/*
 * Wait for the BCH complete IRQ and clear it.
 */
static int mxs_nand_wait_for_bch_complete(void)
{
	struct mx28_bch_regs *bch_regs = (struct mx28_bch_regs *)MXS_BCH_BASE;
	int timeout = MXS_NAND_BCH_TIMEOUT;
	int ret;

	ret = mx28_wait_mask_set(&bch_regs->hw_bch_ctrl_reg,
		BCH_CTRL_COMPLETE_IRQ, timeout);

	writel(BCH_CTRL_COMPLETE_IRQ, &bch_regs->hw_bch_ctrl_clr);

	return ret;
}

/*
 * This is the function we install in the cmd_ctrl function pointer of the
 * owning struct nand_chip.  Command and address bytes are collected in a
 * small queue and pushed to the chip in a single DMA transfer.  The driver
 * installs its own select_chip, so the chip-enable bit in ctrl is ignored
 * here.
 */
static void mxs_nand_cmd_ctrl(struct mtd_info *mtd, int data, unsigned int ctrl)
{
	struct nand_chip *nand = mtd->priv;
	struct mxs_nand_info *nand_info = nand->priv;
	struct mxs_dma_desc *d;
	uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip;
	int ret;

	/*
	 * If the command queue is already full, refuse to queue any more
	 * bytes.
	 */
	if (nand_info->cmd_queue_len == MXS_NAND_COMMAND_BUFFER_SIZE) {
		printf("MXS NAND: Command queue too long\n");
		return;
	}

	/*
	 * While either CLE or ALE is asserted, the MTD core is handing us
	 * command or address bytes.  Just append them to the queue; they are
	 * all sent to the chip in one transfer later.
	 */
	if (ctrl & (NAND_ALE | NAND_CLE)) {
		if (data != NAND_CMD_NONE)
			nand_info->cmd_buf[nand_info->cmd_queue_len++] = data;
		return;
	}

	/*
	 * Neither CLE nor ALE is asserted, so the command is complete.  If
	 * nothing was queued there is nothing to send.
	 */
	if (nand_info->cmd_queue_len == 0)
		return;

	/* Compile the DMA descriptor -- a descriptor that sends the command. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_DMA_READ | MXS_DMA_DESC_IRQ |
		MXS_DMA_DESC_CHAIN | MXS_DMA_DESC_DEC_SEM |
		MXS_DMA_DESC_WAIT4END | (3 << MXS_DMA_DESC_PIO_WORDS_OFFSET) |
		(nand_info->cmd_queue_len << MXS_DMA_DESC_BYTES_OFFSET);

	d->cmd.address = (dma_addr_t)nand_info->cmd_buf;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_WRITE |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_CLE |
		GPMI_CTRL0_ADDRESS_INCREMENT |
		nand_info->cmd_queue_len;

	mxs_dma_desc_append(channel, d);

	/* Flush caches */
	mxs_nand_flush_cmd_buf(nand_info);

	/* Execute the DMA chain. */
	ret = mxs_dma_go(channel);
	if (ret)
		printf("MXS NAND: Error sending command\n");

	mxs_nand_return_dma_descs(nand_info);

	/* Reset the command queue. */
	nand_info->cmd_queue_len = 0;
}
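
/*
 * With the MTD core's cmdfunc, a command typically reaches this driver as a
 * command byte (CLE set), zero or more address bytes (ALE set), and a final
 * call with NAND_CMD_NONE and neither flag set; only that last call pushes
 * the queued bytes out to the chip.
 */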

/*
 * Test if the NAND flash is ready.
 */
static int mxs_nand_device_ready(struct mtd_info *mtd)
{
	struct nand_chip *chip = mtd->priv;
	struct mxs_nand_info *nand_info = chip->priv;
	struct mx28_gpmi_regs *gpmi_regs =
		(struct mx28_gpmi_regs *)MXS_GPMI_BASE;
	uint32_t tmp;

	tmp = readl(&gpmi_regs->hw_gpmi_stat);
	tmp >>= (GPMI_STAT_READY_BUSY_OFFSET + nand_info->cur_chip);

	return tmp & 1;
}

/*
 * Select the NAND chip.
 */
static void mxs_nand_select_chip(struct mtd_info *mtd, int chip)
{
	struct nand_chip *nand = mtd->priv;
	struct mxs_nand_info *nand_info = nand->priv;

	nand_info->cur_chip = chip;
}

/*
 * Swap the block mark with the first byte of the metadata.
 *
 * The factory bad-block marker physically lives in the first byte of the OOB
 * area, but with the BCH layout that byte is covered by ECC payload data.  To
 * keep the marker readable at its usual location, the byte that the BCH view
 * maps onto that physical position is exchanged with the first metadata byte
 * whenever a page is read or written.
 */
static void mxs_nand_swap_block_mark(struct mtd_info *mtd,
				uint8_t *data_buf, uint8_t *oob_buf)
{
	uint32_t bit_offset;
	uint32_t buf_offset;

	uint32_t src;
	uint32_t dst;

	bit_offset = mxs_nand_mark_bit_offset(mtd);
	buf_offset = mxs_nand_mark_byte_offset(mtd);

	/*
	 * Get the byte from the data area that overlays the block mark.  Since
	 * the ECC engine applies its own view to the bits in the page, the
	 * physical block mark won't (in general) land on a byte boundary, so
	 * it has to be assembled from two adjacent bytes.
	 */
	src = data_buf[buf_offset] >> bit_offset;
	src |= data_buf[buf_offset + 1] << (8 - bit_offset);

	dst = oob_buf[0];

	oob_buf[0] = src;

	data_buf[buf_offset] &= ~(0xff << bit_offset);
	data_buf[buf_offset + 1] &= 0xff << bit_offset;

	data_buf[buf_offset] |= dst << bit_offset;
	data_buf[buf_offset + 1] |= dst >> (8 - bit_offset);
}

/*
 * Read data from NAND.
 */
static void mxs_nand_read_buf(struct mtd_info *mtd, uint8_t *buf, int length)
{
	struct nand_chip *nand = mtd->priv;
	struct mxs_nand_info *nand_info = nand->priv;
	struct mxs_dma_desc *d;
	uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip;
	int ret;

	if (length > NAND_MAX_PAGESIZE) {
		printf("MXS NAND: DMA buffer too big\n");
		return;
	}

	if (!buf) {
		printf("MXS NAND: DMA buffer is NULL\n");
		return;
	}

	/* Compile the DMA descriptor - a descriptor that reads data. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_DMA_WRITE | MXS_DMA_DESC_IRQ |
		MXS_DMA_DESC_DEC_SEM | MXS_DMA_DESC_WAIT4END |
		(1 << MXS_DMA_DESC_PIO_WORDS_OFFSET) |
		(length << MXS_DMA_DESC_BYTES_OFFSET);

	d->cmd.address = (dma_addr_t)nand_info->data_buf;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_READ |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA |
		length;

	mxs_dma_desc_append(channel, d);

	/*
	 * A DMA descriptor that waits for the command to end and the chip to
	 * become ready before the chain completes.
	 */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_NO_DMAXFER | MXS_DMA_DESC_IRQ |
		MXS_DMA_DESC_NAND_WAIT_4_READY | MXS_DMA_DESC_DEC_SEM |
		MXS_DMA_DESC_WAIT4END | (4 << MXS_DMA_DESC_PIO_WORDS_OFFSET);

	d->cmd.address = 0;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_WAIT_FOR_READY |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA;

	mxs_dma_desc_append(channel, d);

	/* Execute the DMA chain. */
	ret = mxs_dma_go(channel);
	if (ret) {
		printf("MXS NAND: DMA read error\n");
		goto rtn;
	}

	/* Invalidate caches */
	mxs_nand_inval_data_buf(nand_info);

	memcpy(buf, nand_info->data_buf, length);

rtn:
	mxs_nand_return_dma_descs(nand_info);
}

/*
 * Write data to NAND.
 */
static void mxs_nand_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				int length)
{
	struct nand_chip *nand = mtd->priv;
	struct mxs_nand_info *nand_info = nand->priv;
	struct mxs_dma_desc *d;
	uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip;
	int ret;

	if (length > NAND_MAX_PAGESIZE) {
		printf("MXS NAND: DMA buffer too big\n");
		return;
	}

	if (!buf) {
		printf("MXS NAND: DMA buffer is NULL\n");
		return;
	}

	memcpy(nand_info->data_buf, buf, length);

	/* Compile the DMA descriptor - a descriptor that writes data. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_DMA_READ | MXS_DMA_DESC_IRQ |
		MXS_DMA_DESC_DEC_SEM | MXS_DMA_DESC_WAIT4END |
		(4 << MXS_DMA_DESC_PIO_WORDS_OFFSET) |
		(length << MXS_DMA_DESC_BYTES_OFFSET);

	d->cmd.address = (dma_addr_t)nand_info->data_buf;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_WRITE |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA |
		length;

	mxs_dma_desc_append(channel, d);

	/* Flush caches */
	mxs_nand_flush_data_buf(nand_info);

	/* Execute the DMA chain. */
	ret = mxs_dma_go(channel);
	if (ret)
		printf("MXS NAND: DMA write error\n");

	mxs_nand_return_dma_descs(nand_info);
}

/*
 * Read a single byte from NAND.
 */
static uint8_t mxs_nand_read_byte(struct mtd_info *mtd)
{
	uint8_t buf;
	mxs_nand_read_buf(mtd, &buf, 1);
	return buf;
}

/*
 * Read a page from NAND.
 */
static int mxs_nand_ecc_read_page(struct mtd_info *mtd, struct nand_chip *nand,
					uint8_t *buf, int page)
{
	struct mxs_nand_info *nand_info = nand->priv;
	struct mxs_dma_desc *d;
	uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip;
	uint32_t corrected = 0, failed = 0;
	uint8_t *status;
	int i, ret;

	/* Compile the DMA descriptor - wait for ready. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_NO_DMAXFER | MXS_DMA_DESC_CHAIN |
		MXS_DMA_DESC_NAND_WAIT_4_READY | MXS_DMA_DESC_WAIT4END |
		(1 << MXS_DMA_DESC_PIO_WORDS_OFFSET);

	d->cmd.address = 0;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_WAIT_FOR_READY |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA;

	mxs_dma_desc_append(channel, d);

	/* Compile the DMA descriptor - enable the BCH block and read. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_NO_DMAXFER | MXS_DMA_DESC_CHAIN |
		MXS_DMA_DESC_WAIT4END | (6 << MXS_DMA_DESC_PIO_WORDS_OFFSET);

	d->cmd.address = 0;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_READ |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA |
		(mtd->writesize + mtd->oobsize);
	d->cmd.pio_words[1] = 0;
	d->cmd.pio_words[2] =
		GPMI_ECCCTRL_ENABLE_ECC |
		GPMI_ECCCTRL_ECC_CMD_DECODE |
		GPMI_ECCCTRL_BUFFER_MASK_BCH_PAGE;
	d->cmd.pio_words[3] = mtd->writesize + mtd->oobsize;
	d->cmd.pio_words[4] = (dma_addr_t)nand_info->data_buf;
	d->cmd.pio_words[5] = (dma_addr_t)nand_info->oob_buf;

	mxs_dma_desc_append(channel, d);

	/* Compile the DMA descriptor - disable the BCH block. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_NO_DMAXFER | MXS_DMA_DESC_CHAIN |
		MXS_DMA_DESC_NAND_WAIT_4_READY | MXS_DMA_DESC_WAIT4END |
		(3 << MXS_DMA_DESC_PIO_WORDS_OFFSET);

	d->cmd.address = 0;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_WAIT_FOR_READY |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA |
		(mtd->writesize + mtd->oobsize);
	d->cmd.pio_words[1] = 0;
	d->cmd.pio_words[2] = 0;

	mxs_dma_desc_append(channel, d);

	/* Compile the DMA descriptor - deassert the NAND lock and interrupt. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_NO_DMAXFER | MXS_DMA_DESC_IRQ |
		MXS_DMA_DESC_DEC_SEM;

	d->cmd.address = 0;

	mxs_dma_desc_append(channel, d);

	/* Execute the DMA chain. */
	ret = mxs_dma_go(channel);
	if (ret) {
		printf("MXS NAND: DMA read error\n");
		goto rtn;
	}

	ret = mxs_nand_wait_for_bch_complete();
	if (ret) {
		printf("MXS NAND: BCH read timeout\n");
		goto rtn;
	}

	/* Invalidate caches */
	mxs_nand_inval_data_buf(nand_info);

	/* Read DMA completed, now do the mark swapping. */
	mxs_nand_swap_block_mark(mtd, nand_info->data_buf, nand_info->oob_buf);

	/* Loop over status bytes, accumulating ECC status. */
	status = nand_info->oob_buf + mxs_nand_aux_status_offset();
	for (i = 0; i < mxs_nand_ecc_chunk_cnt(mtd->writesize); i++) {
		if (status[i] == 0x00)
			continue;

		if (status[i] == 0xff)
			continue;

		if (status[i] == 0xfe) {
			failed++;
			continue;
		}

		corrected += status[i];
	}

	/* Propagate ECC status to the owning MTD. */
	mtd->ecc_stats.failed += failed;
	mtd->ecc_stats.corrected += corrected;

	/*
	 * It's time to deliver the OOB bytes.  See mxs_nand_ecc_read_oob() for
	 * details about our policy for delivering the OOB.
	 *
	 * We fill the caller's buffer with set bits, and then copy the block
	 * mark to the caller's buffer.  Note that, if block mark swapping was
	 * necessary, it has already been done, so we can rely on the first
	 * byte of the auxiliary buffer to contain the block mark.
	 */
	memset(nand->oob_poi, 0xff, mtd->oobsize);

	nand->oob_poi[0] = nand_info->oob_buf[0];

	memcpy(buf, nand_info->data_buf, mtd->writesize);

rtn:
	mxs_nand_return_dma_descs(nand_info);

	return ret;
}

/*
 * Write a page to NAND.
 */
static void mxs_nand_ecc_write_page(struct mtd_info *mtd,
				struct nand_chip *nand, const uint8_t *buf)
{
	struct mxs_nand_info *nand_info = nand->priv;
	struct mxs_dma_desc *d;
	uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip;
	int ret;

	memcpy(nand_info->data_buf, buf, mtd->writesize);
	memcpy(nand_info->oob_buf, nand->oob_poi, mtd->oobsize);

	/* Handle block mark swapping. */
	mxs_nand_swap_block_mark(mtd, nand_info->data_buf, nand_info->oob_buf);

	/* Compile the DMA descriptor - write data. */
	d = mxs_nand_get_dma_desc(nand_info);
	d->cmd.data =
		MXS_DMA_DESC_COMMAND_NO_DMAXFER | MXS_DMA_DESC_IRQ |
		MXS_DMA_DESC_DEC_SEM | MXS_DMA_DESC_WAIT4END |
		(6 << MXS_DMA_DESC_PIO_WORDS_OFFSET);

	d->cmd.address = 0;

	d->cmd.pio_words[0] =
		GPMI_CTRL0_COMMAND_MODE_WRITE |
		GPMI_CTRL0_WORD_LENGTH |
		(nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) |
		GPMI_CTRL0_ADDRESS_NAND_DATA;
	d->cmd.pio_words[1] = 0;
	d->cmd.pio_words[2] =
		GPMI_ECCCTRL_ENABLE_ECC |
		GPMI_ECCCTRL_ECC_CMD_ENCODE |
		GPMI_ECCCTRL_BUFFER_MASK_BCH_PAGE;
	d->cmd.pio_words[3] = (mtd->writesize + mtd->oobsize);
	d->cmd.pio_words[4] = (dma_addr_t)nand_info->data_buf;
	d->cmd.pio_words[5] = (dma_addr_t)nand_info->oob_buf;

	mxs_dma_desc_append(channel, d);

	/* Flush caches */
	mxs_nand_flush_data_buf(nand_info);

	/* Execute the DMA chain. */
	ret = mxs_dma_go(channel);
	if (ret) {
		printf("MXS NAND: DMA write error\n");
		goto rtn;
	}

	ret = mxs_nand_wait_for_bch_complete();
	if (ret) {
		printf("MXS NAND: BCH write timeout\n");
		goto rtn;
	}

rtn:
	mxs_nand_return_dma_descs(nand_info);
}

/*
 * Read OOB from NAND.
 *
 * This function is a veneer that replaces the function originally installed
 * by the NAND flash MTD code.  It records whether the caller asked for raw
 * OOB before handing off to the original implementation.
 */
static int mxs_nand_hook_read_oob(struct mtd_info *mtd, loff_t from,
					struct mtd_oob_ops *ops)
{
	struct nand_chip *chip = mtd->priv;
	struct mxs_nand_info *nand_info = chip->priv;
	int ret;

	if (ops->mode == MTD_OOB_RAW)
		nand_info->raw_oob_mode = 1;
	else
		nand_info->raw_oob_mode = 0;

	ret = nand_info->hooked_read_oob(mtd, from, ops);

	nand_info->raw_oob_mode = 0;

	return ret;
}

/*
 * Write OOB to NAND.
 *
 * This function is a veneer that replaces the function originally installed
 * by the NAND flash MTD code.
 */
static int mxs_nand_hook_write_oob(struct mtd_info *mtd, loff_t to,
					struct mtd_oob_ops *ops)
{
	struct nand_chip *chip = mtd->priv;
	struct mxs_nand_info *nand_info = chip->priv;
	int ret;

	if (ops->mode == MTD_OOB_RAW)
		nand_info->raw_oob_mode = 1;
	else
		nand_info->raw_oob_mode = 0;

	ret = nand_info->hooked_write_oob(mtd, to, ops);

	nand_info->raw_oob_mode = 0;

	return ret;
}

/*
 * Mark a block bad in NAND.
 *
 * This function is a veneer that replaces the function originally installed
 * by the NAND flash MTD code.
 */
static int mxs_nand_hook_block_markbad(struct mtd_info *mtd, loff_t ofs)
{
	struct nand_chip *chip = mtd->priv;
	struct mxs_nand_info *nand_info = chip->priv;
	int ret;

	nand_info->marking_block_bad = 1;

	ret = nand_info->hooked_block_markbad(mtd, ofs);

	nand_info->marking_block_bad = 0;

	return ret;
}

/*
 * Read the OOB from a page of NAND.
 *
 * The hardware ECC layout doesn't leave client-usable OOB bytes, so in the
 * normal (ECC) case the caller gets a buffer of 0xff with only the block
 * mark filled in.  In raw mode the physical OOB area is read back exactly
 * as it is on the chip.
 */
static int mxs_nand_ecc_read_oob(struct mtd_info *mtd, struct nand_chip *nand,
				int page, int cmd)
{
	struct mxs_nand_info *nand_info = nand->priv;

	/*
	 * First, fill in the OOB buffer.  If we're doing a raw read, we need
	 * to get the raw OOB area exactly as it is on the chip.
	 */
	if (nand_info->raw_oob_mode) {
		/*
		 * Raw mode: read the physical OOB area, starting right after
		 * the data area.
		 */
		nand->cmdfunc(mtd, NAND_CMD_READ0, mtd->writesize, page);
		nand->read_buf(mtd, nand->oob_poi, mtd->oobsize);
	} else {
		/*
		 * "Normal" mode: fill the OOB with set bits and read back only
		 * the block mark byte.
		 */
		memset(nand->oob_poi, 0xff, mtd->oobsize);

		nand->cmdfunc(mtd, NAND_CMD_READ0, mtd->writesize, page);
		mxs_nand_read_buf(mtd, nand->oob_poi, 1);
	}

	return 0;
}

/*
 * Write the OOB of a page of NAND.
 */
static int mxs_nand_ecc_write_oob(struct mtd_info *mtd, struct nand_chip *nand,
					int page)
{
	struct mxs_nand_info *nand_info = nand->priv;
	uint8_t block_mark = 0;

	/*
	 * There isn't anything useful to write to the OOB with this ECC
	 * layout, so the only OOB write we support is marking a block bad:
	 * the hook sets marking_block_bad before the MTD code lands here,
	 * and all we do is clear the block mark byte.
	 */
	if (!nand_info->marking_block_bad) {
		printf("MXS NAND: Writing OOB isn't supported\n");
		return -EIO;
	}

	/* Write the block mark. */
	nand->cmdfunc(mtd, NAND_CMD_SEQIN, mtd->writesize, page);
	nand->write_buf(mtd, &block_mark, 1);
	nand->cmdfunc(mtd, NAND_CMD_PAGEPROG, -1, -1);

	/* Check if it worked. */
	if (nand->waitfunc(mtd, nand) & NAND_STATUS_FAIL)
		return -EIO;

	return 0;
}
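
/*
 * Marking a block bad therefore flows: mtd->block_markbad() ->
 * mxs_nand_hook_block_markbad() sets marking_block_bad -> the MTD core
 * writes the OOB -> mxs_nand_ecc_write_oob() programs a single 0x00 byte at
 * the block mark position.
 */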

/*
 * Claims all blocks are good.
 *
 * This is only consulted when no bad block table is available; since this
 * driver always builds an in-memory BBT (see mxs_nand_scan_bbt() below), the
 * BBT is the authority on bad blocks and this function can simply report
 * every block as good.
 */
static int mxs_nand_block_bad(struct mtd_info *mtd, loff_t ofs, int getchip)
{
	return 0;
}

/*
 * Nominally this function looks for or creates the bad block table, but it is
 * also the last driver hook called by nand_scan(), so it's a convenient place
 * to finish setting up: program the BCH geometry registers, install the MTD
 * hooks above, and only then hand over to the default BBT code.
 */
static int mxs_nand_scan_bbt(struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd->priv;
	struct mxs_nand_info *nand_info = nand->priv;
	struct mx28_bch_regs *bch_regs = (struct mx28_bch_regs *)MXS_BCH_BASE;
	uint32_t tmp;

	/* Reset the BCH block. */
	mx28_reset_block(&bch_regs->hw_bch_ctrl_reg);

	/* Configure layout 0 */
	tmp = (mxs_nand_ecc_chunk_cnt(mtd->writesize) - 1)
		<< BCH_FLASHLAYOUT0_NBLOCKS_OFFSET;
	tmp |= MXS_NAND_METADATA_SIZE << BCH_FLASHLAYOUT0_META_SIZE_OFFSET;
	tmp |= (mxs_nand_get_ecc_strength(mtd->writesize, mtd->oobsize) >> 1)
		<< BCH_FLASHLAYOUT0_ECC0_OFFSET;
	tmp |= MXS_NAND_CHUNK_DATA_CHUNK_SIZE;
	writel(tmp, &bch_regs->hw_bch_flash0layout0);

	tmp = (mtd->writesize + mtd->oobsize)
		<< BCH_FLASHLAYOUT1_PAGE_SIZE_OFFSET;
	tmp |= (mxs_nand_get_ecc_strength(mtd->writesize, mtd->oobsize) >> 1)
		<< BCH_FLASHLAYOUT1_ECCN_OFFSET;
	tmp |= MXS_NAND_CHUNK_DATA_CHUNK_SIZE;
	writel(tmp, &bch_regs->hw_bch_flash0layout1);

	/* Set *all* chip selects to use layout 0 */
	writel(0, &bch_regs->hw_bch_layoutselect);

	/* Enable the BCH complete interrupt */
	writel(BCH_CTRL_COMPLETE_IRQ_EN, &bch_regs->hw_bch_ctrl_set);

	/* Hook some operations at the MTD level. */
	if (mtd->read_oob != mxs_nand_hook_read_oob) {
		nand_info->hooked_read_oob = mtd->read_oob;
		mtd->read_oob = mxs_nand_hook_read_oob;
	}

	if (mtd->write_oob != mxs_nand_hook_write_oob) {
		nand_info->hooked_write_oob = mtd->write_oob;
		mtd->write_oob = mxs_nand_hook_write_oob;
	}

	if (mtd->block_markbad != mxs_nand_hook_block_markbad) {
		nand_info->hooked_block_markbad = mtd->block_markbad;
		mtd->block_markbad = mxs_nand_hook_block_markbad;
	}

	/* We use the reference implementation for bad block management. */
	return nand_default_bbt(mtd);
}
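
/*
 * For a 2048+64 page the layout registers end up as: FLASH0LAYOUT0 with
 * NBLOCKS = 3 (four 512-byte chunks), META_SIZE = 10 and ECC0 = 4 (the field
 * encodes half the ECC strength, i.e. 8-bit), and FLASH0LAYOUT1 with
 * PAGE_SIZE = 2112 and the same ECC level and chunk size for the remaining
 * chunks.
 */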

/*
 * Allocate DMA buffers
 */
int mxs_nand_alloc_buffers(struct mxs_nand_info *nand_info)
{
	uint8_t *buf;
	const int size = NAND_MAX_PAGESIZE + NAND_MAX_OOBSIZE;

	nand_info->data_buf_size = roundup(size, MXS_DMA_ALIGNMENT);

	/* DMA buffers */
	buf = memalign(MXS_DMA_ALIGNMENT, nand_info->data_buf_size);
	if (!buf) {
		printf("MXS NAND: Error allocating DMA buffers\n");
		return -ENOMEM;
	}

	memset(buf, 0, nand_info->data_buf_size);

	nand_info->data_buf = buf;
	nand_info->oob_buf = buf + NAND_MAX_PAGESIZE;

	/* Command buffer */
	nand_info->cmd_buf = memalign(MXS_DMA_ALIGNMENT,
				MXS_NAND_COMMAND_BUFFER_SIZE);
	if (!nand_info->cmd_buf) {
		free(buf);
		printf("MXS NAND: Error allocating command buffers\n");
		return -ENOMEM;
	}
	memset(nand_info->cmd_buf, 0, MXS_NAND_COMMAND_BUFFER_SIZE);
	nand_info->cmd_queue_len = 0;

	return 0;
}
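
/*
 * data_buf and oob_buf share one allocation laid out as [page data | OOB],
 * with oob_buf starting NAND_MAX_PAGESIZE bytes in.  Aligning the start and
 * rounding the size up to MXS_DMA_ALIGNMENT keeps the flush/invalidate
 * helpers on cache-line boundaries owned by this buffer (assuming the DMA
 * alignment is at least a cache line, as it is on this platform).
 */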

/*
 * Initializes the NFC hardware.
 */
int mxs_nand_init(struct mxs_nand_info *info)
{
	struct mx28_gpmi_regs *gpmi_regs =
		(struct mx28_gpmi_regs *)MXS_GPMI_BASE;
	int i = 0, j;

	info->desc = malloc(sizeof(struct mxs_dma_desc *) *
				MXS_NAND_DMA_DESCRIPTOR_COUNT);
	if (!info->desc)
		goto err1;

	/* Allocate the DMA descriptors. */
	for (i = 0; i < MXS_NAND_DMA_DESCRIPTOR_COUNT; i++) {
		info->desc[i] = mxs_dma_desc_alloc();
		if (!info->desc[i])
			goto err2;
	}

	/* Init the DMA controller. */
	for (j = MXS_DMA_CHANNEL_AHB_APBH_GPMI0;
		j <= MXS_DMA_CHANNEL_AHB_APBH_GPMI7; j++) {
		if (mxs_dma_init_channel(j))
			goto err3;
	}

	/* Reset the GPMI block. */
	mx28_reset_block(&gpmi_regs->hw_gpmi_ctrl0_reg);

	/*
	 * Choose NAND mode, set the IRQ polarity, disable write protection
	 * and select BCH ECC.
	 */
	clrsetbits_le32(&gpmi_regs->hw_gpmi_ctrl1,
			GPMI_CTRL1_GPMI_MODE,
			GPMI_CTRL1_ATA_IRQRDY_POLARITY | GPMI_CTRL1_DEV_RESET |
			GPMI_CTRL1_BCH_MODE);

	return 0;

err3:
	for (--j; j >= MXS_DMA_CHANNEL_AHB_APBH_GPMI0; j--)
		mxs_dma_release(j);
err2:
	/* Free the descriptors before the array that holds them. */
	for (--i; i >= 0; i--)
		mxs_dma_desc_free(info->desc[i]);
	free(info->desc);
err1:
	printf("MXS NAND: Unable to allocate DMA descriptors\n");
	return -ENOMEM;
}

/*
 * Board-level NAND initialization, called from the generic NAND setup code.
 * Allocates the driver's private data and DMA buffers, brings up the GPMI
 * hardware and fills in the nand_chip callbacks; the rest of the probing is
 * done by nand_scan().
 */
int board_nand_init(struct nand_chip *nand)
{
	struct mxs_nand_info *nand_info;
	int err;

	nand_info = malloc(sizeof(struct mxs_nand_info));
	if (!nand_info) {
		printf("MXS NAND: Failed to allocate private data\n");
		return -ENOMEM;
	}
	memset(nand_info, 0, sizeof(struct mxs_nand_info));

	err = mxs_nand_alloc_buffers(nand_info);
	if (err)
		goto err1;

	err = mxs_nand_init(nand_info);
	if (err)
		goto err2;

	memset(&fake_ecc_layout, 0, sizeof(fake_ecc_layout));

	nand->priv = nand_info;
	nand->options |= NAND_NO_SUBPAGE_WRITE;

	nand->cmd_ctrl = mxs_nand_cmd_ctrl;

	nand->dev_ready = mxs_nand_device_ready;
	nand->select_chip = mxs_nand_select_chip;
	nand->block_bad = mxs_nand_block_bad;
	nand->scan_bbt = mxs_nand_scan_bbt;

	nand->read_byte = mxs_nand_read_byte;

	nand->read_buf = mxs_nand_read_buf;
	nand->write_buf = mxs_nand_write_buf;

	nand->ecc.read_page = mxs_nand_ecc_read_page;
	nand->ecc.write_page = mxs_nand_ecc_write_page;
	nand->ecc.read_oob = mxs_nand_ecc_read_oob;
	nand->ecc.write_oob = mxs_nand_ecc_write_oob;

	nand->ecc.layout = &fake_ecc_layout;
	nand->ecc.mode = NAND_ECC_HW;
	nand->ecc.bytes = 9;
	nand->ecc.size = 512;

	return 0;

err2:
	free(nand_info->data_buf);
	free(nand_info->cmd_buf);
err1:
	free(nand_info);
	return err;
}