/*
 * Toshiba Mobile IO (TMIO) NAND flash controller driver
 *
 * Authors: Ian Molton, Dirk Opfer, Chris Humbert, Dmitry Baryshkov
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/mfd/core.h>
#include <linux/mfd/tmio.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/interrupt.h>
#include <linux/ioport.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/nand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/mtd/partitions.h>
#include <linux/slab.h>

/*
 * NAND flash host controller configuration registers (CCR)
 */
#define CCR_COMMAND	0x04
#define CCR_BASE	0x10
#define CCR_INTP	0x3d
#define CCR_INTE	0x48
#define CCR_EC		0x4a
#define CCR_ICC		0x4c
#define CCR_ECCC	0x5b
#define CCR_NFTC	0x60
#define CCR_NFM		0x61
#define CCR_NFPSC	0x62
#define CCR_NFDC	0x63

/*
 * NAND flash control registers (FCR)
 */
#define FCR_DATA	0x00
#define FCR_MODE	0x04
#define FCR_STATUS	0x05
#define FCR_ISR		0x06
#define FCR_IMR		0x07

/* FCR_MODE register command values */
#define FCR_MODE_DATA		0x94
#define FCR_MODE_COMMAND	0x95
#define FCR_MODE_ADDRESS	0x96

#define FCR_MODE_HWECC_CALC	0xB4
#define FCR_MODE_HWECC_RESULT	0xD4
#define FCR_MODE_HWECC_RESET	0xF4

#define FCR_MODE_POWER_ON	0x0C
#define FCR_MODE_POWER_OFF	0x08

#define FCR_MODE_LED_OFF	0x00
#define FCR_MODE_LED_ON		0x04

#define FCR_MODE_EJECT_ON	0x68
#define FCR_MODE_EJECT_OFF	0x08

#define FCR_MODE_LOCK		0x6C
#define FCR_MODE_UNLOCK		0x0C

#define FCR_MODE_CONTROLLER_ID	0x40
#define FCR_MODE_STANDBY	0x00

#define FCR_MODE_WE		0x80
#define FCR_MODE_ECC1		0x40
#define FCR_MODE_ECC0		0x20
#define FCR_MODE_CE		0x10
#define FCR_MODE_PCNT1		0x08
#define FCR_MODE_PCNT0		0x04
#define FCR_MODE_ALE		0x02
#define FCR_MODE_CLE		0x01

#define FCR_STATUS_BUSY		0x80

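/* per-controller driver state */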
struct tmio_nand {
	struct mtd_info mtd;
	struct nand_chip chip;

	struct platform_device *dev;

	void __iomem *ccr;
	void __iomem *fcr;
	unsigned long fcr_base;

	unsigned int irq;

	/* for tmio_nand_read_byte(): high byte of the last 16-bit read */
	u8 read;
	unsigned read_good:1;
};

#define mtd_to_tmio(m)	container_of(m, struct tmio_nand, mtd)
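
/*
 * Command-control hook for the NAND core: on a control change, program
 * the FCR mode register so CLE/ALE match the requested state (or drop
 * to standby when the chip is deselected), then write any command or
 * address byte to the data register.
 */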
static void tmio_nand_hwcontrol(struct mtd_info *mtd, int cmd,
				unsigned int ctrl)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);
	struct nand_chip *chip = mtd->priv;

	if (ctrl & NAND_CTRL_CHANGE) {
		u8 mode;

		if (ctrl & NAND_NCE) {
			mode = FCR_MODE_DATA;

			if (ctrl & NAND_CLE)
				mode |= FCR_MODE_CLE;
			else
				mode &= ~FCR_MODE_CLE;

			if (ctrl & NAND_ALE)
				mode |= FCR_MODE_ALE;
			else
				mode &= ~FCR_MODE_ALE;
		} else {
			mode = FCR_MODE_STANDBY;
		}

		tmio_iowrite8(mode, tmio->fcr + FCR_MODE);
		tmio->read_good = 0;
	}

	if (cmd != NAND_CMD_NONE)
		tmio_iowrite8(cmd, chip->IO_ADDR_W);
}

static int tmio_nand_dev_ready(struct mtd_info *mtd)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);

	return !(tmio_ioread8(tmio->fcr + FCR_STATUS) & FCR_STATUS_BUSY);
}

static irqreturn_t tmio_irq(int irq, void *__tmio)
{
	struct tmio_nand *tmio = __tmio;
	struct nand_chip *nand_chip = &tmio->chip;

	/* mask the ready interrupt again and wake up the waiter */
	tmio_iowrite8(0x00, tmio->fcr + FCR_IMR);

	if (unlikely(!waitqueue_active(&nand_chip->controller->wq)))
		dev_warn(&tmio->dev->dev, "spurious interrupt\n");

	wake_up(&nand_chip->controller->wq);
	return IRQ_HANDLED;
}

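/*
 * The controller can raise an interrupt once the NAND device reports
 * ready.  That interrupt is normally masked; for long operations
 * (erase and program) it is unmasked here so the thread can sleep on
 * the controller wait queue instead of polling, and tmio_irq() masks
 * it again on completion.
 */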
static int
tmio_nand_wait(struct mtd_info *mtd, struct nand_chip *nand_chip)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);
	long timeout;

	/* clear pending status and unmask the ready interrupt */
	tmio_iowrite8(0x0f, tmio->fcr + FCR_ISR);
	tmio_iowrite8(0x81, tmio->fcr + FCR_IMR);

	timeout = wait_event_timeout(nand_chip->controller->wq,
		tmio_nand_dev_ready(mtd),
		msecs_to_jiffies(nand_chip->state == FL_ERASING ? 400 : 20));

	if (unlikely(!tmio_nand_dev_ready(mtd))) {
		tmio_iowrite8(0x00, tmio->fcr + FCR_IMR);
		dev_warn(&tmio->dev->dev, "still busy with %s after %d ms\n",
			nand_chip->state == FL_ERASING ? "erase" : "program",
			nand_chip->state == FL_ERASING ? 400 : 20);

	} else if (unlikely(!timeout)) {
		tmio_iowrite8(0x00, tmio->fcr + FCR_IMR);
		dev_warn(&tmio->dev->dev, "timeout waiting for interrupt\n");
	}

	nand_chip->cmdfunc(mtd, NAND_CMD_STATUS, -1, -1);
	return nand_chip->read_byte(mtd);
}
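
/*
 * Reads from the data register are 16 bits wide, but the NAND core
 * performs single-byte reads (for example while parsing READID).  Read
 * one 16-bit word, return its low byte and stash the high byte for the
 * next call.  tmio_nand_hwcontrol() clears read_good so a stale byte
 * is never returned after a new command or address cycle.
 */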
static u_char tmio_nand_read_byte(struct mtd_info *mtd)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);
	unsigned int data;

	if (tmio->read_good--)
		return tmio->read;

	data = tmio_ioread16(tmio->fcr + FCR_DATA);
	tmio->read = data >> 8;
	return data;
}
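
/*
 * Buffer transfers go over the 16-bit data register, so read and write
 * len/2 words at a time.
 */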
static void
tmio_nand_write_buf(struct mtd_info *mtd, const u_char *buf, int len)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);

	tmio_iowrite16_rep(tmio->fcr + FCR_DATA, buf, len >> 1);
}

static void tmio_nand_read_buf(struct mtd_info *mtd, u_char *buf, int len)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);

	tmio_ioread16_rep(tmio->fcr + FCR_DATA, buf, len >> 1);
}

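/*
 * Hardware ECC: reset the ECC engine, then switch it to calculate mode;
 * tmio_nand_calculate_ecc() reads the result back once the data has
 * been transferred.
 */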
static void tmio_nand_enable_hwecc(struct mtd_info *mtd, int mode)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);

	tmio_iowrite8(FCR_MODE_HWECC_RESET, tmio->fcr + FCR_MODE);
	tmio_ioread8(tmio->fcr + FCR_DATA);	/* dummy read */
	tmio_iowrite8(FCR_MODE_HWECC_CALC, tmio->fcr + FCR_MODE);
}

static int tmio_nand_calculate_ecc(struct mtd_info *mtd, const u_char *dat,
				   u_char *ecc_code)
{
	struct tmio_nand *tmio = mtd_to_tmio(mtd);
	unsigned int ecc;

	tmio_iowrite8(FCR_MODE_HWECC_RESULT, tmio->fcr + FCR_MODE);

	/*
	 * The six ECC bytes for the two 256-byte halves are packed into
	 * three 16-bit words; unpack them in the order expected by
	 * tmio_nand_correct_data().
	 */
	ecc = tmio_ioread16(tmio->fcr + FCR_DATA);
	ecc_code[1] = ecc;
	ecc_code[0] = ecc >> 8;
	ecc = tmio_ioread16(tmio->fcr + FCR_DATA);
	ecc_code[2] = ecc;
	ecc_code[4] = ecc >> 8;
	ecc = tmio_ioread16(tmio->fcr + FCR_DATA);
	ecc_code[3] = ecc;
	ecc_code[5] = ecc >> 8;

	tmio_iowrite8(FCR_MODE_DATA, tmio->fcr + FCR_MODE);
	return 0;
}

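/*
 * Correct a 512-byte ECC chunk as two independent 256-byte halves and
 * return the total number of corrected bitflips, or a negative value
 * if either half is uncorrectable.
 */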
static int tmio_nand_correct_data(struct mtd_info *mtd, unsigned char *buf,
		unsigned char *read_ecc, unsigned char *calc_ecc)
{
	int r0, r1;

	/* assumes ecc.size = 512 and ecc.bytes = 6 */
	r0 = __nand_correct_data(buf, read_ecc, calc_ecc, 256);
	if (r0 < 0)
		return r0;
	r1 = __nand_correct_data(buf + 256, read_ecc + 3, calc_ecc + 3, 256);
	if (r1 < 0)
		return r1;
	return r0 + r1;
}

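/*
 * Bring the controller up: enable the MFD cell, program the FCR base
 * address and the power-supply/detect control registers, clear pending
 * interrupt status, then power up the NAND device and reset it.
 */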
static int tmio_hw_init(struct platform_device *dev, struct tmio_nand *tmio)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);
	int ret;

	if (cell->enable) {
		ret = cell->enable(dev);
		if (ret)
			return ret;
	}

	/* (4Ch) clock control */
	tmio_iowrite8(0x81, tmio->ccr + CCR_ICC);

	/* (10h) base address of the FCR window, low then high 16 bits */
	tmio_iowrite16(tmio->fcr_base, tmio->ccr + CCR_BASE);
	tmio_iowrite16(tmio->fcr_base >> 16, tmio->ccr + CCR_BASE + 2);

	/* (04h) command register */
	tmio_iowrite8(0x02, tmio->ccr + CCR_COMMAND);

	/* (62h) NAND flash power supply control */
	tmio_iowrite8(0x02, tmio->ccr + CCR_NFPSC);

	/* (63h) NAND flash detect control */
	tmio_iowrite8(0x02, tmio->ccr + CCR_NFDC);

	/* clear any pending interrupt status */
	tmio_iowrite8(0x0f, tmio->fcr + FCR_ISR);

	/* power up the NAND device and issue a reset command */
	tmio_iowrite8(FCR_MODE_POWER_ON, tmio->fcr + FCR_MODE);
	tmio_iowrite8(FCR_MODE_COMMAND, tmio->fcr + FCR_MODE);
	tmio_iowrite8(NAND_CMD_RESET, tmio->fcr + FCR_DATA);

	/* back to standby while the reset completes */
	tmio_iowrite8(FCR_MODE_STANDBY, tmio->fcr + FCR_MODE);

	mdelay(5);

	return 0;
}

static void tmio_hw_stop(struct platform_device *dev, struct tmio_nand *tmio)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);

	tmio_iowrite8(FCR_MODE_POWER_OFF, tmio->fcr + FCR_MODE);
	if (cell->disable)
		cell->disable(dev);
}

static int tmio_probe(struct platform_device *dev)
{
	struct tmio_nand_data *data = dev_get_platdata(&dev->dev);
	struct resource *fcr = platform_get_resource(dev,
			IORESOURCE_MEM, 0);
	struct resource *ccr = platform_get_resource(dev,
			IORESOURCE_MEM, 1);
	int irq = platform_get_irq(dev, 0);
	struct tmio_nand *tmio;
	struct mtd_info *mtd;
	struct nand_chip *nand_chip;
	int retval;

	if (data == NULL)
		dev_warn(&dev->dev, "NULL platform data!\n");

	tmio = kzalloc(sizeof *tmio, GFP_KERNEL);
	if (!tmio) {
		retval = -ENOMEM;
		goto err_kzalloc;
	}

	tmio->dev = dev;

	platform_set_drvdata(dev, tmio);
	mtd = &tmio->mtd;
	nand_chip = &tmio->chip;
	mtd->priv = nand_chip;
	mtd->name = "tmio-nand";

	tmio->ccr = ioremap(ccr->start, resource_size(ccr));
	if (!tmio->ccr) {
		retval = -EIO;
		goto err_iomap_ccr;
	}

	tmio->fcr_base = fcr->start & 0xfffff;
	tmio->fcr = ioremap(fcr->start, resource_size(fcr));
	if (!tmio->fcr) {
		retval = -EIO;
		goto err_iomap_fcr;
	}

	retval = tmio_hw_init(dev, tmio);
	if (retval)
		goto err_hwinit;

	/* both register and data accesses go through the FCR window */
	nand_chip->IO_ADDR_R = tmio->fcr;
	nand_chip->IO_ADDR_W = tmio->fcr;

	/* set the controller-specific access functions */
	nand_chip->cmd_ctrl = tmio_nand_hwcontrol;
	nand_chip->dev_ready = tmio_nand_dev_ready;
	nand_chip->read_byte = tmio_nand_read_byte;
	nand_chip->write_buf = tmio_nand_write_buf;
	nand_chip->read_buf = tmio_nand_read_buf;

	/* hardware ECC: 6 ECC bytes per 512-byte step */
	nand_chip->ecc.mode = NAND_ECC_HW;
	nand_chip->ecc.size = 512;
	nand_chip->ecc.bytes = 6;
	nand_chip->ecc.strength = 2;
	nand_chip->ecc.hwctl = tmio_nand_enable_hwecc;
	nand_chip->ecc.calculate = tmio_nand_calculate_ecc;
	nand_chip->ecc.correct = tmio_nand_correct_data;

	if (data)
		nand_chip->badblock_pattern = data->badblock_pattern;

	/* 15 us command delay time */
	nand_chip->chip_delay = 15;

	retval = request_irq(irq, &tmio_irq, 0, dev_name(&dev->dev), tmio);
	if (retval) {
		dev_err(&dev->dev, "request_irq error %d\n", retval);
		goto err_irq;
	}

	tmio->irq = irq;
	nand_chip->waitfunc = tmio_nand_wait;

	/* scan to find existence of the device */
	if (nand_scan(mtd, 1)) {
		retval = -ENODEV;
		goto err_scan;
	}

	/* register the partitions */
	retval = mtd_device_parse_register(mtd, NULL, NULL,
					   data ? data->partition : NULL,
					   data ? data->num_partitions : 0);
	if (!retval)
		return retval;

	nand_release(mtd);

err_scan:
	if (tmio->irq)
		free_irq(tmio->irq, tmio);
err_irq:
	tmio_hw_stop(dev, tmio);
err_hwinit:
	iounmap(tmio->fcr);
err_iomap_fcr:
	iounmap(tmio->ccr);
err_iomap_ccr:
	kfree(tmio);
err_kzalloc:
	return retval;
}

static int tmio_remove(struct platform_device *dev)
{
	struct tmio_nand *tmio = platform_get_drvdata(dev);

	nand_release(&tmio->mtd);
	if (tmio->irq)
		free_irq(tmio->irq, tmio);
	tmio_hw_stop(dev, tmio);
	iounmap(tmio->fcr);
	iounmap(tmio->ccr);
	kfree(tmio);
	return 0;
}

#ifdef CONFIG_PM
static int tmio_suspend(struct platform_device *dev, pm_message_t state)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);

	if (cell->suspend)
		cell->suspend(dev);

	tmio_hw_stop(dev, platform_get_drvdata(dev));
	return 0;
}

static int tmio_resume(struct platform_device *dev)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);

	/* reinitialise the controller before re-enabling the MFD cell */
	tmio_hw_init(dev, platform_get_drvdata(dev));

	if (cell->resume)
		cell->resume(dev);

	return 0;
}
#else
#define tmio_suspend NULL
#define tmio_resume NULL
#endif

static struct platform_driver tmio_driver = {
	.driver.name	= "tmio-nand",
	.driver.owner	= THIS_MODULE,
	.probe		= tmio_probe,
	.remove		= tmio_remove,
	.suspend	= tmio_suspend,
	.resume		= tmio_resume,
};

module_platform_driver(tmio_driver);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Ian Molton, Dirk Opfer, Chris Humbert, Dmitry Baryshkov");
MODULE_DESCRIPTION("NAND flash driver on Toshiba Mobile IO controller");
MODULE_ALIAS("platform:tmio-nand");