/*
 * Toshiba Mobile IO (TMIO) NAND flash controller driver.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/mfd/core.h>
#include <linux/mfd/tmio.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/interrupt.h>
#include <linux/ioport.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/rawnand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/mtd/partitions.h>
#include <linux/slab.h>

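/* controller configuration register (CCR) offsets */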
#define CCR_COMMAND 0x04
#define CCR_BASE 0x10
#define CCR_INTP 0x3d
#define CCR_INTE 0x48
#define CCR_EC 0x4a
#define CCR_ICC 0x4c
#define CCR_ECCC 0x5b
#define CCR_NFTC 0x60
#define CCR_NFM 0x61
#define CCR_NFPSC 0x62
#define CCR_NFDC 0x63

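/* NAND flash control register (FCR) offsets */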
#define FCR_DATA 0x00
#define FCR_MODE 0x04
#define FCR_STATUS 0x05
#define FCR_ISR 0x06
#define FCR_IMR 0x07

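/* FCR_MODE command values */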
#define FCR_MODE_DATA 0x94
#define FCR_MODE_COMMAND 0x95
#define FCR_MODE_ADDRESS 0x96

#define FCR_MODE_HWECC_CALC 0xB4
#define FCR_MODE_HWECC_RESULT 0xD4
#define FCR_MODE_HWECC_RESET 0xF4

#define FCR_MODE_POWER_ON 0x0C
#define FCR_MODE_POWER_OFF 0x08

#define FCR_MODE_LED_OFF 0x00
#define FCR_MODE_LED_ON 0x04

#define FCR_MODE_EJECT_ON 0x68
#define FCR_MODE_EJECT_OFF 0x08

#define FCR_MODE_LOCK 0x6C
#define FCR_MODE_UNLOCK 0x0C

#define FCR_MODE_CONTROLLER_ID 0x40
#define FCR_MODE_STANDBY 0x00

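/* FCR_MODE bit fields */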
#define FCR_MODE_WE 0x80
#define FCR_MODE_ECC1 0x40
#define FCR_MODE_ECC0 0x20
#define FCR_MODE_CE 0x10
#define FCR_MODE_PCNT1 0x08
#define FCR_MODE_PCNT0 0x04
#define FCR_MODE_ALE 0x02
#define FCR_MODE_CLE 0x01

#define FCR_STATUS_BUSY 0x80

struct tmio_nand {
	struct nand_chip chip;
	struct completion comp;

	struct platform_device *dev;

	void __iomem *ccr;
	void __iomem *fcr;
	unsigned long fcr_base;

	unsigned int irq;

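	/* byte cached by tmio_nand_read_byte() (a 16-bit read returns two bytes) */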
	u8 read;
	unsigned read_good:1;
};

static inline struct tmio_nand *mtd_to_tmio(struct mtd_info *mtd)
{
	return container_of(mtd_to_nand(mtd), struct tmio_nand, chip);
}

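/*
 * Drive the command latch, address latch and chip enable state through the
 * FCR_MODE register, then write command/address bytes to the data register.
 */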
static void tmio_nand_hwcontrol(struct nand_chip *chip, int cmd,
				unsigned int ctrl)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));

	if (ctrl & NAND_CTRL_CHANGE) {
		u8 mode;

		if (ctrl & NAND_NCE) {
			mode = FCR_MODE_DATA;

			if (ctrl & NAND_CLE)
				mode |= FCR_MODE_CLE;
			else
				mode &= ~FCR_MODE_CLE;

			if (ctrl & NAND_ALE)
				mode |= FCR_MODE_ALE;
			else
				mode &= ~FCR_MODE_ALE;
		} else {
			mode = FCR_MODE_STANDBY;
		}

		tmio_iowrite8(mode, tmio->fcr + FCR_MODE);
		tmio->read_good = 0;
	}

	if (cmd != NAND_CMD_NONE)
		tmio_iowrite8(cmd, chip->legacy.IO_ADDR_W);
}

static int tmio_nand_dev_ready(struct nand_chip *chip)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));

	return !(tmio_ioread8(tmio->fcr + FCR_STATUS) & FCR_STATUS_BUSY);
}

static irqreturn_t tmio_irq(int irq, void *__tmio)
{
	struct tmio_nand *tmio = __tmio;

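	/* mask the ready interrupt again; the waiter is woken via the completion */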
	tmio_iowrite8(0x00, tmio->fcr + FCR_IMR);
	complete(&tmio->comp);

	return IRQ_HANDLED;
}

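/*
 * The controller can raise an interrupt when the NAND chip deasserts its
 * busy signal. The interrupt is normally masked; tmio_nand_wait() unmasks
 * it for long operations (erase, program) and sleeps on the completion,
 * which tmio_irq() signals after masking the interrupt again.
 */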
static int tmio_nand_wait(struct nand_chip *nand_chip)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(nand_chip));
	long timeout;
	u8 status;

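	/* clear pending interrupt status and enable the ready interrupt */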
	tmio_iowrite8(0x0f, tmio->fcr + FCR_ISR);
	reinit_completion(&tmio->comp);
	tmio_iowrite8(0x81, tmio->fcr + FCR_IMR);

	timeout = 400;
	timeout = wait_for_completion_timeout(&tmio->comp,
					      msecs_to_jiffies(timeout));

	if (unlikely(!tmio_nand_dev_ready(nand_chip))) {
		tmio_iowrite8(0x00, tmio->fcr + FCR_IMR);
		dev_warn(&tmio->dev->dev, "still busy after 400 ms\n");

	} else if (unlikely(!timeout)) {
		tmio_iowrite8(0x00, tmio->fcr + FCR_IMR);
		dev_warn(&tmio->dev->dev, "timeout waiting for interrupt\n");
	}

	nand_status_op(nand_chip, &status);
	return status;
}

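/*
 * The controller presents NAND data on a 16-bit bus, so a single read
 * returns two NAND bytes. The low byte is returned immediately and the
 * high byte is cached in tmio->read for the next call; read_good marks
 * the cached byte as valid and is cleared by tmio_nand_hwcontrol() so
 * that stale data is never returned after a new command.
 */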
static u_char tmio_nand_read_byte(struct nand_chip *chip)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));
	unsigned int data;

	if (tmio->read_good--)
		return tmio->read;

	data = tmio_ioread16(tmio->fcr + FCR_DATA);
	tmio->read = data >> 8;
	return data;
}

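/*
 * Buffer transfers also go through the 16-bit data register, two bytes at
 * a time, so the length is halved for the 16-bit rep accessors.
 */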
static void
tmio_nand_write_buf(struct nand_chip *chip, const u_char *buf, int len)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));

	tmio_iowrite16_rep(tmio->fcr + FCR_DATA, buf, len >> 1);
}

static void tmio_nand_read_buf(struct nand_chip *chip, u_char *buf, int len)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));

	tmio_ioread16_rep(tmio->fcr + FCR_DATA, buf, len >> 1);
}

static void tmio_nand_enable_hwecc(struct nand_chip *chip, int mode)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));

	tmio_iowrite8(FCR_MODE_HWECC_RESET, tmio->fcr + FCR_MODE);
	tmio_ioread8(tmio->fcr + FCR_DATA);
	tmio_iowrite8(FCR_MODE_HWECC_CALC, tmio->fcr + FCR_MODE);
}

static int tmio_nand_calculate_ecc(struct nand_chip *chip, const u_char *dat,
				   u_char *ecc_code)
{
	struct tmio_nand *tmio = mtd_to_tmio(nand_to_mtd(chip));
	unsigned int ecc;

	tmio_iowrite8(FCR_MODE_HWECC_RESULT, tmio->fcr + FCR_MODE);

	ecc = tmio_ioread16(tmio->fcr + FCR_DATA);
	ecc_code[1] = ecc;
	ecc_code[0] = ecc >> 8;
	ecc = tmio_ioread16(tmio->fcr + FCR_DATA);
	ecc_code[2] = ecc;
	ecc_code[4] = ecc >> 8;
	ecc = tmio_ioread16(tmio->fcr + FCR_DATA);
	ecc_code[3] = ecc;
	ecc_code[5] = ecc >> 8;

	tmio_iowrite8(FCR_MODE_DATA, tmio->fcr + FCR_MODE);
	return 0;
}

static int tmio_nand_correct_data(struct nand_chip *chip, unsigned char *buf,
				  unsigned char *read_ecc,
				  unsigned char *calc_ecc)
{
	int r0, r1;

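	/* assume ecc.size = 512 and ecc.bytes = 6: correct each 256-byte half */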
	r0 = __nand_correct_data(buf, read_ecc, calc_ecc, 256, false);
	if (r0 < 0)
		return r0;
	r1 = __nand_correct_data(buf + 256, read_ecc + 3, calc_ecc + 3, 256,
				 false);
	if (r1 < 0)
		return r1;
	return r0 + r1;
}

static int tmio_hw_init(struct platform_device *dev, struct tmio_nand *tmio)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);
	int ret;

	if (cell->enable) {
		ret = cell->enable(dev);
		if (ret)
			return ret;
	}

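	/* internal clock control (CCR_ICC) */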
	tmio_iowrite8(0x81, tmio->ccr + CCR_ICC);

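	/* tell the controller where the FCR register block lives */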
	tmio_iowrite16(tmio->fcr_base, tmio->ccr + CCR_BASE);
	tmio_iowrite16(tmio->fcr_base >> 16, tmio->ccr + CCR_BASE + 2);

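	/* command register setup (CCR_COMMAND) */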
	tmio_iowrite8(0x02, tmio->ccr + CCR_COMMAND);

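	/* NAND flash power supply control (CCR_NFPSC) */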
	tmio_iowrite8(0x02, tmio->ccr + CCR_NFPSC);

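	/* NAND flash detect control (CCR_NFDC) */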
	tmio_iowrite8(0x02, tmio->ccr + CCR_NFDC);

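	/* clear any pending interrupt status */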
	tmio_iowrite8(0x0f, tmio->fcr + FCR_ISR);

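	/* power up the card and issue a NAND reset command */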
	tmio_iowrite8(FCR_MODE_POWER_ON, tmio->fcr + FCR_MODE);
	tmio_iowrite8(FCR_MODE_COMMAND, tmio->fcr + FCR_MODE);
	tmio_iowrite8(NAND_CMD_RESET, tmio->fcr + FCR_DATA);

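	/* back to standby mode */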
	tmio_iowrite8(FCR_MODE_STANDBY, tmio->fcr + FCR_MODE);

	mdelay(5);

	return 0;
}

static void tmio_hw_stop(struct platform_device *dev, struct tmio_nand *tmio)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);

	tmio_iowrite8(FCR_MODE_POWER_OFF, tmio->fcr + FCR_MODE);
	if (cell->disable)
		cell->disable(dev);
}

static int tmio_probe(struct platform_device *dev)
{
	struct tmio_nand_data *data = dev_get_platdata(&dev->dev);
	struct resource *fcr = platform_get_resource(dev,
			IORESOURCE_MEM, 0);
	struct resource *ccr = platform_get_resource(dev,
			IORESOURCE_MEM, 1);
	int irq = platform_get_irq(dev, 0);
	struct tmio_nand *tmio;
	struct mtd_info *mtd;
	struct nand_chip *nand_chip;
	int retval;

	if (data == NULL)
		dev_warn(&dev->dev, "NULL platform data!\n");

	tmio = devm_kzalloc(&dev->dev, sizeof(*tmio), GFP_KERNEL);
	if (!tmio)
		return -ENOMEM;

	init_completion(&tmio->comp);

	tmio->dev = dev;

	platform_set_drvdata(dev, tmio);
	nand_chip = &tmio->chip;
	mtd = nand_to_mtd(nand_chip);
	mtd->name = "tmio-nand";
	mtd->dev.parent = &dev->dev;

	tmio->ccr = devm_ioremap(&dev->dev, ccr->start, resource_size(ccr));
	if (!tmio->ccr)
		return -EIO;

	tmio->fcr_base = fcr->start & 0xfffff;
	tmio->fcr = devm_ioremap(&dev->dev, fcr->start, resource_size(fcr));
	if (!tmio->fcr)
		return -EIO;

	retval = tmio_hw_init(dev, tmio);
	if (retval)
		return retval;

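	/* set the addresses of the NAND I/O lines */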
	nand_chip->legacy.IO_ADDR_R = tmio->fcr;
	nand_chip->legacy.IO_ADDR_W = tmio->fcr;

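	/* hook up the legacy control callbacks */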
	nand_chip->legacy.cmd_ctrl = tmio_nand_hwcontrol;
	nand_chip->legacy.dev_ready = tmio_nand_dev_ready;
	nand_chip->legacy.read_byte = tmio_nand_read_byte;
	nand_chip->legacy.write_buf = tmio_nand_write_buf;
	nand_chip->legacy.read_buf = tmio_nand_read_buf;

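	/* hardware ECC: 6 ECC bytes per 512-byte step */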
	nand_chip->ecc.mode = NAND_ECC_HW;
	nand_chip->ecc.size = 512;
	nand_chip->ecc.bytes = 6;
	nand_chip->ecc.strength = 2;
	nand_chip->ecc.hwctl = tmio_nand_enable_hwecc;
	nand_chip->ecc.calculate = tmio_nand_calculate_ecc;
	nand_chip->ecc.correct = tmio_nand_correct_data;

	if (data)
		nand_chip->badblock_pattern = data->badblock_pattern;

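	/* 15 us command delay time */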
	nand_chip->legacy.chip_delay = 15;

	retval = devm_request_irq(&dev->dev, irq, &tmio_irq, 0,
				  dev_name(&dev->dev), tmio);
	if (retval) {
		dev_err(&dev->dev, "request_irq error %d\n", retval);
		goto err_irq;
	}

	tmio->irq = irq;
	nand_chip->legacy.waitfunc = tmio_nand_wait;

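	/* scan to find existence of the device */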
	retval = nand_scan(nand_chip, 1);
	if (retval)
		goto err_irq;

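	/* register the partitions */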
	retval = mtd_device_parse_register(mtd,
					   data ? data->part_parsers : NULL,
					   NULL,
					   data ? data->partition : NULL,
					   data ? data->num_partitions : 0);
	if (!retval)
		return retval;

	nand_release(nand_chip);

err_irq:
	tmio_hw_stop(dev, tmio);
	return retval;
}

static int tmio_remove(struct platform_device *dev)
{
	struct tmio_nand *tmio = platform_get_drvdata(dev);

	nand_release(&tmio->chip);
	tmio_hw_stop(dev, tmio);
	return 0;
}

#ifdef CONFIG_PM
static int tmio_suspend(struct platform_device *dev, pm_message_t state)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);

	if (cell->suspend)
		cell->suspend(dev);

	tmio_hw_stop(dev, platform_get_drvdata(dev));
	return 0;
}

static int tmio_resume(struct platform_device *dev)
{
	const struct mfd_cell *cell = mfd_get_cell(dev);

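	/* re-initialise the controller before running the MFD cell's resume hook */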
	tmio_hw_init(dev, platform_get_drvdata(dev));

	if (cell->resume)
		cell->resume(dev);

	return 0;
}
#else
#define tmio_suspend NULL
#define tmio_resume NULL
#endif

static struct platform_driver tmio_driver = {
	.driver.name = "tmio-nand",
	.driver.owner = THIS_MODULE,
	.probe = tmio_probe,
	.remove = tmio_remove,
	.suspend = tmio_suspend,
	.resume = tmio_resume,
};

module_platform_driver(tmio_driver);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Ian Molton, Dirk Opfer, Chris Humbert, Dmitry Baryshkov");
MODULE_DESCRIPTION("NAND flash driver on Toshiba Mobile IO controller");
MODULE_ALIAS("platform:tmio-nand");