/*
 * Driver for NVIDIA Tegra20 serial flash (SFLASH) controller.
 *
 * Author: Laxman Dewangan <ldewangan@nvidia.com>
 */
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/spi/spi.h>
#include <linux/clk/tegra.h>

#define SPI_COMMAND 0x000
#define SPI_GO BIT(30)
#define SPI_M_S BIT(28)
#define SPI_ACTIVE_SCLK_MASK (0x3 << 26)
#define SPI_ACTIVE_SCLK_DRIVE_LOW (0 << 26)
#define SPI_ACTIVE_SCLK_DRIVE_HIGH (1 << 26)
#define SPI_ACTIVE_SCLK_PULL_LOW (2 << 26)
#define SPI_ACTIVE_SCLK_PULL_HIGH (3 << 26)

#define SPI_CK_SDA_FALLING (1 << 21)
#define SPI_CK_SDA_RISING (0 << 21)
#define SPI_CK_SDA_MASK (1 << 21)
#define SPI_ACTIVE_SDA (0x3 << 18)
#define SPI_ACTIVE_SDA_DRIVE_LOW (0 << 18)
#define SPI_ACTIVE_SDA_DRIVE_HIGH (1 << 18)
#define SPI_ACTIVE_SDA_PULL_LOW (2 << 18)
#define SPI_ACTIVE_SDA_PULL_HIGH (3 << 18)

#define SPI_CS_POL_INVERT BIT(16)
#define SPI_TX_EN BIT(15)
#define SPI_RX_EN BIT(14)
#define SPI_CS_VAL_HIGH BIT(13)
#define SPI_CS_VAL_LOW 0x0
#define SPI_CS_SW BIT(12)
#define SPI_CS_HW 0x0
#define SPI_CS_DELAY_MASK (7 << 9)
#define SPI_CS3_EN BIT(8)
#define SPI_CS2_EN BIT(7)
#define SPI_CS1_EN BIT(6)
#define SPI_CS0_EN BIT(5)

#define SPI_CS_MASK (SPI_CS3_EN | SPI_CS2_EN | \
		SPI_CS1_EN | SPI_CS0_EN)
#define SPI_BIT_LENGTH(x) (((x) & 0x1f) << 0)

#define SPI_MODES (SPI_ACTIVE_SCLK_MASK | SPI_CK_SDA_MASK)

#define SPI_STATUS 0x004
#define SPI_BSY BIT(31)
#define SPI_RDY BIT(30)
#define SPI_TXF_FLUSH BIT(29)
#define SPI_RXF_FLUSH BIT(28)
#define SPI_RX_UNF BIT(27)
#define SPI_TX_OVF BIT(26)
#define SPI_RXF_EMPTY BIT(25)
#define SPI_RXF_FULL BIT(24)
#define SPI_TXF_EMPTY BIT(23)
#define SPI_TXF_FULL BIT(22)
#define SPI_BLK_CNT(count) (((count) & 0xffff) + 1)

#define SPI_FIFO_ERROR (SPI_RX_UNF | SPI_TX_OVF)
#define SPI_FIFO_EMPTY (SPI_TXF_EMPTY | SPI_RXF_EMPTY)

#define SPI_RX_CMP 0x8
#define SPI_DMA_CTL 0x0C
#define SPI_DMA_EN BIT(31)
#define SPI_IE_RXC BIT(27)
#define SPI_IE_TXC BIT(26)
#define SPI_PACKED BIT(20)
#define SPI_RX_TRIG_MASK (0x3 << 18)
#define SPI_RX_TRIG_1W (0x0 << 18)
#define SPI_RX_TRIG_4W (0x1 << 18)
#define SPI_TX_TRIG_MASK (0x3 << 16)
#define SPI_TX_TRIG_1W (0x0 << 16)
#define SPI_TX_TRIG_4W (0x1 << 16)
#define SPI_DMA_BLK_COUNT(count) (((count) - 1) & 0xFFFF)

#define SPI_TX_FIFO 0x10
#define SPI_RX_FIFO 0x20

#define DATA_DIR_TX (1 << 0)
#define DATA_DIR_RX (1 << 1)

#define MAX_CHIP_SELECT 4
#define SPI_FIFO_DEPTH 4
#define SPI_DMA_TIMEOUT (msecs_to_jiffies(1000))

struct tegra_sflash_data {
	struct device *dev;
	struct spi_master *master;
	spinlock_t lock;

	struct clk *clk;
	void __iomem *base;
	unsigned irq;
	u32 spi_max_frequency;
	u32 cur_speed;

	struct spi_device *cur_spi;
	unsigned cur_pos;
	unsigned cur_len;
	unsigned bytes_per_word;
	unsigned cur_direction;
	unsigned curr_xfer_words;

	unsigned cur_rx_pos;
	unsigned cur_tx_pos;

	u32 tx_status;
	u32 rx_status;
	u32 status_reg;

	u32 def_command_reg;
	u32 command_reg;
	u32 dma_control_reg;

	struct completion xfer_completion;
	struct spi_transfer *curr_xfer;
};

static int tegra_sflash_runtime_suspend(struct device *dev);
static int tegra_sflash_runtime_resume(struct device *dev);

static inline unsigned long tegra_sflash_readl(struct tegra_sflash_data *tsd,
		unsigned long reg)
{
	return readl(tsd->base + reg);
}

static inline void tegra_sflash_writel(struct tegra_sflash_data *tsd,
		unsigned long val, unsigned long reg)
{
	writel(val, tsd->base + reg);
}

static void tegra_sflash_clear_status(struct tegra_sflash_data *tsd)
{
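	/* The status bits are write-1-to-clear */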
	tegra_sflash_writel(tsd, SPI_RDY | SPI_FIFO_ERROR, SPI_STATUS);
}

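/*
 * Work out how many words of the current transfer fit in the next chunk,
 * capped at the 4-word FIFO depth.
 */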
static unsigned tegra_sflash_calculate_curr_xfer_param(
	struct spi_device *spi, struct tegra_sflash_data *tsd,
	struct spi_transfer *t)
{
	unsigned remain_len = t->len - tsd->cur_pos;
	unsigned max_word;

	tsd->bytes_per_word = DIV_ROUND_UP(t->bits_per_word, 8);
	max_word = remain_len / tsd->bytes_per_word;
	if (max_word > SPI_FIFO_DEPTH)
		max_word = SPI_FIFO_DEPTH;
	tsd->curr_xfer_words = max_word;
	return max_word;
}

static unsigned tegra_sflash_fill_tx_fifo_from_client_txbuf(
	struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	unsigned nbytes;
	unsigned long status;
	unsigned max_n_32bit = tsd->curr_xfer_words;
	u8 *tx_buf = (u8 *)t->tx_buf + tsd->cur_tx_pos;

	if (max_n_32bit > SPI_FIFO_DEPTH)
		max_n_32bit = SPI_FIFO_DEPTH;
	nbytes = max_n_32bit * tsd->bytes_per_word;

	status = tegra_sflash_readl(tsd, SPI_STATUS);
	while (!(status & SPI_TXF_FULL)) {
		int i;
		unsigned int x = 0;

		for (i = 0; nbytes && (i < tsd->bytes_per_word);
						i++, nbytes--)
			x |= ((*tx_buf++) << i * 8);
		tegra_sflash_writel(tsd, x, SPI_TX_FIFO);
		if (!nbytes)
			break;

		status = tegra_sflash_readl(tsd, SPI_STATUS);
	}
	tsd->cur_tx_pos += max_n_32bit * tsd->bytes_per_word;
	return max_n_32bit;
}

static int tegra_sflash_read_rx_fifo_to_client_rxbuf(
	struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	unsigned long status;
	unsigned int read_words = 0;
	u8 *rx_buf = (u8 *)t->rx_buf + tsd->cur_rx_pos;

	status = tegra_sflash_readl(tsd, SPI_STATUS);
	while (!(status & SPI_RXF_EMPTY)) {
		int i;
		unsigned long x;

		x = tegra_sflash_readl(tsd, SPI_RX_FIFO);
		for (i = 0; (i < tsd->bytes_per_word); i++)
			*rx_buf++ = (x >> (i * 8)) & 0xFF;
		read_words++;
		status = tegra_sflash_readl(tsd, SPI_STATUS);
	}
	tsd->cur_rx_pos += read_words * tsd->bytes_per_word;
	return 0;
}

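/*
 * Kick off one interrupt-driven (PIO) chunk: enable the TX/RX completion
 * interrupts, pre-fill the TX FIFO if needed, program the block count and
 * set SPI_DMA_EN. Despite the register name, no DMA engine is used here.
 */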
static int tegra_sflash_start_cpu_based_transfer(
		struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	unsigned long val = 0;
	unsigned cur_words;

	if (tsd->cur_direction & DATA_DIR_TX)
		val |= SPI_IE_TXC;

	if (tsd->cur_direction & DATA_DIR_RX)
		val |= SPI_IE_RXC;

	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	tsd->dma_control_reg = val;

	if (tsd->cur_direction & DATA_DIR_TX)
		cur_words = tegra_sflash_fill_tx_fifo_from_client_txbuf(tsd, t);
	else
		cur_words = tsd->curr_xfer_words;
	val |= SPI_DMA_BLK_COUNT(cur_words);
	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	tsd->dma_control_reg = val;
	val |= SPI_DMA_EN;
	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	return 0;
}

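/*
 * Configure the command register for one spi_transfer: clock rate, word
 * length, SPI mode (CPOL/CPHA) and chip select, then start the first chunk.
 */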
static int tegra_sflash_start_transfer_one(struct spi_device *spi,
		struct spi_transfer *t, bool is_first_of_msg,
		bool is_single_xfer)
{
	struct tegra_sflash_data *tsd = spi_master_get_devdata(spi->master);
	u32 speed;
	unsigned long command;

	speed = t->speed_hz;
	if (speed != tsd->cur_speed) {
		clk_set_rate(tsd->clk, speed);
		tsd->cur_speed = speed;
	}

	tsd->cur_spi = spi;
	tsd->cur_pos = 0;
	tsd->cur_rx_pos = 0;
	tsd->cur_tx_pos = 0;
	tsd->curr_xfer = t;
	tegra_sflash_calculate_curr_xfer_param(spi, tsd, t);
	if (is_first_of_msg) {
		command = tsd->def_command_reg;
		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
		command |= SPI_CS_VAL_HIGH;

		command &= ~SPI_MODES;
		if (spi->mode & SPI_CPHA)
			command |= SPI_CK_SDA_FALLING;

		if (spi->mode & SPI_CPOL)
			command |= SPI_ACTIVE_SCLK_DRIVE_HIGH;
		else
			command |= SPI_ACTIVE_SCLK_DRIVE_LOW;
		command |= SPI_CS0_EN << spi->chip_select;
	} else {
		command = tsd->command_reg;
		command &= ~SPI_BIT_LENGTH(~0);
		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
		command &= ~(SPI_RX_EN | SPI_TX_EN);
	}

	tsd->cur_direction = 0;
	if (t->rx_buf) {
		command |= SPI_RX_EN;
		tsd->cur_direction |= DATA_DIR_RX;
	}
	if (t->tx_buf) {
		command |= SPI_TX_EN;
		tsd->cur_direction |= DATA_DIR_TX;
	}
	tegra_sflash_writel(tsd, command, SPI_COMMAND);
	tsd->command_reg = command;

	return tegra_sflash_start_cpu_based_transfer(tsd, t);
}

static int tegra_sflash_setup(struct spi_device *spi)
{
	struct tegra_sflash_data *tsd = spi_master_get_devdata(spi->master);

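	/* Use the controller's maximum rate if the device did not set one */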
	spi->max_speed_hz = spi->max_speed_hz ? : tsd->spi_max_frequency;
	return 0;
}

static int tegra_sflash_transfer_one_message(struct spi_master *master,
			struct spi_message *msg)
{
	bool is_first_msg = true;
	int single_xfer;
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	struct spi_transfer *xfer;
	struct spi_device *spi = msg->spi;
	int ret;

	msg->status = 0;
	msg->actual_length = 0;
	single_xfer = list_is_singular(&msg->transfers);
	list_for_each_entry(xfer, &msg->transfers, transfer_list) {
		reinit_completion(&tsd->xfer_completion);
		ret = tegra_sflash_start_transfer_one(spi, xfer,
					is_first_msg, single_xfer);
		if (ret < 0) {
			dev_err(tsd->dev,
				"spi can not start transfer, err %d\n", ret);
			goto exit;
		}
		is_first_msg = false;
		ret = wait_for_completion_timeout(&tsd->xfer_completion,
						SPI_DMA_TIMEOUT);
		if (WARN_ON(ret == 0)) {
			dev_err(tsd->dev,
				"spi transfer timeout, err %d\n", ret);
			ret = -EIO;
			goto exit;
		}

		if (tsd->tx_status || tsd->rx_status) {
			dev_err(tsd->dev, "Error in Transfer\n");
			ret = -EIO;
			goto exit;
		}
		msg->actual_length += xfer->len;
		if (xfer->cs_change && xfer->delay_usecs) {
			tegra_sflash_writel(tsd, tsd->def_command_reg,
					SPI_COMMAND);
			udelay(xfer->delay_usecs);
		}
	}
	ret = 0;
exit:
	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
	msg->status = ret;
	spi_finalize_current_message(master);
	return ret;
}

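/*
 * Interrupt-time continuation of a transfer: on a FIFO error or stuck BSY
 * the controller is reset and the transfer is completed with error status;
 * otherwise the RX FIFO is drained and either the transfer completes or the
 * next chunk is started.
 */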
static irqreturn_t handle_cpu_based_xfer(struct tegra_sflash_data *tsd)
{
	struct spi_transfer *t = tsd->curr_xfer;
	unsigned long flags;

	spin_lock_irqsave(&tsd->lock, flags);
	if (tsd->tx_status || tsd->rx_status || (tsd->status_reg & SPI_BSY)) {
		dev_err(tsd->dev,
			"CpuXfer ERROR bit set 0x%x\n", tsd->status_reg);
		dev_err(tsd->dev,
			"CpuXfer 0x%08x:0x%08x\n", tsd->command_reg,
				tsd->dma_control_reg);
		tegra_periph_reset_assert(tsd->clk);
		udelay(2);
		tegra_periph_reset_deassert(tsd->clk);
		complete(&tsd->xfer_completion);
		goto exit;
	}

	if (tsd->cur_direction & DATA_DIR_RX)
		tegra_sflash_read_rx_fifo_to_client_rxbuf(tsd, t);

	if (tsd->cur_direction & DATA_DIR_TX)
		tsd->cur_pos = tsd->cur_tx_pos;
	else
		tsd->cur_pos = tsd->cur_rx_pos;

	if (tsd->cur_pos == t->len) {
		complete(&tsd->xfer_completion);
		goto exit;
	}

	tegra_sflash_calculate_curr_xfer_param(tsd->cur_spi, tsd, t);
	tegra_sflash_start_cpu_based_transfer(tsd, t);
exit:
	spin_unlock_irqrestore(&tsd->lock, flags);
	return IRQ_HANDLED;
}

static irqreturn_t tegra_sflash_isr(int irq, void *context_data)
{
	struct tegra_sflash_data *tsd = context_data;

	tsd->status_reg = tegra_sflash_readl(tsd, SPI_STATUS);
	if (tsd->cur_direction & DATA_DIR_TX)
		tsd->tx_status = tsd->status_reg & SPI_TX_OVF;

	if (tsd->cur_direction & DATA_DIR_RX)
		tsd->rx_status = tsd->status_reg & SPI_RX_UNF;
	tegra_sflash_clear_status(tsd);

	return handle_cpu_based_xfer(tsd);
}

static void tegra_sflash_parse_dt(struct tegra_sflash_data *tsd)
{
	struct device_node *np = tsd->dev->of_node;

	if (of_property_read_u32(np, "spi-max-frequency",
					&tsd->spi_max_frequency))
		tsd->spi_max_frequency = 25000000;
}

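/*
 * A minimal, illustrative device-tree node for this controller (the unit
 * address, reg and interrupt values below are placeholders, not taken from
 * any particular board):
 *
 *	spi@7000c380 {
 *		compatible = "nvidia,tegra20-sflash";
 *		reg = <0x7000c380 0x80>;
 *		interrupts = <0 39 0x04>;
 *		spi-max-frequency = <25000000>;
 *	};
 *
 * Only "compatible" and the optional "spi-max-frequency" property (which
 * defaults to 25 MHz when absent) are parsed here; "reg" and "interrupts"
 * reach the driver through the platform resources used in probe().
 */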
static struct of_device_id tegra_sflash_of_match[] = {
	{ .compatible = "nvidia,tegra20-sflash", },
	{}
};
MODULE_DEVICE_TABLE(of, tegra_sflash_of_match);

static int tegra_sflash_probe(struct platform_device *pdev)
{
	struct spi_master *master;
	struct tegra_sflash_data *tsd;
	struct resource *r;
	int ret;
	const struct of_device_id *match;

	match = of_match_device(tegra_sflash_of_match, &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Error: No device match found\n");
		return -ENODEV;
	}

	master = spi_alloc_master(&pdev->dev, sizeof(*tsd));
	if (!master) {
		dev_err(&pdev->dev, "master allocation failed\n");
		return -ENOMEM;
	}

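	/* SPI mode bits (CPOL/CPHA) understood by this controller */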
	master->mode_bits = SPI_CPOL | SPI_CPHA;
	master->setup = tegra_sflash_setup;
	master->transfer_one_message = tegra_sflash_transfer_one_message;
	master->auto_runtime_pm = true;
	master->num_chipselect = MAX_CHIP_SELECT;
	master->bus_num = -1;

	platform_set_drvdata(pdev, master);
	tsd = spi_master_get_devdata(master);
	tsd->master = master;
	tsd->dev = &pdev->dev;
	spin_lock_init(&tsd->lock);

	tegra_sflash_parse_dt(tsd);

	r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	tsd->base = devm_ioremap_resource(&pdev->dev, r);
	if (IS_ERR(tsd->base)) {
		ret = PTR_ERR(tsd->base);
		goto exit_free_master;
	}

	tsd->irq = platform_get_irq(pdev, 0);
	ret = request_irq(tsd->irq, tegra_sflash_isr, 0,
			dev_name(&pdev->dev), tsd);
	if (ret < 0) {
		dev_err(&pdev->dev, "Failed to register ISR for IRQ %d\n",
					tsd->irq);
		goto exit_free_master;
	}

	tsd->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(tsd->clk)) {
		dev_err(&pdev->dev, "can not get clock\n");
		ret = PTR_ERR(tsd->clk);
		goto exit_free_irq;
	}

	init_completion(&tsd->xfer_completion);
	pm_runtime_enable(&pdev->dev);
	if (!pm_runtime_enabled(&pdev->dev)) {
		ret = tegra_sflash_runtime_resume(&pdev->dev);
		if (ret)
			goto exit_pm_disable;
	}

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "pm runtime get failed, e = %d\n", ret);
		goto exit_pm_disable;
	}

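	/* Reset the controller before writing the default command register */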
	tegra_periph_reset_assert(tsd->clk);
	udelay(2);
	tegra_periph_reset_deassert(tsd->clk);

	tsd->def_command_reg = SPI_M_S | SPI_CS_SW;
	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
	pm_runtime_put(&pdev->dev);

	master->dev.of_node = pdev->dev.of_node;
	ret = devm_spi_register_master(&pdev->dev, master);
	if (ret < 0) {
		dev_err(&pdev->dev, "can not register to master err %d\n", ret);
		goto exit_pm_disable;
	}
	return ret;

exit_pm_disable:
	pm_runtime_disable(&pdev->dev);
	if (!pm_runtime_status_suspended(&pdev->dev))
		tegra_sflash_runtime_suspend(&pdev->dev);
exit_free_irq:
	free_irq(tsd->irq, tsd);
exit_free_master:
	spi_master_put(master);
	return ret;
}

static int tegra_sflash_remove(struct platform_device *pdev)
{
	struct spi_master *master = platform_get_drvdata(pdev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);

	free_irq(tsd->irq, tsd);

	pm_runtime_disable(&pdev->dev);
	if (!pm_runtime_status_suspended(&pdev->dev))
		tegra_sflash_runtime_suspend(&pdev->dev);

	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int tegra_sflash_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);

	return spi_master_suspend(master);
}

static int tegra_sflash_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	int ret;

	ret = pm_runtime_get_sync(dev);
	if (ret < 0) {
		dev_err(dev, "pm runtime failed, e = %d\n", ret);
		return ret;
	}
	tegra_sflash_writel(tsd, tsd->command_reg, SPI_COMMAND);
	pm_runtime_put(dev);

	return spi_master_resume(master);
}
#endif

static int tegra_sflash_runtime_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);

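	/* Read a register back to flush posted writes before the clock is gated */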
	tegra_sflash_readl(tsd, SPI_COMMAND);

	clk_disable_unprepare(tsd->clk);
	return 0;
}

static int tegra_sflash_runtime_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	int ret;

	ret = clk_prepare_enable(tsd->clk);
	if (ret < 0) {
		dev_err(tsd->dev, "clk_prepare failed: %d\n", ret);
		return ret;
	}
	return 0;
}

static const struct dev_pm_ops tegra_sflash_pm_ops = {
	SET_RUNTIME_PM_OPS(tegra_sflash_runtime_suspend,
		tegra_sflash_runtime_resume, NULL)
	SET_SYSTEM_SLEEP_PM_OPS(tegra_sflash_suspend, tegra_sflash_resume)
};

static struct platform_driver tegra_sflash_driver = {
	.driver = {
		.name = "spi-tegra-sflash",
		.owner = THIS_MODULE,
		.pm = &tegra_sflash_pm_ops,
		.of_match_table = tegra_sflash_of_match,
	},
	.probe = tegra_sflash_probe,
	.remove = tegra_sflash_remove,
};
module_platform_driver(tegra_sflash_driver);

MODULE_ALIAS("platform:spi-tegra-sflash");
MODULE_DESCRIPTION("NVIDIA Tegra20 Serial Flash Controller Driver");
MODULE_AUTHOR("Laxman Dewangan <ldewangan@nvidia.com>");
MODULE_LICENSE("GPL v2");