linux/drivers/spi/spi-tegra114.c
/*
 * SPI driver for NVIDIA's Tegra114 SPI Controller.
 *
 * Copyright (c) 2013, NVIDIA CORPORATION.  All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/reset.h>
#include <linux/spi/spi.h>

#define SPI_COMMAND1                            0x000
#define SPI_BIT_LENGTH(x)                       (((x) & 0x1f) << 0)
#define SPI_PACKED                              (1 << 5)
#define SPI_TX_EN                               (1 << 11)
#define SPI_RX_EN                               (1 << 12)
#define SPI_BOTH_EN_BYTE                        (1 << 13)
#define SPI_BOTH_EN_BIT                         (1 << 14)
#define SPI_LSBYTE_FE                           (1 << 15)
#define SPI_LSBIT_FE                            (1 << 16)
#define SPI_BIDIROE                             (1 << 17)
#define SPI_IDLE_SDA_DRIVE_LOW                  (0 << 18)
#define SPI_IDLE_SDA_DRIVE_HIGH                 (1 << 18)
#define SPI_IDLE_SDA_PULL_LOW                   (2 << 18)
#define SPI_IDLE_SDA_PULL_HIGH                  (3 << 18)
#define SPI_IDLE_SDA_MASK                       (3 << 18)
#define SPI_CS_SS_VAL                           (1 << 20)
#define SPI_CS_SW_HW                            (1 << 21)
/* SPI_CS_POL_INACTIVE bits are default high */
                                                /* n from 0 to 3 */
#define SPI_CS_POL_INACTIVE(n)                  (1 << (22 + (n)))
#define SPI_CS_POL_INACTIVE_MASK                (0xF << 22)

#define SPI_CS_SEL_0                            (0 << 26)
#define SPI_CS_SEL_1                            (1 << 26)
#define SPI_CS_SEL_2                            (2 << 26)
#define SPI_CS_SEL_3                            (3 << 26)
#define SPI_CS_SEL_MASK                         (3 << 26)
#define SPI_CS_SEL(x)                           (((x) & 0x3) << 26)
#define SPI_CONTROL_MODE_0                      (0 << 28)
#define SPI_CONTROL_MODE_1                      (1 << 28)
#define SPI_CONTROL_MODE_2                      (2 << 28)
#define SPI_CONTROL_MODE_3                      (3 << 28)
#define SPI_CONTROL_MODE_MASK                   (3 << 28)
#define SPI_MODE_SEL(x)                         (((x) & 0x3) << 28)
#define SPI_M_S                                 (1 << 30)
#define SPI_PIO                                 (1 << 31)

#define SPI_COMMAND2                            0x004
#define SPI_TX_TAP_DELAY(x)                     (((x) & 0x3F) << 6)
#define SPI_RX_TAP_DELAY(x)                     (((x) & 0x3F) << 0)

#define SPI_CS_TIMING1                          0x008
#define SPI_SETUP_HOLD(setup, hold)             (((setup) << 4) | (hold))
#define SPI_CS_SETUP_HOLD(reg, cs, val)                 \
                ((((val) & 0xFFu) << ((cs) * 8)) |      \
                ((reg) & ~(0xFFu << ((cs) * 8))))

#define SPI_CS_TIMING2                          0x00C
#define CYCLES_BETWEEN_PACKETS_0(x)             (((x) & 0x1F) << 0)
#define CS_ACTIVE_BETWEEN_PACKETS_0             (1 << 5)
#define CYCLES_BETWEEN_PACKETS_1(x)             (((x) & 0x1F) << 8)
#define CS_ACTIVE_BETWEEN_PACKETS_1             (1 << 13)
#define CYCLES_BETWEEN_PACKETS_2(x)             (((x) & 0x1F) << 16)
#define CS_ACTIVE_BETWEEN_PACKETS_2             (1 << 21)
#define CYCLES_BETWEEN_PACKETS_3(x)             (((x) & 0x1F) << 24)
#define CS_ACTIVE_BETWEEN_PACKETS_3             (1 << 29)
#define SPI_SET_CS_ACTIVE_BETWEEN_PACKETS(reg, cs, val)         \
                (reg = (((val) & 0x1) << ((cs) * 8 + 5)) |      \
                        ((reg) & ~(1 << ((cs) * 8 + 5))))
#define SPI_SET_CYCLES_BETWEEN_PACKETS(reg, cs, val)            \
                (reg = (((val) & 0xF) << ((cs) * 8)) |          \
                        ((reg) & ~(0xF << ((cs) * 8))))

#define SPI_TRANS_STATUS                        0x010
#define SPI_BLK_CNT(val)                        (((val) >> 0) & 0xFFFF)
#define SPI_SLV_IDLE_COUNT(val)                 (((val) >> 16) & 0xFF)
#define SPI_RDY                                 (1 << 30)

#define SPI_FIFO_STATUS                         0x014
#define SPI_RX_FIFO_EMPTY                       (1 << 0)
#define SPI_RX_FIFO_FULL                        (1 << 1)
#define SPI_TX_FIFO_EMPTY                       (1 << 2)
#define SPI_TX_FIFO_FULL                        (1 << 3)
#define SPI_RX_FIFO_UNF                         (1 << 4)
#define SPI_RX_FIFO_OVF                         (1 << 5)
#define SPI_TX_FIFO_UNF                         (1 << 6)
#define SPI_TX_FIFO_OVF                         (1 << 7)
#define SPI_ERR                                 (1 << 8)
#define SPI_TX_FIFO_FLUSH                       (1 << 14)
#define SPI_RX_FIFO_FLUSH                       (1 << 15)
#define SPI_TX_FIFO_EMPTY_COUNT(val)            (((val) >> 16) & 0x7F)
#define SPI_RX_FIFO_FULL_COUNT(val)             (((val) >> 23) & 0x7F)
#define SPI_FRAME_END                           (1 << 30)
#define SPI_CS_INACTIVE                         (1 << 31)

#define SPI_FIFO_ERROR                          (SPI_RX_FIFO_UNF | \
                        SPI_RX_FIFO_OVF | SPI_TX_FIFO_UNF | SPI_TX_FIFO_OVF)
#define SPI_FIFO_EMPTY                  (SPI_RX_FIFO_EMPTY | SPI_TX_FIFO_EMPTY)

#define SPI_TX_DATA                             0x018
#define SPI_RX_DATA                             0x01C

#define SPI_DMA_CTL                             0x020
#define SPI_TX_TRIG_1                           (0 << 15)
#define SPI_TX_TRIG_4                           (1 << 15)
#define SPI_TX_TRIG_8                           (2 << 15)
#define SPI_TX_TRIG_16                          (3 << 15)
#define SPI_TX_TRIG_MASK                        (3 << 15)
#define SPI_RX_TRIG_1                           (0 << 19)
#define SPI_RX_TRIG_4                           (1 << 19)
#define SPI_RX_TRIG_8                           (2 << 19)
#define SPI_RX_TRIG_16                          (3 << 19)
#define SPI_RX_TRIG_MASK                        (3 << 19)
#define SPI_IE_TX                               (1 << 28)
#define SPI_IE_RX                               (1 << 29)
#define SPI_CONT                                (1 << 30)
#define SPI_DMA                                 (1 << 31)
#define SPI_DMA_EN                              SPI_DMA

#define SPI_DMA_BLK                             0x024
#define SPI_DMA_BLK_SET(x)                      (((x) & 0xFFFF) << 0)

#define SPI_TX_FIFO                             0x108
#define SPI_RX_FIFO                             0x188
#define MAX_CHIP_SELECT                         4
#define SPI_FIFO_DEPTH                          64
#define DATA_DIR_TX                             (1 << 0)
#define DATA_DIR_RX                             (1 << 1)

#define SPI_DMA_TIMEOUT                         (msecs_to_jiffies(1000))
#define DEFAULT_SPI_DMA_BUF_LEN                 (16*1024)
#define TX_FIFO_EMPTY_COUNT_MAX                 SPI_TX_FIFO_EMPTY_COUNT(0x40)
#define RX_FIFO_FULL_COUNT_ZERO                 SPI_RX_FIFO_FULL_COUNT(0)
#define MAX_HOLD_CYCLES                         16
#define SPI_DEFAULT_SPEED                       25000000

struct tegra_spi_data {
        struct device                           *dev;
        struct spi_master                       *master;
        spinlock_t                              lock;

        struct clk                              *clk;
        struct reset_control                    *rst;
        void __iomem                            *base;
        phys_addr_t                             phys;
        unsigned                                irq;
        u32                                     cur_speed;

        struct spi_device                       *cur_spi;
        struct spi_device                       *cs_control;
        unsigned                                cur_pos;
        unsigned                                words_per_32bit;
        unsigned                                bytes_per_word;
        unsigned                                curr_dma_words;
        unsigned                                cur_direction;

        unsigned                                cur_rx_pos;
        unsigned                                cur_tx_pos;

        unsigned                                dma_buf_size;
        unsigned                                max_buf_size;
        bool                                    is_curr_dma_xfer;

        struct completion                       rx_dma_complete;
        struct completion                       tx_dma_complete;

        u32                                     tx_status;
        u32                                     rx_status;
        u32                                     status_reg;
        bool                                    is_packed;

        u32                                     command1_reg;
        u32                                     dma_control_reg;
        u32                                     def_command1_reg;

        struct completion                       xfer_completion;
        struct spi_transfer                     *curr_xfer;
        struct dma_chan                         *rx_dma_chan;
        u32                                     *rx_dma_buf;
        dma_addr_t                              rx_dma_phys;
        struct dma_async_tx_descriptor          *rx_dma_desc;

        struct dma_chan                         *tx_dma_chan;
        u32                                     *tx_dma_buf;
        dma_addr_t                              tx_dma_phys;
        struct dma_async_tx_descriptor          *tx_dma_desc;
};

static int tegra_spi_runtime_suspend(struct device *dev);
static int tegra_spi_runtime_resume(struct device *dev);

static inline u32 tegra_spi_readl(struct tegra_spi_data *tspi,
                unsigned long reg)
{
        return readl(tspi->base + reg);
}

static inline void tegra_spi_writel(struct tegra_spi_data *tspi,
                u32 val, unsigned long reg)
{
        writel(val, tspi->base + reg);

        /* Read back register to make sure that register writes completed */
        if (reg != SPI_TX_FIFO)
                readl(tspi->base + SPI_COMMAND1);
}

static void tegra_spi_clear_status(struct tegra_spi_data *tspi)
{
        u32 val;

        /* Write 1 to clear status register */
        val = tegra_spi_readl(tspi, SPI_TRANS_STATUS);
        tegra_spi_writel(tspi, val, SPI_TRANS_STATUS);

        /* Clear fifo status error if any */
        val = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
        if (val & SPI_ERR)
                tegra_spi_writel(tspi, SPI_ERR | SPI_FIFO_ERROR,
                                SPI_FIFO_STATUS);
}

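/*
 * Work out how the remaining part of the transfer will be handled.
 * 8- and 16-bit words are sent in packed mode (several words per 32-bit
 * FIFO entry); all other word sizes use one FIFO entry per word. The
 * number of words that fit in the bounce buffer is cached in
 * tspi->curr_dma_words, and the function returns the transfer length in
 * FIFO words, which the caller compares against the FIFO depth to choose
 * between PIO and DMA.
 */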
static unsigned tegra_spi_calculate_curr_xfer_param(
        struct spi_device *spi, struct tegra_spi_data *tspi,
        struct spi_transfer *t)
{
        unsigned remain_len = t->len - tspi->cur_pos;
        unsigned max_word;
        unsigned bits_per_word = t->bits_per_word;
        unsigned max_len;
        unsigned total_fifo_words;

        tspi->bytes_per_word = DIV_ROUND_UP(bits_per_word, 8);

        if (bits_per_word == 8 || bits_per_word == 16) {
                tspi->is_packed = 1;
                tspi->words_per_32bit = 32/bits_per_word;
        } else {
                tspi->is_packed = 0;
                tspi->words_per_32bit = 1;
        }

        if (tspi->is_packed) {
                max_len = min(remain_len, tspi->max_buf_size);
                tspi->curr_dma_words = max_len/tspi->bytes_per_word;
                total_fifo_words = (max_len + 3) / 4;
        } else {
                max_word = (remain_len - 1) / tspi->bytes_per_word + 1;
                max_word = min(max_word, tspi->max_buf_size/4);
                tspi->curr_dma_words = max_word;
                total_fifo_words = max_word;
        }
        return total_fifo_words;
}

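/*
 * Fill the TX FIFO from the client's tx_buf using PIO writes. In packed
 * mode up to four bytes are coalesced into each 32-bit FIFO entry; in
 * unpacked mode each FIFO entry carries a single SPI word. Returns the
 * number of SPI words written and advances tspi->cur_tx_pos accordingly.
 */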
static unsigned tegra_spi_fill_tx_fifo_from_client_txbuf(
        struct tegra_spi_data *tspi, struct spi_transfer *t)
{
        unsigned nbytes;
        unsigned tx_empty_count;
        u32 fifo_status;
        unsigned max_n_32bit;
        unsigned i, count;
        unsigned int written_words;
        unsigned fifo_words_left;
        u8 *tx_buf = (u8 *)t->tx_buf + tspi->cur_tx_pos;

        fifo_status = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
        tx_empty_count = SPI_TX_FIFO_EMPTY_COUNT(fifo_status);

        if (tspi->is_packed) {
                fifo_words_left = tx_empty_count * tspi->words_per_32bit;
                written_words = min(fifo_words_left, tspi->curr_dma_words);
                nbytes = written_words * tspi->bytes_per_word;
                max_n_32bit = DIV_ROUND_UP(nbytes, 4);
                for (count = 0; count < max_n_32bit; count++) {
                        u32 x = 0;
                        for (i = 0; (i < 4) && nbytes; i++, nbytes--)
                                x |= (u32)(*tx_buf++) << (i * 8);
                        tegra_spi_writel(tspi, x, SPI_TX_FIFO);
                }
        } else {
                max_n_32bit = min(tspi->curr_dma_words, tx_empty_count);
                written_words = max_n_32bit;
                nbytes = written_words * tspi->bytes_per_word;
                for (count = 0; count < max_n_32bit; count++) {
                        u32 x = 0;
                        for (i = 0; nbytes && (i < tspi->bytes_per_word);
                                                        i++, nbytes--)
                                x |= (u32)(*tx_buf++) << (i * 8);
                        tegra_spi_writel(tspi, x, SPI_TX_FIFO);
                }
        }
        tspi->cur_tx_pos += written_words * tspi->bytes_per_word;
        return written_words;
}

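/*
 * Drain the RX FIFO into the client's rx_buf using PIO reads, honouring
 * the packed/unpacked layout, and advance tspi->cur_rx_pos. Returns the
 * number of SPI words read.
 */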
static unsigned int tegra_spi_read_rx_fifo_to_client_rxbuf(
                struct tegra_spi_data *tspi, struct spi_transfer *t)
{
        unsigned rx_full_count;
        u32 fifo_status;
        unsigned i, count;
        unsigned int read_words = 0;
        unsigned len;
        u8 *rx_buf = (u8 *)t->rx_buf + tspi->cur_rx_pos;

        fifo_status = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
        rx_full_count = SPI_RX_FIFO_FULL_COUNT(fifo_status);
        if (tspi->is_packed) {
                len = tspi->curr_dma_words * tspi->bytes_per_word;
                for (count = 0; count < rx_full_count; count++) {
                        u32 x = tegra_spi_readl(tspi, SPI_RX_FIFO);
                        for (i = 0; len && (i < 4); i++, len--)
                                *rx_buf++ = (x >> i*8) & 0xFF;
                }
                tspi->cur_rx_pos += tspi->curr_dma_words * tspi->bytes_per_word;
                read_words += tspi->curr_dma_words;
        } else {
                u32 rx_mask = ((u32)1 << t->bits_per_word) - 1;
                for (count = 0; count < rx_full_count; count++) {
                        u32 x = tegra_spi_readl(tspi, SPI_RX_FIFO) & rx_mask;
                        for (i = 0; (i < tspi->bytes_per_word); i++)
                                *rx_buf++ = (x >> (i*8)) & 0xFF;
                }
                tspi->cur_rx_pos += rx_full_count * tspi->bytes_per_word;
                read_words += rx_full_count;
        }
        return read_words;
}

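/*
 * Copy the next chunk of the client's tx_buf into the TX DMA bounce
 * buffer (expanding to one SPI word per 32-bit entry in unpacked mode)
 * and hand the buffer back to the device with dma_sync_single_for_device().
 * The RX counterpart below does the reverse after a DMA completes.
 */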
static void tegra_spi_copy_client_txbuf_to_spi_txbuf(
                struct tegra_spi_data *tspi, struct spi_transfer *t)
{
        /* Make the DMA buffer accessible to the CPU for filling */
        dma_sync_single_for_cpu(tspi->dev, tspi->tx_dma_phys,
                                tspi->dma_buf_size, DMA_TO_DEVICE);

        if (tspi->is_packed) {
                unsigned len = tspi->curr_dma_words * tspi->bytes_per_word;
                memcpy(tspi->tx_dma_buf, t->tx_buf + tspi->cur_pos, len);
        } else {
                unsigned int i;
                unsigned int count;
                u8 *tx_buf = (u8 *)t->tx_buf + tspi->cur_tx_pos;
                unsigned consume = tspi->curr_dma_words * tspi->bytes_per_word;

                for (count = 0; count < tspi->curr_dma_words; count++) {
                        u32 x = 0;
                        for (i = 0; consume && (i < tspi->bytes_per_word);
                                                        i++, consume--)
                                x |= (u32)(*tx_buf++) << (i * 8);
                        tspi->tx_dma_buf[count] = x;
                }
        }
        tspi->cur_tx_pos += tspi->curr_dma_words * tspi->bytes_per_word;

        /* Hand the DMA buffer back to the device */
        dma_sync_single_for_device(tspi->dev, tspi->tx_dma_phys,
                                tspi->dma_buf_size, DMA_TO_DEVICE);
}

static void tegra_spi_copy_spi_rxbuf_to_client_rxbuf(
                struct tegra_spi_data *tspi, struct spi_transfer *t)
{
        /* Make the DMA buffer accessible to the CPU for reading */
        dma_sync_single_for_cpu(tspi->dev, tspi->rx_dma_phys,
                tspi->dma_buf_size, DMA_FROM_DEVICE);

        if (tspi->is_packed) {
                unsigned len = tspi->curr_dma_words * tspi->bytes_per_word;
                memcpy(t->rx_buf + tspi->cur_rx_pos, tspi->rx_dma_buf, len);
        } else {
                unsigned int i;
                unsigned int count;
                unsigned char *rx_buf = t->rx_buf + tspi->cur_rx_pos;
                u32 rx_mask = ((u32)1 << t->bits_per_word) - 1;

                for (count = 0; count < tspi->curr_dma_words; count++) {
                        u32 x = tspi->rx_dma_buf[count] & rx_mask;
                        for (i = 0; (i < tspi->bytes_per_word); i++)
                                *rx_buf++ = (x >> (i*8)) & 0xFF;
                }
        }
        tspi->cur_rx_pos += tspi->curr_dma_words * tspi->bytes_per_word;

        /* Hand the DMA buffer back to the device */
        dma_sync_single_for_device(tspi->dev, tspi->rx_dma_phys,
                tspi->dma_buf_size, DMA_FROM_DEVICE);
}

static void tegra_spi_dma_complete(void *args)
{
        struct completion *dma_complete = args;

        complete(dma_complete);
}

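/*
 * Prepare and submit a slave DMA descriptor for the TX bounce buffer.
 * Completion is signalled through tx_dma_complete from the dmaengine
 * callback. tegra_spi_start_rx_dma() below is the RX equivalent.
 */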
static int tegra_spi_start_tx_dma(struct tegra_spi_data *tspi, int len)
{
        reinit_completion(&tspi->tx_dma_complete);
        tspi->tx_dma_desc = dmaengine_prep_slave_single(tspi->tx_dma_chan,
                                tspi->tx_dma_phys, len, DMA_MEM_TO_DEV,
                                DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tspi->tx_dma_desc) {
                dev_err(tspi->dev, "Not able to get desc for Tx\n");
                return -EIO;
        }

        tspi->tx_dma_desc->callback = tegra_spi_dma_complete;
        tspi->tx_dma_desc->callback_param = &tspi->tx_dma_complete;

        dmaengine_submit(tspi->tx_dma_desc);
        dma_async_issue_pending(tspi->tx_dma_chan);
        return 0;
}

static int tegra_spi_start_rx_dma(struct tegra_spi_data *tspi, int len)
{
        reinit_completion(&tspi->rx_dma_complete);
        tspi->rx_dma_desc = dmaengine_prep_slave_single(tspi->rx_dma_chan,
                                tspi->rx_dma_phys, len, DMA_DEV_TO_MEM,
                                DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tspi->rx_dma_desc) {
                dev_err(tspi->dev, "Not able to get desc for Rx\n");
                return -EIO;
        }

        tspi->rx_dma_desc->callback = tegra_spi_dma_complete;
        tspi->rx_dma_desc->callback_param = &tspi->rx_dma_complete;

        dmaengine_submit(tspi->rx_dma_desc);
        dma_async_issue_pending(tspi->rx_dma_chan);
        return 0;
}

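/*
 * Program the controller for a DMA transfer: set the block count, pick
 * the FIFO trigger level from the transfer length, enable interrupts for
 * the directions in use, kick the dmaengine channels and finally set
 * SPI_DMA_EN to start the transfer.
 */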
static int tegra_spi_start_dma_based_transfer(
                struct tegra_spi_data *tspi, struct spi_transfer *t)
{
        u32 val;
        unsigned int len;
        int ret = 0;
        u32 status;

        /* Make sure that Rx and Tx fifo are empty */
        status = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
        if ((status & SPI_FIFO_EMPTY) != SPI_FIFO_EMPTY) {
                dev_err(tspi->dev, "Rx/Tx fifo are not empty, status 0x%08x\n",
                        (unsigned)status);
                return -EIO;
        }

        val = SPI_DMA_BLK_SET(tspi->curr_dma_words - 1);
        tegra_spi_writel(tspi, val, SPI_DMA_BLK);

        if (tspi->is_packed)
                len = DIV_ROUND_UP(tspi->curr_dma_words * tspi->bytes_per_word,
                                        4) * 4;
        else
                len = tspi->curr_dma_words * 4;

        /* Set attention level based on length of transfer */
        if (len & 0xF)
                val |= SPI_TX_TRIG_1 | SPI_RX_TRIG_1;
        else if (((len) >> 4) & 0x1)
                val |= SPI_TX_TRIG_4 | SPI_RX_TRIG_4;
        else
                val |= SPI_TX_TRIG_8 | SPI_RX_TRIG_8;

        if (tspi->cur_direction & DATA_DIR_TX)
                val |= SPI_IE_TX;

        if (tspi->cur_direction & DATA_DIR_RX)
                val |= SPI_IE_RX;

        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
        tspi->dma_control_reg = val;

        if (tspi->cur_direction & DATA_DIR_TX) {
                tegra_spi_copy_client_txbuf_to_spi_txbuf(tspi, t);
                ret = tegra_spi_start_tx_dma(tspi, len);
                if (ret < 0) {
                        dev_err(tspi->dev,
                                "Starting tx dma failed, err %d\n", ret);
                        return ret;
                }
        }

        if (tspi->cur_direction & DATA_DIR_RX) {
                /* Hand the DMA buffer to the device before starting RX DMA */
                dma_sync_single_for_device(tspi->dev, tspi->rx_dma_phys,
                                tspi->dma_buf_size, DMA_FROM_DEVICE);

                ret = tegra_spi_start_rx_dma(tspi, len);
                if (ret < 0) {
                        dev_err(tspi->dev,
                                "Starting rx dma failed, err %d\n", ret);
                        if (tspi->cur_direction & DATA_DIR_TX)
                                dmaengine_terminate_all(tspi->tx_dma_chan);
                        return ret;
                }
        }
        tspi->is_curr_dma_xfer = true;
        tspi->dma_control_reg = val;

        val |= SPI_DMA_EN;
        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
        return ret;
}

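/*
 * Start a PIO (interrupt driven) transfer: preload the TX FIFO, program
 * the block count and the interrupt enables, then set SPI_DMA_EN, which
 * the code relies on to kick FIFO based transfers as well.
 */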
static int tegra_spi_start_cpu_based_transfer(
                struct tegra_spi_data *tspi, struct spi_transfer *t)
{
        u32 val;
        unsigned cur_words;

        if (tspi->cur_direction & DATA_DIR_TX)
                cur_words = tegra_spi_fill_tx_fifo_from_client_txbuf(tspi, t);
        else
                cur_words = tspi->curr_dma_words;

        val = SPI_DMA_BLK_SET(cur_words - 1);
        tegra_spi_writel(tspi, val, SPI_DMA_BLK);

        val = 0;
        if (tspi->cur_direction & DATA_DIR_TX)
                val |= SPI_IE_TX;

        if (tspi->cur_direction & DATA_DIR_RX)
                val |= SPI_IE_RX;

        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
        tspi->dma_control_reg = val;

        tspi->is_curr_dma_xfer = false;

        val |= SPI_DMA_EN;
        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
        return 0;
}

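/*
 * Request a TX or RX dmaengine channel, allocate the coherent bounce
 * buffer and configure the slave addresses to point at the SPI FIFOs.
 * Called once per direction from probe; tegra_spi_deinit_dma_param()
 * undoes it.
 */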
static int tegra_spi_init_dma_param(struct tegra_spi_data *tspi,
                        bool dma_to_memory)
{
        struct dma_chan *dma_chan;
        u32 *dma_buf;
        dma_addr_t dma_phys;
        int ret;
        struct dma_slave_config dma_sconfig;

        dma_chan = dma_request_slave_channel_reason(tspi->dev,
                                        dma_to_memory ? "rx" : "tx");
        if (IS_ERR(dma_chan)) {
                ret = PTR_ERR(dma_chan);
                if (ret != -EPROBE_DEFER)
                        dev_err(tspi->dev,
                                "Dma channel is not available: %d\n", ret);
                return ret;
        }

        dma_buf = dma_alloc_coherent(tspi->dev, tspi->dma_buf_size,
                                &dma_phys, GFP_KERNEL);
        if (!dma_buf) {
                dev_err(tspi->dev, "Not able to allocate the dma buffer\n");
                dma_release_channel(dma_chan);
                return -ENOMEM;
        }

        if (dma_to_memory) {
                dma_sconfig.src_addr = tspi->phys + SPI_RX_FIFO;
                dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
                dma_sconfig.src_maxburst = 0;
        } else {
                dma_sconfig.dst_addr = tspi->phys + SPI_TX_FIFO;
                dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
                dma_sconfig.dst_maxburst = 0;
        }

        ret = dmaengine_slave_config(dma_chan, &dma_sconfig);
        if (ret)
                goto scrub;
        if (dma_to_memory) {
                tspi->rx_dma_chan = dma_chan;
                tspi->rx_dma_buf = dma_buf;
                tspi->rx_dma_phys = dma_phys;
        } else {
                tspi->tx_dma_chan = dma_chan;
                tspi->tx_dma_buf = dma_buf;
                tspi->tx_dma_phys = dma_phys;
        }
        return 0;

scrub:
        dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys);
        dma_release_channel(dma_chan);
        return ret;
}

static void tegra_spi_deinit_dma_param(struct tegra_spi_data *tspi,
        bool dma_to_memory)
{
        u32 *dma_buf;
        dma_addr_t dma_phys;
        struct dma_chan *dma_chan;

        if (dma_to_memory) {
                dma_buf = tspi->rx_dma_buf;
                dma_chan = tspi->rx_dma_chan;
                dma_phys = tspi->rx_dma_phys;
                tspi->rx_dma_chan = NULL;
                tspi->rx_dma_buf = NULL;
        } else {
                dma_buf = tspi->tx_dma_buf;
                dma_chan = tspi->tx_dma_chan;
                dma_phys = tspi->tx_dma_phys;
                tspi->tx_dma_buf = NULL;
                tspi->tx_dma_chan = NULL;
        }
        if (!dma_chan)
                return;

        dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys);
        dma_release_channel(dma_chan);
}

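/*
 * Build the SPI_COMMAND1 value for one transfer: update the clock rate
 * if it changed, and on the first transfer of a message also program the
 * SPI mode, switch chip select to software control and set its value
 * according to the polarity requested by the client.
 */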
static u32 tegra_spi_setup_transfer_one(struct spi_device *spi,
                struct spi_transfer *t, bool is_first_of_msg)
{
        struct tegra_spi_data *tspi = spi_master_get_devdata(spi->master);
        u32 speed = t->speed_hz;
        u8 bits_per_word = t->bits_per_word;
        u32 command1;
        int req_mode;

        if (speed != tspi->cur_speed) {
                clk_set_rate(tspi->clk, speed);
                tspi->cur_speed = speed;
        }

        tspi->cur_spi = spi;
        tspi->cur_pos = 0;
        tspi->cur_rx_pos = 0;
        tspi->cur_tx_pos = 0;
        tspi->curr_xfer = t;

        if (is_first_of_msg) {
                tegra_spi_clear_status(tspi);

                command1 = tspi->def_command1_reg;
                command1 |= SPI_BIT_LENGTH(bits_per_word - 1);

                command1 &= ~SPI_CONTROL_MODE_MASK;
                req_mode = spi->mode & 0x3;
                if (req_mode == SPI_MODE_0)
                        command1 |= SPI_CONTROL_MODE_0;
                else if (req_mode == SPI_MODE_1)
                        command1 |= SPI_CONTROL_MODE_1;
                else if (req_mode == SPI_MODE_2)
                        command1 |= SPI_CONTROL_MODE_2;
                else if (req_mode == SPI_MODE_3)
                        command1 |= SPI_CONTROL_MODE_3;

                if (tspi->cs_control) {
                        if (tspi->cs_control != spi)
                                tegra_spi_writel(tspi, command1, SPI_COMMAND1);
                        tspi->cs_control = NULL;
                } else
                        tegra_spi_writel(tspi, command1, SPI_COMMAND1);

                command1 |= SPI_CS_SW_HW;
                if (spi->mode & SPI_CS_HIGH)
                        command1 |= SPI_CS_SS_VAL;
                else
                        command1 &= ~SPI_CS_SS_VAL;

                tegra_spi_writel(tspi, 0, SPI_COMMAND2);
        } else {
                command1 = tspi->command1_reg;
                command1 &= ~SPI_BIT_LENGTH(~0);
                command1 |= SPI_BIT_LENGTH(bits_per_word - 1);
        }

        return command1;
}

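/*
 * Finish programming SPI_COMMAND1 (packed mode, TX/RX enables, chip
 * select line) for one transfer and start it, using DMA when the data
 * does not fit in the 64-word FIFO and PIO otherwise.
 */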
static int tegra_spi_start_transfer_one(struct spi_device *spi,
                struct spi_transfer *t, u32 command1)
{
        struct tegra_spi_data *tspi = spi_master_get_devdata(spi->master);
        unsigned total_fifo_words;
        int ret;

        total_fifo_words = tegra_spi_calculate_curr_xfer_param(spi, tspi, t);

        if (tspi->is_packed)
                command1 |= SPI_PACKED;

        command1 &= ~(SPI_CS_SEL_MASK | SPI_TX_EN | SPI_RX_EN);
        tspi->cur_direction = 0;
        if (t->rx_buf) {
                command1 |= SPI_RX_EN;
                tspi->cur_direction |= DATA_DIR_RX;
        }
        if (t->tx_buf) {
                command1 |= SPI_TX_EN;
                tspi->cur_direction |= DATA_DIR_TX;
        }
        command1 |= SPI_CS_SEL(spi->chip_select);
        tegra_spi_writel(tspi, command1, SPI_COMMAND1);
        tspi->command1_reg = command1;

        dev_dbg(tspi->dev, "The def 0x%x and written 0x%x\n",
                tspi->def_command1_reg, (unsigned)command1);

        if (total_fifo_words > SPI_FIFO_DEPTH)
                ret = tegra_spi_start_dma_based_transfer(tspi, t);
        else
                ret = tegra_spi_start_cpu_based_transfer(tspi, t);
        return ret;
}

static int tegra_spi_setup(struct spi_device *spi)
{
        struct tegra_spi_data *tspi = spi_master_get_devdata(spi->master);
        u32 val;
        unsigned long flags;
        int ret;

        dev_dbg(&spi->dev, "setup %d bpw, %scpol, %scpha, %dHz\n",
                spi->bits_per_word,
                spi->mode & SPI_CPOL ? "" : "~",
                spi->mode & SPI_CPHA ? "" : "~",
                spi->max_speed_hz);

        ret = pm_runtime_get_sync(tspi->dev);
        if (ret < 0) {
                dev_err(tspi->dev, "pm runtime failed, e = %d\n", ret);
                return ret;
        }

        spin_lock_irqsave(&tspi->lock, flags);
        val = tspi->def_command1_reg;
        if (spi->mode & SPI_CS_HIGH)
                val &= ~SPI_CS_POL_INACTIVE(spi->chip_select);
        else
                val |= SPI_CS_POL_INACTIVE(spi->chip_select);
        tspi->def_command1_reg = val;
        tegra_spi_writel(tspi, tspi->def_command1_reg, SPI_COMMAND1);
        spin_unlock_irqrestore(&tspi->lock, flags);

        pm_runtime_put(tspi->dev);
        return 0;
}

static void tegra_spi_transfer_delay(int delay)
{
        if (!delay)
                return;

        if (delay >= 1000)
                mdelay(delay / 1000);

        udelay(delay % 1000);
}

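/*
 * spi_master transfer_one_message() callback. Transfers are run one at a
 * time and completion is signalled from the interrupt thread; a timeout
 * or a FIFO error aborts the message. Chip select handling between
 * transfers follows cs_change and whether the transfer is the last one
 * in the message.
 */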
static int tegra_spi_transfer_one_message(struct spi_master *master,
                        struct spi_message *msg)
{
        bool is_first_msg = true;
        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
        struct spi_transfer *xfer;
        struct spi_device *spi = msg->spi;
        int ret;
        bool skip = false;

        msg->status = 0;
        msg->actual_length = 0;

        list_for_each_entry(xfer, &msg->transfers, transfer_list) {
                u32 cmd1;

                reinit_completion(&tspi->xfer_completion);

                cmd1 = tegra_spi_setup_transfer_one(spi, xfer, is_first_msg);

                if (!xfer->len) {
                        ret = 0;
                        skip = true;
                        goto complete_xfer;
                }

                ret = tegra_spi_start_transfer_one(spi, xfer, cmd1);
                if (ret < 0) {
                        dev_err(tspi->dev,
                                "spi can not start transfer, err %d\n", ret);
                        goto complete_xfer;
                }

                is_first_msg = false;
                ret = wait_for_completion_timeout(&tspi->xfer_completion,
                                                SPI_DMA_TIMEOUT);
                if (WARN_ON(ret == 0)) {
                        dev_err(tspi->dev,
                                "spi transfer timeout, err %d\n", ret);
                        ret = -EIO;
                        goto complete_xfer;
                }

                if (tspi->tx_status || tspi->rx_status) {
                        dev_err(tspi->dev, "Error in Transfer\n");
                        ret = -EIO;
                        goto complete_xfer;
                }
                msg->actual_length += xfer->len;

complete_xfer:
                if (ret < 0 || skip) {
                        tegra_spi_writel(tspi, tspi->def_command1_reg,
                                        SPI_COMMAND1);
                        tegra_spi_transfer_delay(xfer->delay_usecs);
                        goto exit;
                } else if (list_is_last(&xfer->transfer_list,
                                        &msg->transfers)) {
                        if (xfer->cs_change)
                                tspi->cs_control = spi;
                        else {
                                tegra_spi_writel(tspi, tspi->def_command1_reg,
                                                SPI_COMMAND1);
                                tegra_spi_transfer_delay(xfer->delay_usecs);
                        }
                } else if (xfer->cs_change) {
                        tegra_spi_writel(tspi, tspi->def_command1_reg,
                                        SPI_COMMAND1);
                        tegra_spi_transfer_delay(xfer->delay_usecs);
                }
        }
        ret = 0;
exit:
        msg->status = ret;
        spi_finalize_current_message(master);
        return ret;
}

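/*
 * Interrupt-thread handler for PIO transfers: on FIFO errors the
 * controller is reset and the waiter woken; otherwise the RX FIFO is
 * drained and, if the transfer is not finished yet, the next chunk is
 * started.
 */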
static irqreturn_t handle_cpu_based_xfer(struct tegra_spi_data *tspi)
{
        struct spi_transfer *t = tspi->curr_xfer;
        unsigned long flags;

        spin_lock_irqsave(&tspi->lock, flags);
        if (tspi->tx_status || tspi->rx_status) {
                dev_err(tspi->dev, "CpuXfer ERROR bit set 0x%x\n",
                        tspi->status_reg);
                dev_err(tspi->dev, "CpuXfer 0x%08x:0x%08x\n",
                        tspi->command1_reg, tspi->dma_control_reg);
                reset_control_assert(tspi->rst);
                udelay(2);
                reset_control_deassert(tspi->rst);
                complete(&tspi->xfer_completion);
                goto exit;
        }

        if (tspi->cur_direction & DATA_DIR_RX)
                tegra_spi_read_rx_fifo_to_client_rxbuf(tspi, t);

        if (tspi->cur_direction & DATA_DIR_TX)
                tspi->cur_pos = tspi->cur_tx_pos;
        else
                tspi->cur_pos = tspi->cur_rx_pos;

        if (tspi->cur_pos == t->len) {
                complete(&tspi->xfer_completion);
                goto exit;
        }

        tegra_spi_calculate_curr_xfer_param(tspi->cur_spi, tspi, t);
        tegra_spi_start_cpu_based_transfer(tspi, t);
exit:
        spin_unlock_irqrestore(&tspi->lock, flags);
        return IRQ_HANDLED;
}

static irqreturn_t handle_dma_based_xfer(struct tegra_spi_data *tspi)
{
        struct spi_transfer *t = tspi->curr_xfer;
        long wait_status;
        int err = 0;
        unsigned total_fifo_words;
        unsigned long flags;

        /* Abort dmas if any error */
        if (tspi->cur_direction & DATA_DIR_TX) {
                if (tspi->tx_status) {
                        dmaengine_terminate_all(tspi->tx_dma_chan);
                        err += 1;
                } else {
                        wait_status = wait_for_completion_interruptible_timeout(
                                &tspi->tx_dma_complete, SPI_DMA_TIMEOUT);
                        if (wait_status <= 0) {
                                dmaengine_terminate_all(tspi->tx_dma_chan);
                                dev_err(tspi->dev, "TxDma Xfer failed\n");
                                err += 1;
                        }
                }
        }

        if (tspi->cur_direction & DATA_DIR_RX) {
                if (tspi->rx_status) {
                        dmaengine_terminate_all(tspi->rx_dma_chan);
                        err += 2;
                } else {
                        wait_status = wait_for_completion_interruptible_timeout(
                                &tspi->rx_dma_complete, SPI_DMA_TIMEOUT);
                        if (wait_status <= 0) {
                                dmaengine_terminate_all(tspi->rx_dma_chan);
                                dev_err(tspi->dev, "RxDma Xfer failed\n");
                                err += 2;
                        }
                }
        }

        spin_lock_irqsave(&tspi->lock, flags);
        if (err) {
                dev_err(tspi->dev, "DmaXfer: ERROR bit set 0x%x\n",
                        tspi->status_reg);
                dev_err(tspi->dev, "DmaXfer 0x%08x:0x%08x\n",
                        tspi->command1_reg, tspi->dma_control_reg);
                reset_control_assert(tspi->rst);
                udelay(2);
                reset_control_deassert(tspi->rst);
                complete(&tspi->xfer_completion);
                spin_unlock_irqrestore(&tspi->lock, flags);
                return IRQ_HANDLED;
        }

        if (tspi->cur_direction & DATA_DIR_RX)
                tegra_spi_copy_spi_rxbuf_to_client_rxbuf(tspi, t);

        if (tspi->cur_direction & DATA_DIR_TX)
                tspi->cur_pos = tspi->cur_tx_pos;
        else
                tspi->cur_pos = tspi->cur_rx_pos;

        if (tspi->cur_pos == t->len) {
                complete(&tspi->xfer_completion);
                goto exit;
        }

        /* Continue transfer in current message */
        total_fifo_words = tegra_spi_calculate_curr_xfer_param(tspi->cur_spi,
                                                        tspi, t);
        if (total_fifo_words > SPI_FIFO_DEPTH)
                err = tegra_spi_start_dma_based_transfer(tspi, t);
        else
                err = tegra_spi_start_cpu_based_transfer(tspi, t);

exit:
        spin_unlock_irqrestore(&tspi->lock, flags);
        return IRQ_HANDLED;
}

static irqreturn_t tegra_spi_isr_thread(int irq, void *context_data)
{
        struct tegra_spi_data *tspi = context_data;

        if (!tspi->is_curr_dma_xfer)
                return handle_cpu_based_xfer(tspi);
        return handle_dma_based_xfer(tspi);
}

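/*
 * Hard IRQ handler: snapshot the FIFO status and per-direction error
 * bits, clear the interrupt status and defer the actual data handling to
 * the threaded handler above.
 */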
static irqreturn_t tegra_spi_isr(int irq, void *context_data)
{
        struct tegra_spi_data *tspi = context_data;

        tspi->status_reg = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
        if (tspi->cur_direction & DATA_DIR_TX)
                tspi->tx_status = tspi->status_reg &
                                        (SPI_TX_FIFO_UNF | SPI_TX_FIFO_OVF);

        if (tspi->cur_direction & DATA_DIR_RX)
                tspi->rx_status = tspi->status_reg &
                                        (SPI_RX_FIFO_OVF | SPI_RX_FIFO_UNF);
        tegra_spi_clear_status(tspi);

        return IRQ_WAKE_THREAD;
}

static const struct of_device_id tegra_spi_of_match[] = {
        { .compatible = "nvidia,tegra114-spi", },
        {}
};
MODULE_DEVICE_TABLE(of, tegra_spi_of_match);

static int tegra_spi_probe(struct platform_device *pdev)
{
        struct spi_master       *master;
        struct tegra_spi_data   *tspi;
        struct resource         *r;
        int ret, spi_irq;

        master = spi_alloc_master(&pdev->dev, sizeof(*tspi));
        if (!master) {
                dev_err(&pdev->dev, "master allocation failed\n");
                return -ENOMEM;
        }
        platform_set_drvdata(pdev, master);
        tspi = spi_master_get_devdata(master);

        if (of_property_read_u32(pdev->dev.of_node, "spi-max-frequency",
                                 &master->max_speed_hz))
                master->max_speed_hz = 25000000; /* 25MHz */

        /* the spi->mode bits understood by this driver: */
        master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH;
        master->setup = tegra_spi_setup;
        master->transfer_one_message = tegra_spi_transfer_one_message;
        master->num_chipselect = MAX_CHIP_SELECT;
        master->auto_runtime_pm = true;

        tspi->master = master;
        tspi->dev = &pdev->dev;
        spin_lock_init(&tspi->lock);

        r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        tspi->base = devm_ioremap_resource(&pdev->dev, r);
        if (IS_ERR(tspi->base)) {
                ret = PTR_ERR(tspi->base);
                goto exit_free_master;
        }
        tspi->phys = r->start;

        spi_irq = platform_get_irq(pdev, 0);
        tspi->irq = spi_irq;
        ret = request_threaded_irq(tspi->irq, tegra_spi_isr,
                        tegra_spi_isr_thread, IRQF_ONESHOT,
                        dev_name(&pdev->dev), tspi);
        if (ret < 0) {
                dev_err(&pdev->dev, "Failed to register ISR for IRQ %d\n",
                                        tspi->irq);
                goto exit_free_master;
        }

        tspi->clk = devm_clk_get(&pdev->dev, "spi");
        if (IS_ERR(tspi->clk)) {
                dev_err(&pdev->dev, "can not get clock\n");
                ret = PTR_ERR(tspi->clk);
                goto exit_free_irq;
        }

        tspi->rst = devm_reset_control_get(&pdev->dev, "spi");
        if (IS_ERR(tspi->rst)) {
                dev_err(&pdev->dev, "can not get reset\n");
                ret = PTR_ERR(tspi->rst);
                goto exit_free_irq;
        }

        tspi->max_buf_size = SPI_FIFO_DEPTH << 2;
        tspi->dma_buf_size = DEFAULT_SPI_DMA_BUF_LEN;

        ret = tegra_spi_init_dma_param(tspi, true);
        if (ret < 0)
                goto exit_free_irq;
        ret = tegra_spi_init_dma_param(tspi, false);
        if (ret < 0)
                goto exit_rx_dma_free;
        tspi->max_buf_size = tspi->dma_buf_size;
        init_completion(&tspi->tx_dma_complete);
        init_completion(&tspi->rx_dma_complete);

        init_completion(&tspi->xfer_completion);

        pm_runtime_enable(&pdev->dev);
        if (!pm_runtime_enabled(&pdev->dev)) {
                ret = tegra_spi_runtime_resume(&pdev->dev);
                if (ret)
                        goto exit_pm_disable;
        }

        ret = pm_runtime_get_sync(&pdev->dev);
        if (ret < 0) {
                dev_err(&pdev->dev, "pm runtime get failed, e = %d\n", ret);
                goto exit_pm_disable;
        }
        tspi->def_command1_reg = SPI_M_S;
        tegra_spi_writel(tspi, tspi->def_command1_reg, SPI_COMMAND1);
        pm_runtime_put(&pdev->dev);

        master->dev.of_node = pdev->dev.of_node;
        ret = devm_spi_register_master(&pdev->dev, master);
        if (ret < 0) {
                dev_err(&pdev->dev, "can not register to master err %d\n", ret);
                goto exit_pm_disable;
        }
        return ret;

exit_pm_disable:
        pm_runtime_disable(&pdev->dev);
        if (!pm_runtime_status_suspended(&pdev->dev))
                tegra_spi_runtime_suspend(&pdev->dev);
        tegra_spi_deinit_dma_param(tspi, false);
exit_rx_dma_free:
        tegra_spi_deinit_dma_param(tspi, true);
exit_free_irq:
        free_irq(spi_irq, tspi);
exit_free_master:
        spi_master_put(master);
        return ret;
}

static int tegra_spi_remove(struct platform_device *pdev)
{
        struct spi_master *master = platform_get_drvdata(pdev);
        struct tegra_spi_data   *tspi = spi_master_get_devdata(master);

        free_irq(tspi->irq, tspi);

        if (tspi->tx_dma_chan)
                tegra_spi_deinit_dma_param(tspi, false);

        if (tspi->rx_dma_chan)
                tegra_spi_deinit_dma_param(tspi, true);

        pm_runtime_disable(&pdev->dev);
        if (!pm_runtime_status_suspended(&pdev->dev))
                tegra_spi_runtime_suspend(&pdev->dev);

        return 0;
}

#ifdef CONFIG_PM_SLEEP
static int tegra_spi_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);

        return spi_master_suspend(master);
}

static int tegra_spi_resume(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
        int ret;

        ret = pm_runtime_get_sync(dev);
        if (ret < 0) {
                dev_err(dev, "pm runtime failed, e = %d\n", ret);
                return ret;
        }
        tegra_spi_writel(tspi, tspi->command1_reg, SPI_COMMAND1);
        pm_runtime_put(dev);

        return spi_master_resume(master);
}
#endif

static int tegra_spi_runtime_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct tegra_spi_data *tspi = spi_master_get_devdata(master);

        /* Flush all writes which are in the PPSB queue by reading back */
        tegra_spi_readl(tspi, SPI_COMMAND1);

        clk_disable_unprepare(tspi->clk);
        return 0;
}

static int tegra_spi_runtime_resume(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
        int ret;

        ret = clk_prepare_enable(tspi->clk);
        if (ret < 0) {
                dev_err(tspi->dev, "clk_prepare failed: %d\n", ret);
                return ret;
        }
        return 0;
}

static const struct dev_pm_ops tegra_spi_pm_ops = {
        SET_RUNTIME_PM_OPS(tegra_spi_runtime_suspend,
                tegra_spi_runtime_resume, NULL)
        SET_SYSTEM_SLEEP_PM_OPS(tegra_spi_suspend, tegra_spi_resume)
};
static struct platform_driver tegra_spi_driver = {
        .driver = {
                .name           = "spi-tegra114",
                .owner          = THIS_MODULE,
                .pm             = &tegra_spi_pm_ops,
                .of_match_table = tegra_spi_of_match,
        },
        .probe =        tegra_spi_probe,
        .remove =       tegra_spi_remove,
};
module_platform_driver(tegra_spi_driver);

MODULE_ALIAS("platform:spi-tegra114");
MODULE_DESCRIPTION("NVIDIA Tegra114 SPI Controller Driver");
MODULE_AUTHOR("Laxman Dewangan <ldewangan@nvidia.com>");
MODULE_LICENSE("GPL v2");