linux/drivers/usb/musb/ux500_dma.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * drivers/usb/musb/ux500_dma.c
 *
 * U8500 DMA support code
 *
 * Copyright (C) 2009 STMicroelectronics
 * Copyright (C) 2011 ST-Ericsson SA
 * Authors:
 *      Mian Yousaf Kaukab <mian.yousaf.kaukab@stericsson.com>
 *      Praveena Nadahally <praveen.nadahally@stericsson.com>
 *      Rajaram Regupathy <ragupathy.rajaram@stericsson.com>
 */

#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/pfn.h>
#include <linux/sizes.h>
#include <linux/platform_data/usb-musb-ux500.h>
#include "musb_core.h"

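/*
 * dmaengine channel names used to look up the rx and tx channels; each DMA
 * request line is shared by two hardware endpoints (e.g. "iep_1_9" serves
 * endpoints 1 and 9).
 */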
static const char *iep_chan_names[] = { "iep_1_9", "iep_2_10", "iep_3_11", "iep_4_12",
                                        "iep_5_13", "iep_6_14", "iep_7_15", "iep_8" };
static const char *oep_chan_names[] = { "oep_1_9", "oep_2_10", "oep_3_11", "oep_4_12",
                                        "oep_5_13", "oep_6_14", "oep_7_15", "oep_8" };

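/*
 * Per-channel state: wraps the generic musb dma_channel and tracks the
 * dmaengine channel, the hw_ep it is bound to, the length of the transfer
 * currently in flight and the cookie returned when it was submitted.
 */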
struct ux500_dma_channel {
        struct dma_channel channel;
        struct ux500_dma_controller *controller;
        struct musb_hw_ep *hw_ep;
        struct dma_chan *dma_chan;
        unsigned int cur_len;
        dma_cookie_t cookie;
        u8 ch_num;
        u8 is_tx;
        u8 is_allocated;
};

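/*
 * Controller state: the generic musb dma_controller plus one rx and one tx
 * channel per hardware DMA request line, and the physical base address of
 * the MUSB register block, needed to compute FIFO addresses for dmaengine.
 */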
struct ux500_dma_controller {
        struct dma_controller controller;
        struct ux500_dma_channel rx_channel[UX500_MUSB_DMA_NUM_RX_TX_CHANNELS];
        struct ux500_dma_channel tx_channel[UX500_MUSB_DMA_NUM_RX_TX_CHANNELS];
        void *private_data;
        dma_addr_t phy_base;
};

/* DMA completion callback: update the channel state and notify the musb core. */
static void ux500_dma_callback(void *private_data)
{
        struct dma_channel *channel = private_data;
        struct ux500_dma_channel *ux500_channel = channel->private_data;
        struct musb_hw_ep       *hw_ep = ux500_channel->hw_ep;
        struct musb *musb = hw_ep->musb;
        unsigned long flags;

        dev_dbg(musb->controller, "DMA transfer done on hw_ep=%d\n",
                hw_ep->epnum);

        spin_lock_irqsave(&musb->lock, flags);
        ux500_channel->channel.actual_len = ux500_channel->cur_len;
        ux500_channel->channel.status = MUSB_DMA_STATUS_FREE;
        musb_dma_completion(musb, hw_ep->epnum, ux500_channel->is_tx);
        spin_unlock_irqrestore(&musb->lock, flags);
}

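/*
 * Map the buffer into a single-entry scatterlist, configure the dmaengine
 * slave channel for this endpoint's FIFO, then submit and issue the
 * descriptor.  Transfers whose length is not a multiple of four fall back
 * to byte-wide bus accesses.  Returns true on success, false if no
 * descriptor could be prepared.
 */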
static bool ux500_configure_channel(struct dma_channel *channel,
                                u16 packet_sz, u8 mode,
                                dma_addr_t dma_addr, u32 len)
{
        struct ux500_dma_channel *ux500_channel = channel->private_data;
        struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
        struct dma_chan *dma_chan = ux500_channel->dma_chan;
        struct dma_async_tx_descriptor *dma_desc;
        enum dma_transfer_direction direction;
        struct scatterlist sg;
        struct dma_slave_config slave_conf;
        enum dma_slave_buswidth addr_width;
        struct musb *musb = ux500_channel->controller->private_data;
        dma_addr_t usb_fifo_addr = (musb->io.fifo_offset(hw_ep->epnum) +
                                        ux500_channel->controller->phy_base);

        dev_dbg(musb->controller,
                "packet_sz=%d, mode=%d, dma_addr=0x%llx, len=%d is_tx=%d\n",
                packet_sz, mode, (unsigned long long) dma_addr,
                len, ux500_channel->is_tx);

        ux500_channel->cur_len = len;

        sg_init_table(&sg, 1);
        sg_set_page(&sg, pfn_to_page(PFN_DOWN(dma_addr)), len,
                                            offset_in_page(dma_addr));
        sg_dma_address(&sg) = dma_addr;
        sg_dma_len(&sg) = len;

        direction = ux500_channel->is_tx ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
        addr_width = (len & 0x3) ? DMA_SLAVE_BUSWIDTH_1_BYTE :
                                        DMA_SLAVE_BUSWIDTH_4_BYTES;

        slave_conf.direction = direction;
        slave_conf.src_addr = usb_fifo_addr;
        slave_conf.src_addr_width = addr_width;
        slave_conf.src_maxburst = 16;
        slave_conf.dst_addr = usb_fifo_addr;
        slave_conf.dst_addr_width = addr_width;
        slave_conf.dst_maxburst = 16;
        slave_conf.device_fc = false;

        dmaengine_slave_config(dma_chan, &slave_conf);

        dma_desc = dmaengine_prep_slave_sg(dma_chan, &sg, 1, direction,
                                             DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!dma_desc)
                return false;

        dma_desc->callback = ux500_dma_callback;
        dma_desc->callback_param = channel;
        ux500_channel->cookie = dma_desc->tx_submit(dma_desc);

        dma_async_issue_pending(dma_chan);

        return true;
}

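/* Reserve the rx or tx channel that is hard-wired to this hw_ep, if free. */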
static struct dma_channel *ux500_dma_channel_allocate(struct dma_controller *c,
                                struct musb_hw_ep *hw_ep, u8 is_tx)
{
        struct ux500_dma_controller *controller = container_of(c,
                        struct ux500_dma_controller, controller);
        struct ux500_dma_channel *ux500_channel = NULL;
        struct musb *musb = controller->private_data;
        u8 ch_num = hw_ep->epnum - 1;

        /* 8 DMA channels (0 - 7). Each DMA channel can only be allocated
         * to a specific hw_ep. For example, DMA channel 0 can only be
         * allocated to hw_ep 1 and 9.
         */
        if (ch_num > 7)
                ch_num -= 8;

        if (ch_num >= UX500_MUSB_DMA_NUM_RX_TX_CHANNELS)
                return NULL;

        ux500_channel = is_tx ? &(controller->tx_channel[ch_num]) :
                                &(controller->rx_channel[ch_num]);

        /* Check if channel is already used. */
        if (ux500_channel->is_allocated)
                return NULL;

        ux500_channel->hw_ep = hw_ep;
        ux500_channel->is_allocated = 1;

        dev_dbg(musb->controller, "hw_ep=%d, is_tx=0x%x, channel=%d\n",
                hw_ep->epnum, is_tx, ch_num);

        return &(ux500_channel->channel);
}

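/* Mark the channel as free again; the dmaengine channel itself is kept. */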
static void ux500_dma_channel_release(struct dma_channel *channel)
{
        struct ux500_dma_channel *ux500_channel = channel->private_data;
        struct musb *musb = ux500_channel->controller->private_data;

        dev_dbg(musb->controller, "channel=%d\n", ux500_channel->ch_num);

        if (ux500_channel->is_allocated) {
                ux500_channel->is_allocated = 0;
                channel->status = MUSB_DMA_STATUS_FREE;
                channel->actual_len = 0;
        }
}

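/*
 * DMA is only used for word-aligned buffers with a word-aligned length of
 * at least 512 bytes and a word-aligned maxpacket size; anything smaller
 * or misaligned is left to PIO.
 */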
static int ux500_dma_is_compatible(struct dma_channel *channel,
                u16 maxpacket, void *buf, u32 length)
{
        if ((maxpacket & 0x3)           ||
                ((unsigned long) buf & 0x3)     ||
                (length < 512)          ||
                (length & 0x3))
                return false;
        else
                return true;
}

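/* Start a transfer on an idle channel; on failure the channel stays free. */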
static int ux500_dma_channel_program(struct dma_channel *channel,
                                u16 packet_sz, u8 mode,
                                dma_addr_t dma_addr, u32 len)
{
        int ret;

        BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
                channel->status == MUSB_DMA_STATUS_BUSY);

        channel->status = MUSB_DMA_STATUS_BUSY;
        channel->actual_len = 0;
        ret = ux500_configure_channel(channel, packet_sz, mode, dma_addr, len);
        if (!ret)
                channel->status = MUSB_DMA_STATUS_FREE;

        return ret;
}

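/*
 * Abort a transfer in progress: disable DMA in the endpoint's CSR register
 * so the core stops generating requests, then terminate the dmaengine
 * channel and mark it free.
 */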
static int ux500_dma_channel_abort(struct dma_channel *channel)
{
        struct ux500_dma_channel *ux500_channel = channel->private_data;
        struct ux500_dma_controller *controller = ux500_channel->controller;
        struct musb *musb = controller->private_data;
        void __iomem *epio = musb->endpoints[ux500_channel->hw_ep->epnum].regs;
        u16 csr;

        dev_dbg(musb->controller, "channel=%d, is_tx=%d\n",
                ux500_channel->ch_num, ux500_channel->is_tx);

        if (channel->status == MUSB_DMA_STATUS_BUSY) {
                if (ux500_channel->is_tx) {
                        csr = musb_readw(epio, MUSB_TXCSR);
                        csr &= ~(MUSB_TXCSR_AUTOSET |
                                 MUSB_TXCSR_DMAENAB |
                                 MUSB_TXCSR_DMAMODE);
                        musb_writew(epio, MUSB_TXCSR, csr);
                } else {
                        csr = musb_readw(epio, MUSB_RXCSR);
                        csr &= ~(MUSB_RXCSR_AUTOCLEAR |
                                 MUSB_RXCSR_DMAENAB |
                                 MUSB_RXCSR_DMAMODE);
                        musb_writew(epio, MUSB_RXCSR, csr);
                }

                dmaengine_terminate_all(ux500_channel->dma_chan);
                channel->status = MUSB_DMA_STATUS_FREE;
        }
        return 0;
}

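/* Release all rx and tx channels and their underlying dmaengine channels. */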
static void ux500_dma_controller_stop(struct ux500_dma_controller *controller)
{
        struct ux500_dma_channel *ux500_channel;
        struct dma_channel *channel;
        u8 ch_num;

        for (ch_num = 0; ch_num < UX500_MUSB_DMA_NUM_RX_TX_CHANNELS; ch_num++) {
                channel = &controller->rx_channel[ch_num].channel;
                ux500_channel = channel->private_data;

                ux500_dma_channel_release(channel);

                if (ux500_channel->dma_chan)
                        dma_release_channel(ux500_channel->dma_chan);
        }

        for (ch_num = 0; ch_num < UX500_MUSB_DMA_NUM_RX_TX_CHANNELS; ch_num++) {
                channel = &controller->tx_channel[ch_num].channel;
                ux500_channel = channel->private_data;

                ux500_dma_channel_release(channel);

                if (ux500_channel->dma_chan)
                        dma_release_channel(ux500_channel->dma_chan);
        }
}

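/*
 * Acquire one dmaengine channel per rx and tx endpoint, preferring the
 * named slave channels and falling back to the platform data filter
 * function.  Any failure releases whatever was already acquired.
 */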
static int ux500_dma_controller_start(struct ux500_dma_controller *controller)
{
        struct ux500_dma_channel *ux500_channel = NULL;
        struct musb *musb = controller->private_data;
        struct device *dev = musb->controller;
        struct musb_hdrc_platform_data *plat = dev_get_platdata(dev);
        struct ux500_musb_board_data *data;
        struct dma_channel *dma_channel = NULL;
        char **chan_names;
        u32 ch_num;
        u8 dir;
        u8 is_tx = 0;

        void **param_array;
        struct ux500_dma_channel *channel_array;
        dma_cap_mask_t mask;

        if (!plat) {
                dev_err(musb->controller, "No platform data\n");
                return -EINVAL;
        }

        data = plat->board_data;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        /* Prepare the loop for RX channels */
        channel_array = controller->rx_channel;
        param_array = data ? data->dma_rx_param_array : NULL;
        chan_names = (char **)iep_chan_names;

        for (dir = 0; dir < 2; dir++) {
                for (ch_num = 0;
                     ch_num < UX500_MUSB_DMA_NUM_RX_TX_CHANNELS;
                     ch_num++) {
                        ux500_channel = &channel_array[ch_num];
                        ux500_channel->controller = controller;
                        ux500_channel->ch_num = ch_num;
                        ux500_channel->is_tx = is_tx;

                        dma_channel = &(ux500_channel->channel);
                        dma_channel->private_data = ux500_channel;
                        dma_channel->status = MUSB_DMA_STATUS_FREE;
                        dma_channel->max_len = SZ_16M;

                        ux500_channel->dma_chan =
                                dma_request_slave_channel(dev, chan_names[ch_num]);

                        if (!ux500_channel->dma_chan)
                                ux500_channel->dma_chan =
                                        dma_request_channel(mask,
                                                            data ?
                                                            data->dma_filter :
                                                            NULL,
                                                            param_array ?
                                                            param_array[ch_num] :
                                                            NULL);

                        if (!ux500_channel->dma_chan) {
                                ERR("Dma pipe allocation error dir=%d ch=%d\n",
                                        dir, ch_num);

                                /* Release already allocated channels */
                                ux500_dma_controller_stop(controller);

                                return -EBUSY;
                        }
                }

                /* Prepare the loop for TX channels */
                channel_array = controller->tx_channel;
                param_array = data ? data->dma_tx_param_array : NULL;
                chan_names = (char **)oep_chan_names;
                is_tx = 1;
        }

        return 0;
}

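/* Tear down a controller allocated by ux500_dma_controller_create(). */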
void ux500_dma_controller_destroy(struct dma_controller *c)
{
        struct ux500_dma_controller *controller = container_of(c,
                        struct ux500_dma_controller, controller);

        ux500_dma_controller_stop(controller);
        kfree(controller);
}
EXPORT_SYMBOL_GPL(ux500_dma_controller_destroy);

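/*
 * Allocate and initialise the controller, record the physical base of the
 * MUSB registers for FIFO addressing and wire up the musb DMA callbacks.
 * Returns NULL on any failure.
 */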
struct dma_controller *
ux500_dma_controller_create(struct musb *musb, void __iomem *base)
{
        struct ux500_dma_controller *controller;
        struct platform_device *pdev = to_platform_device(musb->controller);
        struct resource *iomem;
        int ret;

        controller = kzalloc(sizeof(*controller), GFP_KERNEL);
        if (!controller)
                goto kzalloc_fail;

        controller->private_data = musb;

        /* Save physical address for DMA controller. */
        iomem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        if (!iomem) {
                dev_err(musb->controller, "no memory resource defined\n");
                goto plat_get_fail;
        }

        controller->phy_base = (dma_addr_t) iomem->start;

        controller->controller.channel_alloc = ux500_dma_channel_allocate;
        controller->controller.channel_release = ux500_dma_channel_release;
        controller->controller.channel_program = ux500_dma_channel_program;
        controller->controller.channel_abort = ux500_dma_channel_abort;
        controller->controller.is_compatible = ux500_dma_is_compatible;

        ret = ux500_dma_controller_start(controller);
        if (ret)
                goto plat_get_fail;
        return &controller->controller;

plat_get_fail:
        kfree(controller);
kzalloc_fail:
        return NULL;
}
EXPORT_SYMBOL_GPL(ux500_dma_controller_create);