linux/drivers/dma/xilinx/xilinx_frmbuf.c
   1// SPDX-License-Identifier: GPL-2.0-or-later
   2/*
   3 * DMAEngine driver for Xilinx Framebuffer IP
   4 *
   5 * Copyright (C) 2016 - 2021 Xilinx, Inc.
   6 *
   7 * Authors: Radhey Shyam Pandey <radheys@xilinx.com>
   8 *          John Nichols <jnichol@xilinx.com>
   9 *          Jeffrey Mouroux <jmouroux@xilinx.com>
  10 *
  11 * Based on the Freescale DMA driver.
  12 *
  13 * Description:
  14 * The AXI Framebuffer core is a soft Xilinx IP core that
  15 * provides high-bandwidth direct memory access between memory
  16 * and AXI4-Stream.
  17 */
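/*
 * Consumer usage sketch (illustrative only; the lookup name "vid-frmbuf",
 * the surrounding driver context and the template pointer xt are
 * hypothetical, error handling trimmed):
 *
 *        struct dma_chan *dma;
 *        struct dma_async_tx_descriptor *tx;
 *        struct dma_interleaved_template *xt;  // filled per frame geometry
 *
 *        dma = dma_request_chan(dev, "vid-frmbuf");
 *        xilinx_xdma_drm_config(dma, DRM_FORMAT_NV12);  // pick format first
 *        tx = dmaengine_prep_interleaved_dma(dma, xt, DMA_PREP_INTERRUPT);
 *        dmaengine_submit(tx);
 *        dma_async_issue_pending(dma);
 */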
  18
  19#include <linux/bitops.h>
  20#include <linux/clk.h>
  21#include <linux/delay.h>
  22#include <linux/dma/xilinx_frmbuf.h>
  23#include <linux/dmapool.h>
  24#include <linux/gpio/consumer.h>
  25#include <linux/init.h>
  26#include <linux/interrupt.h>
  27#include <linux/io.h>
  28#include <linux/iopoll.h>
  29#include <linux/module.h>
  30#include <linux/of_address.h>
  31#include <linux/of_dma.h>
  32#include <linux/of_irq.h>
  33#include <linux/of_platform.h>
  34#include <linux/slab.h>
  35#include <linux/videodev2.h>
  36
  37#include <drm/drm_fourcc.h>
  38
  39#include "../dmaengine.h"
  40
  41/* Register/Descriptor Offsets */
  42#define XILINX_FRMBUF_CTRL_OFFSET               0x00
  43#define XILINX_FRMBUF_GIE_OFFSET                0x04
  44#define XILINX_FRMBUF_IE_OFFSET                 0x08
  45#define XILINX_FRMBUF_ISR_OFFSET                0x0c
  46#define XILINX_FRMBUF_WIDTH_OFFSET              0x10
  47#define XILINX_FRMBUF_HEIGHT_OFFSET             0x18
  48#define XILINX_FRMBUF_STRIDE_OFFSET             0x20
  49#define XILINX_FRMBUF_FMT_OFFSET                0x28
  50#define XILINX_FRMBUF_ADDR_OFFSET               0x30
  51#define XILINX_FRMBUF_ADDR2_OFFSET              0x3c
  52#define XILINX_FRMBUF_FID_OFFSET                0x48
   53#define XILINX_FRMBUF_FID_MODE_OFFSET           0x50
  54#define XILINX_FRMBUF_ADDR3_OFFSET              0x54
   55#define XILINX_FRMBUF_FID_ERR_OFFSET            0x58
   56#define XILINX_FRMBUF_FID_OUT_OFFSET            0x60
  57#define XILINX_FRMBUF_RD_ADDR3_OFFSET           0x74
  58
  59/* Control Registers */
  60#define XILINX_FRMBUF_CTRL_AP_START             BIT(0)
  61#define XILINX_FRMBUF_CTRL_AP_DONE              BIT(1)
  62#define XILINX_FRMBUF_CTRL_AP_IDLE              BIT(2)
  63#define XILINX_FRMBUF_CTRL_AP_READY             BIT(3)
  64#define XILINX_FRMBUF_CTRL_FLUSH                BIT(5)
  65#define XILINX_FRMBUF_CTRL_FLUSH_DONE           BIT(6)
  66#define XILINX_FRMBUF_CTRL_AUTO_RESTART         BIT(7)
  67#define XILINX_FRMBUF_GIE_EN                    BIT(0)
  68
  69/* Interrupt Status and Control */
  70#define XILINX_FRMBUF_IE_AP_DONE                BIT(0)
  71#define XILINX_FRMBUF_IE_AP_READY               BIT(1)
  72
  73#define XILINX_FRMBUF_ISR_AP_DONE_IRQ           BIT(0)
  74#define XILINX_FRMBUF_ISR_AP_READY_IRQ          BIT(1)
  75
  76#define XILINX_FRMBUF_ISR_ALL_IRQ_MASK  \
  77                (XILINX_FRMBUF_ISR_AP_DONE_IRQ | \
  78                XILINX_FRMBUF_ISR_AP_READY_IRQ)
  79
  80/* Video Format Register Settings */
  81#define XILINX_FRMBUF_FMT_RGBX8                 10
  82#define XILINX_FRMBUF_FMT_YUVX8                 11
  83#define XILINX_FRMBUF_FMT_YUYV8                 12
  84#define XILINX_FRMBUF_FMT_RGBA8                 13
  85#define XILINX_FRMBUF_FMT_YUVA8                 14
  86#define XILINX_FRMBUF_FMT_RGBX10                15
  87#define XILINX_FRMBUF_FMT_YUVX10                16
  88#define XILINX_FRMBUF_FMT_Y_UV8                 18
  89#define XILINX_FRMBUF_FMT_Y_UV8_420             19
  90#define XILINX_FRMBUF_FMT_RGB8                  20
  91#define XILINX_FRMBUF_FMT_YUV8                  21
  92#define XILINX_FRMBUF_FMT_Y_UV10                22
  93#define XILINX_FRMBUF_FMT_Y_UV10_420            23
  94#define XILINX_FRMBUF_FMT_Y8                    24
  95#define XILINX_FRMBUF_FMT_Y10                   25
  96#define XILINX_FRMBUF_FMT_BGRA8                 26
  97#define XILINX_FRMBUF_FMT_BGRX8                 27
  98#define XILINX_FRMBUF_FMT_UYVY8                 28
  99#define XILINX_FRMBUF_FMT_BGR8                  29
 100#define XILINX_FRMBUF_FMT_RGBX12                30
 101#define XILINX_FRMBUF_FMT_RGB16                 35
 102#define XILINX_FRMBUF_FMT_Y_U_V8                42
 103
 104/* FID Register */
 105#define XILINX_FRMBUF_FID_MASK                  BIT(0)
 106
 107/* FID ERR Register */
 108#define XILINX_FRMBUF_FID_ERR_MASK              BIT(0)
 109#define XILINX_FRMBUF_FID_OUT_MASK              BIT(0)
 110
 111#define XILINX_FRMBUF_ALIGN_MUL                 8
 112
 113#define WAIT_FOR_FLUSH_DONE                     25
 114
 115/* Pixels per clock property flag */
 116#define XILINX_PPC_PROP                         BIT(0)
 117#define XILINX_FLUSH_PROP                       BIT(1)
 118#define XILINX_FID_PROP                         BIT(2)
 119#define XILINX_CLK_PROP                         BIT(3)
 120#define XILINX_THREE_PLANES_PROP                BIT(4)
 121#define XILINX_FID_ERR_DETECT_PROP              BIT(5)
 122
 123#define XILINX_FRMBUF_MIN_HEIGHT                (64)
 124#define XILINX_FRMBUF_MIN_WIDTH                 (64)
 125
 126/**
 127 * struct xilinx_frmbuf_desc_hw - Hardware Descriptor
 128 * @luma_plane_addr: Luma or packed plane buffer address
 129 * @chroma_plane_addr: Chroma plane buffer addresses (up to two planes)
 130 * @vsize: Vertical Size
 131 * @hsize: Horizontal Size
 132 * @stride: Number of bytes between the first
 133 *          pixels of each horizontal line
 134 */
 135struct xilinx_frmbuf_desc_hw {
 136        dma_addr_t luma_plane_addr;
 137        dma_addr_t chroma_plane_addr[2];
 138        u32 vsize;
 139        u32 hsize;
 140        u32 stride;
 141};
 142
 143/**
 144 * struct xilinx_frmbuf_tx_descriptor - Per Transaction structure
 145 * @async_tx: Async transaction descriptor
 146 * @hw: Hardware descriptor
 147 * @node: Node in the channel descriptors list
 148 * @fid: Field ID of buffer
 149 * @earlycb: Early callback mode selecting when the callback should fire
 150 */
 151struct xilinx_frmbuf_tx_descriptor {
 152        struct dma_async_tx_descriptor async_tx;
 153        struct xilinx_frmbuf_desc_hw hw;
 154        struct list_head node;
 155        u32 fid;
 156        u32 earlycb;
 157};
 158
 159/**
 160 * struct xilinx_frmbuf_chan - Driver specific dma channel structure
 161 * @xdev: Driver specific device structure
 162 * @lock: Descriptor operation lock
 163 * @chan_node: Member of a list of framebuffer channel instances
 164 * @pending_list: Descriptors waiting
 165 * @done_list: Complete descriptors
 166 * @staged_desc: Next buffer to be programmed
 167 * @active_desc: Currently active buffer being read/written to
 168 * @common: DMA common channel
 169 * @dev: The dma device
 170 * @write_addr: callback that will write dma addresses to IP (32 or 64 bit)
 171 * @irq: Channel IRQ
 172 * @direction: Transfer direction
 173 * @idle: Channel idle state
 174 * @tasklet: Cleanup work after irq
 175 * @vid_fmt: Reference to currently assigned video format description
 176 * @hw_fid: FID enabled in hardware flag
 177 * @mode: Select operation mode
 178 * @fid_err_flag: Field id error detection flag
 179 * @fid_out_val: Field ID value output by the hardware
 180 * @fid_mode: Selected field ID operating mode
 181 */
 182struct xilinx_frmbuf_chan {
 183        struct xilinx_frmbuf_device *xdev;
 184        /* Descriptor operation lock */
 185        spinlock_t lock;
 186        struct list_head chan_node;
 187        struct list_head pending_list;
 188        struct list_head done_list;
 189        struct xilinx_frmbuf_tx_descriptor *staged_desc;
 190        struct xilinx_frmbuf_tx_descriptor *active_desc;
 191        struct dma_chan common;
 192        struct device *dev;
 193        void (*write_addr)(struct xilinx_frmbuf_chan *chan, u32 reg,
 194                           dma_addr_t value);
 195        int irq;
 196        enum dma_transfer_direction direction;
 197        bool idle;
 198        struct tasklet_struct tasklet;
 199        const struct xilinx_frmbuf_format_desc *vid_fmt;
 200        bool hw_fid;
 201        enum operation_mode mode;
 202        u8 fid_err_flag;
 203        u8 fid_out_val;
 204        enum fid_modes fid_mode;
 205};
 206
 207/**
 208 * struct xilinx_frmbuf_format_desc - lookup table to match fourcc to format
 209 * @dts_name: Device tree name for this entry.
 210 * @id: Format ID
 211 * @bpw: Bits of pixel data + padding in a 32-bit word (luma plane for semi-planar formats)
 212 * @ppw: Number of pixels represented in a 32-bit word (luma plane for semi-planar formats)
 213 * @num_planes: Expected number of plane buffers in framebuffer for this format
 214 * @drm_fmt: DRM video framework equivalent fourcc code
 215 * @v4l2_fmt: Video 4 Linux framework equivalent fourcc code
 216 * @fmt_bitmask: Flag identifying this format in device-specific "enabled"
 217 *      bitmap
 218 */
 219struct xilinx_frmbuf_format_desc {
 220        const char *dts_name;
 221        u32 id;
 222        u32 bpw;
 223        u32 ppw;
 224        u32 num_planes;
 225        u32 drm_fmt;
 226        u32 v4l2_fmt;
 227        u32 fmt_bitmask;
 228};
 229
 230static LIST_HEAD(frmbuf_chan_list);
 231static DEFINE_MUTEX(frmbuf_chan_list_lock);
 232
 233static const struct xilinx_frmbuf_format_desc xilinx_frmbuf_formats[] = {
 234        {
 235                .dts_name = "xbgr8888",
 236                .id = XILINX_FRMBUF_FMT_RGBX8,
 237                .bpw = 32,
 238                .ppw = 1,
 239                .num_planes = 1,
 240                .drm_fmt = DRM_FORMAT_XBGR8888,
 241                .v4l2_fmt = V4L2_PIX_FMT_BGRX32,
 242                .fmt_bitmask = BIT(0),
 243        },
 244        {
 245                .dts_name = "xbgr2101010",
 246                .id = XILINX_FRMBUF_FMT_RGBX10,
 247                .bpw = 32,
 248                .ppw = 1,
 249                .num_planes = 1,
 250                .drm_fmt = DRM_FORMAT_XBGR2101010,
 251                .v4l2_fmt = V4L2_PIX_FMT_XBGR30,
 252                .fmt_bitmask = BIT(1),
 253        },
 254        {
 255                .dts_name = "xrgb8888",
 256                .id = XILINX_FRMBUF_FMT_BGRX8,
 257                .bpw = 32,
 258                .ppw = 1,
 259                .num_planes = 1,
 260                .drm_fmt = DRM_FORMAT_XRGB8888,
 261                .v4l2_fmt = V4L2_PIX_FMT_XBGR32,
 262                .fmt_bitmask = BIT(2),
 263        },
 264        {
 265                .dts_name = "xvuy8888",
 266                .id = XILINX_FRMBUF_FMT_YUVX8,
 267                .bpw = 32,
 268                .ppw = 1,
 269                .num_planes = 1,
 270                .drm_fmt = DRM_FORMAT_XVUY8888,
 271                .v4l2_fmt = V4L2_PIX_FMT_XVUY32,
 272                .fmt_bitmask = BIT(5),
 273        },
 274        {
 275                .dts_name = "vuy888",
 276                .id = XILINX_FRMBUF_FMT_YUV8,
 277                .bpw = 24,
 278                .ppw = 1,
 279                .num_planes = 1,
 280                .drm_fmt = DRM_FORMAT_VUY888,
 281                .v4l2_fmt = V4L2_PIX_FMT_VUY24,
 282                .fmt_bitmask = BIT(6),
 283        },
 284        {
 285                .dts_name = "yuvx2101010",
 286                .id = XILINX_FRMBUF_FMT_YUVX10,
 287                .bpw = 32,
 288                .ppw = 1,
 289                .num_planes = 1,
 290                .drm_fmt = DRM_FORMAT_XVUY2101010,
 291                .v4l2_fmt = V4L2_PIX_FMT_XVUY10,
 292                .fmt_bitmask = BIT(7),
 293        },
 294        {
 295                .dts_name = "yuyv",
 296                .id = XILINX_FRMBUF_FMT_YUYV8,
 297                .bpw = 32,
 298                .ppw = 2,
 299                .num_planes = 1,
 300                .drm_fmt = DRM_FORMAT_YUYV,
 301                .v4l2_fmt = V4L2_PIX_FMT_YUYV,
 302                .fmt_bitmask = BIT(8),
 303        },
 304        {
 305                .dts_name = "uyvy",
 306                .id = XILINX_FRMBUF_FMT_UYVY8,
 307                .bpw = 32,
 308                .ppw = 2,
 309                .num_planes = 1,
 310                .drm_fmt = DRM_FORMAT_UYVY,
 311                .v4l2_fmt = V4L2_PIX_FMT_UYVY,
 312                .fmt_bitmask = BIT(9),
 313        },
 314        {
 315                .dts_name = "nv16",
 316                .id = XILINX_FRMBUF_FMT_Y_UV8,
 317                .bpw = 32,
 318                .ppw = 4,
 319                .num_planes = 2,
 320                .drm_fmt = DRM_FORMAT_NV16,
 321                .v4l2_fmt = V4L2_PIX_FMT_NV16M,
 322                .fmt_bitmask = BIT(11),
 323        },
 324        {
 325                .dts_name = "nv16",
 326                .id = XILINX_FRMBUF_FMT_Y_UV8,
 327                .bpw = 32,
 328                .ppw = 4,
 329                .num_planes = 2,
 330                .drm_fmt = 0,
 331                .v4l2_fmt = V4L2_PIX_FMT_NV16,
 332                .fmt_bitmask = BIT(11),
 333        },
 334        {
 335                .dts_name = "nv12",
 336                .id = XILINX_FRMBUF_FMT_Y_UV8_420,
 337                .bpw = 32,
 338                .ppw = 4,
 339                .num_planes = 2,
 340                .drm_fmt = DRM_FORMAT_NV12,
 341                .v4l2_fmt = V4L2_PIX_FMT_NV12M,
 342                .fmt_bitmask = BIT(12),
 343        },
 344        {
 345                .dts_name = "nv12",
 346                .id = XILINX_FRMBUF_FMT_Y_UV8_420,
 347                .bpw = 32,
 348                .ppw = 4,
 349                .num_planes = 2,
 350                .drm_fmt = 0,
 351                .v4l2_fmt = V4L2_PIX_FMT_NV12,
 352                .fmt_bitmask = BIT(12),
 353        },
 354        {
 355                .dts_name = "xv15",
 356                .id = XILINX_FRMBUF_FMT_Y_UV10_420,
 357                .bpw = 32,
 358                .ppw = 3,
 359                .num_planes = 2,
 360                .drm_fmt = DRM_FORMAT_XV15,
 361                .v4l2_fmt = V4L2_PIX_FMT_XV15M,
 362                .fmt_bitmask = BIT(13),
 363        },
 364        {
 365                .dts_name = "xv15",
 366                .id = XILINX_FRMBUF_FMT_Y_UV10_420,
 367                .bpw = 32,
 368                .ppw = 3,
 369                .num_planes = 2,
 370                .drm_fmt = 0,
 371                .v4l2_fmt = V4L2_PIX_FMT_XV15,
 372                .fmt_bitmask = BIT(13),
 373        },
 374        {
 375                .dts_name = "xv20",
 376                .id = XILINX_FRMBUF_FMT_Y_UV10,
 377                .bpw = 32,
 378                .ppw = 3,
 379                .num_planes = 2,
 380                .drm_fmt = DRM_FORMAT_XV20,
 381                .v4l2_fmt = V4L2_PIX_FMT_XV20M,
 382                .fmt_bitmask = BIT(14),
 383        },
 384        {
 385                .dts_name = "xv20",
 386                .id = XILINX_FRMBUF_FMT_Y_UV10,
 387                .bpw = 32,
 388                .ppw = 3,
 389                .num_planes = 2,
 390                .drm_fmt = 0,
 391                .v4l2_fmt = V4L2_PIX_FMT_XV20,
 392                .fmt_bitmask = BIT(14),
 393        },
 394        {
 395                .dts_name = "bgr888",
 396                .id = XILINX_FRMBUF_FMT_RGB8,
 397                .bpw = 24,
 398                .ppw = 1,
 399                .num_planes = 1,
 400                .drm_fmt = DRM_FORMAT_BGR888,
 401                .v4l2_fmt = V4L2_PIX_FMT_RGB24,
 402                .fmt_bitmask = BIT(15),
 403        },
 404        {
 405                .dts_name = "y8",
 406                .id = XILINX_FRMBUF_FMT_Y8,
 407                .bpw = 32,
 408                .ppw = 4,
 409                .num_planes = 1,
 410                .drm_fmt = DRM_FORMAT_Y8,
 411                .v4l2_fmt = V4L2_PIX_FMT_GREY,
 412                .fmt_bitmask = BIT(16),
 413        },
 414        {
 415                .dts_name = "y10",
 416                .id = XILINX_FRMBUF_FMT_Y10,
 417                .bpw = 32,
 418                .ppw = 3,
 419                .num_planes = 1,
 420                .drm_fmt = DRM_FORMAT_Y10,
 421                .v4l2_fmt = V4L2_PIX_FMT_XY10,
 422                .fmt_bitmask = BIT(17),
 423        },
 424        {
 425                .dts_name = "rgb888",
 426                .id = XILINX_FRMBUF_FMT_BGR8,
 427                .bpw = 24,
 428                .ppw = 1,
 429                .num_planes = 1,
 430                .drm_fmt = DRM_FORMAT_RGB888,
 431                .v4l2_fmt = V4L2_PIX_FMT_BGR24,
 432                .fmt_bitmask = BIT(18),
 433        },
 434        {
 435                .dts_name = "abgr8888",
 436                .id = XILINX_FRMBUF_FMT_RGBA8,
 437                .bpw = 32,
 438                .ppw = 1,
 439                .num_planes = 1,
 440                .drm_fmt = DRM_FORMAT_ABGR8888,
 441                .v4l2_fmt = 0,
 442                .fmt_bitmask = BIT(19),
 443        },
 444        {
 445                .dts_name = "argb8888",
 446                .id = XILINX_FRMBUF_FMT_BGRA8,
 447                .bpw = 32,
 448                .ppw = 1,
 449                .num_planes = 1,
 450                .drm_fmt = DRM_FORMAT_ARGB8888,
 451                .v4l2_fmt = 0,
 452                .fmt_bitmask = BIT(20),
 453        },
 454        {
 455                .dts_name = "avuy8888",
 456                .id = XILINX_FRMBUF_FMT_YUVA8,
 457                .bpw = 32,
 458                .ppw = 1,
 459                .num_planes = 1,
 460                .drm_fmt = DRM_FORMAT_AVUY,
 461                .v4l2_fmt = 0,
 462                .fmt_bitmask = BIT(21),
 463        },
 464        {
 465                .dts_name = "xbgr4121212",
 466                .id = XILINX_FRMBUF_FMT_RGBX12,
 467                .bpw = 40,
 468                .ppw = 1,
 469                .num_planes = 1,
 470                .v4l2_fmt = V4L2_PIX_FMT_XBGR40,
 471                .fmt_bitmask = BIT(22),
 472        },
 473        {
 474                .dts_name = "rgb16",
 475                .id = XILINX_FRMBUF_FMT_RGB16,
 476                .bpw = 48,
 477                .ppw = 1,
 478                .num_planes = 1,
 479                .v4l2_fmt = V4L2_PIX_FMT_BGR48,
 480                .fmt_bitmask = BIT(23),
 481        },
 482        {
 483                .dts_name = "y_u_v8",
 484                .id = XILINX_FRMBUF_FMT_Y_U_V8,
 485                .bpw = 32,
 486                .ppw = 4,
 487                .num_planes = 3,
 488                .v4l2_fmt = V4L2_PIX_FMT_YUV444M,
 489                .drm_fmt = DRM_FORMAT_YUV444,
 490                .fmt_bitmask = BIT(24),
 491        },
 492};
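/*
 * Illustrative note (not from the original source): @ppw and @bpw convert a
 * line length in bytes into the pixel count programmed into the IP, i.e.
 * hsize = (bytes_per_line * ppw * 8) / bpw.  For "yuyv" (bpw = 32, ppw = 2)
 * a 1920-pixel line occupies 3840 bytes: (3840 * 2 * 8) / 32 = 1920.  For
 * "nv12" (bpw = 32, ppw = 4) the luma plane packs four pixels per word, so
 * a 1920-byte line also gives (1920 * 4 * 8) / 32 = 1920.  The same
 * arithmetic appears in xilinx_frmbuf_dma_prep_interleaved().
 */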
 493
 494/**
 495 * struct xilinx_frmbuf_feature - dt or IP property structure
 496 * @direction: dma transfer mode and direction
 497 * @flags: Bitmask of properties enabled in IP or dt
 498 */
 499struct xilinx_frmbuf_feature {
 500        enum dma_transfer_direction direction;
 501        u32 flags;
 502};
 503
 504/**
 505 * struct xilinx_frmbuf_device - dma device structure
 506 * @regs: I/O mapped base address
 507 * @dev: Device Structure
 508 * @common: DMA device structure
 509 * @chan: Driver specific dma channel
 510 * @rst_gpio: GPIO reset
 511 * @enabled_vid_fmts: Bitmask of video formats enabled in hardware
 512 * @drm_memory_fmts: Array of supported DRM fourcc codes
 513 * @drm_fmt_cnt: Count of supported DRM fourcc codes
 514 * @v4l2_memory_fmts: Array of supported V4L2 fourcc codes
 515 * @v4l2_fmt_cnt: Count of supported V4L2 fourcc codes
 516 * @cfg: Pointer to Framebuffer Feature config struct
 517 * @max_width: Maximum pixel width supported in IP.
 518 * @max_height: Maximum number of lines supported in IP.
 519 * @ppc: Pixels per clock supported in IP.
 520 * @ap_clk: Video core clock
 521 */
 522struct xilinx_frmbuf_device {
 523        void __iomem *regs;
 524        struct device *dev;
 525        struct dma_device common;
 526        struct xilinx_frmbuf_chan chan;
 527        struct gpio_desc *rst_gpio;
 528        u32 enabled_vid_fmts;
 529        u32 drm_memory_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
 530        u32 drm_fmt_cnt;
 531        u32 v4l2_memory_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
 532        u32 v4l2_fmt_cnt;
 533        const struct xilinx_frmbuf_feature *cfg;
 534        u32 max_width;
 535        u32 max_height;
 536        u32 ppc;
 537        struct clk *ap_clk;
 538};
 539
 540static const struct xilinx_frmbuf_feature xlnx_fbwr_cfg_v20 = {
 541        .direction = DMA_DEV_TO_MEM,
 542};
 543
 544static const struct xilinx_frmbuf_feature xlnx_fbwr_cfg_v21 = {
 545        .direction = DMA_DEV_TO_MEM,
 546        .flags = XILINX_PPC_PROP | XILINX_FLUSH_PROP
 547                | XILINX_FID_PROP | XILINX_CLK_PROP,
 548};
 549
 550static const struct xilinx_frmbuf_feature xlnx_fbwr_cfg_v22 = {
 551        .direction = DMA_DEV_TO_MEM,
 552        .flags = XILINX_PPC_PROP | XILINX_FLUSH_PROP
 553                | XILINX_FID_PROP | XILINX_CLK_PROP
 554                | XILINX_THREE_PLANES_PROP,
 555};
 556
 557static const struct xilinx_frmbuf_feature xlnx_fbrd_cfg_v20 = {
 558        .direction = DMA_MEM_TO_DEV,
 559};
 560
 561static const struct xilinx_frmbuf_feature xlnx_fbrd_cfg_v21 = {
 562        .direction = DMA_MEM_TO_DEV,
 563        .flags = XILINX_PPC_PROP | XILINX_FLUSH_PROP
 564                | XILINX_FID_PROP | XILINX_CLK_PROP,
 565};
 566
 567static const struct xilinx_frmbuf_feature xlnx_fbrd_cfg_v22 = {
 568        .direction = DMA_MEM_TO_DEV,
 569        .flags = XILINX_PPC_PROP | XILINX_FLUSH_PROP
 570                | XILINX_FID_PROP | XILINX_CLK_PROP
 571                | XILINX_THREE_PLANES_PROP
 572                | XILINX_FID_ERR_DETECT_PROP,
 573};
 574
 575static const struct of_device_id xilinx_frmbuf_of_ids[] = {
 576        { .compatible = "xlnx,axi-frmbuf-wr-v2",
 577                .data = (void *)&xlnx_fbwr_cfg_v20},
 578        { .compatible = "xlnx,axi-frmbuf-wr-v2.1",
 579                .data = (void *)&xlnx_fbwr_cfg_v21},
 580        { .compatible = "xlnx,axi-frmbuf-wr-v2.2",
 581                .data = (void *)&xlnx_fbwr_cfg_v22},
 582        { .compatible = "xlnx,axi-frmbuf-rd-v2",
 583                .data = (void *)&xlnx_fbrd_cfg_v20},
 584        { .compatible = "xlnx,axi-frmbuf-rd-v2.1",
 585                .data = (void *)&xlnx_fbrd_cfg_v21},
 586        { .compatible = "xlnx,axi-frmbuf-rd-v2.2",
 587                .data = (void *)&xlnx_fbrd_cfg_v22},
 588        {/* end of list */}
 589};
 590
  591/*********************** MACROS AND I/O HELPERS ******************************/
 592#define to_xilinx_chan(chan) \
 593        container_of(chan, struct xilinx_frmbuf_chan, common)
 594#define to_dma_tx_descriptor(tx) \
 595        container_of(tx, struct xilinx_frmbuf_tx_descriptor, async_tx)
 596
 597static inline u32 frmbuf_read(struct xilinx_frmbuf_chan *chan, u32 reg)
 598{
 599        return ioread32(chan->xdev->regs + reg);
 600}
 601
 602static inline void frmbuf_write(struct xilinx_frmbuf_chan *chan, u32 reg,
 603                                u32 value)
 604{
 605        iowrite32(value, chan->xdev->regs + reg);
 606}
 607
 608static inline void frmbuf_writeq(struct xilinx_frmbuf_chan *chan, u32 reg,
 609                                 u64 value)
 610{
 611        iowrite32(lower_32_bits(value), chan->xdev->regs + reg);
 612        iowrite32(upper_32_bits(value), chan->xdev->regs + reg + 4);
 613}
 614
 615static void writeq_addr(struct xilinx_frmbuf_chan *chan, u32 reg,
 616                        dma_addr_t addr)
 617{
 618        frmbuf_writeq(chan, reg, (u64)addr);
 619}
 620
 621static void write_addr(struct xilinx_frmbuf_chan *chan, u32 reg,
 622                       dma_addr_t addr)
 623{
 624        frmbuf_write(chan, reg, addr);
 625}
 626
 627static inline void frmbuf_clr(struct xilinx_frmbuf_chan *chan, u32 reg,
 628                              u32 clr)
 629{
 630        frmbuf_write(chan, reg, frmbuf_read(chan, reg) & ~clr);
 631}
 632
 633static inline void frmbuf_set(struct xilinx_frmbuf_chan *chan, u32 reg,
 634                              u32 set)
 635{
 636        frmbuf_write(chan, reg, frmbuf_read(chan, reg) | set);
 637}
 638
 639static void frmbuf_init_format_array(struct xilinx_frmbuf_device *xdev)
 640{
 641        u32 i, cnt;
 642
 643        for (i = 0; i < ARRAY_SIZE(xilinx_frmbuf_formats); i++) {
 644                if (!(xdev->enabled_vid_fmts &
 645                      xilinx_frmbuf_formats[i].fmt_bitmask))
 646                        continue;
 647
 648                if (xilinx_frmbuf_formats[i].drm_fmt) {
 649                        cnt = xdev->drm_fmt_cnt++;
 650                        xdev->drm_memory_fmts[cnt] =
 651                                xilinx_frmbuf_formats[i].drm_fmt;
 652                }
 653
 654                if (xilinx_frmbuf_formats[i].v4l2_fmt) {
 655                        cnt = xdev->v4l2_fmt_cnt++;
 656                        xdev->v4l2_memory_fmts[cnt] =
 657                                xilinx_frmbuf_formats[i].v4l2_fmt;
 658                }
 659        }
 660}
 661
 662static struct xilinx_frmbuf_chan *frmbuf_find_chan(struct dma_chan *chan)
 663{
 664        struct xilinx_frmbuf_chan *xil_chan;
 665        bool found_xchan = false;
 666
 667        mutex_lock(&frmbuf_chan_list_lock);
 668        list_for_each_entry(xil_chan, &frmbuf_chan_list, chan_node) {
 669                if (chan == &xil_chan->common) {
 670                        found_xchan = true;
 671                        break;
 672                }
 673        }
 674        mutex_unlock(&frmbuf_chan_list_lock);
 675
 676        if (!found_xchan) {
 677                dev_dbg(chan->device->dev,
 678                        "dma chan not a Video Framebuffer channel instance\n");
 679                return ERR_PTR(-EINVAL);
 680        }
 681
 682        return xil_chan;
 683}
 684
 685static struct xilinx_frmbuf_device *frmbuf_find_dev(struct dma_chan *chan)
 686{
 687        struct xilinx_frmbuf_chan *xchan, *temp;
 688        struct xilinx_frmbuf_device *xdev;
 689        bool is_frmbuf_chan = false;
 690
 691        list_for_each_entry_safe(xchan, temp, &frmbuf_chan_list, chan_node) {
 692                if (chan == &xchan->common)
 693                        is_frmbuf_chan = true;
 694        }
 695
 696        if (!is_frmbuf_chan)
 697                return ERR_PTR(-ENODEV);
 698
 699        xchan = to_xilinx_chan(chan);
 700        xdev = container_of(xchan, struct xilinx_frmbuf_device, chan);
 701
 702        return xdev;
 703}
 704
 705static int frmbuf_verify_format(struct dma_chan *chan, u32 fourcc, u32 type)
 706{
 707        struct xilinx_frmbuf_chan *xil_chan = to_xilinx_chan(chan);
 708        u32 i, sz = ARRAY_SIZE(xilinx_frmbuf_formats);
 709
 710        for (i = 0; i < sz; i++) {
 711                if ((type == XDMA_DRM &&
 712                     fourcc != xilinx_frmbuf_formats[i].drm_fmt) ||
 713                   (type == XDMA_V4L2 &&
 714                    fourcc != xilinx_frmbuf_formats[i].v4l2_fmt))
 715                        continue;
 716
 717                if (!(xilinx_frmbuf_formats[i].fmt_bitmask &
 718                      xil_chan->xdev->enabled_vid_fmts))
 719                        return -EINVAL;
 720
 721                /*
 722                 * The Alpha color formats are supported in Framebuffer Read
 723                 * IP only as corresponding DRM formats.
 724                 */
 725                if (type == XDMA_DRM &&
 726                    (xilinx_frmbuf_formats[i].drm_fmt == DRM_FORMAT_ABGR8888 ||
 727                     xilinx_frmbuf_formats[i].drm_fmt == DRM_FORMAT_ARGB8888 ||
 728                     xilinx_frmbuf_formats[i].drm_fmt == DRM_FORMAT_AVUY) &&
 729                    xil_chan->direction != DMA_MEM_TO_DEV)
 730                        return -EINVAL;
 731
 732                xil_chan->vid_fmt = &xilinx_frmbuf_formats[i];
 733                return 0;
 734        }
 735        return -EINVAL;
 736}
 737
 738static void xilinx_xdma_set_config(struct dma_chan *chan, u32 fourcc, u32 type)
 739{
 740        struct xilinx_frmbuf_chan *xil_chan;
 741        struct xilinx_frmbuf_device *xdev;
 742        const struct xilinx_frmbuf_format_desc *old_vid_fmt;
 743        int ret;
 744
 745        xil_chan = frmbuf_find_chan(chan);
 746        if (IS_ERR(xil_chan))
 747                return;
 748
 749        xdev = frmbuf_find_dev(chan);
 750        if (IS_ERR(xdev))
 751                return;
 752
 753        /* Save old video format */
 754        old_vid_fmt = xil_chan->vid_fmt;
 755
 756        ret = frmbuf_verify_format(chan, fourcc, type);
 757        if (ret == -EINVAL) {
 758                dev_err(chan->device->dev,
 759                        "Framebuffer not configured for fourcc 0x%x\n",
 760                        fourcc);
 761                return;
 762        }
 763
 764        if ((!(xdev->cfg->flags & XILINX_THREE_PLANES_PROP)) &&
 765            xil_chan->vid_fmt->id == XILINX_FRMBUF_FMT_Y_U_V8) {
 766                dev_err(chan->device->dev, "doesn't support %s format\n",
 767                        xil_chan->vid_fmt->dts_name);
 768                /* Restore to old video format */
 769                xil_chan->vid_fmt = old_vid_fmt;
 770                return;
 771        }
 772}
 773
 774void xilinx_xdma_set_mode(struct dma_chan *chan, enum operation_mode mode)
 776{
 777        struct xilinx_frmbuf_chan *xil_chan;
 778
 779        xil_chan = frmbuf_find_chan(chan);
 780        if (IS_ERR(xil_chan))
 781                return;
 782
 783        xil_chan->mode = mode;
 786}
 787EXPORT_SYMBOL_GPL(xilinx_xdma_set_mode);
 788
 789void xilinx_xdma_drm_config(struct dma_chan *chan, u32 drm_fourcc)
 790{
 791        xilinx_xdma_set_config(chan, drm_fourcc, XDMA_DRM);
 792}
 793EXPORT_SYMBOL_GPL(xilinx_xdma_drm_config);
 794
 795void xilinx_xdma_v4l2_config(struct dma_chan *chan, u32 v4l2_fourcc)
 796{
 797        xilinx_xdma_set_config(chan, v4l2_fourcc, XDMA_V4L2);
 798}
 799EXPORT_SYMBOL_GPL(xilinx_xdma_v4l2_config);
 800
 801int xilinx_xdma_get_drm_vid_fmts(struct dma_chan *chan, u32 *fmt_cnt,
 802                                 u32 **fmts)
 803{
 804        struct xilinx_frmbuf_device *xdev;
 805
 806        xdev = frmbuf_find_dev(chan);
 807
 808        if (IS_ERR(xdev))
 809                return PTR_ERR(xdev);
 810
 811        *fmt_cnt = xdev->drm_fmt_cnt;
 812        *fmts = xdev->drm_memory_fmts;
 813
 814        return 0;
 815}
 816EXPORT_SYMBOL(xilinx_xdma_get_drm_vid_fmts);
 817
 818int xilinx_xdma_get_v4l2_vid_fmts(struct dma_chan *chan, u32 *fmt_cnt,
 819                                  u32 **fmts)
 820{
 821        struct xilinx_frmbuf_device *xdev;
 822
 823        xdev = frmbuf_find_dev(chan);
 824
 825        if (IS_ERR(xdev))
 826                return PTR_ERR(xdev);
 827
 828        *fmt_cnt = xdev->v4l2_fmt_cnt;
 829        *fmts = xdev->v4l2_memory_fmts;
 830
 831        return 0;
 832}
 833EXPORT_SYMBOL(xilinx_xdma_get_v4l2_vid_fmts);
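/*
 * Query-helper sketch (illustrative; "dma" is a channel held by the
 * consumer, error handling trimmed):
 *
 *        u32 cnt, i, *fmts;
 *
 *        if (!xilinx_xdma_get_v4l2_vid_fmts(dma, &cnt, &fmts))
 *                for (i = 0; i < cnt; i++)
 *                        pr_info("frmbuf supports fourcc 0x%08x\n", fmts[i]);
 */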
 834
 835int xilinx_xdma_get_fid(struct dma_chan *chan,
 836                        struct dma_async_tx_descriptor *async_tx, u32 *fid)
 837{
 838        struct xilinx_frmbuf_device *xdev;
 839        struct xilinx_frmbuf_tx_descriptor *desc;
 840
 841        xdev = frmbuf_find_dev(chan);
 842        if (IS_ERR(xdev))
 843                return PTR_ERR(xdev);
 844
 845        if (!async_tx || !fid)
 846                return -EINVAL;
 847
 848        if (xdev->chan.direction != DMA_DEV_TO_MEM)
 849                return -EINVAL;
 850
 851        desc = to_dma_tx_descriptor(async_tx);
 852        if (!desc)
 853                return -EINVAL;
 854
 855        *fid = desc->fid;
 856        return 0;
 857}
 858EXPORT_SYMBOL(xilinx_xdma_get_fid);
 859
 860int xilinx_xdma_set_fid(struct dma_chan *chan,
 861                        struct dma_async_tx_descriptor *async_tx, u32 fid)
 862{
 863        struct xilinx_frmbuf_device *xdev;
 864        struct xilinx_frmbuf_tx_descriptor *desc;
 865
 866        if (fid > 1 || !async_tx)
 867                return -EINVAL;
 868
 869        xdev = frmbuf_find_dev(chan);
 870        if (IS_ERR(xdev))
 871                return PTR_ERR(xdev);
 872
 873        if (xdev->chan.direction != DMA_MEM_TO_DEV)
 874                return -EINVAL;
 875
 876        desc = to_dma_tx_descriptor(async_tx);
 877        if (!desc)
 878                return -EINVAL;
 879
 880        desc->fid = fid;
 881        return 0;
 882}
 883EXPORT_SYMBOL(xilinx_xdma_set_fid);
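/*
 * Field ID sketch (illustrative): for interlaced content a consumer of the
 * read IP tags each prepared descriptor with the field it carries before
 * submitting it, e.g. xilinx_xdma_set_fid(dma, tx, odd_field ? 1 : 0).
 * With the write IP the flow is reversed: after the capture completes,
 * xilinx_xdma_get_fid(dma, tx, &fid) reports which field was received.
 */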
 884
 885int xilinx_xdma_get_fid_err_flag(struct dma_chan *chan,
 886                                 u32 *fid_err_flag)
 887{
 888        struct xilinx_frmbuf_device *xdev;
 889
 890        xdev = frmbuf_find_dev(chan);
 891        if (IS_ERR(xdev))
 892                return PTR_ERR(xdev);
 893
 894        if (xdev->chan.direction != DMA_DEV_TO_MEM || !xdev->chan.idle)
 895                return -EINVAL;
 896
 897        *fid_err_flag = xdev->chan.fid_err_flag;
 898
 899        return 0;
 900}
 901EXPORT_SYMBOL(xilinx_xdma_get_fid_err_flag);
 902
 903int xilinx_xdma_get_fid_out(struct dma_chan *chan,
 904                            u32 *fid_out_val)
 905{
 906        struct xilinx_frmbuf_device *xdev;
 907
 908        xdev = frmbuf_find_dev(chan);
 909        if (IS_ERR(xdev))
 910                return PTR_ERR(xdev);
 911
 912        if (xdev->chan.direction != DMA_DEV_TO_MEM || !xdev->chan.idle)
 913                return -EINVAL;
 914
 915        *fid_out_val = xdev->chan.fid_out_val;
 916
 917        return 0;
 918}
 919EXPORT_SYMBOL(xilinx_xdma_get_fid_out);
 920
 921int xilinx_xdma_get_width_align(struct dma_chan *chan, u32 *width_align)
 922{
 923        struct xilinx_frmbuf_device *xdev;
 924
 925        xdev = frmbuf_find_dev(chan);
 926        if (IS_ERR(xdev))
 927                return PTR_ERR(xdev);
 928        *width_align = xdev->ppc;
 929
 930        return 0;
 931}
 932EXPORT_SYMBOL(xilinx_xdma_get_width_align);
 933
 934int xilinx_xdma_get_earlycb(struct dma_chan *chan,
 935                            struct dma_async_tx_descriptor *async_tx,
 936                            u32 *earlycb)
 937{
 938        struct xilinx_frmbuf_device *xdev;
 939        struct xilinx_frmbuf_tx_descriptor *desc;
 940
 941        xdev = frmbuf_find_dev(chan);
 942        if (IS_ERR(xdev))
 943                return PTR_ERR(xdev);
 944
 945        if (!async_tx || !earlycb)
 946                return -EINVAL;
 947
 948        desc = to_dma_tx_descriptor(async_tx);
 949        if (!desc)
 950                return -EINVAL;
 951
 952        *earlycb = desc->earlycb;
 953        return 0;
 954}
 955EXPORT_SYMBOL(xilinx_xdma_get_earlycb);
 956
 957int xilinx_xdma_set_earlycb(struct dma_chan *chan,
 958                            struct dma_async_tx_descriptor *async_tx,
 959                            u32 earlycb)
 960{
 961        struct xilinx_frmbuf_device *xdev;
 962        struct xilinx_frmbuf_tx_descriptor *desc;
 963
 964        if (!async_tx)
 965                return -EINVAL;
 966
 967        xdev = frmbuf_find_dev(chan);
 968        if (IS_ERR(xdev))
 969                return PTR_ERR(xdev);
 970
 971        desc = to_dma_tx_descriptor(async_tx);
 972        if (!desc)
 973                return -EINVAL;
 974
 975        desc->earlycb = earlycb;
 976        return 0;
 977}
 978EXPORT_SYMBOL(xilinx_xdma_set_earlycb);
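/*
 * Early-callback sketch (illustrative): a client that must act as soon as a
 * frame is staged, rather than when it fully completes, can request early
 * notification before submitting the descriptor:
 *
 *        xilinx_xdma_set_earlycb(dma, tx, EARLY_CALLBACK);
 *
 * EARLY_CALLBACK_START_DESC instead fires the callback just before the
 * descriptor is programmed into the hardware; see
 * xilinx_frmbuf_start_transfer().
 */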
 979
 980/**
 981 * of_dma_xilinx_xlate - Translation function
 982 * @dma_spec: Pointer to DMA specifier as found in the device tree
 983 * @ofdma: Pointer to DMA controller data
 984 *
 985 * Return: DMA channel pointer on success or error code on error
 986 */
 987static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
 988                                            struct of_dma *ofdma)
 989{
 990        struct xilinx_frmbuf_device *xdev = ofdma->of_dma_data;
 991
 992        return dma_get_slave_channel(&xdev->chan.common);
 993}
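/*
 * Consumer-side note (illustrative; the "dmas"/"dma-names" entries and the
 * name "vid-out" are hypothetical): the channel returned here is what a
 * client obtains through the dmaengine core, e.g.
 *
 *        chan = dma_request_chan(&pdev->dev, "vid-out");
 *        if (IS_ERR(chan))
 *                return dev_err_probe(&pdev->dev, PTR_ERR(chan),
 *                                     "failed to get frmbuf channel\n");
 */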
 994
 995/* -----------------------------------------------------------------------------
 996 * Descriptors alloc and free
 997 */
 998
 999/**
1000 * xilinx_frmbuf_alloc_tx_descriptor - Allocate transaction descriptor
1001 * @chan: Driver specific dma channel
1002 *
1003 * Return: The allocated descriptor on success and NULL on failure.
1004 */
1005static struct xilinx_frmbuf_tx_descriptor *
1006xilinx_frmbuf_alloc_tx_descriptor(struct xilinx_frmbuf_chan *chan)
1007{
1008        struct xilinx_frmbuf_tx_descriptor *desc;
1009
1010        desc = kzalloc(sizeof(*desc), GFP_KERNEL);
1011        if (!desc)
1012                return NULL;
1013
1014        return desc;
1015}
1016
1017/**
1018 * xilinx_frmbuf_free_desc_list - Free descriptors list
1019 * @chan: Driver specific dma channel
1020 * @list: List to parse and delete the descriptor
1021 */
1022static void xilinx_frmbuf_free_desc_list(struct xilinx_frmbuf_chan *chan,
1023                                         struct list_head *list)
1024{
1025        struct xilinx_frmbuf_tx_descriptor *desc, *next;
1026
1027        list_for_each_entry_safe(desc, next, list, node) {
1028                list_del(&desc->node);
1029                kfree(desc);
1030        }
1031}
1032
1033/**
1034 * xilinx_frmbuf_free_descriptors - Free channel descriptors
1035 * @chan: Driver specific dma channel
1036 */
1037static void xilinx_frmbuf_free_descriptors(struct xilinx_frmbuf_chan *chan)
1038{
1039        unsigned long flags;
1040
1041        spin_lock_irqsave(&chan->lock, flags);
1042
1043        xilinx_frmbuf_free_desc_list(chan, &chan->pending_list);
1044        xilinx_frmbuf_free_desc_list(chan, &chan->done_list);
1045        kfree(chan->active_desc);
1046        kfree(chan->staged_desc);
1047
1048        chan->staged_desc = NULL;
1049        chan->active_desc = NULL;
1050        INIT_LIST_HEAD(&chan->pending_list);
1051        INIT_LIST_HEAD(&chan->done_list);
1052
1053        spin_unlock_irqrestore(&chan->lock, flags);
1054}
1055
1056/**
1057 * xilinx_frmbuf_free_chan_resources - Free channel resources
1058 * @dchan: DMA channel
1059 */
1060static void xilinx_frmbuf_free_chan_resources(struct dma_chan *dchan)
1061{
1062        struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1063
1064        xilinx_frmbuf_free_descriptors(chan);
1065}
1066
1067/**
1068 * xilinx_frmbuf_chan_desc_cleanup - Clean channel descriptors
1069 * @chan: Driver specific dma channel
1070 */
1071static void xilinx_frmbuf_chan_desc_cleanup(struct xilinx_frmbuf_chan *chan)
1072{
1073        struct xilinx_frmbuf_tx_descriptor *desc, *next;
1074        unsigned long flags;
1075
1076        spin_lock_irqsave(&chan->lock, flags);
1077
1078        list_for_each_entry_safe(desc, next, &chan->done_list, node) {
1079                dma_async_tx_callback callback;
1080                void *callback_param;
1081
1082                list_del(&desc->node);
1083
1084                /* Run the link descriptor callback function */
1085                callback = desc->async_tx.callback;
1086                callback_param = desc->async_tx.callback_param;
1087                if (callback) {
1088                        spin_unlock_irqrestore(&chan->lock, flags);
1089                        callback(callback_param);
1090                        spin_lock_irqsave(&chan->lock, flags);
1091                }
1092
1093                /* Run any dependencies, then free the descriptor */
1094                dma_run_dependencies(&desc->async_tx);
1095                kfree(desc);
1096        }
1097
1098        spin_unlock_irqrestore(&chan->lock, flags);
1099}
1100
1101/**
1102 * xilinx_frmbuf_do_tasklet - Schedule completion tasklet
1103 * @data: Pointer to the Xilinx frmbuf channel structure
1104 */
1105static void xilinx_frmbuf_do_tasklet(unsigned long data)
1106{
1107        struct xilinx_frmbuf_chan *chan = (struct xilinx_frmbuf_chan *)data;
1108
1109        xilinx_frmbuf_chan_desc_cleanup(chan);
1110}
1111
1112/**
1113 * xilinx_frmbuf_alloc_chan_resources - Allocate channel resources
1114 * @dchan: DMA channel
1115 *
1116 * Return: '0' on success and failure value on error
1117 */
1118static int xilinx_frmbuf_alloc_chan_resources(struct dma_chan *dchan)
1119{
1120        dma_cookie_init(dchan);
1121
1122        return 0;
1123}
1124
1125/**
1126 * xilinx_frmbuf_tx_status - Get frmbuf transaction status
1127 * @dchan: DMA channel
1128 * @cookie: Transaction identifier
1129 * @txstate: Transaction state
1130 *
1131 * Return: frmbuf transaction status
1132 */
1133static enum dma_status xilinx_frmbuf_tx_status(struct dma_chan *dchan,
1134                                               dma_cookie_t cookie,
1135                                               struct dma_tx_state *txstate)
1136{
1137        return dma_cookie_status(dchan, cookie, txstate);
1138}
1139
1140/**
1141 * xilinx_frmbuf_halt - Halt frmbuf channel
1142 * @chan: Driver specific dma channel
1143 */
1144static void xilinx_frmbuf_halt(struct xilinx_frmbuf_chan *chan)
1145{
1146        frmbuf_clr(chan, XILINX_FRMBUF_CTRL_OFFSET,
1147                   XILINX_FRMBUF_CTRL_AP_START | chan->mode);
1148        chan->idle = true;
1149}
1150
1151/**
1152 * xilinx_frmbuf_start - Start dma channel
1153 * @chan: Driver specific dma channel
1154 */
1155static void xilinx_frmbuf_start(struct xilinx_frmbuf_chan *chan)
1156{
1157        frmbuf_set(chan, XILINX_FRMBUF_CTRL_OFFSET,
1158                   XILINX_FRMBUF_CTRL_AP_START | chan->mode);
1159        chan->idle = false;
1160}
1161
1162/**
1163 * xilinx_frmbuf_complete_descriptor - Mark the active descriptor as complete
1164 * @chan: xilinx frmbuf channel
1165 *
1166 * This function is invoked with the channel spinlock held.
1166 *
1167 * CONTEXT: hardirq
1168 */
1169static void xilinx_frmbuf_complete_descriptor(struct xilinx_frmbuf_chan *chan)
1170{
1171        struct xilinx_frmbuf_tx_descriptor *desc = chan->active_desc;
1172
1173        /*
1174         * In case of frame buffer write, read the fid register
1175         * and associate it with descriptor
1176         */
1177        if (chan->direction == DMA_DEV_TO_MEM && chan->hw_fid)
1178                desc->fid = frmbuf_read(chan, XILINX_FRMBUF_FID_OFFSET) &
1179                            XILINX_FRMBUF_FID_MASK;
1180
1181        dma_cookie_complete(&desc->async_tx);
1182        list_add_tail(&desc->node, &chan->done_list);
1183}
1184
1185/**
1186 * xilinx_frmbuf_start_transfer - Starts frmbuf transfer
1187 * @chan: Driver specific channel struct pointer
1188 */
1189static void xilinx_frmbuf_start_transfer(struct xilinx_frmbuf_chan *chan)
1190{
1191        struct xilinx_frmbuf_tx_descriptor *desc;
1192        struct xilinx_frmbuf_device *xdev;
1193
1194        xdev = container_of(chan, struct xilinx_frmbuf_device, chan);
1195
1196        if (!chan->idle)
1197                return;
1198
1199        if (chan->staged_desc) {
1200                chan->active_desc = chan->staged_desc;
1201                chan->staged_desc = NULL;
1202        }
1203
1204        if (list_empty(&chan->pending_list))
1205                return;
1206
1207        desc = list_first_entry(&chan->pending_list,
1208                                struct xilinx_frmbuf_tx_descriptor,
1209                                node);
1210
1211        if (desc->earlycb == EARLY_CALLBACK_START_DESC) {
1212                dma_async_tx_callback callback;
1213                void *callback_param;
1214
1215                callback = desc->async_tx.callback;
1216                callback_param = desc->async_tx.callback_param;
1217                if (callback) {
1218                        callback(callback_param);
1219                        desc->async_tx.callback = NULL;
1220                        chan->active_desc = desc;
1221                }
1222        }
1223
1224        /* Start the transfer */
1225        chan->write_addr(chan, XILINX_FRMBUF_ADDR_OFFSET,
1226                         desc->hw.luma_plane_addr);
1227        chan->write_addr(chan, XILINX_FRMBUF_ADDR2_OFFSET,
1228                         desc->hw.chroma_plane_addr[0]);
1229        if (xdev->cfg->flags & XILINX_THREE_PLANES_PROP) {
1230                if (chan->direction == DMA_MEM_TO_DEV)
1231                        chan->write_addr(chan, XILINX_FRMBUF_RD_ADDR3_OFFSET,
1232                                         desc->hw.chroma_plane_addr[1]);
1233                else
1234                        chan->write_addr(chan, XILINX_FRMBUF_ADDR3_OFFSET,
1235                                         desc->hw.chroma_plane_addr[1]);
1236        }
1237
1238        /* HW expects these parameters to remain the same for one transaction */
1239        frmbuf_write(chan, XILINX_FRMBUF_WIDTH_OFFSET, desc->hw.hsize);
1240        frmbuf_write(chan, XILINX_FRMBUF_STRIDE_OFFSET, desc->hw.stride);
1241        frmbuf_write(chan, XILINX_FRMBUF_HEIGHT_OFFSET, desc->hw.vsize);
1242        frmbuf_write(chan, XILINX_FRMBUF_FMT_OFFSET, chan->vid_fmt->id);
1243
1244        /* If this is the framebuffer read IP, set the FID */
1245        if (chan->direction == DMA_MEM_TO_DEV && chan->hw_fid)
1246                frmbuf_write(chan, XILINX_FRMBUF_FID_OFFSET, desc->fid);
1247
1248        /* Start the hardware */
1249        xilinx_frmbuf_start(chan);
1250        list_del(&desc->node);
1251
1252        /* No staging descriptor required when auto restart is disabled */
1253        if (chan->mode == AUTO_RESTART)
1254                chan->staged_desc = desc;
1255        else
1256                chan->active_desc = desc;
1257}
1258
1259/**
1260 * xilinx_frmbuf_issue_pending - Issue pending transactions
1261 * @dchan: DMA channel
1262 */
1263static void xilinx_frmbuf_issue_pending(struct dma_chan *dchan)
1264{
1265        struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1266        unsigned long flags;
1267
1268        spin_lock_irqsave(&chan->lock, flags);
1269        xilinx_frmbuf_start_transfer(chan);
1270        spin_unlock_irqrestore(&chan->lock, flags);
1271}
1272
1273/**
1274 * xilinx_frmbuf_reset - Reset frmbuf channel
1275 * @chan: Driver specific dma channel
1276 */
1277static void xilinx_frmbuf_reset(struct xilinx_frmbuf_chan *chan)
1278{
1279        /* reset ip */
1280        gpiod_set_value(chan->xdev->rst_gpio, 1);
1281        udelay(1);
1282        gpiod_set_value(chan->xdev->rst_gpio, 0);
1283}
1284
1285/**
1286 * xilinx_frmbuf_chan_reset - Reset frmbuf channel and enable interrupts
1287 * @chan: Driver specific frmbuf channel
1288 */
1289static void xilinx_frmbuf_chan_reset(struct xilinx_frmbuf_chan *chan)
1290{
1291        xilinx_frmbuf_reset(chan);
1292        frmbuf_write(chan, XILINX_FRMBUF_IE_OFFSET, XILINX_FRMBUF_IE_AP_DONE);
1293        frmbuf_write(chan, XILINX_FRMBUF_GIE_OFFSET, XILINX_FRMBUF_GIE_EN);
1294        chan->fid_err_flag = 0;
1295        chan->fid_out_val = 0;
1296}
1297
1298/**
1299 * xilinx_frmbuf_irq_handler - frmbuf Interrupt handler
1300 * @irq: IRQ number
1301 * @data: Pointer to the Xilinx frmbuf channel structure
1302 *
1303 * Return: IRQ_HANDLED/IRQ_NONE
1304 */
1305static irqreturn_t xilinx_frmbuf_irq_handler(int irq, void *data)
1306{
1307        struct xilinx_frmbuf_chan *chan = data;
1308        u32 status;
1309        dma_async_tx_callback callback = NULL;
1310        void *callback_param;
1311        struct xilinx_frmbuf_tx_descriptor *desc;
1312
1313        status = frmbuf_read(chan, XILINX_FRMBUF_ISR_OFFSET);
1314        if (!(status & XILINX_FRMBUF_ISR_ALL_IRQ_MASK))
1315                return IRQ_NONE;
1316
1317        frmbuf_write(chan, XILINX_FRMBUF_ISR_OFFSET,
1318                     status & XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
1319
1320        /* Check if callback function needs to be called early */
1321        desc = chan->staged_desc;
1322        if (desc && desc->earlycb == EARLY_CALLBACK) {
1323                callback = desc->async_tx.callback;
1324                callback_param = desc->async_tx.callback_param;
1325                if (callback) {
1326                        callback(callback_param);
1327                        desc->async_tx.callback = NULL;
1328                }
1329        }
1330
1331        if (status & XILINX_FRMBUF_ISR_AP_DONE_IRQ) {
1332                spin_lock(&chan->lock);
1333                chan->idle = true;
1334                if (chan->active_desc) {
1335                        xilinx_frmbuf_complete_descriptor(chan);
1336                        chan->active_desc = NULL;
1337                }
1338
1339                /* Update fid err detect flag and out value */
1340                if (chan->direction == DMA_MEM_TO_DEV &&
1341                    chan->hw_fid && chan->idle &&
1342                    chan->xdev->cfg->flags & XILINX_FID_ERR_DETECT_PROP) {
1343                        if (chan->mode == AUTO_RESTART)
1344                                chan->fid_mode = FID_MODE_2;
1345                        else
1346                                chan->fid_mode = FID_MODE_1;
1347
1348                        frmbuf_write(chan, XILINX_FRMBUF_FID_MODE_OFFSET,
1349                                     chan->fid_mode);
1350                        dev_dbg(chan->xdev->dev, "fid mode = %d\n",
1351                                frmbuf_read(chan, XILINX_FRMBUF_FID_MODE_OFFSET));
1352
1353                        chan->fid_err_flag = frmbuf_read(chan,
1354                                                         XILINX_FRMBUF_FID_ERR_OFFSET) &
1355                                                        XILINX_FRMBUF_FID_ERR_MASK;
1356                        chan->fid_out_val = frmbuf_read(chan,
1357                                                        XILINX_FRMBUF_FID_OUT_OFFSET) &
1358                                                        XILINX_FRMBUF_FID_OUT_MASK;
1359                        dev_dbg(chan->xdev->dev, "fid err cnt = 0x%x\n",
1360                                frmbuf_read(chan, XILINX_FRMBUF_FID_ERR_OFFSET));
1361                }
1362
1363                xilinx_frmbuf_start_transfer(chan);
1364                spin_unlock(&chan->lock);
1365        }
1366
1367        tasklet_schedule(&chan->tasklet);
1368        return IRQ_HANDLED;
1369}
1370
1371/**
1372 * xilinx_frmbuf_tx_submit - Submit DMA transaction
1373 * @tx: Async transaction descriptor
1374 *
1375 * Return: cookie value on success and failure value on error
1376 */
1377static dma_cookie_t xilinx_frmbuf_tx_submit(struct dma_async_tx_descriptor *tx)
1378{
1379        struct xilinx_frmbuf_tx_descriptor *desc = to_dma_tx_descriptor(tx);
1380        struct xilinx_frmbuf_chan *chan = to_xilinx_chan(tx->chan);
1381        dma_cookie_t cookie;
1382        unsigned long flags;
1383
1384        spin_lock_irqsave(&chan->lock, flags);
1385        cookie = dma_cookie_assign(tx);
1386        list_add_tail(&desc->node, &chan->pending_list);
1387        spin_unlock_irqrestore(&chan->lock, flags);
1388
1389        return cookie;
1390}
1391
1392/**
1393 * xilinx_frmbuf_dma_prep_interleaved - prepare a descriptor for a
1394 *      DMA_SLAVE transaction
1395 * @dchan: DMA channel
1396 * @xt: Interleaved template pointer
1397 * @flags: transfer ack flags
1398 *
1399 * Return: Async transaction descriptor on success and NULL on failure
1400 */
1401static struct dma_async_tx_descriptor *
1402xilinx_frmbuf_dma_prep_interleaved(struct dma_chan *dchan,
1403                                   struct dma_interleaved_template *xt,
1404                                   unsigned long flags)
1405{
1406        struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1407        struct xilinx_frmbuf_tx_descriptor *desc;
1408        struct xilinx_frmbuf_desc_hw *hw;
1409        u32 vsize, hsize;
1410
1411        if (chan->direction != xt->dir || !chan->vid_fmt)
1412                goto error;
1413
1414        if (!xt->numf || !xt->sgl[0].size)
1415                goto error;
1416
1417        if (xt->frame_size != chan->vid_fmt->num_planes)
1418                goto error;
1419
1420        vsize = xt->numf;
1421        hsize = (xt->sgl[0].size * chan->vid_fmt->ppw * 8) /
1422                 chan->vid_fmt->bpw;
1423        /* hsize should be even; round up if it came out odd */
1424        if (hsize & 1)
1425                hsize++;
1426
1427        if (vsize > chan->xdev->max_height || hsize > chan->xdev->max_width) {
1428                dev_dbg(chan->xdev->dev,
1429                        "vsize %d max vsize %d hsize %d max hsize %d\n",
1430                        vsize, chan->xdev->max_height, hsize,
1431                        chan->xdev->max_width);
1432                dev_err(chan->xdev->dev, "Requested size not supported!\n");
1433                goto error;
1434        }
1435
1436        desc = xilinx_frmbuf_alloc_tx_descriptor(chan);
1437        if (!desc)
1438                return NULL;
1439
1440        dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
1441        desc->async_tx.tx_submit = xilinx_frmbuf_tx_submit;
1442        async_tx_ack(&desc->async_tx);
1443
1444        hw = &desc->hw;
1445        hw->vsize = xt->numf;
1446        hw->stride = xt->sgl[0].icg + xt->sgl[0].size;
1447        hw->hsize = (xt->sgl[0].size * chan->vid_fmt->ppw * 8) /
1448                     chan->vid_fmt->bpw;
1449
1450        /* hsize should be even; round up if it came out odd */
1451        if (hw->hsize & 1)
1452                hw->hsize++;
1453
1454        if (chan->direction == DMA_MEM_TO_DEV) {
1455                hw->luma_plane_addr = xt->src_start;
1456                if (xt->frame_size == 2 || xt->frame_size == 3)
1457                        hw->chroma_plane_addr[0] =
1458                                xt->src_start +
1459                                xt->numf * hw->stride +
1460                                xt->sgl[0].src_icg;
1461                if (xt->frame_size == 3)
1462                        hw->chroma_plane_addr[1] =
1463                                hw->chroma_plane_addr[0] +
1464                                xt->numf * hw->stride +
1465                                xt->sgl[0].src_icg;
1466        } else {
1467                hw->luma_plane_addr = xt->dst_start;
1468                if (xt->frame_size == 2 || xt->frame_size == 3)
1469                        hw->chroma_plane_addr[0] =
1470                                xt->dst_start +
1471                                xt->numf * hw->stride +
1472                                xt->sgl[0].dst_icg;
1473                if (xt->frame_size == 3)
1474                        hw->chroma_plane_addr[1] =
1475                                hw->chroma_plane_addr[0] +
1476                                xt->numf * hw->stride +
1477                                xt->sgl[0].dst_icg;
1478        }
1479
1480        return &desc->async_tx;
1481
1482error:
1483        dev_err(chan->xdev->dev,
1484                "Invalid dma template or missing dma video fmt config\n");
1485        return NULL;
1486}
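/*
 * Template geometry sketch (illustrative, not part of the driver): for a
 * two-plane format such as nv12 a consumer fills the interleaved template so
 * that numf is the line count, sgl[0].size the line length in bytes and
 * sgl[0].icg the padding up to the next line; the chroma plane address is
 * then derived from the luma start, stride and numf as done above:
 *
 *        struct dma_interleaved_template *xt;
 *
 *        xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
 *        xt->dir = DMA_MEM_TO_DEV;              // framebuffer read IP
 *        xt->src_start = luma_dma_addr;         // plane 0 DMA address
 *        xt->frame_size = 2;                    // two planes for nv12
 *        xt->numf = height;
 *        xt->sgl[0].size = width;               // one byte per luma pixel
 *        xt->sgl[0].icg = stride - width;
 */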
1487
1488/**
1489 * xilinx_frmbuf_terminate_all - Halt the channel and free descriptors
1490 * @dchan: Driver specific dma channel pointer
1491 *
1492 * Return: 0
1493 */
1494static int xilinx_frmbuf_terminate_all(struct dma_chan *dchan)
1495{
1496        struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1497
1498        xilinx_frmbuf_halt(chan);
1499        xilinx_frmbuf_free_descriptors(chan);
1500        /* worst case frame-to-frame boundary; ensure frame output complete */
1501        msleep(50);
1502
1503        if (chan->xdev->cfg->flags & XILINX_FLUSH_PROP) {
1504                u8 count;
1505
1506                /*
1507                 * Flush the framebuffer FIFO and
1508                 * wait for max 50ms for flush done
1509                 */
1510                frmbuf_set(chan, XILINX_FRMBUF_CTRL_OFFSET,
1511                           XILINX_FRMBUF_CTRL_FLUSH);
1512                for (count = WAIT_FOR_FLUSH_DONE; count > 0; count--) {
1513                        if (frmbuf_read(chan, XILINX_FRMBUF_CTRL_OFFSET) &
1514                                        XILINX_FRMBUF_CTRL_FLUSH_DONE)
1515                                break;
1516                        usleep_range(2000, 2100);
1517                }
1518
1519                if (!count)
1520                        dev_err(chan->xdev->dev, "Framebuffer Flush not done!\n");
1521        }
1522
1523        xilinx_frmbuf_chan_reset(chan);
1524
1525        return 0;
1526}
1527
1528/**
1529 * xilinx_frmbuf_synchronize - kill tasklet to stop further descr processing
1530 * @dchan: Driver specific dma channel pointer
1531 */
1532static void xilinx_frmbuf_synchronize(struct dma_chan *dchan)
1533{
1534        struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1535
1536        tasklet_kill(&chan->tasklet);
1537}
1538
1539/* -----------------------------------------------------------------------------
1540 * Probe and remove
1541 */
1542
1543/**
1544 * xilinx_frmbuf_chan_remove - Per Channel remove function
1545 * @chan: Driver specific dma channel
1546 */
1547static void xilinx_frmbuf_chan_remove(struct xilinx_frmbuf_chan *chan)
1548{
1549        /* Disable all interrupts */
1550        frmbuf_clr(chan, XILINX_FRMBUF_IE_OFFSET,
1551                   XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
1552
1553        tasklet_kill(&chan->tasklet);
1554        list_del(&chan->common.device_node);
1555
1556        mutex_lock(&frmbuf_chan_list_lock);
1557        list_del(&chan->chan_node);
1558        mutex_unlock(&frmbuf_chan_list_lock);
1559}
1560
1561/**
1562 * xilinx_frmbuf_chan_probe - Per Channel Probing
1563 * It gets channel features from the device tree entry and
1564 * initializes special channel handling routines.
1565 *
1566 * @xdev: Driver specific device structure
1567 * @node: Device node
1568 *
1569 * Return: '0' on success and failure value on error
1570 */
1571static int xilinx_frmbuf_chan_probe(struct xilinx_frmbuf_device *xdev,
1572                                    struct device_node *node)
1573{
1574        struct xilinx_frmbuf_chan *chan;
1575        int err;
1576        u32 dma_addr_size = 0;
1577
1578        chan = &xdev->chan;
1579
1580        chan->dev = xdev->dev;
1581        chan->xdev = xdev;
1582        chan->idle = true;
1583        chan->fid_err_flag = 0;
1584        chan->fid_out_val = 0;
1585        chan->mode = AUTO_RESTART;
1586
1587        err = of_property_read_u32(node, "xlnx,dma-addr-width",
1588                                   &dma_addr_size);
1589        if (err || (dma_addr_size != 32 && dma_addr_size != 64)) {
1590                dev_err(xdev->dev, "missing or invalid addr width dts prop\n");
1591                return err ? err : -EINVAL;
1592        }
1593
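        /*
         * Use 64-bit address register writes only when both the IP and
         * the kernel's dma_addr_t are 64-bit wide.
         */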
1594        if (dma_addr_size == 64 && sizeof(dma_addr_t) == sizeof(u64))
1595                chan->write_addr = writeq_addr;
1596        else
1597                chan->write_addr = write_addr;
1598
1599        if (xdev->cfg->flags & XILINX_FID_PROP)
1600                chan->hw_fid = of_property_read_bool(node, "xlnx,fid");
1601
1602        spin_lock_init(&chan->lock);
1603        INIT_LIST_HEAD(&chan->pending_list);
1604        INIT_LIST_HEAD(&chan->done_list);
1605
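
        /* Map and request the per-channel interrupt */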
1606        chan->irq = irq_of_parse_and_map(node, 0);
        if (!chan->irq) {
                dev_err(xdev->dev, "unable to map channel IRQ\n");
                return -EINVAL;
        }

1607        err = devm_request_irq(xdev->dev, chan->irq, xilinx_frmbuf_irq_handler,
1608                               IRQF_SHARED, "xilinx_framebuffer", chan);
1609
1610        if (err) {
1611                dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq);
1612                return err;
1613        }
1614
1615        tasklet_init(&chan->tasklet, xilinx_frmbuf_do_tasklet,
1616                     (unsigned long)chan);
1617
1618        /*
1619         * Initialize the DMA channel and add it to the DMA engine channels
1620         * list.
1621         */
1622        chan->common.device = &xdev->common;
1623
1624        list_add_tail(&chan->common.device_node, &xdev->common.channels);
1625
1626        mutex_lock(&frmbuf_chan_list_lock);
1627        list_add_tail(&chan->chan_node, &frmbuf_chan_list);
1628        mutex_unlock(&frmbuf_chan_list_lock);
1629
1630        xilinx_frmbuf_chan_reset(chan);
1631
1632        return 0;
1633}
1634
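/*
 * Illustrative device-tree node for this driver (a sketch only: the
 * compatible string comes from xilinx_frmbuf_of_ids, the addresses and
 * numbers below are placeholders, and the xlnx,vid-formats strings must
 * match the dts_name entries in xilinx_frmbuf_formats[]):
 *
 *	v_frmbuf_wr@a0000000 {
 *		compatible = ... ;
 *		reg = <0xa0000000 0x10000>;
 *		interrupt-parent = <&gic>;
 *		interrupts = <0 89 4>;
 *		clocks = <&misc_clk>;
 *		clock-names = "ap_clk";
 *		reset-gpios = <&gpio 78 1>;
 *		xlnx,dma-addr-width = <64>;
 *		xlnx,pixels-per-clock = <2>;
 *		xlnx,dma-align = <16>;
 *		xlnx,max-height = <2160>;
 *		xlnx,max-width = <3840>;
 *		xlnx,vid-formats = ... ;
 *	};
 */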
1635/**
1636 * xilinx_frmbuf_probe - Driver probe function
1637 * @pdev: Pointer to the platform_device structure
1638 *
1639 * Return: '0' on success and failure value on error
1640 */
1641static int xilinx_frmbuf_probe(struct platform_device *pdev)
1642{
1643        struct device_node *node = pdev->dev.of_node;
1644        struct xilinx_frmbuf_device *xdev;
1645        struct resource *io;
1646        enum dma_transfer_direction dma_dir;
1647        const struct of_device_id *match;
1648        int err;
1649        u32 i, j, align, max_width, max_height;
1650        int hw_vid_fmt_cnt;
1651        const char *vid_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
1652
1653        xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL);
1654        if (!xdev)
1655                return -ENOMEM;
1656
1657        xdev->dev = &pdev->dev;
1658
1659        match = of_match_node(xilinx_frmbuf_of_ids, node);
1660        if (!match)
1661                return -ENODEV;
1662
1663        xdev->cfg = match->data;
1664
1665        dma_dir = (enum dma_transfer_direction)xdev->cfg->direction;
1666
1667        if (xdev->cfg->flags & XILINX_CLK_PROP) {
1668                xdev->ap_clk = devm_clk_get(xdev->dev, "ap_clk");
1669                if (IS_ERR(xdev->ap_clk)) {
1670                        err = PTR_ERR(xdev->ap_clk);
1671                        dev_err(xdev->dev, "failed to get ap_clk (%d)\n", err);
1672                        return err;
1673                }
1674        } else {
1675                dev_info(xdev->dev, "assuming clock is enabled!\n");
1676        }
1677
1678        xdev->rst_gpio = devm_gpiod_get(&pdev->dev, "reset",
1679                                        GPIOD_OUT_HIGH);
1680        if (IS_ERR(xdev->rst_gpio)) {
1681                err = PTR_ERR(xdev->rst_gpio);
1682                if (err == -EPROBE_DEFER)
1683                        dev_info(&pdev->dev,
1684                                 "Probe deferred due to GPIO reset defer\n");
1685                else
1686                        dev_err(&pdev->dev,
1687                                "Unable to locate reset property in dt\n");
1688                return err;
1689        }
1690
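        /* Release the IP from reset (asserted above via GPIOD_OUT_HIGH) */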
1691        gpiod_set_value_cansleep(xdev->rst_gpio, 0x0);
1692
1693        io = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1694        xdev->regs = devm_ioremap_resource(&pdev->dev, io);
1695        if (IS_ERR(xdev->regs))
1696                return PTR_ERR(xdev->regs);
1697
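        /* The three-plane capable IP allows larger maximum frame dimensions */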
1698        if (xdev->cfg->flags & XILINX_THREE_PLANES_PROP)
1699                max_height = 8640;
1700        else
1701                max_height = 4320;
1702
1703        err = of_property_read_u32(node, "xlnx,max-height", &xdev->max_height);
1704        if (err < 0) {
1705                dev_err(xdev->dev, "xlnx,max-height is missing!\n");
1706                return -EINVAL;
1707        } else if (xdev->max_height > max_height ||
1708                   xdev->max_height < XILINX_FRMBUF_MIN_HEIGHT) {
1709                dev_err(&pdev->dev, "Invalid height in dt\n");
1710                return -EINVAL;
1711        }
1712
1713        if (xdev->cfg->flags & XILINX_THREE_PLANES_PROP)
1714                max_width = 15360;
1715        else
1716                max_width = 8192;
1717
1718        err = of_property_read_u32(node, "xlnx,max-width", &xdev->max_width);
1719        if (err < 0) {
1720                dev_err(xdev->dev, "xlnx,max-width is missing!\n");
1721                return -EINVAL;
1722        } else if (xdev->max_width > max_width ||
1723                   xdev->max_width < XILINX_FRMBUF_MIN_WIDTH) {
1724                dev_err(&pdev->dev, "Invalid width in dt\n");
1725                return -EINVAL;
1726        }
1727
1728        /* Initialize the DMA engine */
1729        if (xdev->cfg->flags & XILINX_PPC_PROP) {
1730                err = of_property_read_u32(node, "xlnx,pixels-per-clock", &xdev->ppc);
1731                if (err || (xdev->ppc != 1 && xdev->ppc != 2 &&
1732                            xdev->ppc != 4 && xdev->ppc != 8)) {
1733                        dev_err(&pdev->dev, "missing or invalid pixels per clock dts prop\n");
1734                        return err ? err : -EINVAL;
1735                }
1736                err = of_property_read_u32(node, "xlnx,dma-align", &align);
1737                if (err)
1738                        align = xdev->ppc * XILINX_FRMBUF_ALIGN_MUL;
1739
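                /*
                 * The alignment must be a power of two and no smaller than
                 * ppc * XILINX_FRMBUF_ALIGN_MUL bytes.
                 */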
1740                if (align < (xdev->ppc * XILINX_FRMBUF_ALIGN_MUL) ||
1741                    ffs(align) != fls(align)) {
1742                        dev_err(&pdev->dev, "invalid dma align dts prop\n");
1743                        return -EINVAL;
1744                }
1745        } else {
1746                align = 16;
1747        }
1748
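        /* copy_align is the log2 of the required byte alignment */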
1749        xdev->common.copy_align = (enum dmaengine_alignment)(fls(align) - 1);
1750        xdev->common.dev = &pdev->dev;
1751
1752        if (xdev->cfg->flags & XILINX_CLK_PROP) {
1753                err = clk_prepare_enable(xdev->ap_clk);
1754                if (err) {
1755                        dev_err(&pdev->dev, "failed to enable ap_clk (%d)\n",
1756                                err);
1757                        return err;
1758                }
1759        }
1760
1761        INIT_LIST_HEAD(&xdev->common.channels);
1762        dma_cap_set(DMA_SLAVE, xdev->common.cap_mask);
1763        dma_cap_set(DMA_PRIVATE, xdev->common.cap_mask);
1764
1765        /* Initialize the channels */
1766        err = xilinx_frmbuf_chan_probe(xdev, node);
1767        if (err < 0)
1768                goto disable_clk;
1769
1770        xdev->chan.direction = dma_dir;
1771
1772        if (xdev->chan.direction == DMA_DEV_TO_MEM) {
1773                xdev->common.directions = BIT(DMA_DEV_TO_MEM);
1774                dev_info(&pdev->dev, "Xilinx AXI frmbuf DMA_DEV_TO_MEM\n");
1775        } else if (xdev->chan.direction == DMA_MEM_TO_DEV) {
1776                xdev->common.directions = BIT(DMA_MEM_TO_DEV);
1777                dev_info(&pdev->dev, "Xilinx AXI frmbuf DMA_MEM_TO_DEV\n");
1778        } else {
1779                err = -EINVAL;
1780                goto remove_chan;
1781        }
1782
1783        /* read supported video formats and update internal table */
1784        hw_vid_fmt_cnt = of_property_count_strings(node, "xlnx,vid-formats");
        if (hw_vid_fmt_cnt > (int)ARRAY_SIZE(xilinx_frmbuf_formats))
                hw_vid_fmt_cnt = ARRAY_SIZE(xilinx_frmbuf_formats);
1785
1786        err = of_property_read_string_array(node, "xlnx,vid-formats",
1787                                            vid_fmts, hw_vid_fmt_cnt);
1788        if (err < 0) {
1789                dev_err(&pdev->dev,
1790                        "Missing or invalid xlnx,vid-formats dts prop\n");
1791                goto remove_chan;
1792        }
1793
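        /* Enable only the DT-listed formats that this driver also recognizes */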
1794        for (i = 0; i < hw_vid_fmt_cnt; i++) {
1795                const char *vid_fmt_name = vid_fmts[i];
1796
1797                for (j = 0; j < ARRAY_SIZE(xilinx_frmbuf_formats); j++) {
1798                        const char *dts_name =
1799                                xilinx_frmbuf_formats[j].dts_name;
1800
1801                        if (strcmp(vid_fmt_name, dts_name))
1802                                continue;
1803
1804                        xdev->enabled_vid_fmts |=
1805                                xilinx_frmbuf_formats[j].fmt_bitmask;
1806                }
1807        }
1808
1809        /* Determine supported vid framework formats */
1810        frmbuf_init_format_array(xdev);
1811
1812        xdev->common.device_alloc_chan_resources =
1813                                xilinx_frmbuf_alloc_chan_resources;
1814        xdev->common.device_free_chan_resources =
1815                                xilinx_frmbuf_free_chan_resources;
1816        xdev->common.device_prep_interleaved_dma =
1817                                xilinx_frmbuf_dma_prep_interleaved;
1818        xdev->common.device_terminate_all = xilinx_frmbuf_terminate_all;
1819        xdev->common.device_synchronize = xilinx_frmbuf_synchronize;
1820        xdev->common.device_tx_status = xilinx_frmbuf_tx_status;
1821        xdev->common.device_issue_pending = xilinx_frmbuf_issue_pending;
1822
1823        platform_set_drvdata(pdev, xdev);
1824
1825        /* Register the DMA engine with the core */
1826        err = dma_async_device_register(&xdev->common);
        if (err) {
                dev_err(&pdev->dev, "failed to register DMA device\n");
                goto remove_chan;
        }
1827
1828        err = of_dma_controller_register(node, of_dma_xilinx_xlate, xdev);
1829        if (err < 0) {
1830                dev_err(&pdev->dev, "Unable to register DMA to DT\n");
1831                goto error;
1832        }
1833
1834        dev_info(&pdev->dev, "Xilinx AXI FrameBuffer Engine Driver probed\n");
1835
1836        return 0;
1837error:
1838        dma_async_device_unregister(&xdev->common);
1839remove_chan:
1840        xilinx_frmbuf_chan_remove(&xdev->chan);
1841disable_clk:
1842        clk_disable_unprepare(xdev->ap_clk);
1843        return err;
1844}
1845
1846/**
1847 * xilinx_frmbuf_remove - Driver remove function
1848 * @pdev: Pointer to the platform_device structure
1849 *
1850 * Return: Always '0'
1851 */
1852static int xilinx_frmbuf_remove(struct platform_device *pdev)
1853{
1854        struct xilinx_frmbuf_device *xdev = platform_get_drvdata(pdev);
1855
1856        of_dma_controller_free(pdev->dev.of_node);
1857
1858        dma_async_device_unregister(&xdev->common);
1859        xilinx_frmbuf_chan_remove(&xdev->chan);
1860        clk_disable_unprepare(xdev->ap_clk);
1861
1862        return 0;
1863}
1864
1865MODULE_DEVICE_TABLE(of, xilinx_frmbuf_of_ids);
1866
1867static struct platform_driver xilinx_frmbuf_driver = {
1868        .driver = {
1869                .name = "xilinx-frmbuf",
1870                .of_match_table = xilinx_frmbuf_of_ids,
1871        },
1872        .probe = xilinx_frmbuf_probe,
1873        .remove = xilinx_frmbuf_remove,
1874};
1875
1876module_platform_driver(xilinx_frmbuf_driver);
1877
1878MODULE_AUTHOR("Xilinx, Inc.");
1879MODULE_DESCRIPTION("Xilinx Framebuffer driver");
1880MODULE_LICENSE("GPL v2");
1881