linux/drivers/media/platform/vsp1/vsp1_video.c
/*
 * vsp1_video.c  --  R-Car VSP1 Video Node
 *
 * Copyright (C) 2013 Renesas Corporation
 *
 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/v4l2-mediabus.h>
#include <linux/videodev2.h>

#include <media/media-entity.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>

#include "vsp1.h"
#include "vsp1_entity.h"
#include "vsp1_rwpf.h"
#include "vsp1_video.h"

#define VSP1_VIDEO_DEF_FORMAT           V4L2_PIX_FMT_YUYV
#define VSP1_VIDEO_DEF_WIDTH            1024
#define VSP1_VIDEO_DEF_HEIGHT           768

#define VSP1_VIDEO_MIN_WIDTH            2U
#define VSP1_VIDEO_MAX_WIDTH            8190U
#define VSP1_VIDEO_MIN_HEIGHT           2U
#define VSP1_VIDEO_MAX_HEIGHT           8190U

/* -----------------------------------------------------------------------------
 * Helper functions
 */

static const struct vsp1_format_info vsp1_video_formats[] = {
        { V4L2_PIX_FMT_RGB332, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_RGB_332, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 8, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_RGB444, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_XRGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS,
          1, { 16, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_RGB555, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_XRGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS,
          1, { 16, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_RGB565, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_RGB_565, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS,
          1, { 16, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_BGR24, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_BGR_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 24, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_RGB24, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_RGB_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 24, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_BGR32, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
          1, { 32, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_RGB32, V4L2_MBUS_FMT_ARGB8888_1X32,
          VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 32, 0, 0 }, false, false, 1, 1 },
        { V4L2_PIX_FMT_UYVY, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 16, 0, 0 }, false, false, 2, 1 },
        { V4L2_PIX_FMT_VYUY, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 16, 0, 0 }, false, true, 2, 1 },
        { V4L2_PIX_FMT_YUYV, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 16, 0, 0 }, true, false, 2, 1 },
        { V4L2_PIX_FMT_YVYU, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          1, { 16, 0, 0 }, true, true, 2, 1 },
        { V4L2_PIX_FMT_NV12M, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          2, { 8, 16, 0 }, false, false, 2, 2 },
        { V4L2_PIX_FMT_NV21M, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          2, { 8, 16, 0 }, false, true, 2, 2 },
        { V4L2_PIX_FMT_NV16M, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          2, { 8, 16, 0 }, false, false, 2, 1 },
        { V4L2_PIX_FMT_NV61M, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          2, { 8, 16, 0 }, false, true, 2, 1 },
        { V4L2_PIX_FMT_YUV420M, V4L2_MBUS_FMT_AYUV8_1X32,
          VI6_FMT_Y_U_V_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
          3, { 8, 8, 8 }, false, false, 2, 2 },
};
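
/*
 * Reading aid for the table above: each entry lists, in order, the V4L2 4CC,
 * the matching media bus code, the hardware format and data swap settings,
 * the number of memory planes with their per-plane bits per pixel, what
 * appear to be two component-ordering flags (they only differ between the
 * UYVY/VYUY/YUYV/YVYU entries), and the horizontal and vertical subsampling
 * factors. The authoritative field names are those of struct
 * vsp1_format_info in vsp1.h.
 */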

/*
 * vsp1_get_format_info - Retrieve format information for a 4CC
 * @fourcc: the format 4CC
 *
 * Return a pointer to the format information structure corresponding to the
 * given V4L2 format 4CC, or NULL if no corresponding format can be found.
 */
static const struct vsp1_format_info *vsp1_get_format_info(u32 fourcc)
{
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(vsp1_video_formats); ++i) {
                const struct vsp1_format_info *info = &vsp1_video_formats[i];

                if (info->fourcc == fourcc)
                        return info;
        }

        return NULL;
}
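
/*
 * Illustrative use: looking up V4L2_PIX_FMT_NV12M returns the two-plane
 * 4:2:0 entry of the table above, while an unknown 4CC returns NULL and
 * __vsp1_video_try_format() below falls back to VSP1_VIDEO_DEF_FORMAT.
 */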

static struct v4l2_subdev *
vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
{
        struct media_pad *remote;

        remote = media_entity_remote_pad(local);
        if (remote == NULL ||
            media_entity_type(remote->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
                return NULL;

        if (pad)
                *pad = remote->index;

        return media_entity_to_v4l2_subdev(remote->entity);
}

static int vsp1_video_verify_format(struct vsp1_video *video)
{
        struct v4l2_subdev_format fmt;
        struct v4l2_subdev *subdev;
        int ret;

        subdev = vsp1_video_remote_subdev(&video->pad, &fmt.pad);
        if (subdev == NULL)
                return -EINVAL;

        fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
        ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
        if (ret < 0)
                return ret == -ENOIOCTLCMD ? -EINVAL : ret;

        if (video->fmtinfo->mbus != fmt.format.code ||
            video->format.height != fmt.format.height ||
            video->format.width != fmt.format.width)
                return -EINVAL;

        return 0;
}

static int __vsp1_video_try_format(struct vsp1_video *video,
                                   struct v4l2_pix_format_mplane *pix,
                                   const struct vsp1_format_info **fmtinfo)
{
        const struct vsp1_format_info *info;
        unsigned int width = pix->width;
        unsigned int height = pix->height;
        unsigned int i;

        /* Retrieve format information and select the default format if the
         * requested format isn't supported.
         */
        info = vsp1_get_format_info(pix->pixelformat);
        if (info == NULL)
                info = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);

        pix->pixelformat = info->fourcc;
        pix->colorspace = V4L2_COLORSPACE_SRGB;
        pix->field = V4L2_FIELD_NONE;
        memset(pix->reserved, 0, sizeof(pix->reserved));

        /* Align the width and height for YUV 4:2:2 and 4:2:0 formats. */
        width = round_down(width, info->hsub);
        height = round_down(height, info->vsub);

        /* Clamp the width and height. */
        pix->width = clamp(width, VSP1_VIDEO_MIN_WIDTH, VSP1_VIDEO_MAX_WIDTH);
        pix->height = clamp(height, VSP1_VIDEO_MIN_HEIGHT,
                            VSP1_VIDEO_MAX_HEIGHT);

        /* Compute and clamp the stride and image size. While not documented in
         * the datasheet, strides not aligned to a multiple of 128 bytes result
         * in image corruption.
         */
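        /* Worked example (illustrative numbers, not from the datasheet): a
         * 1920x1080 NV12M request with bytesperline left at 0 yields, for
         * plane 0, bpl = 1920 * 8 / 8 = 1920 (already a multiple of 128) and
         * sizeimage = 1920 * 1080 = 2073600, and for plane 1,
         * bpl = 1920 / 2 * 16 / 8 = 1920 and
         * sizeimage = 1920 * 1080 / 2 = 1036800.
         */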
        for (i = 0; i < min(info->planes, 2U); ++i) {
                unsigned int hsub = i > 0 ? info->hsub : 1;
                unsigned int vsub = i > 0 ? info->vsub : 1;
                unsigned int align = 128;
                unsigned int bpl;

                bpl = clamp_t(unsigned int, pix->plane_fmt[i].bytesperline,
                              pix->width / hsub * info->bpp[i] / 8,
                              round_down(65535U, align));

                pix->plane_fmt[i].bytesperline = round_up(bpl, align);
                pix->plane_fmt[i].sizeimage = pix->plane_fmt[i].bytesperline
                                            * pix->height / vsub;
        }

        if (info->planes == 3) {
                /* The second and third planes must have the same stride. */
                pix->plane_fmt[2].bytesperline = pix->plane_fmt[1].bytesperline;
                pix->plane_fmt[2].sizeimage = pix->plane_fmt[1].sizeimage;
        }

        pix->num_planes = info->planes;

        if (fmtinfo)
                *fmtinfo = info;

        return 0;
}

static bool
vsp1_video_format_adjust(struct vsp1_video *video,
                         const struct v4l2_pix_format_mplane *format,
                         struct v4l2_pix_format_mplane *adjust)
{
        unsigned int i;

        *adjust = *format;
        __vsp1_video_try_format(video, adjust, NULL);

        if (format->width != adjust->width ||
            format->height != adjust->height ||
            format->pixelformat != adjust->pixelformat ||
            format->num_planes != adjust->num_planes)
                return false;

        for (i = 0; i < format->num_planes; ++i) {
                if (format->plane_fmt[i].bytesperline !=
                    adjust->plane_fmt[i].bytesperline)
                        return false;

                adjust->plane_fmt[i].sizeimage =
                        max(adjust->plane_fmt[i].sizeimage,
                            format->plane_fmt[i].sizeimage);
        }

        return true;
}

/* -----------------------------------------------------------------------------
 * Pipeline Management
 */

static int vsp1_pipeline_validate_branch(struct vsp1_rwpf *input,
                                         struct vsp1_rwpf *output)
{
        struct vsp1_entity *entity;
        unsigned int entities = 0;
        struct media_pad *pad;
        bool uds_found = false;

        pad = media_entity_remote_pad(&input->entity.pads[RWPF_PAD_SOURCE]);

        while (1) {
                if (pad == NULL)
                        return -EPIPE;

                /* We've reached a video node, that shouldn't have happened. */
                if (media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
                        return -EPIPE;

                entity = to_vsp1_entity(media_entity_to_v4l2_subdev(pad->entity));

                /* We've reached the WPF, we're done. */
                if (entity->type == VSP1_ENTITY_WPF)
                        break;

                /* Ensure the branch has no loop. */
                if (entities & (1 << entity->subdev.entity.id))
                        return -EPIPE;

                entities |= 1 << entity->subdev.entity.id;

                /* UDS can't be chained. */
                if (entity->type == VSP1_ENTITY_UDS) {
                        if (uds_found)
                                return -EPIPE;
                        uds_found = true;
                }

                /* Follow the source link. The link setup operations ensure
                 * that the output fan-out can't be more than one, there is thus
                 * no need to verify here that only a single source link is
                 * activated.
                 */
                pad = &entity->pads[entity->source_pad];
                pad = media_entity_remote_pad(pad);
        }

        /* The last entity must be the output WPF. */
        if (entity != &output->entity)
                return -EPIPE;

        return 0;
}
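
/*
 * For instance, a branch such as RPF -> UDS -> WPF passes the checks above,
 * while RPF -> UDS -> UDS -> WPF (two scalers) or a branch that loops back
 * through an already visited entity is rejected with -EPIPE.
 */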

static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
                                  struct vsp1_video *video)
{
        struct media_entity_graph graph;
        struct media_entity *entity = &video->video.entity;
        struct media_device *mdev = entity->parent;
        unsigned int i;
        int ret;

        mutex_lock(&mdev->graph_mutex);

        /* Walk the graph to locate the entities and video nodes. */
        media_entity_graph_walk_start(&graph, entity);

        while ((entity = media_entity_graph_walk_next(&graph))) {
                struct v4l2_subdev *subdev;
                struct vsp1_rwpf *rwpf;
                struct vsp1_entity *e;

                if (media_entity_type(entity) != MEDIA_ENT_T_V4L2_SUBDEV) {
                        pipe->num_video++;
                        continue;
                }

                subdev = media_entity_to_v4l2_subdev(entity);
                e = to_vsp1_entity(subdev);
                list_add_tail(&e->list_pipe, &pipe->entities);

                if (e->type == VSP1_ENTITY_RPF) {
                        rwpf = to_rwpf(subdev);
                        pipe->inputs[pipe->num_inputs++] = rwpf;
                        rwpf->video.pipe_index = pipe->num_inputs;
                } else if (e->type == VSP1_ENTITY_WPF) {
                        rwpf = to_rwpf(subdev);
                        pipe->output = to_rwpf(subdev);
                        rwpf->video.pipe_index = 0;
                } else if (e->type == VSP1_ENTITY_LIF) {
                        pipe->lif = e;
                }
        }

        mutex_unlock(&mdev->graph_mutex);

        /* We need one output and at least one input. */
        if (pipe->num_inputs == 0 || !pipe->output) {
                ret = -EPIPE;
                goto error;
        }

        /* Follow links downstream for each input and make sure the graph
         * contains no loop and that all branches end at the output WPF.
         */
        for (i = 0; i < pipe->num_inputs; ++i) {
                ret = vsp1_pipeline_validate_branch(pipe->inputs[i],
                                                    pipe->output);
                if (ret < 0)
                        goto error;
        }

        return 0;

error:
        INIT_LIST_HEAD(&pipe->entities);
        pipe->buffers_ready = 0;
        pipe->num_video = 0;
        pipe->num_inputs = 0;
        pipe->output = NULL;
        pipe->lif = NULL;
        return ret;
}

static int vsp1_pipeline_init(struct vsp1_pipeline *pipe,
                              struct vsp1_video *video)
{
        int ret;

        mutex_lock(&pipe->lock);

        /* If we're the first user validate and initialize the pipeline. */
        if (pipe->use_count == 0) {
                ret = vsp1_pipeline_validate(pipe, video);
                if (ret < 0)
                        goto done;
        }

        pipe->use_count++;
        ret = 0;

done:
        mutex_unlock(&pipe->lock);
        return ret;
}

static void vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
{
        mutex_lock(&pipe->lock);

        /* If we're the last user clean up the pipeline. */
        if (--pipe->use_count == 0) {
                INIT_LIST_HEAD(&pipe->entities);
                pipe->state = VSP1_PIPELINE_STOPPED;
                pipe->buffers_ready = 0;
                pipe->num_video = 0;
                pipe->num_inputs = 0;
                pipe->output = NULL;
                pipe->lif = NULL;
        }

        mutex_unlock(&pipe->lock);
}

static void vsp1_pipeline_run(struct vsp1_pipeline *pipe)
{
        struct vsp1_device *vsp1 = pipe->output->entity.vsp1;

        vsp1_write(vsp1, VI6_CMD(pipe->output->entity.index), VI6_CMD_STRCMD);
        pipe->state = VSP1_PIPELINE_RUNNING;
        pipe->buffers_ready = 0;
}
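
/*
 * Writing VI6_CMD_STRCMD starts processing of a single frame. Completion is
 * signalled through the frame end interrupt, whose handler calls
 * vsp1_pipeline_frame_end() below to restart the pipeline once every video
 * node has a new buffer ready (see vsp1_pipeline_ready()).
 */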

static int vsp1_pipeline_stop(struct vsp1_pipeline *pipe)
{
        struct vsp1_entity *entity;
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&pipe->irqlock, flags);
        pipe->state = VSP1_PIPELINE_STOPPING;
        spin_unlock_irqrestore(&pipe->irqlock, flags);

        ret = wait_event_timeout(pipe->wq, pipe->state == VSP1_PIPELINE_STOPPED,
                                 msecs_to_jiffies(500));
        ret = ret == 0 ? -ETIMEDOUT : 0;

        list_for_each_entry(entity, &pipe->entities, list_pipe) {
                if (entity->route)
                        vsp1_write(entity->vsp1, entity->route,
                                   VI6_DPR_NODE_UNUSED);

                v4l2_subdev_call(&entity->subdev, video, s_stream, 0);
        }

        return ret;
}

static bool vsp1_pipeline_ready(struct vsp1_pipeline *pipe)
{
        unsigned int mask;

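        /* buffers_ready holds one bit per video node: bit 0 for the WPF
         * output node and bits 1..num_inputs for the RPF input nodes, as
         * assigned through pipe_index in vsp1_pipeline_validate(). For
         * instance, with two inputs and no LIF the expected mask is 0x7;
         * when the WPF feeds the LIF there is no output buffer to wait for,
         * so bit 0 is left out of the mask.
         */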
        mask = ((1 << pipe->num_inputs) - 1) << 1;
        if (!pipe->lif)
                mask |= 1 << 0;

        return pipe->buffers_ready == mask;
}

/*
 * vsp1_video_complete_buffer - Complete the current buffer
 * @video: the video node
 *
 * This function completes the current buffer by filling its sequence number,
 * time stamp and payload size, and hands it back to the videobuf core.
 *
 * Return the next queued buffer or NULL if the queue is empty.
 */
static struct vsp1_video_buffer *
vsp1_video_complete_buffer(struct vsp1_video *video)
{
        struct vsp1_video_buffer *next = NULL;
        struct vsp1_video_buffer *done;
        unsigned long flags;
        unsigned int i;

        spin_lock_irqsave(&video->irqlock, flags);

        if (list_empty(&video->irqqueue)) {
                spin_unlock_irqrestore(&video->irqlock, flags);
                return NULL;
        }

        done = list_first_entry(&video->irqqueue,
                                struct vsp1_video_buffer, queue);
        list_del(&done->queue);

        if (!list_empty(&video->irqqueue))
                next = list_first_entry(&video->irqqueue,
                                        struct vsp1_video_buffer, queue);

        spin_unlock_irqrestore(&video->irqlock, flags);

        done->buf.v4l2_buf.sequence = video->sequence++;
        v4l2_get_timestamp(&done->buf.v4l2_buf.timestamp);
        for (i = 0; i < done->buf.num_planes; ++i)
                vb2_set_plane_payload(&done->buf, i, done->length[i]);
        vb2_buffer_done(&done->buf, VB2_BUF_STATE_DONE);

        return next;
}

static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
                                 struct vsp1_video *video)
{
        struct vsp1_video_buffer *buf;
        unsigned long flags;

        buf = vsp1_video_complete_buffer(video);
        if (buf == NULL)
                return;

        spin_lock_irqsave(&pipe->irqlock, flags);

        video->ops->queue(video, buf);
        pipe->buffers_ready |= 1 << video->pipe_index;

        spin_unlock_irqrestore(&pipe->irqlock, flags);
}

void vsp1_pipeline_frame_end(struct vsp1_pipeline *pipe)
{
        unsigned long flags;
        unsigned int i;

        if (pipe == NULL)
                return;

        /* Complete buffers on all video nodes. */
        for (i = 0; i < pipe->num_inputs; ++i)
                vsp1_video_frame_end(pipe, &pipe->inputs[i]->video);

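        /* When the pipeline drives the LIF (display) the WPF output goes to
         * the display instead of memory, so the output video node has no
         * capture buffer to complete.
         */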
        if (!pipe->lif)
                vsp1_video_frame_end(pipe, &pipe->output->video);

        spin_lock_irqsave(&pipe->irqlock, flags);

        /* If a stop has been requested, mark the pipeline as stopped and
         * return.
         */
        if (pipe->state == VSP1_PIPELINE_STOPPING) {
                pipe->state = VSP1_PIPELINE_STOPPED;
                wake_up(&pipe->wq);
                goto done;
        }

        /* Restart the pipeline if ready. */
        if (vsp1_pipeline_ready(pipe))
                vsp1_pipeline_run(pipe);

done:
        spin_unlock_irqrestore(&pipe->irqlock, flags);
}

/* -----------------------------------------------------------------------------
 * videobuf2 Queue Operations
 */

static int
vsp1_video_queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
                     unsigned int *nbuffers, unsigned int *nplanes,
                     unsigned int sizes[], void *alloc_ctxs[])
{
        struct vsp1_video *video = vb2_get_drv_priv(vq);
        const struct v4l2_pix_format_mplane *format;
        struct v4l2_pix_format_mplane pix_mp;
        unsigned int i;

        if (fmt) {
                /* Make sure the format is valid and adjust the sizeimage field
                 * if needed.
                 */
                if (!vsp1_video_format_adjust(video, &fmt->fmt.pix_mp, &pix_mp))
                        return -EINVAL;

                format = &pix_mp;
        } else {
                format = &video->format;
        }

        *nplanes = format->num_planes;

        for (i = 0; i < format->num_planes; ++i) {
                sizes[i] = format->plane_fmt[i].sizeimage;
                alloc_ctxs[i] = video->alloc_ctx;
        }

        return 0;
}

static int vsp1_video_buffer_prepare(struct vb2_buffer *vb)
{
        struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
        struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vb);
        const struct v4l2_pix_format_mplane *format = &video->format;
        unsigned int i;

        if (vb->num_planes < format->num_planes)
                return -EINVAL;

        buf->video = video;

        for (i = 0; i < vb->num_planes; ++i) {
                buf->addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
                buf->length[i] = vb2_plane_size(vb, i);

                if (buf->length[i] < format->plane_fmt[i].sizeimage)
                        return -EINVAL;
        }

        return 0;
}

static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
{
        struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
        struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
        struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vb);
        unsigned long flags;
        bool empty;

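        /* Only the buffer at the head of the IRQ queue is handed to the
         * hardware here. Buffers queued behind it are picked up one at a
         * time by vsp1_video_frame_end() when the previous frame completes.
         */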
        spin_lock_irqsave(&video->irqlock, flags);
        empty = list_empty(&video->irqqueue);
        list_add_tail(&buf->queue, &video->irqqueue);
        spin_unlock_irqrestore(&video->irqlock, flags);

        if (!empty)
                return;

        spin_lock_irqsave(&pipe->irqlock, flags);

        video->ops->queue(video, buf);
        pipe->buffers_ready |= 1 << video->pipe_index;

        if (vb2_is_streaming(&video->queue) &&
            vsp1_pipeline_ready(pipe))
                vsp1_pipeline_run(pipe);

        spin_unlock_irqrestore(&pipe->irqlock, flags);
}

static void vsp1_entity_route_setup(struct vsp1_entity *source)
{
        struct vsp1_entity *sink;

        if (source->route == 0)
                return;

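        /* Program the source entity's routing register with the DPR node ID
         * of the entity connected to its source pad, so that data flows
         * along the active media controller link.
         */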
        sink = container_of(source->sink, struct vsp1_entity, subdev.entity);
        vsp1_write(source->vsp1, source->route, sink->id);
}

static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
{
        struct vsp1_video *video = vb2_get_drv_priv(vq);
        struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
        struct vsp1_entity *entity;
        unsigned long flags;
        int ret;

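        /* Routing and subdev s_stream setup is performed by the last video
         * node of the pipeline to start streaming: at that point
         * stream_count equals num_video - 1, i.e. all other video nodes are
         * already streaming.
         */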
        mutex_lock(&pipe->lock);
        if (pipe->stream_count == pipe->num_video - 1) {
                list_for_each_entry(entity, &pipe->entities, list_pipe) {
                        vsp1_entity_route_setup(entity);

                        ret = v4l2_subdev_call(&entity->subdev, video,
                                               s_stream, 1);
                        if (ret < 0) {
                                mutex_unlock(&pipe->lock);
                                return ret;
                        }
                }
        }

        pipe->stream_count++;
        mutex_unlock(&pipe->lock);

        spin_lock_irqsave(&pipe->irqlock, flags);
        if (vsp1_pipeline_ready(pipe))
                vsp1_pipeline_run(pipe);
        spin_unlock_irqrestore(&pipe->irqlock, flags);

        return 0;
}

static int vsp1_video_stop_streaming(struct vb2_queue *vq)
{
        struct vsp1_video *video = vb2_get_drv_priv(vq);
        struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
        unsigned long flags;
        int ret;

        mutex_lock(&pipe->lock);
        if (--pipe->stream_count == 0) {
                /* Stop the pipeline. */
                ret = vsp1_pipeline_stop(pipe);
                if (ret == -ETIMEDOUT)
                        dev_err(video->vsp1->dev, "pipeline stop timeout\n");
        }
        mutex_unlock(&pipe->lock);

        vsp1_pipeline_cleanup(pipe);
        media_entity_pipeline_stop(&video->video.entity);

        /* Remove all buffers from the IRQ queue. */
        spin_lock_irqsave(&video->irqlock, flags);
        INIT_LIST_HEAD(&video->irqqueue);
        spin_unlock_irqrestore(&video->irqlock, flags);

        return 0;
}

static struct vb2_ops vsp1_video_queue_qops = {
        .queue_setup = vsp1_video_queue_setup,
        .buf_prepare = vsp1_video_buffer_prepare,
        .buf_queue = vsp1_video_buffer_queue,
        .wait_prepare = vb2_ops_wait_prepare,
        .wait_finish = vb2_ops_wait_finish,
        .start_streaming = vsp1_video_start_streaming,
        .stop_streaming = vsp1_video_stop_streaming,
};

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

static int
vsp1_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
        struct v4l2_fh *vfh = file->private_data;
        struct vsp1_video *video = to_vsp1_video(vfh->vdev);

        cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
                          | V4L2_CAP_VIDEO_CAPTURE_MPLANE
                          | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

        if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
                cap->device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE
                                 | V4L2_CAP_STREAMING;
        else
                cap->device_caps = V4L2_CAP_VIDEO_OUTPUT_MPLANE
                                 | V4L2_CAP_STREAMING;

        strlcpy(cap->driver, "vsp1", sizeof(cap->driver));
        strlcpy(cap->card, video->video.name, sizeof(cap->card));
        snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
                 dev_name(video->vsp1->dev));

        return 0;
}

static int
vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
        struct v4l2_fh *vfh = file->private_data;
        struct vsp1_video *video = to_vsp1_video(vfh->vdev);

        if (format->type != video->queue.type)
                return -EINVAL;

        mutex_lock(&video->lock);
        format->fmt.pix_mp = video->format;
        mutex_unlock(&video->lock);

        return 0;
}

static int
vsp1_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
        struct v4l2_fh *vfh = file->private_data;
        struct vsp1_video *video = to_vsp1_video(vfh->vdev);

        if (format->type != video->queue.type)
                return -EINVAL;

        return __vsp1_video_try_format(video, &format->fmt.pix_mp, NULL);
}

static int
vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
        struct v4l2_fh *vfh = file->private_data;
        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
        const struct vsp1_format_info *info;
        int ret;

        if (format->type != video->queue.type)
                return -EINVAL;

        ret = __vsp1_video_try_format(video, &format->fmt.pix_mp, &info);
        if (ret < 0)
                return ret;

        mutex_lock(&video->lock);

        if (vb2_is_busy(&video->queue)) {
                ret = -EBUSY;
                goto done;
        }

        video->format = format->fmt.pix_mp;
        video->fmtinfo = info;

done:
        mutex_unlock(&video->lock);
        return ret;
}

static int
vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
        struct v4l2_fh *vfh = file->private_data;
        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
        struct vsp1_pipeline *pipe;
        int ret;

        if (video->queue.owner && video->queue.owner != file->private_data)
                return -EBUSY;

        video->sequence = 0;

        /* Start streaming on the pipeline. No link touching an entity in the
         * pipeline can be activated or deactivated once streaming is started.
         *
         * Use the VSP1 pipeline object embedded in the first video object that
         * starts streaming.
         */
        pipe = video->video.entity.pipe
             ? to_vsp1_pipeline(&video->video.entity) : &video->pipe;

        ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
        if (ret < 0)
                return ret;

        /* Verify that the configured format matches the output of the connected
         * subdev.
         */
        ret = vsp1_video_verify_format(video);
        if (ret < 0)
                goto err_stop;

        ret = vsp1_pipeline_init(pipe, video);
        if (ret < 0)
                goto err_stop;

        /* Start the queue. */
        ret = vb2_streamon(&video->queue, type);
        if (ret < 0)
                goto err_cleanup;

        return 0;

err_cleanup:
        vsp1_pipeline_cleanup(pipe);
err_stop:
        media_entity_pipeline_stop(&video->video.entity);
        return ret;
}

static const struct v4l2_ioctl_ops vsp1_video_ioctl_ops = {
        .vidioc_querycap                = vsp1_video_querycap,
        .vidioc_g_fmt_vid_cap_mplane    = vsp1_video_get_format,
        .vidioc_s_fmt_vid_cap_mplane    = vsp1_video_set_format,
        .vidioc_try_fmt_vid_cap_mplane  = vsp1_video_try_format,
        .vidioc_g_fmt_vid_out_mplane    = vsp1_video_get_format,
        .vidioc_s_fmt_vid_out_mplane    = vsp1_video_set_format,
        .vidioc_try_fmt_vid_out_mplane  = vsp1_video_try_format,
        .vidioc_reqbufs                 = vb2_ioctl_reqbufs,
        .vidioc_querybuf                = vb2_ioctl_querybuf,
        .vidioc_qbuf                    = vb2_ioctl_qbuf,
        .vidioc_dqbuf                   = vb2_ioctl_dqbuf,
        .vidioc_create_bufs             = vb2_ioctl_create_bufs,
        .vidioc_prepare_buf             = vb2_ioctl_prepare_buf,
        .vidioc_streamon                = vsp1_video_streamon,
        .vidioc_streamoff               = vb2_ioctl_streamoff,
};

/* -----------------------------------------------------------------------------
 * V4L2 File Operations
 */

static int vsp1_video_open(struct file *file)
{
        struct vsp1_video *video = video_drvdata(file);
        struct v4l2_fh *vfh;
        int ret = 0;

        vfh = kzalloc(sizeof(*vfh), GFP_KERNEL);
        if (vfh == NULL)
                return -ENOMEM;

        v4l2_fh_init(vfh, &video->video);
        v4l2_fh_add(vfh);

        file->private_data = vfh;

        if (!vsp1_device_get(video->vsp1)) {
                ret = -EBUSY;
                v4l2_fh_del(vfh);
                kfree(vfh);
        }

        return ret;
}

static int vsp1_video_release(struct file *file)
{
        struct vsp1_video *video = video_drvdata(file);
        struct v4l2_fh *vfh = file->private_data;

        mutex_lock(&video->lock);
        if (video->queue.owner == vfh) {
                vb2_queue_release(&video->queue);
                video->queue.owner = NULL;
        }
        mutex_unlock(&video->lock);

        vsp1_device_put(video->vsp1);

        v4l2_fh_release(file);

        file->private_data = NULL;

        return 0;
}

static struct v4l2_file_operations vsp1_video_fops = {
        .owner = THIS_MODULE,
        .unlocked_ioctl = video_ioctl2,
        .open = vsp1_video_open,
        .release = vsp1_video_release,
        .poll = vb2_fop_poll,
        .mmap = vb2_fop_mmap,
};

/* -----------------------------------------------------------------------------
 * Initialization and Cleanup
 */

int vsp1_video_init(struct vsp1_video *video, struct vsp1_entity *rwpf)
{
        const char *direction;
        int ret;

        switch (video->type) {
        case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
                direction = "output";
                video->pad.flags = MEDIA_PAD_FL_SINK;
                break;

        case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
                direction = "input";
                video->pad.flags = MEDIA_PAD_FL_SOURCE;
                video->video.vfl_dir = VFL_DIR_TX;
                break;

        default:
                return -EINVAL;
        }

        video->rwpf = rwpf;

        mutex_init(&video->lock);
        spin_lock_init(&video->irqlock);
        INIT_LIST_HEAD(&video->irqqueue);

        mutex_init(&video->pipe.lock);
        spin_lock_init(&video->pipe.irqlock);
        INIT_LIST_HEAD(&video->pipe.entities);
        init_waitqueue_head(&video->pipe.wq);
        video->pipe.state = VSP1_PIPELINE_STOPPED;

        /* Initialize the media entity... */
        ret = media_entity_init(&video->video.entity, 1, &video->pad, 0);
        if (ret < 0)
                return ret;

        /* ... and the format ... */
        video->fmtinfo = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);
        video->format.pixelformat = video->fmtinfo->fourcc;
        video->format.colorspace = V4L2_COLORSPACE_SRGB;
        video->format.field = V4L2_FIELD_NONE;
        video->format.width = VSP1_VIDEO_DEF_WIDTH;
        video->format.height = VSP1_VIDEO_DEF_HEIGHT;
        video->format.num_planes = 1;
        video->format.plane_fmt[0].bytesperline =
                video->format.width * video->fmtinfo->bpp[0] / 8;
        video->format.plane_fmt[0].sizeimage =
                video->format.plane_fmt[0].bytesperline * video->format.height;
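        /* With the defaults above (YUYV, 1024x768, 16 bits per pixel) this
         * gives bytesperline = 1024 * 16 / 8 = 2048 bytes and
         * sizeimage = 2048 * 768 = 1572864 bytes.
         */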

        /* ... and the video node... */
        video->video.v4l2_dev = &video->vsp1->v4l2_dev;
        video->video.fops = &vsp1_video_fops;
        snprintf(video->video.name, sizeof(video->video.name), "%s %s",
                 rwpf->subdev.name, direction);
        video->video.vfl_type = VFL_TYPE_GRABBER;
        video->video.release = video_device_release_empty;
        video->video.ioctl_ops = &vsp1_video_ioctl_ops;

        video_set_drvdata(&video->video, video);

        /* ... and the buffers queue... */
        video->alloc_ctx = vb2_dma_contig_init_ctx(video->vsp1->dev);
        if (IS_ERR(video->alloc_ctx)) {
                ret = PTR_ERR(video->alloc_ctx);
                goto error;
        }

        video->queue.type = video->type;
        video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
        video->queue.lock = &video->lock;
        video->queue.drv_priv = video;
        video->queue.buf_struct_size = sizeof(struct vsp1_video_buffer);
        video->queue.ops = &vsp1_video_queue_qops;
        video->queue.mem_ops = &vb2_dma_contig_memops;
        video->queue.timestamp_type = V4L2_BUF_FLAG_TIMESTAMP_COPY;
        ret = vb2_queue_init(&video->queue);
        if (ret < 0) {
                dev_err(video->vsp1->dev, "failed to initialize vb2 queue\n");
                goto error;
        }

        /* ... and register the video device. */
        video->video.queue = &video->queue;
        ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
        if (ret < 0) {
                dev_err(video->vsp1->dev, "failed to register video device\n");
                goto error;
        }

        return 0;

error:
        vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
        vsp1_video_cleanup(video);
        return ret;
}

void vsp1_video_cleanup(struct vsp1_video *video)
{
        if (video_is_registered(&video->video))
                video_unregister_device(&video->video);

        vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
        media_entity_cleanup(&video->video.entity);
}
1070