linux/drivers/media/platform/vsp1/vsp1_video.c
   1// SPDX-License-Identifier: GPL-2.0+
   2/*
   3 * vsp1_video.c  --  R-Car VSP1 Video Node
   4 *
   5 * Copyright (C) 2013-2015 Renesas Electronics Corporation
   6 *
   7 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
   8 */
   9
  10#include <linux/list.h>
  11#include <linux/module.h>
  12#include <linux/mutex.h>
  13#include <linux/slab.h>
  14#include <linux/v4l2-mediabus.h>
  15#include <linux/videodev2.h>
  16#include <linux/wait.h>
  17
  18#include <media/media-entity.h>
  19#include <media/v4l2-dev.h>
  20#include <media/v4l2-fh.h>
  21#include <media/v4l2-ioctl.h>
  22#include <media/v4l2-subdev.h>
  23#include <media/videobuf2-v4l2.h>
  24#include <media/videobuf2-dma-contig.h>
  25
  26#include "vsp1.h"
  27#include "vsp1_brx.h"
  28#include "vsp1_dl.h"
  29#include "vsp1_entity.h"
  30#include "vsp1_hgo.h"
  31#include "vsp1_hgt.h"
  32#include "vsp1_pipe.h"
  33#include "vsp1_rwpf.h"
  34#include "vsp1_uds.h"
  35#include "vsp1_video.h"
  36
  37#define VSP1_VIDEO_DEF_FORMAT           V4L2_PIX_FMT_YUYV
  38#define VSP1_VIDEO_DEF_WIDTH            1024
  39#define VSP1_VIDEO_DEF_HEIGHT           768
  40
  41#define VSP1_VIDEO_MIN_WIDTH            2U
  42#define VSP1_VIDEO_MAX_WIDTH            8190U
  43#define VSP1_VIDEO_MIN_HEIGHT           2U
  44#define VSP1_VIDEO_MAX_HEIGHT           8190U
  45
  46/* -----------------------------------------------------------------------------
  47 * Helper functions
  48 */
  49
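     /*
      * vsp1_video_remote_subdev - Get the subdev connected to a video node pad
      *
      * Return the V4L2 subdevice at the remote end of the link connected to
      * @local, or NULL if the pad is unconnected or the remote entity is not a
      * subdev. When @pad is non-NULL, the remote pad index is stored in it.
      */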
  50static struct v4l2_subdev *
  51vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
  52{
  53        struct media_pad *remote;
  54
  55        remote = media_entity_remote_pad(local);
  56        if (!remote || !is_media_entity_v4l2_subdev(remote->entity))
  57                return NULL;
  58
  59        if (pad)
  60                *pad = remote->index;
  61
  62        return media_entity_to_v4l2_subdev(remote->entity);
  63}
  64
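     /*
      * Verify that the video node format matches the active format on the
      * connected subdev pad: the media bus code, width and height must all
      * match. Return 0 on success or a negative error code otherwise.
      */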
  65static int vsp1_video_verify_format(struct vsp1_video *video)
  66{
  67        struct v4l2_subdev_format fmt;
  68        struct v4l2_subdev *subdev;
  69        int ret;
  70
  71        subdev = vsp1_video_remote_subdev(&video->pad, &fmt.pad);
  72        if (subdev == NULL)
  73                return -EINVAL;
  74
  75        fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
  76        ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
  77        if (ret < 0)
  78                return ret == -ENOIOCTLCMD ? -EINVAL : ret;
  79
  80        if (video->rwpf->fmtinfo->mbus != fmt.format.code ||
  81            video->rwpf->format.height != fmt.format.height ||
  82            video->rwpf->format.width != fmt.format.width)
  83                return -EINVAL;
  84
  85        return 0;
  86}
  87
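     /*
      * Adjust the requested multiplanar pixel format to the hardware
      * constraints: map deprecated RGB formats to their XRGB equivalents, fall
      * back to the default format when the request isn't supported, align and
      * clamp the frame size, and compute the per-plane strides and image
      * sizes. When @fmtinfo is non-NULL, the selected format information is
      * returned through it.
      */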
  88static int __vsp1_video_try_format(struct vsp1_video *video,
  89                                   struct v4l2_pix_format_mplane *pix,
  90                                   const struct vsp1_format_info **fmtinfo)
  91{
  92        static const u32 xrgb_formats[][2] = {
  93                { V4L2_PIX_FMT_RGB444, V4L2_PIX_FMT_XRGB444 },
  94                { V4L2_PIX_FMT_RGB555, V4L2_PIX_FMT_XRGB555 },
  95                { V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_XBGR32 },
  96                { V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_XRGB32 },
  97        };
  98
  99        const struct vsp1_format_info *info;
 100        unsigned int width = pix->width;
 101        unsigned int height = pix->height;
 102        unsigned int i;
 103
 104        /*
 105         * Backward compatibility: replace deprecated RGB formats by their XRGB
 106         * equivalent. This selects the format older userspace applications want
 107         * while still exposing the new format.
 108         */
 109        for (i = 0; i < ARRAY_SIZE(xrgb_formats); ++i) {
 110                if (xrgb_formats[i][0] == pix->pixelformat) {
 111                        pix->pixelformat = xrgb_formats[i][1];
 112                        break;
 113                }
 114        }
 115
 116        /*
 117         * Retrieve format information and select the default format if the
 118         * requested format isn't supported.
 119         */
 120        info = vsp1_get_format_info(video->vsp1, pix->pixelformat);
 121        if (info == NULL)
 122                info = vsp1_get_format_info(video->vsp1, VSP1_VIDEO_DEF_FORMAT);
 123
 124        pix->pixelformat = info->fourcc;
 125        pix->colorspace = V4L2_COLORSPACE_SRGB;
 126        pix->field = V4L2_FIELD_NONE;
 127
 128        if (info->fourcc == V4L2_PIX_FMT_HSV24 ||
 129            info->fourcc == V4L2_PIX_FMT_HSV32)
 130                pix->hsv_enc = V4L2_HSV_ENC_256;
 131
 132        memset(pix->reserved, 0, sizeof(pix->reserved));
 133
 134        /* Align the width and height for YUV 4:2:2 and 4:2:0 formats. */
 135        width = round_down(width, info->hsub);
 136        height = round_down(height, info->vsub);
 137
 138        /* Clamp the width and height. */
 139        pix->width = clamp(width, VSP1_VIDEO_MIN_WIDTH, VSP1_VIDEO_MAX_WIDTH);
 140        pix->height = clamp(height, VSP1_VIDEO_MIN_HEIGHT,
 141                            VSP1_VIDEO_MAX_HEIGHT);
 142
 143        /*
 144         * Compute and clamp the stride and image size. While not documented in
 145         * the datasheet, strides not aligned to a multiple of 128 bytes result
 146         * in image corruption.
 147         */
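             /*
              * For instance (illustrative numbers only): a 1366-pixel wide line
              * in a packed 16-bpp format such as YUYV needs at least 2732 bytes
              * and is padded up to the next multiple of 128, i.e. 2816 bytes.
              */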
 148        for (i = 0; i < min(info->planes, 2U); ++i) {
 149                unsigned int hsub = i > 0 ? info->hsub : 1;
 150                unsigned int vsub = i > 0 ? info->vsub : 1;
 151                unsigned int align = 128;
 152                unsigned int bpl;
 153
 154                bpl = clamp_t(unsigned int, pix->plane_fmt[i].bytesperline,
 155                              pix->width / hsub * info->bpp[i] / 8,
 156                              round_down(65535U, align));
 157
 158                pix->plane_fmt[i].bytesperline = round_up(bpl, align);
 159                pix->plane_fmt[i].sizeimage = pix->plane_fmt[i].bytesperline
 160                                            * pix->height / vsub;
 161        }
 162
 163        if (info->planes == 3) {
 164                /* The second and third planes must have the same stride. */
 165                pix->plane_fmt[2].bytesperline = pix->plane_fmt[1].bytesperline;
 166                pix->plane_fmt[2].sizeimage = pix->plane_fmt[1].sizeimage;
 167        }
 168
 169        pix->num_planes = info->planes;
 170
 171        if (fmtinfo)
 172                *fmtinfo = info;
 173
 174        return 0;
 175}
 176
 177/* -----------------------------------------------------------------------------
 178 * VSP1 Partition Algorithm support
 179 */
 180
 181/**
 182 * vsp1_video_calculate_partition - Calculate the active partition output window
 183 *
 184 * @pipe: the pipeline
 185 * @partition: partition that will hold the calculated values
 186 * @div_size: pre-determined maximum partition division size
 187 * @index: partition index
 188 */
 189static void vsp1_video_calculate_partition(struct vsp1_pipeline *pipe,
 190                                           struct vsp1_partition *partition,
 191                                           unsigned int div_size,
 192                                           unsigned int index)
 193{
 194        const struct v4l2_mbus_framefmt *format;
 195        struct vsp1_partition_window window;
 196        unsigned int modulus;
 197
 198        /*
  199         * Partitions are computed on the size before rotation; use the format
 200         * at the WPF sink.
 201         */
 202        format = vsp1_entity_get_pad_format(&pipe->output->entity,
 203                                            pipe->output->entity.config,
 204                                            RWPF_PAD_SINK);
 205
 206        /* A single partition simply processes the output size in full. */
 207        if (pipe->partitions <= 1) {
 208                window.left = 0;
 209                window.width = format->width;
 210
 211                vsp1_pipeline_propagate_partition(pipe, partition, index,
 212                                                  &window);
 213                return;
 214        }
 215
 216        /* Initialise the partition with sane starting conditions. */
 217        window.left = index * div_size;
 218        window.width = div_size;
 219
 220        modulus = format->width % div_size;
 221
 222        /*
 223         * We need to prevent the last partition from being smaller than the
 224         * *minimum* width of the hardware capabilities.
 225         *
 226         * If the modulus is less than half of the partition size,
  227         * the penultimate partition is reduced to half its size and the
  228         * removed half is added to the final partition:
  229         *     |1234|1234|1234|12|341|  instead of  |1234|1234|1234|1234|1|.
 230         */
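             /*
              * Worked example (illustrative numbers only): with an output width
              * of 1700 and div_size = 512, partitions = 4 and modulus = 164. As
              * 164 < 256, the penultimate partition (index 2) shrinks to 256
              * pixels and the final partition covers 256 + 164 = 420 pixels
              * starting at 1280, so the full 1700 pixels are still produced.
              */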
 231        if (modulus) {
 232                /*
  233                 * pipe->partitions is 1-based, whilst index is 0-based.
 234                 * Normalise this locally.
 235                 */
 236                unsigned int partitions = pipe->partitions - 1;
 237
 238                if (modulus < div_size / 2) {
 239                        if (index == partitions - 1) {
 240                                /* Halve the penultimate partition. */
 241                                window.width = div_size / 2;
 242                        } else if (index == partitions) {
 243                                /* Increase the final partition. */
 244                                window.width = (div_size / 2) + modulus;
 245                                window.left -= div_size / 2;
 246                        }
 247                } else if (index == partitions) {
 248                        window.width = modulus;
 249                }
 250        }
 251
 252        vsp1_pipeline_propagate_partition(pipe, partition, index, &window);
 253}
 254
 255static int vsp1_video_pipeline_setup_partitions(struct vsp1_pipeline *pipe)
 256{
 257        struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
 258        const struct v4l2_mbus_framefmt *format;
 259        struct vsp1_entity *entity;
 260        unsigned int div_size;
 261        unsigned int i;
 262
 263        /*
  264         * Partitions are computed on the size before rotation; use the format
 265         * at the WPF sink.
 266         */
 267        format = vsp1_entity_get_pad_format(&pipe->output->entity,
 268                                            pipe->output->entity.config,
 269                                            RWPF_PAD_SINK);
 270        div_size = format->width;
 271
 272        /*
  273         * Only Gen3 hardware requires image partitioning; Gen2 will operate
 274         * with a single partition that covers the whole output.
 275         */
 276        if (vsp1->info->gen == 3) {
 277                list_for_each_entry(entity, &pipe->entities, list_pipe) {
 278                        unsigned int entity_max;
 279
 280                        if (!entity->ops->max_width)
 281                                continue;
 282
 283                        entity_max = entity->ops->max_width(entity, pipe);
 284                        if (entity_max)
 285                                div_size = min(div_size, entity_max);
 286                }
 287        }
 288
 289        pipe->partitions = DIV_ROUND_UP(format->width, div_size);
 290        pipe->part_table = kcalloc(pipe->partitions, sizeof(*pipe->part_table),
 291                                   GFP_KERNEL);
 292        if (!pipe->part_table)
 293                return -ENOMEM;
 294
 295        for (i = 0; i < pipe->partitions; ++i)
 296                vsp1_video_calculate_partition(pipe, &pipe->part_table[i],
 297                                               div_size, i);
 298
 299        return 0;
 300}
 301
 302/* -----------------------------------------------------------------------------
 303 * Pipeline Management
 304 */
 305
 306/*
 307 * vsp1_video_complete_buffer - Complete the current buffer
 308 * @video: the video node
 309 *
 310 * This function completes the current buffer by filling its sequence number,
 311 * time stamp and payload size, and hands it back to the videobuf core.
 312 *
 313 * When operating in DU output mode (deep pipeline to the DU through the LIF),
 314 * the VSP1 needs to constantly supply frames to the display. In that case, if
 315 * no other buffer is queued, reuse the one that has just been processed instead
 316 * of handing it back to the videobuf core.
 317 *
 318 * Return the next queued buffer or NULL if the queue is empty.
 319 */
 320static struct vsp1_vb2_buffer *
 321vsp1_video_complete_buffer(struct vsp1_video *video)
 322{
 323        struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
 324        struct vsp1_vb2_buffer *next = NULL;
 325        struct vsp1_vb2_buffer *done;
 326        unsigned long flags;
 327        unsigned int i;
 328
 329        spin_lock_irqsave(&video->irqlock, flags);
 330
 331        if (list_empty(&video->irqqueue)) {
 332                spin_unlock_irqrestore(&video->irqlock, flags);
 333                return NULL;
 334        }
 335
 336        done = list_first_entry(&video->irqqueue,
 337                                struct vsp1_vb2_buffer, queue);
 338
 339        /* In DU output mode reuse the buffer if the list is singular. */
 340        if (pipe->lif && list_is_singular(&video->irqqueue)) {
 341                spin_unlock_irqrestore(&video->irqlock, flags);
 342                return done;
 343        }
 344
 345        list_del(&done->queue);
 346
 347        if (!list_empty(&video->irqqueue))
 348                next = list_first_entry(&video->irqqueue,
 349                                        struct vsp1_vb2_buffer, queue);
 350
 351        spin_unlock_irqrestore(&video->irqlock, flags);
 352
 353        done->buf.sequence = pipe->sequence;
 354        done->buf.vb2_buf.timestamp = ktime_get_ns();
 355        for (i = 0; i < done->buf.vb2_buf.num_planes; ++i)
 356                vb2_set_plane_payload(&done->buf.vb2_buf, i,
 357                                      vb2_plane_size(&done->buf.vb2_buf, i));
 358        vb2_buffer_done(&done->buf.vb2_buf, VB2_BUF_STATE_DONE);
 359
 360        return next;
 361}
 362
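     /*
      * Complete the current buffer on the video node attached to @rwpf. If
      * another buffer is queued, make its memory the new hardware target and
      * flag the video node as having a buffer ready.
      */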
 363static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
 364                                 struct vsp1_rwpf *rwpf)
 365{
 366        struct vsp1_video *video = rwpf->video;
 367        struct vsp1_vb2_buffer *buf;
 368
 369        buf = vsp1_video_complete_buffer(video);
 370        if (buf == NULL)
 371                return;
 372
 373        video->rwpf->mem = buf->mem;
 374        pipe->buffers_ready |= 1 << video->pipe_index;
 375}
 376
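     /*
      * Select the given partition as the active one and let every entity in
      * the pipeline add its per-partition configuration to the display list.
      */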
 377static void vsp1_video_pipeline_run_partition(struct vsp1_pipeline *pipe,
 378                                              struct vsp1_dl_list *dl,
 379                                              unsigned int partition)
 380{
 381        struct vsp1_dl_body *dlb = vsp1_dl_list_get_body0(dl);
 382        struct vsp1_entity *entity;
 383
 384        pipe->partition = &pipe->part_table[partition];
 385
 386        list_for_each_entry(entity, &pipe->entities, list_pipe)
 387                vsp1_entity_configure_partition(entity, pipe, dl, dlb);
 388}
 389
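     /*
      * Build the display list for the next frame, including the cached stream
      * configuration when the hardware needs to be fully reprogrammed, chain
      * one additional display list per extra partition, then commit the head
      * of the chain and start the pipeline.
      */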
 390static void vsp1_video_pipeline_run(struct vsp1_pipeline *pipe)
 391{
 392        struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
 393        struct vsp1_entity *entity;
 394        struct vsp1_dl_body *dlb;
 395        struct vsp1_dl_list *dl;
 396        unsigned int partition;
 397
 398        dl = vsp1_dl_list_get(pipe->output->dlm);
 399
 400        /*
 401         * If the VSP hardware isn't configured yet (which occurs either when
 402         * processing the first frame or after a system suspend/resume), add the
 403         * cached stream configuration to the display list to perform a full
 404         * initialisation.
 405         */
 406        if (!pipe->configured)
 407                vsp1_dl_list_add_body(dl, pipe->stream_config);
 408
 409        dlb = vsp1_dl_list_get_body0(dl);
 410
 411        list_for_each_entry(entity, &pipe->entities, list_pipe)
 412                vsp1_entity_configure_frame(entity, pipe, dl, dlb);
 413
 414        /* Run the first partition. */
 415        vsp1_video_pipeline_run_partition(pipe, dl, 0);
 416
 417        /* Process consecutive partitions as necessary. */
 418        for (partition = 1; partition < pipe->partitions; ++partition) {
 419                struct vsp1_dl_list *dl_next;
 420
 421                dl_next = vsp1_dl_list_get(pipe->output->dlm);
 422
 423                /*
 424                 * An incomplete chain will still function, but output only
 425                 * the partitions that had a dl available. The frame end
 426                 * interrupt will be marked on the last dl in the chain.
 427                 */
 428                if (!dl_next) {
 429                        dev_err(vsp1->dev, "Failed to obtain a dl list. Frame will be incomplete\n");
 430                        break;
 431                }
 432
 433                vsp1_video_pipeline_run_partition(pipe, dl_next, partition);
 434                vsp1_dl_list_add_chain(dl, dl_next);
 435        }
 436
 437        /* Complete, and commit the head display list. */
 438        vsp1_dl_list_commit(dl, false);
 439        pipe->configured = true;
 440
 441        vsp1_pipeline_run(pipe);
 442}
 443
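     /*
      * Frame completion handler: complete the buffers on all input and output
      * video nodes, then either wake up a pending stop request or restart the
      * pipeline if it is ready to process another frame.
      */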
 444static void vsp1_video_pipeline_frame_end(struct vsp1_pipeline *pipe,
 445                                          unsigned int completion)
 446{
 447        struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
 448        enum vsp1_pipeline_state state;
 449        unsigned long flags;
 450        unsigned int i;
 451
  452        /* M2M pipelines should never call here with an incomplete frame. */
 453        WARN_ON_ONCE(!(completion & VSP1_DL_FRAME_END_COMPLETED));
 454
 455        spin_lock_irqsave(&pipe->irqlock, flags);
 456
 457        /* Complete buffers on all video nodes. */
 458        for (i = 0; i < vsp1->info->rpf_count; ++i) {
 459                if (!pipe->inputs[i])
 460                        continue;
 461
 462                vsp1_video_frame_end(pipe, pipe->inputs[i]);
 463        }
 464
 465        vsp1_video_frame_end(pipe, pipe->output);
 466
 467        state = pipe->state;
 468        pipe->state = VSP1_PIPELINE_STOPPED;
 469
 470        /*
 471         * If a stop has been requested, mark the pipeline as stopped and
 472         * return. Otherwise restart the pipeline if ready.
 473         */
 474        if (state == VSP1_PIPELINE_STOPPING)
 475                wake_up(&pipe->wq);
 476        else if (vsp1_pipeline_ready(pipe))
 477                vsp1_video_pipeline_run(pipe);
 478
 479        spin_unlock_irqrestore(&pipe->irqlock, flags);
 480}
 481
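     /*
      * Follow the source links from the @input RPF towards the output WPF,
      * recording the BRU/BRS input pad and the UDS encountered on the way.
      * Return 0 when the branch is loop-free and ends at @output, or -EPIPE
      * otherwise.
      */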
 482static int vsp1_video_pipeline_build_branch(struct vsp1_pipeline *pipe,
 483                                            struct vsp1_rwpf *input,
 484                                            struct vsp1_rwpf *output)
 485{
 486        struct media_entity_enum ent_enum;
 487        struct vsp1_entity *entity;
 488        struct media_pad *pad;
 489        struct vsp1_brx *brx = NULL;
 490        int ret;
 491
 492        ret = media_entity_enum_init(&ent_enum, &input->entity.vsp1->media_dev);
 493        if (ret < 0)
 494                return ret;
 495
 496        /*
  497         * The main data path doesn't include the HGO or HGT; use
 498         * vsp1_entity_remote_pad() to traverse the graph.
 499         */
 500
 501        pad = vsp1_entity_remote_pad(&input->entity.pads[RWPF_PAD_SOURCE]);
 502
 503        while (1) {
 504                if (pad == NULL) {
 505                        ret = -EPIPE;
 506                        goto out;
 507                }
 508
 509                /* We've reached a video node, that shouldn't have happened. */
 510                if (!is_media_entity_v4l2_subdev(pad->entity)) {
 511                        ret = -EPIPE;
 512                        goto out;
 513                }
 514
 515                entity = to_vsp1_entity(
 516                        media_entity_to_v4l2_subdev(pad->entity));
 517
 518                /*
 519                 * A BRU or BRS is present in the pipeline, store its input pad
 520                 * number in the input RPF for use when configuring the RPF.
 521                 */
 522                if (entity->type == VSP1_ENTITY_BRU ||
 523                    entity->type == VSP1_ENTITY_BRS) {
 524                        /* BRU and BRS can't be chained. */
 525                        if (brx) {
 526                                ret = -EPIPE;
 527                                goto out;
 528                        }
 529
 530                        brx = to_brx(&entity->subdev);
 531                        brx->inputs[pad->index].rpf = input;
 532                        input->brx_input = pad->index;
 533                }
 534
 535                /* We've reached the WPF, we're done. */
 536                if (entity->type == VSP1_ENTITY_WPF)
 537                        break;
 538
 539                /* Ensure the branch has no loop. */
 540                if (media_entity_enum_test_and_set(&ent_enum,
 541                                                   &entity->subdev.entity)) {
 542                        ret = -EPIPE;
 543                        goto out;
 544                }
 545
 546                /* UDS can't be chained. */
 547                if (entity->type == VSP1_ENTITY_UDS) {
 548                        if (pipe->uds) {
 549                                ret = -EPIPE;
 550                                goto out;
 551                        }
 552
 553                        pipe->uds = entity;
 554                        pipe->uds_input = brx ? &brx->entity : &input->entity;
 555                }
 556
 557                /* Follow the source link, ignoring any HGO or HGT. */
 558                pad = &entity->pads[entity->source_pad];
 559                pad = vsp1_entity_remote_pad(pad);
 560        }
 561
 562        /* The last entity must be the output WPF. */
 563        if (entity != &output->entity)
 564                ret = -EPIPE;
 565
 566out:
 567        media_entity_enum_cleanup(&ent_enum);
 568
 569        return ret;
 570}
 571
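     /*
      * Walk the media graph connected to the video node, add every VSP1 entity
      * to the pipeline and record the RPF inputs, WPF output, LIF, BRx, HGO
      * and HGT. Then validate each input branch down to the output WPF.
      */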
 572static int vsp1_video_pipeline_build(struct vsp1_pipeline *pipe,
 573                                     struct vsp1_video *video)
 574{
 575        struct media_graph graph;
 576        struct media_entity *entity = &video->video.entity;
 577        struct media_device *mdev = entity->graph_obj.mdev;
 578        unsigned int i;
 579        int ret;
 580
 581        /* Walk the graph to locate the entities and video nodes. */
 582        ret = media_graph_walk_init(&graph, mdev);
 583        if (ret)
 584                return ret;
 585
 586        media_graph_walk_start(&graph, entity);
 587
 588        while ((entity = media_graph_walk_next(&graph))) {
 589                struct v4l2_subdev *subdev;
 590                struct vsp1_rwpf *rwpf;
 591                struct vsp1_entity *e;
 592
 593                if (!is_media_entity_v4l2_subdev(entity))
 594                        continue;
 595
 596                subdev = media_entity_to_v4l2_subdev(entity);
 597                e = to_vsp1_entity(subdev);
 598                list_add_tail(&e->list_pipe, &pipe->entities);
 599                e->pipe = pipe;
 600
 601                switch (e->type) {
 602                case VSP1_ENTITY_RPF:
 603                        rwpf = to_rwpf(subdev);
 604                        pipe->inputs[rwpf->entity.index] = rwpf;
 605                        rwpf->video->pipe_index = ++pipe->num_inputs;
 606                        break;
 607
 608                case VSP1_ENTITY_WPF:
 609                        rwpf = to_rwpf(subdev);
 610                        pipe->output = rwpf;
 611                        rwpf->video->pipe_index = 0;
 612                        break;
 613
 614                case VSP1_ENTITY_LIF:
 615                        pipe->lif = e;
 616                        break;
 617
 618                case VSP1_ENTITY_BRU:
 619                case VSP1_ENTITY_BRS:
 620                        pipe->brx = e;
 621                        break;
 622
 623                case VSP1_ENTITY_HGO:
 624                        pipe->hgo = e;
 625                        break;
 626
 627                case VSP1_ENTITY_HGT:
 628                        pipe->hgt = e;
 629                        break;
 630
 631                default:
 632                        break;
 633                }
 634        }
 635
 636        media_graph_walk_cleanup(&graph);
 637
 638        /* We need one output and at least one input. */
 639        if (pipe->num_inputs == 0 || !pipe->output)
 640                return -EPIPE;
 641
 642        /*
 643         * Follow links downstream for each input and make sure the graph
 644         * contains no loop and that all branches end at the output WPF.
 645         */
 646        for (i = 0; i < video->vsp1->info->rpf_count; ++i) {
 647                if (!pipe->inputs[i])
 648                        continue;
 649
 650                ret = vsp1_video_pipeline_build_branch(pipe, pipe->inputs[i],
 651                                                       pipe->output);
 652                if (ret < 0)
 653                        return ret;
 654        }
 655
 656        return 0;
 657}
 658
 659static int vsp1_video_pipeline_init(struct vsp1_pipeline *pipe,
 660                                    struct vsp1_video *video)
 661{
 662        vsp1_pipeline_init(pipe);
 663
 664        pipe->frame_end = vsp1_video_pipeline_frame_end;
 665
 666        return vsp1_video_pipeline_build(pipe, video);
 667}
 668
 669static struct vsp1_pipeline *vsp1_video_pipeline_get(struct vsp1_video *video)
 670{
 671        struct vsp1_pipeline *pipe;
 672        int ret;
 673
 674        /*
 675         * Get a pipeline object for the video node. If a pipeline has already
 676         * been allocated just increment its reference count and return it.
  677         * Otherwise allocate a new pipeline and initialize it; it will be freed
 678         * when the last reference is released.
 679         */
 680        if (!video->rwpf->entity.pipe) {
 681                pipe = kzalloc(sizeof(*pipe), GFP_KERNEL);
 682                if (!pipe)
 683                        return ERR_PTR(-ENOMEM);
 684
 685                ret = vsp1_video_pipeline_init(pipe, video);
 686                if (ret < 0) {
 687                        vsp1_pipeline_reset(pipe);
 688                        kfree(pipe);
 689                        return ERR_PTR(ret);
 690                }
 691        } else {
 692                pipe = video->rwpf->entity.pipe;
 693                kref_get(&pipe->kref);
 694        }
 695
 696        return pipe;
 697}
 698
 699static void vsp1_video_pipeline_release(struct kref *kref)
 700{
 701        struct vsp1_pipeline *pipe = container_of(kref, typeof(*pipe), kref);
 702
 703        vsp1_pipeline_reset(pipe);
 704        kfree(pipe);
 705}
 706
 707static void vsp1_video_pipeline_put(struct vsp1_pipeline *pipe)
 708{
 709        struct media_device *mdev = &pipe->output->entity.vsp1->media_dev;
 710
 711        mutex_lock(&mdev->graph_mutex);
 712        kref_put(&pipe->kref, vsp1_video_pipeline_release);
 713        mutex_unlock(&mdev->graph_mutex);
 714}
 715
 716/* -----------------------------------------------------------------------------
 717 * videobuf2 Queue Operations
 718 */
 719
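     /*
      * vb2 queue setup: validate a caller-provided plane layout against the
      * current format, or report the number of planes and per-plane sizes the
      * format requires.
      */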
 720static int
 721vsp1_video_queue_setup(struct vb2_queue *vq,
 722                       unsigned int *nbuffers, unsigned int *nplanes,
 723                       unsigned int sizes[], struct device *alloc_devs[])
 724{
 725        struct vsp1_video *video = vb2_get_drv_priv(vq);
 726        const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
 727        unsigned int i;
 728
 729        if (*nplanes) {
 730                if (*nplanes != format->num_planes)
 731                        return -EINVAL;
 732
 733                for (i = 0; i < *nplanes; i++)
 734                        if (sizes[i] < format->plane_fmt[i].sizeimage)
 735                                return -EINVAL;
 736                return 0;
 737        }
 738
 739        *nplanes = format->num_planes;
 740
 741        for (i = 0; i < format->num_planes; ++i)
 742                sizes[i] = format->plane_fmt[i].sizeimage;
 743
 744        return 0;
 745}
 746
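     /*
      * Check that each plane is large enough for the current format and record
      * its DMA address. The addresses of unused planes are cleared.
      */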
 747static int vsp1_video_buffer_prepare(struct vb2_buffer *vb)
 748{
 749        struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
 750        struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
 751        struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
 752        const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
 753        unsigned int i;
 754
 755        if (vb->num_planes < format->num_planes)
 756                return -EINVAL;
 757
 758        for (i = 0; i < vb->num_planes; ++i) {
 759                buf->mem.addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
 760
 761                if (vb2_plane_size(vb, i) < format->plane_fmt[i].sizeimage)
 762                        return -EINVAL;
 763        }
 764
 765        for ( ; i < 3; ++i)
 766                buf->mem.addr[i] = 0;
 767
 768        return 0;
 769}
 770
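     /*
      * Add the buffer to the IRQ queue. If the queue was empty, the buffer
      * immediately becomes the next hardware target, and the pipeline is run
      * if the queue is already streaming and the pipeline is ready.
      */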
 771static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
 772{
 773        struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
 774        struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
 775        struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
 776        struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
 777        unsigned long flags;
 778        bool empty;
 779
 780        spin_lock_irqsave(&video->irqlock, flags);
 781        empty = list_empty(&video->irqqueue);
 782        list_add_tail(&buf->queue, &video->irqqueue);
 783        spin_unlock_irqrestore(&video->irqlock, flags);
 784
 785        if (!empty)
 786                return;
 787
 788        spin_lock_irqsave(&pipe->irqlock, flags);
 789
 790        video->rwpf->mem = buf->mem;
 791        pipe->buffers_ready |= 1 << video->pipe_index;
 792
 793        if (vb2_is_streaming(&video->queue) &&
 794            vsp1_pipeline_ready(pipe))
 795                vsp1_video_pipeline_run(pipe);
 796
 797        spin_unlock_irqrestore(&pipe->irqlock, flags);
 798}
 799
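     /*
      * One-time pipeline setup at stream start: compute the partition table,
      * configure alpha scaling for the UDS if present, and cache the stream
      * configuration in a display list body for reuse by
      * vsp1_video_pipeline_run().
      */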
 800static int vsp1_video_setup_pipeline(struct vsp1_pipeline *pipe)
 801{
 802        struct vsp1_entity *entity;
 803        int ret;
 804
  805        /* Determine this pipeline's sizes for image partitioning support. */
 806        ret = vsp1_video_pipeline_setup_partitions(pipe);
 807        if (ret < 0)
 808                return ret;
 809
 810        if (pipe->uds) {
 811                struct vsp1_uds *uds = to_uds(&pipe->uds->subdev);
 812
 813                /*
 814                 * If a BRU or BRS is present in the pipeline before the UDS,
 815                 * the alpha component doesn't need to be scaled as the BRU and
 816                 * BRS output alpha value is fixed to 255. Otherwise we need to
 817                 * scale the alpha component only when available at the input
 818                 * RPF.
 819                 */
 820                if (pipe->uds_input->type == VSP1_ENTITY_BRU ||
 821                    pipe->uds_input->type == VSP1_ENTITY_BRS) {
 822                        uds->scale_alpha = false;
 823                } else {
 824                        struct vsp1_rwpf *rpf =
 825                                to_rwpf(&pipe->uds_input->subdev);
 826
 827                        uds->scale_alpha = rpf->fmtinfo->alpha;
 828                }
 829        }
 830
 831        /*
 832         * Compute and cache the stream configuration into a body. The cached
 833         * body will be added to the display list by vsp1_video_pipeline_run()
 834         * whenever the pipeline needs to be fully reconfigured.
 835         */
 836        pipe->stream_config = vsp1_dlm_dl_body_get(pipe->output->dlm);
 837        if (!pipe->stream_config)
 838                return -ENOMEM;
 839
 840        list_for_each_entry(entity, &pipe->entities, list_pipe) {
 841                vsp1_entity_route_setup(entity, pipe, pipe->stream_config);
 842                vsp1_entity_configure_stream(entity, pipe, pipe->stream_config);
 843        }
 844
 845        return 0;
 846}
 847
 848static void vsp1_video_release_buffers(struct vsp1_video *video)
 849{
 850        struct vsp1_vb2_buffer *buffer;
 851        unsigned long flags;
 852
 853        /* Remove all buffers from the IRQ queue. */
 854        spin_lock_irqsave(&video->irqlock, flags);
 855        list_for_each_entry(buffer, &video->irqqueue, queue)
 856                vb2_buffer_done(&buffer->buf.vb2_buf, VB2_BUF_STATE_ERROR);
 857        INIT_LIST_HEAD(&video->irqqueue);
 858        spin_unlock_irqrestore(&video->irqlock, flags);
 859}
 860
 861static void vsp1_video_cleanup_pipeline(struct vsp1_pipeline *pipe)
 862{
 863        lockdep_assert_held(&pipe->lock);
 864
 865        /* Release any cached configuration from our output video. */
 866        vsp1_dl_body_put(pipe->stream_config);
 867        pipe->stream_config = NULL;
 868        pipe->configured = false;
 869
  870        /* Release our partition table allocation. */
 871        kfree(pipe->part_table);
 872        pipe->part_table = NULL;
 873}
 874
 875static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
 876{
 877        struct vsp1_video *video = vb2_get_drv_priv(vq);
 878        struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
 879        bool start_pipeline = false;
 880        unsigned long flags;
 881        int ret;
 882
 883        mutex_lock(&pipe->lock);
 884        if (pipe->stream_count == pipe->num_inputs) {
 885                ret = vsp1_video_setup_pipeline(pipe);
 886                if (ret < 0) {
 887                        vsp1_video_release_buffers(video);
 888                        vsp1_video_cleanup_pipeline(pipe);
 889                        mutex_unlock(&pipe->lock);
 890                        return ret;
 891                }
 892
 893                start_pipeline = true;
 894        }
 895
 896        pipe->stream_count++;
 897        mutex_unlock(&pipe->lock);
 898
 899        /*
 900         * vsp1_pipeline_ready() is not sufficient to establish that all streams
 901         * are prepared and the pipeline is configured, as multiple streams
  902         * can race through streamon with buffers already queued. Therefore we
 903         * don't even attempt to start the pipeline until the last stream has
 904         * called through here.
 905         */
 906        if (!start_pipeline)
 907                return 0;
 908
 909        spin_lock_irqsave(&pipe->irqlock, flags);
 910        if (vsp1_pipeline_ready(pipe))
 911                vsp1_video_pipeline_run(pipe);
 912        spin_unlock_irqrestore(&pipe->irqlock, flags);
 913
 914        return 0;
 915}
 916
 917static void vsp1_video_stop_streaming(struct vb2_queue *vq)
 918{
 919        struct vsp1_video *video = vb2_get_drv_priv(vq);
 920        struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
 921        unsigned long flags;
 922        int ret;
 923
 924        /*
 925         * Clear the buffers ready flag to make sure the device won't be started
 926         * by a QBUF on the video node on the other side of the pipeline.
 927         */
 928        spin_lock_irqsave(&video->irqlock, flags);
 929        pipe->buffers_ready &= ~(1 << video->pipe_index);
 930        spin_unlock_irqrestore(&video->irqlock, flags);
 931
 932        mutex_lock(&pipe->lock);
 933        if (--pipe->stream_count == pipe->num_inputs) {
 934                /* Stop the pipeline. */
 935                ret = vsp1_pipeline_stop(pipe);
 936                if (ret == -ETIMEDOUT)
 937                        dev_err(video->vsp1->dev, "pipeline stop timeout\n");
 938
 939                vsp1_video_cleanup_pipeline(pipe);
 940        }
 941        mutex_unlock(&pipe->lock);
 942
 943        media_pipeline_stop(&video->video.entity);
 944        vsp1_video_release_buffers(video);
 945        vsp1_video_pipeline_put(pipe);
 946}
 947
 948static const struct vb2_ops vsp1_video_queue_qops = {
 949        .queue_setup = vsp1_video_queue_setup,
 950        .buf_prepare = vsp1_video_buffer_prepare,
 951        .buf_queue = vsp1_video_buffer_queue,
 952        .wait_prepare = vb2_ops_wait_prepare,
 953        .wait_finish = vb2_ops_wait_finish,
 954        .start_streaming = vsp1_video_start_streaming,
 955        .stop_streaming = vsp1_video_stop_streaming,
 956};
 957
 958/* -----------------------------------------------------------------------------
 959 * V4L2 ioctls
 960 */
 961
 962static int
 963vsp1_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
 964{
 965        struct v4l2_fh *vfh = file->private_data;
 966        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
 967
 968        cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
 969                          | V4L2_CAP_VIDEO_CAPTURE_MPLANE
 970                          | V4L2_CAP_VIDEO_OUTPUT_MPLANE;
 971
 972        if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
 973                cap->device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE
 974                                 | V4L2_CAP_STREAMING;
 975        else
 976                cap->device_caps = V4L2_CAP_VIDEO_OUTPUT_MPLANE
 977                                 | V4L2_CAP_STREAMING;
 978
 979        strlcpy(cap->driver, "vsp1", sizeof(cap->driver));
 980        strlcpy(cap->card, video->video.name, sizeof(cap->card));
 981        snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
 982                 dev_name(video->vsp1->dev));
 983
 984        return 0;
 985}
 986
 987static int
 988vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
 989{
 990        struct v4l2_fh *vfh = file->private_data;
 991        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
 992
 993        if (format->type != video->queue.type)
 994                return -EINVAL;
 995
 996        mutex_lock(&video->lock);
 997        format->fmt.pix_mp = video->rwpf->format;
 998        mutex_unlock(&video->lock);
 999
1000        return 0;
1001}
1002
1003static int
1004vsp1_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
1005{
1006        struct v4l2_fh *vfh = file->private_data;
1007        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
1008
1009        if (format->type != video->queue.type)
1010                return -EINVAL;
1011
1012        return __vsp1_video_try_format(video, &format->fmt.pix_mp, NULL);
1013}
1014
1015static int
1016vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
1017{
1018        struct v4l2_fh *vfh = file->private_data;
1019        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
1020        const struct vsp1_format_info *info;
1021        int ret;
1022
1023        if (format->type != video->queue.type)
1024                return -EINVAL;
1025
1026        ret = __vsp1_video_try_format(video, &format->fmt.pix_mp, &info);
1027        if (ret < 0)
1028                return ret;
1029
1030        mutex_lock(&video->lock);
1031
1032        if (vb2_is_busy(&video->queue)) {
1033                ret = -EBUSY;
1034                goto done;
1035        }
1036
1037        video->rwpf->format = format->fmt.pix_mp;
1038        video->rwpf->fmtinfo = info;
1039
1040done:
1041        mutex_unlock(&video->lock);
1042        return ret;
1043}
1044
1045static int
1046vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
1047{
1048        struct v4l2_fh *vfh = file->private_data;
1049        struct vsp1_video *video = to_vsp1_video(vfh->vdev);
1050        struct media_device *mdev = &video->vsp1->media_dev;
1051        struct vsp1_pipeline *pipe;
1052        int ret;
1053
1054        if (video->queue.owner && video->queue.owner != file->private_data)
1055                return -EBUSY;
1056
1057        /*
1058         * Get a pipeline for the video node and start streaming on it. No link
1059         * touching an entity in the pipeline can be activated or deactivated
1060         * once streaming is started.
1061         */
1062        mutex_lock(&mdev->graph_mutex);
1063
1064        pipe = vsp1_video_pipeline_get(video);
1065        if (IS_ERR(pipe)) {
1066                mutex_unlock(&mdev->graph_mutex);
1067                return PTR_ERR(pipe);
1068        }
1069
1070        ret = __media_pipeline_start(&video->video.entity, &pipe->pipe);
1071        if (ret < 0) {
1072                mutex_unlock(&mdev->graph_mutex);
1073                goto err_pipe;
1074        }
1075
1076        mutex_unlock(&mdev->graph_mutex);
1077
1078        /*
1079         * Verify that the configured format matches the output of the connected
1080         * subdev.
1081         */
1082        ret = vsp1_video_verify_format(video);
1083        if (ret < 0)
1084                goto err_stop;
1085
1086        /* Start the queue. */
1087        ret = vb2_streamon(&video->queue, type);
1088        if (ret < 0)
1089                goto err_stop;
1090
1091        return 0;
1092
1093err_stop:
1094        media_pipeline_stop(&video->video.entity);
1095err_pipe:
1096        vsp1_video_pipeline_put(pipe);
1097        return ret;
1098}
1099
1100static const struct v4l2_ioctl_ops vsp1_video_ioctl_ops = {
1101        .vidioc_querycap                = vsp1_video_querycap,
1102        .vidioc_g_fmt_vid_cap_mplane    = vsp1_video_get_format,
1103        .vidioc_s_fmt_vid_cap_mplane    = vsp1_video_set_format,
1104        .vidioc_try_fmt_vid_cap_mplane  = vsp1_video_try_format,
1105        .vidioc_g_fmt_vid_out_mplane    = vsp1_video_get_format,
1106        .vidioc_s_fmt_vid_out_mplane    = vsp1_video_set_format,
1107        .vidioc_try_fmt_vid_out_mplane  = vsp1_video_try_format,
1108        .vidioc_reqbufs                 = vb2_ioctl_reqbufs,
1109        .vidioc_querybuf                = vb2_ioctl_querybuf,
1110        .vidioc_qbuf                    = vb2_ioctl_qbuf,
1111        .vidioc_dqbuf                   = vb2_ioctl_dqbuf,
1112        .vidioc_expbuf                  = vb2_ioctl_expbuf,
1113        .vidioc_create_bufs             = vb2_ioctl_create_bufs,
1114        .vidioc_prepare_buf             = vb2_ioctl_prepare_buf,
1115        .vidioc_streamon                = vsp1_video_streamon,
1116        .vidioc_streamoff               = vb2_ioctl_streamoff,
1117};
1118
1119/* -----------------------------------------------------------------------------
1120 * V4L2 File Operations
1121 */
1122
1123static int vsp1_video_open(struct file *file)
1124{
1125        struct vsp1_video *video = video_drvdata(file);
1126        struct v4l2_fh *vfh;
1127        int ret = 0;
1128
1129        vfh = kzalloc(sizeof(*vfh), GFP_KERNEL);
1130        if (vfh == NULL)
1131                return -ENOMEM;
1132
1133        v4l2_fh_init(vfh, &video->video);
1134        v4l2_fh_add(vfh);
1135
1136        file->private_data = vfh;
1137
1138        ret = vsp1_device_get(video->vsp1);
1139        if (ret < 0) {
1140                v4l2_fh_del(vfh);
1141                v4l2_fh_exit(vfh);
1142                kfree(vfh);
1143        }
1144
1145        return ret;
1146}
1147
1148static int vsp1_video_release(struct file *file)
1149{
1150        struct vsp1_video *video = video_drvdata(file);
1151        struct v4l2_fh *vfh = file->private_data;
1152
1153        mutex_lock(&video->lock);
1154        if (video->queue.owner == vfh) {
1155                vb2_queue_release(&video->queue);
1156                video->queue.owner = NULL;
1157        }
1158        mutex_unlock(&video->lock);
1159
1160        vsp1_device_put(video->vsp1);
1161
1162        v4l2_fh_release(file);
1163
1164        file->private_data = NULL;
1165
1166        return 0;
1167}
1168
1169static const struct v4l2_file_operations vsp1_video_fops = {
1170        .owner = THIS_MODULE,
1171        .unlocked_ioctl = video_ioctl2,
1172        .open = vsp1_video_open,
1173        .release = vsp1_video_release,
1174        .poll = vb2_fop_poll,
1175        .mmap = vb2_fop_mmap,
1176};
1177
1178/* -----------------------------------------------------------------------------
1179 * Suspend and Resume
1180 */
1181
1182void vsp1_video_suspend(struct vsp1_device *vsp1)
1183{
1184        unsigned long flags;
1185        unsigned int i;
1186        int ret;
1187
1188        /*
1189         * To avoid increasing the system suspend time needlessly, loop over the
1190         * pipelines twice, first to set them all to the stopping state, and
1191         * then to wait for the stop to complete.
1192         */
1193        for (i = 0; i < vsp1->info->wpf_count; ++i) {
1194                struct vsp1_rwpf *wpf = vsp1->wpf[i];
1195                struct vsp1_pipeline *pipe;
1196
1197                if (wpf == NULL)
1198                        continue;
1199
1200                pipe = wpf->entity.pipe;
1201                if (pipe == NULL)
1202                        continue;
1203
1204                spin_lock_irqsave(&pipe->irqlock, flags);
1205                if (pipe->state == VSP1_PIPELINE_RUNNING)
1206                        pipe->state = VSP1_PIPELINE_STOPPING;
1207                spin_unlock_irqrestore(&pipe->irqlock, flags);
1208        }
1209
1210        for (i = 0; i < vsp1->info->wpf_count; ++i) {
1211                struct vsp1_rwpf *wpf = vsp1->wpf[i];
1212                struct vsp1_pipeline *pipe;
1213
1214                if (wpf == NULL)
1215                        continue;
1216
1217                pipe = wpf->entity.pipe;
1218                if (pipe == NULL)
1219                        continue;
1220
1221                ret = wait_event_timeout(pipe->wq, vsp1_pipeline_stopped(pipe),
1222                                         msecs_to_jiffies(500));
1223                if (ret == 0)
1224                        dev_warn(vsp1->dev, "pipeline %u stop timeout\n",
1225                                 wpf->entity.index);
1226        }
1227}
1228
1229void vsp1_video_resume(struct vsp1_device *vsp1)
1230{
1231        unsigned long flags;
1232        unsigned int i;
1233
1234        /* Resume all running pipelines. */
1235        for (i = 0; i < vsp1->info->wpf_count; ++i) {
1236                struct vsp1_rwpf *wpf = vsp1->wpf[i];
1237                struct vsp1_pipeline *pipe;
1238
1239                if (wpf == NULL)
1240                        continue;
1241
1242                pipe = wpf->entity.pipe;
1243                if (pipe == NULL)
1244                        continue;
1245
1246                /*
1247                 * The hardware may have been reset during a suspend and will
1248                 * need a full reconfiguration.
1249                 */
1250                pipe->configured = false;
1251
1252                spin_lock_irqsave(&pipe->irqlock, flags);
1253                if (vsp1_pipeline_ready(pipe))
1254                        vsp1_video_pipeline_run(pipe);
1255                spin_unlock_irqrestore(&pipe->irqlock, flags);
1256        }
1257}
1258
1259/* -----------------------------------------------------------------------------
1260 * Initialization and Cleanup
1261 */
1262
1263struct vsp1_video *vsp1_video_create(struct vsp1_device *vsp1,
1264                                     struct vsp1_rwpf *rwpf)
1265{
1266        struct vsp1_video *video;
1267        const char *direction;
1268        int ret;
1269
1270        video = devm_kzalloc(vsp1->dev, sizeof(*video), GFP_KERNEL);
1271        if (!video)
1272                return ERR_PTR(-ENOMEM);
1273
1274        rwpf->video = video;
1275
1276        video->vsp1 = vsp1;
1277        video->rwpf = rwpf;
1278
1279        if (rwpf->entity.type == VSP1_ENTITY_RPF) {
1280                direction = "input";
1281                video->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1282                video->pad.flags = MEDIA_PAD_FL_SOURCE;
1283                video->video.vfl_dir = VFL_DIR_TX;
1284        } else {
1285                direction = "output";
1286                video->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1287                video->pad.flags = MEDIA_PAD_FL_SINK;
1288                video->video.vfl_dir = VFL_DIR_RX;
1289        }
1290
1291        mutex_init(&video->lock);
1292        spin_lock_init(&video->irqlock);
1293        INIT_LIST_HEAD(&video->irqqueue);
1294
1295        /* Initialize the media entity... */
1296        ret = media_entity_pads_init(&video->video.entity, 1, &video->pad);
1297        if (ret < 0)
1298                return ERR_PTR(ret);
1299
1300        /* ... and the format ... */
1301        rwpf->format.pixelformat = VSP1_VIDEO_DEF_FORMAT;
1302        rwpf->format.width = VSP1_VIDEO_DEF_WIDTH;
1303        rwpf->format.height = VSP1_VIDEO_DEF_HEIGHT;
1304        __vsp1_video_try_format(video, &rwpf->format, &rwpf->fmtinfo);
1305
1306        /* ... and the video node... */
1307        video->video.v4l2_dev = &video->vsp1->v4l2_dev;
1308        video->video.fops = &vsp1_video_fops;
1309        snprintf(video->video.name, sizeof(video->video.name), "%s %s",
1310                 rwpf->entity.subdev.name, direction);
1311        video->video.vfl_type = VFL_TYPE_GRABBER;
1312        video->video.release = video_device_release_empty;
1313        video->video.ioctl_ops = &vsp1_video_ioctl_ops;
1314
1315        video_set_drvdata(&video->video, video);
1316
1317        video->queue.type = video->type;
1318        video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
1319        video->queue.lock = &video->lock;
1320        video->queue.drv_priv = video;
1321        video->queue.buf_struct_size = sizeof(struct vsp1_vb2_buffer);
1322        video->queue.ops = &vsp1_video_queue_qops;
1323        video->queue.mem_ops = &vb2_dma_contig_memops;
1324        video->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
1325        video->queue.dev = video->vsp1->bus_master;
1326        ret = vb2_queue_init(&video->queue);
1327        if (ret < 0) {
1328                dev_err(video->vsp1->dev, "failed to initialize vb2 queue\n");
1329                goto error;
1330        }
1331
1332        /* ... and register the video device. */
1333        video->video.queue = &video->queue;
1334        ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
1335        if (ret < 0) {
1336                dev_err(video->vsp1->dev, "failed to register video device\n");
1337                goto error;
1338        }
1339
1340        return video;
1341
1342error:
1343        vsp1_video_cleanup(video);
1344        return ERR_PTR(ret);
1345}
1346
1347void vsp1_video_cleanup(struct vsp1_video *video)
1348{
1349        if (video_is_registered(&video->video))
1350                video_unregister_device(&video->video);
1351
1352        media_entity_cleanup(&video->video.entity);
1353}
1354