1
2
3
4
5
6
7
8
9
10
11
12
13
14
15#include <linux/pm_runtime.h>
16
17#include <media/v4l2-device.h>
18#include <media/v4l2-ioctl.h>
19#include <media/v4l2-mem2mem.h>
20#include <media/videobuf2-dma-sg.h>
21#include <media/videobuf2-v4l2.h>
22
23#include "rga-hw.h"
24#include "rga.h"
25
26static int
27rga_queue_setup(struct vb2_queue *vq,
28 unsigned int *nbuffers, unsigned int *nplanes,
29 unsigned int sizes[], struct device *alloc_devs[])
30{
31 struct rga_ctx *ctx = vb2_get_drv_priv(vq);
32 struct rga_frame *f = rga_get_frame(ctx, vq->type);
33
34 if (IS_ERR(f))
35 return PTR_ERR(f);
36
37 if (*nplanes)
38 return sizes[0] < f->size ? -EINVAL : 0;
39
40 sizes[0] = f->size;
41 *nplanes = 1;
42
43 return 0;
44}
45
46static int rga_buf_prepare(struct vb2_buffer *vb)
47{
48 struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
49 struct rga_frame *f = rga_get_frame(ctx, vb->vb2_queue->type);
50
51 if (IS_ERR(f))
52 return PTR_ERR(f);
53
54 vb2_set_plane_payload(vb, 0, f->size);
55
56 return 0;
57}
58
59static void rga_buf_queue(struct vb2_buffer *vb)
60{
61 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
62 struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
63
64 v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
65}
66
67static int rga_buf_start_streaming(struct vb2_queue *q, unsigned int count)
68{
69 struct rga_ctx *ctx = vb2_get_drv_priv(q);
70 struct rockchip_rga *rga = ctx->rga;
71 int ret, i;
72
73 ret = pm_runtime_get_sync(rga->dev);
74
75 if (!ret)
76 return 0;
77
78 for (i = 0; i < q->num_buffers; ++i) {
79 if (q->bufs[i]->state == VB2_BUF_STATE_ACTIVE) {
80 v4l2_m2m_buf_done(to_vb2_v4l2_buffer(q->bufs[i]),
81 VB2_BUF_STATE_QUEUED);
82 }
83 }
84
85 return ret;
86}
87
88static void rga_buf_stop_streaming(struct vb2_queue *q)
89{
90 struct rga_ctx *ctx = vb2_get_drv_priv(q);
91 struct rockchip_rga *rga = ctx->rga;
92 struct vb2_v4l2_buffer *vbuf;
93
94 for (;;) {
95 if (V4L2_TYPE_IS_OUTPUT(q->type))
96 vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
97 else
98 vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
99 if (!vbuf)
100 break;
101 v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
102 }
103
104 pm_runtime_put(rga->dev);
105}
106
/* videobuf2 queue operations shared by the RGA source and destination queues. */
const struct vb2_ops rga_qops = {
	.queue_setup = rga_queue_setup,
	.buf_prepare = rga_buf_prepare,
	.buf_queue = rga_buf_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
	.start_streaming = rga_buf_start_streaming,
	.stop_streaming = rga_buf_stop_streaming,
};
116
117
118
119
/*
 * Build the RGA's local MMU page table for a buffer: walk the buffer's
 * scatter-gather list and write one page-sized physical address per entry
 * into the per-direction table (src_mmu_pages / dst_mmu_pages), then sync
 * the table to the device.
 *
 * NOTE(review): table entries are 'unsigned int', so each sg_phys() address
 * is truncated to 32 bits — presumably the RGA MMU only accepts 32-bit
 * page addresses; confirm against the hardware documentation / rga-hw.h.
 */
void rga_buf_map(struct vb2_buffer *vb)
{
	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct rockchip_rga *rga = ctx->rga;
	struct sg_table *sgt;
	struct scatterlist *sgl;
	unsigned int *pages;
	unsigned int address, len, i, p;
	unsigned int mapped_size = 0;

	/* OUTPUT buffers feed the source table; CAPTURE the destination. */
	if (vb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
		pages = rga->src_mmu_pages;
	else
		pages = rga->dst_mmu_pages;

	/* Create the local MMU table for the RGA from the plane's sg list. */
	sgt = vb2_plane_cookie(vb, 0);

	for_each_sg(sgt->sgl, sgl, sgt->nents, i) {
		/* Number of whole pages covered by this sg segment. */
		len = sg_dma_len(sgl) >> PAGE_SHIFT;
		address = sg_phys(sgl);

		for (p = 0; p < len; p++) {
			dma_addr_t phys = address +
					  ((dma_addr_t)p << PAGE_SHIFT);

			pages[mapped_size + p] = phys;
		}

		mapped_size += len;
	}

	/*
	 * Sync the table for device access.  The 8 * PAGE_SIZE length is
	 * presumably the fixed allocation size of the MMU table — TODO
	 * confirm against where src/dst_mmu_pages are allocated.
	 */
	dma_sync_single_for_device(rga->dev, virt_to_phys(pages),
				   8 * PAGE_SIZE, DMA_BIDIRECTIONAL);
}
156