#ifndef __INDUSTRIALIO_DMA_BUFFER_H__
#define __INDUSTRIALIO_DMA_BUFFER_H__

#include <linux/list.h>
#include <linux/kref.h>
#include <linux/spinlock.h>
#include <linux/mutex.h>
#include <linux/iio/buffer.h>

struct iio_dma_buffer_queue;
struct iio_dma_buffer_ops;
struct device;

/**
 * struct iio_buffer_block - Descriptor for a single data block
 * @size: Size of the block in bytes
 * @bytes_used: Number of bytes in the block that contain valid data
 */
struct iio_buffer_block {
	u32 size;
	u32 bytes_used;
};

/**
 * enum iio_block_state - State of a struct iio_dma_buffer_block
 * @IIO_BLOCK_STATE_DEQUEUED: Block is not queued
 * @IIO_BLOCK_STATE_QUEUED: Block is on the incoming queue
 * @IIO_BLOCK_STATE_ACTIVE: Block is currently being processed by the DMA
 * @IIO_BLOCK_STATE_DONE: Block is on the outgoing queue
 * @IIO_BLOCK_STATE_DEAD: Block has been marked as to be freed
 */
enum iio_block_state {
	IIO_BLOCK_STATE_DEQUEUED,
	IIO_BLOCK_STATE_QUEUED,
	IIO_BLOCK_STATE_ACTIVE,
	IIO_BLOCK_STATE_DONE,
	IIO_BLOCK_STATE_DEAD,
};

/**
 * struct iio_dma_buffer_block - IIO buffer block
 * @head: List head
 * @bytes_used: Number of bytes that contain valid data
 * @vaddr: Virtual address of the block's memory
 * @phys_addr: Physical (DMA) address of the block's memory
 * @size: Total size of the block in bytes
 * @queue: Parent DMA buffer queue
 * @kref: kref used to manage the lifetime of the block
 * @state: Current state of the block
 */
struct iio_dma_buffer_block {
	/* May only be accessed by the owner of the block */
	struct list_head head;
	size_t bytes_used;

	/*
	 * Set during allocation, constant thereafter. May be accessed
	 * read-only by anybody holding a reference to the block.
	 */
	void *vaddr;
	dma_addr_t phys_addr;
	size_t size;
	struct iio_dma_buffer_queue *queue;

	/* Must not be accessed outside the core. */
	struct kref kref;
	/*
	 * Must not be accessed outside the core. Access needs to hold the
	 * queue->list_lock if the block is not owned by the core.
	 */
	enum iio_block_state state;
};

/**
 * struct iio_dma_buffer_queue_fileio - FileIO state for the DMA buffer
 * @blocks: Buffer blocks used for fileio
 * @active_block: Block being used in read()
 * @pos: Read offset in the active block
 * @block_size: Size of each block
 */
struct iio_dma_buffer_queue_fileio {
	struct iio_dma_buffer_block *blocks[2];
	struct iio_dma_buffer_block *active_block;
	size_t pos;
	size_t block_size;
};

/**
 * struct iio_dma_buffer_queue - DMA buffer base structure
 * @buffer: IIO buffer base structure
 * @dev: Parent device
 * @ops: DMA buffer callbacks
 * @lock: Protects the fileio state and the incoming and outgoing queues
 * @list_lock: Protects lists that contain blocks which can be modified in
 *   atomic context as well as blocks on those lists. This is the outgoing
 *   queue list and typically also a list of active blocks in the part that
 *   handles the DMA controller
 * @incoming: List of buffers on the incoming queue
 * @outgoing: List of buffers on the outgoing queue
 * @active: Whether the buffer is currently active
 * @fileio: FileIO state
 */
struct iio_dma_buffer_queue {
	struct iio_buffer buffer;
	struct device *dev;
	const struct iio_dma_buffer_ops *ops;

	struct mutex lock;
	spinlock_t list_lock;
	struct list_head incoming;
	struct list_head outgoing;

	bool active;

	struct iio_dma_buffer_queue_fileio fileio;
};

/**
 * struct iio_dma_buffer_ops - DMA buffer callback operations
 * @submit: Called when a block is submitted to the DMA controller
 * @abort: Should abort all pending transfers
 */
struct iio_dma_buffer_ops {
	int (*submit)(struct iio_dma_buffer_queue *queue,
		struct iio_dma_buffer_block *block);
	void (*abort)(struct iio_dma_buffer_queue *queue);
};

void iio_dma_buffer_block_done(struct iio_dma_buffer_block *block);
void iio_dma_buffer_block_list_abort(struct iio_dma_buffer_queue *queue,
	struct list_head *list);
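
/*
 * Usage sketch (hypothetical, not taken from this header): a DMA controller
 * driver provides the submit() and abort() callbacks above. submit() hands
 * the block's DMA address and size to the hardware, the completion handler
 * reports the finished block with iio_dma_buffer_block_done(), and abort()
 * stops the hardware and fails all blocks it still owns through
 * iio_dma_buffer_block_list_abort(). All foo_* names are invented for
 * illustration only.
 *
 *	struct foo_dma_queue {
 *		struct iio_dma_buffer_queue queue;
 *		struct list_head active;
 *	};
 *
 *	static int foo_dma_submit(struct iio_dma_buffer_queue *queue,
 *		struct iio_dma_buffer_block *block)
 *	{
 *		struct foo_dma_queue *foo =
 *			container_of(queue, struct foo_dma_queue, queue);
 *
 *		spin_lock_irq(&queue->list_lock);
 *		list_add_tail(&block->head, &foo->active);
 *		spin_unlock_irq(&queue->list_lock);
 *
 *		return foo_hw_start_transfer(foo, block->phys_addr, block->size);
 *	}
 *
 *	static void foo_dma_transfer_done(struct foo_dma_queue *foo,
 *		struct iio_dma_buffer_block *block, unsigned int bytes)
 *	{
 *		unsigned long flags;
 *
 *		spin_lock_irqsave(&foo->queue.list_lock, flags);
 *		list_del(&block->head);
 *		spin_unlock_irqrestore(&foo->queue.list_lock, flags);
 *
 *		block->bytes_used = bytes;
 *		iio_dma_buffer_block_done(block);
 *	}
 *
 *	static void foo_dma_abort(struct iio_dma_buffer_queue *queue)
 *	{
 *		struct foo_dma_queue *foo =
 *			container_of(queue, struct foo_dma_queue, queue);
 *
 *		foo_hw_stop(foo);
 *		iio_dma_buffer_block_list_abort(queue, &foo->active);
 *	}
 *
 *	static const struct iio_dma_buffer_ops foo_dma_buffer_ops = {
 *		.submit = foo_dma_submit,
 *		.abort = foo_dma_abort,
 *	};
 */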

int iio_dma_buffer_enable(struct iio_buffer *buffer,
	struct iio_dev *indio_dev);
int iio_dma_buffer_disable(struct iio_buffer *buffer,
	struct iio_dev *indio_dev);
int iio_dma_buffer_read(struct iio_buffer *buffer, size_t n,
	char __user *user_buffer);
size_t iio_dma_buffer_data_available(struct iio_buffer *buffer);
int iio_dma_buffer_set_bytes_per_datum(struct iio_buffer *buffer, size_t bpd);
int iio_dma_buffer_set_length(struct iio_buffer *buffer, int length);
int iio_dma_buffer_request_update(struct iio_buffer *buffer);
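
/*
 * These helpers are normally not called directly; a driver points the
 * corresponding callbacks of its struct iio_buffer_access_funcs at them.
 * A hypothetical sketch follows; the exact callback field names (in
 * particular the read callback) depend on the iio_buffer_access_funcs
 * definition of the target kernel, and foo_* names are invented.
 *
 *	static const struct iio_buffer_access_funcs foo_dma_access_funcs = {
 *		.read_first_n = iio_dma_buffer_read,
 *		.set_bytes_per_datum = iio_dma_buffer_set_bytes_per_datum,
 *		.set_length = iio_dma_buffer_set_length,
 *		.request_update = iio_dma_buffer_request_update,
 *		.enable = iio_dma_buffer_enable,
 *		.disable = iio_dma_buffer_disable,
 *		.data_available = iio_dma_buffer_data_available,
 *		.modes = INDIO_BUFFER_HARDWARE,
 *	};
 */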

int iio_dma_buffer_init(struct iio_dma_buffer_queue *queue,
	struct device *dma_dev, const struct iio_dma_buffer_ops *ops);
void iio_dma_buffer_exit(struct iio_dma_buffer_queue *queue);
void iio_dma_buffer_release(struct iio_dma_buffer_queue *queue);
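
/*
 * Setup/teardown sketch (hypothetical driver, names invented): the queue is
 * typically embedded in a driver-private structure. iio_dma_buffer_init()
 * prepares the queue for the given DMA-capable device and callback ops,
 * iio_dma_buffer_exit() shuts it down again, and iio_dma_buffer_release()
 * frees the remaining queue resources once the last reference to the buffer
 * has been dropped (usually from the buffer's release callback).
 *
 *	static struct iio_buffer *foo_dma_buffer_alloc(struct device *dma_dev)
 *	{
 *		struct foo_dma_queue *foo;
 *		int ret;
 *
 *		foo = kzalloc(sizeof(*foo), GFP_KERNEL);
 *		if (!foo)
 *			return ERR_PTR(-ENOMEM);
 *
 *		INIT_LIST_HEAD(&foo->active);
 *		ret = iio_dma_buffer_init(&foo->queue, dma_dev,
 *			&foo_dma_buffer_ops);
 *		if (ret) {
 *			kfree(foo);
 *			return ERR_PTR(ret);
 *		}
 *
 *		foo->queue.buffer.access = &foo_dma_access_funcs;
 *
 *		return &foo->queue.buffer;
 *	}
 */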

#endif