#ifndef _LINUX_SKB_ARRAY_H
#define _LINUX_SKB_ARRAY_H 1

#ifdef __KERNEL__
#include <linux/ptr_ring.h>
#include <linux/skbuff.h>
#include <linux/if_vlan.h>
#endif

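/*
 * A fixed-size FIFO of struct sk_buff pointers, implemented as a thin
 * wrapper around the generic ptr_ring.  The usual ptr_ring conventions
 * apply: the plain helpers take the ring's producer or consumer spinlock,
 * the _irq variants additionally disable interrupts, the _bh variants
 * disable bottom halves, and the _any variants use irqsave locking so
 * they may be called from any context.  The double-underscore helpers
 * take no locks at all and rely on the caller for serialization.
 * Producers and consumers use separate locks, so a single producer and a
 * single consumer do not contend with each other.
 */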
struct skb_array {
	struct ptr_ring ring;
};

/* Might be slightly faster than skb_array_full() below, but callers
 * invoking this in a loop must use a compiler barrier, for example
 * cpu_relax(), and must not race with a concurrent resize.
 */
static inline bool __skb_array_full(struct skb_array *a)
{
	return __ptr_ring_full(&a->ring);
}

static inline bool skb_array_full(struct skb_array *a)
{
	return ptr_ring_full(&a->ring);
}

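/* Enqueue an skb at the tail of the array.  Returns 0 on success and
 * -ENOSPC if the array is full; on failure the skb is not freed and
 * ownership stays with the caller.  The variants below differ only in
 * how the producer lock is taken (plain, _irq, _bh or _any).
 */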
static inline int skb_array_produce(struct skb_array *a, struct sk_buff *skb)
{
	return ptr_ring_produce(&a->ring, skb);
}

static inline int skb_array_produce_irq(struct skb_array *a, struct sk_buff *skb)
{
	return ptr_ring_produce_irq(&a->ring, skb);
}

static inline int skb_array_produce_bh(struct skb_array *a, struct sk_buff *skb)
{
	return ptr_ring_produce_bh(&a->ring, skb);
}

static inline int skb_array_produce_any(struct skb_array *a, struct sk_buff *skb)
{
	return ptr_ring_produce_any(&a->ring, skb);
}


/* Might be slightly faster than skb_array_empty() below, but the result
 * is only a hint if other CPUs consume concurrently, it is not safe
 * against a concurrent resize, and callers invoking this in a loop must
 * use a compiler barrier, for example cpu_relax().
 */
static inline bool __skb_array_empty(struct skb_array *a)
{
	return __ptr_ring_empty(&a->ring);
}

static inline struct sk_buff *__skb_array_peek(struct skb_array *a)
{
	return __ptr_ring_peek(&a->ring);
}

static inline bool skb_array_empty(struct skb_array *a)
{
	return ptr_ring_empty(&a->ring);
}

static inline bool skb_array_empty_bh(struct skb_array *a)
{
	return ptr_ring_empty_bh(&a->ring);
}

static inline bool skb_array_empty_irq(struct skb_array *a)
{
	return ptr_ring_empty_irq(&a->ring);
}

static inline bool skb_array_empty_any(struct skb_array *a)
{
	return ptr_ring_empty_any(&a->ring);
}

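/* Dequeue the oldest skb; returns it, or NULL if the array is empty.
 * The _batched variants dequeue up to n skbs into array[] and return
 * the number actually dequeued.  Locking follows the usual suffix
 * convention; the __ variant requires external serialization of
 * consumers.
 */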
static inline struct sk_buff *__skb_array_consume(struct skb_array *a)
{
	return __ptr_ring_consume(&a->ring);
}

static inline struct sk_buff *skb_array_consume(struct skb_array *a)
{
	return ptr_ring_consume(&a->ring);
}

static inline int skb_array_consume_batched(struct skb_array *a,
					     struct sk_buff **array, int n)
{
	return ptr_ring_consume_batched(&a->ring, (void **)array, n);
}

static inline struct sk_buff *skb_array_consume_irq(struct skb_array *a)
{
	return ptr_ring_consume_irq(&a->ring);
}

static inline int skb_array_consume_batched_irq(struct skb_array *a,
						struct sk_buff **array, int n)
{
	return ptr_ring_consume_batched_irq(&a->ring, (void **)array, n);
}

static inline struct sk_buff *skb_array_consume_any(struct skb_array *a)
{
	return ptr_ring_consume_any(&a->ring);
}

static inline int skb_array_consume_batched_any(struct skb_array *a,
						struct sk_buff **array, int n)
{
	return ptr_ring_consume_batched_any(&a->ring, (void **)array, n);
}

static inline struct sk_buff *skb_array_consume_bh(struct skb_array *a)
{
	return ptr_ring_consume_bh(&a->ring);
}

static inline int skb_array_consume_batched_bh(struct skb_array *a,
					       struct sk_buff **array, int n)
{
	return ptr_ring_consume_batched_bh(&a->ring, (void **)array, n);
}

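/* Length of the next skb to be consumed: skb->len plus VLAN_HLEN if a
 * VLAN tag is still held out of band in the skb, so that byte accounting
 * reflects the headroom the tag will occupy once reinserted.  Returns 0
 * for a NULL skb, so the peek_len helpers below return 0 when the array
 * is empty.
 */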
static inline int __skb_array_len_with_tag(struct sk_buff *skb)
{
	if (likely(skb)) {
		int len = skb->len;

		if (skb_vlan_tag_present(skb))
			len += VLAN_HLEN;

		return len;
	} else {
		return 0;
	}
}

static inline int skb_array_peek_len(struct skb_array *a)
{
	return PTR_RING_PEEK_CALL(&a->ring, __skb_array_len_with_tag);
}

static inline int skb_array_peek_len_irq(struct skb_array *a)
{
	return PTR_RING_PEEK_CALL_IRQ(&a->ring, __skb_array_len_with_tag);
}

static inline int skb_array_peek_len_bh(struct skb_array *a)
{
	return PTR_RING_PEEK_CALL_BH(&a->ring, __skb_array_len_with_tag);
}

static inline int skb_array_peek_len_any(struct skb_array *a)
{
	return PTR_RING_PEEK_CALL_ANY(&a->ring, __skb_array_len_with_tag);
}

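/* Allocate the backing array of @size pointer slots with @gfp.
 * Returns 0 on success or -ENOMEM on allocation failure.
 */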
static inline int skb_array_init(struct skb_array *a, int size, gfp_t gfp)
{
	return ptr_ring_init(&a->ring, size, gfp);
}

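/* Destroy callback handed to the ptr_ring teardown and resize helpers:
 * frees any skb still left in the ring.
 */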
static void __skb_array_destroy_skb(void *ptr)
{
	kfree_skb(ptr);
}

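/* Return previously consumed skbs to the array so they will be consumed
 * again later; skbs that no longer fit are freed.
 */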
static inline void skb_array_unconsume(struct skb_array *a,
				       struct sk_buff **skbs, int n)
{
	ptr_ring_unconsume(&a->ring, (void **)skbs, n, __skb_array_destroy_skb);
}

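/* Resize the array to @size entries.  Queued skbs are preserved in
 * order; any that do not fit in the new ring are freed.  Both ring
 * locks are taken, so this can run alongside producers and consumers.
 */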
static inline int skb_array_resize(struct skb_array *a, int size, gfp_t gfp)
{
	return ptr_ring_resize(&a->ring, size, gfp, __skb_array_destroy_skb);
}

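/* Resize several arrays in one call.  The cast from struct skb_array **
 * to struct ptr_ring ** is only valid because @ring is the first (and
 * only) member of struct skb_array, which the BUILD_BUG_ON asserts at
 * compile time.
 */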
static inline int skb_array_resize_multiple(struct skb_array **rings,
					    int nrings, unsigned int size,
					    gfp_t gfp)
{
	BUILD_BUG_ON(offsetof(struct skb_array, ring));
	return ptr_ring_resize_multiple((struct ptr_ring **)rings,
					nrings, size, gfp,
					__skb_array_destroy_skb);
}

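/* Free the backing array; any skbs still queued are freed as well. */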
static inline void skb_array_cleanup(struct skb_array *a)
{
	ptr_ring_cleanup(&a->ring, __skb_array_destroy_skb);
}

#endif /* _LINUX_SKB_ARRAY_H */