1
2#ifndef __BPF_CORE_READ_H__
3#define __BPF_CORE_READ_H__
4
5
6
7
8
9
10
11
12
/*
 * Second argument of __builtin_preserve_field_info() built-in: selects
 * which aspect of a struct/union field the compiler should emit a CO-RE
 * relocation for. Values must match what the Clang built-in expects.
 */
enum bpf_field_info_kind {
	BPF_FIELD_BYTE_OFFSET = 0,	/* field's byte offset within its struct */
	BPF_FIELD_BYTE_SIZE = 1,	/* size, in bytes, of storage containing the field */
	BPF_FIELD_EXISTS = 2,		/* 1 if field exists in target BTF, 0 otherwise */
	BPF_FIELD_SIGNED = 3,		/* non-zero if field is signed */
	BPF_FIELD_LSHIFT_U64 = 4,	/* left shift isolating a bitfield in a u64 */
	BPF_FIELD_RSHIFT_U64 = 5,	/* right shift sign-/zero-extending the bitfield */
};
21
22
/*
 * Second argument of __builtin_btf_type_id() built-in: selects whether
 * the returned BTF type ID refers to this program's own BTF or to the
 * target (kernel) BTF, resolved at load time.
 */
enum bpf_type_id_kind {
	BPF_TYPE_ID_LOCAL = 0,		/* type ID in local program's BTF */
	BPF_TYPE_ID_TARGET = 1,		/* type ID in target (kernel) BTF */
};
27
28
/*
 * Second argument of __builtin_preserve_type_info() built-in: selects
 * which type-level CO-RE relocation to emit.
 */
enum bpf_type_info_kind {
	BPF_TYPE_EXISTS = 0,		/* 1 if type exists in target BTF, 0 otherwise */
	BPF_TYPE_SIZE = 1,		/* byte size of the type in target BTF */
};
33
34
/*
 * Second argument of __builtin_preserve_enum_value() built-in: selects
 * which enumerator-level CO-RE relocation to emit.
 */
enum bpf_enum_value_kind {
	BPF_ENUMVAL_EXISTS = 0,		/* 1 if enumerator exists in target BTF, 0 otherwise */
	BPF_ENUMVAL_VALUE = 1,		/* 64-bit enumerator value in the target */
};
39
/*
 * Convenience wrapper: request CO-RE field info of kind BPF_FIELD_<info>
 * about (src)->field. The value is filled in at program load time.
 */
#define __CORE_RELO(src, field, info) \
	__builtin_preserve_field_info((src)->field, BPF_FIELD_##info)
42
#if __BYTE_ORDER == __LITTLE_ENDIAN
/*
 * Probe-read the bytes containing a bitfield into *dst (a u64). On
 * little-endian, the field bytes belong at the low-order end of the u64,
 * so read straight into the start of dst.
 */
#define __CORE_BITFIELD_PROBE_READ(dst, src, fld) \
	bpf_probe_read_kernel( \
		(void *)dst, \
		__CORE_RELO(src, fld, BYTE_SIZE), \
		(const void *)src + __CORE_RELO(src, fld, BYTE_OFFSET))
#else
/*
 * On big-endian, the field bytes must land at the high-order end of the
 * u64 for the LSHIFT_U64/RSHIFT_U64 math to isolate them, so offset the
 * destination by (8 - byte_size).
 */
#define __CORE_BITFIELD_PROBE_READ(dst, src, fld) \
	bpf_probe_read_kernel( \
		(void *)dst + (8 - __CORE_RELO(src, fld, BYTE_SIZE)), \
		__CORE_RELO(src, fld, BYTE_SIZE), \
		(const void *)src + __CORE_RELO(src, fld, BYTE_OFFSET))
#endif
60
61
62
63
64
65
66
67
68
/*
 * Extract bitfield, identified by s->field, using a probed kernel read
 * (bpf_probe_read_kernel) of the containing bytes, then shift left to drop
 * the bits above the field and shift right to sign- or zero-extend it,
 * depending on the field's signedness. All offsets, sizes, and shift
 * amounts are CO-RE relocations resolved at load time.
 */
#define BPF_CORE_READ_BITFIELD_PROBED(s, field) ({ \
	unsigned long long val = 0; \
	\
	__CORE_BITFIELD_PROBE_READ(&val, s, field); \
	val <<= __CORE_RELO(s, field, LSHIFT_U64); \
	if (__CORE_RELO(s, field, SIGNED)) \
		val = ((long long)val) >> __CORE_RELO(s, field, RSHIFT_U64); \
	else \
		val = val >> __CORE_RELO(s, field, RSHIFT_U64); \
	val; \
})
80
81
82
83
84
85
86
/*
 * Extract bitfield, identified by s->field, with a direct (non-probed)
 * memory load of the bytes containing it, then shift to isolate and
 * sign-/zero-extend the field. Byte offset/size and shift amounts are
 * CO-RE relocations resolved at load time.
 *
 * Fix: each switch case now ends with `break`. Previously every case fell
 * through, so a smaller-size load was immediately clobbered by the wider
 * loads that followed — including an out-of-bounds 8-byte dereference for
 * 1/2/4-byte fields — producing a wrong (and potentially faulting) value.
 */
#define BPF_CORE_READ_BITFIELD(s, field) ({ \
	const void *p = (const void *)s + __CORE_RELO(s, field, BYTE_OFFSET); \
	unsigned long long val; \
	\
	switch (__CORE_RELO(s, field, BYTE_SIZE)) { \
	case 1: val = *(const unsigned char *)p; break; \
	case 2: val = *(const unsigned short *)p; break; \
	case 4: val = *(const unsigned int *)p; break; \
	case 8: val = *(const unsigned long long *)p; break; \
	} \
	val <<= __CORE_RELO(s, field, LSHIFT_U64); \
	if (__CORE_RELO(s, field, SIGNED)) \
		val = ((long long)val) >> __CORE_RELO(s, field, RSHIFT_U64); \
	else \
		val = val >> __CORE_RELO(s, field, RSHIFT_U64); \
	val; \
})
104
105
106
107
108
109
110
/*
 * bpf_core_field_exists() - evaluates to 1 if the field exists in the
 * target kernel's BTF, 0 otherwise. Resolved at load time via CO-RE, so
 * it can guard accesses to fields that may be absent on older kernels.
 */
#define bpf_core_field_exists(field) \
	__builtin_preserve_field_info(field, BPF_FIELD_EXISTS)

/*
 * bpf_core_field_size() - byte size of the field as laid out in the
 * target kernel, CO-RE-relocated at load time.
 */
#define bpf_core_field_size(field) \
	__builtin_preserve_field_info(field, BPF_FIELD_BYTE_SIZE)
120
121
122
123
124
125
/*
 * bpf_core_type_id_local() - BTF type ID of `type` within this BPF
 * object's own BTF. The `*(typeof(type) *)0` is a compile-time type
 * expression consumed by the built-in; it is never evaluated at runtime.
 */
#define bpf_core_type_id_local(type) \
	__builtin_btf_type_id(*(typeof(type) *)0, BPF_TYPE_ID_LOCAL)

/*
 * bpf_core_type_id_kernel() - BTF type ID of `type` in the target
 * (kernel) BTF, resolved at program load time.
 */
#define bpf_core_type_id_kernel(type) \
	__builtin_btf_type_id(*(typeof(type) *)0, BPF_TYPE_ID_TARGET)
138
139
140
141
142
143
144
145
/*
 * bpf_core_type_exists() - evaluates to 1 if `type` exists in the target
 * kernel's BTF, 0 otherwise. The null-pointer dereference is a
 * compile-time type expression only — never evaluated at runtime.
 */
#define bpf_core_type_exists(type) \
	__builtin_preserve_type_info(*(typeof(type) *)0, BPF_TYPE_EXISTS)

/*
 * bpf_core_type_size() - byte size of `type` as defined in the target
 * kernel's BTF, resolved at load time.
 */
#define bpf_core_type_size(type) \
	__builtin_preserve_type_info(*(typeof(type) *)0, BPF_TYPE_SIZE)
158
159
160
161
162
163
164
165
166
/*
 * bpf_core_enum_value_exists() - evaluates to 1 if enumerator
 * `enum_value` exists in the target kernel's definition of `enum_type`.
 * The cast of the enumerator name to (typeof(enum_type) *) is a
 * compile-time encoding consumed by the built-in; nothing is actually
 * dereferenced at runtime.
 */
#define bpf_core_enum_value_exists(enum_type, enum_value) \
	__builtin_preserve_enum_value(*(typeof(enum_type) *)enum_value, BPF_ENUMVAL_EXISTS)

/*
 * bpf_core_enum_value() - 64-bit value of enumerator `enum_value` as
 * defined in the target kernel, resolved at load time.
 */
#define bpf_core_enum_value(enum_type, enum_value) \
	__builtin_preserve_enum_value(*(typeof(enum_type) *)enum_value, BPF_ENUMVAL_VALUE)
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
/*
 * bpf_core_read() - CO-RE-relocatable bpf_probe_read_kernel():
 * __builtin_preserve_access_index() records relocations for every field
 * access inside the `src` expression, so offsets are adjusted to the
 * running kernel's struct layout at load time.
 */
#define bpf_core_read(dst, sz, src) \
	bpf_probe_read_kernel(dst, sz, \
			      (const void *)__builtin_preserve_access_index(src))

/*
 * bpf_core_read_str() - same as bpf_core_read(), but performs a
 * NUL-terminated string copy via bpf_probe_read_kernel_str().
 */
#define bpf_core_read_str(dst, sz, src) \
	bpf_probe_read_kernel_str(dst, sz, \
				  (const void *)__builtin_preserve_access_index(src))
209
210#define ___concat(a, b) a ## b
211#define ___apply(fn, n) ___concat(fn, n)
212#define ___nth(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, __11, N, ...) N
213
214
215
216
217
218#define ___narg(...) ___nth(_, ##__VA_ARGS__, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
219
220
221
222
223
224#define ___empty(...) ___nth(_, ##__VA_ARGS__, N, N, N, N, N, N, N, N, N, N, 0)
225
/*
 * ___last<N>(): expand to the last of its N arguments, discarding the
 * rest. ___last() dispatches to the right arity via ___apply/___narg.
 */
#define ___last1(last) last
#define ___last2(p1, last) last
#define ___last3(p1, p2, last) last
#define ___last4(p1, p2, p3, last) last
#define ___last5(p1, p2, p3, p4, last) last
#define ___last6(p1, p2, p3, p4, p5, last) last
#define ___last7(p1, p2, p3, p4, p5, p6, last) last
#define ___last8(p1, p2, p3, p4, p5, p6, p7, last) last
#define ___last9(p1, p2, p3, p4, p5, p6, p7, p8, last) last
#define ___last10(p1, p2, p3, p4, p5, p6, p7, p8, p9, last) last
#define ___last(...) ___apply(___last, ___narg(__VA_ARGS__))(__VA_ARGS__)
237
/*
 * ___nolast<N>(): expand to all arguments except the last one (as a
 * comma-separated list). ___nolast() dispatches by arity.
 */
#define ___nolast2(p1, skip) p1
#define ___nolast3(p1, p2, skip) p1, p2
#define ___nolast4(p1, p2, p3, skip) p1, p2, p3
#define ___nolast5(p1, p2, p3, p4, skip) p1, p2, p3, p4
#define ___nolast6(p1, p2, p3, p4, p5, skip) p1, p2, p3, p4, p5
#define ___nolast7(p1, p2, p3, p4, p5, p6, skip) p1, p2, p3, p4, p5, p6
#define ___nolast8(p1, p2, p3, p4, p5, p6, p7, skip) p1, p2, p3, p4, p5, p6, p7
#define ___nolast9(p1, p2, p3, p4, p5, p6, p7, p8, skip) p1, p2, p3, p4, p5, p6, p7, p8
#define ___nolast10(p1, p2, p3, p4, p5, p6, p7, p8, p9, skip) p1, p2, p3, p4, p5, p6, p7, p8, p9
#define ___nolast(...) ___apply(___nolast, ___narg(__VA_ARGS__))(__VA_ARGS__)
248
/*
 * ___arrow<N>(): build the chained pointer-dereference expression
 * p1->p2->...->pN from its arguments. ___arrow() dispatches by arity.
 */
#define ___arrow1(p1) p1
#define ___arrow2(p1, p2) p1->p2
#define ___arrow3(p1, p2, p3) p1->p2->p3
#define ___arrow4(p1, p2, p3, p4) p1->p2->p3->p4
#define ___arrow5(p1, p2, p3, p4, p5) p1->p2->p3->p4->p5
#define ___arrow6(p1, p2, p3, p4, p5, p6) p1->p2->p3->p4->p5->p6
#define ___arrow7(p1, p2, p3, p4, p5, p6, p7) p1->p2->p3->p4->p5->p6->p7
#define ___arrow8(p1, p2, p3, p4, p5, p6, p7, p8) p1->p2->p3->p4->p5->p6->p7->p8
#define ___arrow9(p1, p2, p3, p4, p5, p6, p7, p8, p9) p1->p2->p3->p4->p5->p6->p7->p8->p9
#define ___arrow10(p1, p2, p3, p4, p5, p6, p7, p8, p9, p10) p1->p2->p3->p4->p5->p6->p7->p8->p9->p10
#define ___arrow(...) ___apply(___arrow, ___narg(__VA_ARGS__))(__VA_ARGS__)
260
/* ___type(): the C type of the expression a->b->...->last, used to
 * declare correctly-typed intermediate/result variables. */
#define ___type(...) typeof(___arrow(__VA_ARGS__))

/* ___read(): one read of &((src_type)src)->accessor into *dst via
 * read_fn (bpf_core_read or bpf_core_read_str). */
#define ___read(read_fn, dst, src_type, src, accessor) \
	read_fn((void *)(dst), sizeof(*(dst)), &((src_type)(src))->accessor)
265
266
/* Read the first pointer in a chain: __t = bpf_core_read(src->a). */
#define ___rd_first(src, a) ___read(bpf_core_read, &__t, ___type(src), src, a);
/* Follow one more pointer: __t = bpf_core_read(__t-><last accessor>),
 * typed by everything except the last accessor. */
#define ___rd_last(...) \
	___read(bpf_core_read, &__t, \
		___type(___nolast(__VA_ARGS__)), __t, ___last(__VA_ARGS__));
/*
 * ___rd_pN(): emit a sequence of N chained pointer reads. ___rd_p1
 * declares the intermediate pointer __t that threads the chain; each
 * higher arity peels off the last accessor, recurses on the rest, then
 * performs the final read of that peeled accessor.
 */
#define ___rd_p1(...) const void *__t; ___rd_first(__VA_ARGS__)
#define ___rd_p2(...) ___rd_p1(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p3(...) ___rd_p2(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p4(...) ___rd_p3(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p5(...) ___rd_p4(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p6(...) ___rd_p5(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p7(...) ___rd_p6(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p8(...) ___rd_p7(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
#define ___rd_p9(...) ___rd_p8(___nolast(__VA_ARGS__)) ___rd_last(__VA_ARGS__)
/* Emit as many chained reads as there are intermediate accessors. */
#define ___read_ptrs(src, ...) \
	___apply(___rd_p, ___narg(__VA_ARGS__))(src, __VA_ARGS__)
282
/* Single accessor: one direct read of src->a into dst. */
#define ___core_read0(fn, dst, src, a) \
	___read(fn, dst, ___type(src), src, a);
/* Multiple accessors: read all intermediate pointers into __t, then do
 * the final read (with fn, which may differ, e.g. bpf_core_read_str)
 * from the last pointer. */
#define ___core_readN(fn, dst, src, ...) \
	___read_ptrs(src, ___nolast(__VA_ARGS__)) \
	___read(fn, dst, ___type(src, ___nolast(__VA_ARGS__)), __t, \
		___last(__VA_ARGS__));
/* Dispatch to ___core_read0 or ___core_readN depending on whether any
 * accessors beyond `a` were supplied (___empty expands to 0 or N). */
#define ___core_read(fn, dst, src, a, ...) \
	___apply(___core_read, ___empty(__VA_ARGS__))(fn, dst, \
						      src, a, ##__VA_ARGS__)
292
293
294
295
296
297
/*
 * BPF_CORE_READ_INTO() - variant of BPF_CORE_READ() that stores the final
 * value into a caller-provided destination instead of returning it. Each
 * pointer dereference in the accessor chain becomes one CO-RE-relocated
 * bpf_core_read() call.
 */
#define BPF_CORE_READ_INTO(dst, src, a, ...) \
	({ \
		___core_read(bpf_core_read, dst, (src), a, ##__VA_ARGS__) \
	})

/*
 * BPF_CORE_READ_STR_INTO() - like BPF_CORE_READ_INTO(), but the final
 * read is a NUL-terminated string copy via bpf_core_read_str().
 */
#define BPF_CORE_READ_STR_INTO(dst, src, a, ...) \
	({ \
		___core_read(bpf_core_read_str, dst, (src), a, ##__VA_ARGS__)\
	})
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
/*
 * BPF_CORE_READ() - read a multi-level chain src->a->b->...->last, with
 * every intermediate dereference performed via CO-RE-relocated
 * bpf_core_read() (i.e., probed kernel reads), and return the final
 * value. Behaves like the plain expression s->a->b->...->last, but is
 * safe to use on kernel memory from a BPF program.
 */
#define BPF_CORE_READ(src, a, ...) \
	({ \
		___type((src), a, ##__VA_ARGS__) __r; \
		BPF_CORE_READ_INTO(&__r, (src), a, ##__VA_ARGS__); \
		__r; \
	})
343
344#endif
345
346