1
2#ifndef __BPF_HELPERS__
3#define __BPF_HELPERS__
4
5
6
7
8
9
10
11#include "bpf_helper_defs.h"
12
/* Macros used in BTF-based map definitions: each encodes a map attribute in
 * the *type* of a struct field so it can be recovered from BTF type info.
 * __uint embeds the integer VAL as the dimension of a pointer-to-array type;
 * __type records a key/value type via a pointer to it; __array declares an
 * array of pointers to VAL's type (used for map-in-map / prog-array slots —
 * NOTE(review): confirm against libbpf's .maps-section conventions). */
#define __uint(name, val) int (*name)[val]
#define __type(name, val) typeof(val) *name
#define __array(name, val) typeof(val) *name[]
16
17
/* Helper macro to print out debug messages.
 * Materializes the format string into a local char array (so it lives in the
 * program's stack/data rather than as a bare string literal) and passes it,
 * together with its size including the NUL terminator (sizeof(____fmt)), to
 * bpf_trace_printk() along with up to the helper's supported number of
 * variadic args. The whole thing is a GNU statement expression, so it can be
 * used where an expression is expected. */
#define bpf_printk(fmt, ...) \
({ \
	char ____fmt[] = fmt; \
	bpf_trace_printk(____fmt, sizeof(____fmt), \
			 ##__VA_ARGS__); \
})
24
25
26
27
28
29
/* Place an object into the named ELF section; "used" keeps the compiler from
 * discarding it as unreferenced (the loader, not the C code, references it). */
#define SEC(NAME) __attribute__((section(NAME), used))
31
/* Inlining-control shorthands, defined only if not already provided (e.g. by
 * libc's sys/cdefs.h or the kernel headers).
 *
 * Fix: the always_inline attribute must be paired with the `inline` keyword —
 * GCC otherwise warns "always_inline function might not be inlinable" and may
 * refuse to inline; upstream libbpf defines it the same way. */
#ifndef __always_inline
#define __always_inline inline __attribute__((always_inline))
#endif
/* Forbid inlining, e.g. to keep a function as a separate BPF subprogram. */
#ifndef __noinline
#define __noinline __attribute__((noinline))
#endif
/* Weak symbol: a non-weak definition elsewhere takes precedence. */
#ifndef __weak
#define __weak __attribute__((weak))
#endif
41
42
43
44
/* Byte offset of MEMBER within TYPE.
 * Fix: use the compiler builtin instead of the classic
 * ((unsigned long)&((TYPE *)0)->MEMBER) null-pointer idiom, which is formally
 * undefined behavior; __builtin_offsetof is supported by both clang and GCC
 * and is a compile-time constant usable in constant expressions. */
#ifndef offsetof
#define offsetof(TYPE, MEMBER) __builtin_offsetof(TYPE, MEMBER)
#endif
/* Given a pointer to MEMBER embedded in TYPE, recover the pointer to the
 * containing TYPE object. Relies on GNU extensions (statement expression,
 * arithmetic on void *) that clang's BPF target supports. */
#ifndef container_of
#define container_of(ptr, type, member) \
	({ \
		void *__mptr = (void *)(ptr); \
		((type *)(__mptr - offsetof(type, member))); \
	})
#endif
55
56
57
58
59
60
61
62
63
64
65
66
67
/* Mark code that must be statically proven unreachable: compiles to a trap
 * instruction, so if the compiler cannot eliminate the path, the resulting
 * program fails (rather than silently doing the wrong thing at runtime). */
#ifndef __bpf_unreachable
# define __bpf_unreachable() __builtin_trap()
#endif
71
72
73
74
/* Tail call into @map at a compile-time-constant @slot.
 * Only available when targeting BPF with clang >= 8, which is required for
 * the BPF inline-asm dialect used below. */
#if __clang_major__ >= 8 && defined(__bpf__)
static __always_inline void
bpf_tail_call_static(void *ctx, const void *map, const __u32 slot)
{
	/* Enforce the "constant slot" contract: a non-constant index traps
	 * at compile/verification time instead of silently degrading. */
	if (!__builtin_constant_p(slot))
		__bpf_unreachable();

	/* Hand-written asm pins r1 (ctx), r2 (map) and r3 (slot) immediately
	 * before the call, so the compiler cannot merge register setup from
	 * different code paths into a single shared call site —
	 * NOTE(review): presumably to let the kernel JIT patch constant-slot
	 * tail calls into direct jumps; confirm against upstream libbpf
	 * commentary. "call 12" invokes BPF helper #12 (bpf_tail_call);
	 * r0-r5 are clobbered per the BPF helper calling convention. */
	asm volatile("r1 = %[ctx]\n\t"
		     "r2 = %[map]\n\t"
		     "r3 = %[slot]\n\t"
		     "call 12"
		     :: [ctx]"r"(ctx), [map]"r"(map), [slot]"i"(slot)
		     : "r0", "r1", "r2", "r3", "r4", "r5");
}
#endif
103
104
105
106
107
/* Legacy ("maps"-section style) map definition. Field layout is ABI visible
 * to loaders, so it must not change. Newer code typically uses BTF-defined
 * maps built from the __uint/__type/__array macros above instead. */
struct bpf_map_def {
	unsigned int type;        /* map type (BPF_MAP_TYPE_* value) */
	unsigned int key_size;    /* size of a key, in bytes */
	unsigned int value_size;  /* size of a value, in bytes */
	unsigned int max_entries; /* maximum number of entries */
	unsigned int map_flags;   /* creation flags passed to the kernel */
};
115
/* Map pinning behavior requested through a map definition. */
enum libbpf_pin_type {
	LIBBPF_PIN_NONE,
	/* PIN_BY_NAME: pin maps by name (in /sys/fs/bpf by default) */
	LIBBPF_PIN_BY_NAME,
};
121
/* Tri-state value mirroring Kconfig's bool/tristate options
 * (n / y / m), for use with __kconfig externs. */
enum libbpf_tristate {
	TRI_NO = 0,
	TRI_YES = 1,
	TRI_MODULE = 2,
};
127
/* Section markers for extern variables resolved by the loader:
 * __kconfig externs are filled from kernel configuration values, and
 * __ksym externs are resolved against kernel symbols. */
#define __kconfig __attribute__((section(".kconfig")))
#define __ksym __attribute__((section(".ksyms")))
130
131#endif
132