1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16#include <asm/uaccess.h>
17#include <linux/bitops.h>
18#include <linux/module.h>
19#include <linux/types.h>
20#include <linux/kernel.h>
21#include <linux/jiffies.h>
22#include <linux/string.h>
23#include <linux/mm.h>
24#include <linux/socket.h>
25#include <linux/sockios.h>
26#include <linux/in.h>
27#include <linux/errno.h>
28#include <linux/interrupt.h>
29#include <linux/netdevice.h>
30#include <linux/skbuff.h>
31#include <linux/rtnetlink.h>
32#include <linux/init.h>
33#include <linux/rbtree.h>
34#include <linux/slab.h>
35#include <net/sock.h>
36#include <net/gen_stats.h>
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79#define EST_MAX_INTERVAL 5
80
/*
 * Per-estimator state, one instance per (bstats, rate_est) pair
 * registered through gen_new_estimator().  Each estimator is linked
 * into two structures: the per-interval elist[].list walked by the
 * periodic timer (RCU-protected), and the est_root rbtree keyed by
 * the bstats pointer, used for lookup and removal.
 */
struct gen_estimator
{
	struct list_head	list;		/* entry in elist[idx].list (RCU list) */
	struct gnet_stats_basic_packed	*bstats;	/* source counters; NULL once killed */
	struct gnet_stats_rate_est64	*rate_est;	/* destination for computed bps/pps */
	spinlock_t		*stats_lock;	/* caller's lock protecting *bstats */
	int			ewma_log;	/* EWMA time constant, used as a shift count */
	u32			last_packets;	/* packet counter value at previous tick */
	unsigned long		avpps;		/* scaled running packet-rate average */
	u64			last_bytes;	/* byte counter value at previous tick */
	u64			avbps;		/* scaled running byte-rate average */
	struct rcu_head		e_rcu;		/* used by kfree_rcu() in gen_kill_estimator() */
	struct rb_node		node;		/* entry in est_root */
	struct gnet_stats_basic_cpu __percpu *cpu_bstats;	/* per-CPU counters, may be NULL */
	struct rcu_head		head;		/* NOTE(review): appears unused here — freeing goes
						 * through e_rcu; confirm before removing */
};
97
/*
 * Per-interval bookkeeping: one timer driving the RCU-protected list
 * of all estimators registered at that sampling interval.
 */
struct gen_estimator_head
{
	struct timer_list	timer;
	struct list_head	list;
};
103
/* One timer + estimator list per sampling interval; user-visible
 * intervals -2..3 map onto indexes 0..EST_MAX_INTERVAL, and the timer
 * for index idx fires every (HZ/4) << idx jiffies. */
static struct gen_estimator_head elist[EST_MAX_INTERVAL+1];

/* Guards the validity of gen_estimator::bstats: est_timer() takes the
 * read side before dereferencing, gen_kill_estimator() the write side
 * when clearing it. */
static DEFINE_RWLOCK(est_lock);

/* Rbtree of all active estimators, keyed by the bstats pointer, plus
 * the lock serializing all tree/list registration changes. */
static struct rb_root est_root = RB_ROOT;
static DEFINE_SPINLOCK(est_tree_lock);
112
113static void est_timer(unsigned long arg)
114{
115 int idx = (int)arg;
116 struct gen_estimator *e;
117
118 rcu_read_lock();
119 list_for_each_entry_rcu(e, &elist[idx].list, list) {
120 struct gnet_stats_basic_packed b = {0};
121 unsigned long rate;
122 u64 brate;
123
124 spin_lock(e->stats_lock);
125 read_lock(&est_lock);
126 if (e->bstats == NULL)
127 goto skip;
128
129 __gnet_stats_copy_basic(&b, e->cpu_bstats, e->bstats);
130
131 brate = (b.bytes - e->last_bytes)<<(7 - idx);
132 e->last_bytes = b.bytes;
133 e->avbps += (brate >> e->ewma_log) - (e->avbps >> e->ewma_log);
134 e->rate_est->bps = (e->avbps+0xF)>>5;
135
136 rate = b.packets - e->last_packets;
137 rate <<= (7 - idx);
138 e->last_packets = b.packets;
139 e->avpps += (rate >> e->ewma_log) - (e->avpps >> e->ewma_log);
140 e->rate_est->pps = (e->avpps + 0xF) >> 5;
141skip:
142 read_unlock(&est_lock);
143 spin_unlock(e->stats_lock);
144 }
145
146 if (!list_empty(&elist[idx].list))
147 mod_timer(&elist[idx].timer, jiffies + ((HZ/4) << idx));
148 rcu_read_unlock();
149}
150
151static void gen_add_node(struct gen_estimator *est)
152{
153 struct rb_node **p = &est_root.rb_node, *parent = NULL;
154
155 while (*p) {
156 struct gen_estimator *e;
157
158 parent = *p;
159 e = rb_entry(parent, struct gen_estimator, node);
160
161 if (est->bstats > e->bstats)
162 p = &parent->rb_right;
163 else
164 p = &parent->rb_left;
165 }
166 rb_link_node(&est->node, parent, p);
167 rb_insert_color(&est->node, &est_root);
168}
169
170static
171struct gen_estimator *gen_find_node(const struct gnet_stats_basic_packed *bstats,
172 const struct gnet_stats_rate_est64 *rate_est)
173{
174 struct rb_node *p = est_root.rb_node;
175
176 while (p) {
177 struct gen_estimator *e;
178
179 e = rb_entry(p, struct gen_estimator, node);
180
181 if (bstats > e->bstats)
182 p = p->rb_right;
183 else if (bstats < e->bstats || rate_est != e->rate_est)
184 p = p->rb_left;
185 else
186 return e;
187 }
188 return NULL;
189}
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
209 struct gnet_stats_basic_cpu __percpu *cpu_bstats,
210 struct gnet_stats_rate_est64 *rate_est,
211 spinlock_t *stats_lock,
212 struct nlattr *opt)
213{
214 struct gen_estimator *est;
215 struct gnet_estimator *parm = nla_data(opt);
216 struct gnet_stats_basic_packed b = {0};
217 int idx;
218
219 if (nla_len(opt) < sizeof(*parm))
220 return -EINVAL;
221
222 if (parm->interval < -2 || parm->interval > 3)
223 return -EINVAL;
224
225 est = kzalloc(sizeof(*est), GFP_KERNEL);
226 if (est == NULL)
227 return -ENOBUFS;
228
229 __gnet_stats_copy_basic(&b, cpu_bstats, bstats);
230
231 idx = parm->interval + 2;
232 est->bstats = bstats;
233 est->rate_est = rate_est;
234 est->stats_lock = stats_lock;
235 est->ewma_log = parm->ewma_log;
236 est->last_bytes = b.bytes;
237 est->avbps = rate_est->bps<<5;
238 est->last_packets = b.packets;
239 est->avpps = rate_est->pps<<10;
240 est->cpu_bstats = cpu_bstats;
241
242 spin_lock_bh(&est_tree_lock);
243 if (!elist[idx].timer.function) {
244 INIT_LIST_HEAD(&elist[idx].list);
245 setup_timer(&elist[idx].timer, est_timer, idx);
246 }
247
248 if (list_empty(&elist[idx].list))
249 mod_timer(&elist[idx].timer, jiffies + ((HZ/4) << idx));
250
251 list_add_rcu(&est->list, &elist[idx].list);
252 gen_add_node(est);
253 spin_unlock_bh(&est_tree_lock);
254
255 return 0;
256}
257EXPORT_SYMBOL(gen_new_estimator);
258
259
260
261
262
263
264
265
266
267
/**
 * gen_kill_estimator - remove rate estimator(s)
 * @bstats: basic statistics the estimator was created with
 * @rate_est: rate estimator statistics the estimator was created with
 *
 * Removes every estimator registered for the (@bstats, @rate_est) pair.
 * Teardown order matters: the entry is first unlinked from the rbtree
 * under est_tree_lock, then ->bstats is cleared under the est_lock
 * write side so a concurrently running est_timer() skips the dying
 * entry, and finally the memory is released only after an RCU grace
 * period, covering the timer's RCU list walk.
 */
void gen_kill_estimator(struct gnet_stats_basic_packed *bstats,
			struct gnet_stats_rate_est64 *rate_est)
{
	struct gen_estimator *e;

	spin_lock_bh(&est_tree_lock);
	while ((e = gen_find_node(bstats, rate_est))) {
		rb_erase(&e->node, &est_root);

		/* est_timer() checks ->bstats under read_lock(&est_lock) */
		write_lock(&est_lock);
		e->bstats = NULL;
		write_unlock(&est_lock);

		list_del_rcu(&e->list);
		kfree_rcu(e, e_rcu);	/* freed after an RCU grace period */
	}
	spin_unlock_bh(&est_tree_lock);
}
EXPORT_SYMBOL(gen_kill_estimator);
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301int gen_replace_estimator(struct gnet_stats_basic_packed *bstats,
302 struct gnet_stats_basic_cpu __percpu *cpu_bstats,
303 struct gnet_stats_rate_est64 *rate_est,
304 spinlock_t *stats_lock, struct nlattr *opt)
305{
306 gen_kill_estimator(bstats, rate_est);
307 return gen_new_estimator(bstats, cpu_bstats, rate_est, stats_lock, opt);
308}
309EXPORT_SYMBOL(gen_replace_estimator);
310
311
312
313
314
315
316
317
318bool gen_estimator_active(const struct gnet_stats_basic_packed *bstats,
319 const struct gnet_stats_rate_est64 *rate_est)
320{
321 bool res;
322
323 ASSERT_RTNL();
324
325 spin_lock_bh(&est_tree_lock);
326 res = gen_find_node(bstats, rate_est) != NULL;
327 spin_unlock_bh(&est_tree_lock);
328
329 return res;
330}
331EXPORT_SYMBOL(gen_estimator_active);
332