/*
 * Just-In-Time compiler for eBPF bytecode on 64-bit MIPS.
 */
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/filter.h>
#include <linux/bpf.h>
#include <linux/slab.h>
#include <asm/bitops.h>
#include <asm/byteorder.h>
#include <asm/cacheflush.h>
#include <asm/cpu-features.h>
#include <asm/uasm.h>

/* MIPS general purpose registers used by the JIT. */
#define MIPS_R_ZERO 0
#define MIPS_R_AT 1
#define MIPS_R_V0 2
#define MIPS_R_V1 3
#define MIPS_R_A0 4
#define MIPS_R_A1 5
#define MIPS_R_A2 6
#define MIPS_R_A3 7
#define MIPS_R_A4 8
#define MIPS_R_T4 12
#define MIPS_R_T5 13
#define MIPS_R_T6 14
#define MIPS_R_T7 15
#define MIPS_R_S0 16
#define MIPS_R_S1 17
#define MIPS_R_S2 18
#define MIPS_R_S3 19
#define MIPS_R_S4 20
#define MIPS_R_S5 21
#define MIPS_R_S6 22
#define MIPS_R_S7 23
#define MIPS_R_T8 24
#define MIPS_R_T9 25
#define MIPS_R_SP 29
#define MIPS_R_RA 31

/* Flags kept in jit_ctx::flags describing what the program needs. */
#define EBPF_SAVE_S0 BIT(0)
#define EBPF_SAVE_S1 BIT(1)
#define EBPF_SAVE_S2 BIT(2)
#define EBPF_SAVE_S3 BIT(3)
#define EBPF_SAVE_S4 BIT(4)
#define EBPF_SAVE_RA BIT(5)
#define EBPF_SEEN_FP BIT(6)
#define EBPF_SEEN_TC BIT(7)
#define EBPF_TCC_IN_V1 BIT(8)
/*
 * For the mips64 ISA we need to track the value range or type of each
 * register: the BPF machine expects 32-bit results to be zero extended,
 * while mips64 arithmetic produces sign-extended 32-bit values.  The
 * type of every register is therefore tracked at each instruction so
 * that zero or sign extensions are only emitted when really needed.
 */
enum reg_val_type {
	/* uninitialized */
	REG_UNKNOWN,
	/* not known to be 32-bit compatible. */
	REG_64BIT,
	/* 32-bit compatible, no truncation needed for 64-bit ops. */
	REG_64BIT_32BIT,
	/* 32-bit compatible, need truncation for 64-bit ops. */
	REG_32BIT,
	/* 32-bit zero extended. */
	REG_32BIT_ZERO_EX,
	/* 32-bit, no sign/zero extension needed. */
	REG_32BIT_POS
};

/*
 * The high bit of an offsets[] entry records that the branch at this
 * insn was converted to a long (jump-around) sequence.
 */
#define OFFSETS_B_CONV BIT(31)

/*
 * struct jit_ctx - per-program JIT state.
 * @skf:		the eBPF program being compiled
 * @stack_size:		size of the stack frame set up by the prologue
 * @tmp_offset:		$sp offset of the scratch area used by LD_ABS/LD_IND
 * @idx:		index of the next MIPS instruction to emit
 * @flags:		EBPF_* flags recording used registers/features
 * @offsets:		per-eBPF-insn byte offset of the generated code
 * @target:		memory for the generated code (NULL on sizing passes)
 * @reg_val_types:	packed enum reg_val_type for each register, per insn
 */
struct jit_ctx {
	const struct bpf_prog *skf;
	int stack_size;
	int tmp_offset;
	u32 idx;
	u32 flags;
	u32 *offsets;
	u32 *target;
	u64 *reg_val_types;
	unsigned int long_b_conversion:1;
	unsigned int gen_b_offsets:1;
	unsigned int use_bbit_insns:1;
};
118
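/*
 * The reg_val_type of every eBPF register is packed into 3 bits of a
 * per-instruction u64 snapshot; the top bits of the same word are used
 * as RVT_* visit flags during type propagation (see below).
 */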
static void set_reg_val_type(u64 *rvt, int reg, enum reg_val_type type)
{
	*rvt &= ~(7ull << (reg * 3));
	*rvt |= ((u64)type << (reg * 3));
}

static enum reg_val_type get_reg_val_type(const struct jit_ctx *ctx,
					  int index, int reg)
{
	return (ctx->reg_val_types[index] >> (reg * 3)) & 7;
}

/*
 * Emit the instruction when the image has been allocated; on the
 * sizing passes only ctx->idx is advanced.
 */
#define emit_instr(ctx, func, ...)			\
do {							\
	if ((ctx)->target != NULL) {			\
		u32 *p = &(ctx)->target[ctx->idx];	\
		uasm_i_##func(&p, ##__VA_ARGS__);	\
	}						\
	(ctx)->idx++;					\
} while (0)
140
static unsigned int j_target(struct jit_ctx *ctx, int target_idx)
{
	unsigned long target_va, base_va;
	unsigned int r;

	if (!ctx->target)
		return 0;

	base_va = (unsigned long)ctx->target;
	target_va = base_va + (ctx->offsets[target_idx] & ~OFFSETS_B_CONV);

	/* j can only reach targets within the same 256MB segment. */
	if ((base_va & ~0x0ffffffful) != (target_va & ~0x0ffffffful))
		return (unsigned int)-1;
	r = target_va & 0x0ffffffful;
	return r;
}

/* Byte offset for a pc-relative branch from the current insn to @tgt. */
static u32 b_imm(unsigned int tgt, struct jit_ctx *ctx)
{
	if (!ctx->gen_b_offsets)
		return 0;

	/*
	 * The branch offset is the byte offset of the target insn
	 * minus the address of the delay slot that follows the branch
	 * (hence the extra -4).  The OFFSETS_B_CONV marker bit is not
	 * part of the offset and is masked off.
	 */
	return (ctx->offsets[tgt] & ~OFFSETS_B_CONV) -
		(ctx->idx * 4) - 4;
}
179
180int bpf_jit_enable __read_mostly;
181
182enum which_ebpf_reg {
183 src_reg,
184 src_reg_no_fp,
185 dst_reg,
186 dst_reg_fp_ok
187};
188
/*
 * Map an eBPF register to the MIPS register that holds it, recording in
 * ctx->flags any callee-saved register the prologue must preserve.
 * BPF_REG_10 (the frame pointer) has no register of its own: callers
 * rewrite accesses to it as $sp-relative.
 */
static int ebpf_to_mips_reg(struct jit_ctx *ctx, const struct bpf_insn *insn,
			    enum which_ebpf_reg w)
197{
198 int ebpf_reg = (w == src_reg || w == src_reg_no_fp) ?
199 insn->src_reg : insn->dst_reg;
200
201 switch (ebpf_reg) {
202 case BPF_REG_0:
203 return MIPS_R_V0;
204 case BPF_REG_1:
205 return MIPS_R_A0;
206 case BPF_REG_2:
207 return MIPS_R_A1;
208 case BPF_REG_3:
209 return MIPS_R_A2;
210 case BPF_REG_4:
211 return MIPS_R_A3;
212 case BPF_REG_5:
213 return MIPS_R_A4;
214 case BPF_REG_6:
215 ctx->flags |= EBPF_SAVE_S0;
216 return MIPS_R_S0;
217 case BPF_REG_7:
218 ctx->flags |= EBPF_SAVE_S1;
219 return MIPS_R_S1;
220 case BPF_REG_8:
221 ctx->flags |= EBPF_SAVE_S2;
222 return MIPS_R_S2;
223 case BPF_REG_9:
224 ctx->flags |= EBPF_SAVE_S3;
225 return MIPS_R_S3;
226 case BPF_REG_10:
227 if (w == dst_reg || w == src_reg_no_fp)
228 goto bad_reg;
229 ctx->flags |= EBPF_SEEN_FP;
		/*
		 * Needs special handling: callers rewrite frame-pointer
		 * accesses to be $sp relative, so return something that
		 * cannot be clobbered just in case.
		 */
		return MIPS_R_ZERO;
235 case BPF_REG_AX:
236 return MIPS_R_T4;
237 default:
238bad_reg:
239 WARN(1, "Illegal bpf reg: %d\n", ebpf_reg);
240 return -EINVAL;
241 }
242}
243
/*
 * The stack frame built by gen_int_prologue() (higher addresses on top):
 *
 *    entry $sp -->    +------------------------------------+
 *                     |  $ra          (if EBPF_SAVE_RA)    |
 *                     |  $s0 .. $s4   (as flagged)         |
 *                     |  8-byte tmp   (if EBPF_SAVE_RA)    |
 * $sp + tmp_offset -> +------------------------------------+ <-- BPF_REG_10
 *                     |  MAX_BPF_STACK bytes of BPF stack  |
 *                     |               (if EBPF_SEEN_FP)    |
 *          $sp -->    +------------------------------------+
 *
 * If BPF_REG_10 is never referenced, the MAX_BPF_STACK area is not
 * allocated and tmp_offset is 0.
 */
271static int gen_int_prologue(struct jit_ctx *ctx)
272{
273 int stack_adjust = 0;
274 int store_offset;
275 int locals_size;
276
277 if (ctx->flags & EBPF_SAVE_RA)
		/*
		 * If $ra is saved we may be making function calls and
		 * also need an 8-byte scratch area (the LD_ABS/LD_IND
		 * bounce buffer), hence 16 bytes.
		 */
282 stack_adjust += 16;
283 if (ctx->flags & EBPF_SAVE_S0)
284 stack_adjust += 8;
285 if (ctx->flags & EBPF_SAVE_S1)
286 stack_adjust += 8;
287 if (ctx->flags & EBPF_SAVE_S2)
288 stack_adjust += 8;
289 if (ctx->flags & EBPF_SAVE_S3)
290 stack_adjust += 8;
291 if (ctx->flags & EBPF_SAVE_S4)
292 stack_adjust += 8;
293
294 BUILD_BUG_ON(MAX_BPF_STACK & 7);
295 locals_size = (ctx->flags & EBPF_SEEN_FP) ? MAX_BPF_STACK : 0;
296
297 stack_adjust += locals_size;
298 ctx->tmp_offset = locals_size;
299
300 ctx->stack_size = stack_adjust;

	/*
	 * The first instruction initializes the tail call count (TCC)
	 * in $v1.  Tail calls jump past it (see emit_bpf_tail_call())
	 * so that the caller's decremented count is kept.
	 */
307 emit_instr(ctx, daddiu, MIPS_R_V1, MIPS_R_ZERO, MAX_TAIL_CALL_CNT);
308 if (stack_adjust)
309 emit_instr(ctx, daddiu, MIPS_R_SP, MIPS_R_SP, -stack_adjust);
310 else
311 return 0;
312
313 store_offset = stack_adjust - 8;
314
315 if (ctx->flags & EBPF_SAVE_RA) {
316 emit_instr(ctx, sd, MIPS_R_RA, store_offset, MIPS_R_SP);
317 store_offset -= 8;
318 }
319 if (ctx->flags & EBPF_SAVE_S0) {
320 emit_instr(ctx, sd, MIPS_R_S0, store_offset, MIPS_R_SP);
321 store_offset -= 8;
322 }
323 if (ctx->flags & EBPF_SAVE_S1) {
324 emit_instr(ctx, sd, MIPS_R_S1, store_offset, MIPS_R_SP);
325 store_offset -= 8;
326 }
327 if (ctx->flags & EBPF_SAVE_S2) {
328 emit_instr(ctx, sd, MIPS_R_S2, store_offset, MIPS_R_SP);
329 store_offset -= 8;
330 }
331 if (ctx->flags & EBPF_SAVE_S3) {
332 emit_instr(ctx, sd, MIPS_R_S3, store_offset, MIPS_R_SP);
333 store_offset -= 8;
334 }
335 if (ctx->flags & EBPF_SAVE_S4) {
336 emit_instr(ctx, sd, MIPS_R_S4, store_offset, MIPS_R_SP);
337 store_offset -= 8;
338 }
339
340 if ((ctx->flags & EBPF_SEEN_TC) && !(ctx->flags & EBPF_TCC_IN_V1))
341 emit_instr(ctx, daddu, MIPS_R_S4, MIPS_R_V1, MIPS_R_ZERO);
342
343 return 0;
344}
345
346static int build_int_epilogue(struct jit_ctx *ctx, int dest_reg)
347{
348 const struct bpf_prog *prog = ctx->skf;
349 int stack_adjust = ctx->stack_size;
350 int store_offset = stack_adjust - 8;
351 int r0 = MIPS_R_V0;
352
353 if (dest_reg == MIPS_R_RA &&
354 get_reg_val_type(ctx, prog->len, BPF_REG_0) == REG_32BIT_ZERO_EX)
		/* Don't let a zero-extended value escape; sign extend it for the ABI. */
356 emit_instr(ctx, sll, r0, r0, 0);
357
358 if (ctx->flags & EBPF_SAVE_RA) {
359 emit_instr(ctx, ld, MIPS_R_RA, store_offset, MIPS_R_SP);
360 store_offset -= 8;
361 }
362 if (ctx->flags & EBPF_SAVE_S0) {
363 emit_instr(ctx, ld, MIPS_R_S0, store_offset, MIPS_R_SP);
364 store_offset -= 8;
365 }
366 if (ctx->flags & EBPF_SAVE_S1) {
367 emit_instr(ctx, ld, MIPS_R_S1, store_offset, MIPS_R_SP);
368 store_offset -= 8;
369 }
370 if (ctx->flags & EBPF_SAVE_S2) {
371 emit_instr(ctx, ld, MIPS_R_S2, store_offset, MIPS_R_SP);
372 store_offset -= 8;
373 }
374 if (ctx->flags & EBPF_SAVE_S3) {
375 emit_instr(ctx, ld, MIPS_R_S3, store_offset, MIPS_R_SP);
376 store_offset -= 8;
377 }
378 if (ctx->flags & EBPF_SAVE_S4) {
379 emit_instr(ctx, ld, MIPS_R_S4, store_offset, MIPS_R_SP);
380 store_offset -= 8;
381 }
382 emit_instr(ctx, jr, dest_reg);
383
384 if (stack_adjust)
385 emit_instr(ctx, daddiu, MIPS_R_SP, MIPS_R_SP, stack_adjust);
386 else
387 emit_instr(ctx, nop);
388
389 return 0;
390}
391
392static void gen_imm_to_reg(const struct bpf_insn *insn, int reg,
393 struct jit_ctx *ctx)
394{
395 if (insn->imm >= S16_MIN && insn->imm <= S16_MAX) {
396 emit_instr(ctx, addiu, reg, MIPS_R_ZERO, insn->imm);
397 } else {
398 int lower = (s16)(insn->imm & 0xffff);
399 int upper = insn->imm - lower;
400
401 emit_instr(ctx, lui, reg, upper >> 16);
402 emit_instr(ctx, addiu, reg, reg, lower);
403 }
404
405}
406
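/*
 * Emit an ALU/ALU64 operation with an immediate operand.  When the
 * immediate fits the instruction's field a single MIPS instruction is
 * used; otherwise the constant is first materialized in $at.
 */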
407static int gen_imm_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
408 int idx)
409{
410 int upper_bound, lower_bound;
411 int dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
412
413 if (dst < 0)
414 return dst;
415
416 switch (BPF_OP(insn->code)) {
417 case BPF_MOV:
418 case BPF_ADD:
419 upper_bound = S16_MAX;
420 lower_bound = S16_MIN;
421 break;
422 case BPF_SUB:
423 upper_bound = -(int)S16_MIN;
424 lower_bound = -(int)S16_MAX;
425 break;
426 case BPF_AND:
427 case BPF_OR:
428 case BPF_XOR:
429 upper_bound = 0xffff;
430 lower_bound = 0;
431 break;
432 case BPF_RSH:
433 case BPF_LSH:
434 case BPF_ARSH:
		/* Shift amounts are masked below, so any immediate is acceptable. */
436 upper_bound = S32_MAX;
437 lower_bound = S32_MIN;
438 break;
439 default:
440 return -EINVAL;
441 }
442
	/*
	 * A 64-bit op on a register holding only a 32-bit value must
	 * first clear the upper half; a 32-bit op on a 64-bit value
	 * needs a sign-extending truncation.  MOV overwrites dst
	 * completely, so it is exempt.
	 */
447 if (BPF_CLASS(insn->code) == BPF_ALU64 &&
448 BPF_OP(insn->code) != BPF_MOV &&
449 get_reg_val_type(ctx, idx, insn->dst_reg) == REG_32BIT)
450 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
451
452 if (BPF_CLASS(insn->code) == BPF_ALU &&
453 BPF_OP(insn->code) != BPF_LSH &&
454 BPF_OP(insn->code) != BPF_MOV &&
455 get_reg_val_type(ctx, idx, insn->dst_reg) != REG_32BIT)
456 emit_instr(ctx, sll, dst, dst, 0);
457
458 if (insn->imm >= lower_bound && insn->imm <= upper_bound) {
		/* The immediate fits the instruction. */
460 switch (BPF_OP(insn->code) | BPF_CLASS(insn->code)) {
461 case BPF_ALU64 | BPF_MOV:
462 emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, insn->imm);
463 break;
464 case BPF_ALU64 | BPF_AND:
465 case BPF_ALU | BPF_AND:
466 emit_instr(ctx, andi, dst, dst, insn->imm);
467 break;
468 case BPF_ALU64 | BPF_OR:
469 case BPF_ALU | BPF_OR:
470 emit_instr(ctx, ori, dst, dst, insn->imm);
471 break;
472 case BPF_ALU64 | BPF_XOR:
473 case BPF_ALU | BPF_XOR:
474 emit_instr(ctx, xori, dst, dst, insn->imm);
475 break;
476 case BPF_ALU64 | BPF_ADD:
477 emit_instr(ctx, daddiu, dst, dst, insn->imm);
478 break;
479 case BPF_ALU64 | BPF_SUB:
480 emit_instr(ctx, daddiu, dst, dst, -insn->imm);
481 break;
482 case BPF_ALU64 | BPF_RSH:
483 emit_instr(ctx, dsrl_safe, dst, dst, insn->imm & 0x3f);
484 break;
485 case BPF_ALU | BPF_RSH:
486 emit_instr(ctx, srl, dst, dst, insn->imm & 0x1f);
487 break;
488 case BPF_ALU64 | BPF_LSH:
489 emit_instr(ctx, dsll_safe, dst, dst, insn->imm & 0x3f);
490 break;
491 case BPF_ALU | BPF_LSH:
492 emit_instr(ctx, sll, dst, dst, insn->imm & 0x1f);
493 break;
494 case BPF_ALU64 | BPF_ARSH:
495 emit_instr(ctx, dsra_safe, dst, dst, insn->imm & 0x3f);
496 break;
497 case BPF_ALU | BPF_ARSH:
498 emit_instr(ctx, sra, dst, dst, insn->imm & 0x1f);
499 break;
500 case BPF_ALU | BPF_MOV:
501 emit_instr(ctx, addiu, dst, MIPS_R_ZERO, insn->imm);
502 break;
503 case BPF_ALU | BPF_ADD:
504 emit_instr(ctx, addiu, dst, dst, insn->imm);
505 break;
506 case BPF_ALU | BPF_SUB:
507 emit_instr(ctx, addiu, dst, dst, -insn->imm);
508 break;
509 default:
510 return -EINVAL;
511 }
512 } else {
		/* The immediate must be loaded into a register first. */
514 if (BPF_OP(insn->code) == BPF_MOV) {
515 gen_imm_to_reg(insn, dst, ctx);
516 } else {
517 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
518 switch (BPF_OP(insn->code) | BPF_CLASS(insn->code)) {
519 case BPF_ALU64 | BPF_AND:
520 case BPF_ALU | BPF_AND:
521 emit_instr(ctx, and, dst, dst, MIPS_R_AT);
522 break;
523 case BPF_ALU64 | BPF_OR:
524 case BPF_ALU | BPF_OR:
525 emit_instr(ctx, or, dst, dst, MIPS_R_AT);
526 break;
527 case BPF_ALU64 | BPF_XOR:
528 case BPF_ALU | BPF_XOR:
529 emit_instr(ctx, xor, dst, dst, MIPS_R_AT);
530 break;
531 case BPF_ALU64 | BPF_ADD:
532 emit_instr(ctx, daddu, dst, dst, MIPS_R_AT);
533 break;
534 case BPF_ALU64 | BPF_SUB:
535 emit_instr(ctx, dsubu, dst, dst, MIPS_R_AT);
536 break;
537 case BPF_ALU | BPF_ADD:
538 emit_instr(ctx, addu, dst, dst, MIPS_R_AT);
539 break;
540 case BPF_ALU | BPF_SUB:
541 emit_instr(ctx, subu, dst, dst, MIPS_R_AT);
542 break;
543 default:
544 return -EINVAL;
545 }
546 }
547 }
548
549 return 0;
550}
551
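/*
 * Out-of-line wrapper: skb_header_pointer() may be inlined, so this
 * gives the JITed LD_ABS/LD_IND code a stable address to call.
 */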
552static void * __must_check
553ool_skb_header_pointer(const struct sk_buff *skb, int offset,
554 int len, void *buffer)
555{
556 return skb_header_pointer(skb, offset, len, buffer);
557}
558
559static int size_to_len(const struct bpf_insn *insn)
560{
561 switch (BPF_SIZE(insn->code)) {
562 case BPF_B:
563 return 1;
564 case BPF_H:
565 return 2;
566 case BPF_W:
567 return 4;
568 case BPF_DW:
569 return 8;
570 }
571 return 0;
572}
573
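/*
 * Load a 64-bit constant into @dst using a short sequence: a single
 * daddiu for sign-extended 16-bit values, lui/ori for 32-bit values,
 * otherwise the constant is built 16 bits at a time with shifts.
 */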
574static void emit_const_to_reg(struct jit_ctx *ctx, int dst, u64 value)
575{
576 if (value >= 0xffffffffffff8000ull || value < 0x8000ull) {
577 emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, (int)value);
578 } else if (value >= 0xffffffff80000000ull ||
579 (value < 0x80000000 && value > 0xffff)) {
580 emit_instr(ctx, lui, dst, (s32)(s16)(value >> 16));
581 emit_instr(ctx, ori, dst, dst, (unsigned int)(value & 0xffff));
582 } else {
583 int i;
584 bool seen_part = false;
585 int needed_shift = 0;
586
587 for (i = 0; i < 4; i++) {
588 u64 part = (value >> (16 * (3 - i))) & 0xffff;
589
590 if (seen_part && needed_shift > 0 && (part || i == 3)) {
591 emit_instr(ctx, dsll_safe, dst, dst, needed_shift);
592 needed_shift = 0;
593 }
594 if (part) {
595 if (i == 0 || (!seen_part && i < 3 && part < 0x8000)) {
596 emit_instr(ctx, lui, dst, (s32)(s16)part);
597 needed_shift = -16;
598 } else {
599 emit_instr(ctx, ori, dst,
600 seen_part ? dst : MIPS_R_ZERO,
601 (unsigned int)part);
602 }
603 seen_part = true;
604 }
605 if (seen_part)
606 needed_shift += 16;
607 }
608 }
609}
610
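/*
 * Tail call: bounds-check the index, enforce the tail-call count
 * limit, load the target program and jump into it just past its
 * TCC-initialization instruction.
 */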
611static int emit_bpf_tail_call(struct jit_ctx *ctx, int this_idx)
612{
613 int off, b_off;
614
615 ctx->flags |= EBPF_SEEN_TC;

	/*
	 * Bounds check: if the index is not within
	 * array->map.max_entries, skip the tail call.
	 */
620 off = offsetof(struct bpf_array, map.max_entries);
621 emit_instr(ctx, lwu, MIPS_R_T5, off, MIPS_R_A1);
622 emit_instr(ctx, sltu, MIPS_R_AT, MIPS_R_T5, MIPS_R_A2);
623 b_off = b_imm(this_idx + 1, ctx);
624 emit_instr(ctx, bne, MIPS_R_AT, MIPS_R_ZERO, b_off);

	/*
	 * if (--TCC < 0)
	 *	skip the tail call
	 * (the daddiu below runs in the delay slot of the bne above)
	 */
630 emit_instr(ctx, daddiu, MIPS_R_T5,
631 (ctx->flags & EBPF_TCC_IN_V1) ? MIPS_R_V1 : MIPS_R_S4, -1);
632 b_off = b_imm(this_idx + 1, ctx);
633 emit_instr(ctx, bltz, MIPS_R_T5, b_off);

	/*
	 * prog = array->ptrs[index];
	 * if (prog == NULL)
	 *	skip the tail call
	 */
640 emit_instr(ctx, dsll, MIPS_R_T8, MIPS_R_A2, 3);
641 emit_instr(ctx, daddu, MIPS_R_T8, MIPS_R_T8, MIPS_R_A1);
642 off = offsetof(struct bpf_array, ptrs);
643 emit_instr(ctx, ld, MIPS_R_AT, off, MIPS_R_T8);
644 b_off = b_imm(this_idx + 1, ctx);
645 emit_instr(ctx, beq, MIPS_R_AT, MIPS_R_ZERO, b_off);
	/* Delay slot */
	emit_instr(ctx, nop);

	/* goto *(prog->bpf_func + 4); */
650 off = offsetof(struct bpf_prog, bpf_func);
651 emit_instr(ctx, ld, MIPS_R_T9, off, MIPS_R_AT);
	/* Tail call is a go: pass the decremented TCC to the callee in $v1. */
653 emit_instr(ctx, daddu, MIPS_R_V1, MIPS_R_T5, MIPS_R_ZERO);
	/* Skip the callee's first instruction (its TCC initialization). */
655 emit_instr(ctx, daddiu, MIPS_R_T9, MIPS_R_T9, 4);
656 return build_int_epilogue(ctx, MIPS_R_T9);
657}
658
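/*
 * MIPS conditional branches carry a 16-bit instruction offset, i.e. a
 * byte offset within [-0x20000, 0x1ffff]; anything beyond needs a jump.
 */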
659static bool is_bad_offset(int b_off)
660{
661 return b_off > 0x1ffff || b_off < -0x20000;
662}
663
664
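/*
 * Emit the MIPS code for a single eBPF instruction.  Returns the number
 * of eBPF instruction slots consumed (1, or 2 when the following slot
 * is folded in), or a negative errno on failure.
 */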
665static int build_one_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
666 int this_idx, int exit_idx)
667{
668 int src, dst, r, td, ts, mem_off, b_off;
669 bool need_swap, did_move, cmp_eq;
670 unsigned int target = 0;
671 u64 t64;
672 s64 t64s;
673 int bpf_op = BPF_OP(insn->code);
674
675 switch (insn->code) {
676 case BPF_ALU64 | BPF_ADD | BPF_K:
677 case BPF_ALU64 | BPF_SUB | BPF_K:
678 case BPF_ALU64 | BPF_OR | BPF_K:
679 case BPF_ALU64 | BPF_AND | BPF_K:
680 case BPF_ALU64 | BPF_LSH | BPF_K:
681 case BPF_ALU64 | BPF_RSH | BPF_K:
682 case BPF_ALU64 | BPF_XOR | BPF_K:
683 case BPF_ALU64 | BPF_ARSH | BPF_K:
684 case BPF_ALU64 | BPF_MOV | BPF_K:
685 case BPF_ALU | BPF_MOV | BPF_K:
686 case BPF_ALU | BPF_ADD | BPF_K:
687 case BPF_ALU | BPF_SUB | BPF_K:
688 case BPF_ALU | BPF_OR | BPF_K:
689 case BPF_ALU | BPF_AND | BPF_K:
690 case BPF_ALU | BPF_LSH | BPF_K:
691 case BPF_ALU | BPF_RSH | BPF_K:
692 case BPF_ALU | BPF_XOR | BPF_K:
693 case BPF_ALU | BPF_ARSH | BPF_K:
694 r = gen_imm_insn(insn, ctx, this_idx);
695 if (r < 0)
696 return r;
697 break;
698 case BPF_ALU64 | BPF_MUL | BPF_K:
699 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
700 if (dst < 0)
701 return dst;
702 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
703 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
704 if (insn->imm == 1)
705 break;
706 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
707 emit_instr(ctx, dmultu, MIPS_R_AT, dst);
708 emit_instr(ctx, mflo, dst);
709 break;
710 case BPF_ALU64 | BPF_NEG | BPF_K:
711 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
712 if (dst < 0)
713 return dst;
714 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
715 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
716 emit_instr(ctx, dsubu, dst, MIPS_R_ZERO, dst);
717 break;
718 case BPF_ALU | BPF_MUL | BPF_K:
719 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
720 if (dst < 0)
721 return dst;
722 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
723 if (td == REG_64BIT || td == REG_32BIT_ZERO_EX) {
724
725 emit_instr(ctx, sll, dst, dst, 0);
726 }
727 if (insn->imm == 1)
728 break;
729 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
730 emit_instr(ctx, multu, dst, MIPS_R_AT);
731 emit_instr(ctx, mflo, dst);
732 break;
733 case BPF_ALU | BPF_NEG | BPF_K:
734 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
735 if (dst < 0)
736 return dst;
737 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
738 if (td == REG_64BIT || td == REG_32BIT_ZERO_EX) {
739
740 emit_instr(ctx, sll, dst, dst, 0);
741 }
742 emit_instr(ctx, subu, dst, MIPS_R_ZERO, dst);
743 break;
744 case BPF_ALU | BPF_DIV | BPF_K:
745 case BPF_ALU | BPF_MOD | BPF_K:
746 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
747 if (dst < 0)
748 return dst;
749 if (insn->imm == 0) {
750 b_off = b_imm(exit_idx, ctx);
751 if (is_bad_offset(b_off))
752 return -E2BIG;
753 emit_instr(ctx, beq, MIPS_R_ZERO, MIPS_R_ZERO, b_off);
754 emit_instr(ctx, addu, MIPS_R_V0, MIPS_R_ZERO, MIPS_R_ZERO);
755 }
756 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
757 if (td == REG_64BIT || td == REG_32BIT_ZERO_EX)
758
759 emit_instr(ctx, sll, dst, dst, 0);
760 if (insn->imm == 1) {
761
762 if (bpf_op == BPF_MOD)
763 emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO);
764 break;
765 }
766 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
767 emit_instr(ctx, divu, dst, MIPS_R_AT);
768 if (bpf_op == BPF_DIV)
769 emit_instr(ctx, mflo, dst);
770 else
771 emit_instr(ctx, mfhi, dst);
772 break;
773 case BPF_ALU64 | BPF_DIV | BPF_K:
774 case BPF_ALU64 | BPF_MOD | BPF_K:
775 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
776 if (dst < 0)
777 return dst;
778 if (insn->imm == 0) {
779 b_off = b_imm(exit_idx, ctx);
780 if (is_bad_offset(b_off))
781 return -E2BIG;
782 emit_instr(ctx, beq, MIPS_R_ZERO, MIPS_R_ZERO, b_off);
783 emit_instr(ctx, addu, MIPS_R_V0, MIPS_R_ZERO, MIPS_R_ZERO);
784 }
785 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
786 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
787
788 if (insn->imm == 1) {
789
790 if (bpf_op == BPF_MOD)
791 emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO);
792 break;
793 }
794 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
795 emit_instr(ctx, ddivu, dst, MIPS_R_AT);
796 if (bpf_op == BPF_DIV)
797 emit_instr(ctx, mflo, dst);
798 else
799 emit_instr(ctx, mfhi, dst);
800 break;
801 case BPF_ALU64 | BPF_MOV | BPF_X:
802 case BPF_ALU64 | BPF_ADD | BPF_X:
803 case BPF_ALU64 | BPF_SUB | BPF_X:
804 case BPF_ALU64 | BPF_XOR | BPF_X:
805 case BPF_ALU64 | BPF_OR | BPF_X:
806 case BPF_ALU64 | BPF_AND | BPF_X:
807 case BPF_ALU64 | BPF_MUL | BPF_X:
808 case BPF_ALU64 | BPF_DIV | BPF_X:
809 case BPF_ALU64 | BPF_MOD | BPF_X:
810 case BPF_ALU64 | BPF_LSH | BPF_X:
811 case BPF_ALU64 | BPF_RSH | BPF_X:
812 case BPF_ALU64 | BPF_ARSH | BPF_X:
813 src = ebpf_to_mips_reg(ctx, insn, src_reg);
814 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
815 if (src < 0 || dst < 0)
816 return -EINVAL;
817 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
818 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
819 did_move = false;
820 if (insn->src_reg == BPF_REG_10) {
821 if (bpf_op == BPF_MOV) {
822 emit_instr(ctx, daddiu, dst, MIPS_R_SP, MAX_BPF_STACK);
823 did_move = true;
824 } else {
825 emit_instr(ctx, daddiu, MIPS_R_AT, MIPS_R_SP, MAX_BPF_STACK);
826 src = MIPS_R_AT;
827 }
828 } else if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
829 int tmp_reg = MIPS_R_AT;
830
831 if (bpf_op == BPF_MOV) {
832 tmp_reg = dst;
833 did_move = true;
834 }
835 emit_instr(ctx, daddu, tmp_reg, src, MIPS_R_ZERO);
836 emit_instr(ctx, dinsu, tmp_reg, MIPS_R_ZERO, 32, 32);
837 src = MIPS_R_AT;
838 }
839 switch (bpf_op) {
840 case BPF_MOV:
841 if (!did_move)
842 emit_instr(ctx, daddu, dst, src, MIPS_R_ZERO);
843 break;
844 case BPF_ADD:
845 emit_instr(ctx, daddu, dst, dst, src);
846 break;
847 case BPF_SUB:
848 emit_instr(ctx, dsubu, dst, dst, src);
849 break;
850 case BPF_XOR:
851 emit_instr(ctx, xor, dst, dst, src);
852 break;
853 case BPF_OR:
854 emit_instr(ctx, or, dst, dst, src);
855 break;
856 case BPF_AND:
857 emit_instr(ctx, and, dst, dst, src);
858 break;
859 case BPF_MUL:
860 emit_instr(ctx, dmultu, dst, src);
861 emit_instr(ctx, mflo, dst);
862 break;
863 case BPF_DIV:
864 case BPF_MOD:
865 b_off = b_imm(exit_idx, ctx);
866 if (is_bad_offset(b_off))
867 return -E2BIG;
868 emit_instr(ctx, beq, src, MIPS_R_ZERO, b_off);
869 emit_instr(ctx, movz, MIPS_R_V0, MIPS_R_ZERO, src);
870 emit_instr(ctx, ddivu, dst, src);
871 if (bpf_op == BPF_DIV)
872 emit_instr(ctx, mflo, dst);
873 else
874 emit_instr(ctx, mfhi, dst);
875 break;
876 case BPF_LSH:
877 emit_instr(ctx, dsllv, dst, dst, src);
878 break;
879 case BPF_RSH:
880 emit_instr(ctx, dsrlv, dst, dst, src);
881 break;
882 case BPF_ARSH:
883 emit_instr(ctx, dsrav, dst, dst, src);
884 break;
885 default:
886 pr_err("ALU64_REG NOT HANDLED\n");
887 return -EINVAL;
888 }
889 break;
890 case BPF_ALU | BPF_MOV | BPF_X:
891 case BPF_ALU | BPF_ADD | BPF_X:
892 case BPF_ALU | BPF_SUB | BPF_X:
893 case BPF_ALU | BPF_XOR | BPF_X:
894 case BPF_ALU | BPF_OR | BPF_X:
895 case BPF_ALU | BPF_AND | BPF_X:
896 case BPF_ALU | BPF_MUL | BPF_X:
897 case BPF_ALU | BPF_DIV | BPF_X:
898 case BPF_ALU | BPF_MOD | BPF_X:
899 case BPF_ALU | BPF_LSH | BPF_X:
900 case BPF_ALU | BPF_RSH | BPF_X:
901 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
902 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
903 if (src < 0 || dst < 0)
904 return -EINVAL;
905 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
906 if (td == REG_64BIT || td == REG_32BIT_ZERO_EX) {
907
908 emit_instr(ctx, sll, dst, dst, 0);
909 }
910 did_move = false;
911 ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
912 if (ts == REG_64BIT || ts == REG_32BIT_ZERO_EX) {
913 int tmp_reg = MIPS_R_AT;
914
915 if (bpf_op == BPF_MOV) {
916 tmp_reg = dst;
917 did_move = true;
918 }
919
920 emit_instr(ctx, sll, tmp_reg, src, 0);
921 src = MIPS_R_AT;
922 }
923 switch (bpf_op) {
924 case BPF_MOV:
925 if (!did_move)
926 emit_instr(ctx, addu, dst, src, MIPS_R_ZERO);
927 break;
928 case BPF_ADD:
929 emit_instr(ctx, addu, dst, dst, src);
930 break;
931 case BPF_SUB:
932 emit_instr(ctx, subu, dst, dst, src);
933 break;
934 case BPF_XOR:
935 emit_instr(ctx, xor, dst, dst, src);
936 break;
937 case BPF_OR:
938 emit_instr(ctx, or, dst, dst, src);
939 break;
940 case BPF_AND:
941 emit_instr(ctx, and, dst, dst, src);
942 break;
943 case BPF_MUL:
944 emit_instr(ctx, mul, dst, dst, src);
945 break;
946 case BPF_DIV:
947 case BPF_MOD:
948 b_off = b_imm(exit_idx, ctx);
949 if (is_bad_offset(b_off))
950 return -E2BIG;
951 emit_instr(ctx, beq, src, MIPS_R_ZERO, b_off);
952 emit_instr(ctx, movz, MIPS_R_V0, MIPS_R_ZERO, src);
953 emit_instr(ctx, divu, dst, src);
954 if (bpf_op == BPF_DIV)
955 emit_instr(ctx, mflo, dst);
956 else
957 emit_instr(ctx, mfhi, dst);
958 break;
959 case BPF_LSH:
960 emit_instr(ctx, sllv, dst, dst, src);
961 break;
962 case BPF_RSH:
963 emit_instr(ctx, srlv, dst, dst, src);
964 break;
965 default:
966 pr_err("ALU_REG NOT HANDLED\n");
967 return -EINVAL;
968 }
969 break;
970 case BPF_JMP | BPF_EXIT:
971 if (this_idx + 1 < exit_idx) {
972 b_off = b_imm(exit_idx, ctx);
973 if (is_bad_offset(b_off))
974 return -E2BIG;
975 emit_instr(ctx, beq, MIPS_R_ZERO, MIPS_R_ZERO, b_off);
976 emit_instr(ctx, nop);
977 }
978 break;
979 case BPF_JMP | BPF_JEQ | BPF_K:
980 case BPF_JMP | BPF_JNE | BPF_K:
981 cmp_eq = (bpf_op == BPF_JEQ);
982 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
983 if (dst < 0)
984 return dst;
985 if (insn->imm == 0) {
986 src = MIPS_R_ZERO;
987 } else {
988 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
989 src = MIPS_R_AT;
990 }
991 goto jeq_common;
992 case BPF_JMP | BPF_JEQ | BPF_X:
993 case BPF_JMP | BPF_JNE | BPF_X:
994 case BPF_JMP | BPF_JSLT | BPF_X:
995 case BPF_JMP | BPF_JSLE | BPF_X:
996 case BPF_JMP | BPF_JSGT | BPF_X:
997 case BPF_JMP | BPF_JSGE | BPF_X:
998 case BPF_JMP | BPF_JLT | BPF_X:
999 case BPF_JMP | BPF_JLE | BPF_X:
1000 case BPF_JMP | BPF_JGT | BPF_X:
1001 case BPF_JMP | BPF_JGE | BPF_X:
1002 case BPF_JMP | BPF_JSET | BPF_X:
1003 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1004 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1005 if (src < 0 || dst < 0)
1006 return -EINVAL;
1007 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
1008 ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
1009 if (td == REG_32BIT && ts != REG_32BIT) {
1010 emit_instr(ctx, sll, MIPS_R_AT, src, 0);
1011 src = MIPS_R_AT;
1012 } else if (ts == REG_32BIT && td != REG_32BIT) {
1013 emit_instr(ctx, sll, MIPS_R_AT, dst, 0);
1014 dst = MIPS_R_AT;
1015 }
1016 if (bpf_op == BPF_JSET) {
1017 emit_instr(ctx, and, MIPS_R_AT, dst, src);
1018 cmp_eq = false;
1019 dst = MIPS_R_AT;
1020 src = MIPS_R_ZERO;
1021 } else if (bpf_op == BPF_JSGT || bpf_op == BPF_JSLE) {
1022 emit_instr(ctx, dsubu, MIPS_R_AT, dst, src);
1023 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1024 b_off = b_imm(exit_idx, ctx);
1025 if (is_bad_offset(b_off))
1026 return -E2BIG;
1027 if (bpf_op == BPF_JSGT)
1028 emit_instr(ctx, blez, MIPS_R_AT, b_off);
1029 else
1030 emit_instr(ctx, bgtz, MIPS_R_AT, b_off);
1031 emit_instr(ctx, nop);
1032 return 2;
1033 }
1034 b_off = b_imm(this_idx + insn->off + 1, ctx);
1035 if (is_bad_offset(b_off))
1036 return -E2BIG;
1037 if (bpf_op == BPF_JSGT)
1038 emit_instr(ctx, bgtz, MIPS_R_AT, b_off);
1039 else
1040 emit_instr(ctx, blez, MIPS_R_AT, b_off);
1041 emit_instr(ctx, nop);
1042 break;
1043 } else if (bpf_op == BPF_JSGE || bpf_op == BPF_JSLT) {
1044 emit_instr(ctx, slt, MIPS_R_AT, dst, src);
1045 cmp_eq = bpf_op == BPF_JSGE;
1046 dst = MIPS_R_AT;
1047 src = MIPS_R_ZERO;
1048 } else if (bpf_op == BPF_JGT || bpf_op == BPF_JLE) {
			/* Compute both (dst - src) and (dst < src); dst or src may be $at. */
1050 emit_instr(ctx, dsubu, MIPS_R_T8, dst, src);
1051 emit_instr(ctx, sltu, MIPS_R_AT, dst, src);
			/* $sp is known non-zero, so movz/movn turn (dst - src) into an "equal" flag. */
1053 emit_instr(ctx, movz, MIPS_R_T9, MIPS_R_SP, MIPS_R_T8);
1054 emit_instr(ctx, movn, MIPS_R_T9, MIPS_R_ZERO, MIPS_R_T8);
1055 emit_instr(ctx, or, MIPS_R_AT, MIPS_R_T9, MIPS_R_AT);
1056 cmp_eq = bpf_op == BPF_JGT;
1057 dst = MIPS_R_AT;
1058 src = MIPS_R_ZERO;
1059 } else if (bpf_op == BPF_JGE || bpf_op == BPF_JLT) {
1060 emit_instr(ctx, sltu, MIPS_R_AT, dst, src);
1061 cmp_eq = bpf_op == BPF_JGE;
1062 dst = MIPS_R_AT;
1063 src = MIPS_R_ZERO;
1064 } else {
1065 cmp_eq = (bpf_op == BPF_JEQ);
1066 }
1067jeq_common:
		/*
		 * If this branch only jumps around a following EXIT
		 * (off == 1), invert the condition and branch straight
		 * to the epilogue instead, consuming the EXIT insn too.
		 */
1074 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1075 b_off = b_imm(exit_idx, ctx);
1076 if (is_bad_offset(b_off)) {
1077 target = j_target(ctx, exit_idx);
1078 if (target == (unsigned int)-1)
1079 return -E2BIG;
1080 cmp_eq = !cmp_eq;
1081 b_off = 4 * 3;
1082 if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
1083 ctx->offsets[this_idx] |= OFFSETS_B_CONV;
1084 ctx->long_b_conversion = 1;
1085 }
1086 }
1087
1088 if (cmp_eq)
1089 emit_instr(ctx, bne, dst, src, b_off);
1090 else
1091 emit_instr(ctx, beq, dst, src, b_off);
1092 emit_instr(ctx, nop);
1093 if (ctx->offsets[this_idx] & OFFSETS_B_CONV) {
1094 emit_instr(ctx, j, target);
1095 emit_instr(ctx, nop);
1096 }
1097 return 2;
1098 }
1099 b_off = b_imm(this_idx + insn->off + 1, ctx);
1100 if (is_bad_offset(b_off)) {
1101 target = j_target(ctx, this_idx + insn->off + 1);
1102 if (target == (unsigned int)-1)
1103 return -E2BIG;
1104 cmp_eq = !cmp_eq;
1105 b_off = 4 * 3;
1106 if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
1107 ctx->offsets[this_idx] |= OFFSETS_B_CONV;
1108 ctx->long_b_conversion = 1;
1109 }
1110 }
1111
1112 if (cmp_eq)
1113 emit_instr(ctx, beq, dst, src, b_off);
1114 else
1115 emit_instr(ctx, bne, dst, src, b_off);
1116 emit_instr(ctx, nop);
1117 if (ctx->offsets[this_idx] & OFFSETS_B_CONV) {
1118 emit_instr(ctx, j, target);
1119 emit_instr(ctx, nop);
1120 }
1121 break;
1122 case BPF_JMP | BPF_JSGT | BPF_K:
1123 case BPF_JMP | BPF_JSGE | BPF_K:
1124 case BPF_JMP | BPF_JSLT | BPF_K:
1125 case BPF_JMP | BPF_JSLE | BPF_K:
1126 cmp_eq = (bpf_op == BPF_JSGE);
1127 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1128 if (dst < 0)
1129 return dst;
1130
1131 if (insn->imm == 0) {
1132 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1133 b_off = b_imm(exit_idx, ctx);
1134 if (is_bad_offset(b_off))
1135 return -E2BIG;
1136 switch (bpf_op) {
1137 case BPF_JSGT:
1138 emit_instr(ctx, blez, dst, b_off);
1139 break;
1140 case BPF_JSGE:
1141 emit_instr(ctx, bltz, dst, b_off);
1142 break;
1143 case BPF_JSLT:
1144 emit_instr(ctx, bgez, dst, b_off);
1145 break;
1146 case BPF_JSLE:
1147 emit_instr(ctx, bgtz, dst, b_off);
1148 break;
1149 }
1150 emit_instr(ctx, nop);
1151 return 2;
1152 }
1153 b_off = b_imm(this_idx + insn->off + 1, ctx);
1154 if (is_bad_offset(b_off))
1155 return -E2BIG;
1156 switch (bpf_op) {
1157 case BPF_JSGT:
1158 emit_instr(ctx, bgtz, dst, b_off);
1159 break;
1160 case BPF_JSGE:
1161 emit_instr(ctx, bgez, dst, b_off);
1162 break;
1163 case BPF_JSLT:
1164 emit_instr(ctx, bltz, dst, b_off);
1165 break;
1166 case BPF_JSLE:
1167 emit_instr(ctx, blez, dst, b_off);
1168 break;
1169 }
1170 emit_instr(ctx, nop);
1171 break;
1172 }

		/*
		 * Only "set on less than" is available, so rewrite:
		 *   dst > imm   as   dst >= imm + 1
		 *   dst <= imm  as  !(dst >= imm + 1)
		 */
1177 if (bpf_op == BPF_JSGT)
1178 t64s = insn->imm + 1;
1179 else if (bpf_op == BPF_JSLE)
1180 t64s = insn->imm + 1;
1181 else
1182 t64s = insn->imm;
1183
1184 cmp_eq = bpf_op == BPF_JSGT || bpf_op == BPF_JSGE;
1185 if (t64s >= S16_MIN && t64s <= S16_MAX) {
1186 emit_instr(ctx, slti, MIPS_R_AT, dst, (int)t64s);
1187 src = MIPS_R_AT;
1188 dst = MIPS_R_ZERO;
1189 goto jeq_common;
1190 }
1191 emit_const_to_reg(ctx, MIPS_R_AT, (u64)t64s);
1192 emit_instr(ctx, slt, MIPS_R_AT, dst, MIPS_R_AT);
1193 src = MIPS_R_AT;
1194 dst = MIPS_R_ZERO;
1195 goto jeq_common;
1196
1197 case BPF_JMP | BPF_JGT | BPF_K:
1198 case BPF_JMP | BPF_JGE | BPF_K:
1199 case BPF_JMP | BPF_JLT | BPF_K:
1200 case BPF_JMP | BPF_JLE | BPF_K:
1201 cmp_eq = (bpf_op == BPF_JGE);
1202 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1203 if (dst < 0)
1204 return dst;

		/*
		 * Only unsigned "set on less than" is available, so
		 * rewrite dst > imm as dst >= imm + 1 and dst <= imm
		 * as !(dst >= imm + 1), using the zero-extended imm.
		 */
1209 if (bpf_op == BPF_JGT)
1210 t64s = (u64)(u32)(insn->imm) + 1;
1211 else if (bpf_op == BPF_JLE)
1212 t64s = (u64)(u32)(insn->imm) + 1;
1213 else
1214 t64s = (u64)(u32)(insn->imm);
1215
1216 cmp_eq = bpf_op == BPF_JGT || bpf_op == BPF_JGE;
1217
1218 emit_const_to_reg(ctx, MIPS_R_AT, (u64)t64s);
1219 emit_instr(ctx, sltu, MIPS_R_AT, dst, MIPS_R_AT);
1220 src = MIPS_R_AT;
1221 dst = MIPS_R_ZERO;
1222 goto jeq_common;
1223
1224 case BPF_JMP | BPF_JSET | BPF_K:
1225 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1226 if (dst < 0)
1227 return dst;
1228
1229 if (ctx->use_bbit_insns && hweight32((u32)insn->imm) == 1) {
1230 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1231 b_off = b_imm(exit_idx, ctx);
1232 if (is_bad_offset(b_off))
1233 return -E2BIG;
1234 emit_instr(ctx, bbit0, dst, ffs((u32)insn->imm) - 1, b_off);
1235 emit_instr(ctx, nop);
1236 return 2;
1237 }
1238 b_off = b_imm(this_idx + insn->off + 1, ctx);
1239 if (is_bad_offset(b_off))
1240 return -E2BIG;
1241 emit_instr(ctx, bbit1, dst, ffs((u32)insn->imm) - 1, b_off);
1242 emit_instr(ctx, nop);
1243 break;
1244 }
1245 t64 = (u32)insn->imm;
1246 emit_const_to_reg(ctx, MIPS_R_AT, t64);
1247 emit_instr(ctx, and, MIPS_R_AT, dst, MIPS_R_AT);
1248 src = MIPS_R_AT;
1249 dst = MIPS_R_ZERO;
1250 cmp_eq = false;
1251 goto jeq_common;
1252
1253 case BPF_JMP | BPF_JA:
		/*
		 * Prefer a pc-relative branch; if the target is out of
		 * range, fall back to an absolute jump.
		 */
1258 b_off = b_imm(this_idx + insn->off + 1, ctx);
1259 if (is_bad_offset(b_off)) {
1260 target = j_target(ctx, this_idx + insn->off + 1);
1261 if (target == (unsigned int)-1)
1262 return -E2BIG;
1263 emit_instr(ctx, j, target);
1264 } else {
1265 emit_instr(ctx, b, b_off);
1266 }
1267 emit_instr(ctx, nop);
1268 break;
1269 case BPF_LD | BPF_DW | BPF_IMM:
1270 if (insn->src_reg != 0)
1271 return -EINVAL;
1272 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1273 if (dst < 0)
1274 return dst;
1275 t64 = ((u64)(u32)insn->imm) | ((u64)(insn + 1)->imm << 32);
1276 emit_const_to_reg(ctx, dst, t64);
1277 return 2;
1278
1279 case BPF_JMP | BPF_CALL:
1280 ctx->flags |= EBPF_SAVE_RA;
1281 t64s = (s64)insn->imm + (s64)__bpf_call_base;
1282 emit_const_to_reg(ctx, MIPS_R_T9, (u64)t64s);
1283 emit_instr(ctx, jalr, MIPS_R_RA, MIPS_R_T9);
1284
1285 emit_instr(ctx, nop);
1286 break;
1287
1288 case BPF_JMP | BPF_TAIL_CALL:
1289 if (emit_bpf_tail_call(ctx, this_idx))
1290 return -EINVAL;
1291 break;
1292
1293 case BPF_LD | BPF_B | BPF_ABS:
1294 case BPF_LD | BPF_H | BPF_ABS:
1295 case BPF_LD | BPF_W | BPF_ABS:
1296 case BPF_LD | BPF_DW | BPF_ABS:
1297 ctx->flags |= EBPF_SAVE_RA;
1298
1299 gen_imm_to_reg(insn, MIPS_R_A1, ctx);
1300 emit_instr(ctx, addiu, MIPS_R_A2, MIPS_R_ZERO, size_to_len(insn));
1301
1302 if (insn->imm < 0) {
1303 emit_const_to_reg(ctx, MIPS_R_T9, (u64)bpf_internal_load_pointer_neg_helper);
1304 } else {
1305 emit_const_to_reg(ctx, MIPS_R_T9, (u64)ool_skb_header_pointer);
1306 emit_instr(ctx, daddiu, MIPS_R_A3, MIPS_R_SP, ctx->tmp_offset);
1307 }
1308 goto ld_skb_common;
1309
1310 case BPF_LD | BPF_B | BPF_IND:
1311 case BPF_LD | BPF_H | BPF_IND:
1312 case BPF_LD | BPF_W | BPF_IND:
1313 case BPF_LD | BPF_DW | BPF_IND:
1314 ctx->flags |= EBPF_SAVE_RA;
1315 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1316 if (src < 0)
1317 return src;
1318 ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
1319 if (ts == REG_32BIT_ZERO_EX) {
1320
1321 emit_instr(ctx, sll, MIPS_R_A1, src, 0);
1322 src = MIPS_R_A1;
1323 }
1324 if (insn->imm >= S16_MIN && insn->imm <= S16_MAX) {
1325 emit_instr(ctx, daddiu, MIPS_R_A1, src, insn->imm);
1326 } else {
1327 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
1328 emit_instr(ctx, daddu, MIPS_R_A1, MIPS_R_AT, src);
1329 }
1330
1331 emit_instr(ctx, sll, MIPS_R_A1, MIPS_R_A1, 0);
1332 emit_instr(ctx, daddiu, MIPS_R_A3, MIPS_R_SP, ctx->tmp_offset);
1333 emit_instr(ctx, slt, MIPS_R_AT, MIPS_R_A1, MIPS_R_ZERO);
1334
1335 emit_const_to_reg(ctx, MIPS_R_T8, (u64)bpf_internal_load_pointer_neg_helper);
1336 emit_const_to_reg(ctx, MIPS_R_T9, (u64)ool_skb_header_pointer);
1337 emit_instr(ctx, addiu, MIPS_R_A2, MIPS_R_ZERO, size_to_len(insn));
1338 emit_instr(ctx, movn, MIPS_R_T9, MIPS_R_T8, MIPS_R_AT);
1339
1340ld_skb_common:
1341 emit_instr(ctx, jalr, MIPS_R_RA, MIPS_R_T9);
1342
1343 emit_instr(ctx, daddu, MIPS_R_A0, MIPS_R_S0, MIPS_R_ZERO);
1344
1345
1346 b_off = b_imm(exit_idx, ctx);
1347 if (is_bad_offset(b_off)) {
1348 target = j_target(ctx, exit_idx);
1349 if (target == (unsigned int)-1)
1350 return -E2BIG;
1351
1352 if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
1353 ctx->offsets[this_idx] |= OFFSETS_B_CONV;
1354 ctx->long_b_conversion = 1;
1355 }
1356 emit_instr(ctx, bne, MIPS_R_V0, MIPS_R_ZERO, 4 * 3);
1357 emit_instr(ctx, nop);
1358 emit_instr(ctx, j, target);
1359 emit_instr(ctx, nop);
1360 } else {
1361 emit_instr(ctx, beq, MIPS_R_V0, MIPS_R_ZERO, b_off);
1362 emit_instr(ctx, nop);
1363 }
1364
1365#ifdef __BIG_ENDIAN
1366 need_swap = false;
1367#else
1368 need_swap = true;
1369#endif
1370 dst = MIPS_R_V0;
1371 switch (BPF_SIZE(insn->code)) {
1372 case BPF_B:
1373 emit_instr(ctx, lbu, dst, 0, MIPS_R_V0);
1374 break;
1375 case BPF_H:
1376 emit_instr(ctx, lhu, dst, 0, MIPS_R_V0);
1377 if (need_swap)
1378 emit_instr(ctx, wsbh, dst, dst);
1379 break;
1380 case BPF_W:
1381 emit_instr(ctx, lw, dst, 0, MIPS_R_V0);
1382 if (need_swap) {
1383 emit_instr(ctx, wsbh, dst, dst);
1384 emit_instr(ctx, rotr, dst, dst, 16);
1385 }
1386 break;
1387 case BPF_DW:
1388 emit_instr(ctx, ld, dst, 0, MIPS_R_V0);
1389 if (need_swap) {
1390 emit_instr(ctx, dsbh, dst, dst);
1391 emit_instr(ctx, dshd, dst, dst);
1392 }
1393 break;
1394 }
1395
1396 break;
1397 case BPF_ALU | BPF_END | BPF_FROM_BE:
1398 case BPF_ALU | BPF_END | BPF_FROM_LE:
1399 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1400 if (dst < 0)
1401 return dst;
1402 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
1403 if (insn->imm == 64 && td == REG_32BIT)
1404 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
1405
1406 if (insn->imm != 64 &&
1407 (td == REG_64BIT || td == REG_32BIT_ZERO_EX)) {
1408
1409 emit_instr(ctx, sll, dst, dst, 0);
1410 }
1411
1412#ifdef __BIG_ENDIAN
1413 need_swap = (BPF_SRC(insn->code) == BPF_FROM_LE);
1414#else
1415 need_swap = (BPF_SRC(insn->code) == BPF_FROM_BE);
1416#endif
1417 if (insn->imm == 16) {
1418 if (need_swap)
1419 emit_instr(ctx, wsbh, dst, dst);
1420 emit_instr(ctx, andi, dst, dst, 0xffff);
1421 } else if (insn->imm == 32) {
1422 if (need_swap) {
1423 emit_instr(ctx, wsbh, dst, dst);
1424 emit_instr(ctx, rotr, dst, dst, 16);
1425 }
1426 } else {
1427 if (need_swap) {
1428 emit_instr(ctx, dsbh, dst, dst);
1429 emit_instr(ctx, dshd, dst, dst);
1430 }
1431 }
1432 break;
1433
1434 case BPF_ST | BPF_B | BPF_MEM:
1435 case BPF_ST | BPF_H | BPF_MEM:
1436 case BPF_ST | BPF_W | BPF_MEM:
1437 case BPF_ST | BPF_DW | BPF_MEM:
1438 if (insn->dst_reg == BPF_REG_10) {
1439 ctx->flags |= EBPF_SEEN_FP;
1440 dst = MIPS_R_SP;
1441 mem_off = insn->off + MAX_BPF_STACK;
1442 } else {
1443 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1444 if (dst < 0)
1445 return dst;
1446 mem_off = insn->off;
1447 }
1448 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
1449 switch (BPF_SIZE(insn->code)) {
1450 case BPF_B:
1451 emit_instr(ctx, sb, MIPS_R_AT, mem_off, dst);
1452 break;
1453 case BPF_H:
1454 emit_instr(ctx, sh, MIPS_R_AT, mem_off, dst);
1455 break;
1456 case BPF_W:
1457 emit_instr(ctx, sw, MIPS_R_AT, mem_off, dst);
1458 break;
1459 case BPF_DW:
1460 emit_instr(ctx, sd, MIPS_R_AT, mem_off, dst);
1461 break;
1462 }
1463 break;
1464
1465 case BPF_LDX | BPF_B | BPF_MEM:
1466 case BPF_LDX | BPF_H | BPF_MEM:
1467 case BPF_LDX | BPF_W | BPF_MEM:
1468 case BPF_LDX | BPF_DW | BPF_MEM:
1469 if (insn->src_reg == BPF_REG_10) {
1470 ctx->flags |= EBPF_SEEN_FP;
1471 src = MIPS_R_SP;
1472 mem_off = insn->off + MAX_BPF_STACK;
1473 } else {
1474 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1475 if (src < 0)
1476 return src;
1477 mem_off = insn->off;
1478 }
1479 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1480 if (dst < 0)
1481 return dst;
1482 switch (BPF_SIZE(insn->code)) {
1483 case BPF_B:
1484 emit_instr(ctx, lbu, dst, mem_off, src);
1485 break;
1486 case BPF_H:
1487 emit_instr(ctx, lhu, dst, mem_off, src);
1488 break;
1489 case BPF_W:
1490 emit_instr(ctx, lw, dst, mem_off, src);
1491 break;
1492 case BPF_DW:
1493 emit_instr(ctx, ld, dst, mem_off, src);
1494 break;
1495 }
1496 break;
1497
1498 case BPF_STX | BPF_B | BPF_MEM:
1499 case BPF_STX | BPF_H | BPF_MEM:
1500 case BPF_STX | BPF_W | BPF_MEM:
1501 case BPF_STX | BPF_DW | BPF_MEM:
1502 case BPF_STX | BPF_W | BPF_XADD:
1503 case BPF_STX | BPF_DW | BPF_XADD:
1504 if (insn->dst_reg == BPF_REG_10) {
1505 ctx->flags |= EBPF_SEEN_FP;
1506 dst = MIPS_R_SP;
1507 mem_off = insn->off + MAX_BPF_STACK;
1508 } else {
1509 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1510 if (dst < 0)
1511 return dst;
1512 mem_off = insn->off;
1513 }
1514 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1515 if (src < 0)
1516 return src;
1517 if (BPF_MODE(insn->code) == BPF_XADD) {
1518 switch (BPF_SIZE(insn->code)) {
1519 case BPF_W:
1520 if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
1521 emit_instr(ctx, sll, MIPS_R_AT, src, 0);
1522 src = MIPS_R_AT;
1523 }
1524 emit_instr(ctx, ll, MIPS_R_T8, mem_off, dst);
1525 emit_instr(ctx, addu, MIPS_R_T8, MIPS_R_T8, src);
1526 emit_instr(ctx, sc, MIPS_R_T8, mem_off, dst);
			/*
			 * On sc failure ($t8 == 0) retry: branch back
			 * to the ll (the offset is -4 instructions,
			 * expressed in bytes).
			 */
1531 emit_instr(ctx, beq, MIPS_R_T8, MIPS_R_ZERO, -4 * 4);
1532 emit_instr(ctx, nop);
1533 break;
1534 case BPF_DW:
1535 if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
1536 emit_instr(ctx, daddu, MIPS_R_AT, src, MIPS_R_ZERO);
1537 emit_instr(ctx, dinsu, MIPS_R_AT, MIPS_R_ZERO, 32, 32);
1538 src = MIPS_R_AT;
1539 }
1540 emit_instr(ctx, lld, MIPS_R_T8, mem_off, dst);
1541 emit_instr(ctx, daddu, MIPS_R_T8, MIPS_R_T8, src);
1542 emit_instr(ctx, scd, MIPS_R_T8, mem_off, dst);
1543 emit_instr(ctx, beq, MIPS_R_T8, MIPS_R_ZERO, -4 * 4);
1544 emit_instr(ctx, nop);
1545 break;
1546 }
1547 } else {
1548 switch (BPF_SIZE(insn->code)) {
1549 case BPF_B:
1550 emit_instr(ctx, sb, src, mem_off, dst);
1551 break;
1552 case BPF_H:
1553 emit_instr(ctx, sh, src, mem_off, dst);
1554 break;
1555 case BPF_W:
1556 emit_instr(ctx, sw, src, mem_off, dst);
1557 break;
1558 case BPF_DW:
1559 if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
1560 emit_instr(ctx, daddu, MIPS_R_AT, src, MIPS_R_ZERO);
1561 emit_instr(ctx, dinsu, MIPS_R_AT, MIPS_R_ZERO, 32, 32);
1562 src = MIPS_R_AT;
1563 }
1564 emit_instr(ctx, sd, src, mem_off, dst);
1565 break;
1566 }
1567 }
1568 break;
1569
1570 default:
1571 pr_err("NOT HANDLED %d - (%02x)\n",
1572 this_idx, (unsigned int)insn->code);
1573 return -EINVAL;
1574 }
1575 return 1;
1576}
1577
1578#define RVT_VISITED_MASK 0xc000000000000000ull
1579#define RVT_FALL_THROUGH 0x4000000000000000ull
1580#define RVT_BRANCH_TAKEN 0x8000000000000000ull
1581#define RVT_DONE (RVT_FALL_THROUGH | RVT_BRANCH_TAKEN)
1582
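/*
 * Emit code for every reachable instruction.  On the sizing passes
 * (ctx->target == NULL) nothing is written; only ctx->idx advances and
 * the per-insn offsets are recorded.
 */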
1583static int build_int_body(struct jit_ctx *ctx)
1584{
1585 const struct bpf_prog *prog = ctx->skf;
1586 const struct bpf_insn *insn;
1587 int i, r;
1588
1589 for (i = 0; i < prog->len; ) {
1590 insn = prog->insnsi + i;
1591 if ((ctx->reg_val_types[i] & RVT_VISITED_MASK) == 0) {
			/* Never visited: dead code, don't emit it. */
1593 i++;
1594 continue;
1595 }
1596
1597 if (ctx->target == NULL)
1598 ctx->offsets[i] = (ctx->offsets[i] & OFFSETS_B_CONV) | (ctx->idx * 4);
1599
1600 r = build_one_insn(insn, ctx, i, prog->len);
1601 if (r < 0)
1602 return r;
1603 i += r;
1604 }
1605
1606 if (ctx->target == NULL)
1607 ctx->offsets[i] = ctx->idx * 4;
1608
	/*
	 * All exits land in the common epilogue.  Some EXIT insns were
	 * consumed by branch-around threading and never had their
	 * offset recorded, so point them at the epilogue now.
	 */
1614 if (ctx->target == NULL)
1615 for (i = 0; i < prog->len; i++) {
1616 insn = prog->insnsi + i;
1617 if (insn->code == (BPF_JMP | BPF_EXIT))
1618 ctx->offsets[i] = ctx->idx * 4;
1619 }
1620 return 0;
1621}
1622
1623
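/*
 * Walk the program from @start_idx recording the register-type snapshot
 * seen on entry to each instruction, following either the fall-through
 * edge or (when @follow_taken is set) the branch-taken edge of the
 * first conditional jump encountered.  Stops at BPF_EXIT.
 */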
1624static int reg_val_propagate_range(struct jit_ctx *ctx, u64 initial_rvt,
1625 int start_idx, bool follow_taken)
1626{
1627 const struct bpf_prog *prog = ctx->skf;
1628 const struct bpf_insn *insn;
1629 u64 exit_rvt = initial_rvt;
1630 u64 *rvt = ctx->reg_val_types;
1631 int idx;
1632 int reg;
1633
1634 for (idx = start_idx; idx < prog->len; idx++) {
1635 rvt[idx] = (rvt[idx] & RVT_VISITED_MASK) | exit_rvt;
1636 insn = prog->insnsi + idx;
1637 switch (BPF_CLASS(insn->code)) {
1638 case BPF_ALU:
1639 switch (BPF_OP(insn->code)) {
1640 case BPF_ADD:
1641 case BPF_SUB:
1642 case BPF_MUL:
1643 case BPF_DIV:
1644 case BPF_OR:
1645 case BPF_AND:
1646 case BPF_LSH:
1647 case BPF_RSH:
1648 case BPF_NEG:
1649 case BPF_MOD:
1650 case BPF_XOR:
1651 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1652 break;
1653 case BPF_MOV:
1654 if (BPF_SRC(insn->code)) {
1655 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1656 } else {
1657
1658 if (insn->imm >= 0)
1659 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1660 else
1661 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1662 }
1663 break;
1664 case BPF_END:
1665 if (insn->imm == 64)
1666 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1667 else if (insn->imm == 32)
1668 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1669 else
1670 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1671 break;
1672 }
1673 rvt[idx] |= RVT_DONE;
1674 break;
1675 case BPF_ALU64:
1676 switch (BPF_OP(insn->code)) {
1677 case BPF_MOV:
1678 if (BPF_SRC(insn->code)) {
1679
1680 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1681 } else {
1682
1683 if (insn->imm >= 0)
1684 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1685 else
1686 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT_32BIT);
1687 }
1688 break;
1689 default:
1690 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1691 }
1692 rvt[idx] |= RVT_DONE;
1693 break;
1694 case BPF_LD:
1695 switch (BPF_SIZE(insn->code)) {
1696 case BPF_DW:
1697 if (BPF_MODE(insn->code) == BPF_IMM) {
1698 s64 val;
1699
1700 val = (s64)((u32)insn->imm | ((u64)(insn + 1)->imm << 32));
1701 if (val > 0 && val <= S32_MAX)
1702 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1703 else if (val >= S32_MIN && val <= S32_MAX)
1704 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT_32BIT);
1705 else
1706 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1707 rvt[idx] |= RVT_DONE;
1708 idx++;
1709 } else {
1710 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1711 }
1712 break;
1713 case BPF_B:
1714 case BPF_H:
1715 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1716 break;
1717 case BPF_W:
1718 if (BPF_MODE(insn->code) == BPF_IMM)
1719 set_reg_val_type(&exit_rvt, insn->dst_reg,
1720 insn->imm >= 0 ? REG_32BIT_POS : REG_32BIT);
1721 else
1722 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1723 break;
1724 }
1725 rvt[idx] |= RVT_DONE;
1726 break;
1727 case BPF_LDX:
1728 switch (BPF_SIZE(insn->code)) {
1729 case BPF_DW:
1730 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1731 break;
1732 case BPF_B:
1733 case BPF_H:
1734 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1735 break;
1736 case BPF_W:
1737 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1738 break;
1739 }
1740 rvt[idx] |= RVT_DONE;
1741 break;
1742 case BPF_JMP:
1743 switch (BPF_OP(insn->code)) {
1744 case BPF_EXIT:
1745 rvt[idx] = RVT_DONE | exit_rvt;
1746 rvt[prog->len] = exit_rvt;
1747 return idx;
1748 case BPF_JA:
1749 rvt[idx] |= RVT_DONE;
1750 idx += insn->off;
1751 break;
1752 case BPF_JEQ:
1753 case BPF_JGT:
1754 case BPF_JGE:
1755 case BPF_JLT:
1756 case BPF_JLE:
1757 case BPF_JSET:
1758 case BPF_JNE:
1759 case BPF_JSGT:
1760 case BPF_JSGE:
1761 case BPF_JSLT:
1762 case BPF_JSLE:
1763 if (follow_taken) {
1764 rvt[idx] |= RVT_BRANCH_TAKEN;
1765 idx += insn->off;
1766 follow_taken = false;
1767 } else {
1768 rvt[idx] |= RVT_FALL_THROUGH;
1769 }
1770 break;
1771 case BPF_CALL:
1772 set_reg_val_type(&exit_rvt, BPF_REG_0, REG_64BIT);
				/* Calls clobber R0-R5; treat them as full 64-bit afterwards. */
1774 for (reg = BPF_REG_0; reg <= BPF_REG_5; reg++)
1775 set_reg_val_type(&exit_rvt, reg, REG_64BIT);
1776
1777 rvt[idx] |= RVT_DONE;
1778 break;
1779 default:
1780 WARN(1, "Unhandled BPF_JMP case.\n");
1781 rvt[idx] |= RVT_DONE;
1782 break;
1783 }
1784 break;
1785 default:
1786 rvt[idx] |= RVT_DONE;
1787 break;
1788 }
1789 }
1790 return idx;
1791}
1792
/*
 * Track the value range (32-bit vs. 64-bit, zero vs. sign extended) of
 * each eBPF register at every instruction so that unnecessary sign and
 * zero extension operations can be omitted when emitting code.
 * Confluence of control paths with conflicting types is not resolved;
 * the first type recorded for an insn wins.
 */
1801static int reg_val_propagate(struct jit_ctx *ctx)
1802{
1803 const struct bpf_prog *prog = ctx->skf;
1804 u64 exit_rvt;
1805 int reg;
1806 int i;
1807
	/*
	 * Each register's type occupies 3 bits of the per-insn u64;
	 * the top bits (RVT_VISITED_MASK) record how the insn was
	 * reached during propagation.
	 */
1812 exit_rvt = 0;
1813
	/* On entry the argument registers R1-R5 hold arbitrary 64-bit values. */
1815 for (reg = BPF_REG_1; reg <= BPF_REG_5; reg++)
1816 set_reg_val_type(&exit_rvt, reg, REG_64BIT);
1817
	/*
	 * First pass: walk the program following only fall-through
	 * edges.  Branch-taken edges skipped here are picked up by the
	 * loop below.
	 */
1822 reg_val_propagate_range(ctx, exit_rvt, 0, false);
1823restart_search:
	/*
	 * Find any conditional branch whose taken edge has not been
	 * followed yet (marked RVT_FALL_THROUGH only) and follow it,
	 * repeating until everything reachable is RVT_DONE.
	 */
1830 for (i = 0; i < prog->len; i++) {
1831 u64 rvt = ctx->reg_val_types[i];
1832
1833 if ((rvt & RVT_VISITED_MASK) == RVT_DONE ||
1834 (rvt & RVT_VISITED_MASK) == 0)
1835 continue;
1836 if ((rvt & RVT_VISITED_MASK) == RVT_FALL_THROUGH) {
1837 reg_val_propagate_range(ctx, rvt & ~RVT_VISITED_MASK, i, true);
1838 } else {
1839 WARN(1, "Unexpected RVT_BRANCH_TAKEN case.\n");
1840 reg_val_propagate_range(ctx, rvt & ~RVT_VISITED_MASK, i, false);
1841 }
1842 goto restart_search;
1843 }

	/*
	 * Every reachable instruction is now marked RVT_DONE and
	 * carries the register-type snapshot seen on entry to it.
	 */
1850 return 0;
1851}
1852
1853static void jit_fill_hole(void *area, unsigned int size)
1854{
1855 u32 *p;

	/* Fill unused space with break instructions so stray jumps trap. */
1858 for (p = area; size >= sizeof(u32); size -= sizeof(u32))
1859 uasm_i_break(&p, BRK_BUG);
1860}
1861
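/*
 * Main JIT entry point: run register-type propagation, size the image
 * with one or more discovery passes, allocate the binary image and
 * emit the final code.
 */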
1862struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog)
1863{
1864 struct bpf_prog *orig_prog = prog;
1865 bool tmp_blinded = false;
1866 struct bpf_prog *tmp;
1867 struct bpf_binary_header *header = NULL;
1868 struct jit_ctx ctx;
1869 unsigned int image_size;
1870 u8 *image_ptr;
1871
1872 if (!bpf_jit_enable || !cpu_has_mips64r2)
1873 return prog;
1874
1875 tmp = bpf_jit_blind_constants(prog);
	/* If blinding was requested and we failed during blinding,
	 * we must fall back to the interpreter.
	 */
1879 if (IS_ERR(tmp))
1880 return orig_prog;
1881 if (tmp != prog) {
1882 tmp_blinded = true;
1883 prog = tmp;
1884 }
1885
1886 memset(&ctx, 0, sizeof(ctx));
1887
1888 preempt_disable();
1889 switch (current_cpu_type()) {
1890 case CPU_CAVIUM_OCTEON:
1891 case CPU_CAVIUM_OCTEON_PLUS:
1892 case CPU_CAVIUM_OCTEON2:
1893 case CPU_CAVIUM_OCTEON3:
1894 ctx.use_bbit_insns = 1;
1895 break;
1896 default:
1897 ctx.use_bbit_insns = 0;
1898 }
1899 preempt_enable();
1900
1901 ctx.offsets = kcalloc(prog->len + 1, sizeof(*ctx.offsets), GFP_KERNEL);
1902 if (ctx.offsets == NULL)
1903 goto out_err;
1904
1905 ctx.reg_val_types = kcalloc(prog->len + 1, sizeof(*ctx.reg_val_types), GFP_KERNEL);
1906 if (ctx.reg_val_types == NULL)
1907 goto out_err;
1908
1909 ctx.skf = prog;
1910
1911 if (reg_val_propagate(&ctx))
1912 goto out_err;
1913
	/*
	 * First pass: discover which registers and features are used
	 * and compute instruction offsets assuming short branches.
	 */
1918 if (build_int_body(&ctx))
1919 goto out_err;
1920
	/*
	 * If no function calls are made (no $ra save), the tail call
	 * count can live in $v1; otherwise it must be kept in $s4.
	 */
1925 if (ctx.flags & EBPF_SEEN_TC) {
1926 if (ctx.flags & EBPF_SAVE_RA)
1927 ctx.flags |= EBPF_SAVE_S4;
1928 else
1929 ctx.flags |= EBPF_TCC_IN_V1;
1930 }
1931
	/*
	 * Second pass: generate final offsets.  Whenever a branch turns
	 * out to be out of range it is converted to a jump-around
	 * sequence, which changes the offsets, so repeat until no new
	 * conversions are needed.
	 */
1939 do {
1940 ctx.idx = 0;
1941 ctx.gen_b_offsets = 1;
1942 ctx.long_b_conversion = 0;
1943 if (gen_int_prologue(&ctx))
1944 goto out_err;
1945 if (build_int_body(&ctx))
1946 goto out_err;
1947 if (build_int_epilogue(&ctx, MIPS_R_RA))
1948 goto out_err;
1949 } while (ctx.long_b_conversion);
1950
1951 image_size = 4 * ctx.idx;
1952
1953 header = bpf_jit_binary_alloc(image_size, &image_ptr,
1954 sizeof(u32), jit_fill_hole);
1955 if (header == NULL)
1956 goto out_err;
1957
1958 ctx.target = (u32 *)image_ptr;
1959
	/* Third pass: actually emit the code into the allocated image. */
1961 ctx.idx = 0;
1962 if (gen_int_prologue(&ctx))
1963 goto out_err;
1964 if (build_int_body(&ctx))
1965 goto out_err;
1966 if (build_int_epilogue(&ctx, MIPS_R_RA))
1967 goto out_err;
1968
	/* Make the generated code visible to instruction fetch. */
1970 flush_icache_range((unsigned long)ctx.target,
1971 (unsigned long)(ctx.target + ctx.idx * sizeof(u32)));
1972
1973 if (bpf_jit_enable > 1)
		/* Dump the generated code. */
1975 bpf_jit_dump(prog->len, image_size, 2, ctx.target);
1976
1977 bpf_jit_binary_lock_ro(header);
1978 prog->bpf_func = (void *)ctx.target;
1979 prog->jited = 1;
1980 prog->jited_len = image_size;
1981out_normal:
1982 if (tmp_blinded)
1983 bpf_jit_prog_release_other(prog, prog == orig_prog ?
1984 tmp : orig_prog);
1985 kfree(ctx.offsets);
1986 kfree(ctx.reg_val_types);
1987
1988 return prog;
1989
1990out_err:
1991 prog = orig_prog;
1992 if (header)
1993 bpf_jit_binary_free(header);
1994 goto out_normal;
1995}
1996