/*
 * Just-In-Time compiler for eBPF bytecode on MIPS.
 */
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/filter.h>
#include <linux/bpf.h>
#include <linux/slab.h>
#include <asm/bitops.h>
#include <asm/byteorder.h>
#include <asm/cacheflush.h>
#include <asm/cpu-features.h>
#include <asm/isa-rev.h>
#include <asm/uasm.h>

/* MIPS general purpose register numbers used by the JIT. */
#define MIPS_R_ZERO	0
#define MIPS_R_AT	1
#define MIPS_R_V0	2	/* BPF_R0 */
#define MIPS_R_V1	3
#define MIPS_R_A0	4	/* BPF_R1 */
#define MIPS_R_A1	5	/* BPF_R2 */
#define MIPS_R_A2	6	/* BPF_R3 */
#define MIPS_R_A3	7	/* BPF_R4 */
#define MIPS_R_A4	8	/* BPF_R5 */
#define MIPS_R_T4	12	/* BPF_AX */
#define MIPS_R_T5	13
#define MIPS_R_T6	14
#define MIPS_R_T7	15
#define MIPS_R_S0	16	/* BPF_R6 */
#define MIPS_R_S1	17	/* BPF_R7 */
#define MIPS_R_S2	18	/* BPF_R8 */
#define MIPS_R_S3	19	/* BPF_R9 */
#define MIPS_R_S4	20	/* BPF tail call count */
#define MIPS_R_S5	21
#define MIPS_R_S6	22
#define MIPS_R_S7	23
#define MIPS_R_T8	24
#define MIPS_R_T9	25
#define MIPS_R_SP	29
#define MIPS_R_RA	31

/* Flags recorded in jit_ctx::flags while the program is scanned and emitted. */
#define EBPF_SAVE_S0	BIT(0)
#define EBPF_SAVE_S1	BIT(1)
#define EBPF_SAVE_S2	BIT(2)
#define EBPF_SAVE_S3	BIT(3)
#define EBPF_SAVE_S4	BIT(4)
#define EBPF_SAVE_RA	BIT(5)
#define EBPF_SEEN_FP	BIT(6)
#define EBPF_SEEN_TC	BIT(7)
#define EBPF_TCC_IN_V1	BIT(8)

/*
 * For the mips64 ISA, we need to track the value range or type of
 * each JIT register.  The BPF machine requires zero extended 32-bit
 * values, but the mips64 ISA requires sign extended 32-bit values.
 * At each point in the BPF program we track if the register is
 * 32-bit or 64-bit, and which type of value it contains, so that the
 * proper truncation or sign extension can be emitted only when it is
 * really needed.
 */
enum reg_val_type {
	/* uninitialized */
	REG_UNKNOWN,
	/* not known to be 32-bit compatible. */
	REG_64BIT,
	/* 32-bit compatible, no truncation needed for 64-bit ops. */
	REG_64BIT_32BIT,
	/* 32-bit compatible, need truncation for 64-bit ops. */
	REG_32BIT,
	/* 32-bit zero extended value, bit 31 known to be clear. */
	REG_32BIT_POS
};

/*
 * The high bit of an entry in jit_ctx::offsets indicates that a long
 * (jump-around) branch conversion was done at that instruction.
 */
#define OFFSETS_B_CONV	BIT(31)

/**
 * struct jit_ctx - JIT context
 * @skf:		The bpf_prog being compiled
 * @stack_size:		eBPF stack size
 * @idx:		Instruction index
 * @flags:		JIT flags
 * @offsets:		Instruction offsets
 * @target:		Memory location for the compiled filter
 * @reg_val_types:	Packed enum reg_val_type for each register
 */
struct jit_ctx {
	const struct bpf_prog *skf;
	int stack_size;
	u32 idx;
	u32 flags;
	u32 *offsets;
	u32 *target;
	u64 *reg_val_types;
	unsigned int long_b_conversion:1;
	unsigned int gen_b_offsets:1;
	unsigned int use_bbit_insns:1;
};

/* Each BPF register gets a 3-bit reg_val_type slot in the packed u64. */
static void set_reg_val_type(u64 *rvt, int reg, enum reg_val_type type)
{
	*rvt &= ~(7ull << (reg * 3));
	*rvt |= ((u64)type << (reg * 3));
}

static enum reg_val_type get_reg_val_type(const struct jit_ctx *ctx,
					  int index, int reg)
{
	return (ctx->reg_val_types[index] >> (reg * 3)) & 7;
}

/* Simply emit the instruction if the JIT memory space has been allocated. */
#define emit_instr_long(ctx, func64, func32, ...)		\
do {								\
	if ((ctx)->target != NULL) {				\
		u32 *p = &(ctx)->target[ctx->idx];		\
		if (IS_ENABLED(CONFIG_64BIT))			\
			uasm_i_##func64(&p, ##__VA_ARGS__);	\
		else						\
			uasm_i_##func32(&p, ##__VA_ARGS__);	\
	}							\
	(ctx)->idx++;						\
} while (0)

#define emit_instr(ctx, func, ...)			\
	emit_instr_long(ctx, func, func, ##__VA_ARGS__)

/*
 * Absolute jump target for a MIPS 'j' instruction, or (unsigned int)-1
 * if the target lies outside the current 256 MB region.
 */
static unsigned int j_target(struct jit_ctx *ctx, int target_idx)
{
	unsigned long target_va, base_va;
	unsigned int r;

	if (!ctx->target)
		return 0;

	base_va = (unsigned long)ctx->target;
	target_va = base_va + (ctx->offsets[target_idx] & ~OFFSETS_B_CONV);

	if ((base_va & ~0x0ffffffful) != (target_va & ~0x0ffffffful))
		return (unsigned int)-1;
	r = target_va & 0x0ffffffful;
	return r;
}

/* Compute the PC-relative branch offset to the instruction at index @tgt. */
static u32 b_imm(unsigned int tgt, struct jit_ctx *ctx)
{
	if (!ctx->gen_b_offsets)
		return 0;

	/*
	 * We want a pc-relative branch.  ctx->offsets[tgt] holds the
	 * byte offset of the target instruction within the image.
	 *
	 * Branch on MIPS:
	 * I: target_offset <- sign_extend(offset)
	 * I+1: PC += target_offset (delay slot)
	 *
	 * ctx->idx currently points to the branch instruction, but
	 * the offset is applied from the delay slot, so subtract 4.
	 */
	return (ctx->offsets[tgt] & ~OFFSETS_B_CONV) -
		(ctx->idx * 4) - 4;
}

enum which_ebpf_reg {
	src_reg,
	src_reg_no_fp,
	dst_reg,
	dst_reg_fp_ok
};

/*
 * For eBPF, the register mapping naturally falls out of the
 * requirements of eBPF and the MIPS n64 ABI.  We don't maintain a
 * separate frame pointer, so BPF_REG_10 relative accesses are
 * adjusted to be $sp relative by the callers.
 */
static int ebpf_to_mips_reg(struct jit_ctx *ctx,
			    const struct bpf_insn *insn,
			    enum which_ebpf_reg w)
{
	int ebpf_reg = (w == src_reg || w == src_reg_no_fp) ?
		insn->src_reg : insn->dst_reg;

	switch (ebpf_reg) {
	case BPF_REG_0:
		return MIPS_R_V0;
	case BPF_REG_1:
		return MIPS_R_A0;
	case BPF_REG_2:
		return MIPS_R_A1;
	case BPF_REG_3:
		return MIPS_R_A2;
	case BPF_REG_4:
		return MIPS_R_A3;
	case BPF_REG_5:
		return MIPS_R_A4;
	case BPF_REG_6:
		ctx->flags |= EBPF_SAVE_S0;
		return MIPS_R_S0;
	case BPF_REG_7:
		ctx->flags |= EBPF_SAVE_S1;
		return MIPS_R_S1;
	case BPF_REG_8:
		ctx->flags |= EBPF_SAVE_S2;
		return MIPS_R_S2;
	case BPF_REG_9:
		ctx->flags |= EBPF_SAVE_S3;
		return MIPS_R_S3;
	case BPF_REG_10:
		if (w == dst_reg || w == src_reg_no_fp)
			goto bad_reg;
		ctx->flags |= EBPF_SEEN_FP;
		/*
		 * The frame pointer is handled specially by the
		 * callers ($sp relative), so return something that
		 * cannot be clobbered just in case.
		 */
		return MIPS_R_ZERO;
	case BPF_REG_AX:
		return MIPS_R_T4;
	default:
bad_reg:
		WARN(1, "Illegal bpf reg: %d\n", ebpf_reg);
		return -EINVAL;
	}
}

/*
 * eBPF stack frame will be something like:
 *
 *  Entry $sp ------>   +--------------------------------+
 *                      |   $ra  (optional)              |
 *                      +--------------------------------+
 *                      |   $s0  (optional)              |
 *                      +--------------------------------+
 *                      |   $s1  (optional)              |
 *                      +--------------------------------+
 *                      |   $s2  (optional)              |
 *                      +--------------------------------+
 *                      |   $s3  (optional)              |
 *                      +--------------------------------+
 *                      |   $s4  (optional)              |
 *                      +--------------------------------+
 *                      |   tmp-storage  (if $ra saved)  |
 * $sp + tmp_offset --> +--------------------------------+ <--BPF_REG_10
 *                      |   BPF_REG_10 relative storage  |
 *                      |    MAX_BPF_STACK (optional)    |
 *                      |      .                         |
 *                      |      .                         |
 *                      |      .                         |
 *     $sp -------->    +--------------------------------+
 *
 * If BPF_REG_10 is never referenced, then the MAX_BPF_STACK sized
 * area is not allocated.
 */
static int gen_int_prologue(struct jit_ctx *ctx)
{
	int stack_adjust = 0;
	int store_offset;
	int locals_size;

	if (ctx->flags & EBPF_SAVE_RA)
		/*
		 * If RA we are doing a function call and may need
		 * extra 8-byte tmp area.
		 */
		stack_adjust += 2 * sizeof(long);
	if (ctx->flags & EBPF_SAVE_S0)
		stack_adjust += sizeof(long);
	if (ctx->flags & EBPF_SAVE_S1)
		stack_adjust += sizeof(long);
	if (ctx->flags & EBPF_SAVE_S2)
		stack_adjust += sizeof(long);
	if (ctx->flags & EBPF_SAVE_S3)
		stack_adjust += sizeof(long);
	if (ctx->flags & EBPF_SAVE_S4)
		stack_adjust += sizeof(long);

	BUILD_BUG_ON(MAX_BPF_STACK & 7);
	locals_size = (ctx->flags & EBPF_SEEN_FP) ? MAX_BPF_STACK : 0;

	stack_adjust += locals_size;

	ctx->stack_size = stack_adjust;

	/*
	 * First instruction initializes the tail call count (TCC).
	 * On a tail call we skip this instruction, and the TCC is
	 * passed in $v1 from the caller.
	 */
	emit_instr(ctx, addiu, MIPS_R_V1, MIPS_R_ZERO, MAX_TAIL_CALL_CNT);
	if (stack_adjust)
		emit_instr_long(ctx, daddiu, addiu,
					MIPS_R_SP, MIPS_R_SP, -stack_adjust);
	else
		return 0;

	store_offset = stack_adjust - sizeof(long);

	if (ctx->flags & EBPF_SAVE_RA) {
		emit_instr_long(ctx, sd, sw,
					MIPS_R_RA, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S0) {
		emit_instr_long(ctx, sd, sw,
					MIPS_R_S0, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S1) {
		emit_instr_long(ctx, sd, sw,
					MIPS_R_S1, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S2) {
		emit_instr_long(ctx, sd, sw,
					MIPS_R_S2, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S3) {
		emit_instr_long(ctx, sd, sw,
					MIPS_R_S3, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S4) {
		emit_instr_long(ctx, sd, sw,
					MIPS_R_S4, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}

	if ((ctx->flags & EBPF_SEEN_TC) && !(ctx->flags & EBPF_TCC_IN_V1))
		emit_instr_long(ctx, daddu, addu,
					MIPS_R_S4, MIPS_R_V1, MIPS_R_ZERO);

	return 0;
}

static int build_int_epilogue(struct jit_ctx *ctx, int dest_reg)
{
	const struct bpf_prog *prog = ctx->skf;
	int stack_adjust = ctx->stack_size;
	int store_offset = stack_adjust - sizeof(long);
	enum reg_val_type td;
	int r0 = MIPS_R_V0;

	if (dest_reg == MIPS_R_RA) {
		/* Don't let zero extended value escape. */
		td = get_reg_val_type(ctx, prog->len, BPF_REG_0);
		if (td == REG_64BIT)
			emit_instr(ctx, sll, r0, r0, 0);
	}

	if (ctx->flags & EBPF_SAVE_RA) {
		emit_instr_long(ctx, ld, lw,
					MIPS_R_RA, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S0) {
		emit_instr_long(ctx, ld, lw,
					MIPS_R_S0, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S1) {
		emit_instr_long(ctx, ld, lw,
					MIPS_R_S1, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S2) {
		emit_instr_long(ctx, ld, lw,
					MIPS_R_S2, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S3) {
		emit_instr_long(ctx, ld, lw,
					MIPS_R_S3, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	if (ctx->flags & EBPF_SAVE_S4) {
		emit_instr_long(ctx, ld, lw,
					MIPS_R_S4, store_offset, MIPS_R_SP);
		store_offset -= sizeof(long);
	}
	emit_instr(ctx, jr, dest_reg);

	/* Restore $sp in the delay slot, or emit a plain nop. */
	if (stack_adjust)
		emit_instr_long(ctx, daddiu, addiu,
					MIPS_R_SP, MIPS_R_SP, stack_adjust);
	else
		emit_instr(ctx, nop);

	return 0;
}

/* Load the 32-bit immediate of @insn into register @reg. */
static void gen_imm_to_reg(const struct bpf_insn *insn, int reg,
			   struct jit_ctx *ctx)
{
	if (insn->imm >= S16_MIN && insn->imm <= S16_MAX) {
		emit_instr(ctx, addiu, reg, MIPS_R_ZERO, insn->imm);
	} else {
		int lower = (s16)(insn->imm & 0xffff);
		int upper = insn->imm - lower;

		emit_instr(ctx, lui, reg, upper >> 16);
		emit_instr(ctx, addiu, reg, reg, lower);
	}
}
421
422static int gen_imm_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
423 int idx)
424{
425 int upper_bound, lower_bound;
426 int dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
427
428 if (dst < 0)
429 return dst;
430
431 switch (BPF_OP(insn->code)) {
432 case BPF_MOV:
433 case BPF_ADD:
434 upper_bound = S16_MAX;
435 lower_bound = S16_MIN;
436 break;
437 case BPF_SUB:
438 upper_bound = -(int)S16_MIN;
439 lower_bound = -(int)S16_MAX;
440 break;
441 case BPF_AND:
442 case BPF_OR:
443 case BPF_XOR:
444 upper_bound = 0xffff;
445 lower_bound = 0;
446 break;
447 case BPF_RSH:
448 case BPF_LSH:
449 case BPF_ARSH:
		/* Shift amounts are truncated, no need for bounds */
451 upper_bound = S32_MAX;
452 lower_bound = S32_MIN;
453 break;
454 default:
455 return -EINVAL;
456 }

	/*
	 * Immediate move clobbers the register, so no sign/zero
	 * extension needed.
	 */
462 if (BPF_CLASS(insn->code) == BPF_ALU64 &&
463 BPF_OP(insn->code) != BPF_MOV &&
464 get_reg_val_type(ctx, idx, insn->dst_reg) == REG_32BIT)
465 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
466
467 if (BPF_CLASS(insn->code) == BPF_ALU &&
468 BPF_OP(insn->code) != BPF_LSH &&
469 BPF_OP(insn->code) != BPF_MOV &&
470 get_reg_val_type(ctx, idx, insn->dst_reg) != REG_32BIT)
471 emit_instr(ctx, sll, dst, dst, 0);
472
473 if (insn->imm >= lower_bound && insn->imm <= upper_bound) {
		/* single insn immediate case */
475 switch (BPF_OP(insn->code) | BPF_CLASS(insn->code)) {
476 case BPF_ALU64 | BPF_MOV:
477 emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, insn->imm);
478 break;
479 case BPF_ALU64 | BPF_AND:
480 case BPF_ALU | BPF_AND:
481 emit_instr(ctx, andi, dst, dst, insn->imm);
482 break;
483 case BPF_ALU64 | BPF_OR:
484 case BPF_ALU | BPF_OR:
485 emit_instr(ctx, ori, dst, dst, insn->imm);
486 break;
487 case BPF_ALU64 | BPF_XOR:
488 case BPF_ALU | BPF_XOR:
489 emit_instr(ctx, xori, dst, dst, insn->imm);
490 break;
491 case BPF_ALU64 | BPF_ADD:
492 emit_instr(ctx, daddiu, dst, dst, insn->imm);
493 break;
494 case BPF_ALU64 | BPF_SUB:
495 emit_instr(ctx, daddiu, dst, dst, -insn->imm);
496 break;
497 case BPF_ALU64 | BPF_RSH:
498 emit_instr(ctx, dsrl_safe, dst, dst, insn->imm & 0x3f);
499 break;
500 case BPF_ALU | BPF_RSH:
501 emit_instr(ctx, srl, dst, dst, insn->imm & 0x1f);
502 break;
503 case BPF_ALU64 | BPF_LSH:
504 emit_instr(ctx, dsll_safe, dst, dst, insn->imm & 0x3f);
505 break;
506 case BPF_ALU | BPF_LSH:
507 emit_instr(ctx, sll, dst, dst, insn->imm & 0x1f);
508 break;
509 case BPF_ALU64 | BPF_ARSH:
510 emit_instr(ctx, dsra_safe, dst, dst, insn->imm & 0x3f);
511 break;
512 case BPF_ALU | BPF_ARSH:
513 emit_instr(ctx, sra, dst, dst, insn->imm & 0x1f);
514 break;
515 case BPF_ALU | BPF_MOV:
516 emit_instr(ctx, addiu, dst, MIPS_R_ZERO, insn->imm);
517 break;
518 case BPF_ALU | BPF_ADD:
519 emit_instr(ctx, addiu, dst, dst, insn->imm);
520 break;
521 case BPF_ALU | BPF_SUB:
522 emit_instr(ctx, addiu, dst, dst, -insn->imm);
523 break;
524 default:
525 return -EINVAL;
526 }
527 } else {
		/* multi insn immediate case */
529 if (BPF_OP(insn->code) == BPF_MOV) {
530 gen_imm_to_reg(insn, dst, ctx);
531 } else {
532 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
533 switch (BPF_OP(insn->code) | BPF_CLASS(insn->code)) {
534 case BPF_ALU64 | BPF_AND:
535 case BPF_ALU | BPF_AND:
536 emit_instr(ctx, and, dst, dst, MIPS_R_AT);
537 break;
538 case BPF_ALU64 | BPF_OR:
539 case BPF_ALU | BPF_OR:
540 emit_instr(ctx, or, dst, dst, MIPS_R_AT);
541 break;
542 case BPF_ALU64 | BPF_XOR:
543 case BPF_ALU | BPF_XOR:
544 emit_instr(ctx, xor, dst, dst, MIPS_R_AT);
545 break;
546 case BPF_ALU64 | BPF_ADD:
547 emit_instr(ctx, daddu, dst, dst, MIPS_R_AT);
548 break;
549 case BPF_ALU64 | BPF_SUB:
550 emit_instr(ctx, dsubu, dst, dst, MIPS_R_AT);
551 break;
552 case BPF_ALU | BPF_ADD:
553 emit_instr(ctx, addu, dst, dst, MIPS_R_AT);
554 break;
555 case BPF_ALU | BPF_SUB:
556 emit_instr(ctx, subu, dst, dst, MIPS_R_AT);
557 break;
558 default:
559 return -EINVAL;
560 }
561 }
562 }
563
564 return 0;
565}

/* Materialize the 64-bit constant @value in register @dst. */
567static void emit_const_to_reg(struct jit_ctx *ctx, int dst, u64 value)
568{
569 if (value >= 0xffffffffffff8000ull || value < 0x8000ull) {
570 emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, (int)value);
571 } else if (value >= 0xffffffff80000000ull ||
572 (value < 0x80000000 && value > 0xffff)) {
573 emit_instr(ctx, lui, dst, (s32)(s16)(value >> 16));
574 emit_instr(ctx, ori, dst, dst, (unsigned int)(value & 0xffff));
575 } else {
576 int i;
577 bool seen_part = false;
578 int needed_shift = 0;
579
580 for (i = 0; i < 4; i++) {
581 u64 part = (value >> (16 * (3 - i))) & 0xffff;
582
583 if (seen_part && needed_shift > 0 && (part || i == 3)) {
584 emit_instr(ctx, dsll_safe, dst, dst, needed_shift);
585 needed_shift = 0;
586 }
587 if (part) {
588 if (i == 0 || (!seen_part && i < 3 && part < 0x8000)) {
589 emit_instr(ctx, lui, dst, (s32)(s16)part);
590 needed_shift = -16;
591 } else {
592 emit_instr(ctx, ori, dst,
593 seen_part ? dst : MIPS_R_ZERO,
594 (unsigned int)part);
595 }
596 seen_part = true;
597 }
598 if (seen_part)
599 needed_shift += 16;
600 }
601 }
602}

static int emit_bpf_tail_call(struct jit_ctx *ctx, int this_idx)
{
	int off, b_off;

	ctx->flags |= EBPF_SEEN_TC;
	/*
	 * if (index >= array->map.max_entries)
	 *     goto out;
	 */
	off = offsetof(struct bpf_array, map.max_entries);
	emit_instr(ctx, lwu, MIPS_R_T5, off, MIPS_R_A1);
	emit_instr(ctx, sltu, MIPS_R_AT, MIPS_R_T5, MIPS_R_A2);
	b_off = b_imm(this_idx + 1, ctx);
	emit_instr(ctx, bne, MIPS_R_AT, MIPS_R_ZERO, b_off);
	/*
	 * if (TCC-- < 0)
	 *     goto out;
	 */
	/* Delay slot */
	emit_instr(ctx, daddiu, MIPS_R_T5,
		   (ctx->flags & EBPF_TCC_IN_V1) ? MIPS_R_V1 : MIPS_R_S4, -1);
	b_off = b_imm(this_idx + 1, ctx);
	emit_instr(ctx, bltz, MIPS_R_T5, b_off);
	/*
	 * prog = array->ptrs[index];
	 * if (prog == NULL)
	 *     goto out;
	 */
	emit_instr(ctx, dsll, MIPS_R_T8, MIPS_R_A2, 3);
	emit_instr(ctx, daddu, MIPS_R_T8, MIPS_R_T8, MIPS_R_A1);
	off = offsetof(struct bpf_array, ptrs);
	emit_instr(ctx, ld, MIPS_R_AT, off, MIPS_R_T8);
	b_off = b_imm(this_idx + 1, ctx);
	emit_instr(ctx, beq, MIPS_R_AT, MIPS_R_ZERO, b_off);
	/* Delay slot */
	emit_instr(ctx, nop);

	/* goto *(prog->bpf_func + 4); */
	off = offsetof(struct bpf_prog, bpf_func);
	emit_instr(ctx, ld, MIPS_R_T9, off, MIPS_R_AT);
	/* All systems are go... propagate TCC */
	emit_instr(ctx, daddu, MIPS_R_V1, MIPS_R_T5, MIPS_R_ZERO);
	/* Skip first instruction (TCC initialization) */
	emit_instr(ctx, daddiu, MIPS_R_T9, MIPS_R_T9, 4);
	return build_int_epilogue(ctx, MIPS_R_T9);
}
651
652static bool is_bad_offset(int b_off)
653{
654 return b_off > 0x1ffff || b_off < -0x20000;
655}

/* Returns the number of insn slots consumed. */
658static int build_one_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
659 int this_idx, int exit_idx)
660{
661 int src, dst, r, td, ts, mem_off, b_off;
662 bool need_swap, did_move, cmp_eq;
663 unsigned int target = 0;
664 u64 t64;
665 s64 t64s;
666 int bpf_op = BPF_OP(insn->code);
667
668 if (IS_ENABLED(CONFIG_32BIT) && ((BPF_CLASS(insn->code) == BPF_ALU64)
669 || (bpf_op == BPF_DW)))
670 return -EINVAL;
671
672 switch (insn->code) {
673 case BPF_ALU64 | BPF_ADD | BPF_K:
674 case BPF_ALU64 | BPF_SUB | BPF_K:
675 case BPF_ALU64 | BPF_OR | BPF_K:
676 case BPF_ALU64 | BPF_AND | BPF_K:
677 case BPF_ALU64 | BPF_LSH | BPF_K:
678 case BPF_ALU64 | BPF_RSH | BPF_K:
679 case BPF_ALU64 | BPF_XOR | BPF_K:
680 case BPF_ALU64 | BPF_ARSH | BPF_K:
681 case BPF_ALU64 | BPF_MOV | BPF_K:
682 case BPF_ALU | BPF_MOV | BPF_K:
683 case BPF_ALU | BPF_ADD | BPF_K:
684 case BPF_ALU | BPF_SUB | BPF_K:
685 case BPF_ALU | BPF_OR | BPF_K:
686 case BPF_ALU | BPF_AND | BPF_K:
687 case BPF_ALU | BPF_LSH | BPF_K:
688 case BPF_ALU | BPF_RSH | BPF_K:
689 case BPF_ALU | BPF_XOR | BPF_K:
690 case BPF_ALU | BPF_ARSH | BPF_K:
691 r = gen_imm_insn(insn, ctx, this_idx);
692 if (r < 0)
693 return r;
694 break;
695 case BPF_ALU64 | BPF_MUL | BPF_K:
696 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
697 if (dst < 0)
698 return dst;
699 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
700 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
701 if (insn->imm == 1)
702 break;
703 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
704 if (MIPS_ISA_REV >= 6) {
705 emit_instr(ctx, dmulu, dst, dst, MIPS_R_AT);
706 } else {
707 emit_instr(ctx, dmultu, MIPS_R_AT, dst);
708 emit_instr(ctx, mflo, dst);
709 }
710 break;
711 case BPF_ALU64 | BPF_NEG | BPF_K:
712 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
713 if (dst < 0)
714 return dst;
715 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
716 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
717 emit_instr(ctx, dsubu, dst, MIPS_R_ZERO, dst);
718 break;
719 case BPF_ALU | BPF_MUL | BPF_K:
720 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
721 if (dst < 0)
722 return dst;
723 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
724 if (td == REG_64BIT) {
			/* sign extend */
726 emit_instr(ctx, sll, dst, dst, 0);
727 }
728 if (insn->imm == 1)
729 break;
730 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
731 if (MIPS_ISA_REV >= 6) {
732 emit_instr(ctx, mulu, dst, dst, MIPS_R_AT);
733 } else {
734 emit_instr(ctx, multu, dst, MIPS_R_AT);
735 emit_instr(ctx, mflo, dst);
736 }
737 break;
738 case BPF_ALU | BPF_NEG | BPF_K:
739 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
740 if (dst < 0)
741 return dst;
742 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
743 if (td == REG_64BIT) {
			/* sign extend */
745 emit_instr(ctx, sll, dst, dst, 0);
746 }
747 emit_instr(ctx, subu, dst, MIPS_R_ZERO, dst);
748 break;
749 case BPF_ALU | BPF_DIV | BPF_K:
750 case BPF_ALU | BPF_MOD | BPF_K:
751 if (insn->imm == 0)
752 return -EINVAL;
753 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
754 if (dst < 0)
755 return dst;
756 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
757 if (td == REG_64BIT)
			/* sign extend */
759 emit_instr(ctx, sll, dst, dst, 0);
760 if (insn->imm == 1) {
			/* div by 1 is a nop, mod by 1 is zero */
762 if (bpf_op == BPF_MOD)
763 emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO);
764 break;
765 }
766 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
767 if (MIPS_ISA_REV >= 6) {
768 if (bpf_op == BPF_DIV)
769 emit_instr(ctx, divu_r6, dst, dst, MIPS_R_AT);
770 else
771 emit_instr(ctx, modu, dst, dst, MIPS_R_AT);
772 break;
773 }
774 emit_instr(ctx, divu, dst, MIPS_R_AT);
775 if (bpf_op == BPF_DIV)
776 emit_instr(ctx, mflo, dst);
777 else
778 emit_instr(ctx, mfhi, dst);
779 break;
780 case BPF_ALU64 | BPF_DIV | BPF_K:
781 case BPF_ALU64 | BPF_MOD | BPF_K:
782 if (insn->imm == 0)
783 return -EINVAL;
784 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
785 if (dst < 0)
786 return dst;
787 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
788 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
789 if (insn->imm == 1) {
			/* div by 1 is a nop, mod by 1 is zero */
791 if (bpf_op == BPF_MOD)
792 emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO);
793 break;
794 }
795 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
796 if (MIPS_ISA_REV >= 6) {
797 if (bpf_op == BPF_DIV)
798 emit_instr(ctx, ddivu_r6, dst, dst, MIPS_R_AT);
799 else
800 emit_instr(ctx, modu, dst, dst, MIPS_R_AT);
801 break;
802 }
803 emit_instr(ctx, ddivu, dst, MIPS_R_AT);
804 if (bpf_op == BPF_DIV)
805 emit_instr(ctx, mflo, dst);
806 else
807 emit_instr(ctx, mfhi, dst);
808 break;
809 case BPF_ALU64 | BPF_MOV | BPF_X:
810 case BPF_ALU64 | BPF_ADD | BPF_X:
811 case BPF_ALU64 | BPF_SUB | BPF_X:
812 case BPF_ALU64 | BPF_XOR | BPF_X:
813 case BPF_ALU64 | BPF_OR | BPF_X:
814 case BPF_ALU64 | BPF_AND | BPF_X:
815 case BPF_ALU64 | BPF_MUL | BPF_X:
816 case BPF_ALU64 | BPF_DIV | BPF_X:
817 case BPF_ALU64 | BPF_MOD | BPF_X:
818 case BPF_ALU64 | BPF_LSH | BPF_X:
819 case BPF_ALU64 | BPF_RSH | BPF_X:
820 case BPF_ALU64 | BPF_ARSH | BPF_X:
821 src = ebpf_to_mips_reg(ctx, insn, src_reg);
822 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
823 if (src < 0 || dst < 0)
824 return -EINVAL;
825 if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
826 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
827 did_move = false;
828 if (insn->src_reg == BPF_REG_10) {
829 if (bpf_op == BPF_MOV) {
830 emit_instr(ctx, daddiu, dst, MIPS_R_SP, MAX_BPF_STACK);
831 did_move = true;
832 } else {
833 emit_instr(ctx, daddiu, MIPS_R_AT, MIPS_R_SP, MAX_BPF_STACK);
834 src = MIPS_R_AT;
835 }
836 } else if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
837 int tmp_reg = MIPS_R_AT;
838
839 if (bpf_op == BPF_MOV) {
840 tmp_reg = dst;
841 did_move = true;
842 }
843 emit_instr(ctx, daddu, tmp_reg, src, MIPS_R_ZERO);
844 emit_instr(ctx, dinsu, tmp_reg, MIPS_R_ZERO, 32, 32);
845 src = MIPS_R_AT;
846 }
847 switch (bpf_op) {
848 case BPF_MOV:
849 if (!did_move)
850 emit_instr(ctx, daddu, dst, src, MIPS_R_ZERO);
851 break;
852 case BPF_ADD:
853 emit_instr(ctx, daddu, dst, dst, src);
854 break;
855 case BPF_SUB:
856 emit_instr(ctx, dsubu, dst, dst, src);
857 break;
858 case BPF_XOR:
859 emit_instr(ctx, xor, dst, dst, src);
860 break;
861 case BPF_OR:
862 emit_instr(ctx, or, dst, dst, src);
863 break;
864 case BPF_AND:
865 emit_instr(ctx, and, dst, dst, src);
866 break;
867 case BPF_MUL:
868 if (MIPS_ISA_REV >= 6) {
869 emit_instr(ctx, dmulu, dst, dst, src);
870 } else {
871 emit_instr(ctx, dmultu, dst, src);
872 emit_instr(ctx, mflo, dst);
873 }
874 break;
875 case BPF_DIV:
876 case BPF_MOD:
877 if (MIPS_ISA_REV >= 6) {
878 if (bpf_op == BPF_DIV)
879 emit_instr(ctx, ddivu_r6,
880 dst, dst, src);
881 else
882 emit_instr(ctx, modu, dst, dst, src);
883 break;
884 }
885 emit_instr(ctx, ddivu, dst, src);
886 if (bpf_op == BPF_DIV)
887 emit_instr(ctx, mflo, dst);
888 else
889 emit_instr(ctx, mfhi, dst);
890 break;
891 case BPF_LSH:
892 emit_instr(ctx, dsllv, dst, dst, src);
893 break;
894 case BPF_RSH:
895 emit_instr(ctx, dsrlv, dst, dst, src);
896 break;
897 case BPF_ARSH:
898 emit_instr(ctx, dsrav, dst, dst, src);
899 break;
900 default:
901 pr_err("ALU64_REG NOT HANDLED\n");
902 return -EINVAL;
903 }
904 break;
905 case BPF_ALU | BPF_MOV | BPF_X:
906 case BPF_ALU | BPF_ADD | BPF_X:
907 case BPF_ALU | BPF_SUB | BPF_X:
908 case BPF_ALU | BPF_XOR | BPF_X:
909 case BPF_ALU | BPF_OR | BPF_X:
910 case BPF_ALU | BPF_AND | BPF_X:
911 case BPF_ALU | BPF_MUL | BPF_X:
912 case BPF_ALU | BPF_DIV | BPF_X:
913 case BPF_ALU | BPF_MOD | BPF_X:
914 case BPF_ALU | BPF_LSH | BPF_X:
915 case BPF_ALU | BPF_RSH | BPF_X:
916 case BPF_ALU | BPF_ARSH | BPF_X:
917 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
918 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
919 if (src < 0 || dst < 0)
920 return -EINVAL;
921 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
922 if (td == REG_64BIT) {
			/* sign extend */
924 emit_instr(ctx, sll, dst, dst, 0);
925 }
926 did_move = false;
927 ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
928 if (ts == REG_64BIT) {
929 int tmp_reg = MIPS_R_AT;
930
931 if (bpf_op == BPF_MOV) {
932 tmp_reg = dst;
933 did_move = true;
934 }
			/* sign extend */
936 emit_instr(ctx, sll, tmp_reg, src, 0);
937 src = MIPS_R_AT;
938 }
939 switch (bpf_op) {
940 case BPF_MOV:
941 if (!did_move)
942 emit_instr(ctx, addu, dst, src, MIPS_R_ZERO);
943 break;
944 case BPF_ADD:
945 emit_instr(ctx, addu, dst, dst, src);
946 break;
947 case BPF_SUB:
948 emit_instr(ctx, subu, dst, dst, src);
949 break;
950 case BPF_XOR:
951 emit_instr(ctx, xor, dst, dst, src);
952 break;
953 case BPF_OR:
954 emit_instr(ctx, or, dst, dst, src);
955 break;
956 case BPF_AND:
957 emit_instr(ctx, and, dst, dst, src);
958 break;
959 case BPF_MUL:
960 emit_instr(ctx, mul, dst, dst, src);
961 break;
962 case BPF_DIV:
963 case BPF_MOD:
964 if (MIPS_ISA_REV >= 6) {
965 if (bpf_op == BPF_DIV)
966 emit_instr(ctx, divu_r6, dst, dst, src);
967 else
968 emit_instr(ctx, modu, dst, dst, src);
969 break;
970 }
971 emit_instr(ctx, divu, dst, src);
972 if (bpf_op == BPF_DIV)
973 emit_instr(ctx, mflo, dst);
974 else
975 emit_instr(ctx, mfhi, dst);
976 break;
977 case BPF_LSH:
978 emit_instr(ctx, sllv, dst, dst, src);
979 break;
980 case BPF_RSH:
981 emit_instr(ctx, srlv, dst, dst, src);
982 break;
983 case BPF_ARSH:
984 emit_instr(ctx, srav, dst, dst, src);
985 break;
986 default:
987 pr_err("ALU_REG NOT HANDLED\n");
988 return -EINVAL;
989 }
990 break;
991 case BPF_JMP | BPF_EXIT:
992 if (this_idx + 1 < exit_idx) {
993 b_off = b_imm(exit_idx, ctx);
994 if (is_bad_offset(b_off))
995 return -E2BIG;
996 emit_instr(ctx, beq, MIPS_R_ZERO, MIPS_R_ZERO, b_off);
997 emit_instr(ctx, nop);
998 }
999 break;
1000 case BPF_JMP | BPF_JEQ | BPF_K:
1001 case BPF_JMP | BPF_JNE | BPF_K:
1002 cmp_eq = (bpf_op == BPF_JEQ);
1003 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1004 if (dst < 0)
1005 return dst;
1006 if (insn->imm == 0) {
1007 src = MIPS_R_ZERO;
1008 } else {
1009 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
1010 src = MIPS_R_AT;
1011 }
1012 goto jeq_common;
1013 case BPF_JMP | BPF_JEQ | BPF_X:
1014 case BPF_JMP | BPF_JNE | BPF_X:
1015 case BPF_JMP | BPF_JSLT | BPF_X:
1016 case BPF_JMP | BPF_JSLE | BPF_X:
1017 case BPF_JMP | BPF_JSGT | BPF_X:
1018 case BPF_JMP | BPF_JSGE | BPF_X:
1019 case BPF_JMP | BPF_JLT | BPF_X:
1020 case BPF_JMP | BPF_JLE | BPF_X:
1021 case BPF_JMP | BPF_JGT | BPF_X:
1022 case BPF_JMP | BPF_JGE | BPF_X:
1023 case BPF_JMP | BPF_JSET | BPF_X:
1024 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1025 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1026 if (src < 0 || dst < 0)
1027 return -EINVAL;
1028 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
1029 ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
1030 if (td == REG_32BIT && ts != REG_32BIT) {
1031 emit_instr(ctx, sll, MIPS_R_AT, src, 0);
1032 src = MIPS_R_AT;
1033 } else if (ts == REG_32BIT && td != REG_32BIT) {
1034 emit_instr(ctx, sll, MIPS_R_AT, dst, 0);
1035 dst = MIPS_R_AT;
1036 }
1037 if (bpf_op == BPF_JSET) {
1038 emit_instr(ctx, and, MIPS_R_AT, dst, src);
1039 cmp_eq = false;
1040 dst = MIPS_R_AT;
1041 src = MIPS_R_ZERO;
1042 } else if (bpf_op == BPF_JSGT || bpf_op == BPF_JSLE) {
1043 emit_instr(ctx, dsubu, MIPS_R_AT, dst, src);
1044 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1045 b_off = b_imm(exit_idx, ctx);
1046 if (is_bad_offset(b_off))
1047 return -E2BIG;
1048 if (bpf_op == BPF_JSGT)
1049 emit_instr(ctx, blez, MIPS_R_AT, b_off);
1050 else
1051 emit_instr(ctx, bgtz, MIPS_R_AT, b_off);
1052 emit_instr(ctx, nop);
1053 return 2;
1054 }
1055 b_off = b_imm(this_idx + insn->off + 1, ctx);
1056 if (is_bad_offset(b_off))
1057 return -E2BIG;
1058 if (bpf_op == BPF_JSGT)
1059 emit_instr(ctx, bgtz, MIPS_R_AT, b_off);
1060 else
1061 emit_instr(ctx, blez, MIPS_R_AT, b_off);
1062 emit_instr(ctx, nop);
1063 break;
1064 } else if (bpf_op == BPF_JSGE || bpf_op == BPF_JSLT) {
1065 emit_instr(ctx, slt, MIPS_R_AT, dst, src);
1066 cmp_eq = bpf_op == BPF_JSGE;
1067 dst = MIPS_R_AT;
1068 src = MIPS_R_ZERO;
1069 } else if (bpf_op == BPF_JGT || bpf_op == BPF_JLE) {
			/* dst or src could be AT */
1071 emit_instr(ctx, dsubu, MIPS_R_T8, dst, src);
1072 emit_instr(ctx, sltu, MIPS_R_AT, dst, src);
			/* SP known to be non-zero, movz becomes boolean not */
1074 if (MIPS_ISA_REV >= 6) {
1075 emit_instr(ctx, seleqz, MIPS_R_T9,
1076 MIPS_R_SP, MIPS_R_T8);
1077 } else {
1078 emit_instr(ctx, movz, MIPS_R_T9,
1079 MIPS_R_SP, MIPS_R_T8);
1080 emit_instr(ctx, movn, MIPS_R_T9,
1081 MIPS_R_ZERO, MIPS_R_T8);
1082 }
1083 emit_instr(ctx, or, MIPS_R_AT, MIPS_R_T9, MIPS_R_AT);
1084 cmp_eq = bpf_op == BPF_JGT;
1085 dst = MIPS_R_AT;
1086 src = MIPS_R_ZERO;
1087 } else if (bpf_op == BPF_JGE || bpf_op == BPF_JLT) {
1088 emit_instr(ctx, sltu, MIPS_R_AT, dst, src);
1089 cmp_eq = bpf_op == BPF_JGE;
1090 dst = MIPS_R_AT;
1091 src = MIPS_R_ZERO;
1092 } else {
1093 cmp_eq = (bpf_op == BPF_JEQ);
1094 }
1095jeq_common:
	/*
	 * If the next insn is EXIT and we are jumping around only it,
	 * invert the sense of the compare and conditionally jump to
	 * the exit.  Poor man's branch chaining.
	 */
1102 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1103 b_off = b_imm(exit_idx, ctx);
1104 if (is_bad_offset(b_off)) {
1105 target = j_target(ctx, exit_idx);
1106 if (target == (unsigned int)-1)
1107 return -E2BIG;
1108 cmp_eq = !cmp_eq;
1109 b_off = 4 * 3;
1110 if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
1111 ctx->offsets[this_idx] |= OFFSETS_B_CONV;
1112 ctx->long_b_conversion = 1;
1113 }
1114 }
1115
1116 if (cmp_eq)
1117 emit_instr(ctx, bne, dst, src, b_off);
1118 else
1119 emit_instr(ctx, beq, dst, src, b_off);
1120 emit_instr(ctx, nop);
1121 if (ctx->offsets[this_idx] & OFFSETS_B_CONV) {
1122 emit_instr(ctx, j, target);
1123 emit_instr(ctx, nop);
1124 }
1125 return 2;
1126 }
1127 b_off = b_imm(this_idx + insn->off + 1, ctx);
1128 if (is_bad_offset(b_off)) {
1129 target = j_target(ctx, this_idx + insn->off + 1);
1130 if (target == (unsigned int)-1)
1131 return -E2BIG;
1132 cmp_eq = !cmp_eq;
1133 b_off = 4 * 3;
1134 if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
1135 ctx->offsets[this_idx] |= OFFSETS_B_CONV;
1136 ctx->long_b_conversion = 1;
1137 }
1138 }
1139
1140 if (cmp_eq)
1141 emit_instr(ctx, beq, dst, src, b_off);
1142 else
1143 emit_instr(ctx, bne, dst, src, b_off);
1144 emit_instr(ctx, nop);
1145 if (ctx->offsets[this_idx] & OFFSETS_B_CONV) {
1146 emit_instr(ctx, j, target);
1147 emit_instr(ctx, nop);
1148 }
1149 break;
1150 case BPF_JMP | BPF_JSGT | BPF_K:
1151 case BPF_JMP | BPF_JSGE | BPF_K:
1152 case BPF_JMP | BPF_JSLT | BPF_K:
1153 case BPF_JMP | BPF_JSLE | BPF_K:
1154 cmp_eq = (bpf_op == BPF_JSGE);
1155 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1156 if (dst < 0)
1157 return dst;
1158
1159 if (insn->imm == 0) {
1160 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1161 b_off = b_imm(exit_idx, ctx);
1162 if (is_bad_offset(b_off))
1163 return -E2BIG;
1164 switch (bpf_op) {
1165 case BPF_JSGT:
1166 emit_instr(ctx, blez, dst, b_off);
1167 break;
1168 case BPF_JSGE:
1169 emit_instr(ctx, bltz, dst, b_off);
1170 break;
1171 case BPF_JSLT:
1172 emit_instr(ctx, bgez, dst, b_off);
1173 break;
1174 case BPF_JSLE:
1175 emit_instr(ctx, bgtz, dst, b_off);
1176 break;
1177 }
1178 emit_instr(ctx, nop);
1179 return 2;
1180 }
1181 b_off = b_imm(this_idx + insn->off + 1, ctx);
1182 if (is_bad_offset(b_off))
1183 return -E2BIG;
1184 switch (bpf_op) {
1185 case BPF_JSGT:
1186 emit_instr(ctx, bgtz, dst, b_off);
1187 break;
1188 case BPF_JSGE:
1189 emit_instr(ctx, bgez, dst, b_off);
1190 break;
1191 case BPF_JSLT:
1192 emit_instr(ctx, bltz, dst, b_off);
1193 break;
1194 case BPF_JSLE:
1195 emit_instr(ctx, blez, dst, b_off);
1196 break;
1197 }
1198 emit_instr(ctx, nop);
1199 break;
1200 }

		/*
		 * Only a "less than" compare is available, so use
		 * imm + 1 to synthesize the "GT" and "LE" tests.
		 */
1205 if (bpf_op == BPF_JSGT)
1206 t64s = insn->imm + 1;
1207 else if (bpf_op == BPF_JSLE)
1208 t64s = insn->imm + 1;
1209 else
1210 t64s = insn->imm;
1211
1212 cmp_eq = bpf_op == BPF_JSGT || bpf_op == BPF_JSGE;
1213 if (t64s >= S16_MIN && t64s <= S16_MAX) {
1214 emit_instr(ctx, slti, MIPS_R_AT, dst, (int)t64s);
1215 src = MIPS_R_AT;
1216 dst = MIPS_R_ZERO;
1217 goto jeq_common;
1218 }
1219 emit_const_to_reg(ctx, MIPS_R_AT, (u64)t64s);
1220 emit_instr(ctx, slt, MIPS_R_AT, dst, MIPS_R_AT);
1221 src = MIPS_R_AT;
1222 dst = MIPS_R_ZERO;
1223 goto jeq_common;
1224
1225 case BPF_JMP | BPF_JGT | BPF_K:
1226 case BPF_JMP | BPF_JGE | BPF_K:
1227 case BPF_JMP | BPF_JLT | BPF_K:
1228 case BPF_JMP | BPF_JLE | BPF_K:
1229 cmp_eq = (bpf_op == BPF_JGE);
1230 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1231 if (dst < 0)
1232 return dst;

		/*
		 * Only a "less than" compare is available, so use
		 * imm + 1 to synthesize the unsigned "GT" and "LE" tests.
		 */
1237 if (bpf_op == BPF_JGT)
1238 t64s = (u64)(u32)(insn->imm) + 1;
1239 else if (bpf_op == BPF_JLE)
1240 t64s = (u64)(u32)(insn->imm) + 1;
1241 else
1242 t64s = (u64)(u32)(insn->imm);
1243
1244 cmp_eq = bpf_op == BPF_JGT || bpf_op == BPF_JGE;
1245
1246 emit_const_to_reg(ctx, MIPS_R_AT, (u64)t64s);
1247 emit_instr(ctx, sltu, MIPS_R_AT, dst, MIPS_R_AT);
1248 src = MIPS_R_AT;
1249 dst = MIPS_R_ZERO;
1250 goto jeq_common;
1251
1252 case BPF_JMP | BPF_JSET | BPF_K:
1253 dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
1254 if (dst < 0)
1255 return dst;
1256
1257 if (ctx->use_bbit_insns && hweight32((u32)insn->imm) == 1) {
1258 if ((insn + 1)->code == (BPF_JMP | BPF_EXIT) && insn->off == 1) {
1259 b_off = b_imm(exit_idx, ctx);
1260 if (is_bad_offset(b_off))
1261 return -E2BIG;
1262 emit_instr(ctx, bbit0, dst, ffs((u32)insn->imm) - 1, b_off);
1263 emit_instr(ctx, nop);
1264 return 2;
1265 }
1266 b_off = b_imm(this_idx + insn->off + 1, ctx);
1267 if (is_bad_offset(b_off))
1268 return -E2BIG;
1269 emit_instr(ctx, bbit1, dst, ffs((u32)insn->imm) - 1, b_off);
1270 emit_instr(ctx, nop);
1271 break;
1272 }
1273 t64 = (u32)insn->imm;
1274 emit_const_to_reg(ctx, MIPS_R_AT, t64);
1275 emit_instr(ctx, and, MIPS_R_AT, dst, MIPS_R_AT);
1276 src = MIPS_R_AT;
1277 dst = MIPS_R_ZERO;
1278 cmp_eq = false;
1279 goto jeq_common;
1280
1281 case BPF_JMP | BPF_JA:
		/*
		 * Prefer a relative branch for easier debugging, but
		 * fall back to an absolute jump if the target is out
		 * of branch range.
		 */
1286 b_off = b_imm(this_idx + insn->off + 1, ctx);
1287 if (is_bad_offset(b_off)) {
1288 target = j_target(ctx, this_idx + insn->off + 1);
1289 if (target == (unsigned int)-1)
1290 return -E2BIG;
1291 emit_instr(ctx, j, target);
1292 } else {
1293 emit_instr(ctx, b, b_off);
1294 }
1295 emit_instr(ctx, nop);
1296 break;
1297 case BPF_LD | BPF_DW | BPF_IMM:
1298 if (insn->src_reg != 0)
1299 return -EINVAL;
1300 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1301 if (dst < 0)
1302 return dst;
1303 t64 = ((u64)(u32)insn->imm) | ((u64)(insn + 1)->imm << 32);
1304 emit_const_to_reg(ctx, dst, t64);
1305 return 2;
1306
1307 case BPF_JMP | BPF_CALL:
1308 ctx->flags |= EBPF_SAVE_RA;
1309 t64s = (s64)insn->imm + (long)__bpf_call_base;
1310 emit_const_to_reg(ctx, MIPS_R_T9, (u64)t64s);
1311 emit_instr(ctx, jalr, MIPS_R_RA, MIPS_R_T9);
		/* delay slot */
1313 emit_instr(ctx, nop);
1314 break;
1315
1316 case BPF_JMP | BPF_TAIL_CALL:
1317 if (emit_bpf_tail_call(ctx, this_idx))
1318 return -EINVAL;
1319 break;
1320
1321 case BPF_ALU | BPF_END | BPF_FROM_BE:
1322 case BPF_ALU | BPF_END | BPF_FROM_LE:
1323 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1324 if (dst < 0)
1325 return dst;
1326 td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
1327 if (insn->imm == 64 && td == REG_32BIT)
1328 emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
1329
1330 if (insn->imm != 64 && td == REG_64BIT) {
			/* sign extend */
1332 emit_instr(ctx, sll, dst, dst, 0);
1333 }
1334
1335#ifdef __BIG_ENDIAN
1336 need_swap = (BPF_SRC(insn->code) == BPF_FROM_LE);
1337#else
1338 need_swap = (BPF_SRC(insn->code) == BPF_FROM_BE);
1339#endif
1340 if (insn->imm == 16) {
1341 if (need_swap)
1342 emit_instr(ctx, wsbh, dst, dst);
1343 emit_instr(ctx, andi, dst, dst, 0xffff);
1344 } else if (insn->imm == 32) {
1345 if (need_swap) {
1346 emit_instr(ctx, wsbh, dst, dst);
1347 emit_instr(ctx, rotr, dst, dst, 16);
1348 }
1349 } else {
1350 if (need_swap) {
1351 emit_instr(ctx, dsbh, dst, dst);
1352 emit_instr(ctx, dshd, dst, dst);
1353 }
1354 }
1355 break;
1356
1357 case BPF_ST | BPF_B | BPF_MEM:
1358 case BPF_ST | BPF_H | BPF_MEM:
1359 case BPF_ST | BPF_W | BPF_MEM:
1360 case BPF_ST | BPF_DW | BPF_MEM:
1361 if (insn->dst_reg == BPF_REG_10) {
1362 ctx->flags |= EBPF_SEEN_FP;
1363 dst = MIPS_R_SP;
1364 mem_off = insn->off + MAX_BPF_STACK;
1365 } else {
1366 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1367 if (dst < 0)
1368 return dst;
1369 mem_off = insn->off;
1370 }
1371 gen_imm_to_reg(insn, MIPS_R_AT, ctx);
1372 switch (BPF_SIZE(insn->code)) {
1373 case BPF_B:
1374 emit_instr(ctx, sb, MIPS_R_AT, mem_off, dst);
1375 break;
1376 case BPF_H:
1377 emit_instr(ctx, sh, MIPS_R_AT, mem_off, dst);
1378 break;
1379 case BPF_W:
1380 emit_instr(ctx, sw, MIPS_R_AT, mem_off, dst);
1381 break;
1382 case BPF_DW:
1383 emit_instr(ctx, sd, MIPS_R_AT, mem_off, dst);
1384 break;
1385 }
1386 break;
1387
1388 case BPF_LDX | BPF_B | BPF_MEM:
1389 case BPF_LDX | BPF_H | BPF_MEM:
1390 case BPF_LDX | BPF_W | BPF_MEM:
1391 case BPF_LDX | BPF_DW | BPF_MEM:
1392 if (insn->src_reg == BPF_REG_10) {
1393 ctx->flags |= EBPF_SEEN_FP;
1394 src = MIPS_R_SP;
1395 mem_off = insn->off + MAX_BPF_STACK;
1396 } else {
1397 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1398 if (src < 0)
1399 return src;
1400 mem_off = insn->off;
1401 }
1402 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1403 if (dst < 0)
1404 return dst;
1405 switch (BPF_SIZE(insn->code)) {
1406 case BPF_B:
1407 emit_instr(ctx, lbu, dst, mem_off, src);
1408 break;
1409 case BPF_H:
1410 emit_instr(ctx, lhu, dst, mem_off, src);
1411 break;
1412 case BPF_W:
1413 emit_instr(ctx, lw, dst, mem_off, src);
1414 break;
1415 case BPF_DW:
1416 emit_instr(ctx, ld, dst, mem_off, src);
1417 break;
1418 }
1419 break;
1420
1421 case BPF_STX | BPF_B | BPF_MEM:
1422 case BPF_STX | BPF_H | BPF_MEM:
1423 case BPF_STX | BPF_W | BPF_MEM:
1424 case BPF_STX | BPF_DW | BPF_MEM:
1425 case BPF_STX | BPF_W | BPF_XADD:
1426 case BPF_STX | BPF_DW | BPF_XADD:
1427 if (insn->dst_reg == BPF_REG_10) {
1428 ctx->flags |= EBPF_SEEN_FP;
1429 dst = MIPS_R_SP;
1430 mem_off = insn->off + MAX_BPF_STACK;
1431 } else {
1432 dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
1433 if (dst < 0)
1434 return dst;
1435 mem_off = insn->off;
1436 }
1437 src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
1438 if (src < 0)
1439 return src;
1440 if (BPF_MODE(insn->code) == BPF_XADD) {
			/*
			 * If mem_off does not fit within the 9 bit ll/sc
			 * instruction immediate field, use a temp reg.
			 */
1445 if (MIPS_ISA_REV >= 6 &&
1446 (mem_off >= BIT(8) || mem_off < -BIT(8))) {
1447 emit_instr(ctx, daddiu, MIPS_R_T6,
1448 dst, mem_off);
1449 mem_off = 0;
1450 dst = MIPS_R_T6;
1451 }
1452 switch (BPF_SIZE(insn->code)) {
1453 case BPF_W:
1454 if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
1455 emit_instr(ctx, sll, MIPS_R_AT, src, 0);
1456 src = MIPS_R_AT;
1457 }
1458 emit_instr(ctx, ll, MIPS_R_T8, mem_off, dst);
1459 emit_instr(ctx, addu, MIPS_R_T8, MIPS_R_T8, src);
1460 emit_instr(ctx, sc, MIPS_R_T8, mem_off, dst);
			/*
			 * On failure back up to LL (-4 instructions of
			 * 4 bytes each).
			 */
1465 emit_instr(ctx, beq, MIPS_R_T8, MIPS_R_ZERO, -4 * 4);
1466 emit_instr(ctx, nop);
1467 break;
1468 case BPF_DW:
1469 if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
1470 emit_instr(ctx, daddu, MIPS_R_AT, src, MIPS_R_ZERO);
1471 emit_instr(ctx, dinsu, MIPS_R_AT, MIPS_R_ZERO, 32, 32);
1472 src = MIPS_R_AT;
1473 }
1474 emit_instr(ctx, lld, MIPS_R_T8, mem_off, dst);
1475 emit_instr(ctx, daddu, MIPS_R_T8, MIPS_R_T8, src);
1476 emit_instr(ctx, scd, MIPS_R_T8, mem_off, dst);
1477 emit_instr(ctx, beq, MIPS_R_T8, MIPS_R_ZERO, -4 * 4);
1478 emit_instr(ctx, nop);
1479 break;
1480 }
1481 } else {
1482 switch (BPF_SIZE(insn->code)) {
1483 case BPF_B:
1484 emit_instr(ctx, sb, src, mem_off, dst);
1485 break;
1486 case BPF_H:
1487 emit_instr(ctx, sh, src, mem_off, dst);
1488 break;
1489 case BPF_W:
1490 emit_instr(ctx, sw, src, mem_off, dst);
1491 break;
1492 case BPF_DW:
1493 if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
1494 emit_instr(ctx, daddu, MIPS_R_AT, src, MIPS_R_ZERO);
1495 emit_instr(ctx, dinsu, MIPS_R_AT, MIPS_R_ZERO, 32, 32);
1496 src = MIPS_R_AT;
1497 }
1498 emit_instr(ctx, sd, src, mem_off, dst);
1499 break;
1500 }
1501 }
1502 break;
1503
1504 default:
1505 pr_err("NOT HANDLED %d - (%02x)\n",
1506 this_idx, (unsigned int)insn->code);
1507 return -EINVAL;
1508 }
1509 return 1;
1510}
1511
1512#define RVT_VISITED_MASK 0xc000000000000000ull
1513#define RVT_FALL_THROUGH 0x4000000000000000ull
1514#define RVT_BRANCH_TAKEN 0x8000000000000000ull
1515#define RVT_DONE (RVT_FALL_THROUGH | RVT_BRANCH_TAKEN)
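/*
 * The two top bits of each reg_val_types entry record how the insn was
 * reached during reg_val_propagate(): via fall through, via a taken
 * branch, or both (RVT_DONE).  The remaining bits hold the packed
 * reg_val_type of every BPF register at that instruction.
 */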
1516
1517static int build_int_body(struct jit_ctx *ctx)
1518{
1519 const struct bpf_prog *prog = ctx->skf;
1520 const struct bpf_insn *insn;
1521 int i, r;
1522
1523 for (i = 0; i < prog->len; ) {
1524 insn = prog->insnsi + i;
1525 if ((ctx->reg_val_types[i] & RVT_VISITED_MASK) == 0) {
			/* dead instruction, don't emit it. */
1527 i++;
1528 continue;
1529 }
1530
1531 if (ctx->target == NULL)
1532 ctx->offsets[i] = (ctx->offsets[i] & OFFSETS_B_CONV) | (ctx->idx * 4);
1533
1534 r = build_one_insn(insn, ctx, i, prog->len);
1535 if (r < 0)
1536 return r;
1537 i += r;
1538 }
1539
1540 if (ctx->target == NULL)
1541 ctx->offsets[i] = ctx->idx * 4;

	/*
	 * All exits have an offset of the epilogue, some offsets may
	 * not have been set due to branch-around threading, so set
	 * them now.
	 */
1548 if (ctx->target == NULL)
1549 for (i = 0; i < prog->len; i++) {
1550 insn = prog->insnsi + i;
1551 if (insn->code == (BPF_JMP | BPF_EXIT))
1552 ctx->offsets[i] = ctx->idx * 4;
1553 }
1554 return 0;
1555}
1556
1557
1558static int reg_val_propagate_range(struct jit_ctx *ctx, u64 initial_rvt,
1559 int start_idx, bool follow_taken)
1560{
1561 const struct bpf_prog *prog = ctx->skf;
1562 const struct bpf_insn *insn;
1563 u64 exit_rvt = initial_rvt;
1564 u64 *rvt = ctx->reg_val_types;
1565 int idx;
1566 int reg;
1567
1568 for (idx = start_idx; idx < prog->len; idx++) {
1569 rvt[idx] = (rvt[idx] & RVT_VISITED_MASK) | exit_rvt;
1570 insn = prog->insnsi + idx;
1571 switch (BPF_CLASS(insn->code)) {
1572 case BPF_ALU:
1573 switch (BPF_OP(insn->code)) {
1574 case BPF_ADD:
1575 case BPF_SUB:
1576 case BPF_MUL:
1577 case BPF_DIV:
1578 case BPF_OR:
1579 case BPF_AND:
1580 case BPF_LSH:
1581 case BPF_RSH:
1582 case BPF_NEG:
1583 case BPF_MOD:
1584 case BPF_XOR:
1585 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1586 break;
1587 case BPF_MOV:
1588 if (BPF_SRC(insn->code)) {
1589 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1590 } else {
1591
1592 if (insn->imm >= 0)
1593 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1594 else
1595 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1596 }
1597 break;
1598 case BPF_END:
1599 if (insn->imm == 64)
1600 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1601 else if (insn->imm == 32)
1602 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1603 else
1604 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1605 break;
1606 }
1607 rvt[idx] |= RVT_DONE;
1608 break;
1609 case BPF_ALU64:
1610 switch (BPF_OP(insn->code)) {
1611 case BPF_MOV:
1612 if (BPF_SRC(insn->code)) {
					/* REG to REG move */
1614 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1615 } else {
					/* IMM to REG move */
1617 if (insn->imm >= 0)
1618 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1619 else
1620 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT_32BIT);
1621 }
1622 break;
1623 default:
1624 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1625 }
1626 rvt[idx] |= RVT_DONE;
1627 break;
1628 case BPF_LD:
1629 switch (BPF_SIZE(insn->code)) {
1630 case BPF_DW:
1631 if (BPF_MODE(insn->code) == BPF_IMM) {
1632 s64 val;
1633
1634 val = (s64)((u32)insn->imm | ((u64)(insn + 1)->imm << 32));
1635 if (val > 0 && val <= S32_MAX)
1636 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1637 else if (val >= S32_MIN && val <= S32_MAX)
1638 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT_32BIT);
1639 else
1640 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1641 rvt[idx] |= RVT_DONE;
1642 idx++;
1643 } else {
1644 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1645 }
1646 break;
1647 case BPF_B:
1648 case BPF_H:
1649 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1650 break;
1651 case BPF_W:
1652 if (BPF_MODE(insn->code) == BPF_IMM)
1653 set_reg_val_type(&exit_rvt, insn->dst_reg,
1654 insn->imm >= 0 ? REG_32BIT_POS : REG_32BIT);
1655 else
1656 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1657 break;
1658 }
1659 rvt[idx] |= RVT_DONE;
1660 break;
1661 case BPF_LDX:
1662 switch (BPF_SIZE(insn->code)) {
1663 case BPF_DW:
1664 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_64BIT);
1665 break;
1666 case BPF_B:
1667 case BPF_H:
1668 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT_POS);
1669 break;
1670 case BPF_W:
1671 set_reg_val_type(&exit_rvt, insn->dst_reg, REG_32BIT);
1672 break;
1673 }
1674 rvt[idx] |= RVT_DONE;
1675 break;
1676 case BPF_JMP:
1677 switch (BPF_OP(insn->code)) {
1678 case BPF_EXIT:
1679 rvt[idx] = RVT_DONE | exit_rvt;
1680 rvt[prog->len] = exit_rvt;
1681 return idx;
1682 case BPF_JA:
1683 rvt[idx] |= RVT_DONE;
1684 idx += insn->off;
1685 break;
1686 case BPF_JEQ:
1687 case BPF_JGT:
1688 case BPF_JGE:
1689 case BPF_JLT:
1690 case BPF_JLE:
1691 case BPF_JSET:
1692 case BPF_JNE:
1693 case BPF_JSGT:
1694 case BPF_JSGE:
1695 case BPF_JSLT:
1696 case BPF_JSLE:
1697 if (follow_taken) {
1698 rvt[idx] |= RVT_BRANCH_TAKEN;
1699 idx += insn->off;
1700 follow_taken = false;
1701 } else {
1702 rvt[idx] |= RVT_FALL_THROUGH;
1703 }
1704 break;
1705 case BPF_CALL:
1706 set_reg_val_type(&exit_rvt, BPF_REG_0, REG_64BIT);
				/* Upon call return, argument registers are clobbered. */
1708 for (reg = BPF_REG_0; reg <= BPF_REG_5; reg++)
1709 set_reg_val_type(&exit_rvt, reg, REG_64BIT);
1710
1711 rvt[idx] |= RVT_DONE;
1712 break;
1713 default:
1714 WARN(1, "Unhandled BPF_JMP case.\n");
1715 rvt[idx] |= RVT_DONE;
1716 break;
1717 }
1718 break;
1719 default:
1720 rvt[idx] |= RVT_DONE;
1721 break;
1722 }
1723 }
1724 return idx;
1725}
1726

/*
 * Track the value range (32-bit zero extended, 32-bit sign extended,
 * or full 64-bit) of each register at each instruction, so that
 * build_one_insn() only emits truncation or sign-extension code where
 * it is really needed.
 */
1735static int reg_val_propagate(struct jit_ctx *ctx)
1736{
1737 const struct bpf_prog *prog = ctx->skf;
1738 u64 exit_rvt;
1739 int reg;
1740 int i;
1741
	/*
	 * 11 registers * 3 bits/reg leaves the top bits free for other
	 * uses.  Bits 62..63 are used to mark whether an insn has been
	 * visited.
	 */
1746 exit_rvt = 0;
1747
	/* Upon entry, the argument registers hold 64-bit values. */
1749 for (reg = BPF_REG_1; reg <= BPF_REG_5; reg++)
1750 set_reg_val_type(&exit_rvt, reg, REG_64BIT);
1751
	/*
	 * First follow all conditional branches on the fall-through
	 * edge, from the start of the program to its end.
	 */
1756 reg_val_propagate_range(ctx, exit_rvt, 0, false);
1757restart_search:
	/*
	 * Then repeatedly find the first conditional branch where only
	 * the fall-through edge has been visited, and follow the
	 * branch-taken edge from there.  The search restarts after
	 * each propagated range.
	 */
1764 for (i = 0; i < prog->len; i++) {
1765 u64 rvt = ctx->reg_val_types[i];
1766
1767 if ((rvt & RVT_VISITED_MASK) == RVT_DONE ||
1768 (rvt & RVT_VISITED_MASK) == 0)
1769 continue;
1770 if ((rvt & RVT_VISITED_MASK) == RVT_FALL_THROUGH) {
1771 reg_val_propagate_range(ctx, rvt & ~RVT_VISITED_MASK, i, true);
1772 } else {
1773 WARN(1, "Unexpected RVT_BRANCH_TAKEN case.\n");
1774 reg_val_propagate_range(ctx, rvt & ~RVT_VISITED_MASK, i, false);
1775 }
1776 goto restart_search;
1777 }

	/*
	 * Eventually both edges of every conditional branch have been
	 * followed and all reachable instructions are visited; anything
	 * still unvisited is dead code.
	 */
1784 return 0;
1785}
1786
1787static void jit_fill_hole(void *area, unsigned int size)
1788{
1789 u32 *p;
1790
	/* We are guaranteed to have aligned memory. */
1792 for (p = area; size >= sizeof(u32); size -= sizeof(u32))
1793 uasm_i_break(&p, BRK_BUG);
1794}
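
/*
 * Overall flow of the JIT below: reg_val_propagate() first classifies
 * register values, then build_int_body() runs in non-emitting passes to
 * compute instruction offsets (repeating while long branch conversions
 * grow the code), and finally the same passes run again with ctx.target
 * pointing at the allocated image to emit the machine code.
 */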
1795
1796struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog)
1797{
1798 struct bpf_prog *orig_prog = prog;
1799 bool tmp_blinded = false;
1800 struct bpf_prog *tmp;
1801 struct bpf_binary_header *header = NULL;
1802 struct jit_ctx ctx;
1803 unsigned int image_size;
1804 u8 *image_ptr;
1805
1806 if (!prog->jit_requested || MIPS_ISA_REV < 2)
1807 return prog;
1808
1809 tmp = bpf_jit_blind_constants(prog);
	/* If blinding was requested and we failed during blinding,
	 * we must fall back to the interpreter.
	 */
1813 if (IS_ERR(tmp))
1814 return orig_prog;
1815 if (tmp != prog) {
1816 tmp_blinded = true;
1817 prog = tmp;
1818 }
1819
1820 memset(&ctx, 0, sizeof(ctx));
1821
1822 preempt_disable();
1823 switch (current_cpu_type()) {
1824 case CPU_CAVIUM_OCTEON:
1825 case CPU_CAVIUM_OCTEON_PLUS:
1826 case CPU_CAVIUM_OCTEON2:
1827 case CPU_CAVIUM_OCTEON3:
1828 ctx.use_bbit_insns = 1;
1829 break;
1830 default:
1831 ctx.use_bbit_insns = 0;
1832 }
1833 preempt_enable();
1834
1835 ctx.offsets = kcalloc(prog->len + 1, sizeof(*ctx.offsets), GFP_KERNEL);
1836 if (ctx.offsets == NULL)
1837 goto out_err;
1838
1839 ctx.reg_val_types = kcalloc(prog->len + 1, sizeof(*ctx.reg_val_types), GFP_KERNEL);
1840 if (ctx.reg_val_types == NULL)
1841 goto out_err;
1842
1843 ctx.skf = prog;
1844
1845 if (reg_val_propagate(&ctx))
1846 goto out_err;

	/*
	 * First pass discovers used resources and instruction offsets
	 * assuming short branches are used.
	 */
1852 if (build_int_body(&ctx))
1853 goto out_err;

	/*
	 * If no helper calls are made (no EBPF_SAVE_RA), the tail call
	 * count can stay in $v1, otherwise it must be kept in $s4.
	 */
1859 if (ctx.flags & EBPF_SEEN_TC) {
1860 if (ctx.flags & EBPF_SAVE_RA)
1861 ctx.flags |= EBPF_SAVE_S4;
1862 else
1863 ctx.flags |= EBPF_TCC_IN_V1;
1864 }

	/*
	 * Second pass generates offsets.  If any branch is out of
	 * range, a jump-around long branch sequence is generated, and
	 * we have to try again from the beginning to compute the new
	 * offsets.  This is done until no additional conversions are
	 * necessary.
	 */
1873 do {
1874 ctx.idx = 0;
1875 ctx.gen_b_offsets = 1;
1876 ctx.long_b_conversion = 0;
1877 if (gen_int_prologue(&ctx))
1878 goto out_err;
1879 if (build_int_body(&ctx))
1880 goto out_err;
1881 if (build_int_epilogue(&ctx, MIPS_R_RA))
1882 goto out_err;
1883 } while (ctx.long_b_conversion);
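
	/* ctx.idx is now the final instruction count; size the image from it. */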
1884
1885 image_size = 4 * ctx.idx;
1886
1887 header = bpf_jit_binary_alloc(image_size, &image_ptr,
1888 sizeof(u32), jit_fill_hole);
1889 if (header == NULL)
1890 goto out_err;
1891
1892 ctx.target = (u32 *)image_ptr;
1893
	/* Third pass generates the code */
1895 ctx.idx = 0;
1896 if (gen_int_prologue(&ctx))
1897 goto out_err;
1898 if (build_int_body(&ctx))
1899 goto out_err;
1900 if (build_int_epilogue(&ctx, MIPS_R_RA))
1901 goto out_err;
1902
	/* Update the icache */
1904 flush_icache_range((unsigned long)ctx.target,
1905 (unsigned long)&ctx.target[ctx.idx]);
1906
1907 if (bpf_jit_enable > 1)
		/* Dump JIT code */
1909 bpf_jit_dump(prog->len, image_size, 2, ctx.target);
1910
1911 bpf_jit_binary_lock_ro(header);
1912 prog->bpf_func = (void *)ctx.target;
1913 prog->jited = 1;
1914 prog->jited_len = image_size;
1915out_normal:
1916 if (tmp_blinded)
1917 bpf_jit_prog_release_other(prog, prog == orig_prog ?
1918 tmp : orig_prog);
1919 kfree(ctx.offsets);
1920 kfree(ctx.reg_val_types);
1921
1922 return prog;
1923
1924out_err:
1925 prog = orig_prog;
1926 if (header)
1927 bpf_jit_binary_free(header);
1928 goto out_normal;
1929}
1930