0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015 #include <linux/errno.h>
0016 #include <linux/filter.h>
0017 #include <linux/bpf.h>
0018 #include <asm/cpu-features.h>
0019 #include <asm/isa-rev.h>
0020 #include <asm/uasm.h>
0021
0022 #include "bpf_jit_comp.h"
0023
0024
/*
 * Drop the low temporary-register aliases before use: under the n64 ABI
 * their encodings are taken by argument registers a4-a7, so the o32-style
 * MIPS_R_T0..T3 names must not be used in this file.
 * NOTE(review): presumed reason — confirm against asm register definitions.
 */
#undef MIPS_R_T0
#undef MIPS_R_T1
#undef MIPS_R_T2
#undef MIPS_R_T3

/* Stack is kept 16-byte aligned (n64 ABI stack alignment) */
#define MIPS_STACK_ALIGNMENT 16

/*
 * Extra JIT-internal registers, indexed past the BPF register numbers:
 * TC holds the tail-call count, ZX holds a 0x00000000ffffffff mask used
 * for zero-extension on CPUs without the dinsu instruction (see
 * emit_zext() and build_prologue()).
 */
#define JIT_REG_TC (MAX_BPF_JIT_REG + 0)
#define JIT_REG_ZX (MAX_BPF_JIT_REG + 1)

/*
 * Number of prologue bytes to skip when doing a tail call: exactly one
 * instruction, the tail-call-count initialization emitted first by
 * build_prologue(). emit_tail_call() adds this to bpf_func.
 */
#define JIT_TCALL_SKIP 4

/* Callee-saved registers the JIT may clobber; saved/restored in the
 * prologue/epilogue via push_regs()/pop_regs().
 */
#define JIT_CALLEE_REGS \
	(BIT(MIPS_R_S0) | \
	 BIT(MIPS_R_S1) | \
	 BIT(MIPS_R_S2) | \
	 BIT(MIPS_R_S3) | \
	 BIT(MIPS_R_S4) | \
	 BIT(MIPS_R_S5) | \
	 BIT(MIPS_R_S6) | \
	 BIT(MIPS_R_S7) | \
	 BIT(MIPS_R_GP) | \
	 BIT(MIPS_R_FP) | \
	 BIT(MIPS_R_RA))

/* Caller-saved registers holding JIT-internal state (e.g. the tail-call
 * count in a5); saved around helper calls in emit_call().
 */
#define JIT_CALLER_REGS	\
	(BIT(MIPS_R_A5) | \
	 BIT(MIPS_R_A6) | \
	 BIT(MIPS_R_A7))
0059
0060
0061
0062
0063
/* Mapping of eBPF registers (plus the two JIT-internal registers) to
 * MIPS64 hardware registers.
 */
static const u8 bpf2mips64[] = {
	/* Return value (also eBPF exit value) */
	[BPF_REG_0] = MIPS_R_V0,
	/* Argument registers, passed straight through to helpers */
	[BPF_REG_1] = MIPS_R_A0,
	[BPF_REG_2] = MIPS_R_A1,
	[BPF_REG_3] = MIPS_R_A2,
	[BPF_REG_4] = MIPS_R_A3,
	[BPF_REG_5] = MIPS_R_A4,
	/* Callee-saved registers, preserved across helper calls */
	[BPF_REG_6] = MIPS_R_S0,
	[BPF_REG_7] = MIPS_R_S1,
	[BPF_REG_8] = MIPS_R_S2,
	[BPF_REG_9] = MIPS_R_S3,
	/* Frame pointer for the eBPF stack (set up in build_prologue) */
	[BPF_REG_FP] = MIPS_R_FP,
	/* Auxiliary register used internally by the BPF core
	 * (presumably constant blinding — not visible from this file)
	 */
	[BPF_REG_AX] = MIPS_R_AT,
	/* Tail-call count, caller-saved (see JIT_CALLER_REGS) */
	[JIT_REG_TC] = MIPS_R_A5,
	/* Zero-extension mask register for pre-r2 CPUs */
	[JIT_REG_ZX] = MIPS_R_V1,
};
0087
0088
0089
0090
0091
0092
0093
0094
0095
0096
0097
/* Sign-extend the low 32 bits of src into dst: on MIPS64 a 32-bit
 * shift (sll, even by 0) writes a sign-extended 32-bit result.
 */
static void emit_sext(struct jit_context *ctx, u8 dst, u8 src)
{
	emit(ctx, sll, dst, src, 0);
	clobber_reg(ctx, dst);
}
0103
0104
/* Zero-extend the low 32 bits of dst to 64 bits */
static void emit_zext(struct jit_context *ctx, u8 dst)
{
	if (cpu_has_mips64r2 || cpu_has_mips64r6) {
		/* r2/r6: clear bits 32..63 in one dinsu */
		emit(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
	} else {
		/* Pre-r2: AND with the 0x00000000ffffffff mask kept in the
		 * ZX register; marking it accessed makes build_prologue()
		 * initialize the mask.
		 */
		emit(ctx, and, dst, dst, bpf2mips64[JIT_REG_ZX]);
		access_reg(ctx, JIT_REG_ZX);
	}
	clobber_reg(ctx, dst);
}
0115
0116
0117 static void emit_zext_ver(struct jit_context *ctx, u8 dst)
0118 {
0119 if (!ctx->program->aux->verifier_zext)
0120 emit_zext(ctx, dst);
0121 }
0122
0123
/* Load a 64-bit immediate into dst, using a short form where possible */
static void emit_mov_i64(struct jit_context *ctx, u8 dst, u64 imm64)
{
	if (imm64 >= 0xffffffffffff8000ULL || imm64 < 0x8000ULL) {
		/* Fits a sign-extended 16-bit immediate: one daddiu */
		emit(ctx, daddiu, dst, MIPS_R_ZERO, (s16)imm64);
	} else if (imm64 >= 0xffffffff80000000ULL ||
		   (imm64 < 0x80000000 && imm64 > 0xffff)) {
		/* Fits a sign-extended 32-bit immediate: lui + ori */
		emit(ctx, lui, dst, (s16)(imm64 >> 16));
		emit(ctx, ori, dst, dst, (u16)imm64 & 0xffff);
	} else {
		/*
		 * General case: build the value 16 bits at a time from the
		 * most significant halfword down. 'acc' stays MIPS_R_ZERO
		 * until the first nonzero halfword lands in dst; 'shift'
		 * accumulates the shift owed for skipped zero halfwords so
		 * a run of zeros costs a single dsll.
		 */
		u8 acc = MIPS_R_ZERO;
		int shift = 0;
		int k;

		for (k = 0; k < 4; k++) {
			u16 half = imm64 >> (48 - 16 * k);

			/* dst holds live bits: owe 16 more bits of shift */
			if (acc == dst)
				shift += 16;

			if (half) {
				/* Catch up any pending shift, then OR in
				 * this halfword.
				 */
				if (shift)
					emit(ctx, dsll_safe, dst, dst, shift);
				emit(ctx, ori, dst, acc, half);
				acc = dst;
				shift = 0;
			}
		}
		/* Trailing zero halfwords: apply the remaining shift */
		if (shift)
			emit(ctx, dsll_safe, dst, dst, shift);
	}
	clobber_reg(ctx, dst);
}
0156
0157
/* ALU immediate operation (64-bit). Callers are expected to have
 * validated the immediate via valid_alu_i() (see build_insn()).
 */
static void emit_alu_i64(struct jit_context *ctx, u8 dst, s32 imm, u8 op)
{
	switch (BPF_OP(op)) {
	/* dst = dst | imm */
	case BPF_OR:
		emit(ctx, ori, dst, dst, (u16)imm);
		break;
	/* dst = dst ^ imm */
	case BPF_XOR:
		emit(ctx, xori, dst, dst, (u16)imm);
		break;
	/* dst = -dst */
	case BPF_NEG:
		emit(ctx, dsubu, dst, MIPS_R_ZERO, dst);
		break;
	/* dst = dst << imm */
	case BPF_LSH:
		emit(ctx, dsll_safe, dst, dst, imm);
		break;
	/* dst = dst >> imm (logical) */
	case BPF_RSH:
		emit(ctx, dsrl_safe, dst, dst, imm);
		break;
	/* dst = dst >> imm (arithmetic) */
	case BPF_ARSH:
		emit(ctx, dsra_safe, dst, dst, imm);
		break;
	/* dst = dst + imm */
	case BPF_ADD:
		emit(ctx, daddiu, dst, dst, imm);
		break;
	/* dst = dst - imm */
	case BPF_SUB:
		emit(ctx, daddiu, dst, dst, -imm);
		break;
	default:
		/* Remaining ops (e.g. AND) are width-agnostic; reuse the
		 * common 32-bit immediate helper.
		 */
		emit_alu_i(ctx, dst, imm, op);
	}
	clobber_reg(ctx, dst);
}
0199
0200
/* ALU register operation (64-bit) */
static void emit_alu_r64(struct jit_context *ctx, u8 dst, u8 src, u8 op)
{
	switch (BPF_OP(op)) {
	/* dst = dst << src */
	case BPF_LSH:
		emit(ctx, dsllv, dst, dst, src);
		break;
	/* dst = dst >> src (logical) */
	case BPF_RSH:
		emit(ctx, dsrlv, dst, dst, src);
		break;
	/* dst = dst >> src (arithmetic) */
	case BPF_ARSH:
		emit(ctx, dsrav, dst, dst, src);
		break;
	/* dst = dst + src */
	case BPF_ADD:
		emit(ctx, daddu, dst, dst, src);
		break;
	/* dst = dst - src */
	case BPF_SUB:
		emit(ctx, dsubu, dst, dst, src);
		break;
	/* dst = dst * src */
	case BPF_MUL:
		if (cpu_has_mips64r6) {
			/* r6 has a three-operand multiply */
			emit(ctx, dmulu, dst, dst, src);
		} else {
			/* Pre-r6: result comes back via the LO register */
			emit(ctx, dmultu, dst, src);
			emit(ctx, mflo, dst);
		}
		break;
	/* dst = dst / src */
	case BPF_DIV:
		if (cpu_has_mips64r6) {
			emit(ctx, ddivu_r6, dst, dst, src);
		} else {
			/* Quotient is in LO */
			emit(ctx, ddivu, dst, src);
			emit(ctx, mflo, dst);
		}
		break;
	/* dst = dst % src */
	case BPF_MOD:
		if (cpu_has_mips64r6) {
			emit(ctx, dmodu, dst, dst, src);
		} else {
			/* Remainder is in HI */
			emit(ctx, ddivu, dst, src);
			emit(ctx, mfhi, dst);
		}
		break;
	default:
		/* Width-agnostic ops (AND/OR/XOR/MOV/NEG) share the common
		 * register helper.
		 */
		emit_alu_r(ctx, dst, src, op);
	}
	clobber_reg(ctx, dst);
}
0257
0258
/* Swap adjacent bit groups of width 'bits' within dst. 'mask' must
 * select the low group of each pair (e.g. 0x0000ffff0000ffff for
 * bits == 16). MIPS_R_T9 is used as scratch; the caller is expected
 * to call clobber_reg() on dst (see emit_bswap_r64()).
 */
static void emit_swap_r64(struct jit_context *ctx, u8 dst, u8 mask, u32 bits)
{
	u8 tmp = MIPS_R_T9;

	emit(ctx, and, tmp, dst, mask);  /* tmp = dst & mask  */
	emit(ctx, dsll, tmp, tmp, bits); /* tmp = tmp << bits */
	emit(ctx, dsrl, dst, dst, bits); /* dst = dst >> bits */
	emit(ctx, and, dst, dst, mask);  /* dst = dst & mask  */
	emit(ctx, or, dst, dst, tmp);    /* dst = dst | tmp   */
}
0269
0270
/* Byte-swap the low 'width' bits of dst (64, 32 or 16) */
static void emit_bswap_r64(struct jit_context *ctx, u8 dst, u32 width)
{
	switch (width) {
	/* Swap all 8 bytes */
	case 64:
		if (cpu_has_mips64r2 || cpu_has_mips64r6) {
			/* dsbh swaps bytes within halfwords, dshd swaps
			 * halfwords within the doubleword: together a full
			 * 64-bit byte swap.
			 */
			emit(ctx, dsbh, dst, dst);
			emit(ctx, dshd, dst, dst);
		} else {
			u8 t1 = MIPS_R_T6;
			u8 t2 = MIPS_R_T7;

			/* Swap the two 32-bit halves */
			emit(ctx, dsll32, t2, dst, 0);  /* t2 = dst << 32  */
			emit(ctx, dsrl32, dst, dst, 0); /* dst = dst >> 32 */
			emit(ctx, or, dst, dst, t2);    /* dst = dst | t2  */

			/* Build mask 0x0000ffff0000ffff and swap 16-bit
			 * groups.
			 */
			emit(ctx, ori, t2, MIPS_R_ZERO, 0xffff);
			emit(ctx, dsll32, t1, t2, 0);
			emit(ctx, or, t1, t1, t2);
			emit_swap_r64(ctx, dst, t1, 16);

			/* Build mask 0x00ff00ff00ff00ff and swap bytes */
			emit(ctx, lui, t2, 0xff);
			emit(ctx, ori, t2, t2, 0xff);
			emit(ctx, dsll32, t1, t2, 0);
			emit(ctx, or, t1, t1, t2);
			emit_swap_r64(ctx, dst, t1, 8);
		}
		break;
	/* Swap the low 4 or 2 bytes via the 32-bit helper; sign-extend
	 * first so the helper sees a canonical 32-bit value, then
	 * zero-extend the result where dinsu makes that cheap.
	 */
	case 32:
	case 16:
		emit_sext(ctx, dst, dst);
		emit_bswap_r(ctx, dst, width);
		if (cpu_has_mips64r2 || cpu_has_mips64r6)
			emit_zext(ctx, dst);
		break;
	}
	clobber_reg(ctx, dst);
}
0311
0312
0313 static void emit_trunc_r64(struct jit_context *ctx, u8 dst, u32 width)
0314 {
0315 switch (width) {
0316 case 64:
0317 break;
0318
0319 case 32:
0320 emit_zext(ctx, dst);
0321 break;
0322
0323 case 16:
0324 emit(ctx, andi, dst, dst, 0xffff);
0325 break;
0326 }
0327 clobber_reg(ctx, dst);
0328 }
0329
0330
0331 static void emit_ldx(struct jit_context *ctx, u8 dst, u8 src, s16 off, u8 size)
0332 {
0333 switch (size) {
0334
0335 case BPF_B:
0336 emit(ctx, lbu, dst, off, src);
0337 break;
0338
0339 case BPF_H:
0340 emit(ctx, lhu, dst, off, src);
0341 break;
0342
0343 case BPF_W:
0344 emit(ctx, lwu, dst, off, src);
0345 break;
0346
0347 case BPF_DW:
0348 emit(ctx, ld, dst, off, src);
0349 break;
0350 }
0351 clobber_reg(ctx, dst);
0352 }
0353
0354
0355 static void emit_stx(struct jit_context *ctx, u8 dst, u8 src, s16 off, u8 size)
0356 {
0357 switch (size) {
0358
0359 case BPF_B:
0360 emit(ctx, sb, src, off, dst);
0361 break;
0362
0363 case BPF_H:
0364 emit(ctx, sh, src, off, dst);
0365 break;
0366
0367 case BPF_W:
0368 emit(ctx, sw, src, off, dst);
0369 break;
0370
0371 case BPF_DW:
0372 emit(ctx, sd, src, off, dst);
0373 break;
0374 }
0375 }
0376
0377
/* Atomic read-modify-write of the 64-bit word at dst + off, built as an
 * LL/SC retry loop. For BPF_FETCH variants the previous memory value is
 * returned in src. The branch offsets are hand-computed byte distances
 * back to the lld; they must be kept in sync with the emitted sequence.
 */
static void emit_atomic_r64(struct jit_context *ctx,
			    u8 dst, u8 src, s16 off, u8 code)
{
	u8 t1 = MIPS_R_T6;
	u8 t2 = MIPS_R_T7;

	LLSC_sync(ctx);
	emit(ctx, lld, t1, off, dst);	/* t1 = old value (load-linked) */
	switch (code) {
	case BPF_ADD:
	case BPF_ADD | BPF_FETCH:
		emit(ctx, daddu, t2, t1, src);
		break;
	case BPF_AND:
	case BPF_AND | BPF_FETCH:
		emit(ctx, and, t2, t1, src);
		break;
	case BPF_OR:
	case BPF_OR | BPF_FETCH:
		emit(ctx, or, t2, t1, src);
		break;
	case BPF_XOR:
	case BPF_XOR | BPF_FETCH:
		emit(ctx, xor, t2, t1, src);
		break;
	case BPF_XCHG:
		emit(ctx, move, t2, src);
		break;
	}
	emit(ctx, scd, t2, off, dst);	/* store-conditional new value */
	/* Retry from the lld if the store-conditional failed (t2 == 0) */
	emit(ctx, LLSC_beqz, t2, -16 - LLSC_offset);
	emit(ctx, nop);			/* branch delay slot */

	if (code & BPF_FETCH) {
		/* Return the old value in the src register */
		emit(ctx, move, src, t1);
		clobber_reg(ctx, src);
	}
}
0416
0417
/* Atomic 64-bit compare-and-exchange at dst + off: if the memory word
 * equals BPF_REG_0 (r0), store src; either way, return the old value
 * in r0. Branch offsets are hand-computed against the fixed sequence.
 */
static void emit_cmpxchg_r64(struct jit_context *ctx, u8 dst, u8 src, s16 off)
{
	u8 r0 = bpf2mips64[BPF_REG_0];
	u8 t1 = MIPS_R_T6;
	u8 t2 = MIPS_R_T7;

	LLSC_sync(ctx);
	emit(ctx, lld, t1, off, dst);		/* t1 = old value */
	emit(ctx, bne, t1, r0, 12);		/* no match: skip the store */
	emit(ctx, move, t2, src);		/* delay slot: t2 = new value */
	emit(ctx, scd, t2, off, dst);		/* attempt the store */
	/* Retry from the lld if the store-conditional failed */
	emit(ctx, LLSC_beqz, t2, -20 - LLSC_offset);
	emit(ctx, move, r0, t1);		/* return old value in r0 */

	clobber_reg(ctx, r0);
}
0434
0435
0436 static int emit_call(struct jit_context *ctx, const struct bpf_insn *insn)
0437 {
0438 u8 zx = bpf2mips64[JIT_REG_ZX];
0439 u8 tmp = MIPS_R_T6;
0440 bool fixed;
0441 u64 addr;
0442
0443
0444 if (bpf_jit_get_func_addr(ctx->program, insn, false,
0445 &addr, &fixed) < 0)
0446 return -1;
0447 if (!fixed)
0448 return -1;
0449
0450
0451 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, 0, 0);
0452
0453
0454 emit_mov_i64(ctx, tmp, addr & JALR_MASK);
0455 emit(ctx, jalr, MIPS_R_RA, tmp);
0456 emit(ctx, nop);
0457
0458
0459 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, 0, 0);
0460
0461
0462 if (ctx->accessed & BIT(JIT_REG_ZX)) {
0463 emit(ctx, daddiu, zx, MIPS_R_ZERO, -1);
0464 emit(ctx, dsrl32, zx, zx, 0);
0465 }
0466
0467 clobber_reg(ctx, MIPS_R_RA);
0468 clobber_reg(ctx, MIPS_R_V0);
0469 clobber_reg(ctx, MIPS_R_V1);
0470 return 0;
0471 }
0472
0473
0474 static int emit_tail_call(struct jit_context *ctx)
0475 {
0476 u8 ary = bpf2mips64[BPF_REG_2];
0477 u8 ind = bpf2mips64[BPF_REG_3];
0478 u8 tcc = bpf2mips64[JIT_REG_TC];
0479 u8 tmp = MIPS_R_T6;
0480 int off;
0481
0482
0483
0484
0485
0486
0487
0488
0489
0490 off = offsetof(struct bpf_array, map.max_entries);
0491 if (off > 0x7fff)
0492 return -1;
0493 emit(ctx, lwu, tmp, off, ary);
0494 emit(ctx, sltu, tmp, ind, tmp);
0495 emit(ctx, beqz, tmp, get_offset(ctx, 1));
0496
0497
0498 emit(ctx, daddiu, tcc, tcc, -1);
0499 emit(ctx, bltz, tcc, get_offset(ctx, 1));
0500
0501
0502 off = offsetof(struct bpf_array, ptrs);
0503 if (off > 0x7fff)
0504 return -1;
0505 emit(ctx, dsll, tmp, ind, 3);
0506 emit(ctx, daddu, tmp, tmp, ary);
0507 emit(ctx, ld, tmp, off, tmp);
0508
0509
0510 emit(ctx, beqz, tmp, get_offset(ctx, 1));
0511 emit(ctx, nop);
0512
0513
0514 off = offsetof(struct bpf_prog, bpf_func);
0515 if (off > 0x7fff)
0516 return -1;
0517 emit(ctx, ld, tmp, off, tmp);
0518 emit(ctx, daddiu, tmp, tmp, JIT_TCALL_SKIP);
0519
0520
0521 build_epilogue(ctx, tmp);
0522 access_reg(ctx, JIT_REG_TC);
0523 return 0;
0524 }
0525
0526
0527
0528
0529
0530
0531
0532
0533
0534
0535
0536
0537
0538
0539
0540
0541
0542
/* Build the program prologue: initialize the tail-call count, allocate
 * the stack frame, save clobbered callee-saved registers, and set up the
 * BPF frame pointer and the zero-extension mask if used.
 *
 * The TCC initialization MUST be the first instruction: a tail call
 * enters JIT_TCALL_SKIP bytes in, skipping exactly that one instruction
 * so the count carries over from the calling program.
 */
void build_prologue(struct jit_context *ctx)
{
	u8 fp = bpf2mips64[BPF_REG_FP];
	u8 tc = bpf2mips64[JIT_REG_TC];
	u8 zx = bpf2mips64[JIT_REG_ZX];
	int stack, saved, locals, reserved;

	/* First instruction: TCC = MAX_TAIL_CALL_CNT (clamped to the
	 * 16-bit ori immediate).
	 */
	emit(ctx, ori, tc, MIPS_R_ZERO, min(MAX_TAIL_CALL_CNT, 0xffff));

	/* Registers only implicitly clobbered by register usage must be
	 * marked clobbered so they are saved/initialized below.
	 */
	if (ctx->accessed & BIT(BPF_REG_FP))
		clobber_reg(ctx, fp);
	if (ctx->accessed & BIT(JIT_REG_TC))
		clobber_reg(ctx, tc);
	if (ctx->accessed & BIT(JIT_REG_ZX))
		clobber_reg(ctx, zx);

	/* Space for clobbered callee-saved registers, kept aligned */
	saved = hweight32(ctx->clobbered & JIT_CALLEE_REGS) * sizeof(u64);
	saved = ALIGN(saved, MIPS_STACK_ALIGNMENT);

	/* Space for the program's own stack (BPF_REG_FP area) */
	locals = ALIGN(ctx->program->aux->stack_depth, MIPS_STACK_ALIGNMENT);

	/* Extra space already requested by emitted code (ctx->stack_used) */
	reserved = ctx->stack_used;

	/* Allocate the whole frame in one adjustment */
	stack = ALIGN(saved + locals + reserved, MIPS_STACK_ALIGNMENT);
	if (stack)
		emit(ctx, daddiu, MIPS_R_SP, MIPS_R_SP, -stack);

	/* Save callee-saved registers at the top of the frame */
	push_regs(ctx, ctx->clobbered & JIT_CALLEE_REGS, 0, stack - saved);

	/* BPF frame pointer = just below the saved-register area */
	if (ctx->accessed & BIT(BPF_REG_FP))
		emit(ctx, daddiu, fp, MIPS_R_SP, stack - saved);

	/* Initialize the zero-extension mask: 0x00000000ffffffff */
	if (ctx->accessed & BIT(JIT_REG_ZX)) {
		emit(ctx, daddiu, zx, MIPS_R_ZERO, -1);
		emit(ctx, dsrl32, zx, zx, 0);
	}

	ctx->saved_size = saved;
	ctx->stack_size = stack;
}
0606
0607
/* Build the program epilogue: restore saved registers, release the
 * stack frame and jump to dest_reg (MIPS_R_RA for a normal return, or
 * the target program for a tail call). The delay slot sign-extends the
 * 32-bit return value in v0.
 */
void build_epilogue(struct jit_context *ctx, int dest_reg)
{
	/* Restore callee-saved registers from the top of the frame */
	pop_regs(ctx, ctx->clobbered & JIT_CALLEE_REGS, 0,
		 ctx->stack_size - ctx->saved_size);

	/* Release the stack frame */
	if (ctx->stack_size)
		emit(ctx, daddiu, MIPS_R_SP, MIPS_R_SP, ctx->stack_size);

	/* Jump out, sign-extending v0 in the delay slot */
	emit(ctx, jr, dest_reg);
	emit(ctx, sll, MIPS_R_V0, MIPS_R_V0, 0);
}
0622
0623
/* Translate one eBPF instruction into MIPS64 machine code.
 * Returns 0 on success, 1 when a second instruction slot was consumed
 * (64-bit immediate load), or a negative errno on failure.
 */
int build_insn(const struct bpf_insn *insn, struct jit_context *ctx)
{
	u8 dst = bpf2mips64[insn->dst_reg];
	u8 src = bpf2mips64[insn->src_reg];
	u8 res = bpf2mips64[BPF_REG_0];
	u8 code = insn->code;
	s16 off = insn->off;
	s32 imm = insn->imm;
	s32 val, rel;
	u8 alu, jmp;

	switch (code) {
	/* ALU operations: 32 bit, result zero-extended (unless the
	 * verifier has proven the zero-extension redundant).
	 */
	/* dst = imm */
	case BPF_ALU | BPF_MOV | BPF_K:
		emit_mov_i(ctx, dst, imm);
		emit_zext_ver(ctx, dst);
		break;
	/* dst = src */
	case BPF_ALU | BPF_MOV | BPF_X:
		if (imm == 1) {
			/* Special mov32 used by the verifier for explicit
			 * zero-extension of dst.
			 */
			emit_zext(ctx, dst);
		} else {
			emit_mov_r(ctx, dst, src);
			emit_zext_ver(ctx, dst);
		}
		break;
	/* dst = -dst */
	case BPF_ALU | BPF_NEG:
		emit_sext(ctx, dst, dst);
		emit_alu_i(ctx, dst, 0, BPF_NEG);
		emit_zext_ver(ctx, dst);
		break;
	/* dst = dst & imm; dst = dst | imm; dst = dst ^ imm;
	 * dst = dst << imm
	 * These ops don't care about the sign of the upper bits, so no
	 * pre-sign-extension is needed.
	 */
	case BPF_ALU | BPF_OR | BPF_K:
	case BPF_ALU | BPF_AND | BPF_K:
	case BPF_ALU | BPF_XOR | BPF_K:
	case BPF_ALU | BPF_LSH | BPF_K:
		if (!valid_alu_i(BPF_OP(code), imm)) {
			/* Immediate doesn't fit: go via a temp register */
			emit_mov_i(ctx, MIPS_R_T4, imm);
			emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));
		} else if (rewrite_alu_i(BPF_OP(code), imm, &alu, &val)) {
			/* Possibly rewritten op/immediate; may be a no-op */
			emit_alu_i(ctx, dst, val, alu);
		}
		emit_zext_ver(ctx, dst);
		break;
	/* dst = dst >> imm; dst = dst >>(s) imm; dst = dst + imm;
	 * dst = dst - imm; dst = dst * imm; dst = dst / imm;
	 * dst = dst % imm
	 * These ops depend on a canonical sign-extended 32-bit dst.
	 */
	case BPF_ALU | BPF_RSH | BPF_K:
	case BPF_ALU | BPF_ARSH | BPF_K:
	case BPF_ALU | BPF_ADD | BPF_K:
	case BPF_ALU | BPF_SUB | BPF_K:
	case BPF_ALU | BPF_MUL | BPF_K:
	case BPF_ALU | BPF_DIV | BPF_K:
	case BPF_ALU | BPF_MOD | BPF_K:
		if (!valid_alu_i(BPF_OP(code), imm)) {
			emit_sext(ctx, dst, dst);
			emit_mov_i(ctx, MIPS_R_T4, imm);
			emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));
		} else if (rewrite_alu_i(BPF_OP(code), imm, &alu, &val)) {
			emit_sext(ctx, dst, dst);
			emit_alu_i(ctx, dst, val, alu);
		}
		emit_zext_ver(ctx, dst);
		break;
	/* dst = dst & src; dst = dst | src; dst = dst ^ src;
	 * dst = dst << src
	 * Sign of upper bits is irrelevant for these ops.
	 */
	case BPF_ALU | BPF_AND | BPF_X:
	case BPF_ALU | BPF_OR | BPF_X:
	case BPF_ALU | BPF_XOR | BPF_X:
	case BPF_ALU | BPF_LSH | BPF_X:
		emit_alu_r(ctx, dst, src, BPF_OP(code));
		emit_zext_ver(ctx, dst);
		break;
	/* dst = dst >> src; dst = dst >>(s) src; dst = dst + src;
	 * dst = dst - src; dst = dst * src; dst = dst / src;
	 * dst = dst % src
	 * Both operands are sign-extended first (src into a temp so the
	 * original src register is preserved).
	 */
	case BPF_ALU | BPF_RSH | BPF_X:
	case BPF_ALU | BPF_ARSH | BPF_X:
	case BPF_ALU | BPF_ADD | BPF_X:
	case BPF_ALU | BPF_SUB | BPF_X:
	case BPF_ALU | BPF_MUL | BPF_X:
	case BPF_ALU | BPF_DIV | BPF_X:
	case BPF_ALU | BPF_MOD | BPF_X:
		emit_sext(ctx, dst, dst);
		emit_sext(ctx, MIPS_R_T4, src);
		emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));
		emit_zext_ver(ctx, dst);
		break;
	/* dst = imm (64-bit, sign-extended) */
	case BPF_ALU64 | BPF_MOV | BPF_K:
		emit_mov_i(ctx, dst, imm);
		break;
	/* dst = src (64-bit) */
	case BPF_ALU64 | BPF_MOV | BPF_X:
		emit_mov_r(ctx, dst, src);
		break;
	/* dst = -dst (64-bit) */
	case BPF_ALU64 | BPF_NEG:
		emit_alu_i64(ctx, dst, 0, BPF_NEG);
		break;
	/* dst = dst (op) imm (64-bit) */
	case BPF_ALU64 | BPF_AND | BPF_K:
	case BPF_ALU64 | BPF_OR | BPF_K:
	case BPF_ALU64 | BPF_XOR | BPF_K:
	case BPF_ALU64 | BPF_LSH | BPF_K:
	case BPF_ALU64 | BPF_RSH | BPF_K:
	case BPF_ALU64 | BPF_ARSH | BPF_K:
	case BPF_ALU64 | BPF_ADD | BPF_K:
	case BPF_ALU64 | BPF_SUB | BPF_K:
	case BPF_ALU64 | BPF_MUL | BPF_K:
	case BPF_ALU64 | BPF_DIV | BPF_K:
	case BPF_ALU64 | BPF_MOD | BPF_K:
		if (!valid_alu_i(BPF_OP(code), imm)) {
			/* Immediate doesn't fit: go via a temp register */
			emit_mov_i(ctx, MIPS_R_T4, imm);
			emit_alu_r64(ctx, dst, MIPS_R_T4, BPF_OP(code));
		} else if (rewrite_alu_i(BPF_OP(code), imm, &alu, &val)) {
			/* Possibly rewritten op/immediate; may be a no-op */
			emit_alu_i64(ctx, dst, val, alu);
		}
		break;
	/* dst = dst (op) src (64-bit) */
	case BPF_ALU64 | BPF_AND | BPF_X:
	case BPF_ALU64 | BPF_OR | BPF_X:
	case BPF_ALU64 | BPF_XOR | BPF_X:
	case BPF_ALU64 | BPF_LSH | BPF_X:
	case BPF_ALU64 | BPF_RSH | BPF_X:
	case BPF_ALU64 | BPF_ARSH | BPF_X:
	case BPF_ALU64 | BPF_ADD | BPF_X:
	case BPF_ALU64 | BPF_SUB | BPF_X:
	case BPF_ALU64 | BPF_MUL | BPF_X:
	case BPF_ALU64 | BPF_DIV | BPF_X:
	case BPF_ALU64 | BPF_MOD | BPF_X:
		emit_alu_r64(ctx, dst, src, BPF_OP(code));
		break;
	/* dst = htole(dst); dst = htobe(dst)
	 * A swap is needed only when the requested byte order differs
	 * from the host's; otherwise truncate/zero-extend to 'imm' bits.
	 */
	case BPF_ALU | BPF_END | BPF_FROM_LE:
	case BPF_ALU | BPF_END | BPF_FROM_BE:
		if (BPF_SRC(code) ==
#ifdef __BIG_ENDIAN
		    BPF_FROM_LE
#else
		    BPF_FROM_BE
#endif
		    )
			emit_bswap_r64(ctx, dst, imm);
		else
			emit_trunc_r64(ctx, dst, imm);
		break;
	/* dst = imm64: consumes the next instruction slot too */
	case BPF_LD | BPF_IMM | BPF_DW:
		emit_mov_i64(ctx, dst, (u32)imm | ((u64)insn[1].imm << 32));
		return 1;
	/* LDX: dst = *(size *)(src + off) */
	case BPF_LDX | BPF_MEM | BPF_W:
	case BPF_LDX | BPF_MEM | BPF_H:
	case BPF_LDX | BPF_MEM | BPF_B:
	case BPF_LDX | BPF_MEM | BPF_DW:
		emit_ldx(ctx, dst, src, off, BPF_SIZE(code));
		break;
	/* ST: *(size *)(dst + off) = imm */
	case BPF_ST | BPF_MEM | BPF_W:
	case BPF_ST | BPF_MEM | BPF_H:
	case BPF_ST | BPF_MEM | BPF_B:
	case BPF_ST | BPF_MEM | BPF_DW:
		emit_mov_i(ctx, MIPS_R_T4, imm);
		emit_stx(ctx, dst, MIPS_R_T4, off, BPF_SIZE(code));
		break;
	/* STX: *(size *)(dst + off) = src */
	case BPF_STX | BPF_MEM | BPF_W:
	case BPF_STX | BPF_MEM | BPF_H:
	case BPF_STX | BPF_MEM | BPF_B:
	case BPF_STX | BPF_MEM | BPF_DW:
		emit_stx(ctx, dst, src, off, BPF_SIZE(code));
		break;
	/* Speculation barrier: no-op on this target */
	case BPF_ST | BPF_NOSPEC:
		break;
	/* Atomics: *(size *)(dst + off) (op)= src */
	case BPF_STX | BPF_ATOMIC | BPF_W:
	case BPF_STX | BPF_ATOMIC | BPF_DW:
		switch (imm) {
		case BPF_ADD:
		case BPF_ADD | BPF_FETCH:
		case BPF_AND:
		case BPF_AND | BPF_FETCH:
		case BPF_OR:
		case BPF_OR | BPF_FETCH:
		case BPF_XOR:
		case BPF_XOR | BPF_FETCH:
		case BPF_XCHG:
			if (BPF_SIZE(code) == BPF_DW) {
				emit_atomic_r64(ctx, dst, src, off, imm);
			} else if (imm & BPF_FETCH) {
				/* 32-bit fetch: the old value comes back
				 * in src, so preserve dst in a temp when
				 * the registers alias.
				 */
				u8 tmp = dst;

				if (src == dst) {
					emit_mov_r(ctx, MIPS_R_T4, dst);
					tmp = MIPS_R_T4;
				}
				emit_sext(ctx, src, src);
				emit_atomic_r(ctx, tmp, src, off, imm);
				emit_zext_ver(ctx, src);
			} else {
				/* 32-bit, no fetch: sign-extend src into a
				 * temp so src itself is untouched.
				 */
				emit_sext(ctx, MIPS_R_T4, src);
				emit_atomic_r(ctx, dst, MIPS_R_T4, off, imm);
			}
			break;
		case BPF_CMPXCHG:
			if (BPF_SIZE(code) == BPF_DW) {
				emit_cmpxchg_r64(ctx, dst, src, off);
			} else {
				/* 32-bit cmpxchg compares against r0; use
				 * temps to avoid clobbering r0/dst when
				 * they alias.
				 */
				u8 tmp = res;

				if (res == dst)
					tmp = MIPS_R_T4;
				emit_sext(ctx, tmp, res);
				emit_sext(ctx, MIPS_R_T5, src);
				emit_cmpxchg_r(ctx, dst, MIPS_R_T5, tmp, off);
				if (res == dst)
					emit_mov_r(ctx, res, MIPS_R_T4);

			}
			break;
		default:
			goto notyet;
		}
		break;
	/* PC += off if dst (cond) src (32-bit)
	 * Both operands are sign-extended into temps first so the
	 * original registers are untouched.
	 */
	case BPF_JMP32 | BPF_JEQ | BPF_X:
	case BPF_JMP32 | BPF_JNE | BPF_X:
	case BPF_JMP32 | BPF_JSET | BPF_X:
	case BPF_JMP32 | BPF_JGT | BPF_X:
	case BPF_JMP32 | BPF_JGE | BPF_X:
	case BPF_JMP32 | BPF_JLT | BPF_X:
	case BPF_JMP32 | BPF_JLE | BPF_X:
	case BPF_JMP32 | BPF_JSGT | BPF_X:
	case BPF_JMP32 | BPF_JSGE | BPF_X:
	case BPF_JMP32 | BPF_JSLT | BPF_X:
	case BPF_JMP32 | BPF_JSLE | BPF_X:
		if (off == 0)
			break;
		setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel);
		emit_sext(ctx, MIPS_R_T4, dst); /* Sign-extended dst */
		emit_sext(ctx, MIPS_R_T5, src); /* Sign-extended src */
		emit_jmp_r(ctx, MIPS_R_T4, MIPS_R_T5, rel, jmp);
		if (finish_jmp(ctx, jmp, off) < 0)
			goto toofar;
		break;
	/* PC += off if dst (cond) imm (32-bit) */
	case BPF_JMP32 | BPF_JEQ | BPF_K:
	case BPF_JMP32 | BPF_JNE | BPF_K:
	case BPF_JMP32 | BPF_JSET | BPF_K:
	case BPF_JMP32 | BPF_JGT | BPF_K:
	case BPF_JMP32 | BPF_JGE | BPF_K:
	case BPF_JMP32 | BPF_JLT | BPF_K:
	case BPF_JMP32 | BPF_JLE | BPF_K:
	case BPF_JMP32 | BPF_JSGT | BPF_K:
	case BPF_JMP32 | BPF_JSGE | BPF_K:
	case BPF_JMP32 | BPF_JSLT | BPF_K:
	case BPF_JMP32 | BPF_JSLE | BPF_K:
		if (off == 0)
			break;
		setup_jmp_i(ctx, imm, 32, BPF_OP(code), off, &jmp, &rel);
		emit_sext(ctx, MIPS_R_T4, dst); /* Sign-extended dst */
		if (valid_jmp_i(jmp, imm)) {
			emit_jmp_i(ctx, MIPS_R_T4, imm, rel, jmp);
		} else {
			/* Immediate doesn't fit: compare via a register */
			emit_mov_i(ctx, MIPS_R_T5, imm);
			emit_jmp_r(ctx, MIPS_R_T4, MIPS_R_T5, rel, jmp);
		}
		if (finish_jmp(ctx, jmp, off) < 0)
			goto toofar;
		break;
	/* PC += off if dst (cond) src (64-bit) */
	case BPF_JMP | BPF_JEQ | BPF_X:
	case BPF_JMP | BPF_JNE | BPF_X:
	case BPF_JMP | BPF_JSET | BPF_X:
	case BPF_JMP | BPF_JGT | BPF_X:
	case BPF_JMP | BPF_JGE | BPF_X:
	case BPF_JMP | BPF_JLT | BPF_X:
	case BPF_JMP | BPF_JLE | BPF_X:
	case BPF_JMP | BPF_JSGT | BPF_X:
	case BPF_JMP | BPF_JSGE | BPF_X:
	case BPF_JMP | BPF_JSLT | BPF_X:
	case BPF_JMP | BPF_JSLE | BPF_X:
		if (off == 0)
			break;
		setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel);
		emit_jmp_r(ctx, dst, src, rel, jmp);
		if (finish_jmp(ctx, jmp, off) < 0)
			goto toofar;
		break;
	/* PC += off if dst (cond) imm (64-bit) */
	case BPF_JMP | BPF_JEQ | BPF_K:
	case BPF_JMP | BPF_JNE | BPF_K:
	case BPF_JMP | BPF_JSET | BPF_K:
	case BPF_JMP | BPF_JGT | BPF_K:
	case BPF_JMP | BPF_JGE | BPF_K:
	case BPF_JMP | BPF_JLT | BPF_K:
	case BPF_JMP | BPF_JLE | BPF_K:
	case BPF_JMP | BPF_JSGT | BPF_K:
	case BPF_JMP | BPF_JSGE | BPF_K:
	case BPF_JMP | BPF_JSLT | BPF_K:
	case BPF_JMP | BPF_JSLE | BPF_K:
		if (off == 0)
			break;
		setup_jmp_i(ctx, imm, 64, BPF_OP(code), off, &jmp, &rel);
		if (valid_jmp_i(jmp, imm)) {
			emit_jmp_i(ctx, dst, imm, rel, jmp);
		} else {
			/* Immediate doesn't fit: compare via a register */
			emit_mov_i(ctx, MIPS_R_T4, imm);
			emit_jmp_r(ctx, dst, MIPS_R_T4, rel, jmp);
		}
		if (finish_jmp(ctx, jmp, off) < 0)
			goto toofar;
		break;
	/* PC += off (unconditional) */
	case BPF_JMP | BPF_JA:
		if (off == 0)
			break;
		if (emit_ja(ctx, off) < 0)
			goto toofar;
		break;
	/* Tail call */
	case BPF_JMP | BPF_TAIL_CALL:
		if (emit_tail_call(ctx) < 0)
			goto invalid;
		break;
	/* Function call */
	case BPF_JMP | BPF_CALL:
		if (emit_call(ctx, insn) < 0)
			goto invalid;
		break;
	/* Function return: if this is the last instruction, fall through
	 * to the epilogue; otherwise jump to it.
	 */
	case BPF_JMP | BPF_EXIT:
		if (ctx->bpf_index == ctx->program->len - 1)
			break;
		if (emit_exit(ctx) < 0)
			goto toofar;
		break;

	default:
invalid:
		pr_err_once("unknown opcode %02x\n", code);
		return -EINVAL;
notyet:
		pr_info_once("*** NOT YET: opcode %02x ***\n", code);
		return -EFAULT;
toofar:
		pr_info_once("*** TOO FAR: jump at %u opcode %02x ***\n",
			     ctx->bpf_index, code);
		return -E2BIG;
	}
	return 0;
}