/linux/samples/bpf/

bpf_insn.h
     12  .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
     20  .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
     30  .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
     38  .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
    177  .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
    187  .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
    197  .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
    207  .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
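
The BPF_ALU64/BPF_ALU and BPF_JMP/BPF_JMP32 macro families above OR the operation selected by BPF_OP(OP) into the opcode byte of a struct bpf_insn. A minimal user-space sketch of the resulting encoding, assuming only the installed UAPI headers (<linux/bpf.h>); the program below is illustrative and not part of the samples:

#include <linux/bpf.h>
#include <stdio.h>

int main(void)
{
        /* Same composition as the BPF_ALU64_REG()-style macros above:
         * class (BPF_ALU64 = 0x07) | op (BPF_ADD = 0x00) | source (BPF_X = 0x08). */
        struct bpf_insn insn = {
                .code    = BPF_ALU64 | BPF_OP(BPF_ADD) | BPF_X,
                .dst_reg = BPF_REG_0,
                .src_reg = BPF_REG_1,
        };

        /* Prints code=0x0f op=0x00: BPF_OP() recovers the operation nibble. */
        printf("code=0x%02x op=0x%02x\n", insn.code, BPF_OP(insn.code));
        return 0;
}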
|
/linux/kernel/bpf/

disasm.c
    139  if (BPF_OP(insn->code) == BPF_END) {   in print_bpf_insn()
    144  } else if (BPF_OP(insn->code) == BPF_NEG) {   in print_bpf_insn()
    153  bpf_alu_string[BPF_OP(insn->code) >> 4],   in print_bpf_insn()
    160  bpf_alu_string[BPF_OP(insn->code) >> 4],   in print_bpf_insn()
    177  bpf_alu_string[BPF_OP(insn->imm) >> 4],   in print_bpf_insn()
    187  bpf_atomic_alu_string[BPF_OP(insn->imm) >> 4],   in print_bpf_insn()
    262  u8 opcode = BPF_OP(insn->code);   in print_bpf_insn()
    289  bpf_jmp_string[BPF_OP(insn->code) >> 4],   in print_bpf_insn()
    297  bpf_jmp_string[BPF_OP(insn->code) >> 4],   in print_bpf_insn()
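
print_bpf_insn() shifts the nibble returned by BPF_OP() right by four so it can index compact string tables such as bpf_alu_string[] and bpf_jmp_string[]. A sketch of that indexing scheme; the table contents below are an illustrative assumption, not a copy of the kernel's tables:

#include <linux/bpf.h>

/* One slot per possible ALU op nibble (0x0 .. 0xd). */
static const char * const alu_op_str[16] = {
        [BPF_ADD >> 4]  = "+=",
        [BPF_SUB >> 4]  = "-=",
        [BPF_MUL >> 4]  = "*=",
        [BPF_DIV >> 4]  = "/=",
        [BPF_OR  >> 4]  = "|=",
        [BPF_AND >> 4]  = "&=",
        [BPF_LSH >> 4]  = "<<=",
        [BPF_RSH >> 4]  = ">>=",
        [BPF_NEG >> 4]  = "neg",
        [BPF_MOD >> 4]  = "%=",
        [BPF_XOR >> 4]  = "^=",
        [BPF_MOV >> 4]  = "=",
        [BPF_ARSH >> 4] = "s>>=",
        [BPF_END >> 4]  = "endian",
};

static const char *alu_op_name(__u8 code)
{
        return alu_op_str[BPF_OP(code) >> 4];   /* e.g. BPF_XOR (0xa0) -> index 0xa */
}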
|
verifier.c
   2007  if (BPF_OP(code) == BPF_EXIT || BPF_OP(code) == BPF_CALL)   in check_subprogs()
   2096  op = BPF_OP(code);   in is_reg64()
   2330  u8 opcode = BPF_OP(insn->code);   in backtrack_insn()
   6972  u8 opcode = BPF_OP(insn->code);   in sanitize_ptr_alu()
   7192  u8 opcode = BPF_OP(insn->code);   in adjust_ptr_min_max_vals()
   7986  u8 opcode = BPF_OP(insn->code);   in adjust_scalar_min_max_vals()
   8151  u8 opcode = BPF_OP(insn->code);   in adjust_reg_min_max_vals()
   8228  u8 opcode = BPF_OP(insn->code);   in check_alu_op()
   9052  switch (BPF_OP(insn->code)) {   in try_match_pkt_pointers()
   9175  u8 opcode = BPF_OP(insn->code);   in check_cond_jmp_op()
         [all …]
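
The op nibble is only meaningful relative to the instruction class: BPF_CALL shares the value 0x80 with BPF_NEG and BPF_EXIT shares 0x90 with BPF_MOD, which is why verifier code such as check_subprogs() inspects BPF_OP() only once the class is known. A hypothetical helper illustrating that pattern (not the verifier's own code):

#include <linux/bpf.h>
#include <stdbool.h>

/* True for instructions that leave the current function: 'exit' or a call. */
static bool insn_is_exit_or_call(__u8 code)
{
        if (BPF_CLASS(code) != BPF_JMP) /* CALL/EXIT exist only in BPF_JMP, not BPF_JMP32 */
                return false;
        return BPF_OP(code) == BPF_EXIT || BPF_OP(code) == BPF_CALL;
}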
|
core.c
    403  BPF_OP(code) == BPF_EXIT)   in bpf_adj_branches()
    406  if (BPF_OP(code) == BPF_CALL) {   in bpf_adj_branches()
|
/linux/tools/include/linux/

filter.h
     36  .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
     44  .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
     54  .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
     62  .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
    212  .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
    222  .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
    232  .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
    242  .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
|
/linux/arch/mips/net/

bpf_jit_comp64.c
    160  switch (BPF_OP(op)) {   in emit_alu_i64()
    203  switch (BPF_OP(op)) {   in emit_alu_r64()
    666  if (!valid_alu_i(BPF_OP(code), imm)) {   in build_insn()
    668  emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));   in build_insn()
    688  if (!valid_alu_i(BPF_OP(code), imm)) {   in build_insn()
    691  emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));   in build_insn()
    706  emit_alu_r(ctx, dst, src, BPF_OP(code));   in build_insn()
    725  emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));   in build_insn()
    762  if (!valid_alu_i(BPF_OP(code), imm)) {   in build_insn()
    764  emit_alu_r64(ctx, dst, MIPS_R_T4, BPF_OP(code));   in build_insn()
         [all …]
|
bpf_jit_comp32.c
    270  switch (BPF_OP(op)) {   in emit_alu_r64()
    324  switch (BPF_OP(op)) {   in emit_shift_i64()
    376  switch (BPF_OP(op)) {   in emit_shift_r64()
    546  switch (BPF_OP(op)) {   in emit_divmod_r64()
   1512  if (!valid_alu_i(BPF_OP(code), imm)) {   in build_insn()
   1560  emit_alu_i64(ctx, dst, imm, BPF_OP(code));   in build_insn()
   1571  emit_alu_i64(ctx, dst, imm, BPF_OP(code));   in build_insn()
   1580  emit_shift_i64(ctx, dst, imm, BPF_OP(code));   in build_insn()
   1595  emit_divmod_r64(ctx, dst, tmp, BPF_OP(code));   in build_insn()
   1607  emit_alu_r64(ctx, dst, src, BPF_OP(code));   in build_insn()
         [all …]
|
bpf_jit_comp.c
    213  switch (BPF_OP(op)) {   in valid_alu_i()
    247  switch (BPF_OP(op)) {   in rewrite_alu_i()
    296  switch (BPF_OP(op)) {   in emit_alu_i()
    340  switch (BPF_OP(op)) {   in emit_alu_r()
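
valid_alu_i(), emit_alu_i() and emit_alu_r() all branch on BPF_OP() to pick a native MIPS instruction, falling back to the register form via a temporary (MIPS_R_T4) when an immediate cannot be encoded. The mapping below is a schematic illustration of that dispatch, with mnemonics chosen for readability rather than copied from the JIT:

#include <linux/bpf.h>

/* Schematic only: map a BPF ALU op to a plausible MIPS register-form mnemonic. */
static const char *alu_r_mnemonic(__u8 op)
{
        switch (BPF_OP(op)) {
        case BPF_ADD: return "addu";
        case BPF_SUB: return "subu";
        case BPF_AND: return "and";
        case BPF_OR:  return "or";
        case BPF_XOR: return "xor";
        case BPF_LSH: return "sllv";
        case BPF_RSH: return "srlv";
        default:      return "?";       /* mul/div/mod etc. need longer sequences */
        }
}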
|
/linux/include/linux/

filter.h
     94  .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
    102  .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
    112  .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
    120  .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
    307  .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
    317  .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
    327  .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
    337  .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
|
/linux/tools/include/uapi/linux/

bpf_common.h
     31  #define BPF_OP(code) ((code) & 0xf0)   (macro definition)
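
BPF_OP() is one of three field extractors that together decompose the opcode byte: BPF_CLASS() takes the low three bits, BPF_SRC() bit 3, and BPF_OP() the high nibble. A small user-space sketch of the split, assuming the UAPI headers are installed:

#include <linux/bpf.h>  /* pulls in linux/bpf_common.h */
#include <stdio.h>

int main(void)
{
        __u8 code = BPF_ALU64 | BPF_ADD | BPF_X;        /* 64-bit register add */

        printf("class=0x%02x op=0x%02x src=0x%02x\n",
               BPF_CLASS(code),         /* 0x07: instruction class           */
               BPF_OP(code),            /* 0x00: ALU/JMP operation (BPF_ADD) */
               BPF_SRC(code));          /* 0x08: BPF_X, register source      */
        return 0;
}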
|
/linux/include/uapi/linux/

bpf_common.h
     31  #define BPF_OP(code) ((code) & 0xf0)   (macro definition)
|
/linux/arch/x86/net/

bpf_jit_comp32.c
    540  switch (BPF_OP(op)) {   in emit_ia32_alu_r()
   1740  emit_ia32_alu_r64(is64, BPF_OP(code), dst,   in do_jit()
   1745  emit_ia32_alu_i64(is64, BPF_OP(code), dst,   in do_jit()
   1775  emit_ia32_shift_r(BPF_OP(code), dst_lo, src_lo,   in do_jit()
   1782  emit_ia32_shift_r(BPF_OP(code), dst_lo,   in do_jit()
   1798  emit_ia32_div_mod_r(BPF_OP(code), dst_lo,   in do_jit()
   1805  emit_ia32_div_mod_r(BPF_OP(code), dst_lo,   in do_jit()
   1826  emit_ia32_shift_r(BPF_OP(code), dst_lo, IA32_ECX, dstk,   in do_jit()
   1863  emit_ia32_alu_i(is64, false, BPF_OP(code),   in do_jit()
   2411  jmp_cond = get_cond_jmp_opcode(BPF_OP(code), true);   in do_jit()
         [all …]
|
bpf_jit_comp.c
    928  b2 = simple_alu_opcodes[BPF_OP(insn->code)];   in do_jit()
    964  switch (BPF_OP(insn->code)) {   in do_jit()
   1050  if (BPF_OP(insn->code) == BPF_MOD &&   in do_jit()
   1054  else if (BPF_OP(insn->code) == BPF_DIV &&   in do_jit()
   1101  b3 = simple_alu_opcodes[BPF_OP(insn->code)];   in do_jit()
   1133  b3 = simple_alu_opcodes[BPF_OP(insn->code)];   in do_jit()
   1379  EMIT2(simple_alu_opcodes[BPF_OP(insn->imm)],   in do_jit()
   1516  switch (BPF_OP(insn->code)) {   in do_jit()
|
/linux/tools/bpf/bpftool/

cfg.c
    179  __u8 opcode = BPF_OP(cur->code);   in func_partition_bb_head()
    305  BPF_OP(insn->code) == BPF_EXIT) {   in func_add_bb_edges()
    310  } else if (BPF_OP(insn->code) == BPF_JA) {   in func_add_bb_edges()
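
bpftool's CFG builder uses BPF_OP() to decide how many control-flow successors an instruction contributes: BPF_EXIT ends the path, BPF_JA has a single target, and conditional jumps have both a taken and a fall-through edge. A hypothetical helper sketching that classification (not cfg.c's code):

#include <linux/bpf.h>

/* Number of out-edges an instruction adds to the control-flow graph. */
static int insn_out_edges(__u8 code)
{
        if (BPF_CLASS(code) != BPF_JMP && BPF_CLASS(code) != BPF_JMP32)
                return 1;                       /* plain fall-through           */

        switch (BPF_OP(code)) {
        case BPF_EXIT:  return 0;               /* leaves the function          */
        case BPF_JA:    return 1;               /* unconditional jump           */
        case BPF_CALL:  return 1;               /* execution resumes afterwards */
        default:        return 2;               /* taken edge + fall-through    */
        }
}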
|
/linux/arch/powerpc/net/

bpf_jit_comp64.c
    401  if (BPF_OP(code) == BPF_MOD) {   in bpf_jit_build_body()
    411  if (BPF_OP(code) == BPF_MOD) {   in bpf_jit_build_body()
    426  if (BPF_OP(code) == BPF_DIV) {   in bpf_jit_build_body()
    437  if (BPF_OP(code) == BPF_MOD) {   in bpf_jit_build_body()
    451  if (BPF_OP(code) == BPF_MOD) {   in bpf_jit_build_body()
|
bpf_jit_comp32.c
    360  if (imm >= 0 || (BPF_OP(code) == BPF_SUB && imm == 0x80000000))   in bpf_jit_build_body()
   1110  !insn_is_zext(&insn[i + 1]) && !(BPF_OP(code) == BPF_END && imm == 64))   in bpf_jit_build_body()
|
/linux/arch/riscv/net/

bpf_jit_comp32.c
    994  emit_alu_r64(dst, src, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
    998  emit_alu_r64(dst, tmp2, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
   1014  emit_alu_i64(dst, imm, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
   1047  emit_alu_r32(dst, src, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
   1063  emit_alu_i32(dst, imm, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
   1071  emit_alu_r32(dst, tmp2, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
   1221  emit_branch_r64(dst, src, rvoff, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
   1223  emit_branch_r32(dst, src, rvoff, ctx, BPF_OP(code));   in bpf_jit_emit_insn()
|
bpf_jit_comp64.c
    846  if (is_signed_bpf_cond(BPF_OP(code)))   in bpf_jit_emit_insn()
    856  if (BPF_OP(code) == BPF_JSET) {   in bpf_jit_emit_insn()
    863  emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);   in bpf_jit_emit_insn()
    898  if (is_signed_bpf_cond(BPF_OP(code)))   in bpf_jit_emit_insn()
    907  emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);   in bpf_jit_emit_insn()
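
is_signed_bpf_cond() lets the RV64 JIT pick between signed and unsigned comparison instructions before emit_branch() translates the condition. The check boils down to the four signed jump ops; an illustrative stand-alone version:

#include <linux/bpf.h>
#include <stdbool.h>

/* BPF_JSGT/JSGE/JSLT/JSLE compare as signed; every other condition is unsigned. */
static bool is_signed_cond(__u8 code)
{
        __u8 op = BPF_OP(code);

        return op == BPF_JSGT || op == BPF_JSGE ||
               op == BPF_JSLT || op == BPF_JSLE;
}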
|
/linux/arch/s390/net/

bpf_jit_comp.c
    836  int rc_reg = BPF_OP(insn->code) == BPF_DIV ? REG_W1 : REG_W0;   in bpf_jit_insn()
    853  int rc_reg = BPF_OP(insn->code) == BPF_DIV ? REG_W1 : REG_W0;   in bpf_jit_insn()
    868  int rc_reg = BPF_OP(insn->code) == BPF_DIV ? REG_W1 : REG_W0;   in bpf_jit_insn()
    871  if (BPF_OP(insn->code) == BPF_MOD)   in bpf_jit_insn()
    903  int rc_reg = BPF_OP(insn->code) == BPF_DIV ? REG_W1 : REG_W0;   in bpf_jit_insn()
    906  if (BPF_OP(insn->code) == BPF_MOD)   in bpf_jit_insn()
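
The rc_reg selection above reflects that a single hardware divide produces both results: BPF_DIV keeps the quotient and BPF_MOD keeps the remainder. A minimal C analogue of that choice (division by zero is assumed to be handled separately, as the verifier and JITs do):

#include <linux/bpf.h>

/* Pick the result the BPF program asked for; caller guarantees src != 0. */
static __u64 div_mod_result(__u8 code, __u64 dst, __u64 src)
{
        return BPF_OP(code) == BPF_DIV ? dst / src : dst % src;
}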
|
/linux/arch/arm/net/

bpf_jit_32.c
    675  switch (BPF_OP(op)) {   in emit_alu_r()
   1446  emit_a32_alu_r64(is64, dst, src, ctx, BPF_OP(code));   in build_insn()
   1456  emit_a32_alu_r64(is64, dst, tmp2, ctx, BPF_OP(code));   in build_insn()
   1479  emit_udivmod(rd_lo, rd_lo, rt, ctx, BPF_OP(code));   in build_insn()
   1498  emit_a32_alu_i(dst_lo, imm, ctx, BPF_OP(code));   in build_insn()
   1534  emit_a32_alu_i(dst_lo, 0, ctx, BPF_OP(code));   in build_insn()
   1738  emit_ar_r(rd[0], rd[1], rm, rn, ctx, BPF_OP(code),   in build_insn()
   1743  switch (BPF_OP(code)) {   in build_insn()
|
/linux/tools/perf/util/

bpf-prologue.c
    340  opcode = BPF_OP(insn->code);   in prologue_relocate()
|
/linux/arch/sparc/net/

bpf_jit_comp_64.c
    668  BPF_OP(code) == BPF_JSET)   in emit_compare_and_branch()
    691  if (BPF_OP(code) == BPF_JSET) {   in emit_compare_and_branch()
    702  switch (BPF_OP(code)) {   in emit_compare_and_branch()
    745  switch (BPF_OP(code)) {   in emit_compare_and_branch()
|
/linux/drivers/net/ethernet/netronome/nfp/bpf/

main.h
    363  return BPF_OP(meta->insn.code);   in mbpf_op()
|
jit.c
   1390  op = BPF_OP(meta->insn.code) >> 4;   in nfp_jmp_code_get()
   3932  switch (BPF_OP(insn.code)) {   in nfp_bpf_opt_neg_add_sub()
   3943  if (BPF_OP(insn.code) == BPF_ADD)   in nfp_bpf_opt_neg_add_sub()
   3945  else if (BPF_OP(insn.code) == BPF_SUB)   in nfp_bpf_opt_neg_add_sub()
   4504  if (BPF_OP(code) == BPF_EXIT)   in nfp_bpf_jit_prepare()
   4512  pseudo_call = BPF_OP(code) == BPF_CALL;   in nfp_bpf_jit_prepare()
|
/linux/arch/arm64/net/

bpf_jit_comp.c
    674  switch (BPF_OP(code)) {   in build_insn()
|