/linux/samples/bpf/
  bpf_insn.h
      12  .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
      20  .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
      30  .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
      38  .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
     177  .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
     187  .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
     197  .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
     207  .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
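These hits fall inside the instruction-construction macros (BPF_ALU64_REG() and friends): OP is an operation constant such as BPF_ADD, and BPF_OP(OP) masks it down to the operation nibble before it is OR-ed with the instruction class and the source-operand flag. A minimal userspace sketch of how one of these macros is used, assuming the samples' bpf_insn.h is on the include path:

    #include <linux/bpf.h>   /* struct bpf_insn, BPF_REG_*, BPF_ADD, ... */
    #include "bpf_insn.h"    /* BPF_ALU64_REG(), as hit at line 12 above */

    int main(void)
    {
            /* BPF_ALU64 (0x07) | BPF_OP(BPF_ADD) (0x00) | BPF_X (0x08) == 0x0f,
             * i.e. "r0 += r1" as a 64-bit register-to-register add. */
            struct bpf_insn add_r0_r1 = BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1);

            return add_r0_r1.code == 0x0f ? 0 : 1;
    }

The same pattern with BPF_K in place of BPF_X (lines 30 and 38) selects the immediate-operand form of the instruction.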
/linux/kernel/bpf/
  disasm.c
     159  return (BPF_OP(insn->code) == BPF_DIV || BPF_OP(insn->code) == BPF_MOD) &&  [in is_sdiv_smod()]
     165  return BPF_OP(insn->code) == BPF_MOV &&  [in is_movsx()]
     194  if (BPF_OP(insn->code) == BPF_END) {  [in print_bpf_insn()]
     199  } else if (BPF_OP(insn->code) == BPF_NEG) {  [in print_bpf_insn()]
     215  is_sdiv_smod(insn) ? bpf_alu_sign_string[BPF_OP(insn->code) >> 4]  [in print_bpf_insn()]
     216  : bpf_alu_string[BPF_OP(insn->code) >> 4],  [in print_bpf_insn()]
     224  is_sdiv_smod(insn) ? bpf_alu_sign_string[BPF_OP(insn->code) >> 4]  [in print_bpf_insn()]
     225  : bpf_alu_string[BPF_OP(insn->code) >> 4],  [in print_bpf_insn()]
     242  bpf_alu_string[BPF_OP(ins...  [in print_bpf_insn(), snippet truncated]
     ...  (more matches not shown)
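In the disassembler the masked operation is also used as a table index: shifting BPF_OP(insn->code) right by 4 turns the 0x00-0xf0 nibble into a dense 0-15 index into bpf_alu_string[] (or bpf_alu_sign_string[] for the signed division/modulo variants). A simplified stand-in for that lookup, not the kernel's exact table:

    #include <linux/bpf.h>   /* BPF_OP(), BPF_ADD, BPF_MOV, struct bpf_insn */

    /* Sketch only: disasm.c's real bpf_alu_string[] fills all 16 slots. */
    static const char * const alu_mnemonic[16] = {
            [BPF_ADD >> 4] = "+=",
            [BPF_SUB >> 4] = "-=",
            [BPF_MUL >> 4] = "*=",
            [BPF_DIV >> 4] = "/=",
            [BPF_MOV >> 4] = "=",
    };

    static const char *alu_op_str(const struct bpf_insn *insn)
    {
            const char *s = alu_mnemonic[BPF_OP(insn->code) >> 4];

            return s ? s : "?";   /* slot not filled in this sketch */
    }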
/linux/tools/testing/selftests/bpf/
  disasm.c
     159  return (BPF_OP(insn->code) == BPF_DIV || BPF_OP(insn->code) == BPF_MOD) &&  [in is_sdiv_smod()]
     165  return BPF_OP(insn->code) == BPF_MOV &&  [in is_movsx()]
     194  if (BPF_OP(insn->code) == BPF_END) {  [in print_bpf_insn()]
     199  } else if (BPF_OP(insn->code) == BPF_NEG) {  [in print_bpf_insn()]
     215  is_sdiv_smod(insn) ? bpf_alu_sign_string[BPF_OP(insn->code) >> 4]  [in print_bpf_insn()]
     216  : bpf_alu_string[BPF_OP(insn->code) >> 4],  [in print_bpf_insn()]
     224  is_sdiv_smod(insn) ? bpf_alu_sign_string[BPF_OP(insn->code) >> 4]  [in print_bpf_insn()]
     225  : bpf_alu_string[BPF_OP(insn->code) >> 4],  [in print_bpf_insn()]
     242  bpf_alu_string[BPF_OP(ins...  [in print_bpf_insn(), snippet truncated]
     ...  (more matches not shown)
/linux/tools/include/linux/
  filter.h
      36  .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
      44  .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
      54  .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
      62  .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
     230  .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
     240  .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
     250  .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
     260  .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
/linux/arch/mips/net/
  bpf_jit_comp64.c
     160  switch (BPF_OP(op)) {  [in emit_alu_i64()]
     203  switch (BPF_OP(op)) {  [in emit_alu_r64()]
     677  if (!valid_alu_i(BPF_OP(code), imm)) {  [in build_insn()]
     679  emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));  [in build_insn()]
     680  } else if (rewrite_alu_i(BPF_OP(code), imm, &alu, &val)) {  [in build_insn()]
     699  if (!valid_alu_i(BPF_OP(code), imm)) {  [in build_insn()]
     702  emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));  [in build_insn()]
     703  } else if (rewrite_alu_i(BPF_OP(code), imm, &alu, &val)) {  [in build_insn()]
     717  emit_alu_r(ctx, dst, src, BPF_OP(code));  [in build_insn()]
     736  emit_alu_r(ctx, dst, MIPS_R_T4, BPF_OP(code));  [in build_insn()]
     ...  (more matches not shown)
  bpf_jit_comp32.c
     270  switch (BPF_OP(op)) {  [in emit_alu_r64()]
     324  switch (BPF_OP(op)) {  [in emit_shift_i64()]
     376  switch (BPF_OP(op)) {  [in emit_shift_r64()]
     546  switch (BPF_OP(op)) {  [in emit_divmod_r64()]
    1519  if (!valid_alu_i(BPF_OP(code), imm)) {  [in build_insn()]
    1521  emit_alu_r(ctx, lo(dst), MIPS_R_T6, BPF_OP(code));  [in build_insn()]
    1522  } else if (rewrite_alu_i(BPF_OP(code), imm, &alu, &val)) {  [in build_insn()]
    1549  emit_alu_r(ctx, lo(dst), lo(src), BPF_OP(code));  [in build_insn()]
    1567  emit_alu_i64(ctx, dst, imm, BPF_OP(code));  [in build_insn()]
    1578  emit_alu_i64(ctx, dst, imm, BPF_OP(code));  [in build_insn()]
     ...  (more matches not shown)
  bpf_jit_comp.c
     213  switch (BPF_OP(op)) {  [in valid_alu_i()]
     251  switch (BPF_OP(op)) {  [in rewrite_alu_i()]
     300  switch (BPF_OP(op)) {  [in emit_alu_i()]
     344  switch (BPF_OP(op)) {  [in emit_alu_r()]
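All three MIPS files follow the same pattern: build_insn() strips the class and source bits from insn->code and hands the bare operation to helpers that switch on it, after pre-screening immediates with valid_alu_i()/rewrite_alu_i(). A self-contained sketch of that dispatch style, written as a tiny interpreter rather than a JIT so it compiles and runs anywhere the UAPI headers are installed:

    #include <stdint.h>
    #include <linux/bpf.h>   /* BPF_OP(), BPF_ADD, BPF_SUB, ... */

    /* Hedged sketch, not kernel code: apply one 64-bit ALU operation the way
     * the JIT helpers above dispatch on BPF_OP(code) before emitting native
     * instructions. */
    static uint64_t run_alu64(uint8_t code, uint64_t dst, uint64_t src)
    {
            switch (BPF_OP(code)) {
            case BPF_ADD: return dst + src;
            case BPF_SUB: return dst - src;
            case BPF_AND: return dst & src;
            case BPF_OR:  return dst | src;
            case BPF_XOR: return dst ^ src;
            case BPF_LSH: return dst << (src & 63);  /* shift count taken mod 64 */
            case BPF_RSH: return dst >> (src & 63);
            default:      return dst;   /* ops not covered by this sketch */
            }
    }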
/linux/tools/bpf/bpftool/
  cfg.c
     179  __u8 opcode = BPF_OP(cur->code);  [in func_partition_bb_head()]
     305  BPF_OP(insn->code) == BPF_CALL ||  [in func_add_bb_edges()]
     306  BPF_OP(insn->code) == BPF_EXIT) {  [in func_add_bb_edges()]
     311  } else if (BPF_OP(insn->code) == BPF_JA) {  [in func_add_bb_edges()]
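bpftool's control-flow-graph pass uses BPF_OP() on jump-class instructions to decide where basic blocks end and how they connect (unconditional BPF_JA versus BPF_CALL/BPF_EXIT in the hits above). A hedged approximation of such a check; insn_ends_bb() is a made-up name for illustration, not bpftool's API:

    #include <stdbool.h>
    #include <linux/bpf.h>   /* struct bpf_insn, BPF_CLASS(), BPF_OP(), ... */

    /* Rough predicate: a jump-class instruction terminates a basic block
     * unless it is a call, which falls through to the next instruction. */
    static bool insn_ends_bb(const struct bpf_insn *insn)
    {
            __u8 cls = BPF_CLASS(insn->code);

            if (cls != BPF_JMP && cls != BPF_JMP32)
                    return false;
            return BPF_OP(insn->code) != BPF_CALL;
    }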
/linux/arch/parisc/net/
  bpf_jit_comp32.c
     340  if (BPF_OP(opcode) == BPF_DIV || BPF_OP(opcode) == BPF_MOD) {  [in emit_call_millicode()]
     341  if (BPF_OP(opcode) == BPF_DIV)  [in emit_call_millicode()]
     379  if (BPF_OP(opcode) == BPF_DIV || BPF_OP(opcode) == BPF_MOD) {  [in emit_call_libgcc_ll()]
     381  if (BPF_OP(opcode) == BPF_DIV)  [in emit_call_libgcc_ll()]
    1134  BPF_OP(code), insn->src_reg, insn->dst_reg);  [in bpf_jit_emit_insn()]
    1166  emit_alu_r64(dst, src, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1171  emit_alu_r64(dst, tmp2, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1181  emit_alu_i64(dst, imm, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1214  emit_alu_r32(dst, src, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1230  emit_alu_i32(dst, imm, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
     ...  (more matches not shown)
  bpf_jit_comp64.c
     533  if (BPF_OP(opcode) == BPF_DIV || BPF_OP(opcode) == BPF_MOD) {  [in emit_call_libgcc_ll()]
     534  if (BPF_OP(opcode) == BPF_DIV)  [in emit_call_libgcc_ll()]
     890  if (is_signed_bpf_cond(BPF_OP(code)))  [in bpf_jit_emit_insn()]
     899  if (BPF_OP(code) == BPF_JSET) {  [in bpf_jit_emit_insn()]
     906  emit_branch(BPF_OP(code), rd, rs, paoff, ctx);  [in bpf_jit_emit_insn()]
     940  if (is_signed_bpf_cond(BPF_OP(code)))  [in bpf_jit_emit_insn()]
     949  emit_branch(BPF_OP(code), rd, rs, paoff, ctx);  [in bpf_jit_emit_insn()]
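The parisc and riscv 64-bit JITs (see the riscv entry further down) both test is_signed_bpf_cond(BPF_OP(code)) before emitting a conditional branch, because signed and unsigned BPF comparisons map to different native condition codes, and they special-case BPF_JSET, which has no direct compare equivalent. A plausible sketch of such a predicate, built only from the condition codes in the UAPI headers:

    #include <stdbool.h>
    #include <linux/bpf.h>   /* BPF_JSGT, BPF_JSLT, BPF_JSGE, BPF_JSLE */

    /* Sketch: the signed conditional-jump operations need signed native
     * comparisons; everything else can use unsigned ones. */
    static bool is_signed_bpf_cond(unsigned char cond)
    {
            return cond == BPF_JSGT || cond == BPF_JSLT ||
                   cond == BPF_JSGE || cond == BPF_JSLE;
    }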
/linux/include/uapi/linux/
  bpf_common.h
      31  #define BPF_OP(code) ((code) & 0xf0)
/linux/tools/include/uapi/linux/
  bpf_common.h
      31  #define BPF_OP(code) ((code) & 0xf0)
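Both copies define the same thing: BPF_OP() masks bits 4-7 of the opcode byte, leaving the class (bits 0-2) for BPF_CLASS() and the source flag (bit 3) for BPF_SRC(). A small userspace check of that split, assuming the kernel UAPI headers are installed:

    #include <assert.h>
    #include <linux/bpf.h>   /* includes linux/bpf_common.h */

    int main(void)
    {
            /* "if dst > src goto ..." : jump class, JGT op, register source */
            unsigned char code = BPF_JMP | BPF_JGT | BPF_X;   /* 0x05 | 0x20 | 0x08 */

            assert(BPF_OP(code) == BPF_JGT);      /* 0x20: the operation nibble */
            assert(BPF_CLASS(code) == BPF_JMP);   /* 0x05: low three bits */
            assert(BPF_SRC(code) == BPF_X);       /* 0x08: source-operand flag */
            return 0;
    }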
/linux/arch/x86/net/
  bpf_jit_comp32.c
     540  switch (BPF_OP(op)) {  [in emit_ia32_alu_r()]
    1740  emit_ia32_alu_r64(is64, BPF_OP(code), dst,  [in do_jit()]
    1745  emit_ia32_alu_i64(is64, BPF_OP(code), dst,  [in do_jit()]
    1775  emit_ia32_shift_r(BPF_OP(code), dst_lo, src_lo,  [in do_jit()]
    1782  emit_ia32_shift_r(BPF_OP(code), dst_lo,  [in do_jit()]
    1798  emit_ia32_div_mod_r(BPF_OP(code), dst_lo,  [in do_jit()]
    1805  emit_ia32_div_mod_r(BPF_OP(code), dst_lo,  [in do_jit()]
    1826  emit_ia32_shift_r(BPF_OP(code), dst_lo, IA32_ECX, dstk,  [in do_jit()]
    1863  emit_ia32_alu_i(is64, false, BPF_OP(code),  [in do_jit()]
    2362  emit_cond_jmp: jmp_cond = get_cond_jmp_opcode(BPF_OP(code), false);  [in do_jit()]
     ...  (more matches not shown)
  bpf_jit_comp.c
    1747  b2 = simple_alu_opcodes[BPF_OP(insn->code)];  [in do_jit()]
    1831  switch (BPF_OP(insn->code)) {  [in do_jit()]
    1928  if (BPF_OP(insn->code) == BPF_MOD &&  [in do_jit()]
    1932  else if (BPF_OP(insn->code) == BPF_DIV &&  [in do_jit()]
    1979  b3 = simple_alu_opcodes[BPF_OP(insn->code)];  [in do_jit()]
    1998  switch (BPF_OP(insn->code)) {  [in do_jit()]
    2032  b3 = simple_alu_opcodes[BPF_OP(insn->code)];  [in do_jit()]
    2383  EMIT2(simple_alu_opcodes[BPF_OP(insn->imm)],  [in do_jit()]
    2558  switch (BPF_OP(insn->code)) {  [in do_jit()]
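Unlike the disassembler, the x86-64 JIT indexes its simple_alu_opcodes[] table with the unshifted BPF_OP() value, trading a sparse table (only every 0x10th slot is used) for a lookup that needs no shift. A hypothetical illustration of that layout; the opcode bytes shown are the generic x86 "op r/m, reg" encodings and not necessarily the kernel's exact table contents:

    #include <linux/bpf.h>   /* BPF_OP(), BPF_ADD, BPF_SUB, ... */

    /* Sparse table indexed directly by the 0x00-0xf0 operation nibble. */
    static const unsigned char alu_opcode_byte[256] = {
            [BPF_ADD] = 0x01,   /* add r/m, reg */
            [BPF_SUB] = 0x29,   /* sub r/m, reg */
            [BPF_AND] = 0x21,   /* and r/m, reg */
            [BPF_OR]  = 0x09,   /* or  r/m, reg */
            [BPF_XOR] = 0x31,   /* xor r/m, reg */
    };

    static unsigned char alu_opcode_for(unsigned char code)
    {
            return alu_opcode_byte[BPF_OP(code)];   /* note: no ">> 4" */
    }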
/linux/include/linux/
  filter.h
     107  .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
     118  .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
     131  .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
     141  .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
     399  .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
     409  .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
     419  .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
     429  .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
/linux/arch/riscv/net/
  bpf_jit_comp32.c
     991  emit_alu_r64(dst, src, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
     995  emit_alu_r64(dst, tmp2, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1011  emit_alu_i64(dst, imm, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1044  emit_alu_r32(dst, src, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1060  emit_alu_i32(dst, imm, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1068  emit_alu_r32(dst, tmp2, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1218  emit_branch_r64(dst, src, rvoff, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
    1220  emit_branch_r32(dst, src, rvoff, ctx, BPF_OP(code));  [in bpf_jit_emit_insn()]
  bpf_jit_comp64.c
    1650  if (is_signed_bpf_cond(BPF_OP(code))) {  [in bpf_jit_emit_insn()]
    1663  if (BPF_OP(code) == BPF_JSET) {  [in bpf_jit_emit_insn()]
    1669  emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);  [in bpf_jit_emit_insn()]
    1700  if (is_signed_bpf_cond(BPF_OP(code))) {  [in bpf_jit_emit_insn()]
    1713  emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);  [in bpf_jit_emit_insn()]
/linux/arch/arm/net/
  bpf_jit_32.c
     802  switch (BPF_OP(op)) {  [in emit_alu_r()]
    1675  emit_a32_alu_r64(is64, dst, src, ctx, BPF_OP(code));  [in build_insn()]
    1685  emit_a32_alu_r64(is64, dst, tmp2, ctx, BPF_OP(code));  [in build_insn()]
    1708  emit_udivmod(rd_lo, rd_lo, rt, ctx, BPF_OP(code), off);  [in build_insn()]
    1727  emit_udivmod64(rd, rd, rs, ctx, BPF_OP(code), off);  [in build_insn()]
    1739  emit_a32_alu_i(dst_lo, imm, ctx, BPF_OP(code));  [in build_insn()]
    1775  emit_a32_alu_i(dst_lo, 0, ctx, BPF_OP(code));  [in build_insn()]
    1988  emit_ar_r(rd[0], rd[1], rm, rn, ctx, BPF_OP(code),  [in build_insn()]
    1993  switch (BPF_OP(code)) {  [in build_insn()]
/linux/arch/powerpc/net/
  bpf_jit_comp64.c
     732  if (BPF_OP(code) == BPF_MOD) {  [in bpf_jit_build_body()]
     748  if (BPF_OP(code) == BPF_MOD) {  [in bpf_jit_build_body()]
     768  if (BPF_OP(code) == BPF_DIV) {  [in bpf_jit_build_body()]
     779  if (BPF_OP(code) == BPF_MOD) {  [in bpf_jit_build_body()]
     793  if (BPF_OP(code) == BPF_MOD) {  [in bpf_jit_build_body()]
  bpf_jit_comp32.c
     319  BPF_OP(prevcode) == BPF_MOV && BPF_SRC(prevcode) == BPF_X &&  [in bpf_jit_build_body()]
     399  if (imm >= 0 || (BPF_OP(code) == BPF_SUB && imm == 0x80000000))  [in bpf_jit_build_body()]
    1380  !insn_is_zext(&insn[i + 1]) && !(BPF_OP(code) == BPF_END && imm == 64))  [in bpf_jit_build_body()]
/linux/drivers/net/ethernet/netronome/nfp/bpf/
  main.h
     363  return BPF_OP(meta->insn.code);  [in mbpf_op()]
  jit.c
    1390  op = BPF_OP(meta->insn.code) >> 4;  [in nfp_jmp_code_get()]
    3932  switch (BPF_OP(insn.code)) {  [in nfp_bpf_opt_neg_add_sub()]
    3943  if (BPF_OP(insn.code) == BPF_ADD)  [in nfp_bpf_opt_neg_add_sub()]
    3945  else if (BPF_OP(insn.code) == BPF_SUB)  [in nfp_bpf_opt_neg_add_sub()]
    4504  if (BPF_OP(code) == BPF_EXIT)  [in nfp_bpf_jit_prepare()]
    4512  pseudo_call = BPF_OP(code) == BPF_CALL;  [in nfp_bpf_jit_prepare()]
/linux/arch/sparc/net/
  bpf_jit_comp_64.c
     668  BPF_OP(code) == BPF_JSET)  [in emit_compare_and_branch()]
     691  if (BPF_OP(code) == BPF_JSET) {  [in emit_compare_and_branch()]
     702  switch (BPF_OP(code)) {  [in emit_compare_and_branch()]
     745  switch (BPF_OP(code)) {  [in emit_compare_and_branch()]
/linux/Documentation/bpf/
  classic_vs_extended.rst
     289  If BPF_CLASS(code) == BPF_ALU or BPF_ALU64 [ in eBPF ], BPF_OP(code) is one of::
     306  If BPF_CLASS(code) == BPF_JMP or BPF_JMP32 [ in eBPF ], BPF_OP(code) is one of::
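The instruction-set document enumerates which operations are legal for each class. For the jump classes, a compact way to see the mapping is a switch over BPF_OP(code); the sketch below covers the conditional jumps only (BPF_JA, BPF_CALL and BPF_EXIT have no comparison-operator form) and uses the constants from the UAPI headers:

    #include <linux/bpf.h>   /* BPF_OP(), BPF_JEQ, BPF_JNE, ... */

    /* Sketch: map a conditional-jump operation to its C comparison operator. */
    static const char *jmp_op_str(unsigned char code)
    {
            switch (BPF_OP(code)) {
            case BPF_JEQ:  return "==";
            case BPF_JNE:  return "!=";
            case BPF_JGT:  return ">";    /* unsigned */
            case BPF_JGE:  return ">=";   /* unsigned */
            case BPF_JLT:  return "<";    /* unsigned */
            case BPF_JLE:  return "<=";   /* unsigned */
            case BPF_JSGT: return "s>";   /* signed */
            case BPF_JSGE: return "s>=";  /* signed */
            case BPF_JSLT: return "s<";   /* signed */
            case BPF_JSLE: return "s<=";  /* signed */
            case BPF_JSET: return "&";
            default:       return "?";    /* JA, CALL, EXIT, etc. */
            }
    }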
/linux/arch/loongarch/net/
  bpf_jit.c
     514  const u8 cond = BPF_OP(code);  [in build_insn()]
     860  if (is_signed_bpf_cond(BPF_OP(code))) {  [in build_insn()]
     901  if (is_signed_bpf_cond(BPF_OP(code))) {  [in build_insn()]