/linux/tools/testing/selftests/bpf/verifier/

atomic_fetch.c
   10  BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
   34  BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
   58  BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
   81  BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
  121  __ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 1, BPF_ADD | BPF_FETCH, 2, 3),
  122  __ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 1, BPF_ADD | BPF_FETCH, 2, 3),
  123  __ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 1, BPF_ADD | BPF_FETCH, 2, 3),
  124  __ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 1, BPF_ADD | BPF_FETCH, 2, 3),
  125  __ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 1, BPF_ADD | BPF_FETCH, 2, 3),
  126  __ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 1, BPF_ADD | BPF_FETCH, 2, 3),
  [all …]

atomic_invalid.c
   15  __INVALID_ATOMIC_ACCESS_TEST(BPF_ADD | BPF_FETCH),
   17  __INVALID_ATOMIC_ACCESS_TEST(BPF_ADD | BPF_FETCH),
   19  __INVALID_ATOMIC_ACCESS_TEST(BPF_AND | BPF_FETCH),
   21  __INVALID_ATOMIC_ACCESS_TEST(BPF_OR | BPF_FETCH),
   23  __INVALID_ATOMIC_ACCESS_TEST(BPF_XOR | BPF_FETCH),

atomic_fetch_add.c
    9  BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),
   31  BPF_ATOMIC_OP(BPF_W, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -4),
   50  BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_10, -8),
   62  BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_2, -8),
   75  BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_2, BPF_REG_0, -8),
   96  BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_2, BPF_REG_3, 0),

atomic_or.c
   30  BPF_ATOMIC_OP(BPF_DW, BPF_OR | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),
   60  BPF_ATOMIC_OP(BPF_W, BPF_OR | BPF_FETCH, BPF_REG_10, BPF_REG_1, -4),
   88  BPF_ATOMIC_OP(BPF_W, BPF_OR | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),

atomic_and.c
   30  BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),
   60  BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_10, BPF_REG_1, -4),
   85  BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_10, BPF_REG_0, -8),

atomic_xor.c
   30  BPF_ATOMIC_OP(BPF_DW, BPF_XOR | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),
   60  BPF_ATOMIC_OP(BPF_W, BPF_XOR | BPF_FETCH, BPF_REG_10, BPF_REG_1, -4),

atomic_bounds.c
   18  BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),

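For orientation, here is a hedged sketch of what these selftest lines build. BPF_ATOMIC_OP(SIZE, OP, DST, SRC, OFF) assembles a single BPF_STX | BPF_ATOMIC instruction whose imm field carries the operation (including the BPF_FETCH bit); the expansion below is illustrative rather than authoritative (the real macro lives in include/linux/filter.h), and the usage comment is just a plain-language reading of the instruction's semantics.

    #include <linux/bpf.h>  /* struct bpf_insn, BPF_STX, BPF_SIZE(), BPF_ATOMIC, BPF_FETCH */

    /* Illustrative expansion, assuming the BPF_STX | BPF_ATOMIC encoding;
     * see include/linux/filter.h for the authoritative definition. */
    #define BPF_ATOMIC_OP(SIZE, OP, DST, SRC, OFF)                    \
            ((struct bpf_insn) {                                      \
                    .code    = BPF_STX | BPF_SIZE(SIZE) | BPF_ATOMIC, \
                    .dst_reg = DST,                                   \
                    .src_reg = SRC,                                   \
                    .off     = OFF,                                   \
                    .imm     = OP })

    /* With BPF_FETCH set, the source register receives the old value, e.g.
     * BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8)
     * behaves like: r1 = atomic_fetch_add((u64 *)(r10 - 8), r1); */
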
/linux/arch/mips/net/

bpf_jit_comp32.c
  741  case BPF_ADD | BPF_FETCH:      in emit_atomic_r32()
  747  case BPF_SUB | BPF_FETCH:      in emit_atomic_r32()
  753  case BPF_OR | BPF_FETCH:       in emit_atomic_r32()
  759  case BPF_AND | BPF_FETCH:      in emit_atomic_r32()
  765  case BPF_XOR | BPF_FETCH:      in emit_atomic_r32()
  777  if (code & BPF_FETCH) {        in emit_atomic_r32()
  822  case BPF_ADD | BPF_FETCH:      in emit_atomic_r64()
  828  case BPF_SUB | BPF_FETCH:      in emit_atomic_r64()
  834  case BPF_OR | BPF_FETCH:       in emit_atomic_r64()
  840  case BPF_AND | BPF_FETCH:      in emit_atomic_r64()
  [all …]

bpf_jit_comp64.c
  391  case BPF_ADD | BPF_FETCH:      in emit_atomic_r64()
  395  case BPF_AND | BPF_FETCH:      in emit_atomic_r64()
  399  case BPF_OR | BPF_FETCH:       in emit_atomic_r64()
  403  case BPF_XOR | BPF_FETCH:      in emit_atomic_r64()
  414  if (code & BPF_FETCH) {        in emit_atomic_r64()
  853  case BPF_ADD | BPF_FETCH:      in build_insn()
  855  case BPF_AND | BPF_FETCH:      in build_insn()
  857  case BPF_OR | BPF_FETCH:       in build_insn()
  859  case BPF_XOR | BPF_FETCH:      in build_insn()
  863  } else if (imm & BPF_FETCH) {  in build_insn()

bpf_jit_comp.c
  415  case BPF_ADD | BPF_FETCH:      in emit_atomic_r()
  419  case BPF_AND | BPF_FETCH:      in emit_atomic_r()
  423  case BPF_OR | BPF_FETCH:       in emit_atomic_r()
  427  case BPF_XOR | BPF_FETCH:      in emit_atomic_r()
  438  if (code & BPF_FETCH) {        in emit_atomic_r()

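A pattern worth noting in these excerpts: the switch arms for the plain and *_FETCH forms share the emission of the read-modify-write, and a trailing "if (code & BPF_FETCH)" decides whether the pre-operation value is copied into a register. A minimal standalone sketch of that decision, written against the uapi constants; the helper name is hypothetical, not kernel code.

    #include <stdbool.h>
    #include <linux/bpf.h>  /* BPF_ADD, BPF_AND, BPF_OR, BPF_XOR, BPF_FETCH, BPF_XCHG, BPF_CMPXCHG */

    /* Hypothetical helper mirroring the dispatch above: only the BPF_FETCH
     * variants (and the always-fetching exchange ops) produce an old value
     * that must be written back to a register. */
    static bool atomic_op_returns_old_value(int imm)
    {
            switch (imm) {
            case BPF_ADD | BPF_FETCH:
            case BPF_AND | BPF_FETCH:
            case BPF_OR  | BPF_FETCH:
            case BPF_XOR | BPF_FETCH:
            case BPF_XCHG:          /* old value goes to the source register */
            case BPF_CMPXCHG:       /* old value goes to R0 instead */
                    return true;
            default:                /* BPF_ADD, BPF_AND, ... without BPF_FETCH */
                    return false;
            }
    }
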
/linux/kernel/bpf/

disasm.c
  245  (insn->imm == (BPF_ADD | BPF_FETCH) ||   in print_bpf_insn()
  246  insn->imm == (BPF_AND | BPF_FETCH) ||    in print_bpf_insn()
  247  insn->imm == (BPF_OR | BPF_FETCH) ||     in print_bpf_insn()
  248  insn->imm == (BPF_XOR | BPF_FETCH))) {   in print_bpf_insn()

core.c
 2159  case BOP | BPF_FETCH: \                  in ___bpf_prog_run()

verifier.c
 3533  (insn->imm & BPF_FETCH)) {               in insn_def_regno()
 7528  case BPF_ADD | BPF_FETCH:                in check_atomic()
 7530  case BPF_AND | BPF_FETCH:                in check_atomic()
 7532  case BPF_OR | BPF_FETCH:                 in check_atomic()
 7534  case BPF_XOR | BPF_FETCH:                in check_atomic()
 7588  if (insn->imm & BPF_FETCH) {             in check_atomic()

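The "case BOP | BPF_FETCH" arms in core.c are where the interpreter gives these instructions their meaning, and the verifier (insn_def_regno(), check_atomic()) treats the register receiving the old value (src_reg, or R0 for BPF_CMPXCHG) as written only when BPF_FETCH is set. As a rough reference for that meaning only, and not kernel code, the 64-bit fetch variants behave like the compiler builtins below; the function name is illustrative.

    #include <stdint.h>
    #include <linux/bpf.h>  /* BPF_ADD, BPF_AND, BPF_OR, BPF_XOR, BPF_FETCH */

    /* Illustrative reference semantics for the 64-bit BPF_FETCH variants;
     * the returned value is what ends up in the source register. */
    static uint64_t bpf_atomic_fetch64(int imm, uint64_t *addr, uint64_t src)
    {
            switch (imm) {
            case BPF_ADD | BPF_FETCH:
                    return __atomic_fetch_add(addr, src, __ATOMIC_SEQ_CST);
            case BPF_AND | BPF_FETCH:
                    return __atomic_fetch_and(addr, src, __ATOMIC_SEQ_CST);
            case BPF_OR | BPF_FETCH:
                    return __atomic_fetch_or(addr, src, __ATOMIC_SEQ_CST);
            case BPF_XOR | BPF_FETCH:
                    return __atomic_fetch_xor(addr, src, __ATOMIC_SEQ_CST);
            default:
                    return 0;       /* other imm values not modeled here */
            }
    }
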
/linux/tools/testing/selftests/bpf/

disasm.c
  245  (insn->imm == (BPF_ADD | BPF_FETCH) ||   in print_bpf_insn()
  246  insn->imm == (BPF_AND | BPF_FETCH) ||    in print_bpf_insn()
  247  insn->imm == (BPF_OR | BPF_FETCH) ||     in print_bpf_insn()
  248  insn->imm == (BPF_XOR | BPF_FETCH))) {   in print_bpf_insn()

/linux/lib/

test_bpf.c
 1633  if (op & BPF_FETCH)                                      in __bpf_emit_atomic64()
 1680  if (op & BPF_FETCH)                                      in __bpf_emit_atomic32()
 1828  return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);   in bpf_fill_atomic64_add_fetch()
 1833  return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);   in bpf_fill_atomic64_and_fetch()
 1838  return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);    in bpf_fill_atomic64_or_fetch()
 1843  return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);   in bpf_fill_atomic64_xor_fetch()
 1880  return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);   in bpf_fill_atomic32_add_fetch()
 1885  return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);   in bpf_fill_atomic32_and_fetch()
 1890  return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);    in bpf_fill_atomic32_or_fetch()
 1895  return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);   in bpf_fill_atomic32_xor_fetch()
  [all …]

/linux/Documentation/bpf/

clang-notes.rst
   34  Clang can generate is ``BPF_ADD`` *without* ``BPF_FETCH``. If you need to enable

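To connect this to source code: with clang targeting BPF and a CPU version that includes the atomics extension (e.g. -mcpu=v3), the __sync_fetch_and_* builtins are the usual way to reach these instructions, and whether BPF_FETCH is set generally follows from whether the result is consumed. A hedged example; function and variable names are illustrative, and exact codegen depends on the clang version.

    /* Built with something like: clang -O2 -target bpf -mcpu=v3 -c atomics.c */
    long read_then_or(long *flags, long bits)
    {
            /* result is used, so clang is expected to emit BPF_OR | BPF_FETCH */
            return __sync_fetch_and_or(flags, bits);
    }

    void count(long *counter)
    {
            /* result discarded: a plain BPF_ADD (no BPF_FETCH) suffices */
            __sync_fetch_and_add(counter, 1);
    }
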
/linux/arch/arm64/net/

bpf_jit_comp.c
  689  case BPF_ADD | BPF_FETCH:                in emit_lse_atomic()
  692  case BPF_AND | BPF_FETCH:                in emit_lse_atomic()
  696  case BPF_OR | BPF_FETCH:                 in emit_lse_atomic()
  699  case BPF_XOR | BPF_FETCH:                in emit_lse_atomic()
  766  } else if (imm == (BPF_ADD | BPF_FETCH) ||   in emit_ll_sc_atomic()
  767  imm == (BPF_AND | BPF_FETCH) ||          in emit_ll_sc_atomic()
  768  imm == (BPF_OR | BPF_FETCH) ||           in emit_ll_sc_atomic()
  769  imm == (BPF_XOR | BPF_FETCH)) {          in emit_ll_sc_atomic()
  775  if (imm == (BPF_ADD | BPF_FETCH))        in emit_ll_sc_atomic()
  777  else if (imm == (BPF_AND | BPF_FETCH))   in emit_ll_sc_atomic()
  [all …]

/linux/arch/x86/net/

bpf_jit_comp.c
 1263  case BPF_ADD | BPF_FETCH:                in emit_atomic()
 1313  case BPF_ADD | BPF_FETCH:                in emit_atomic_index()
 2118  if (insn->imm == (BPF_AND | BPF_FETCH) ||   in do_jit()
 2119  insn->imm == (BPF_OR | BPF_FETCH) ||     in do_jit()
 2120  insn->imm == (BPF_XOR | BPF_FETCH)) {    in do_jit()
 3776  if (insn->imm == (BPF_AND | BPF_FETCH) ||   in bpf_jit_supports_insn()
 3777  insn->imm == (BPF_OR | BPF_FETCH) ||     in bpf_jit_supports_insn()
 3778  insn->imm == (BPF_XOR | BPF_FETCH))      in bpf_jit_supports_insn()

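The do_jit() and bpf_jit_supports_insn() checks single out the AND/OR/XOR fetch forms because, unlike fetch-add (which maps naturally to a locked XADD on x86), they have no single instruction that also returns the old value and are therefore typically built around a compare-and-exchange loop. A standalone C11 sketch of that strategy, purely for illustration; the function name is hypothetical.

    #include <stdatomic.h>
    #include <stdint.h>

    /* Fetch-AND composed from a CAS loop, the same idea a JIT can fall back
     * to when the ISA lacks a native fetch-and-and that returns the old value. */
    static uint64_t fetch_and_via_cas(_Atomic uint64_t *p, uint64_t mask)
    {
            uint64_t old = atomic_load(p);

            /* on failure, 'old' is refreshed with the current value and we retry */
            while (!atomic_compare_exchange_weak(p, &old, old & mask))
                    ;
            return old;     /* the pre-operation value, i.e. the BPF_FETCH result */
    }
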
/linux/arch/loongarch/net/

bpf_jit.c
  325  case BPF_ADD | BPF_FETCH:   in emit_atomic()
  333  case BPF_AND | BPF_FETCH:   in emit_atomic()
  341  case BPF_OR | BPF_FETCH:    in emit_atomic()
  349  case BPF_XOR | BPF_FETCH:   in emit_atomic()

/linux/arch/s390/net/

bpf_jit_comp.c
 1580  (insn->imm & BPF_FETCH) ? src_reg : REG_W0, \   in bpf_jit_insn()
 1585  if (insn->imm & BPF_FETCH) { \                   in bpf_jit_insn()
 1593  case BPF_ADD | BPF_FETCH:                        in bpf_jit_insn()
 1598  case BPF_AND | BPF_FETCH:                        in bpf_jit_insn()
 1603  case BPF_OR | BPF_FETCH:                         in bpf_jit_insn()
 1608  case BPF_XOR | BPF_FETCH:                        in bpf_jit_insn()

/linux/include/uapi/linux/

bpf.h
   50  #define BPF_FETCH 0x01 /* not an opcode on its own, used to build others */
   51  #define BPF_XCHG (0xe0 | BPF_FETCH) /* atomic exchange */
   52  #define BPF_CMPXCHG (0xf0 | BPF_FETCH) /* atomic compare-and-write */

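Numerically, BPF_FETCH is the low bit of the imm field, so each fetch variant is simply the base ALU opcode with bit 0 set, and the two exchange opcodes always carry it. A small compile-time check, assuming the standard BPF_* values pulled in by the uapi headers above.

    #include <linux/bpf.h>  /* BPF_ADD/BPF_AND/... come via linux/bpf_common.h */

    /* Sanity checks on the imm encodings; values follow from the header above. */
    _Static_assert((BPF_ADD | BPF_FETCH) == 0x01, "fetch-add imm");
    _Static_assert((BPF_AND | BPF_FETCH) == 0x51, "fetch-and imm");
    _Static_assert((BPF_OR  | BPF_FETCH) == 0x41, "fetch-or imm");
    _Static_assert((BPF_XOR | BPF_FETCH) == 0xa1, "fetch-xor imm");
    _Static_assert(BPF_XCHG == 0xe1 && BPF_CMPXCHG == 0xf1, "exchange imms");
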
/linux/tools/include/uapi/linux/

bpf.h
   50  #define BPF_FETCH 0x01 /* not an opcode on its own, used to build others */
   51  #define BPF_XCHG (0xe0 | BPF_FETCH) /* atomic exchange */
   52  #define BPF_CMPXCHG (0xf0 | BPF_FETCH) /* atomic compare-and-write */

/linux/arch/riscv/net/

bpf_jit_comp64.c
  511  case BPF_ADD | BPF_FETCH:   in emit_atomic()
  517  case BPF_AND | BPF_FETCH:   in emit_atomic()
  523  case BPF_OR | BPF_FETCH:    in emit_atomic()
  529  case BPF_XOR | BPF_FETCH:   in emit_atomic()