xref: /linux/tools/testing/selftests/bpf/progs/verifier_may_goto_1.c (revision d0d106a2bd21499901299160744e5fe9f4c83ddb)
// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include "../../../include/linux/filter.h"
#include "bpf_misc.h"

SEC("raw_tp")
__description("may_goto 0")
__arch_x86_64
/* A may_goto with offset 0 branches to the very next insn, i.e. its
 * jump target equals its fallthrough target, so it is a no-op.  Expect
 * the verifier to delete both occurrences: the rewritten (xlated)
 * program is just "r0 = 1; exit".
 */
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_simple(void)
{
	asm volatile (
	".8byte %[may_goto];"	/* may_goto +0: no-op, removed by verifier */
	"r0 = 1;"
	".8byte %[may_goto];"	/* may_goto +0: no-op, removed by verifier */
	"exit;"
	:
	: __imm_insn(may_goto, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
26 
SEC("raw_tp")
__description("batch 2 of may_goto 0")
__arch_x86_64
/* Two back-to-back pairs of may_goto +0.  Each insn's target is its
 * own fallthrough, so the whole batch is a no-op; the verifier is
 * expected to remove all four insns, leaving only "r0 = 1; exit".
 */
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_0(void)
{
	asm volatile (
	".8byte %[may_goto1];"	/* may_goto +0: no-op */
	".8byte %[may_goto1];"	/* may_goto +0: no-op */
	"r0 = 1;"
	".8byte %[may_goto1];"	/* may_goto +0: no-op */
	".8byte %[may_goto1];"	/* may_goto +0: no-op */
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
46 
SEC("raw_tp")
__description("may_goto batch with offsets 2/1/0")
__arch_x86_64
/* Three consecutive may_goto insns with offsets 2, 1 and 0.  Counted
 * from the insn following each one, all three resolve to the same
 * target: the first insn after the batch.  Since jumping and falling
 * through land in the same place, the whole batch is a no-op and the
 * verifier is expected to remove both batches, leaving "r0 = 1; exit".
 */
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_1(void)
{
	asm volatile (
	".8byte %[may_goto1];"	/* may_goto +2: target is "r0 = 1" */
	".8byte %[may_goto2];"	/* may_goto +1: target is "r0 = 1" */
	".8byte %[may_goto3];"	/* may_goto +0: target is "r0 = 1" */
	"r0 = 1;"
	".8byte %[may_goto1];"	/* may_goto +2: target is "exit" */
	".8byte %[may_goto2];"	/* may_goto +1: target is "exit" */
	".8byte %[may_goto3];"	/* may_goto +0: target is "exit" */
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto2, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 1 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
70 
SEC("raw_tp")
__description("may_goto batch with offsets 2/0")
__arch_x86_64
/* Here the two may_goto insns have *different* targets (offset 2 jumps
 * past "r0 = 1" to "r0 = 2"; offset 0 is a no-op), so they cannot be
 * collapsed as a batch.  Expect the first may_goto to be expanded into
 * the full counter sequence below (xlated insns 0-4) and the second,
 * being a no-op, to be removed.
 *
 * 8388608 == 2^23 is the iteration budget kept in the on-stack slot at
 * r10-8 — presumably BPF_MAX_LOOPS; verify against the kernel headers.
 * When the budget hits zero, "goto pc+3" (insn 2) lands on insn 6
 * ("r0 = 2"), matching the original offset-2 jump target.
 */
__xlated("0: *(u64 *)(r10 -8) = 8388608")
__xlated("1: r11 = *(u64 *)(r10 -8)")
__xlated("2: if r11 == 0x0 goto pc+3")
__xlated("3: r11 -= 1")
__xlated("4: *(u64 *)(r10 -8) = r11")
__xlated("5: r0 = 1")
__xlated("6: r0 = 2")
__xlated("7: exit")
__success
__naked void may_goto_batch_2(void)
{
	asm volatile (
	".8byte %[may_goto1];"	/* may_goto +2: target is "r0 = 2" */
	".8byte %[may_goto3];"	/* may_goto +0: no-op, removed */
	"r0 = 1;"
	"r0 = 2;"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
96 
/* License declaration read by the BPF loader via the "license" section. */
char _license[] SEC("license") = "GPL";
98