/* tools/testing/selftests/bpf/verifier/atomic_fetch.c */
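/* Verifier selftests for BPF atomic fetch instructions (BPF_ATOMIC with
 * BPF_FETCH, and BPF_XCHG). The first four tests check that an atomic
 * fetch cannot be used to launder a spilled map pointer out to user
 * space; the rest are a macro-generated matrix of functional checks.
 * Note: fixup_map_array_48b patches the BPF_LD_MAP_FD instructions at
 * insn indices 2 and 4 (BPF_LD_IMM64 occupies two insn slots).
 */
/* Spill a map pointer to fp[-8], recover it via a 64-bit fetch-AND with
 * -1 (which leaves the value unchanged), reload it from the stack slot,
 * and try to store it into map memory readable by user space. Allowed
 * for privileged programs, rejected for unprivileged ones.
 */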
{
	"atomic dw/fetch and address leakage of (map ptr & -1) via stack slot",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
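		/* r1 = atomic64_fetch_and(fp[-8], r1): with an all-ones
		 * mask, memory is unchanged and r1 now holds the old
		 * value, i.e. the spilled map pointer.
		 */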
		BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_LDX_MEM(BPF_DW, BPF_REG_9, BPF_REG_2, 0),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.errstr_unpriv = "leaking pointer from stack off -8",
},
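/* As above, but leak the pointer via the value returned by the fetch
 * (left in r1) instead of reloading it from the stack slot.
 */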
{
	"atomic dw/fetch and address leakage of (map ptr & -1) via returned value",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_1),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.errstr_unpriv = "leaking pointer from stack off -8",
},
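/* 32-bit variant: the BPF_W fetch covers only half of the spilled
 * 64-bit pointer, so the verifier rejects the partial register fill
 * outright, for privileged and unprivileged alike.
 */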
{
	"atomic w/fetch and address leakage of (map ptr & -1) via stack slot",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_LDX_MEM(BPF_DW, BPF_REG_9, BPF_REG_2, 0),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = REJECT,
	.errstr = "invalid size of register fill",
},
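/* 32-bit variant of the returned-value leak; rejected for the same
 * reason as the stack-slot case above.
 */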
{
	"atomic w/fetch and address leakage of (map ptr & -1) via returned value",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_1),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = REJECT,
	.errstr = "invalid size of register fill",
},
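/* __ATOMIC_FETCH_OP_TEST generates one functional test per atomic fetch
 * operation: it seeds fp[-8] with operand1, applies op with operand2,
 * then verifies both the returned old value (left in src_reg) and the
 * final memory contents (expect). It is instantiated below for
 * ADD/AND/OR/XOR (all with BPF_FETCH) and XCHG, across several src/dst
 * register pairs, including R0 on either side.
 */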
#define __ATOMIC_FETCH_OP_TEST(src_reg, dst_reg, operand1, op, operand2, expect) \
	{								\
		"atomic fetch " #op ", src=" #src_reg " dst=" #dst_reg,	\
		.insns = {						\
			/* u64 val = operand1; */			\
			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, operand1),	\
			/* u64 old = atomic_fetch_<op>(&val, operand2); */ \
			BPF_MOV64_REG(dst_reg, BPF_REG_10),		\
			BPF_MOV64_IMM(src_reg, operand2),		\
			BPF_ATOMIC_OP(BPF_DW, op,			\
				      dst_reg, src_reg, -8),		\
			/* if (old != operand1) exit(1); */		\
			BPF_JMP_IMM(BPF_JEQ, src_reg, operand1, 2),	\
			BPF_MOV64_IMM(BPF_REG_0, 1),			\
			BPF_EXIT_INSN(),				\
			/* if (val != expect) exit(2); */		\
			BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -8),	\
			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, expect, 2),	\
			BPF_MOV64_IMM(BPF_REG_0, 2),			\
			BPF_EXIT_INSN(),				\
			/* exit(0); */					\
			BPF_MOV64_IMM(BPF_REG_0, 0),			\
			BPF_EXIT_INSN(),				\
		},							\
		.result = ACCEPT,					\
	}
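/* BPF_ADD | BPF_FETCH: old == 1, val becomes 1 + 2 == 3 */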
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 1, BPF_ADD | BPF_FETCH, 2, 3),
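/* BPF_AND | BPF_FETCH: old == 0x010, val becomes 0x010 & 0x011 == 0x010 */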
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
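/* BPF_OR | BPF_FETCH: old == 0x010, val becomes 0x010 | 0x011 == 0x011 */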
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
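/* BPF_XOR | BPF_FETCH: old == 0x010, val becomes 0x010 ^ 0x011 == 0x001 */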
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
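/* BPF_XCHG: old == 0x010, val becomes 0x011 */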
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_XCHG, 0x011, 0x011),
#undef __ATOMIC_FETCH_OP_TEST