xref: /linux/tools/testing/selftests/bpf/progs/verifier_align.c (revision f5ad4101009e7f5f5984ffea6923d4fcd470932a)
1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */
3 /* Converted from tools/testing/selftests/bpf/prog_tests/align.c */
4 
5 #include <linux/bpf.h>
6 #include <bpf/bpf_helpers.h>
7 #include "bpf_misc.h"
8 
9 /* Four tests of known constants.  These aren't staggeringly
10  * interesting since we track exact values now.
11  */
12 
/* Known-constant tracking: each immediate mov into R3 must show up in the
 * verifier log as the exact scalar value.
 */
13 SEC("tc")
14 __success __log_level(2)
15 __flag(BPF_F_ANY_ALIGNMENT)
16 __msg("0: R1=ctx() R10=fp0")
17 __msg("0: {{.*}} R3=2")
18 __msg("1: {{.*}} R3=4")
19 __msg("2: {{.*}} R3=8")
20 __msg("3: {{.*}} R3=16")
21 __msg("4: {{.*}} R3=32")
22 __naked void mov(void)
23 {
24 	asm volatile ("					\
25 	r3 = 2;						\
26 	r3 = 4;						\
27 	r3 = 8;						\
28 	r3 = 16;					\
29 	r3 = 32;					\
30 	r0 = 0;						\
31 	exit;						\
32 "	::: __clobber_all);
33 }
34 
/* Known-constant tracking through shifts: R3 is doubled four times then
 * shifted back down; R4 starts at 32 and is halved four times.  The verifier
 * must track the exact value after every shift.
 */
35 SEC("tc")
36 __success __log_level(2)
37 __flag(BPF_F_ANY_ALIGNMENT)
38 __msg("0: R1=ctx() R10=fp0")
39 __msg("0: {{.*}}R3=1")
40 __msg("1: {{.*}}R3=2")
41 __msg("2: {{.*}}R3=4")
42 __msg("3: {{.*}}R3=8")
43 __msg("4: {{.*}}R3=16")
44 __msg("5: {{.*}}R3=1")
45 __msg("6: {{.*}}R4=32")
46 __msg("7: {{.*}}R4=16")
47 __msg("8: {{.*}}R4=8")
48 __msg("9: {{.*}}R4=4")
49 __msg("10: {{.*}}R4=2")
50 __naked void shift(void)
51 {
52 	asm volatile ("					\
53 	r3 = 1;						\
54 	r3 <<= 1;					\
55 	r3 <<= 1;					\
56 	r3 <<= 1;					\
57 	r3 <<= 1;					\
58 	r3 >>= 4;					\
59 	r4 = 32;					\
60 	r4 >>= 1;					\
61 	r4 >>= 1;					\
62 	r4 >>= 1;					\
63 	r4 >>= 1;					\
64 	r0 = 0;						\
65 	exit;						\
66 "	::: __clobber_all);
67 }
68 
/* Known-constant tracking through additions: exact values must be maintained
 * as constants are added to R3 and R4.
 */
69 SEC("tc")
70 __success __log_level(2)
71 __flag(BPF_F_ANY_ALIGNMENT)
72 __msg("0: R1=ctx() R10=fp0")
73 __msg("0: {{.*}}R3=4")
74 __msg("1: {{.*}}R3=8")
75 __msg("2: {{.*}}R3=10")
76 __msg("3: {{.*}}R4=8")
77 __msg("4: {{.*}}R4=12")
78 __msg("5: {{.*}}R4=14")
79 __naked void addsub(void)
80 {
81 	asm volatile ("					\
82 	r3 = 4;						\
83 	r3 += 4;					\
84 	r3 += 2;					\
85 	r4 = 8;						\
86 	r4 += 4;					\
87 	r4 += 2;					\
88 	r0 = 0;						\
89 	exit;						\
90 "	::: __clobber_all);
91 }
92 
/* Known-constant tracking through multiplications: 7 * 1 * 2 * 4 == 56,
 * tracked exactly at every step.
 */
93 SEC("tc")
94 __success __log_level(2)
95 __flag(BPF_F_ANY_ALIGNMENT)
96 __msg("0: R1=ctx() R10=fp0")
97 __msg("0: {{.*}}R3=7")
98 __msg("1: {{.*}}R3=7")
99 __msg("2: {{.*}}R3=14")
100 __msg("3: {{.*}}R3=56")
101 __naked void mul(void)
102 {
103 	asm volatile ("					\
104 	r3 = 7;						\
105 	r3 *= 1;					\
106 	r3 *= 2;					\
107 	r3 *= 4;					\
108 	r0 = 0;						\
109 	exit;						\
110 "	::: __clobber_all);
111 }
112 
113 /* Tests using unknown values */
114 
/* Load skb->data into r2 and skb->data_end into r3. */
115 #define PREP_PKT_POINTERS				\
116 	"r2 = *(u32*)(r1 + %[__sk_buff_data]);"		\
117 	"r3 = *(u32*)(r1 + %[__sk_buff_data_end]);"
118 
/* Set up packet pointers, bounds-check that at least 8 bytes of packet data
 * are available (exiting early otherwise), then load one unknown byte from
 * the packet into DST_REG.  LBL must be a unique label per expansion.
 */
119 #define __LOAD_UNKNOWN(DST_REG, LBL)			\
120 	"r2 = *(u32*)(r1 + %[__sk_buff_data]);"		\
121 	"r3 = *(u32*)(r1 + %[__sk_buff_data_end]);"	\
122 	"r0 = r2;"					\
123 	"r0 += 8;"					\
124 	"if r3 >= r0 goto " LBL ";"			\
125 	"exit;"						\
126 LBL ":"							\
127 	DST_REG " = *(u8*)(r2 + 0);"
128 
/* Convenience wrapper usable once per function (fixed label "l99_%="). */
129 #define LOAD_UNKNOWN(DST_REG) __LOAD_UNKNOWN(DST_REG, "l99_%=")
130 
/* Unknown-value tracking through shifts: an unknown byte (var_off 0xff) is
 * shifted left in R3 (mask grows 0x1fe..0xff0) and, separately in R4,
 * shifted left by 5 then right four times (mask 0x1fe0 shrinking to 0x1fe).
 * The verifier must track the tnum mask exactly after each shift.
 */
131 SEC("tc")
132 __success __log_level(2)
133 __flag(BPF_F_ANY_ALIGNMENT)
134 __msg("6: {{.*}} R2=pkt(r=8)")
135 __msg("6: {{.*}} R3={{[^)]*}}var_off=(0x0; 0xff)")
136 __msg("7: {{.*}} R3={{[^)]*}}var_off=(0x0; 0x1fe)")
137 __msg("8: {{.*}} R3={{[^)]*}}var_off=(0x0; 0x3fc)")
138 __msg("9: {{.*}} R3={{[^)]*}}var_off=(0x0; 0x7f8)")
139 __msg("10: {{.*}} R3={{[^)]*}}var_off=(0x0; 0xff0)")
140 __msg("12: {{.*}} R3=pkt_end()")
141 __msg("17: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff)")
142 __msg("18: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x1fe0)")
143 __msg("19: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff0)")
144 __msg("20: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x7f8)")
145 __msg("21: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x3fc)")
146 __msg("22: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x1fe)")
147 __naked void unknown_shift(void)
148 {
149 	asm volatile ("					\
150 	" __LOAD_UNKNOWN("r3", "l99_%=") "		\
151 	r3 <<= 1;					\
152 	r3 <<= 1;					\
153 	r3 <<= 1;					\
154 	r3 <<= 1;					\
155 	" __LOAD_UNKNOWN("r4", "l98_%=") "		\
156 	r4 <<= 5;					\
157 	r4 >>= 1;					\
158 	r4 >>= 1;					\
159 	r4 >>= 1;					\
160 	r4 >>= 1;					\
161 	r0 = 0;						\
162 	exit;						\
163 "	:
164 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
165 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
166 	: __clobber_all);
167 }
168 
/* Unknown-value tracking through multiplications: R4 is repeatedly reloaded
 * from the unknown byte in R3 (var_off 0xff) and multiplied by powers of
 * two, so its mask must scale accordingly (0xff, 0x1fe, 0x3fc, 0x7f8, and
 * finally 0x7f8 * 2 == 0xff0).
 */
169 SEC("tc")
170 __success __log_level(2)
171 __flag(BPF_F_ANY_ALIGNMENT)
172 __msg("6: {{.*}} R3={{[^)]*}}var_off=(0x0; 0xff)")
173 __msg("7: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff)")
174 __msg("8: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff)")
175 __msg("9: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff)")
176 __msg("10: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x1fe)")
177 __msg("11: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff)")
178 __msg("12: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x3fc)")
179 __msg("13: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff)")
180 __msg("14: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x7f8)")
181 __msg("15: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff0)")
182 __naked void unknown_mul(void)
183 {
184 	asm volatile ("					\
185 	" LOAD_UNKNOWN("r3") "				\
186 	r4 = r3;					\
187 	r4 *= 1;					\
188 	r4 = r3;					\
189 	r4 *= 2;					\
190 	r4 = r3;					\
191 	r4 *= 4;					\
192 	r4 = r3;					\
193 	r4 *= 8;					\
194 	r4 *= 2;					\
195 	r0 = 0;						\
196 	exit;						\
197 "	:
198 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
199 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
200 	: __clobber_all);
201 }
202 
/* Packet pointer with constant offsets only: after skipping a 14-byte
 * ethernet header and bounds-checking 4 bytes, u8/u16/u32 loads at small
 * constant offsets must all be accepted (no BPF_F_ANY_ALIGNMENT here, so
 * alignment is actually enforced).
 */
203 SEC("tc")
204 __success __log_level(2)
205 __msg("2: {{.*}} R5=pkt(r=0)")
206 __msg("4: {{.*}} R5=pkt(r=0,imm=14)")
207 __msg("5: {{.*}} R4=pkt(r=0,imm=14)")
208 __msg("9: {{.*}} R5=pkt(r=18,imm=14)")
209 __msg("10: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xff){{.*}} R5=pkt(r=18,imm=14)")
210 __msg("13: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xffff)")
211 __msg("14: {{.*}} R4={{[^)]*}}var_off=(0x0; 0xffff)")
212 __naked void packet_const_offset(void)
213 {
214 	asm volatile ("					\
215 	" PREP_PKT_POINTERS "				\
216 	r5 = r2;					\
217 	r0 = 0;						\
218 	/* Skip over ethernet header.  */		\
219 	r5 += 14;					\
220 	r4 = r5;					\
221 	r4 += 4;					\
222 	if r3 >= r4 goto l0_%=;				\
223 	exit;						\
224 l0_%=:	r4 = *(u8*)(r5 + 0);				\
225 	r4 = *(u8*)(r5 + 1);				\
226 	r4 = *(u8*)(r5 + 2);				\
227 	r4 = *(u8*)(r5 + 3);				\
228 	r4 = *(u16*)(r5 + 0);				\
229 	r4 = *(u16*)(r5 + 2);				\
230 	r4 = *(u32*)(r5 + 0);				\
231 	r0 = 0;						\
232 	exit;						\
233 "	:
234 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
235 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
236 	: __clobber_all);
237 }
238 
/* Variable offsets added to a packet pointer in both orders (constant first
 * vs. variable first), plus multiple accumulated variable offsets; the
 * verifier must keep the var_off alignment information in every case.
 */
239 SEC("tc")
240 __success __log_level(2)
241 __flag(BPF_F_ANY_ALIGNMENT)
242 /* Calculated offset in R6 has unknown value, but known
243  * alignment of 4.
244  */
245 __msg("6: {{.*}} R2=pkt(r=8)")
246 __msg("7: {{.*}} R6={{[^)]*}}var_off=(0x0; 0x3fc)")
247 /* Offset is added to packet pointer R5, resulting in
248  * known fixed offset, and variable offset from R6.
249  */
250 __msg("11: {{.*}} R5=pkt(id=1,{{[^)]*}},var_off=(0x2; 0x7fc)")
251 /* At the time the word size load is performed from R5,
252  * its total offset is NET_IP_ALIGN + reg->off (0) +
253  * reg->aux_off (14) which is 16.  Then the variable
254  * offset is considered using reg->aux_off_align which
255  * is 4 and meets the load's requirements.
256  */
257 __msg("15: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
258 /* Variable offset is added to R5 packet pointer,
259  * resulting in auxiliary alignment of 4. To avoid BPF
260  * verifier's precision backtracking logging
261  * interfering we also have a no-op R4 = R5
262  * instruction to validate R5 state. We also check
263  * that R4 is what it should be in such case.
264  */
265 __msg("18: {{.*}} R4={{[^)]*}}var_off=(0x0; 0x3fc){{.*}} R5={{[^)]*}}var_off=(0x0; 0x3fc)")
266 /* Constant offset is added to R5, resulting in
267  * reg->off of 14.
268  */
269 __msg("19: {{.*}} R5=pkt(id=2,{{[^)]*}}var_off=(0x2; 0x7fc)")
270 /* At the time the word size load is performed from R5,
271  * its total fixed offset is NET_IP_ALIGN + reg->off
272  * (14) which is 16.  Then the variable offset is 4-byte
273  * aligned, so the total offset is 4-byte aligned and
274  * meets the load's requirements.
275  */
276 __msg("24: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
277 /* Constant offset is added to R5 packet pointer,
278  * resulting in reg->off value of 14.
279  */
280 __msg("26: {{.*}} R5=pkt(r=8,imm=14)")
281 /* Variable offset is added to R5, resulting in a
282  * variable offset of (4n). See comment for insn #18
283  * for R4 = R5 trick.
284  */
285 __msg("28: {{.*}} R4={{[^)]*}}var_off=(0x2; 0x7fc){{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
286 /* Constant is added to R5 again, setting reg->off to 18. */
287 __msg("29: {{.*}} R5=pkt(id=3,{{[^)]*}}var_off=(0x2; 0x7fc)")
288 /* And once more we add a variable; resulting var_off
289  * is still (4n), fixed offset is not changed.
290  * Also, we create a new reg->id.
291  */
292 __msg("31: {{.*}} R4={{[^)]*}}var_off=(0x2; 0xffc){{.*}} R5={{[^)]*}}var_off=(0x2; 0xffc)")
293 /* At the time the word size load is performed from R5,
294  * its total fixed offset is NET_IP_ALIGN + reg->off (18)
295  * which is 20.  Then the variable offset is (4n), so
296  * the total offset is 4-byte aligned and meets the
297  * load's requirements.
298  */
299 __msg("35: {{.*}} R5={{[^)]*}}var_off=(0x2; 0xffc)")
300 __naked void packet_variable_offset(void)
301 {
302 	asm volatile ("					\
303 	" LOAD_UNKNOWN("r6") "				\
304 	r6 <<= 2;					\
305 	/* First, add a constant to the R5 packet pointer,\
306 	 * then a variable with a known alignment.	\
307 	 */						\
308 	r5 = r2;					\
309 	r5 += 14;					\
310 	r5 += r6;					\
311 	r4 = r5;					\
312 	r4 += 4;					\
313 	if r3 >= r4 goto l0_%=;				\
314 	exit;						\
315 l0_%=:	r4 = *(u32*)(r5 + 0);				\
316 	/* Now, test in the other direction.  Adding first\
317 	 * the variable offset to R5, then the constant.\
318 	 */						\
319 	r5 = r2;					\
320 	r5 += r6;					\
321 	r4 = r5;					\
322 	r5 += 14;					\
323 	r4 = r5;					\
324 	r4 += 4;					\
325 	if r3 >= r4 goto l1_%=;				\
326 	exit;						\
327 l1_%=:	r4 = *(u32*)(r5 + 0);				\
328 	/* Test multiple accumulations of unknown values\
329 	 * into a packet pointer.			\
330 	 */						\
331 	r5 = r2;					\
332 	r5 += 14;					\
333 	r5 += r6;					\
334 	r4 = r5;					\
335 	r5 += 4;					\
336 	r5 += r6;					\
337 	r4 = r5;					\
338 	r4 += 4;					\
339 	if r3 >= r4 goto l2_%=;				\
340 	exit;						\
341 l2_%=:	r4 = *(u32*)(r5 + 0);				\
342 	r0 = 0;						\
343 	exit;						\
344 "	:
345 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
346 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
347 	: __clobber_all);
348 }
349 
/* (4n+2)-aligned variable offset added to a packet pointer, then a second
 * (4n)-aligned variable offset on top; the sum stays (4n+2) so a u32 load
 * at the resulting NET_IP_ALIGN-adjusted address is still 4-byte aligned.
 */
350 SEC("tc")
351 __success __log_level(2)
352 __flag(BPF_F_ANY_ALIGNMENT)
353 /* Calculated offset in R6 has unknown value, but known
354  * alignment of 4.
355  */
356 __msg("6: {{.*}} R2=pkt(r=8)")
357 __msg("7: {{.*}} R6={{[^)]*}}var_off=(0x0; 0x3fc)")
358 /* Adding 14 makes R6 be (4n+2) */
359 __msg("8: {{.*}} R6={{[^)]*}}var_off=(0x2; 0x7fc)")
360 /* Packet pointer has (4n+2) offset */
361 __msg("11: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
362 __msg("12: {{.*}} R4={{[^)]*}}var_off=(0x2; 0x7fc)")
363 /* At the time the word size load is performed from R5,
364  * its total fixed offset is NET_IP_ALIGN + reg->off (0)
365  * which is 2.  Then the variable offset is (4n+2), so
366  * the total offset is 4-byte aligned and meets the
367  * load's requirements.
368  */
369 __msg("15: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
370 /* Newly read value in R6 was shifted left by 2, so has
371  * known alignment of 4.
372  */
373 __msg("17: {{.*}} R6={{[^)]*}}var_off=(0x0; 0x3fc)")
374 /* Added (4n) to packet pointer's (4n+2) var_off, giving
375  * another (4n+2).
376  */
377 __msg("19: {{.*}} R5={{[^)]*}}var_off=(0x2; 0xffc)")
378 __msg("20: {{.*}} R4={{[^)]*}}var_off=(0x2; 0xffc)")
379 /* At the time the word size load is performed from R5,
380  * its total fixed offset is NET_IP_ALIGN + reg->off (0)
381  * which is 2.  Then the variable offset is (4n+2), so
382  * the total offset is 4-byte aligned and meets the
383  * load's requirements.
384  */
385 __msg("23: {{.*}} R5={{[^)]*}}var_off=(0x2; 0xffc)")
386 __naked void packet_variable_offset_2(void)
387 {
388 	asm volatile ("					\
389 	/* Create an unknown offset, (4n+2)-aligned */	\
390 	" LOAD_UNKNOWN("r6") "			\
391 	r6 <<= 2;					\
392 	r6 += 14;					\
393 	/* Add it to the packet pointer */		\
394 	r5 = r2;					\
395 	r5 += r6;					\
396 	/* Check bounds and perform a read */		\
397 	r4 = r5;					\
398 	r4 += 4;					\
399 	if r3 >= r4 goto l0_%=;				\
400 	exit;						\
401 l0_%=:	r6 = *(u32*)(r5 + 0);				\
402 	/* Make a (4n) offset from the value we just read */\
403 	r6 &= 0xff;					\
404 	r6 <<= 2;					\
405 	/* Add it to the packet pointer */		\
406 	r5 += r6;					\
407 	/* Check bounds and perform a read */		\
408 	r4 = r5;					\
409 	r4 += 4;					\
410 	if r3 >= r4 goto l1_%=;				\
411 	exit;						\
412 l1_%=:	r6 = *(u32*)(r5 + 0);				\
413 	r0 = 0;						\
414 	exit;						\
415 "	:
416 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
417 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
418 	: __clobber_all);
419 }
420 
/* Negative test: (pkt_end - pkt_start) << 2 has no useful bounds, so even
 * after an s>=0 check its smax is huge; adding 4 to the resulting packet
 * pointer must be rejected with a pkt-pointer-offset error.
 */
421 SEC("tc")
422 __failure __log_level(2)
423 __msg("3: {{.*}} R5=pkt_end()")
424 /* (ptr - ptr) << 2 == unknown, (4n) */
425 __msg("5: {{.*}} R5={{[^)]*}}var_off=(0x0; 0xfffffffffffffffc)")
426 /* (4n) + 14 == (4n+2).  We blow our bounds, because
427  * the add could overflow.
428  */
429 __msg("6: {{.*}} R5={{[^)]*}}var_off=(0x2; 0xfffffffffffffffc)")
430 /* Checked s>=0 */
431 __msg("9: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7ffffffffffffffc)")
432 /* packet pointer + nonnegative (4n+2) */
433 __msg("11: {{.*}} R4={{[^)]*}}var_off=(0x2; 0x7ffffffffffffffc){{.*}} R6={{[^)]*}}var_off=(0x2; 0x7ffffffffffffffc)")
434 __msg("12: (07) r4 += 4")
435 /* packet smax bound overflow */
436 __msg("pkt pointer offset -9223372036854775808 is not allowed")
437 __naked void dubious_pointer_arithmetic(void)
438 {
439 	asm volatile ("					\
440 	" PREP_PKT_POINTERS "				\
441 	r0 = 0;						\
442 	/* (ptr - ptr) << 2 */				\
443 	r5 = r3;					\
444 	r5 -= r2;					\
445 	r5 <<= 2;					\
446 	/* We have a (4n) value.  Let's make a packet offset\
447 	 * out of it.  First add 14, to make it a (4n+2)\
448 	 */						\
449 	r5 += 14;					\
450 	/* Then make sure it's nonnegative */		\
451 	if r5 s>= 0 goto l0_%=;				\
452 	exit;						\
453 l0_%=:	/* Add it to packet pointer */			\
454 	r6 = r2;					\
455 	r6 += r5;					\
456 	/* Check bounds and perform a read */		\
457 	r4 = r6;					\
458 	r4 += 4;					\
459 	if r3 >= r4 goto l1_%=;				\
460 	exit;						\
461 l1_%=:	r4 = *(u32*)(r6 + 0);				\
462 	exit;						\
463 "	:
464 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
465 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
466 	: __clobber_all);
467 }
468 
/* Subtracting one unknown (4n) value from an unknown (4n+2) value blows the
 * unsigned bounds, but an s>=0 check restores them; the (4n+2) alignment
 * survives and the u32 packet load is accepted.
 */
469 SEC("tc")
470 __success __log_level(2)
471 __flag(BPF_F_ANY_ALIGNMENT)
472 /* Calculated offset in R6 has unknown value, but known
473  * alignment of 4.
474  */
475 __msg("6: {{.*}} R2=pkt(r=8)")
476 __msg("8: {{.*}} R6={{[^)]*}}var_off=(0x0; 0x3fc)")
477 /* Adding 14 makes R6 be (4n+2) */
478 __msg("9: {{.*}} R6={{[^)]*}}var_off=(0x2; 0x7fc)")
479 /* New unknown value in R7 is (4n) */
480 __msg("10: {{.*}} R7={{[^)]*}}var_off=(0x0; 0x3fc)")
481 /* Subtracting it from R6 blows our unsigned bounds */
482 __msg("11: {{.*}} R6={{[^)]*}}var_off=(0x2; 0xfffffffffffffffc)")
483 /* Checked s>= 0 */
484 __msg("14: {{.*}} R6={{[^)]*}}var_off=(0x2; 0x7fc)")
485 /* At the time the word size load is performed from R5,
486  * its total fixed offset is NET_IP_ALIGN + reg->off (0)
487  * which is 2.  Then the variable offset is (4n+2), so
488  * the total offset is 4-byte aligned and meets the
489  * load's requirements.
490  */
491 __msg("20: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
492 __naked void variable_subtraction(void)
493 {
494 	asm volatile ("					\
495 	/* Create an unknown offset, (4n+2)-aligned */	\
496 	" LOAD_UNKNOWN("r6") "				\
497 	r7 = r6;					\
498 	r6 <<= 2;					\
499 	r6 += 14;					\
500 	/* Create another unknown, (4n)-aligned, and subtract\
501 	 * it from the first one			\
502 	 */						\
503 	r7 <<= 2;					\
504 	r6 -= r7;					\
505 	/* Bounds-check the result */			\
506 	if r6 s>= 0 goto l0_%=;				\
507 	exit;						\
508 l0_%=:	/* Add it to the packet pointer */		\
509 	r5 = r2;					\
510 	r5 += r6;					\
511 	/* Check bounds and perform a read */		\
512 	r4 = r5;					\
513 	r4 += 4;					\
514 	if r3 >= r4 goto l1_%=;				\
515 	exit;						\
516 l1_%=:	r6 = *(u32*)(r5 + 0);				\
517 	exit;						\
518 "	:
519 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
520 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
521 	: __clobber_all);
522 }
523 
/* Subtracting a bounded (4n+2) unknown from a packet pointer temporarily
 * overflows the unsigned bounds; adding back a (4n) unknown that is at
 * least as large restores nice bounds, and the final u32 load is accepted.
 */
524 SEC("tc")
525 __success __log_level(2)
526 __flag(BPF_F_ANY_ALIGNMENT)
527 /* Calculated offset in R6 has unknown value, but known
528  * alignment of 4.
529  */
530 __msg("6: {{.*}} R2=pkt(r=8)")
531 __msg("9: {{.*}} R6={{[^)]*}}var_off=(0x0; 0x3c)")
532 /* Adding 14 makes R6 be (4n+2) */
533 __msg("10: {{.*}} R6={{[^)]*}}var_off=(0x2; 0x7c)")
534 /* Subtracting from packet pointer overflows ubounds */
535 __msg("13: R5={{[^)]*}}var_off=(0xffffffffffffff82; 0x7c)")
536 /* New unknown value in R7 is (4n), >= 76 */
537 __msg("14: {{.*}} R7={{[^)]*}}var_off=(0x0; 0x7fc)")
538 /* Adding it to packet pointer gives nice bounds again */
539 __msg("16: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
540 /* At the time the word size load is performed from R5,
541  * its total fixed offset is NET_IP_ALIGN + reg->off (0)
542  * which is 2.  Then the variable offset is (4n+2), so
543  * the total offset is 4-byte aligned and meets the
544  * load's requirements.
545  */
546 __msg("20: {{.*}} R5={{[^)]*}}var_off=(0x2; 0x7fc)")
547 __naked void pointer_variable_subtraction(void)
548 {
549 	asm volatile ("					\
550 	/* Create an unknown offset, (4n+2)-aligned and bounded\
551 	 * to [14,74]					\
552 	 */						\
553 	" LOAD_UNKNOWN("r6") "				\
554 	r7 = r6;					\
555 	r6 &= 0xf;					\
556 	r6 <<= 2;					\
557 	r6 += 14;					\
558 	/* Subtract it from the packet pointer */	\
559 	r5 = r2;					\
560 	r5 -= r6;					\
561 	/* Create another unknown, (4n)-aligned and >= 74.\
562 	 * That in fact means >= 76, since 74 mod 4 == 2\
563 	 */						\
564 	r7 <<= 2;					\
565 	r7 += 76;					\
566 	/* Add it to the packet pointer */		\
567 	r5 += r7;					\
568 	/* Check bounds and perform a read */		\
569 	r4 = r5;					\
570 	r4 += 4;					\
571 	if r3 >= r4 goto l0_%=;				\
572 	exit;						\
573 l0_%=:	r6 = *(u32*)(r5 + 0);				\
574 	exit;						\
575 "	:
576 	: __imm_const(__sk_buff_data, offsetof(struct __sk_buff, data)),
577 	  __imm_const(__sk_buff_data_end, offsetof(struct __sk_buff, data_end))
578 	: __clobber_all);
579 }
580 
/* License string exported to the kernel loader via the "license" section. */
581 char _license[] SEC("license") = "GPL";
582