1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Testsuite for BPF interpreter and BPF JIT compiler
4 *
5 * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
6 */
7
8 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
9
10 #include <linux/init.h>
11 #include <linux/module.h>
12 #include <linux/filter.h>
13 #include <linux/bpf.h>
14 #include <linux/skbuff.h>
15 #include <linux/netdevice.h>
16 #include <linux/if_vlan.h>
17 #include <linux/prandom.h>
18 #include <linux/highmem.h>
19 #include <linux/sched.h>
20
21 /* General test specific settings */
22 #define MAX_SUBTESTS 3
23 #define MAX_TESTRUNS 1000
24 #define MAX_DATA 128
25 #define MAX_INSNS 512
26 #define MAX_K 0xffffFFFF
27
28 /* Few constants used to init test 'skb' */
29 #define SKB_TYPE 3
30 #define SKB_MARK 0x1234aaaa
31 #define SKB_HASH 0x1234aaab
32 #define SKB_QUEUE_MAP 123
33 #define SKB_VLAN_TCI 0xffff
34 #define SKB_VLAN_PRESENT 1
35 #define SKB_DEV_IFINDEX 577
36 #define SKB_DEV_TYPE 588
37
38 /* Redefine REGs to make tests less verbose */
39 #define R0 BPF_REG_0
40 #define R1 BPF_REG_1
41 #define R2 BPF_REG_2
42 #define R3 BPF_REG_3
43 #define R4 BPF_REG_4
44 #define R5 BPF_REG_5
45 #define R6 BPF_REG_6
46 #define R7 BPF_REG_7
47 #define R8 BPF_REG_8
48 #define R9 BPF_REG_9
49 #define R10 BPF_REG_10
50
51 /* Flags that can be passed to test cases */
52 #define FLAG_NO_DATA BIT(0)
53 #define FLAG_EXPECTED_FAIL BIT(1)
54 #define FLAG_SKB_FRAG BIT(2)
55 #define FLAG_VERIFIER_ZEXT BIT(3)
56 #define FLAG_LARGE_MEM BIT(4)
57
58 enum {
59 CLASSIC = BIT(6), /* Old BPF instructions only. */
60 INTERNAL = BIT(7), /* Extended instruction set. */
61 };
62
63 #define TEST_TYPE_MASK (CLASSIC | INTERNAL)
64
/*
 * Descriptor for a single BPF test case. A test supplies either a static
 * instruction array (classic or eBPF, selected via the 'aux' flags) or a
 * fill_helper callback that generates the program at runtime into u.ptr.
 */
struct bpf_test {
	const char *descr;	/* human-readable test name */
	union {
		struct sock_filter insns[MAX_INSNS];	/* classic BPF program */
		struct bpf_insn insns_int[MAX_INSNS];	/* eBPF program */
		struct {		/* runtime-generated program */
			void *insns;
			unsigned int len;
		} ptr;
	} u;
	__u8 aux;		/* CLASSIC/INTERNAL plus FLAG_* bits */
	__u8 data[MAX_DATA];	/* skb payload used when running the test */
	struct {
		int data_size;	/* payload length for this subtest */
		__u32 result;	/* expected program return value */
	} test[MAX_SUBTESTS];
	int (*fill_helper)(struct bpf_test *self);
	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
	__u8 frag_data[MAX_DATA];
	int stack_depth; /* for eBPF only, since tests don't call verifier */
	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
};
87
88 /* Large test cases need separate allocation and fill handler. */
89
bpf_fill_maxinsns1(struct bpf_test * self)90 static int bpf_fill_maxinsns1(struct bpf_test *self)
91 {
92 unsigned int len = BPF_MAXINSNS;
93 struct sock_filter *insn;
94 __u32 k = ~0;
95 int i;
96
97 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
98 if (!insn)
99 return -ENOMEM;
100
101 for (i = 0; i < len; i++, k--)
102 insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
103
104 self->u.ptr.insns = insn;
105 self->u.ptr.len = len;
106
107 return 0;
108 }
109
/*
 * Build a maximum-size classic BPF program where every instruction is
 * an unconditional return of 0xfefefefe; only the first can execute.
 */
static int bpf_fill_maxinsns2(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
128
/*
 * Build a maximum-size classic BPF program: a long chain of ALU adds
 * with pseudo-random immediates, terminated by a return of the
 * accumulator. The PRNG is seeded with a fixed value, so the generated
 * program (and hence the expected result) is deterministic.
 */
static int bpf_fill_maxinsns3(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	struct rnd_state rnd;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	prandom_seed_state(&rnd, 3141592653589793238ULL);

	for (i = 0; i < len - 1; i++) {
		__u32 k = prandom_u32_state(&rnd);

		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
155
/*
 * Build a program that is one instruction longer than BPF_MAXINSNS.
 * Intended to exceed the classic BPF size limit; presumably used with
 * FLAG_EXPECTED_FAIL by its test entry (table not visible here).
 */
static int bpf_fill_maxinsns4(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS + 1;	/* deliberately over limit */
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
174
/*
 * Build a maximum-size program whose first instruction jumps over a sea
 * of 0xfefefefe returns straight to the final return of 0xabababab,
 * exercising a near-maximal forward BPF_JA offset.
 */
static int bpf_fill_maxinsns5(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Jump from insn 0 directly to insn[len - 1]. */
	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
197
/*
 * Build a maximum-size program that repeatedly loads the VLAN tag
 * present ancillary word into the accumulator and finally returns it.
 */
static int bpf_fill_maxinsns6(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_VLAN_TAG_PRESENT);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
219
/*
 * Build a maximum-size program that hammers the SKF_AD_CPU ancillary
 * load, then loads CPU into both A (via X) and A again and subtracts
 * them, so the result is 0 whenever both loads observe the same CPU.
 */
static int bpf_fill_maxinsns7(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 4; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_CPU);

	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);	/* X = A = cpu */
	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				   SKF_AD_CPU);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0); /* A -= X */
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
245
/*
 * Build a maximum-size program of conditional jumps with steadily
 * decreasing true-branch offsets, exercising the full range of jump
 * offsets in a single program. A is loaded with 0xffffffff up front
 * and finally returned.
 */
static int bpf_fill_maxinsns8(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i, jmp_off = len - 3;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);

	/* Each JGT compares A (0xffffffff) against 0xffffffff. */
	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
268
/*
 * Build a maximum-size eBPF program exercising extreme BPF_JA offsets:
 * insn 0 jumps forward to the tail, whose backward jump lands on insn 1,
 * which sets R0 = 0xcbababab and exits. The filler MOVs in between are
 * never executed.
 */
static int bpf_fill_maxinsns9(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);	/* to insn[len - 1] */
	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
	insn[2] = BPF_EXIT_INSN();

	for (i = 3; i < len - 2; i++)
		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);

	insn[len - 2] = BPF_EXIT_INSN();
	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));	/* back to insn[1] */

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
294
/*
 * Build a maximum-size eBPF program of interleaved forward and backward
 * BPF_JA jumps that ping-pong toward the middle and eventually fall
 * through to the tail, which sets R0 = 0xabababac and exits. Stresses
 * JIT handling of many mixed-direction jump offsets.
 */
static int bpf_fill_maxinsns10(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* First half jumps forward, second half jumps backward. */
	for (i = 0; i < hlen / 2; i++)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
	for (i = hlen - 1; i > hlen / 2; i--)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);

	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
	insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
	insn[hlen + 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
319
__bpf_fill_ja(struct bpf_test * self,unsigned int len,unsigned int plen)320 static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
321 unsigned int plen)
322 {
323 struct sock_filter *insn;
324 unsigned int rlen;
325 int i, j;
326
327 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
328 if (!insn)
329 return -ENOMEM;
330
331 rlen = (len % plen) - 1;
332
333 for (i = 0; i + plen < len; i += plen)
334 for (j = 0; j < plen; j++)
335 insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
336 plen - 1 - j, 0, 0);
337 for (j = 0; j < rlen; j++)
338 insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
339 0, 0);
340
341 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
342
343 self->u.ptr.insns = insn;
344 self->u.ptr.len = len;
345
346 return 0;
347 }
348
/* Maximum-size jump-chunk program. */
static int bpf_fill_maxinsns11(struct bpf_test *self)
{
	/* Hits 70 passes on x86_64 and triggers NOPs padding. */
	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
}
354
/*
 * Build a maximum-size program: a jump over a sea of BPF_LDX|BPF_MSH
 * instructions straight to the final return of 0xabababab.
 */
static int bpf_fill_maxinsns12(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Jump from insn 0 directly to insn[len - 1]. */
	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
377
/*
 * Build a maximum-size program that executes a long run of
 * BPF_LDX|BPF_MSH instructions, then loads 0xabababab, XORs it with X
 * and returns the result.
 */
static int bpf_fill_maxinsns13(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 3; i++)
		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);

	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
400
/* Small jump-chunk program (12 insns, chunks of 9). */
static int bpf_fill_ja(struct bpf_test *self)
{
	/* Hits exactly 11 passes on x86_64 JIT. */
	return __bpf_fill_ja(self, 12, 9);
}
406
/*
 * Build a maximum-size program alternating a packet byte load at offset
 * 0 with an SKF_AD_CPU ancillary load, ending with a return of 0xbee.
 * Exercises interleaving of absolute loads and ancillary loads.
 */
static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i += 2) {
		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
					 SKF_AD_OFF + SKF_AD_CPU);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
430
/*
 * Build a maximum-size eBPF program that stores 42 to a stack slot,
 * performs a long run of BPF_STX_XADD (atomic add of R0 == 1) on that
 * slot of the given size (BPF_W or BPF_DW), and returns the final slot
 * value. Exercises the JIT's atomic add handling.
 */
static int __bpf_fill_stxdw(struct bpf_test *self, int size)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
	insn[1] = BPF_ST_MEM(size, R10, -40, 42);

	/* len - 4 atomic increments of the slot at R10 - 40. */
	for (i = 2; i < len - 2; i++)
		insn[i] = BPF_STX_XADD(size, R10, R0, -40);

	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
	insn[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	self->stack_depth = 40;	/* tests bypass the verifier, set explicitly */

	return 0;
}
456
/* 32-bit atomic add variant. */
static int bpf_fill_stxw(struct bpf_test *self)
{
	return __bpf_fill_stxdw(self, BPF_W);
}

/* 64-bit atomic add variant. */
static int bpf_fill_stxdw(struct bpf_test *self)
{
	return __bpf_fill_stxdw(self, BPF_DW);
}
466
/*
 * Emit a 64-bit immediate load into insns[0..1]; BPF_LD_IMM64 occupies
 * two instruction slots. Returns the number of slots written (2) so
 * callers can advance their instruction index.
 */
static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
{
	struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};

	memcpy(insns, tmp, sizeof(tmp));
	return 2;
}
474
475 /*
476 * Branch conversion tests. Complex operations can expand to a lot
477 * of instructions when JITed. This in turn may cause jump offsets
478 * to overflow the field size of the native instruction, triggering
479 * a branch conversion mechanism in some JITs.
480 */
/*
 * Build a program with a conditional jump of the maximum offset
 * (S16_MAX) over a large body of filler ALU operations. 'jmp' and
 * 'imm' select the condition and comparison value against R0 == 1;
 * 'alu32' forces all filler operations to be 32-bit. Exercises branch
 * conversion in JITs where the expanded body overflows the native
 * branch offset field.
 */
static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm, bool alu32)
{
	struct bpf_insn *insns;
	int len = S16_MAX + 5;
	int i;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);	/* max offset */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
	insns[i++] = BPF_EXIT_INSN();

	/* Filler body: rotate through ALU ops, mixing widths unless alu32. */
	while (i < len - 1) {
		static const int ops[] = {
			BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
			BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
		};
		int op = ops[(i >> 1) % ARRAY_SIZE(ops)];

		if ((i & 1) || alu32)
			insns[i++] = BPF_ALU32_REG(op, R0, R1);
		else
			insns[i++] = BPF_ALU64_REG(op, R0, R1);
	}

	insns[i++] = BPF_EXIT_INSN();
	self->u.ptr.insns = insns;
	self->u.ptr.len = len;
	BUG_ON(i != len);	/* generator must fill the buffer exactly */

	return 0;
}
517
/* Branch taken by runtime decision: R0 == 1, so JEQ 1 is true. */
static int bpf_fill_max_jmp_taken_32(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JEQ, 1, true);
}

static int bpf_fill_max_jmp_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JEQ, 1, false);
}

/* Branch not taken by runtime decision: R0 == 1, so JEQ 0 is false. */
static int bpf_fill_max_jmp_not_taken_32(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JEQ, 0, true);
}

static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JEQ, 0, false);
}

/* Branch always taken, known at JIT time: unsigned R0 >= 0 always holds. */
static int bpf_fill_max_jmp_always_taken_32(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JGE, 0, true);
}

static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JGE, 0, false);
}

/* Branch never taken, known at JIT time: unsigned R0 < 0 never holds. */
static int bpf_fill_max_jmp_never_taken_32(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JLT, 0, true);
}

static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JLT, 0, false);
}
561
562 /* ALU result computation used in tests */
/*
 * Compute the reference result of a 64-bit ALU operation for use in
 * tests. Returns false for division/modulo by zero (no defined result,
 * caller skips the case); true otherwise with *res holding the result.
 * Unknown ops leave *res at 0.
 */
static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
{
	*res = 0;
	switch (op) {
	case BPF_MOV:
		*res = v2;
		break;
	case BPF_AND:
		*res = v1 & v2;
		break;
	case BPF_OR:
		*res = v1 | v2;
		break;
	case BPF_XOR:
		*res = v1 ^ v2;
		break;
	case BPF_LSH:
		*res = v1 << v2;
		break;
	case BPF_RSH:
		*res = v1 >> v2;
		break;
	case BPF_ARSH:
		*res = v1 >> v2;
		/*
		 * v1 > S64_MAX tests the sign bit of v1 viewed as s64;
		 * emulate arithmetic shift by filling the vacated high
		 * bits with ones.
		 */
		if (v2 > 0 && v1 > S64_MAX)
			*res |= ~0ULL << (64 - v2);
		break;
	case BPF_ADD:
		*res = v1 + v2;
		break;
	case BPF_SUB:
		*res = v1 - v2;
		break;
	case BPF_MUL:
		*res = v1 * v2;
		break;
	case BPF_DIV:
		if (v2 == 0)
			return false;
		*res = div64_u64(v1, v2);
		break;
	case BPF_MOD:
		if (v2 == 0)
			return false;
		div64_u64_rem(v1, v2, res);
		break;
	}
	return true;
}
612
613 /* Test an ALU shift operation for all valid shift values */
/*
 * Test an ALU shift operation for all valid shift values, against four
 * operand patterns covering every dword/word sign combination. 'mode'
 * selects immediate (BPF_K) or register (BPF_X) shift amount; 'alu32'
 * selects 32-bit operation width. The generated program computes each
 * shift, compares against the reference from __bpf_alu_result(), and
 * exits with R0 == 0 on the first mismatch, R0 == 1 on full success.
 */
static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
				u8 mode, bool alu32)
{
	static const s64 regs[] = {
		0x0123456789abcdefLL, /* dword > 0, word < 0 */
		0xfedcba9876543210LL, /* dword < 0, word > 0 */
		0xfedcba0198765432LL, /* dword < 0, word < 0 */
		0x0123458967abcdefLL, /* dword > 0, word > 0 */
	};
	int bits = alu32 ? 32 : 64;
	/* 2 insns (ld_imm64) per reg + 7 per shift value, head + 2 tail */
	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
	struct bpf_insn *insn;
	int imm, k;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);	/* failure result */

	for (k = 0; k < ARRAY_SIZE(regs); k++) {
		s64 reg = regs[k];

		i += __bpf_ld_imm64(&insn[i], R3, reg);

		for (imm = 0; imm < bits; imm++) {
			u64 val;

			/* Perform operation */
			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
			if (alu32) {
				if (mode == BPF_K)
					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
				else
					insn[i++] = BPF_ALU32_REG(op, R1, R2);

				/* ARSH sign-extends from bit 31 */
				if (op == BPF_ARSH)
					reg = (s32)reg;
				else
					reg = (u32)reg;
				__bpf_alu_result(&val, reg, imm, op);
				val = (u32)val;
			} else {
				if (mode == BPF_K)
					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
				else
					insn[i++] = BPF_ALU64_REG(op, R1, R2);
				__bpf_alu_result(&val, reg, imm, op);
			}

			/*
			 * When debugging a JIT that fails this test, one
			 * can write the immediate value to R0 here to find
			 * out which operand values that fail.
			 */

			/* Load reference and check the result */
			i += __bpf_ld_imm64(&insn[i], R4, val);
			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
			insn[i++] = BPF_EXIT_INSN();
		}
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);	/* success result */
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);	/* generator must fill the buffer exactly */

	return 0;
}
688
/* ALU64 shift operations, immediate shift amount */
static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
}

static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
}

static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
}

/* ALU64 shift operations, register shift amount */
static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
}

static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
}

static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
}

/* ALU32 shift operations, immediate shift amount */
static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
}

static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
}

static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
}

/* ALU32 shift operations, register shift amount */
static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
}

static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
}

static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
}
748
749 /*
750 * Test an ALU register shift operation for all valid shift values
751 * for the case when the source and destination are the same.
752 */
/*
 * Test an ALU register shift operation for all valid shift values
 * for the case when the source and destination are the same.
 * The program exits with R0 == 0 on the first mismatch against the
 * reference result, R0 == 1 if all shift values pass.
 */
static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
					 bool alu32)
{
	int bits = alu32 ? 32 : 64;
	/* head + 6 insns per shift value + 2 tail */
	int len = 3 + 6 * bits;
	struct bpf_insn *insn;
	int i = 0;
	u64 val;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);	/* failure result */

	for (val = 0; val < bits; val++) {
		u64 res;

		/* Perform operation: shift R1 by its own value */
		insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
		if (alu32)
			insn[i++] = BPF_ALU32_REG(op, R1, R1);
		else
			insn[i++] = BPF_ALU64_REG(op, R1, R1);

		/* Compute the reference result */
		__bpf_alu_result(&res, val, val, op);
		if (alu32)
			res = (u32)res;
		i += __bpf_ld_imm64(&insn[i], R2, res);

		/* Check the actual result */
		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
		insn[i++] = BPF_EXIT_INSN();
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);	/* success result */
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);	/* generator must fill the buffer exactly */

	return 0;
}
798
/* ALU64 same-register shift operations */
static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
}

static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
}

static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
}

/* ALU32 same-register shift operations */
static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
}

static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
}

static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
}
828
829 /*
830 * Common operand pattern generator for exhaustive power-of-two magnitudes
831 * tests. The block size parameters can be adjusted to increase/reduce the
832 * number of combinatons tested and thereby execution speed and memory
833 * footprint.
834 */
835
/* Compose a test operand value: sign * 2^msb + delta. */
static inline s64 value(int msb, int delta, int sign)
{
	return sign * (1LL << msb) + delta;
}
840
/*
 * Generate a test program covering all combinations of power-of-two
 * operand magnitudes and signs. 'emit' is first called with
 * insns == NULL to report its worst-case instruction count (used for
 * sizing), then once per operand pair to append the actual snippet.
 * dbits/sbits bound the destination/source magnitudes; block1/block2
 * set the width of the contiguous value block around each magnitude
 * for the two patterns. 'arg' is passed through to 'emit' opaquely.
 */
static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
			      int dbits, int sbits, int block1, int block2,
			      int (*emit)(struct bpf_test*, void*,
					  struct bpf_insn*, s64, s64))
{
	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
	struct bpf_insn *insns;
	int di, si, bt, db, sb;
	int count, len, k;
	int extra = 1 + 2;	/* head MOV plus tail MOV/EXIT */
	int i = 0;

	/* Total number of iterations for the two pattern */
	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);

	/* Compute the maximum number of insns and allocate the buffer */
	len = extra + count * (*emit)(self, arg, NULL, 0, 0);
	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Add head instruction(s) */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	/*
	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
	 * and with a block of contiguous values around each magnitude.
	 */
	for (di = 0; di < dbits - 1; di++)                 /* Dst magnitudes */
		for (si = 0; si < sbits - 1; si++)         /* Src magnitudes */
			for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
				for (db = -(block1 / 2);
				     db < (block1 + 1) / 2; db++)
					for (sb = -(block1 / 2);
					     sb < (block1 + 1) / 2; sb++) {
						s64 dst, src;

						dst = value(di, db, sgn[k][0]);
						src = value(si, sb, sgn[k][1]);
						i += (*emit)(self, arg,
							     &insns[i],
							     dst, src);
					}
	/*
	 * Pattern 2: all combinations for a larger block of values
	 * for each power-of-two magnitude and sign, where the magnitude is
	 * the same for both operands.
	 */
	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)        /* Magnitude */
		for (k = 0; k < ARRAY_SIZE(sgn); k++)         /* Sign combos */
			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
				for (sb = -(block2 / 2);
				     sb < (block2 + 1) / 2; sb++) {
					s64 dst, src;

					dst = value(bt % dbits, db, sgn[k][0]);
					src = value(bt % sbits, sb, sgn[k][1]);
					i += (*emit)(self, arg, &insns[i],
						     dst, src);
				}

	/* Append tail instructions */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insns[i++] = BPF_EXIT_INSN();
	BUG_ON(i > len);	/* emitters may skip cases, so i <= len */

	self->u.ptr.insns = insns;
	self->u.ptr.len = i;	/* actual emitted length, not the estimate */

	return 0;
}
913
914 /*
915 * Block size parameters used in pattern tests below. une as needed to
916 * increase/reduce the number combinations tested, see following examples.
917 * block values per operand MSB
918 * ----------------------------------------
919 * 0 none
920 * 1 (1 << MSB)
921 * 2 (1 << MSB) + [-1, 0]
922 * 3 (1 << MSB) + [-1, 0, 1]
923 */
924 #define PATTERN_BLOCK1 1
925 #define PATTERN_BLOCK2 5
926
927 /* Number of test runs for a pattern test */
928 #define NR_PATTERN_RUNS 1
929
930 /*
931 * Exhaustive tests of ALU operations for all combinations of power-of-two
932 * magnitudes of the operands, both for positive and negative values. The
933 * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
934 * emit different code depending on the magnitude of the immediate value.
935 */
/*
 * Emit one ALU64 immediate test snippet: load dst, apply op with the
 * (sign-extended) immediate, and exit-on-mismatch against the reference
 * result. With insns == NULL, return the worst-case snippet length (7).
 * Emits nothing for cases with no defined result (div/mod by zero).
 */
static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 7;	/* 2 + 2 ld_imm64 slots + 3 insns */

	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R3, res);
		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
956
/*
 * Emit one ALU32 immediate test snippet; the reference result is
 * computed on 32-bit truncated operands and truncated to 32 bits.
 * With insns == NULL, return the worst-case snippet length (7).
 */
static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 7;	/* 2 + 2 ld_imm64 slots + 3 insns */

	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
977
/*
 * Emit one ALU64 register-operand test snippet: load both operands,
 * apply op, and exit-on-mismatch against the reference result.
 * With insns == NULL, return the worst-case snippet length (9).
 */
static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 9;	/* 3 x 2 ld_imm64 slots + 3 insns */

	if (__bpf_alu_result(&res, dst, src, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		i += __bpf_ld_imm64(&insns[i], R3, res);
		insns[i++] = BPF_ALU64_REG(op, R1, R2);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
999
/*
 * Emit one ALU32 register-operand test snippet; the reference result is
 * computed on 32-bit truncated operands and truncated to 32 bits.
 * With insns == NULL, return the worst-case snippet length (9).
 */
static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 9;	/* 3 x 2 ld_imm64 slots + 3 insns */

	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
		insns[i++] = BPF_ALU32_REG(op, R1, R2);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
1021
/*
 * Pattern-test front ends: run the corresponding emitter over all
 * operand magnitude/sign combinations. The immediate variants use a
 * 32-bit source domain since the insn immediate field is 32 bits wide.
 */
static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu64_imm);
}

static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu32_imm);
}

static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu64_reg);
}

static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu32_reg);
}
1049
/*
 * ALU64 immediate operations.
 * One trivial fill function per opcode so that each operation gets
 * its own test case; all delegate pattern generation to
 * __bpf_fill_alu64_imm().
 */
static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_MOV);
}

static int bpf_fill_alu64_and_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_AND);
}

static int bpf_fill_alu64_or_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_OR);
}

static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_XOR);
}

static int bpf_fill_alu64_add_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_ADD);
}

static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_SUB);
}

static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_MUL);
}

static int bpf_fill_alu64_div_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_DIV);
}

static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_MOD);
}
1095
/*
 * ALU32 immediate operations.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu32_imm(), which uses a 32-bit immediate operand
 * pattern.
 */
static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MOV);
}

static int bpf_fill_alu32_and_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_AND);
}

static int bpf_fill_alu32_or_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_OR);
}

static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_XOR);
}

static int bpf_fill_alu32_add_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_ADD);
}

static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_SUB);
}

static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MUL);
}

static int bpf_fill_alu32_div_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_DIV);
}

static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MOD);
}
1141
/*
 * ALU64 register operations.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu64_reg().
 */
static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MOV);
}

static int bpf_fill_alu64_and_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_AND);
}

static int bpf_fill_alu64_or_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_OR);
}

static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_XOR);
}

static int bpf_fill_alu64_add_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_ADD);
}

static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_SUB);
}

static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MUL);
}

static int bpf_fill_alu64_div_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_DIV);
}

static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MOD);
}
1187
/*
 * ALU32 register operations.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu32_reg().
 */
static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MOV);
}

static int bpf_fill_alu32_and_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_AND);
}

static int bpf_fill_alu32_or_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_OR);
}

static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_XOR);
}

static int bpf_fill_alu32_add_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_ADD);
}

static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_SUB);
}

static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MUL);
}

static int bpf_fill_alu32_div_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_DIV);
}

static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MOD);
}
1233
/*
 * Test JITs that implement complex ALU operations as function
 * calls, and must re-arrange operands for argument passing.
 * Each register R0..R9 is used in turn as the destination of an
 * ALU immediate operation, and both 32-bit halves of the result
 * are verified.
 */
static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
{
	int len = 2 + 10 * 10;	/* 10 insns per register R0..R9, plus exit */
	struct bpf_insn *insns;
	u64 dst, res;
	int i = 0;
	u32 imm;
	int rd;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Operand and result values according to operation */
	if (alu32)
		dst = 0x76543210U;
	else
		dst = 0x7edcba9876543210ULL;
	imm = 0x01234567U;

	/* Keep shift amounts within the 32-bit register width */
	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
		imm &= 31;

	__bpf_alu_result(&res, dst, imm, op);

	if (alu32)
		res = (u32)res;

	/* Check all operand registers */
	for (rd = R0; rd <= R9; rd++) {
		i += __bpf_ld_imm64(&insns[i], rd, dst);

		if (alu32)
			insns[i++] = BPF_ALU32_IMM(op, rd, imm);
		else
			insns[i++] = BPF_ALU64_IMM(op, rd, imm);

		/* Low 32 bits must match; on failure exit with the line number */
		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
		insns[i++] = BPF_EXIT_INSN();

		/* High 32 bits must match as well */
		insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
		insns[i++] = BPF_EXIT_INSN();
	}

	/* All checks passed: return 1 */
	insns[i++] = BPF_MOV64_IMM(R0, 1);
	insns[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}
1294
/*
 * ALU64 K registers.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu_imm_regs() with alu32 == false.
 */
static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
}

static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
}

static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
}

static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
}

static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
}

static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
}

static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
}

static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
}

static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
}

static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
}

static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
}

static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
}
1355
/*
 * ALU32 K registers.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu_imm_regs() with alu32 == true.
 */
static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
}

static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
}

static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
}

static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
}

static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
}

static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
}

static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
}

static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
}

static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
}

static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
}

static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
}

static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
}
1416
/*
 * Test JITs that implement complex ALU operations as function
 * calls, and must re-arrange operands for argument passing.
 * All (rd, rs) register pair combinations in R0..R9 are exercised,
 * including the aliased case rd == rs, and both 32-bit halves of
 * the result are verified.
 */
static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
{
	int len = 2 + 10 * 10 * 12;	/* 12 insns per (rd, rs) pair, plus exit */
	u64 dst, src, res, same;
	struct bpf_insn *insns;
	int rd, rs;
	int i = 0;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Operand and result values according to operation */
	if (alu32) {
		dst = 0x76543210U;
		src = 0x01234567U;
	} else {
		dst = 0x7edcba9876543210ULL;
		src = 0x0123456789abcdefULL;
	}

	/* Keep shift amounts within the 32-bit register width */
	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
		src &= 31;

	/*
	 * "res" is the expected result for distinct registers; "same" is
	 * the expected result when rd == rs (both operands are then src,
	 * since the second ld_imm64 overwrites the first).
	 */
	__bpf_alu_result(&res, dst, src, op);
	__bpf_alu_result(&same, src, src, op);

	if (alu32) {
		res = (u32)res;
		same = (u32)same;
	}

	/* Check all combinations of operand registers */
	for (rd = R0; rd <= R9; rd++) {
		for (rs = R0; rs <= R9; rs++) {
			u64 val = rd == rs ? same : res;

			i += __bpf_ld_imm64(&insns[i], rd, dst);
			i += __bpf_ld_imm64(&insns[i], rs, src);

			if (alu32)
				insns[i++] = BPF_ALU32_REG(op, rd, rs);
			else
				insns[i++] = BPF_ALU64_REG(op, rd, rs);

			/* Low 32 bits; on failure exit with the line number */
			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
			insns[i++] = BPF_EXIT_INSN();

			/* High 32 bits */
			insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
			insns[i++] = BPF_EXIT_INSN();
		}
	}

	/* All checks passed: return 1 */
	insns[i++] = BPF_MOV64_IMM(R0, 1);
	insns[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}
1486
/*
 * ALU64 X register combinations.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu_reg_pairs() with alu32 == false.
 */
static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
}

static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
}

static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
}

static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
}

static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
}

static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
}

static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
}

static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
}

static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
}

static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
}

static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
}

static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
}
1547
/*
 * ALU32 X register combinations.
 * One trivial fill function per opcode; all delegate to
 * __bpf_fill_alu_reg_pairs() with alu32 == true.
 */
static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
}

static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
}

static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
}

static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
}

static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
}

static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
}

static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
}

static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
}

static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
}

static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
}

static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
}

static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
}
1608
/*
 * Exhaustive tests of atomic operations for all power-of-two operand
 * magnitudes, both for positive and negative values.
 */

/*
 * Emit one 64-bit atomic test: apply the operation in *arg to a stack
 * slot initialized to dst, with src as the register operand, then check
 * the memory result, the source register, and that R0 is preserved.
 * When insns is NULL, only the number of instructions emitted (21) is
 * returned, so callers can size the buffer.
 */
static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
			       struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	u64 keep, fetch, res;
	int i = 0;

	if (!insns)
		return 21;	/* insn count probe */

	/* Expected memory value after the operation */
	switch (op) {
	case BPF_XCHG:
		res = src;
		break;
	default:
		__bpf_alu_result(&res, dst, src, BPF_OP(op));
	}

	keep = 0x0123456789abcdefULL;
	/* Expected source register value after the operation */
	if (op & BPF_FETCH)
		fetch = dst;	/* fetch variants load the old memory value */
	else
		fetch = src;	/* non-fetch variants leave src untouched */

	i += __bpf_ld_imm64(&insns[i], R0, keep);
	i += __bpf_ld_imm64(&insns[i], R1, dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);
	i += __bpf_ld_imm64(&insns[i], R3, res);
	i += __bpf_ld_imm64(&insns[i], R4, fetch);
	i += __bpf_ld_imm64(&insns[i], R5, keep);

	/* Store dst on the stack and apply the atomic operation */
	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
	insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
	insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);

	/* Memory must hold the expected result */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
	insns[i++] = BPF_EXIT_INSN();

	/* Source register must hold the expected value */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
	insns[i++] = BPF_EXIT_INSN();

	/* R0 must not have been clobbered */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
1660
/*
 * Emit one 32-bit atomic test, analogous to __bpf_emit_atomic64() but
 * operating on a BPF_W stack slot with operands truncated to 32 bits.
 * When insns is NULL, only the number of instructions emitted (21) is
 * returned, so callers can size the buffer.
 */
static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
			       struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	u64 keep, fetch, res;
	int i = 0;

	if (!insns)
		return 21;	/* insn count probe */

	/* Expected memory value after the operation (32-bit operands) */
	switch (op) {
	case BPF_XCHG:
		res = src;
		break;
	default:
		__bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
	}

	keep = 0x0123456789abcdefULL;
	/* Expected source register value after the operation */
	if (op & BPF_FETCH)
		fetch = (u32)dst;	/* fetch variants load the old 32-bit value */
	else
		fetch = src;		/* non-fetch variants leave src untouched */

	i += __bpf_ld_imm64(&insns[i], R0, keep);
	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);
	i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
	i += __bpf_ld_imm64(&insns[i], R4, fetch);
	i += __bpf_ld_imm64(&insns[i], R5, keep);

	/* Store dst on the stack and apply the atomic operation */
	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
	insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
	insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);

	/* Memory must hold the expected result */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
	insns[i++] = BPF_EXIT_INSN();

	/* Source register must hold the expected value */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
	insns[i++] = BPF_EXIT_INSN();

	/* R0 must not have been clobbered */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
1707
/*
 * Emit one 64-bit BPF_CMPXCHG test. The first attempt is made with
 * R0 = ~dst, which cannot match the stored value dst, so the exchange
 * must fail and fetch the old value into R0. Since R0 then equals the
 * memory value, the second attempt must succeed and store src.
 * When insns is NULL, only the number of instructions emitted (23) is
 * returned, so callers can size the buffer.
 */
static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int i = 0;

	if (!insns)
		return 23;	/* insn count probe */

	i += __bpf_ld_imm64(&insns[i], R0, ~dst);	/* guaranteed mismatch */
	i += __bpf_ld_imm64(&insns[i], R1, dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);

	/* Result unsuccessful */
	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);

	/* Memory must be unchanged (still dst) */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* R0 must have fetched the old memory value */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* Result successful: R0 now matches memory */
	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);

	/* Memory must now hold src */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* R0 must have fetched the previous value (dst) */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
1747
/*
 * Emit one 32-bit BPF_CMPXCHG test, analogous to __bpf_emit_cmpxchg64().
 * The first attempt uses R0 = ~dst (mismatch, must fail); R0 is then
 * explicitly reloaded with dst so the second attempt must succeed.
 * When insns is NULL, only the number of instructions emitted (27) is
 * returned, so callers can size the buffer.
 */
static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int i = 0;

	if (!insns)
		return 27;	/* insn count probe */

	i += __bpf_ld_imm64(&insns[i], R0, ~dst);	/* guaranteed mismatch */
	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);

	/* Result unsuccessful */
	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);

	/* Memory must be unchanged (still dst) */
	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* R0 must have fetched the old memory value */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* Result successful: reload R0 with the stored value */
	i += __bpf_ld_imm64(&insns[i], R0, dst);
	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);

	/* Memory must now hold src */
	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* R0 must have fetched the previous value (dst) */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
1790
__bpf_fill_atomic64(struct bpf_test * self,int op)1791 static int __bpf_fill_atomic64(struct bpf_test *self, int op)
1792 {
1793 return __bpf_fill_pattern(self, &op, 64, 64,
1794 0, PATTERN_BLOCK2,
1795 &__bpf_emit_atomic64);
1796 }
1797
__bpf_fill_atomic32(struct bpf_test * self,int op)1798 static int __bpf_fill_atomic32(struct bpf_test *self, int op)
1799 {
1800 return __bpf_fill_pattern(self, &op, 64, 64,
1801 0, PATTERN_BLOCK2,
1802 &__bpf_emit_atomic32);
1803 }
1804
/*
 * 64-bit atomic operations.
 * One trivial fill function per atomic opcode (plain, fetch, xchg and
 * cmpxchg variants); most delegate to __bpf_fill_atomic64().
 */
static int bpf_fill_atomic64_add(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_ADD);
}

static int bpf_fill_atomic64_and(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_AND);
}

static int bpf_fill_atomic64_or(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_OR);
}

static int bpf_fill_atomic64_xor(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_XOR);
}

static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic64_xchg(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_XCHG);
}

/* cmpxchg has its own emitter, so it calls __bpf_fill_pattern() directly */
static int bpf_fill_cmpxchg64(struct bpf_test *self)
{
	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
				  &__bpf_emit_cmpxchg64);
}
1856
/*
 * 32-bit atomic operations.
 * One trivial fill function per atomic opcode (plain, fetch, xchg and
 * cmpxchg variants); most delegate to __bpf_fill_atomic32().
 */
static int bpf_fill_atomic32_add(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_ADD);
}

static int bpf_fill_atomic32_and(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_AND);
}

static int bpf_fill_atomic32_or(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_OR);
}

static int bpf_fill_atomic32_xor(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_XOR);
}

static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic32_xchg(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_XCHG);
}

/* cmpxchg has its own emitter, so it calls __bpf_fill_pattern() directly */
static int bpf_fill_cmpxchg32(struct bpf_test *self)
{
	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
				  &__bpf_emit_cmpxchg32);
}
1908
/*
 * Test JITs that implement ATOMIC operations as function calls or
 * other primitives, and must re-arrange operands for argument passing.
 * All (rd, rs) register pair combinations in R0..R9 are exercised,
 * with rd holding the memory address (copied from R10) and rs the
 * update value. Register aliasing cases (rd == rs, or either aliased
 * with R0 for cmpxchg) are accounted for in the expected values.
 */
static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
{
	struct bpf_insn *insn;
	int len = 2 + 34 * 10 * 10;	/* worst-case 34 insns per (rd, rs) pair */
	u64 mem, upd, res;
	int rd, rs, i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Operand and memory values */
	if (width == BPF_DW) {
		mem = 0x0123456789abcdefULL;
		upd = 0xfedcba9876543210ULL;
	} else { /* BPF_W */
		mem = 0x01234567U;
		upd = 0x76543210U;
	}

	/* Memory updated according to operation */
	switch (op) {
	case BPF_XCHG:
		res = upd;
		break;
	case BPF_CMPXCHG:
		res = mem;	/* R0 (~mem) never matches, so memory is kept */
		break;
	default:
		__bpf_alu_result(&res, mem, upd, BPF_OP(op));
	}

	/* Test all operand registers */
	for (rd = R0; rd <= R9; rd++) {
		for (rs = R0; rs <= R9; rs++) {
			u64 cmp, src;

			/* Initialize value in memory */
			i += __bpf_ld_imm64(&insn[i], R0, mem);
			insn[i++] = BPF_STX_MEM(width, R10, R0, -8);

			/* Initialize registers in order */
			i += __bpf_ld_imm64(&insn[i], R0, ~mem);
			i += __bpf_ld_imm64(&insn[i], rs, upd);
			insn[i++] = BPF_MOV64_REG(rd, R10);

			/* Perform atomic operation */
			insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
			if (op == BPF_CMPXCHG && width == BPF_W)
				insn[i++] = BPF_ZEXT_REG(R0);

			/* Check R0 register value */
			if (op == BPF_CMPXCHG)
				cmp = mem; /* Expect value from memory */
			else if (R0 == rd || R0 == rs)
				cmp = 0; /* Aliased, checked below */
			else
				cmp = ~mem; /* Expect value to be preserved */
			if (cmp) {
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
							  (u32)cmp, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
				insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
							  cmp >> 32, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
			}

			/* Check source register value */
			if (rs == R0 && op == BPF_CMPXCHG)
				src = 0; /* Aliased with R0, checked above */
			else if (rs == rd && (op == BPF_CMPXCHG ||
					      !(op & BPF_FETCH)))
				src = 0; /* Aliased with rd, checked below */
			else if (op == BPF_CMPXCHG)
				src = upd; /* Expect value to be preserved */
			else if (op & BPF_FETCH)
				src = mem; /* Expect fetched value from mem */
			else /* no fetch */
				src = upd; /* Expect value to be preserved */
			if (src) {
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
							  (u32)src, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
				insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
							  src >> 32, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
			}

			/* Check destination register value (still the address) */
			if (!(rd == R0 && op == BPF_CMPXCHG) &&
			    !(rd == rs && (op & BPF_FETCH))) {
				insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
			}

			/* Check value in memory */
			if (rs != rd) {                  /* No aliasing */
				i += __bpf_ld_imm64(&insn[i], R1, res);
			} else if (op == BPF_XCHG) {     /* Aliased, XCHG */
				insn[i++] = BPF_MOV64_REG(R1, R10);
			} else if (op == BPF_CMPXCHG) {  /* Aliased, CMPXCHG */
				i += __bpf_ld_imm64(&insn[i], R1, mem);
			} else {                         /* Aliased, ALU oper */
				i += __bpf_ld_imm64(&insn[i], R1, mem);
				insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
			}

			insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
			if (width == BPF_DW)
				insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
			else /* width == BPF_W */
				insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
			insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
			insn[i++] = BPF_EXIT_INSN();
		}
	}

	/* All checks passed: return 1 */
	insn[i++] = BPF_MOV64_IMM(R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = i;	/* actual count; len above is an upper bound */
	BUG_ON(i > len);

	return 0;
}
2046
2047 /* 64-bit atomic register tests */
/*
 * One-line wrappers binding __bpf_fill_atomic_reg_pairs() to the 64-bit
 * (BPF_DW) operand width, one per atomic operation, so that each
 * operation appears as a separate entry in the test table.
 */
static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
}

static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
}

static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
}

static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
}

static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
}

static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
}
2097
/* 32-bit atomic register tests: same generator, BPF_W operand width */
static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
}

static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
}

static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
}

static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
}

static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
}

static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
}
2148
2149 /*
2150 * Test the two-instruction 64-bit immediate load operation for all
2151 * power-of-two magnitudes of the immediate operand. For each MSB, a block
2152 * of immediate values centered around the power-of-two MSB are tested,
2153 * both for positive and negative values. The test is designed to verify
2154 * the operation for JITs that emit different code depending on the magnitude
2155 * of the immediate value. This is often the case if the native instruction
2156 * immediate field width is narrower than 32 bits.
2157 */
static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
{
	int block = 64; /* Increase for more tests per MSB position */
	/* 1 preamble + 2 tail insns, plus 8 insns per tested immediate:
	 * 63 MSB positions * block adjustments * 2 signs
	 */
	int len = 3 + 8 * 63 * block * 2;
	struct bpf_insn *insn;
	int bit, adj, sign;
	int i = 0;	/* Next free slot in insn[] */

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* R0 = 0 is the value returned on any early-exit failure path */
	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	for (bit = 0; bit <= 62; bit++) {
		for (adj = -block / 2; adj < block / 2; adj++) {
			for (sign = -1; sign <= 1; sign += 2) {
				s64 imm = sign * ((1LL << bit) + adj);

				/* Perform operation */
				i += __bpf_ld_imm64(&insn[i], R1, imm);

				/* Load reference: assemble the same 64-bit
				 * value in R2 from two 32-bit halves
				 */
				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
							  (u32)(imm >> 32));
				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);

				/* Check result: exit with R0 = 0 on mismatch */
				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
				insn[i++] = BPF_EXIT_INSN();
			}
		}
	}

	/* All immediates loaded correctly: return 1 */
	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);	/* Size formula above must be exact */

	return 0;
}
2203
2204 /*
2205 * Test the two-instruction 64-bit immediate load operation for different
2206 * combinations of bytes. Each byte in the 64-bit word is constructed as
2207 * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
2208 * All patterns (base1, mask1) and (base2, mask2) bytes are tested.
2209 */
static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
				     u8 base1, u8 mask1,
				     u8 base2, u8 mask2)
{
	struct bpf_insn *insn;
	/* 1 preamble + 2 tail insns, plus 8 insns per byte pattern */
	int len = 3 + 8 * BIT(8);
	int pattern, index;
	u32 rand = 1;	/* Deterministic LCG state, fixed seed */
	int i = 0;	/* Next free slot in insn[] */

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* R0 = 0 is the value returned on any early-exit failure path */
	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	/* Each pattern bit selects which (base, mask) pair fills a byte */
	for (pattern = 0; pattern < BIT(8); pattern++) {
		u64 imm = 0;

		for (index = 0; index < 8; index++) {
			int byte;

			/* Masked bits come from base, the rest from the LCG */
			if (pattern & BIT(index))
				byte = (base1 & mask1) | (rand & ~mask1);
			else
				byte = (base2 & mask2) | (rand & ~mask2);
			imm = (imm << 8) | byte;
		}

		/* Update our LCG */
		rand = rand * 1664525 + 1013904223;

		/* Perform operation */
		i += __bpf_ld_imm64(&insn[i], R1, imm);

		/* Load reference: assemble the same value from two halves */
		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
		insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
		insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);

		/* Check result: exit with R0 = 0 on mismatch */
		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
		insn[i++] = BPF_EXIT_INSN();
	}

	/* All patterns loaded correctly: return 1 */
	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);	/* Size formula above must be exact */

	return 0;
}
2265
/* Bytes forced to 0x00 vs bytes forced to 0xff */
static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
}

/* "Positive" bytes (bit 7 clear, bit 0 set) vs "negative" bytes (bit 7 set) */
static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
}

/* "Positive" bytes vs bytes forced to 0x00 */
static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
}

/* "Negative" bytes (bit 7 set) vs bytes forced to 0x00 */
static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
}
2285
2286 /*
2287 * Exhaustive tests of JMP operations for all combinations of power-of-two
2288 * magnitudes of the operands, both for positive and negative values. The
2289 * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2290 * emit different code depending on the magnitude of the immediate value.
2291 */
2292
__bpf_match_jmp_cond(s64 v1,s64 v2,u8 op)2293 static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
2294 {
2295 switch (op) {
2296 case BPF_JSET:
2297 return !!(v1 & v2);
2298 case BPF_JEQ:
2299 return v1 == v2;
2300 case BPF_JNE:
2301 return v1 != v2;
2302 case BPF_JGT:
2303 return (u64)v1 > (u64)v2;
2304 case BPF_JGE:
2305 return (u64)v1 >= (u64)v2;
2306 case BPF_JLT:
2307 return (u64)v1 < (u64)v2;
2308 case BPF_JLE:
2309 return (u64)v1 <= (u64)v2;
2310 case BPF_JSGT:
2311 return v1 > v2;
2312 case BPF_JSGE:
2313 return v1 >= v2;
2314 case BPF_JSLT:
2315 return v1 < v2;
2316 case BPF_JSLE:
2317 return v1 <= v2;
2318 }
2319 return false;
2320 }
2321
/*
 * Emit one JMP-immediate test fragment. __bpf_fill_pattern() calls this
 * with insns == NULL to query the worst-case fragment size, then with a
 * buffer to emit the instructions. The fragment exits early when the
 * actual branch behavior disagrees with the expected outcome computed
 * by __bpf_match_jmp_cond(); otherwise it falls through to the next one.
 */
static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
			      struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;	/* Condition code: BPF_JEQ, BPF_JGT, ... */

	if (insns) {
		/* Reference result: 64-bit dst vs sign-extended 32-bit imm */
		bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
		int i = 0;

		/* R0 = expected outcome; returned on an early (failed) exit */
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
		if (!match)
			/* Branch must not be taken: hop over the EXIT */
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	/* Worst case: MOV + 2-insn imm64 load + JMP + JA + EXIT */
	return 5 + 1;
}
2344
/*
 * Emit one JMP32-immediate test fragment. __bpf_fill_pattern() calls
 * this with insns == NULL to query the worst-case fragment size, then
 * with a buffer to emit the instructions. The comparison uses only the
 * low 32 bits of both operands, matching JMP32 semantics.
 */
static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;	/* Condition code: BPF_JEQ, BPF_JGT, ... */

	if (insns) {
		bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
		int i = 0;

		/*
		 * Seed R0 with the expected outcome, mirroring
		 * __bpf_emit_jmp_imm(), so an early exit caused by a
		 * misbehaving branch returns a well-defined value rather
		 * than whatever a previous fragment left in R0.
		 */
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	/* Worst case: MOV + 2-insn imm64 load + JMP + JA + EXIT */
	return 5 + 1;
}
2365
/*
 * Emit one JMP-register test fragment. __bpf_fill_pattern() calls this
 * with insns == NULL to query the worst-case fragment size, then with a
 * buffer to emit the instructions. Both 64-bit operands are loaded into
 * registers before the conditional jump under test.
 */
static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
			      struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;	/* Condition code: BPF_JEQ, BPF_JGT, ... */

	if (insns) {
		bool match = __bpf_match_jmp_cond(dst, src, op);
		int i = 0;

		/*
		 * Seed R0 with the expected outcome, mirroring
		 * __bpf_emit_jmp_imm(), so an early exit caused by a
		 * misbehaving branch returns a well-defined value rather
		 * than whatever a previous fragment left in R0.
		 */
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	/* Worst case: MOV + two 2-insn imm64 loads + JMP + JA + EXIT */
	return 7 + 1;
}
2387
/*
 * Emit one JMP32-register test fragment. __bpf_fill_pattern() calls
 * this with insns == NULL to query the worst-case fragment size, then
 * with a buffer to emit the instructions. The comparison uses only the
 * low 32 bits of both operands, matching JMP32 semantics.
 */
static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;	/* Condition code: BPF_JEQ, BPF_JGT, ... */

	if (insns) {
		bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
		int i = 0;

		/*
		 * Seed R0 with the expected outcome, mirroring
		 * __bpf_emit_jmp_imm(), so an early exit caused by a
		 * misbehaving branch returns a well-defined value rather
		 * than whatever a previous fragment left in R0.
		 */
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	/* Worst case: MOV + two 2-insn imm64 loads + JMP + JA + EXIT */
	return 7 + 1;
}
2409
/*
 * Dispatchers into the generic pattern generator: 64-bit destination
 * operand, with either a 32-bit immediate or a 64-bit register source.
 */
static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp_imm);
}

static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp32_imm);
}

static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp_reg);
}

static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp32_reg);
}
2437
/* JMP immediate tests: one pattern-driven test case per condition code */
static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSET);
}

static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JEQ);
}

static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JNE);
}

static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JGT);
}

static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JGE);
}

static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JLT);
}

static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JLE);
}

static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSGT);
}

static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSGE);
}

static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSLT);
}

static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSLE);
}
2493
/* JMP32 immediate tests: one pattern-driven test case per condition code */
static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSET);
}

static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
}

static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JNE);
}

static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JGT);
}

static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JGE);
}

static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JLT);
}

static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JLE);
}

static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
}

static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
}

static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
}

static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
}
2549
/* JMP register tests: one pattern-driven test case per condition code */
static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSET);
}

static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JEQ);
}

static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JNE);
}

static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JGT);
}

static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JGE);
}

static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JLT);
}

static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JLE);
}

static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSGT);
}

static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSGE);
}

static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSLT);
}

static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSLE);
}
2605
/* JMP32 register tests: one pattern-driven test case per condition code */
static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSET);
}

static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
}

static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JNE);
}

static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JGT);
}

static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JGE);
}

static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JLT);
}

static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JLE);
}

static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
}

static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
}

static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
}

static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
}
2661
2662 /*
2663 * Set up a sequence of staggered jumps, forwards and backwards with
2664 * increasing offset. This tests the conversion of relative jumps to
2665 * JITed native jumps. On some architectures, for example MIPS, a large
2666 * PC-relative jump offset may overflow the immediate field of the native
2667 * conditional branch instruction, triggering a conversion to use an
2668 * absolute jump instead. Since this changes the jump offsets, another
2669 * offset computation pass is necessary, and that may in turn trigger
2670 * another branch conversion. This jump sequence is particularly nasty
2671 * in that regard.
2672 *
2673 * The sequence generation is parameterized by size and jump type.
2674 * The size must be even, and the expected result is always size + 1.
2675 * Below is an example with size=8 and result=9.
2676 *
2677 * ________________________Start
2678 * R0 = 0
2679 * R1 = r1
2680 * R2 = r2
2681 * ,------- JMP +4 * 3______________Preamble: 4 insns
2682 * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2683 * | | R0 = 8 |
2684 * | | JMP +7 * 3 ------------------------.
2685 * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
2686 * | | | R0 = 6 | | |
2687 * | | | JMP +5 * 3 ------------------. | |
2688 * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
2689 * | | | | R0 = 4 | | | | |
2690 * | | | | JMP +3 * 3 ------------. | | | |
2691 * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
2692 * | | | | | R0 = 2 | | | | | | |
2693 * | | | | | JMP +1 * 3 ------. | | | | | |
2694 * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1 1 2 3 4 5 6 7 8 loc
2695 * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
2696 * | | | | | JMP -2 * 3 ---' | | | | | | |
2697 * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
2698 * | | | | | | R0 = 3 | | | | | |
2699 * | | | | | | JMP -4 * 3 ---------' | | | | |
2700 * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
2701 * | | | | | | | R0 = 5 | | | |
2702 * | | | | | | | JMP -6 * 3 ---------------' | | |
2703 * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
2704 * | | | | | | | | R0 = 7 | |
2705 * | | Error | | | JMP -8 * 3 ---------------------' |
2706 * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2707 * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
2708 * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2709 *
2710 */
2711
2712 /* The maximum size parameter */
2713 #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2714
2715 /* We use a reduced number of iterations to get a reasonable execution time */
2716 #define NR_STAGGERED_JMP_RUNS 10
2717
static int __bpf_fill_staggered_jumps(struct bpf_test *self,
				      const struct bpf_insn *jmp,
				      u64 r1, u64 r2)
{
	/* The expected test result encodes the size: result = size + 1 */
	int size = self->test[0].result - 1;
	/* Preamble (4) + size + 1 triplets; the last slot becomes EXIT */
	int len = 4 + 3 * (size + 1);
	struct bpf_insn *insns;
	int off, ind;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Preamble: R0 tracks the step count, R1/R2 are the jump operands */
	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
	/* Enter the sequence at its middle triplet (see diagram above) */
	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);

	/* Sequence: each triplet is a check, a counter update and a jump */
	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
		struct bpf_insn *ins = &insns[4 + 3 * ind];
		int loc;

		/* Offsets step size, size - 2, ..., skipping 0 in favor
		 * of -1 so locations alternate forward and backward
		 */
		if (off == 0)
			off--;

		loc = abs(off);
		/* Unexpected step count: bail out to the final EXIT */
		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
				     3 * (size - ind) + 1);
		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
		/* Copy of the caller's jump, retargeted for this step */
		ins[2] = *jmp;
		ins[2].off = 3 * (off - 1);
	}

	/* Return: overwrites the unused jump slot of the last triplet */
	insns[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;

	return 0;
}
2761
/* 64-bit unconditional jump */
static int bpf_fill_staggered_ja(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
}

/* 64-bit immediate jumps: operands chosen so the branch is always taken */
static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
{
	/* 1234 == 1234 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
{
	/* 4321 != 1234 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
}

static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
{
	/* 0x86 & 0x82 != 0 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
}

static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
{
	/* Unsigned: 0x80000000 > 1234 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
}

static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
{
	/* Unsigned: 1234 >= 1234 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
{
	/* Unsigned: 1234 < 0x80000000 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
{
	/* Unsigned: 1234 <= 1234 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
{
	/* Signed: -1 > -2 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}

static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
{
	/* Signed: -2 >= -2 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
{
	/* Signed: -2 < -1 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
{
	/* Signed: -1 <= -1 */
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}
2847
/* 64-bit register jumps: operands chosen so the branch is always taken */
static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
{
	/* 1234 == 1234 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
{
	/* 4321 != 1234 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
}

static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
{
	/* 0x86 & 0x82 != 0 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
}

static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
{
	/* Unsigned: 0x80000000 > 1234 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
}

static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
{
	/* Unsigned: 1234 >= 1234 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
{
	/* Unsigned: 1234 < 0x80000000 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
}

static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
{
	/* Unsigned: 1234 <= 1234 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
{
	/* Signed: -1 > -2 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
}

static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
{
	/* Signed: -2 >= -2 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
}

static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
{
	/* Signed: -2 < -1 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
}

static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
{
	/* Signed: -1 <= -1 */
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
}
2925
2926 /* 32-bit immediate jumps */
bpf_fill_staggered_jeq32_imm(struct bpf_test * self)2927 static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
2928 {
2929 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
2930
2931 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2932 }
2933
bpf_fill_staggered_jne32_imm(struct bpf_test * self)2934 static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
2935 {
2936 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
2937
2938 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2939 }
2940
bpf_fill_staggered_jset32_imm(struct bpf_test * self)2941 static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
2942 {
2943 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
2944
2945 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2946 }
2947
bpf_fill_staggered_jgt32_imm(struct bpf_test * self)2948 static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
2949 {
2950 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
2951
2952 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2953 }
2954
bpf_fill_staggered_jge32_imm(struct bpf_test * self)2955 static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
2956 {
2957 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
2958
2959 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2960 }
2961
bpf_fill_staggered_jlt32_imm(struct bpf_test * self)2962 static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
2963 {
2964 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
2965
2966 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2967 }
2968
bpf_fill_staggered_jle32_imm(struct bpf_test * self)2969 static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
2970 {
2971 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
2972
2973 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2974 }
2975
bpf_fill_staggered_jsgt32_imm(struct bpf_test * self)2976 static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
2977 {
2978 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
2979
2980 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2981 }
2982
bpf_fill_staggered_jsge32_imm(struct bpf_test * self)2983 static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
2984 {
2985 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
2986
2987 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2988 }
2989
bpf_fill_staggered_jslt32_imm(struct bpf_test * self)2990 static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
2991 {
2992 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
2993
2994 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2995 }
2996
bpf_fill_staggered_jsle32_imm(struct bpf_test * self)2997 static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
2998 {
2999 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
3000
3001 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
3002 }
3003
3004 /* 32-bit register jumps */
bpf_fill_staggered_jeq32_reg(struct bpf_test * self)3005 static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
3006 {
3007 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
3008
3009 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3010 }
3011
bpf_fill_staggered_jne32_reg(struct bpf_test * self)3012 static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
3013 {
3014 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
3015
3016 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
3017 }
3018
bpf_fill_staggered_jset32_reg(struct bpf_test * self)3019 static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
3020 {
3021 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
3022
3023 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
3024 }
3025
bpf_fill_staggered_jgt32_reg(struct bpf_test * self)3026 static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
3027 {
3028 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
3029
3030 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
3031 }
3032
bpf_fill_staggered_jge32_reg(struct bpf_test * self)3033 static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
3034 {
3035 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
3036
3037 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3038 }
3039
bpf_fill_staggered_jlt32_reg(struct bpf_test * self)3040 static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
3041 {
3042 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
3043
3044 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
3045 }
3046
bpf_fill_staggered_jle32_reg(struct bpf_test * self)3047 static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
3048 {
3049 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
3050
3051 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3052 }
3053
bpf_fill_staggered_jsgt32_reg(struct bpf_test * self)3054 static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
3055 {
3056 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
3057
3058 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
3059 }
3060
bpf_fill_staggered_jsge32_reg(struct bpf_test * self)3061 static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
3062 {
3063 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
3064
3065 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
3066 }
3067
bpf_fill_staggered_jslt32_reg(struct bpf_test * self)3068 static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
3069 {
3070 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
3071
3072 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
3073 }
3074
bpf_fill_staggered_jsle32_reg(struct bpf_test * self)3075 static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
3076 {
3077 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
3078
3079 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
3080 }
3081
3082
3083 static struct bpf_test tests[] = {
3084 {
3085 "TAX",
3086 .u.insns = {
3087 BPF_STMT(BPF_LD | BPF_IMM, 1),
3088 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3089 BPF_STMT(BPF_LD | BPF_IMM, 2),
3090 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3091 BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
3092 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3093 BPF_STMT(BPF_LD | BPF_LEN, 0),
3094 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3095 BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
3096 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
3097 BPF_STMT(BPF_RET | BPF_A, 0)
3098 },
3099 CLASSIC,
3100 { 10, 20, 30, 40, 50 },
3101 { { 2, 10 }, { 3, 20 }, { 4, 30 } },
3102 },
3103 {
3104 "TXA",
3105 .u.insns = {
3106 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3107 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3108 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3109 BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
3110 },
3111 CLASSIC,
3112 { 10, 20, 30, 40, 50 },
3113 { { 1, 2 }, { 3, 6 }, { 4, 8 } },
3114 },
3115 {
3116 "ADD_SUB_MUL_K",
3117 .u.insns = {
3118 BPF_STMT(BPF_LD | BPF_IMM, 1),
3119 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
3120 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3121 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3122 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
3123 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
3124 BPF_STMT(BPF_RET | BPF_A, 0)
3125 },
3126 CLASSIC | FLAG_NO_DATA,
3127 { },
3128 { { 0, 0xfffffffd } }
3129 },
3130 {
3131 "DIV_MOD_KX",
3132 .u.insns = {
3133 BPF_STMT(BPF_LD | BPF_IMM, 8),
3134 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
3135 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3136 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3137 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
3138 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3139 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3140 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
3141 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3142 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3143 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
3144 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3145 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3146 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
3147 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3148 BPF_STMT(BPF_RET | BPF_A, 0)
3149 },
3150 CLASSIC | FLAG_NO_DATA,
3151 { },
3152 { { 0, 0x20000000 } }
3153 },
3154 {
3155 "AND_OR_LSH_K",
3156 .u.insns = {
3157 BPF_STMT(BPF_LD | BPF_IMM, 0xff),
3158 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3159 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
3160 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3161 BPF_STMT(BPF_LD | BPF_IMM, 0xf),
3162 BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
3163 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3164 BPF_STMT(BPF_RET | BPF_A, 0)
3165 },
3166 CLASSIC | FLAG_NO_DATA,
3167 { },
3168 { { 0, 0x800000ff }, { 1, 0x800000ff } },
3169 },
3170 {
3171 "LD_IMM_0",
3172 .u.insns = {
3173 BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
3174 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
3175 BPF_STMT(BPF_RET | BPF_K, 0),
3176 BPF_STMT(BPF_RET | BPF_K, 1),
3177 },
3178 CLASSIC,
3179 { },
3180 { { 1, 1 } },
3181 },
3182 {
3183 "LD_IND",
3184 .u.insns = {
3185 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3186 BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
3187 BPF_STMT(BPF_RET | BPF_K, 1)
3188 },
3189 CLASSIC,
3190 { },
3191 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3192 },
3193 {
3194 "LD_ABS",
3195 .u.insns = {
3196 BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
3197 BPF_STMT(BPF_RET | BPF_K, 1)
3198 },
3199 CLASSIC,
3200 { },
3201 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3202 },
3203 {
3204 "LD_ABS_LL",
3205 .u.insns = {
3206 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
3207 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3208 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
3209 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3210 BPF_STMT(BPF_RET | BPF_A, 0)
3211 },
3212 CLASSIC,
3213 { 1, 2, 3 },
3214 { { 1, 0 }, { 2, 3 } },
3215 },
3216 {
3217 "LD_IND_LL",
3218 .u.insns = {
3219 BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
3220 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3221 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3222 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3223 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3224 BPF_STMT(BPF_RET | BPF_A, 0)
3225 },
3226 CLASSIC,
3227 { 1, 2, 3, 0xff },
3228 { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3229 },
3230 {
3231 "LD_ABS_NET",
3232 .u.insns = {
3233 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
3234 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3235 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
3236 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3237 BPF_STMT(BPF_RET | BPF_A, 0)
3238 },
3239 CLASSIC,
3240 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3241 { { 15, 0 }, { 16, 3 } },
3242 },
3243 {
3244 "LD_IND_NET",
3245 .u.insns = {
3246 BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
3247 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3248 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3249 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3250 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3251 BPF_STMT(BPF_RET | BPF_A, 0)
3252 },
3253 CLASSIC,
3254 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3255 { { 14, 0 }, { 15, 1 }, { 17, 3 } },
3256 },
3257 {
3258 "LD_PKTTYPE",
3259 .u.insns = {
3260 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3261 SKF_AD_OFF + SKF_AD_PKTTYPE),
3262 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3263 BPF_STMT(BPF_RET | BPF_K, 1),
3264 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3265 SKF_AD_OFF + SKF_AD_PKTTYPE),
3266 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3267 BPF_STMT(BPF_RET | BPF_K, 1),
3268 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3269 SKF_AD_OFF + SKF_AD_PKTTYPE),
3270 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3271 BPF_STMT(BPF_RET | BPF_K, 1),
3272 BPF_STMT(BPF_RET | BPF_A, 0)
3273 },
3274 CLASSIC,
3275 { },
3276 { { 1, 3 }, { 10, 3 } },
3277 },
3278 {
3279 "LD_MARK",
3280 .u.insns = {
3281 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3282 SKF_AD_OFF + SKF_AD_MARK),
3283 BPF_STMT(BPF_RET | BPF_A, 0)
3284 },
3285 CLASSIC,
3286 { },
3287 { { 1, SKB_MARK}, { 10, SKB_MARK} },
3288 },
3289 {
3290 "LD_RXHASH",
3291 .u.insns = {
3292 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3293 SKF_AD_OFF + SKF_AD_RXHASH),
3294 BPF_STMT(BPF_RET | BPF_A, 0)
3295 },
3296 CLASSIC,
3297 { },
3298 { { 1, SKB_HASH}, { 10, SKB_HASH} },
3299 },
3300 {
3301 "LD_QUEUE",
3302 .u.insns = {
3303 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3304 SKF_AD_OFF + SKF_AD_QUEUE),
3305 BPF_STMT(BPF_RET | BPF_A, 0)
3306 },
3307 CLASSIC,
3308 { },
3309 { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
3310 },
3311 {
3312 "LD_PROTOCOL",
3313 .u.insns = {
3314 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
3315 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
3316 BPF_STMT(BPF_RET | BPF_K, 0),
3317 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3318 SKF_AD_OFF + SKF_AD_PROTOCOL),
3319 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3320 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3321 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
3322 BPF_STMT(BPF_RET | BPF_K, 0),
3323 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3324 BPF_STMT(BPF_RET | BPF_A, 0)
3325 },
3326 CLASSIC,
3327 { 10, 20, 30 },
3328 { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
3329 },
3330 {
3331 "LD_VLAN_TAG",
3332 .u.insns = {
3333 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3334 SKF_AD_OFF + SKF_AD_VLAN_TAG),
3335 BPF_STMT(BPF_RET | BPF_A, 0)
3336 },
3337 CLASSIC,
3338 { },
3339 {
3340 { 1, SKB_VLAN_TCI },
3341 { 10, SKB_VLAN_TCI }
3342 },
3343 },
3344 {
3345 "LD_VLAN_TAG_PRESENT",
3346 .u.insns = {
3347 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3348 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
3349 BPF_STMT(BPF_RET | BPF_A, 0)
3350 },
3351 CLASSIC,
3352 { },
3353 {
3354 { 1, SKB_VLAN_PRESENT },
3355 { 10, SKB_VLAN_PRESENT }
3356 },
3357 },
3358 {
3359 "LD_IFINDEX",
3360 .u.insns = {
3361 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3362 SKF_AD_OFF + SKF_AD_IFINDEX),
3363 BPF_STMT(BPF_RET | BPF_A, 0)
3364 },
3365 CLASSIC,
3366 { },
3367 { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
3368 },
3369 {
3370 "LD_HATYPE",
3371 .u.insns = {
3372 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3373 SKF_AD_OFF + SKF_AD_HATYPE),
3374 BPF_STMT(BPF_RET | BPF_A, 0)
3375 },
3376 CLASSIC,
3377 { },
3378 { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
3379 },
3380 {
3381 "LD_CPU",
3382 .u.insns = {
3383 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3384 SKF_AD_OFF + SKF_AD_CPU),
3385 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3386 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3387 SKF_AD_OFF + SKF_AD_CPU),
3388 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3389 BPF_STMT(BPF_RET | BPF_A, 0)
3390 },
3391 CLASSIC,
3392 { },
3393 { { 1, 0 }, { 10, 0 } },
3394 },
3395 {
3396 "LD_NLATTR",
3397 .u.insns = {
3398 BPF_STMT(BPF_LDX | BPF_IMM, 2),
3399 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3400 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3401 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3402 SKF_AD_OFF + SKF_AD_NLATTR),
3403 BPF_STMT(BPF_RET | BPF_A, 0)
3404 },
3405 CLASSIC,
3406 #ifdef __BIG_ENDIAN
3407 { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3408 #else
3409 { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3410 #endif
3411 { { 4, 0 }, { 20, 6 } },
3412 },
3413 {
3414 "LD_NLATTR_NEST",
3415 .u.insns = {
3416 BPF_STMT(BPF_LD | BPF_IMM, 2),
3417 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3418 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3419 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3420 BPF_STMT(BPF_LD | BPF_IMM, 2),
3421 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3422 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3423 BPF_STMT(BPF_LD | BPF_IMM, 2),
3424 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3425 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3426 BPF_STMT(BPF_LD | BPF_IMM, 2),
3427 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3428 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3429 BPF_STMT(BPF_LD | BPF_IMM, 2),
3430 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3431 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3432 BPF_STMT(BPF_LD | BPF_IMM, 2),
3433 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3434 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3435 BPF_STMT(BPF_LD | BPF_IMM, 2),
3436 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3437 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3438 BPF_STMT(BPF_LD | BPF_IMM, 2),
3439 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3440 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3441 BPF_STMT(BPF_RET | BPF_A, 0)
3442 },
3443 CLASSIC,
3444 #ifdef __BIG_ENDIAN
3445 { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3446 #else
3447 { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3448 #endif
3449 { { 4, 0 }, { 20, 10 } },
3450 },
3451 {
3452 "LD_PAYLOAD_OFF",
3453 .u.insns = {
3454 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3455 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3456 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3457 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3458 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3459 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3460 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3461 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3462 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3463 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3464 BPF_STMT(BPF_RET | BPF_A, 0)
3465 },
3466 CLASSIC,
3467 /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
3468 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3469 * id 9737, seq 1, length 64
3470 */
3471 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3472 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3473 0x08, 0x00,
3474 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3475 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3476 { { 30, 0 }, { 100, 42 } },
3477 },
3478 {
3479 "LD_ANC_XOR",
3480 .u.insns = {
3481 BPF_STMT(BPF_LD | BPF_IMM, 10),
3482 BPF_STMT(BPF_LDX | BPF_IMM, 300),
3483 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3484 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
3485 BPF_STMT(BPF_RET | BPF_A, 0)
3486 },
3487 CLASSIC,
3488 { },
3489 { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3490 },
3491 {
3492 "SPILL_FILL",
3493 .u.insns = {
3494 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3495 BPF_STMT(BPF_LD | BPF_IMM, 2),
3496 BPF_STMT(BPF_ALU | BPF_RSH, 1),
3497 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3498 BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
3499 BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
3500 BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3501 BPF_STMT(BPF_STX, 15), /* M3 = len */
3502 BPF_STMT(BPF_LDX | BPF_MEM, 1),
3503 BPF_STMT(BPF_LD | BPF_MEM, 2),
3504 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3505 BPF_STMT(BPF_LDX | BPF_MEM, 15),
3506 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3507 BPF_STMT(BPF_RET | BPF_A, 0)
3508 },
3509 CLASSIC,
3510 { },
3511 { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3512 },
3513 {
3514 "JEQ",
3515 .u.insns = {
3516 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3517 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3518 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
3519 BPF_STMT(BPF_RET | BPF_K, 1),
3520 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3521 },
3522 CLASSIC,
3523 { 3, 3, 3, 3, 3 },
3524 { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
3525 },
3526 {
3527 "JGT",
3528 .u.insns = {
3529 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3530 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3531 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
3532 BPF_STMT(BPF_RET | BPF_K, 1),
3533 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3534 },
3535 CLASSIC,
3536 { 4, 4, 4, 3, 3 },
3537 { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
3538 },
3539 {
3540 "JGE (jt 0), test 1",
3541 .u.insns = {
3542 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3543 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3544 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3545 BPF_STMT(BPF_RET | BPF_K, 1),
3546 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3547 },
3548 CLASSIC,
3549 { 4, 4, 4, 3, 3 },
3550 { { 2, 0 }, { 3, 1 }, { 4, 1 } },
3551 },
3552 {
3553 "JGE (jt 0), test 2",
3554 .u.insns = {
3555 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3556 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3557 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3558 BPF_STMT(BPF_RET | BPF_K, 1),
3559 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3560 },
3561 CLASSIC,
3562 { 4, 4, 5, 3, 3 },
3563 { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
3564 },
3565 {
3566 "JGE",
3567 .u.insns = {
3568 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3569 BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
3570 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
3571 BPF_STMT(BPF_RET | BPF_K, 10),
3572 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
3573 BPF_STMT(BPF_RET | BPF_K, 20),
3574 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
3575 BPF_STMT(BPF_RET | BPF_K, 30),
3576 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
3577 BPF_STMT(BPF_RET | BPF_K, 40),
3578 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3579 },
3580 CLASSIC,
3581 { 1, 2, 3, 4, 5 },
3582 { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
3583 },
3584 {
3585 "JSET",
3586 .u.insns = {
3587 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3588 BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
3589 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3590 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3591 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3592 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3593 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
3594 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3595 BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
3596 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
3597 BPF_STMT(BPF_RET | BPF_K, 10),
3598 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
3599 BPF_STMT(BPF_RET | BPF_K, 20),
3600 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3601 BPF_STMT(BPF_RET | BPF_K, 30),
3602 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3603 BPF_STMT(BPF_RET | BPF_K, 30),
3604 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3605 BPF_STMT(BPF_RET | BPF_K, 30),
3606 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3607 BPF_STMT(BPF_RET | BPF_K, 30),
3608 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3609 BPF_STMT(BPF_RET | BPF_K, 30),
3610 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3611 },
3612 CLASSIC,
3613 { 0, 0xAA, 0x55, 1 },
3614 { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
3615 },
3616 {
3617 "tcpdump port 22",
3618 .u.insns = {
3619 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3620 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
3621 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
3622 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3623 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3624 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
3625 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
3626 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
3627 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
3628 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
3629 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
3630 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3631 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3632 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3633 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
3634 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3635 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
3636 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3637 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3638 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3639 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3640 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
3641 BPF_STMT(BPF_RET | BPF_K, 0xffff),
3642 BPF_STMT(BPF_RET | BPF_K, 0),
3643 },
3644 CLASSIC,
3645 /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
3646 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3647 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3648 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3649 */
3650 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3651 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3652 0x08, 0x00,
3653 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3654 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3655 0x0a, 0x01, 0x01, 0x95, /* ip src */
3656 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3657 0xc2, 0x24,
3658 0x00, 0x16 /* dst port */ },
3659 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3660 },
3661 {
3662 "tcpdump complex",
3663 .u.insns = {
3664 /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3665 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3666 * (len > 115 or len < 30000000000)' -d
3667 */
3668 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3669 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
3670 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
3671 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3672 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
3673 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3674 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
3675 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3676 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3677 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3678 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3679 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
3680 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
3681 BPF_STMT(BPF_ST, 1),
3682 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
3683 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
3684 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
3685 BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
3686 BPF_STMT(BPF_LD | BPF_MEM, 1),
3687 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3688 BPF_STMT(BPF_ST, 5),
3689 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3690 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
3691 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3692 BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
3693 BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
3694 BPF_STMT(BPF_LD | BPF_MEM, 5),
3695 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
3696 BPF_STMT(BPF_LD | BPF_LEN, 0),
3697 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
3698 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
3699 BPF_STMT(BPF_RET | BPF_K, 0xffff),
3700 BPF_STMT(BPF_RET | BPF_K, 0),
3701 },
3702 CLASSIC,
3703 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3704 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3705 0x08, 0x00,
3706 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3707 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3708 0x0a, 0x01, 0x01, 0x95, /* ip src */
3709 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3710 0xc2, 0x24,
3711 0x00, 0x16 /* dst port */ },
3712 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3713 },
3714 {
3715 "RET_A",
3716 .u.insns = {
3717 /* check that uninitialized X and A contain zeros */
3718 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3719 BPF_STMT(BPF_RET | BPF_A, 0)
3720 },
3721 CLASSIC,
3722 { },
3723 { {1, 0}, {2, 0} },
3724 },
3725 {
3726 "INT: ADD trivial",
3727 .u.insns_int = {
3728 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3729 BPF_ALU64_IMM(BPF_ADD, R1, 2),
3730 BPF_ALU64_IMM(BPF_MOV, R2, 3),
3731 BPF_ALU64_REG(BPF_SUB, R1, R2),
3732 BPF_ALU64_IMM(BPF_ADD, R1, -1),
3733 BPF_ALU64_IMM(BPF_MUL, R1, 3),
3734 BPF_ALU64_REG(BPF_MOV, R0, R1),
3735 BPF_EXIT_INSN(),
3736 },
3737 INTERNAL,
3738 { },
3739 { { 0, 0xfffffffd } }
3740 },
3741 {
3742 "INT: MUL_X",
3743 .u.insns_int = {
3744 BPF_ALU64_IMM(BPF_MOV, R0, -1),
3745 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3746 BPF_ALU64_IMM(BPF_MOV, R2, 3),
3747 BPF_ALU64_REG(BPF_MUL, R1, R2),
3748 BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
3749 BPF_EXIT_INSN(),
3750 BPF_ALU64_IMM(BPF_MOV, R0, 1),
3751 BPF_EXIT_INSN(),
3752 },
3753 INTERNAL,
3754 { },
3755 { { 0, 1 } }
3756 },
3757 {
3758 "INT: MUL_X2",
3759 .u.insns_int = {
3760 BPF_ALU32_IMM(BPF_MOV, R0, -1),
3761 BPF_ALU32_IMM(BPF_MOV, R1, -1),
3762 BPF_ALU32_IMM(BPF_MOV, R2, 3),
3763 BPF_ALU64_REG(BPF_MUL, R1, R2),
3764 BPF_ALU64_IMM(BPF_RSH, R1, 8),
3765 BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
3766 BPF_EXIT_INSN(),
3767 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3768 BPF_EXIT_INSN(),
3769 },
3770 INTERNAL,
3771 { },
3772 { { 0, 1 } }
3773 },
3774 {
3775 "INT: MUL32_X",
3776 .u.insns_int = {
3777 BPF_ALU32_IMM(BPF_MOV, R0, -1),
3778 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3779 BPF_ALU32_IMM(BPF_MOV, R2, 3),
3780 BPF_ALU32_REG(BPF_MUL, R1, R2),
3781 BPF_ALU64_IMM(BPF_RSH, R1, 8),
3782 BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
3783 BPF_EXIT_INSN(),
3784 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3785 BPF_EXIT_INSN(),
3786 },
3787 INTERNAL,
3788 { },
3789 { { 0, 1 } }
3790 },
3791 {
3792 /* Have to test all register combinations, since
3793 * JITing of different registers will produce
3794 * different asm code.
3795 */
3796 "INT: ADD 64-bit",
3797 .u.insns_int = {
3798 BPF_ALU64_IMM(BPF_MOV, R0, 0),
3799 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3800 BPF_ALU64_IMM(BPF_MOV, R2, 2),
3801 BPF_ALU64_IMM(BPF_MOV, R3, 3),
3802 BPF_ALU64_IMM(BPF_MOV, R4, 4),
3803 BPF_ALU64_IMM(BPF_MOV, R5, 5),
3804 BPF_ALU64_IMM(BPF_MOV, R6, 6),
3805 BPF_ALU64_IMM(BPF_MOV, R7, 7),
3806 BPF_ALU64_IMM(BPF_MOV, R8, 8),
3807 BPF_ALU64_IMM(BPF_MOV, R9, 9),
3808 BPF_ALU64_IMM(BPF_ADD, R0, 20),
3809 BPF_ALU64_IMM(BPF_ADD, R1, 20),
3810 BPF_ALU64_IMM(BPF_ADD, R2, 20),
3811 BPF_ALU64_IMM(BPF_ADD, R3, 20),
3812 BPF_ALU64_IMM(BPF_ADD, R4, 20),
3813 BPF_ALU64_IMM(BPF_ADD, R5, 20),
3814 BPF_ALU64_IMM(BPF_ADD, R6, 20),
3815 BPF_ALU64_IMM(BPF_ADD, R7, 20),
3816 BPF_ALU64_IMM(BPF_ADD, R8, 20),
3817 BPF_ALU64_IMM(BPF_ADD, R9, 20),
3818 BPF_ALU64_IMM(BPF_SUB, R0, 10),
3819 BPF_ALU64_IMM(BPF_SUB, R1, 10),
3820 BPF_ALU64_IMM(BPF_SUB, R2, 10),
3821 BPF_ALU64_IMM(BPF_SUB, R3, 10),
3822 BPF_ALU64_IMM(BPF_SUB, R4, 10),
3823 BPF_ALU64_IMM(BPF_SUB, R5, 10),
3824 BPF_ALU64_IMM(BPF_SUB, R6, 10),
3825 BPF_ALU64_IMM(BPF_SUB, R7, 10),
3826 BPF_ALU64_IMM(BPF_SUB, R8, 10),
3827 BPF_ALU64_IMM(BPF_SUB, R9, 10),
3828 BPF_ALU64_REG(BPF_ADD, R0, R0),
3829 BPF_ALU64_REG(BPF_ADD, R0, R1),
3830 BPF_ALU64_REG(BPF_ADD, R0, R2),
3831 BPF_ALU64_REG(BPF_ADD, R0, R3),
3832 BPF_ALU64_REG(BPF_ADD, R0, R4),
3833 BPF_ALU64_REG(BPF_ADD, R0, R5),
3834 BPF_ALU64_REG(BPF_ADD, R0, R6),
3835 BPF_ALU64_REG(BPF_ADD, R0, R7),
3836 BPF_ALU64_REG(BPF_ADD, R0, R8),
3837 BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3838 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3839 BPF_EXIT_INSN(),
3840 BPF_ALU64_REG(BPF_ADD, R1, R0),
3841 BPF_ALU64_REG(BPF_ADD, R1, R1),
3842 BPF_ALU64_REG(BPF_ADD, R1, R2),
3843 BPF_ALU64_REG(BPF_ADD, R1, R3),
3844 BPF_ALU64_REG(BPF_ADD, R1, R4),
3845 BPF_ALU64_REG(BPF_ADD, R1, R5),
3846 BPF_ALU64_REG(BPF_ADD, R1, R6),
3847 BPF_ALU64_REG(BPF_ADD, R1, R7),
3848 BPF_ALU64_REG(BPF_ADD, R1, R8),
3849 BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3850 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3851 BPF_EXIT_INSN(),
3852 BPF_ALU64_REG(BPF_ADD, R2, R0),
3853 BPF_ALU64_REG(BPF_ADD, R2, R1),
3854 BPF_ALU64_REG(BPF_ADD, R2, R2),
3855 BPF_ALU64_REG(BPF_ADD, R2, R3),
3856 BPF_ALU64_REG(BPF_ADD, R2, R4),
3857 BPF_ALU64_REG(BPF_ADD, R2, R5),
3858 BPF_ALU64_REG(BPF_ADD, R2, R6),
3859 BPF_ALU64_REG(BPF_ADD, R2, R7),
3860 BPF_ALU64_REG(BPF_ADD, R2, R8),
3861 BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3862 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3863 BPF_EXIT_INSN(),
3864 BPF_ALU64_REG(BPF_ADD, R3, R0),
3865 BPF_ALU64_REG(BPF_ADD, R3, R1),
3866 BPF_ALU64_REG(BPF_ADD, R3, R2),
3867 BPF_ALU64_REG(BPF_ADD, R3, R3),
3868 BPF_ALU64_REG(BPF_ADD, R3, R4),
3869 BPF_ALU64_REG(BPF_ADD, R3, R5),
3870 BPF_ALU64_REG(BPF_ADD, R3, R6),
3871 BPF_ALU64_REG(BPF_ADD, R3, R7),
3872 BPF_ALU64_REG(BPF_ADD, R3, R8),
3873 BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3874 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3875 BPF_EXIT_INSN(),
3876 BPF_ALU64_REG(BPF_ADD, R4, R0),
3877 BPF_ALU64_REG(BPF_ADD, R4, R1),
3878 BPF_ALU64_REG(BPF_ADD, R4, R2),
3879 BPF_ALU64_REG(BPF_ADD, R4, R3),
3880 BPF_ALU64_REG(BPF_ADD, R4, R4),
3881 BPF_ALU64_REG(BPF_ADD, R4, R5),
3882 BPF_ALU64_REG(BPF_ADD, R4, R6),
3883 BPF_ALU64_REG(BPF_ADD, R4, R7),
3884 BPF_ALU64_REG(BPF_ADD, R4, R8),
3885 BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3886 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3887 BPF_EXIT_INSN(),
3888 BPF_ALU64_REG(BPF_ADD, R5, R0),
3889 BPF_ALU64_REG(BPF_ADD, R5, R1),
3890 BPF_ALU64_REG(BPF_ADD, R5, R2),
3891 BPF_ALU64_REG(BPF_ADD, R5, R3),
3892 BPF_ALU64_REG(BPF_ADD, R5, R4),
3893 BPF_ALU64_REG(BPF_ADD, R5, R5),
3894 BPF_ALU64_REG(BPF_ADD, R5, R6),
3895 BPF_ALU64_REG(BPF_ADD, R5, R7),
3896 BPF_ALU64_REG(BPF_ADD, R5, R8),
3897 BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3898 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3899 BPF_EXIT_INSN(),
3900 BPF_ALU64_REG(BPF_ADD, R6, R0),
3901 BPF_ALU64_REG(BPF_ADD, R6, R1),
3902 BPF_ALU64_REG(BPF_ADD, R6, R2),
3903 BPF_ALU64_REG(BPF_ADD, R6, R3),
3904 BPF_ALU64_REG(BPF_ADD, R6, R4),
3905 BPF_ALU64_REG(BPF_ADD, R6, R5),
3906 BPF_ALU64_REG(BPF_ADD, R6, R6),
3907 BPF_ALU64_REG(BPF_ADD, R6, R7),
3908 BPF_ALU64_REG(BPF_ADD, R6, R8),
3909 BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3910 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3911 BPF_EXIT_INSN(),
3912 BPF_ALU64_REG(BPF_ADD, R7, R0),
3913 BPF_ALU64_REG(BPF_ADD, R7, R1),
3914 BPF_ALU64_REG(BPF_ADD, R7, R2),
3915 BPF_ALU64_REG(BPF_ADD, R7, R3),
3916 BPF_ALU64_REG(BPF_ADD, R7, R4),
3917 BPF_ALU64_REG(BPF_ADD, R7, R5),
3918 BPF_ALU64_REG(BPF_ADD, R7, R6),
3919 BPF_ALU64_REG(BPF_ADD, R7, R7),
3920 BPF_ALU64_REG(BPF_ADD, R7, R8),
3921 BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3922 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3923 BPF_EXIT_INSN(),
3924 BPF_ALU64_REG(BPF_ADD, R8, R0),
3925 BPF_ALU64_REG(BPF_ADD, R8, R1),
3926 BPF_ALU64_REG(BPF_ADD, R8, R2),
3927 BPF_ALU64_REG(BPF_ADD, R8, R3),
3928 BPF_ALU64_REG(BPF_ADD, R8, R4),
3929 BPF_ALU64_REG(BPF_ADD, R8, R5),
3930 BPF_ALU64_REG(BPF_ADD, R8, R6),
3931 BPF_ALU64_REG(BPF_ADD, R8, R7),
3932 BPF_ALU64_REG(BPF_ADD, R8, R8),
3933 BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3934 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3935 BPF_EXIT_INSN(),
3936 BPF_ALU64_REG(BPF_ADD, R9, R0),
3937 BPF_ALU64_REG(BPF_ADD, R9, R1),
3938 BPF_ALU64_REG(BPF_ADD, R9, R2),
3939 BPF_ALU64_REG(BPF_ADD, R9, R3),
3940 BPF_ALU64_REG(BPF_ADD, R9, R4),
3941 BPF_ALU64_REG(BPF_ADD, R9, R5),
3942 BPF_ALU64_REG(BPF_ADD, R9, R6),
3943 BPF_ALU64_REG(BPF_ADD, R9, R7),
3944 BPF_ALU64_REG(BPF_ADD, R9, R8),
3945 BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3946 BPF_ALU64_REG(BPF_MOV, R0, R9),
3947 BPF_EXIT_INSN(),
3948 },
3949 INTERNAL,
3950 { },
3951 { { 0, 2957380 } }
3952 },
3953 { /* 32-bit counterpart of the preceding 64-bit ADD chain test.
      * All intermediate sums fit in 32 bits, so the expected values
      * are identical to the 64-bit variant; this exercises the
      * BPF_ALU32 ADD register encoding in the interpreter/JIT.
      */
3954 "INT: ADD 32-bit",
3955 .u.insns_int = {
3956 BPF_ALU32_IMM(BPF_MOV, R0, 20),
3957 BPF_ALU32_IMM(BPF_MOV, R1, 1),
3958 BPF_ALU32_IMM(BPF_MOV, R2, 2),
3959 BPF_ALU32_IMM(BPF_MOV, R3, 3),
3960 BPF_ALU32_IMM(BPF_MOV, R4, 4),
3961 BPF_ALU32_IMM(BPF_MOV, R5, 5),
3962 BPF_ALU32_IMM(BPF_MOV, R6, 6),
3963 BPF_ALU32_IMM(BPF_MOV, R7, 7),
3964 BPF_ALU32_IMM(BPF_MOV, R8, 8),
3965 BPF_ALU32_IMM(BPF_MOV, R9, 9),
3966 BPF_ALU64_IMM(BPF_ADD, R1, 10),
3967 BPF_ALU64_IMM(BPF_ADD, R2, 10),
3968 BPF_ALU64_IMM(BPF_ADD, R3, 10),
3969 BPF_ALU64_IMM(BPF_ADD, R4, 10),
3970 BPF_ALU64_IMM(BPF_ADD, R5, 10),
3971 BPF_ALU64_IMM(BPF_ADD, R6, 10),
3972 BPF_ALU64_IMM(BPF_ADD, R7, 10),
3973 BPF_ALU64_IMM(BPF_ADD, R8, 10),
3974 BPF_ALU64_IMM(BPF_ADD, R9, 10),
      /* Each register accumulates the sum of all ten registers in
       * turn; any wrong intermediate value makes the matching JEQ
       * fall through to EXIT and the test returns the bad sum.
       */
3975 BPF_ALU32_REG(BPF_ADD, R0, R1),
3976 BPF_ALU32_REG(BPF_ADD, R0, R2),
3977 BPF_ALU32_REG(BPF_ADD, R0, R3),
3978 BPF_ALU32_REG(BPF_ADD, R0, R4),
3979 BPF_ALU32_REG(BPF_ADD, R0, R5),
3980 BPF_ALU32_REG(BPF_ADD, R0, R6),
3981 BPF_ALU32_REG(BPF_ADD, R0, R7),
3982 BPF_ALU32_REG(BPF_ADD, R0, R8),
3983 BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3984 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3985 BPF_EXIT_INSN(),
3986 BPF_ALU32_REG(BPF_ADD, R1, R0),
3987 BPF_ALU32_REG(BPF_ADD, R1, R1),
3988 BPF_ALU32_REG(BPF_ADD, R1, R2),
3989 BPF_ALU32_REG(BPF_ADD, R1, R3),
3990 BPF_ALU32_REG(BPF_ADD, R1, R4),
3991 BPF_ALU32_REG(BPF_ADD, R1, R5),
3992 BPF_ALU32_REG(BPF_ADD, R1, R6),
3993 BPF_ALU32_REG(BPF_ADD, R1, R7),
3994 BPF_ALU32_REG(BPF_ADD, R1, R8),
3995 BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3996 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3997 BPF_EXIT_INSN(),
3998 BPF_ALU32_REG(BPF_ADD, R2, R0),
3999 BPF_ALU32_REG(BPF_ADD, R2, R1),
4000 BPF_ALU32_REG(BPF_ADD, R2, R2),
4001 BPF_ALU32_REG(BPF_ADD, R2, R3),
4002 BPF_ALU32_REG(BPF_ADD, R2, R4),
4003 BPF_ALU32_REG(BPF_ADD, R2, R5),
4004 BPF_ALU32_REG(BPF_ADD, R2, R6),
4005 BPF_ALU32_REG(BPF_ADD, R2, R7),
4006 BPF_ALU32_REG(BPF_ADD, R2, R8),
4007 BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
4008 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
4009 BPF_EXIT_INSN(),
4010 BPF_ALU32_REG(BPF_ADD, R3, R0),
4011 BPF_ALU32_REG(BPF_ADD, R3, R1),
4012 BPF_ALU32_REG(BPF_ADD, R3, R2),
4013 BPF_ALU32_REG(BPF_ADD, R3, R3),
4014 BPF_ALU32_REG(BPF_ADD, R3, R4),
4015 BPF_ALU32_REG(BPF_ADD, R3, R5),
4016 BPF_ALU32_REG(BPF_ADD, R3, R6),
4017 BPF_ALU32_REG(BPF_ADD, R3, R7),
4018 BPF_ALU32_REG(BPF_ADD, R3, R8),
4019 BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
4020 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
4021 BPF_EXIT_INSN(),
4022 BPF_ALU32_REG(BPF_ADD, R4, R0),
4023 BPF_ALU32_REG(BPF_ADD, R4, R1),
4024 BPF_ALU32_REG(BPF_ADD, R4, R2),
4025 BPF_ALU32_REG(BPF_ADD, R4, R3),
4026 BPF_ALU32_REG(BPF_ADD, R4, R4),
4027 BPF_ALU32_REG(BPF_ADD, R4, R5),
4028 BPF_ALU32_REG(BPF_ADD, R4, R6),
4029 BPF_ALU32_REG(BPF_ADD, R4, R7),
4030 BPF_ALU32_REG(BPF_ADD, R4, R8),
4031 BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
4032 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
4033 BPF_EXIT_INSN(),
4034 BPF_ALU32_REG(BPF_ADD, R5, R0),
4035 BPF_ALU32_REG(BPF_ADD, R5, R1),
4036 BPF_ALU32_REG(BPF_ADD, R5, R2),
4037 BPF_ALU32_REG(BPF_ADD, R5, R3),
4038 BPF_ALU32_REG(BPF_ADD, R5, R4),
4039 BPF_ALU32_REG(BPF_ADD, R5, R5),
4040 BPF_ALU32_REG(BPF_ADD, R5, R6),
4041 BPF_ALU32_REG(BPF_ADD, R5, R7),
4042 BPF_ALU32_REG(BPF_ADD, R5, R8),
4043 BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
4044 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
4045 BPF_EXIT_INSN(),
4046 BPF_ALU32_REG(BPF_ADD, R6, R0),
4047 BPF_ALU32_REG(BPF_ADD, R6, R1),
4048 BPF_ALU32_REG(BPF_ADD, R6, R2),
4049 BPF_ALU32_REG(BPF_ADD, R6, R3),
4050 BPF_ALU32_REG(BPF_ADD, R6, R4),
4051 BPF_ALU32_REG(BPF_ADD, R6, R5),
4052 BPF_ALU32_REG(BPF_ADD, R6, R6),
4053 BPF_ALU32_REG(BPF_ADD, R6, R7),
4054 BPF_ALU32_REG(BPF_ADD, R6, R8),
4055 BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
4056 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
4057 BPF_EXIT_INSN(),
4058 BPF_ALU32_REG(BPF_ADD, R7, R0),
4059 BPF_ALU32_REG(BPF_ADD, R7, R1),
4060 BPF_ALU32_REG(BPF_ADD, R7, R2),
4061 BPF_ALU32_REG(BPF_ADD, R7, R3),
4062 BPF_ALU32_REG(BPF_ADD, R7, R4),
4063 BPF_ALU32_REG(BPF_ADD, R7, R5),
4064 BPF_ALU32_REG(BPF_ADD, R7, R6),
4065 BPF_ALU32_REG(BPF_ADD, R7, R7),
4066 BPF_ALU32_REG(BPF_ADD, R7, R8),
4067 BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
4068 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
4069 BPF_EXIT_INSN(),
4070 BPF_ALU32_REG(BPF_ADD, R8, R0),
4071 BPF_ALU32_REG(BPF_ADD, R8, R1),
4072 BPF_ALU32_REG(BPF_ADD, R8, R2),
4073 BPF_ALU32_REG(BPF_ADD, R8, R3),
4074 BPF_ALU32_REG(BPF_ADD, R8, R4),
4075 BPF_ALU32_REG(BPF_ADD, R8, R5),
4076 BPF_ALU32_REG(BPF_ADD, R8, R6),
4077 BPF_ALU32_REG(BPF_ADD, R8, R7),
4078 BPF_ALU32_REG(BPF_ADD, R8, R8),
4079 BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
4080 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
4081 BPF_EXIT_INSN(),
4082 BPF_ALU32_REG(BPF_ADD, R9, R0),
4083 BPF_ALU32_REG(BPF_ADD, R9, R1),
4084 BPF_ALU32_REG(BPF_ADD, R9, R2),
4085 BPF_ALU32_REG(BPF_ADD, R9, R3),
4086 BPF_ALU32_REG(BPF_ADD, R9, R4),
4087 BPF_ALU32_REG(BPF_ADD, R9, R5),
4088 BPF_ALU32_REG(BPF_ADD, R9, R6),
4089 BPF_ALU32_REG(BPF_ADD, R9, R7),
4090 BPF_ALU32_REG(BPF_ADD, R9, R8),
4091 BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
4092 BPF_ALU32_REG(BPF_MOV, R0, R9),
4093 BPF_EXIT_INSN(),
4094 },
4095 INTERNAL,
4096 { },
4097 { { 0, 2957380 } }
4098 },
4099 { /* Mainly checking JIT here. */
      /* Chain of 64-bit SUB (register and immediate forms) across all
       * ten registers, plus one NEG. R0's first pass is checked
       * explicitly (0 - 1 - 2 - ... - 9 - 10 == -55); the remaining
       * passes are only validated through the final expected result.
       */
4100 "INT: SUB",
4101 .u.insns_int = {
4102 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4103 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4104 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4105 BPF_ALU64_IMM(BPF_MOV, R3, 3),
4106 BPF_ALU64_IMM(BPF_MOV, R4, 4),
4107 BPF_ALU64_IMM(BPF_MOV, R5, 5),
4108 BPF_ALU64_IMM(BPF_MOV, R6, 6),
4109 BPF_ALU64_IMM(BPF_MOV, R7, 7),
4110 BPF_ALU64_IMM(BPF_MOV, R8, 8),
4111 BPF_ALU64_IMM(BPF_MOV, R9, 9),
4112 BPF_ALU64_REG(BPF_SUB, R0, R0),
4113 BPF_ALU64_REG(BPF_SUB, R0, R1),
4114 BPF_ALU64_REG(BPF_SUB, R0, R2),
4115 BPF_ALU64_REG(BPF_SUB, R0, R3),
4116 BPF_ALU64_REG(BPF_SUB, R0, R4),
4117 BPF_ALU64_REG(BPF_SUB, R0, R5),
4118 BPF_ALU64_REG(BPF_SUB, R0, R6),
4119 BPF_ALU64_REG(BPF_SUB, R0, R7),
4120 BPF_ALU64_REG(BPF_SUB, R0, R8),
4121 BPF_ALU64_REG(BPF_SUB, R0, R9),
4122 BPF_ALU64_IMM(BPF_SUB, R0, 10),
4123 BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
4124 BPF_EXIT_INSN(),
4125 BPF_ALU64_REG(BPF_SUB, R1, R0),
4126 BPF_ALU64_REG(BPF_SUB, R1, R2),
4127 BPF_ALU64_REG(BPF_SUB, R1, R3),
4128 BPF_ALU64_REG(BPF_SUB, R1, R4),
4129 BPF_ALU64_REG(BPF_SUB, R1, R5),
4130 BPF_ALU64_REG(BPF_SUB, R1, R6),
4131 BPF_ALU64_REG(BPF_SUB, R1, R7),
4132 BPF_ALU64_REG(BPF_SUB, R1, R8),
4133 BPF_ALU64_REG(BPF_SUB, R1, R9),
4134 BPF_ALU64_IMM(BPF_SUB, R1, 10),
4135 BPF_ALU64_REG(BPF_SUB, R2, R0),
4136 BPF_ALU64_REG(BPF_SUB, R2, R1),
4137 BPF_ALU64_REG(BPF_SUB, R2, R3),
4138 BPF_ALU64_REG(BPF_SUB, R2, R4),
4139 BPF_ALU64_REG(BPF_SUB, R2, R5),
4140 BPF_ALU64_REG(BPF_SUB, R2, R6),
4141 BPF_ALU64_REG(BPF_SUB, R2, R7),
4142 BPF_ALU64_REG(BPF_SUB, R2, R8),
4143 BPF_ALU64_REG(BPF_SUB, R2, R9),
4144 BPF_ALU64_IMM(BPF_SUB, R2, 10),
4145 BPF_ALU64_REG(BPF_SUB, R3, R0),
4146 BPF_ALU64_REG(BPF_SUB, R3, R1),
4147 BPF_ALU64_REG(BPF_SUB, R3, R2),
4148 BPF_ALU64_REG(BPF_SUB, R3, R4),
4149 BPF_ALU64_REG(BPF_SUB, R3, R5),
4150 BPF_ALU64_REG(BPF_SUB, R3, R6),
4151 BPF_ALU64_REG(BPF_SUB, R3, R7),
4152 BPF_ALU64_REG(BPF_SUB, R3, R8),
4153 BPF_ALU64_REG(BPF_SUB, R3, R9),
4154 BPF_ALU64_IMM(BPF_SUB, R3, 10),
4155 BPF_ALU64_REG(BPF_SUB, R4, R0),
4156 BPF_ALU64_REG(BPF_SUB, R4, R1),
4157 BPF_ALU64_REG(BPF_SUB, R4, R2),
4158 BPF_ALU64_REG(BPF_SUB, R4, R3),
4159 BPF_ALU64_REG(BPF_SUB, R4, R5),
4160 BPF_ALU64_REG(BPF_SUB, R4, R6),
4161 BPF_ALU64_REG(BPF_SUB, R4, R7),
4162 BPF_ALU64_REG(BPF_SUB, R4, R8),
4163 BPF_ALU64_REG(BPF_SUB, R4, R9),
4164 BPF_ALU64_IMM(BPF_SUB, R4, 10),
4165 BPF_ALU64_REG(BPF_SUB, R5, R0),
4166 BPF_ALU64_REG(BPF_SUB, R5, R1),
4167 BPF_ALU64_REG(BPF_SUB, R5, R2),
4168 BPF_ALU64_REG(BPF_SUB, R5, R3),
4169 BPF_ALU64_REG(BPF_SUB, R5, R4),
4170 BPF_ALU64_REG(BPF_SUB, R5, R6),
4171 BPF_ALU64_REG(BPF_SUB, R5, R7),
4172 BPF_ALU64_REG(BPF_SUB, R5, R8),
4173 BPF_ALU64_REG(BPF_SUB, R5, R9),
4174 BPF_ALU64_IMM(BPF_SUB, R5, 10),
4175 BPF_ALU64_REG(BPF_SUB, R6, R0),
4176 BPF_ALU64_REG(BPF_SUB, R6, R1),
4177 BPF_ALU64_REG(BPF_SUB, R6, R2),
4178 BPF_ALU64_REG(BPF_SUB, R6, R3),
4179 BPF_ALU64_REG(BPF_SUB, R6, R4),
4180 BPF_ALU64_REG(BPF_SUB, R6, R5),
4181 BPF_ALU64_REG(BPF_SUB, R6, R7),
4182 BPF_ALU64_REG(BPF_SUB, R6, R8),
4183 BPF_ALU64_REG(BPF_SUB, R6, R9),
4184 BPF_ALU64_IMM(BPF_SUB, R6, 10),
4185 BPF_ALU64_REG(BPF_SUB, R7, R0),
4186 BPF_ALU64_REG(BPF_SUB, R7, R1),
4187 BPF_ALU64_REG(BPF_SUB, R7, R2),
4188 BPF_ALU64_REG(BPF_SUB, R7, R3),
4189 BPF_ALU64_REG(BPF_SUB, R7, R4),
4190 BPF_ALU64_REG(BPF_SUB, R7, R5),
4191 BPF_ALU64_REG(BPF_SUB, R7, R6),
4192 BPF_ALU64_REG(BPF_SUB, R7, R8),
4193 BPF_ALU64_REG(BPF_SUB, R7, R9),
4194 BPF_ALU64_IMM(BPF_SUB, R7, 10),
4195 BPF_ALU64_REG(BPF_SUB, R8, R0),
4196 BPF_ALU64_REG(BPF_SUB, R8, R1),
4197 BPF_ALU64_REG(BPF_SUB, R8, R2),
4198 BPF_ALU64_REG(BPF_SUB, R8, R3),
4199 BPF_ALU64_REG(BPF_SUB, R8, R4),
4200 BPF_ALU64_REG(BPF_SUB, R8, R5),
4201 BPF_ALU64_REG(BPF_SUB, R8, R6),
4202 BPF_ALU64_REG(BPF_SUB, R8, R7),
4203 BPF_ALU64_REG(BPF_SUB, R8, R9),
4204 BPF_ALU64_IMM(BPF_SUB, R8, 10),
4205 BPF_ALU64_REG(BPF_SUB, R9, R0),
4206 BPF_ALU64_REG(BPF_SUB, R9, R1),
4207 BPF_ALU64_REG(BPF_SUB, R9, R2),
4208 BPF_ALU64_REG(BPF_SUB, R9, R3),
4209 BPF_ALU64_REG(BPF_SUB, R9, R4),
4210 BPF_ALU64_REG(BPF_SUB, R9, R5),
4211 BPF_ALU64_REG(BPF_SUB, R9, R6),
4212 BPF_ALU64_REG(BPF_SUB, R9, R7),
4213 BPF_ALU64_REG(BPF_SUB, R9, R8),
4214 BPF_ALU64_IMM(BPF_SUB, R9, 10),
4215 BPF_ALU64_IMM(BPF_SUB, R0, 10),
4216 BPF_ALU64_IMM(BPF_NEG, R0, 0), /* flip sign before final pass */
4217 BPF_ALU64_REG(BPF_SUB, R0, R1),
4218 BPF_ALU64_REG(BPF_SUB, R0, R2),
4219 BPF_ALU64_REG(BPF_SUB, R0, R3),
4220 BPF_ALU64_REG(BPF_SUB, R0, R4),
4221 BPF_ALU64_REG(BPF_SUB, R0, R5),
4222 BPF_ALU64_REG(BPF_SUB, R0, R6),
4223 BPF_ALU64_REG(BPF_SUB, R0, R7),
4224 BPF_ALU64_REG(BPF_SUB, R0, R8),
4225 BPF_ALU64_REG(BPF_SUB, R0, R9),
4226 BPF_EXIT_INSN(),
4227 },
4228 INTERNAL,
4229 { },
4230 { { 0, 11 } }
4231 },
4232 { /* Mainly checking JIT here. */
      /* Verifies that both SUB r,r and XOR r,r zero a register for
       * every register. The interleaved MOVs deliberately clobber
       * registers with nonzero garbage between checks so a JIT that
       * fails to materialize the zeroing idiom is caught.
       */
4233 "INT: XOR",
4234 .u.insns_int = {
4235 BPF_ALU64_REG(BPF_SUB, R0, R0),
4236 BPF_ALU64_REG(BPF_XOR, R1, R1),
4237 BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
4238 BPF_EXIT_INSN(),
4239 BPF_ALU64_IMM(BPF_MOV, R0, 10),
4240 BPF_ALU64_IMM(BPF_MOV, R1, -1),
4241 BPF_ALU64_REG(BPF_SUB, R1, R1),
4242 BPF_ALU64_REG(BPF_XOR, R2, R2),
4243 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
4244 BPF_EXIT_INSN(),
4245 BPF_ALU64_REG(BPF_SUB, R2, R2),
4246 BPF_ALU64_REG(BPF_XOR, R3, R3),
4247 BPF_ALU64_IMM(BPF_MOV, R0, 10),
4248 BPF_ALU64_IMM(BPF_MOV, R1, -1),
4249 BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
4250 BPF_EXIT_INSN(),
4251 BPF_ALU64_REG(BPF_SUB, R3, R3),
4252 BPF_ALU64_REG(BPF_XOR, R4, R4),
4253 BPF_ALU64_IMM(BPF_MOV, R2, 1),
4254 BPF_ALU64_IMM(BPF_MOV, R5, -1),
4255 BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
4256 BPF_EXIT_INSN(),
4257 BPF_ALU64_REG(BPF_SUB, R4, R4),
4258 BPF_ALU64_REG(BPF_XOR, R5, R5),
4259 BPF_ALU64_IMM(BPF_MOV, R3, 1),
4260 BPF_ALU64_IMM(BPF_MOV, R7, -1),
4261 BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
4262 BPF_EXIT_INSN(),
4263 BPF_ALU64_IMM(BPF_MOV, R5, 1),
4264 BPF_ALU64_REG(BPF_SUB, R5, R5),
4265 BPF_ALU64_REG(BPF_XOR, R6, R6),
4266 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4267 BPF_ALU64_IMM(BPF_MOV, R8, -1),
4268 BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
4269 BPF_EXIT_INSN(),
4270 BPF_ALU64_REG(BPF_SUB, R6, R6),
4271 BPF_ALU64_REG(BPF_XOR, R7, R7),
4272 BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
4273 BPF_EXIT_INSN(),
4274 BPF_ALU64_REG(BPF_SUB, R7, R7),
4275 BPF_ALU64_REG(BPF_XOR, R8, R8),
4276 BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
4277 BPF_EXIT_INSN(),
4278 BPF_ALU64_REG(BPF_SUB, R8, R8),
4279 BPF_ALU64_REG(BPF_XOR, R9, R9),
4280 BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
4281 BPF_EXIT_INSN(),
4282 BPF_ALU64_REG(BPF_SUB, R9, R9),
4283 BPF_ALU64_REG(BPF_XOR, R0, R0),
4284 BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
4285 BPF_EXIT_INSN(),
4286 BPF_ALU64_REG(BPF_SUB, R1, R1),
4287 BPF_ALU64_REG(BPF_XOR, R0, R0),
4288 BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
4289 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4290 BPF_EXIT_INSN(),
4291 BPF_ALU64_IMM(BPF_MOV, R0, 1), /* all checks passed */
4292 BPF_EXIT_INSN(),
4293 },
4294 INTERNAL,
4295 { },
4296 { { 0, 1 } }
4297 },
4298 { /* Mainly checking JIT here. */
      /* 64-bit MUL chains that overflow 32 bits; the low and high
       * halves of the products are checked separately via RSH/LSH/
       * ARSH by 32, so a JIT doing only 32-bit multiplies fails.
       */
4299 "INT: MUL",
4300 .u.insns_int = {
4301 BPF_ALU64_IMM(BPF_MOV, R0, 11),
4302 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4303 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4304 BPF_ALU64_IMM(BPF_MOV, R3, 3),
4305 BPF_ALU64_IMM(BPF_MOV, R4, 4),
4306 BPF_ALU64_IMM(BPF_MOV, R5, 5),
4307 BPF_ALU64_IMM(BPF_MOV, R6, 6),
4308 BPF_ALU64_IMM(BPF_MOV, R7, 7),
4309 BPF_ALU64_IMM(BPF_MOV, R8, 8),
4310 BPF_ALU64_IMM(BPF_MOV, R9, 9),
4311 BPF_ALU64_REG(BPF_MUL, R0, R0),
4312 BPF_ALU64_REG(BPF_MUL, R0, R1),
4313 BPF_ALU64_REG(BPF_MUL, R0, R2),
4314 BPF_ALU64_REG(BPF_MUL, R0, R3),
4315 BPF_ALU64_REG(BPF_MUL, R0, R4),
4316 BPF_ALU64_REG(BPF_MUL, R0, R5),
4317 BPF_ALU64_REG(BPF_MUL, R0, R6),
4318 BPF_ALU64_REG(BPF_MUL, R0, R7),
4319 BPF_ALU64_REG(BPF_MUL, R0, R8),
4320 BPF_ALU64_REG(BPF_MUL, R0, R9),
4321 BPF_ALU64_IMM(BPF_MUL, R0, 10),
4322 BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1), /* 121 * 10! fits in 32 bits */
4323 BPF_EXIT_INSN(),
4324 BPF_ALU64_REG(BPF_MUL, R1, R0),
4325 BPF_ALU64_REG(BPF_MUL, R1, R2),
4326 BPF_ALU64_REG(BPF_MUL, R1, R3),
4327 BPF_ALU64_REG(BPF_MUL, R1, R4),
4328 BPF_ALU64_REG(BPF_MUL, R1, R5),
4329 BPF_ALU64_REG(BPF_MUL, R1, R6),
4330 BPF_ALU64_REG(BPF_MUL, R1, R7),
4331 BPF_ALU64_REG(BPF_MUL, R1, R8),
4332 BPF_ALU64_REG(BPF_MUL, R1, R9),
4333 BPF_ALU64_IMM(BPF_MUL, R1, 10),
4334 BPF_ALU64_REG(BPF_MOV, R2, R1),
4335 BPF_ALU64_IMM(BPF_RSH, R2, 32), /* isolate upper 32 bits of product */
4336 BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
4337 BPF_EXIT_INSN(),
4338 BPF_ALU64_IMM(BPF_LSH, R1, 32), /* isolate (sign-extended) lower 32 bits */
4339 BPF_ALU64_IMM(BPF_ARSH, R1, 32),
4340 BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
4341 BPF_EXIT_INSN(),
4342 BPF_ALU64_REG(BPF_MUL, R2, R0),
4343 BPF_ALU64_REG(BPF_MUL, R2, R1),
4344 BPF_ALU64_REG(BPF_MUL, R2, R3),
4345 BPF_ALU64_REG(BPF_MUL, R2, R4),
4346 BPF_ALU64_REG(BPF_MUL, R2, R5),
4347 BPF_ALU64_REG(BPF_MUL, R2, R6),
4348 BPF_ALU64_REG(BPF_MUL, R2, R7),
4349 BPF_ALU64_REG(BPF_MUL, R2, R8),
4350 BPF_ALU64_REG(BPF_MUL, R2, R9),
4351 BPF_ALU64_IMM(BPF_MUL, R2, 10),
4352 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4353 BPF_ALU64_REG(BPF_MOV, R0, R2), /* return upper half of final product */
4354 BPF_EXIT_INSN(),
4355 },
4356 INTERNAL,
4357 { },
4358 { { 0, 0x35d97ef2 } }
4359 },
4360 { /* Mainly checking JIT here. */
      /* Propagate all-ones through every register via 64-bit MOV,
       * then clear each with ALU64 MOV imm 0. The sum of all ten
       * registers must then contribute nothing, leaving only the
       * final immediate 0xfefe.
       */
4361 "MOV REG64",
4362 .u.insns_int = {
4363 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4364 BPF_MOV64_REG(R1, R0),
4365 BPF_MOV64_REG(R2, R1),
4366 BPF_MOV64_REG(R3, R2),
4367 BPF_MOV64_REG(R4, R3),
4368 BPF_MOV64_REG(R5, R4),
4369 BPF_MOV64_REG(R6, R5),
4370 BPF_MOV64_REG(R7, R6),
4371 BPF_MOV64_REG(R8, R7),
4372 BPF_MOV64_REG(R9, R8),
4373 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4374 BPF_ALU64_IMM(BPF_MOV, R1, 0),
4375 BPF_ALU64_IMM(BPF_MOV, R2, 0),
4376 BPF_ALU64_IMM(BPF_MOV, R3, 0),
4377 BPF_ALU64_IMM(BPF_MOV, R4, 0),
4378 BPF_ALU64_IMM(BPF_MOV, R5, 0),
4379 BPF_ALU64_IMM(BPF_MOV, R6, 0),
4380 BPF_ALU64_IMM(BPF_MOV, R7, 0),
4381 BPF_ALU64_IMM(BPF_MOV, R8, 0),
4382 BPF_ALU64_IMM(BPF_MOV, R9, 0),
4383 BPF_ALU64_REG(BPF_ADD, R0, R0),
4384 BPF_ALU64_REG(BPF_ADD, R0, R1),
4385 BPF_ALU64_REG(BPF_ADD, R0, R2),
4386 BPF_ALU64_REG(BPF_ADD, R0, R3),
4387 BPF_ALU64_REG(BPF_ADD, R0, R4),
4388 BPF_ALU64_REG(BPF_ADD, R0, R5),
4389 BPF_ALU64_REG(BPF_ADD, R0, R6),
4390 BPF_ALU64_REG(BPF_ADD, R0, R7),
4391 BPF_ALU64_REG(BPF_ADD, R0, R8),
4392 BPF_ALU64_REG(BPF_ADD, R0, R9),
4393 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4394 BPF_EXIT_INSN(),
4395 },
4396 INTERNAL,
4397 { },
4398 { { 0, 0xfefe } }
4399 },
4400 { /* Mainly checking JIT here. */
      /* Same shape as "MOV REG64" but the clears use ALU32 MOV imm 0.
       * A correct implementation zero-extends the 32-bit move, so the
       * 64-bit ADD chain still sums to 0 and the result is 0xfefe;
       * a JIT that leaves the upper halves as all-ones fails.
       */
4401 "MOV REG32",
4402 .u.insns_int = {
4403 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4404 BPF_MOV64_REG(R1, R0),
4405 BPF_MOV64_REG(R2, R1),
4406 BPF_MOV64_REG(R3, R2),
4407 BPF_MOV64_REG(R4, R3),
4408 BPF_MOV64_REG(R5, R4),
4409 BPF_MOV64_REG(R6, R5),
4410 BPF_MOV64_REG(R7, R6),
4411 BPF_MOV64_REG(R8, R7),
4412 BPF_MOV64_REG(R9, R8),
4413 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4414 BPF_ALU32_IMM(BPF_MOV, R1, 0),
4415 BPF_ALU32_IMM(BPF_MOV, R2, 0),
4416 BPF_ALU32_IMM(BPF_MOV, R3, 0),
4417 BPF_ALU32_IMM(BPF_MOV, R4, 0),
4418 BPF_ALU32_IMM(BPF_MOV, R5, 0),
4419 BPF_ALU32_IMM(BPF_MOV, R6, 0),
4420 BPF_ALU32_IMM(BPF_MOV, R7, 0),
4421 BPF_ALU32_IMM(BPF_MOV, R8, 0),
4422 BPF_ALU32_IMM(BPF_MOV, R9, 0),
4423 BPF_ALU64_REG(BPF_ADD, R0, R0),
4424 BPF_ALU64_REG(BPF_ADD, R0, R1),
4425 BPF_ALU64_REG(BPF_ADD, R0, R2),
4426 BPF_ALU64_REG(BPF_ADD, R0, R3),
4427 BPF_ALU64_REG(BPF_ADD, R0, R4),
4428 BPF_ALU64_REG(BPF_ADD, R0, R5),
4429 BPF_ALU64_REG(BPF_ADD, R0, R6),
4430 BPF_ALU64_REG(BPF_ADD, R0, R7),
4431 BPF_ALU64_REG(BPF_ADD, R0, R8),
4432 BPF_ALU64_REG(BPF_ADD, R0, R9),
4433 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4434 BPF_EXIT_INSN(),
4435 },
4436 INTERNAL,
4437 { },
4438 { { 0, 0xfefe } }
4439 },
4440 { /* Mainly checking JIT here. */
      /* Same shape again, but registers are cleared with the two-slot
       * BPF_LD_IMM64 pseudo-instruction. Verifies LD_IMM64 overwrites
       * the full 64 bits of each previously all-ones register.
       */
4441 "LD IMM64",
4442 .u.insns_int = {
4443 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4444 BPF_MOV64_REG(R1, R0),
4445 BPF_MOV64_REG(R2, R1),
4446 BPF_MOV64_REG(R3, R2),
4447 BPF_MOV64_REG(R4, R3),
4448 BPF_MOV64_REG(R5, R4),
4449 BPF_MOV64_REG(R6, R5),
4450 BPF_MOV64_REG(R7, R6),
4451 BPF_MOV64_REG(R8, R7),
4452 BPF_MOV64_REG(R9, R8),
4453 BPF_LD_IMM64(R0, 0x0LL),
4454 BPF_LD_IMM64(R1, 0x0LL),
4455 BPF_LD_IMM64(R2, 0x0LL),
4456 BPF_LD_IMM64(R3, 0x0LL),
4457 BPF_LD_IMM64(R4, 0x0LL),
4458 BPF_LD_IMM64(R5, 0x0LL),
4459 BPF_LD_IMM64(R6, 0x0LL),
4460 BPF_LD_IMM64(R7, 0x0LL),
4461 BPF_LD_IMM64(R8, 0x0LL),
4462 BPF_LD_IMM64(R9, 0x0LL),
4463 BPF_ALU64_REG(BPF_ADD, R0, R0),
4464 BPF_ALU64_REG(BPF_ADD, R0, R1),
4465 BPF_ALU64_REG(BPF_ADD, R0, R2),
4466 BPF_ALU64_REG(BPF_ADD, R0, R3),
4467 BPF_ALU64_REG(BPF_ADD, R0, R4),
4468 BPF_ALU64_REG(BPF_ADD, R0, R5),
4469 BPF_ALU64_REG(BPF_ADD, R0, R6),
4470 BPF_ALU64_REG(BPF_ADD, R0, R7),
4471 BPF_ALU64_REG(BPF_ADD, R0, R8),
4472 BPF_ALU64_REG(BPF_ADD, R0, R9),
4473 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4474 BPF_EXIT_INSN(),
4475 },
4476 INTERNAL,
4477 { },
4478 { { 0, 0xfefe } }
4479 },
4480 { /* Mixed 64-bit ALU ops: ADD with a negative immediate, XOR,
      * register DIV and immediate MOD, checked step by step.
      * 11 + (-1) = 10; (2 ^ 3) = 1 so 10 / 1 = 10; 10 % 3 = 1;
      * on success R0 is set to -1 (the expected result).
      */
4481 "INT: ALU MIX",
4482 .u.insns_int = {
4483 BPF_ALU64_IMM(BPF_MOV, R0, 11),
4484 BPF_ALU64_IMM(BPF_ADD, R0, -1),
4485 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4486 BPF_ALU64_IMM(BPF_XOR, R2, 3),
4487 BPF_ALU64_REG(BPF_DIV, R0, R2),
4488 BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
4489 BPF_EXIT_INSN(),
4490 BPF_ALU64_IMM(BPF_MOD, R0, 3),
4491 BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
4492 BPF_EXIT_INSN(),
4493 BPF_ALU64_IMM(BPF_MOV, R0, -1),
4494 BPF_EXIT_INSN(),
4495 },
4496 INTERNAL,
4497 { },
4498 { { 0, -1 } }
4499 },
4500 { /* Register-operand shifts: 32- and 64-bit LSH/RSH/ARSH where
      * the shift amount comes from a register, including the tricky
      * case of shifting a register by itself (R4 <<= R4). Returns -1
      * only if every intermediate check passed.
      */
4501 "INT: shifts by register",
4502 .u.insns_int = {
4503 BPF_MOV64_IMM(R0, -1234),
4504 BPF_MOV64_IMM(R1, 1),
4505 BPF_ALU32_REG(BPF_RSH, R0, R1), /* 32-bit logical shift of -1234 */
4506 BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
4507 BPF_EXIT_INSN(),
4508 BPF_MOV64_IMM(R2, 1),
4509 BPF_ALU64_REG(BPF_LSH, R0, R2),
4510 BPF_MOV32_IMM(R4, -1234),
4511 BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
4512 BPF_EXIT_INSN(),
4513 BPF_ALU64_IMM(BPF_AND, R4, 63),
4514 BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
4515 BPF_MOV64_IMM(R3, 47),
4516 BPF_ALU64_REG(BPF_ARSH, R0, R3),
4517 BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
4518 BPF_EXIT_INSN(),
4519 BPF_MOV64_IMM(R2, 1),
4520 BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
4521 BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
4522 BPF_EXIT_INSN(),
4523 BPF_MOV64_IMM(R4, 4),
4524 BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
4525 BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
4526 BPF_EXIT_INSN(),
4527 BPF_MOV64_IMM(R4, 5),
4528 BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
4529 BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
4530 BPF_EXIT_INSN(),
4531 BPF_MOV64_IMM(R0, -1),
4532 BPF_EXIT_INSN(),
4533 },
4534 INTERNAL,
4535 { },
4536 { { 0, -1 } }
4537 },
4538 #ifdef CONFIG_32BIT
4539 { /* On 32-bit kernels the context pointer in R1 must occupy the
      * low word with the high word zero. Passes (returns 1) when R1
      * is nonzero and its upper 32 bits are all zero.
      */
4540 "INT: 32-bit context pointer word order and zero-extension",
4541 .u.insns_int = {
4542 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4543 BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3), /* low word must be nonzero */
4544 BPF_ALU64_IMM(BPF_RSH, R1, 32),
4545 BPF_JMP32_IMM(BPF_JNE, R1, 0, 1), /* high word must be zero */
4546 BPF_ALU32_IMM(BPF_MOV, R0, 1),
4547 BPF_EXIT_INSN(),
4548 },
4549 INTERNAL,
4550 { },
4551 { { 0, 1 } }
4552 },
4553 #endif
4554 { /* Classic BPF checker must reject a filter whose last insn is
      * not a return.
      */
4555 "check: missing ret",
4556 .u.insns = {
4557 BPF_STMT(BPF_LD | BPF_IMM, 1),
4558 },
4559 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4560 { },
4561 { },
4562 .fill_helper = NULL,
4563 .expected_errcode = -EINVAL,
4564 },
4565 { /* Division by a constant zero must be rejected at load time. */
4566 "check: div_k_0",
4567 .u.insns = {
4568 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
4569 BPF_STMT(BPF_RET | BPF_K, 0)
4570 },
4571 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4572 { },
4573 { },
4574 .fill_helper = NULL,
4575 .expected_errcode = -EINVAL,
4576 },
4577 { /* An opcode valid only for seccomp filters must be rejected by
      * the socket-filter checker.
      */
4578 "check: unknown insn",
4579 .u.insns = {
4580 /* seccomp insn, rejected in socket filter */
4581 BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
4582 BPF_STMT(BPF_RET | BPF_K, 0)
4583 },
4584 CLASSIC | FLAG_EXPECTED_FAIL,
4585 { },
4586 { },
4587 .fill_helper = NULL,
4588 .expected_errcode = -EINVAL,
4589 },
4590 { /* Scratch memory index 16 is one past the classic BPF M[]
      * array (valid slots are 0..15) and must be rejected.
      */
4591 "check: out of range spill/fill",
4592 .u.insns = {
4593 BPF_STMT(BPF_STX, 16),
4594 BPF_STMT(BPF_RET | BPF_K, 0)
4595 },
4596 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4597 { },
4598 { },
4599 .fill_helper = NULL,
4600 .expected_errcode = -EINVAL,
4601 },
4602 { /* Conditional jumps over long runs of filler loads ("holes"),
      * stressing branch-offset handling in the checker and JIT. The
      * payload is an Ethernet/IPv4/UDP frame; the filter ultimately
      * returns the halfword at offset 0 (0x001b, start of the dest
      * MAC) via RET A.
      */
4603 "JUMPS + HOLES",
4604 .u.insns = {
4605 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4606 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
4607 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4608 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4609 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4610 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4611 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4612 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4613 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4614 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4615 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4616 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4617 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4618 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4619 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4620 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
4621 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4622 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
4623 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4624 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4625 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4626 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4627 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4628 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4629 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4630 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4631 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4632 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4633 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4634 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4635 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4636 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4637 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4638 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4639 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
4640 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
4641 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4642 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4643 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4644 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4645 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4646 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4647 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4648 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4649 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4650 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4651 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4652 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4653 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4654 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4655 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4656 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4657 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
4658 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
4659 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4660 BPF_STMT(BPF_RET | BPF_A, 0),
4661 BPF_STMT(BPF_RET | BPF_A, 0),
4662 },
4663 CLASSIC,
4664 { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4665 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4666 0x08, 0x00,
4667 0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4668 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4669 0xc0, 0xa8, 0x33, 0x01,
4670 0xc0, 0xa8, 0x33, 0x02,
4671 0xbb, 0xb6,
4672 0xa9, 0xfa,
4673 0x00, 0x14, 0x00, 0x00,
4674 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4675 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4676 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4677 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4678 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4679 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4680 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4681 0xcc, 0xcc, 0xcc, 0xcc },
4682 { { 88, 0x001b } }
4683 },
4684 { /* Socket-filter checker must reject RET X (returning the index
      * register) — only RET A / RET K are accepted here.
      */
4685 "check: RET X",
4686 .u.insns = {
4687 BPF_STMT(BPF_RET | BPF_X, 0),
4688 },
4689 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4690 { },
4691 { },
4692 .fill_helper = NULL,
4693 .expected_errcode = -EINVAL,
4694 },
4695 { /* Same rejection even when X has been loaded first. */
4696 "check: LDX + RET X",
4697 .u.insns = {
4698 BPF_STMT(BPF_LDX | BPF_IMM, 42),
4699 BPF_STMT(BPF_RET | BPF_X, 0),
4700 },
4701 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4702 { },
4703 { },
4704 .fill_helper = NULL,
4705 .expected_errcode = -EINVAL,
4706 },
4707 { /* Mainly checking JIT here. */
      /* Alternating STX/LDX over every classic BPF scratch slot
       * M[0..15]: start X at 100, then for each slot store X, load
       * it back, and increment via A. The final value is
       * 100 + 16 == 116.
       */
4708 "M[]: alt STX + LDX",
4709 .u.insns = {
4710 BPF_STMT(BPF_LDX | BPF_IMM, 100),
4711 BPF_STMT(BPF_STX, 0),
4712 BPF_STMT(BPF_LDX | BPF_MEM, 0),
4713 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4714 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4715 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4716 BPF_STMT(BPF_STX, 1),
4717 BPF_STMT(BPF_LDX | BPF_MEM, 1),
4718 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4719 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4720 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4721 BPF_STMT(BPF_STX, 2),
4722 BPF_STMT(BPF_LDX | BPF_MEM, 2),
4723 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4724 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4725 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4726 BPF_STMT(BPF_STX, 3),
4727 BPF_STMT(BPF_LDX | BPF_MEM, 3),
4728 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4729 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4730 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4731 BPF_STMT(BPF_STX, 4),
4732 BPF_STMT(BPF_LDX | BPF_MEM, 4),
4733 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4734 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4735 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4736 BPF_STMT(BPF_STX, 5),
4737 BPF_STMT(BPF_LDX | BPF_MEM, 5),
4738 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4739 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4740 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4741 BPF_STMT(BPF_STX, 6),
4742 BPF_STMT(BPF_LDX | BPF_MEM, 6),
4743 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4744 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4745 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4746 BPF_STMT(BPF_STX, 7),
4747 BPF_STMT(BPF_LDX | BPF_MEM, 7),
4748 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4749 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4750 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4751 BPF_STMT(BPF_STX, 8),
4752 BPF_STMT(BPF_LDX | BPF_MEM, 8),
4753 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4754 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4755 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4756 BPF_STMT(BPF_STX, 9),
4757 BPF_STMT(BPF_LDX | BPF_MEM, 9),
4758 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4759 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4760 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4761 BPF_STMT(BPF_STX, 10),
4762 BPF_STMT(BPF_LDX | BPF_MEM, 10),
4763 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4764 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4765 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4766 BPF_STMT(BPF_STX, 11),
4767 BPF_STMT(BPF_LDX | BPF_MEM, 11),
4768 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4769 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4770 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4771 BPF_STMT(BPF_STX, 12),
4772 BPF_STMT(BPF_LDX | BPF_MEM, 12),
4773 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4774 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4775 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4776 BPF_STMT(BPF_STX, 13),
4777 BPF_STMT(BPF_LDX | BPF_MEM, 13),
4778 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4779 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4780 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4781 BPF_STMT(BPF_STX, 14),
4782 BPF_STMT(BPF_LDX | BPF_MEM, 14),
4783 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4784 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4785 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4786 BPF_STMT(BPF_STX, 15),
4787 BPF_STMT(BPF_LDX | BPF_MEM, 15),
4788 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4789 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4790 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4791 BPF_STMT(BPF_RET | BPF_A, 0),
4792 },
4793 CLASSIC | FLAG_NO_DATA,
4794 { },
4795 { { 0, 116 } },
4796 },
4797 { /* Mainly checking JIT here. */
      /* Fill all 16 classic BPF scratch slots with distinct 32-bit
       * patterns, then read every slot back and sum them into A.
       * Catches JITs that alias, clobber, or drop M[] slots; the
       * expected value is the 32-bit sum of the 16 constants.
       */
4798 "M[]: full STX + full LDX",
4799 .u.insns = {
4800 BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
4801 BPF_STMT(BPF_STX, 0),
4802 BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
4803 BPF_STMT(BPF_STX, 1),
4804 BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
4805 BPF_STMT(BPF_STX, 2),
4806 BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
4807 BPF_STMT(BPF_STX, 3),
4808 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
4809 BPF_STMT(BPF_STX, 4),
4810 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
4811 BPF_STMT(BPF_STX, 5),
4812 BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
4813 BPF_STMT(BPF_STX, 6),
4814 BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
4815 BPF_STMT(BPF_STX, 7),
4816 BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
4817 BPF_STMT(BPF_STX, 8),
4818 BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
4819 BPF_STMT(BPF_STX, 9),
4820 BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
4821 BPF_STMT(BPF_STX, 10),
4822 BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
4823 BPF_STMT(BPF_STX, 11),
4824 BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
4825 BPF_STMT(BPF_STX, 12),
4826 BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
4827 BPF_STMT(BPF_STX, 13),
4828 BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
4829 BPF_STMT(BPF_STX, 14),
4830 BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
4831 BPF_STMT(BPF_STX, 15),
4832 BPF_STMT(BPF_LDX | BPF_MEM, 0),
4833 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4834 BPF_STMT(BPF_LDX | BPF_MEM, 1),
4835 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4836 BPF_STMT(BPF_LDX | BPF_MEM, 2),
4837 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4838 BPF_STMT(BPF_LDX | BPF_MEM, 3),
4839 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4840 BPF_STMT(BPF_LDX | BPF_MEM, 4),
4841 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4842 BPF_STMT(BPF_LDX | BPF_MEM, 5),
4843 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4844 BPF_STMT(BPF_LDX | BPF_MEM, 6),
4845 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4846 BPF_STMT(BPF_LDX | BPF_MEM, 7),
4847 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4848 BPF_STMT(BPF_LDX | BPF_MEM, 8),
4849 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4850 BPF_STMT(BPF_LDX | BPF_MEM, 9),
4851 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4852 BPF_STMT(BPF_LDX | BPF_MEM, 10),
4853 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4854 BPF_STMT(BPF_LDX | BPF_MEM, 11),
4855 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4856 BPF_STMT(BPF_LDX | BPF_MEM, 12),
4857 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4858 BPF_STMT(BPF_LDX | BPF_MEM, 13),
4859 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4860 BPF_STMT(BPF_LDX | BPF_MEM, 14),
4861 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4862 BPF_STMT(BPF_LDX | BPF_MEM, 15),
4863 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4864 BPF_STMT(BPF_RET | BPF_A, 0),
4865 },
4866 CLASSIC | FLAG_NO_DATA,
4867 { },
4868 { { 0, 0x2a5a5e5 } },
4869 },
4870 { /* An ancillary load at exactly SKF_AD_OFF + SKF_AD_MAX is out
      * of range and must be rejected by the checker.
      */
4871 "check: SKF_AD_MAX",
4872 .u.insns = {
4873 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4874 SKF_AD_OFF + SKF_AD_MAX),
4875 BPF_STMT(BPF_RET | BPF_A, 0),
4876 },
4877 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4878 { },
4879 { },
4880 .fill_helper = NULL,
4881 .expected_errcode = -EINVAL,
4882 },
4883 { /* Passes checker but fails during runtime. */
      /* Offset SKF_AD_OFF - 1 is a plain (huge) packet offset, not
       * an ancillary load, so the load fails at runtime and the
       * filter returns 0 instead of reaching RET 1.
       */
4884 "LD [SKF_AD_OFF-1]",
4885 .u.insns = {
4886 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4887 SKF_AD_OFF - 1),
4888 BPF_STMT(BPF_RET | BPF_K, 1),
4889 },
4890 CLASSIC,
4891 { },
4892 { { 1, 0 } },
4893 },
4894 {
4895 "load 64-bit immediate",
4896 .u.insns_int = {
4897 BPF_LD_IMM64(R1, 0x567800001234LL),
4898 BPF_MOV64_REG(R2, R1),
4899 BPF_MOV64_REG(R3, R2),
4900 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4901 BPF_ALU64_IMM(BPF_LSH, R3, 32),
4902 BPF_ALU64_IMM(BPF_RSH, R3, 32),
4903 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4904 BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
4905 BPF_EXIT_INSN(),
4906 BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
4907 BPF_EXIT_INSN(),
4908 BPF_LD_IMM64(R0, 0x1ffffffffLL),
4909 BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
4910 BPF_EXIT_INSN(),
4911 },
4912 INTERNAL,
4913 { },
4914 { { 0, 1 } }
4915 },
4916 /* BPF_ALU | BPF_MOV | BPF_X */
4917 {
4918 "ALU_MOV_X: dst = 2",
4919 .u.insns_int = {
4920 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4921 BPF_ALU32_REG(BPF_MOV, R0, R1),
4922 BPF_EXIT_INSN(),
4923 },
4924 INTERNAL,
4925 { },
4926 { { 0, 2 } },
4927 },
4928 {
4929 "ALU_MOV_X: dst = 4294967295",
4930 .u.insns_int = {
4931 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4932 BPF_ALU32_REG(BPF_MOV, R0, R1),
4933 BPF_EXIT_INSN(),
4934 },
4935 INTERNAL,
4936 { },
4937 { { 0, 4294967295U } },
4938 },
4939 {
4940 "ALU64_MOV_X: dst = 2",
4941 .u.insns_int = {
4942 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4943 BPF_ALU64_REG(BPF_MOV, R0, R1),
4944 BPF_EXIT_INSN(),
4945 },
4946 INTERNAL,
4947 { },
4948 { { 0, 2 } },
4949 },
4950 {
4951 "ALU64_MOV_X: dst = 4294967295",
4952 .u.insns_int = {
4953 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4954 BPF_ALU64_REG(BPF_MOV, R0, R1),
4955 BPF_EXIT_INSN(),
4956 },
4957 INTERNAL,
4958 { },
4959 { { 0, 4294967295U } },
4960 },
4961 /* BPF_ALU | BPF_MOV | BPF_K */
4962 {
4963 "ALU_MOV_K: dst = 2",
4964 .u.insns_int = {
4965 BPF_ALU32_IMM(BPF_MOV, R0, 2),
4966 BPF_EXIT_INSN(),
4967 },
4968 INTERNAL,
4969 { },
4970 { { 0, 2 } },
4971 },
4972 {
4973 "ALU_MOV_K: dst = 4294967295",
4974 .u.insns_int = {
4975 BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
4976 BPF_EXIT_INSN(),
4977 },
4978 INTERNAL,
4979 { },
4980 { { 0, 4294967295U } },
4981 },
4982 {
4983 "ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4984 .u.insns_int = {
4985 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4986 BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
4987 BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4988 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4989 BPF_MOV32_IMM(R0, 2),
4990 BPF_EXIT_INSN(),
4991 BPF_MOV32_IMM(R0, 1),
4992 BPF_EXIT_INSN(),
4993 },
4994 INTERNAL,
4995 { },
4996 { { 0, 0x1 } },
4997 },
4998 {
4999 "ALU_MOV_K: small negative",
5000 .u.insns_int = {
5001 BPF_ALU32_IMM(BPF_MOV, R0, -123),
5002 BPF_EXIT_INSN(),
5003 },
5004 INTERNAL,
5005 { },
5006 { { 0, -123 } }
5007 },
5008 {
5009 "ALU_MOV_K: small negative zero extension",
5010 .u.insns_int = {
5011 BPF_ALU32_IMM(BPF_MOV, R0, -123),
5012 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5013 BPF_EXIT_INSN(),
5014 },
5015 INTERNAL,
5016 { },
5017 { { 0, 0 } }
5018 },
5019 {
5020 "ALU_MOV_K: large negative",
5021 .u.insns_int = {
5022 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5023 BPF_EXIT_INSN(),
5024 },
5025 INTERNAL,
5026 { },
5027 { { 0, -123456789 } }
5028 },
5029 {
5030 "ALU_MOV_K: large negative zero extension",
5031 .u.insns_int = {
5032 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5033 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5034 BPF_EXIT_INSN(),
5035 },
5036 INTERNAL,
5037 { },
5038 { { 0, 0 } }
5039 },
5040 {
5041 "ALU64_MOV_K: dst = 2",
5042 .u.insns_int = {
5043 BPF_ALU64_IMM(BPF_MOV, R0, 2),
5044 BPF_EXIT_INSN(),
5045 },
5046 INTERNAL,
5047 { },
5048 { { 0, 2 } },
5049 },
5050 {
5051 "ALU64_MOV_K: dst = 2147483647",
5052 .u.insns_int = {
5053 BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
5054 BPF_EXIT_INSN(),
5055 },
5056 INTERNAL,
5057 { },
5058 { { 0, 2147483647 } },
5059 },
5060 {
5061 "ALU64_OR_K: dst = 0x0",
5062 .u.insns_int = {
5063 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5064 BPF_LD_IMM64(R3, 0x0),
5065 BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
5066 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5067 BPF_MOV32_IMM(R0, 2),
5068 BPF_EXIT_INSN(),
5069 BPF_MOV32_IMM(R0, 1),
5070 BPF_EXIT_INSN(),
5071 },
5072 INTERNAL,
5073 { },
5074 { { 0, 0x1 } },
5075 },
5076 {
5077 "ALU64_MOV_K: dst = -1",
5078 .u.insns_int = {
5079 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5080 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5081 BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
5082 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5083 BPF_MOV32_IMM(R0, 2),
5084 BPF_EXIT_INSN(),
5085 BPF_MOV32_IMM(R0, 1),
5086 BPF_EXIT_INSN(),
5087 },
5088 INTERNAL,
5089 { },
5090 { { 0, 0x1 } },
5091 },
5092 {
5093 "ALU64_MOV_K: small negative",
5094 .u.insns_int = {
5095 BPF_ALU64_IMM(BPF_MOV, R0, -123),
5096 BPF_EXIT_INSN(),
5097 },
5098 INTERNAL,
5099 { },
5100 { { 0, -123 } }
5101 },
5102 {
5103 "ALU64_MOV_K: small negative sign extension",
5104 .u.insns_int = {
5105 BPF_ALU64_IMM(BPF_MOV, R0, -123),
5106 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5107 BPF_EXIT_INSN(),
5108 },
5109 INTERNAL,
5110 { },
5111 { { 0, 0xffffffff } }
5112 },
5113 {
5114 "ALU64_MOV_K: large negative",
5115 .u.insns_int = {
5116 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5117 BPF_EXIT_INSN(),
5118 },
5119 INTERNAL,
5120 { },
5121 { { 0, -123456789 } }
5122 },
5123 {
5124 "ALU64_MOV_K: large negative sign extension",
5125 .u.insns_int = {
5126 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5127 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5128 BPF_EXIT_INSN(),
5129 },
5130 INTERNAL,
5131 { },
5132 { { 0, 0xffffffff } }
5133 },
5134 /* MOVSX32 */
5135 {
5136 "ALU_MOVSX | BPF_B",
5137 .u.insns_int = {
5138 BPF_LD_IMM64(R2, 0x00000000ffffffefLL),
5139 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5140 BPF_MOVSX32_REG(R1, R3, 8),
5141 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5142 BPF_MOV32_IMM(R0, 2),
5143 BPF_EXIT_INSN(),
5144 BPF_MOV32_IMM(R0, 1),
5145 BPF_EXIT_INSN(),
5146 },
5147 INTERNAL,
5148 { },
5149 { { 0, 0x1 } },
5150 },
5151 {
5152 "ALU_MOVSX | BPF_H",
5153 .u.insns_int = {
5154 BPF_LD_IMM64(R2, 0x00000000ffffbeefLL),
5155 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5156 BPF_MOVSX32_REG(R1, R3, 16),
5157 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5158 BPF_MOV32_IMM(R0, 2),
5159 BPF_EXIT_INSN(),
5160 BPF_MOV32_IMM(R0, 1),
5161 BPF_EXIT_INSN(),
5162 },
5163 INTERNAL,
5164 { },
5165 { { 0, 0x1 } },
5166 },
5167 /* MOVSX64 REG */
5168 {
5169 "ALU64_MOVSX | BPF_B",
5170 .u.insns_int = {
5171 BPF_LD_IMM64(R2, 0xffffffffffffffefLL),
5172 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5173 BPF_MOVSX64_REG(R1, R3, 8),
5174 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5175 BPF_MOV32_IMM(R0, 2),
5176 BPF_EXIT_INSN(),
5177 BPF_MOV32_IMM(R0, 1),
5178 BPF_EXIT_INSN(),
5179 },
5180 INTERNAL,
5181 { },
5182 { { 0, 0x1 } },
5183 },
5184 {
5185 "ALU64_MOVSX | BPF_H",
5186 .u.insns_int = {
5187 BPF_LD_IMM64(R2, 0xffffffffffffbeefLL),
5188 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5189 BPF_MOVSX64_REG(R1, R3, 16),
5190 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5191 BPF_MOV32_IMM(R0, 2),
5192 BPF_EXIT_INSN(),
5193 BPF_MOV32_IMM(R0, 1),
5194 BPF_EXIT_INSN(),
5195 },
5196 INTERNAL,
5197 { },
5198 { { 0, 0x1 } },
5199 },
5200 {
5201 "ALU64_MOVSX | BPF_W",
5202 .u.insns_int = {
5203 BPF_LD_IMM64(R2, 0xffffffffdeadbeefLL),
5204 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5205 BPF_MOVSX64_REG(R1, R3, 32),
5206 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5207 BPF_MOV32_IMM(R0, 2),
5208 BPF_EXIT_INSN(),
5209 BPF_MOV32_IMM(R0, 1),
5210 BPF_EXIT_INSN(),
5211 },
5212 INTERNAL,
5213 { },
5214 { { 0, 0x1 } },
5215 },
5216 /* BPF_ALU | BPF_ADD | BPF_X */
5217 {
5218 "ALU_ADD_X: 1 + 2 = 3",
5219 .u.insns_int = {
5220 BPF_LD_IMM64(R0, 1),
5221 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5222 BPF_ALU32_REG(BPF_ADD, R0, R1),
5223 BPF_EXIT_INSN(),
5224 },
5225 INTERNAL,
5226 { },
5227 { { 0, 3 } },
5228 },
5229 {
5230 "ALU_ADD_X: 1 + 4294967294 = 4294967295",
5231 .u.insns_int = {
5232 BPF_LD_IMM64(R0, 1),
5233 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5234 BPF_ALU32_REG(BPF_ADD, R0, R1),
5235 BPF_EXIT_INSN(),
5236 },
5237 INTERNAL,
5238 { },
5239 { { 0, 4294967295U } },
5240 },
5241 {
5242 "ALU_ADD_X: 2 + 4294967294 = 0",
5243 .u.insns_int = {
5244 BPF_LD_IMM64(R0, 2),
5245 BPF_LD_IMM64(R1, 4294967294U),
5246 BPF_ALU32_REG(BPF_ADD, R0, R1),
5247 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5248 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5249 BPF_EXIT_INSN(),
5250 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5251 BPF_EXIT_INSN(),
5252 },
5253 INTERNAL,
5254 { },
5255 { { 0, 1 } },
5256 },
5257 {
5258 "ALU64_ADD_X: 1 + 2 = 3",
5259 .u.insns_int = {
5260 BPF_LD_IMM64(R0, 1),
5261 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5262 BPF_ALU64_REG(BPF_ADD, R0, R1),
5263 BPF_EXIT_INSN(),
5264 },
5265 INTERNAL,
5266 { },
5267 { { 0, 3 } },
5268 },
5269 {
5270 "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5271 .u.insns_int = {
5272 BPF_LD_IMM64(R0, 1),
5273 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5274 BPF_ALU64_REG(BPF_ADD, R0, R1),
5275 BPF_EXIT_INSN(),
5276 },
5277 INTERNAL,
5278 { },
5279 { { 0, 4294967295U } },
5280 },
5281 {
5282 "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5283 .u.insns_int = {
5284 BPF_LD_IMM64(R0, 2),
5285 BPF_LD_IMM64(R1, 4294967294U),
5286 BPF_LD_IMM64(R2, 4294967296ULL),
5287 BPF_ALU64_REG(BPF_ADD, R0, R1),
5288 BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
5289 BPF_MOV32_IMM(R0, 0),
5290 BPF_EXIT_INSN(),
5291 BPF_MOV32_IMM(R0, 1),
5292 BPF_EXIT_INSN(),
5293 },
5294 INTERNAL,
5295 { },
5296 { { 0, 1 } },
5297 },
5298 /* BPF_ALU | BPF_ADD | BPF_K */
5299 {
5300 "ALU_ADD_K: 1 + 2 = 3",
5301 .u.insns_int = {
5302 BPF_LD_IMM64(R0, 1),
5303 BPF_ALU32_IMM(BPF_ADD, R0, 2),
5304 BPF_EXIT_INSN(),
5305 },
5306 INTERNAL,
5307 { },
5308 { { 0, 3 } },
5309 },
5310 {
5311 "ALU_ADD_K: 3 + 0 = 3",
5312 .u.insns_int = {
5313 BPF_LD_IMM64(R0, 3),
5314 BPF_ALU32_IMM(BPF_ADD, R0, 0),
5315 BPF_EXIT_INSN(),
5316 },
5317 INTERNAL,
5318 { },
5319 { { 0, 3 } },
5320 },
5321 {
5322 "ALU_ADD_K: 1 + 4294967294 = 4294967295",
5323 .u.insns_int = {
5324 BPF_LD_IMM64(R0, 1),
5325 BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
5326 BPF_EXIT_INSN(),
5327 },
5328 INTERNAL,
5329 { },
5330 { { 0, 4294967295U } },
5331 },
5332 {
5333 "ALU_ADD_K: 4294967294 + 2 = 0",
5334 .u.insns_int = {
5335 BPF_LD_IMM64(R0, 4294967294U),
5336 BPF_ALU32_IMM(BPF_ADD, R0, 2),
5337 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5338 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5339 BPF_EXIT_INSN(),
5340 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5341 BPF_EXIT_INSN(),
5342 },
5343 INTERNAL,
5344 { },
5345 { { 0, 1 } },
5346 },
5347 {
5348 "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5349 .u.insns_int = {
5350 BPF_LD_IMM64(R2, 0x0),
5351 BPF_LD_IMM64(R3, 0x00000000ffffffff),
5352 BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
5353 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5354 BPF_MOV32_IMM(R0, 2),
5355 BPF_EXIT_INSN(),
5356 BPF_MOV32_IMM(R0, 1),
5357 BPF_EXIT_INSN(),
5358 },
5359 INTERNAL,
5360 { },
5361 { { 0, 0x1 } },
5362 },
5363 {
5364 "ALU_ADD_K: 0 + 0xffff = 0xffff",
5365 .u.insns_int = {
5366 BPF_LD_IMM64(R2, 0x0),
5367 BPF_LD_IMM64(R3, 0xffff),
5368 BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
5369 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5370 BPF_MOV32_IMM(R0, 2),
5371 BPF_EXIT_INSN(),
5372 BPF_MOV32_IMM(R0, 1),
5373 BPF_EXIT_INSN(),
5374 },
5375 INTERNAL,
5376 { },
5377 { { 0, 0x1 } },
5378 },
5379 {
5380 "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5381 .u.insns_int = {
5382 BPF_LD_IMM64(R2, 0x0),
5383 BPF_LD_IMM64(R3, 0x7fffffff),
5384 BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
5385 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5386 BPF_MOV32_IMM(R0, 2),
5387 BPF_EXIT_INSN(),
5388 BPF_MOV32_IMM(R0, 1),
5389 BPF_EXIT_INSN(),
5390 },
5391 INTERNAL,
5392 { },
5393 { { 0, 0x1 } },
5394 },
5395 {
5396 "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5397 .u.insns_int = {
5398 BPF_LD_IMM64(R2, 0x0),
5399 BPF_LD_IMM64(R3, 0x80000000),
5400 BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
5401 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5402 BPF_MOV32_IMM(R0, 2),
5403 BPF_EXIT_INSN(),
5404 BPF_MOV32_IMM(R0, 1),
5405 BPF_EXIT_INSN(),
5406 },
5407 INTERNAL,
5408 { },
5409 { { 0, 0x1 } },
5410 },
5411 {
5412 "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5413 .u.insns_int = {
5414 BPF_LD_IMM64(R2, 0x0),
5415 BPF_LD_IMM64(R3, 0x80008000),
5416 BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
5417 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5418 BPF_MOV32_IMM(R0, 2),
5419 BPF_EXIT_INSN(),
5420 BPF_MOV32_IMM(R0, 1),
5421 BPF_EXIT_INSN(),
5422 },
5423 INTERNAL,
5424 { },
5425 { { 0, 0x1 } },
5426 },
5427 {
5428 "ALU64_ADD_K: 1 + 2 = 3",
5429 .u.insns_int = {
5430 BPF_LD_IMM64(R0, 1),
5431 BPF_ALU64_IMM(BPF_ADD, R0, 2),
5432 BPF_EXIT_INSN(),
5433 },
5434 INTERNAL,
5435 { },
5436 { { 0, 3 } },
5437 },
5438 {
5439 "ALU64_ADD_K: 3 + 0 = 3",
5440 .u.insns_int = {
5441 BPF_LD_IMM64(R0, 3),
5442 BPF_ALU64_IMM(BPF_ADD, R0, 0),
5443 BPF_EXIT_INSN(),
5444 },
5445 INTERNAL,
5446 { },
5447 { { 0, 3 } },
5448 },
5449 {
5450 "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5451 .u.insns_int = {
5452 BPF_LD_IMM64(R0, 1),
5453 BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
5454 BPF_EXIT_INSN(),
5455 },
5456 INTERNAL,
5457 { },
5458 { { 0, 2147483647 } },
5459 },
5460 {
5461 "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5462 .u.insns_int = {
5463 BPF_LD_IMM64(R0, 4294967294U),
5464 BPF_LD_IMM64(R1, 4294967296ULL),
5465 BPF_ALU64_IMM(BPF_ADD, R0, 2),
5466 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5467 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5468 BPF_EXIT_INSN(),
5469 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5470 BPF_EXIT_INSN(),
5471 },
5472 INTERNAL,
5473 { },
5474 { { 0, 1 } },
5475 },
5476 {
5477 "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5478 .u.insns_int = {
5479 BPF_LD_IMM64(R0, 2147483646),
5480 BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
5481 BPF_EXIT_INSN(),
5482 },
5483 INTERNAL,
5484 { },
5485 { { 0, -1 } },
5486 },
5487 {
5488 "ALU64_ADD_K: 1 + 0 = 1",
5489 .u.insns_int = {
5490 BPF_LD_IMM64(R2, 0x1),
5491 BPF_LD_IMM64(R3, 0x1),
5492 BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
5493 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5494 BPF_MOV32_IMM(R0, 2),
5495 BPF_EXIT_INSN(),
5496 BPF_MOV32_IMM(R0, 1),
5497 BPF_EXIT_INSN(),
5498 },
5499 INTERNAL,
5500 { },
5501 { { 0, 0x1 } },
5502 },
5503 {
5504 "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5505 .u.insns_int = {
5506 BPF_LD_IMM64(R2, 0x0),
5507 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5508 BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
5509 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5510 BPF_MOV32_IMM(R0, 2),
5511 BPF_EXIT_INSN(),
5512 BPF_MOV32_IMM(R0, 1),
5513 BPF_EXIT_INSN(),
5514 },
5515 INTERNAL,
5516 { },
5517 { { 0, 0x1 } },
5518 },
5519 {
5520 "ALU64_ADD_K: 0 + 0xffff = 0xffff",
5521 .u.insns_int = {
5522 BPF_LD_IMM64(R2, 0x0),
5523 BPF_LD_IMM64(R3, 0xffff),
5524 BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
5525 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5526 BPF_MOV32_IMM(R0, 2),
5527 BPF_EXIT_INSN(),
5528 BPF_MOV32_IMM(R0, 1),
5529 BPF_EXIT_INSN(),
5530 },
5531 INTERNAL,
5532 { },
5533 { { 0, 0x1 } },
5534 },
5535 {
5536 "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5537 .u.insns_int = {
5538 BPF_LD_IMM64(R2, 0x0),
5539 BPF_LD_IMM64(R3, 0x7fffffff),
5540 BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
5541 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5542 BPF_MOV32_IMM(R0, 2),
5543 BPF_EXIT_INSN(),
5544 BPF_MOV32_IMM(R0, 1),
5545 BPF_EXIT_INSN(),
5546 },
5547 INTERNAL,
5548 { },
5549 { { 0, 0x1 } },
5550 },
5551 {
5552 "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5553 .u.insns_int = {
5554 BPF_LD_IMM64(R2, 0x0),
5555 BPF_LD_IMM64(R3, 0xffffffff80000000LL),
5556 BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
5557 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5558 BPF_MOV32_IMM(R0, 2),
5559 BPF_EXIT_INSN(),
5560 BPF_MOV32_IMM(R0, 1),
5561 BPF_EXIT_INSN(),
5562 },
5563 INTERNAL,
5564 { },
5565 { { 0, 0x1 } },
5566 },
5567 {
5568 "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5569 .u.insns_int = {
5570 BPF_LD_IMM64(R2, 0x0),
5571 BPF_LD_IMM64(R3, 0xffffffff80008000LL),
5572 BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
5573 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5574 BPF_MOV32_IMM(R0, 2),
5575 BPF_EXIT_INSN(),
5576 BPF_MOV32_IMM(R0, 1),
5577 BPF_EXIT_INSN(),
5578 },
5579 INTERNAL,
5580 { },
5581 { { 0, 0x1 } },
5582 },
5583 /* BPF_ALU | BPF_SUB | BPF_X */
5584 {
5585 "ALU_SUB_X: 3 - 1 = 2",
5586 .u.insns_int = {
5587 BPF_LD_IMM64(R0, 3),
5588 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5589 BPF_ALU32_REG(BPF_SUB, R0, R1),
5590 BPF_EXIT_INSN(),
5591 },
5592 INTERNAL,
5593 { },
5594 { { 0, 2 } },
5595 },
5596 {
5597 "ALU_SUB_X: 4294967295 - 4294967294 = 1",
5598 .u.insns_int = {
5599 BPF_LD_IMM64(R0, 4294967295U),
5600 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5601 BPF_ALU32_REG(BPF_SUB, R0, R1),
5602 BPF_EXIT_INSN(),
5603 },
5604 INTERNAL,
5605 { },
5606 { { 0, 1 } },
5607 },
5608 {
5609 "ALU64_SUB_X: 3 - 1 = 2",
5610 .u.insns_int = {
5611 BPF_LD_IMM64(R0, 3),
5612 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5613 BPF_ALU64_REG(BPF_SUB, R0, R1),
5614 BPF_EXIT_INSN(),
5615 },
5616 INTERNAL,
5617 { },
5618 { { 0, 2 } },
5619 },
5620 {
5621 "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5622 .u.insns_int = {
5623 BPF_LD_IMM64(R0, 4294967295U),
5624 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5625 BPF_ALU64_REG(BPF_SUB, R0, R1),
5626 BPF_EXIT_INSN(),
5627 },
5628 INTERNAL,
5629 { },
5630 { { 0, 1 } },
5631 },
5632 /* BPF_ALU | BPF_SUB | BPF_K */
5633 {
5634 "ALU_SUB_K: 3 - 1 = 2",
5635 .u.insns_int = {
5636 BPF_LD_IMM64(R0, 3),
5637 BPF_ALU32_IMM(BPF_SUB, R0, 1),
5638 BPF_EXIT_INSN(),
5639 },
5640 INTERNAL,
5641 { },
5642 { { 0, 2 } },
5643 },
5644 {
5645 "ALU_SUB_K: 3 - 0 = 3",
5646 .u.insns_int = {
5647 BPF_LD_IMM64(R0, 3),
5648 BPF_ALU32_IMM(BPF_SUB, R0, 0),
5649 BPF_EXIT_INSN(),
5650 },
5651 INTERNAL,
5652 { },
5653 { { 0, 3 } },
5654 },
5655 {
5656 "ALU_SUB_K: 4294967295 - 4294967294 = 1",
5657 .u.insns_int = {
5658 BPF_LD_IMM64(R0, 4294967295U),
5659 BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
5660 BPF_EXIT_INSN(),
5661 },
5662 INTERNAL,
5663 { },
5664 { { 0, 1 } },
5665 },
5666 {
5667 "ALU64_SUB_K: 3 - 1 = 2",
5668 .u.insns_int = {
5669 BPF_LD_IMM64(R0, 3),
5670 BPF_ALU64_IMM(BPF_SUB, R0, 1),
5671 BPF_EXIT_INSN(),
5672 },
5673 INTERNAL,
5674 { },
5675 { { 0, 2 } },
5676 },
5677 {
5678 "ALU64_SUB_K: 3 - 0 = 3",
5679 .u.insns_int = {
5680 BPF_LD_IMM64(R0, 3),
5681 BPF_ALU64_IMM(BPF_SUB, R0, 0),
5682 BPF_EXIT_INSN(),
5683 },
5684 INTERNAL,
5685 { },
5686 { { 0, 3 } },
5687 },
5688 {
5689 "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5690 .u.insns_int = {
5691 BPF_LD_IMM64(R0, 4294967294U),
5692 BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
5693 BPF_EXIT_INSN(),
5694 },
5695 INTERNAL,
5696 { },
5697 { { 0, -1 } },
5698 },
5699 {
5700 "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
5701 .u.insns_int = {
5702 BPF_LD_IMM64(R0, 2147483646),
5703 BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
5704 BPF_EXIT_INSN(),
5705 },
5706 INTERNAL,
5707 { },
5708 { { 0, -1 } },
5709 },
5710 /* BPF_ALU | BPF_MUL | BPF_X */
5711 {
5712 "ALU_MUL_X: 2 * 3 = 6",
5713 .u.insns_int = {
5714 BPF_LD_IMM64(R0, 2),
5715 BPF_ALU32_IMM(BPF_MOV, R1, 3),
5716 BPF_ALU32_REG(BPF_MUL, R0, R1),
5717 BPF_EXIT_INSN(),
5718 },
5719 INTERNAL,
5720 { },
5721 { { 0, 6 } },
5722 },
5723 {
5724 "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5725 .u.insns_int = {
5726 BPF_LD_IMM64(R0, 2),
5727 BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
5728 BPF_ALU32_REG(BPF_MUL, R0, R1),
5729 BPF_EXIT_INSN(),
5730 },
5731 INTERNAL,
5732 { },
5733 { { 0, 0xFFFFFFF0 } },
5734 },
5735 {
5736 "ALU_MUL_X: -1 * -1 = 1",
5737 .u.insns_int = {
5738 BPF_LD_IMM64(R0, -1),
5739 BPF_ALU32_IMM(BPF_MOV, R1, -1),
5740 BPF_ALU32_REG(BPF_MUL, R0, R1),
5741 BPF_EXIT_INSN(),
5742 },
5743 INTERNAL,
5744 { },
5745 { { 0, 1 } },
5746 },
5747 {
5748 "ALU64_MUL_X: 2 * 3 = 6",
5749 .u.insns_int = {
5750 BPF_LD_IMM64(R0, 2),
5751 BPF_ALU32_IMM(BPF_MOV, R1, 3),
5752 BPF_ALU64_REG(BPF_MUL, R0, R1),
5753 BPF_EXIT_INSN(),
5754 },
5755 INTERNAL,
5756 { },
5757 { { 0, 6 } },
5758 },
5759 {
5760 "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5761 .u.insns_int = {
5762 BPF_LD_IMM64(R0, 1),
5763 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5764 BPF_ALU64_REG(BPF_MUL, R0, R1),
5765 BPF_EXIT_INSN(),
5766 },
5767 INTERNAL,
5768 { },
5769 { { 0, 2147483647 } },
5770 },
5771 {
5772 "ALU64_MUL_X: 64x64 multiply, low word",
5773 .u.insns_int = {
5774 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5775 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5776 BPF_ALU64_REG(BPF_MUL, R0, R1),
5777 BPF_EXIT_INSN(),
5778 },
5779 INTERNAL,
5780 { },
5781 { { 0, 0xe5618cf0 } }
5782 },
5783 {
5784 "ALU64_MUL_X: 64x64 multiply, high word",
5785 .u.insns_int = {
5786 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5787 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5788 BPF_ALU64_REG(BPF_MUL, R0, R1),
5789 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5790 BPF_EXIT_INSN(),
5791 },
5792 INTERNAL,
5793 { },
5794 { { 0, 0x2236d88f } }
5795 },
5796 /* BPF_ALU | BPF_MUL | BPF_K */
5797 {
5798 "ALU_MUL_K: 2 * 3 = 6",
5799 .u.insns_int = {
5800 BPF_LD_IMM64(R0, 2),
5801 BPF_ALU32_IMM(BPF_MUL, R0, 3),
5802 BPF_EXIT_INSN(),
5803 },
5804 INTERNAL,
5805 { },
5806 { { 0, 6 } },
5807 },
5808 {
5809 "ALU_MUL_K: 3 * 1 = 3",
5810 .u.insns_int = {
5811 BPF_LD_IMM64(R0, 3),
5812 BPF_ALU32_IMM(BPF_MUL, R0, 1),
5813 BPF_EXIT_INSN(),
5814 },
5815 INTERNAL,
5816 { },
5817 { { 0, 3 } },
5818 },
5819 {
5820 "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5821 .u.insns_int = {
5822 BPF_LD_IMM64(R0, 2),
5823 BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
5824 BPF_EXIT_INSN(),
5825 },
5826 INTERNAL,
5827 { },
5828 { { 0, 0xFFFFFFF0 } },
5829 },
5830 {
5831 "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5832 .u.insns_int = {
5833 BPF_LD_IMM64(R2, 0x1),
5834 BPF_LD_IMM64(R3, 0x00000000ffffffff),
5835 BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
5836 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5837 BPF_MOV32_IMM(R0, 2),
5838 BPF_EXIT_INSN(),
5839 BPF_MOV32_IMM(R0, 1),
5840 BPF_EXIT_INSN(),
5841 },
5842 INTERNAL,
5843 { },
5844 { { 0, 0x1 } },
5845 },
5846 {
5847 "ALU64_MUL_K: 2 * 3 = 6",
5848 .u.insns_int = {
5849 BPF_LD_IMM64(R0, 2),
5850 BPF_ALU64_IMM(BPF_MUL, R0, 3),
5851 BPF_EXIT_INSN(),
5852 },
5853 INTERNAL,
5854 { },
5855 { { 0, 6 } },
5856 },
5857 {
5858 "ALU64_MUL_K: 3 * 1 = 3",
5859 .u.insns_int = {
5860 BPF_LD_IMM64(R0, 3),
5861 BPF_ALU64_IMM(BPF_MUL, R0, 1),
5862 BPF_EXIT_INSN(),
5863 },
5864 INTERNAL,
5865 { },
5866 { { 0, 3 } },
5867 },
5868 {
5869 "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5870 .u.insns_int = {
5871 BPF_LD_IMM64(R0, 1),
5872 BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
5873 BPF_EXIT_INSN(),
5874 },
5875 INTERNAL,
5876 { },
5877 { { 0, 2147483647 } },
5878 },
5879 {
5880 "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5881 .u.insns_int = {
5882 BPF_LD_IMM64(R0, 1),
5883 BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
5884 BPF_EXIT_INSN(),
5885 },
5886 INTERNAL,
5887 { },
5888 { { 0, -2147483647 } },
5889 },
5890 {
5891 "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5892 .u.insns_int = {
5893 BPF_LD_IMM64(R2, 0x1),
5894 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5895 BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
5896 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5897 BPF_MOV32_IMM(R0, 2),
5898 BPF_EXIT_INSN(),
5899 BPF_MOV32_IMM(R0, 1),
5900 BPF_EXIT_INSN(),
5901 },
5902 INTERNAL,
5903 { },
5904 { { 0, 0x1 } },
5905 },
5906 {
5907 "ALU64_MUL_K: 64x32 multiply, low word",
5908 .u.insns_int = {
5909 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5910 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5911 BPF_EXIT_INSN(),
5912 },
5913 INTERNAL,
5914 { },
5915 { { 0, 0xe242d208 } }
5916 },
5917 {
5918 "ALU64_MUL_K: 64x32 multiply, high word",
5919 .u.insns_int = {
5920 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5921 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5922 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5923 BPF_EXIT_INSN(),
5924 },
5925 INTERNAL,
5926 { },
5927 { { 0, 0xc28f5c28 } }
5928 },
5929 /* BPF_ALU | BPF_DIV | BPF_X */
5930 {
5931 "ALU_DIV_X: 6 / 2 = 3",
5932 .u.insns_int = {
5933 BPF_LD_IMM64(R0, 6),
5934 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5935 BPF_ALU32_REG(BPF_DIV, R0, R1),
5936 BPF_EXIT_INSN(),
5937 },
5938 INTERNAL,
5939 { },
5940 { { 0, 3 } },
5941 },
5942 {
5943 "ALU_DIV_X: 4294967295 / 4294967295 = 1",
5944 .u.insns_int = {
5945 BPF_LD_IMM64(R0, 4294967295U),
5946 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
5947 BPF_ALU32_REG(BPF_DIV, R0, R1),
5948 BPF_EXIT_INSN(),
5949 },
5950 INTERNAL,
5951 { },
5952 { { 0, 1 } },
5953 },
5954 {
5955 "ALU64_DIV_X: 6 / 2 = 3",
5956 .u.insns_int = {
5957 BPF_LD_IMM64(R0, 6),
5958 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5959 BPF_ALU64_REG(BPF_DIV, R0, R1),
5960 BPF_EXIT_INSN(),
5961 },
5962 INTERNAL,
5963 { },
5964 { { 0, 3 } },
5965 },
5966 {
5967 "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5968 .u.insns_int = {
5969 BPF_LD_IMM64(R0, 2147483647),
5970 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5971 BPF_ALU64_REG(BPF_DIV, R0, R1),
5972 BPF_EXIT_INSN(),
5973 },
5974 INTERNAL,
5975 { },
5976 { { 0, 1 } },
5977 },
5978 {
5979 "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5980 .u.insns_int = {
5981 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5982 BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
5983 BPF_LD_IMM64(R3, 0x0000000000000001LL),
5984 BPF_ALU64_REG(BPF_DIV, R2, R4),
5985 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5986 BPF_MOV32_IMM(R0, 2),
5987 BPF_EXIT_INSN(),
5988 BPF_MOV32_IMM(R0, 1),
5989 BPF_EXIT_INSN(),
5990 },
5991 INTERNAL,
5992 { },
5993 { { 0, 0x1 } },
5994 },
5995 /* BPF_ALU | BPF_DIV | BPF_K */
5996 {
5997 "ALU_DIV_K: 6 / 2 = 3",
5998 .u.insns_int = {
5999 BPF_LD_IMM64(R0, 6),
6000 BPF_ALU32_IMM(BPF_DIV, R0, 2),
6001 BPF_EXIT_INSN(),
6002 },
6003 INTERNAL,
6004 { },
6005 { { 0, 3 } },
6006 },
6007 {
6008 "ALU_DIV_K: 3 / 1 = 3",
6009 .u.insns_int = {
6010 BPF_LD_IMM64(R0, 3),
6011 BPF_ALU32_IMM(BPF_DIV, R0, 1),
6012 BPF_EXIT_INSN(),
6013 },
6014 INTERNAL,
6015 { },
6016 { { 0, 3 } },
6017 },
6018 {
6019 "ALU_DIV_K: 4294967295 / 4294967295 = 1",
6020 .u.insns_int = {
6021 BPF_LD_IMM64(R0, 4294967295U),
6022 BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
6023 BPF_EXIT_INSN(),
6024 },
6025 INTERNAL,
6026 { },
6027 { { 0, 1 } },
6028 },
6029 {
6030 "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
6031 .u.insns_int = {
6032 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6033 BPF_LD_IMM64(R3, 0x1UL),
6034 BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
6035 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6036 BPF_MOV32_IMM(R0, 2),
6037 BPF_EXIT_INSN(),
6038 BPF_MOV32_IMM(R0, 1),
6039 BPF_EXIT_INSN(),
6040 },
6041 INTERNAL,
6042 { },
6043 { { 0, 0x1 } },
6044 },
6045 {
6046 "ALU64_DIV_K: 6 / 2 = 3",
6047 .u.insns_int = {
6048 BPF_LD_IMM64(R0, 6),
6049 BPF_ALU64_IMM(BPF_DIV, R0, 2),
6050 BPF_EXIT_INSN(),
6051 },
6052 INTERNAL,
6053 { },
6054 { { 0, 3 } },
6055 },
6056 {
6057 "ALU64_DIV_K: 3 / 1 = 3",
6058 .u.insns_int = {
6059 BPF_LD_IMM64(R0, 3),
6060 BPF_ALU64_IMM(BPF_DIV, R0, 1),
6061 BPF_EXIT_INSN(),
6062 },
6063 INTERNAL,
6064 { },
6065 { { 0, 3 } },
6066 },
6067 {
6068 "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
6069 .u.insns_int = {
6070 BPF_LD_IMM64(R0, 2147483647),
6071 BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
6072 BPF_EXIT_INSN(),
6073 },
6074 INTERNAL,
6075 { },
6076 { { 0, 1 } },
6077 },
6078 {
6079 "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
6080 .u.insns_int = {
6081 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6082 BPF_LD_IMM64(R3, 0x0000000000000001LL),
6083 BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
6084 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6085 BPF_MOV32_IMM(R0, 2),
6086 BPF_EXIT_INSN(),
6087 BPF_MOV32_IMM(R0, 1),
6088 BPF_EXIT_INSN(),
6089 },
6090 INTERNAL,
6091 { },
6092 { { 0, 0x1 } },
6093 },
6094 /* BPF_ALU | BPF_MOD | BPF_X */
6095 {
6096 "ALU_MOD_X: 3 % 2 = 1",
6097 .u.insns_int = {
6098 BPF_LD_IMM64(R0, 3),
6099 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6100 BPF_ALU32_REG(BPF_MOD, R0, R1),
6101 BPF_EXIT_INSN(),
6102 },
6103 INTERNAL,
6104 { },
6105 { { 0, 1 } },
6106 },
6107 {
6108 "ALU_MOD_X: 4294967295 % 4294967293 = 2",
6109 .u.insns_int = {
6110 BPF_LD_IMM64(R0, 4294967295U),
6111 BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
6112 BPF_ALU32_REG(BPF_MOD, R0, R1),
6113 BPF_EXIT_INSN(),
6114 },
6115 INTERNAL,
6116 { },
6117 { { 0, 2 } },
6118 },
6119 {
6120 "ALU64_MOD_X: 3 % 2 = 1",
6121 .u.insns_int = {
6122 BPF_LD_IMM64(R0, 3),
6123 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6124 BPF_ALU64_REG(BPF_MOD, R0, R1),
6125 BPF_EXIT_INSN(),
6126 },
6127 INTERNAL,
6128 { },
6129 { { 0, 1 } },
6130 },
6131 {
6132 "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
6133 .u.insns_int = {
6134 BPF_LD_IMM64(R0, 2147483647),
6135 BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
6136 BPF_ALU64_REG(BPF_MOD, R0, R1),
6137 BPF_EXIT_INSN(),
6138 },
6139 INTERNAL,
6140 { },
6141 { { 0, 2 } },
6142 },
6143 /* BPF_ALU | BPF_MOD | BPF_K */
6144 {
6145 "ALU_MOD_K: 3 % 2 = 1",
6146 .u.insns_int = {
6147 BPF_LD_IMM64(R0, 3),
6148 BPF_ALU32_IMM(BPF_MOD, R0, 2),
6149 BPF_EXIT_INSN(),
6150 },
6151 INTERNAL,
6152 { },
6153 { { 0, 1 } },
6154 },
6155 {
6156 "ALU_MOD_K: 3 % 1 = 0",
6157 .u.insns_int = {
6158 BPF_LD_IMM64(R0, 3),
6159 BPF_ALU32_IMM(BPF_MOD, R0, 1),
6160 BPF_EXIT_INSN(),
6161 },
6162 INTERNAL,
6163 { },
6164 { { 0, 0 } },
6165 },
6166 {
6167 "ALU_MOD_K: 4294967295 % 4294967293 = 2",
6168 .u.insns_int = {
6169 BPF_LD_IMM64(R0, 4294967295U),
6170 BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
6171 BPF_EXIT_INSN(),
6172 },
6173 INTERNAL,
6174 { },
6175 { { 0, 2 } },
6176 },
6177 {
6178 "ALU64_MOD_K: 3 % 2 = 1",
6179 .u.insns_int = {
6180 BPF_LD_IMM64(R0, 3),
6181 BPF_ALU64_IMM(BPF_MOD, R0, 2),
6182 BPF_EXIT_INSN(),
6183 },
6184 INTERNAL,
6185 { },
6186 { { 0, 1 } },
6187 },
6188 {
6189 "ALU64_MOD_K: 3 % 1 = 0",
6190 .u.insns_int = {
6191 BPF_LD_IMM64(R0, 3),
6192 BPF_ALU64_IMM(BPF_MOD, R0, 1),
6193 BPF_EXIT_INSN(),
6194 },
6195 INTERNAL,
6196 { },
6197 { { 0, 0 } },
6198 },
6199 {
6200 "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6201 .u.insns_int = {
6202 BPF_LD_IMM64(R0, 2147483647),
6203 BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
6204 BPF_EXIT_INSN(),
6205 },
6206 INTERNAL,
6207 { },
6208 { { 0, 2 } },
6209 },
6210 /* BPF_ALU | BPF_DIV | BPF_X off=1 (SDIV) */
6211 {
6212 "ALU_SDIV_X: -6 / 2 = -3",
6213 .u.insns_int = {
6214 BPF_LD_IMM64(R0, -6),
6215 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6216 BPF_ALU32_REG_OFF(BPF_DIV, R0, R1, 1),
6217 BPF_EXIT_INSN(),
6218 },
6219 INTERNAL,
6220 { },
6221 { { 0, -3 } },
6222 },
6223 /* BPF_ALU | BPF_DIV | BPF_K off=1 (SDIV) */
6224 {
6225 "ALU_SDIV_K: -6 / 2 = -3",
6226 .u.insns_int = {
6227 BPF_LD_IMM64(R0, -6),
6228 BPF_ALU32_IMM_OFF(BPF_DIV, R0, 2, 1),
6229 BPF_EXIT_INSN(),
6230 },
6231 INTERNAL,
6232 { },
6233 { { 0, -3 } },
6234 },
6235 /* BPF_ALU64 | BPF_DIV | BPF_X off=1 (SDIV64) */
6236 {
6237 "ALU64_SDIV_X: -6 / 2 = -3",
6238 .u.insns_int = {
6239 BPF_LD_IMM64(R0, -6),
6240 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6241 BPF_ALU64_REG_OFF(BPF_DIV, R0, R1, 1),
6242 BPF_EXIT_INSN(),
6243 },
6244 INTERNAL,
6245 { },
6246 { { 0, -3 } },
6247 },
6248 /* BPF_ALU64 | BPF_DIV | BPF_K off=1 (SDIV64) */
6249 {
6250 "ALU64_SDIV_K: -6 / 2 = -3",
6251 .u.insns_int = {
6252 BPF_LD_IMM64(R0, -6),
6253 BPF_ALU64_IMM_OFF(BPF_DIV, R0, 2, 1),
6254 BPF_EXIT_INSN(),
6255 },
6256 INTERNAL,
6257 { },
6258 { { 0, -3 } },
6259 },
6260 /* BPF_ALU | BPF_MOD | BPF_X off=1 (SMOD) */
6261 {
6262 "ALU_SMOD_X: -7 % 2 = -1",
6263 .u.insns_int = {
6264 BPF_LD_IMM64(R0, -7),
6265 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6266 BPF_ALU32_REG_OFF(BPF_MOD, R0, R1, 1),
6267 BPF_EXIT_INSN(),
6268 },
6269 INTERNAL,
6270 { },
6271 { { 0, -1 } },
6272 },
6273 /* BPF_ALU | BPF_MOD | BPF_K off=1 (SMOD) */
6274 {
6275 "ALU_SMOD_K: -7 % 2 = -1",
6276 .u.insns_int = {
6277 BPF_LD_IMM64(R0, -7),
6278 BPF_ALU32_IMM_OFF(BPF_MOD, R0, 2, 1),
6279 BPF_EXIT_INSN(),
6280 },
6281 INTERNAL,
6282 { },
6283 { { 0, -1 } },
6284 },
6285 /* BPF_ALU64 | BPF_MOD | BPF_X off=1 (SMOD64) */
6286 {
6287 "ALU64_SMOD_X: -7 % 2 = -1",
6288 .u.insns_int = {
6289 BPF_LD_IMM64(R0, -7),
6290 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6291 BPF_ALU64_REG_OFF(BPF_MOD, R0, R1, 1),
6292 BPF_EXIT_INSN(),
6293 },
6294 INTERNAL,
6295 { },
6296 { { 0, -1 } },
6297 },
6298 /* BPF_ALU64 | BPF_MOD | BPF_K off=1 (SMOD64) */
6299 {
6300 "ALU64_SMOD_K: -7 % 2 = -1",
6301 .u.insns_int = {
6302 BPF_LD_IMM64(R0, -7),
6303 BPF_ALU64_IMM_OFF(BPF_MOD, R0, 2, 1),
6304 BPF_EXIT_INSN(),
6305 },
6306 INTERNAL,
6307 { },
6308 { { 0, -1 } },
6309 },
	/*
	 * Register-operand AND: 32-bit (ALU_AND_X) and 64-bit
	 * (ALU64_AND_X) variants with a small value and an all-ones
	 * 32-bit value.
	 */
6310	/* BPF_ALU | BPF_AND | BPF_X */
6311	{
6312		"ALU_AND_X: 3 & 2 = 2",
6313		.u.insns_int = {
6314			BPF_LD_IMM64(R0, 3),
6315			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6316			BPF_ALU32_REG(BPF_AND, R0, R1),
6317			BPF_EXIT_INSN(),
6318		},
6319		INTERNAL,
6320		{ },
6321		{ { 0, 2 } },
6322	},
6323	{
6324		"ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6325		.u.insns_int = {
6326			BPF_LD_IMM64(R0, 0xffffffff),
6327			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6328			BPF_ALU32_REG(BPF_AND, R0, R1),
6329			BPF_EXIT_INSN(),
6330		},
6331		INTERNAL,
6332		{ },
6333		{ { 0, 0xffffffff } },
6334	},
6335	{
6336		"ALU64_AND_X: 3 & 2 = 2",
6337		.u.insns_int = {
6338			BPF_LD_IMM64(R0, 3),
6339			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6340			BPF_ALU64_REG(BPF_AND, R0, R1),
6341			BPF_EXIT_INSN(),
6342		},
6343		INTERNAL,
6344		{ },
6345		{ { 0, 2 } },
6346	},
6347	{
6348		"ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6349		.u.insns_int = {
6350			BPF_LD_IMM64(R0, 0xffffffff),
6351			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6352			BPF_ALU64_REG(BPF_AND, R0, R1),
6353			BPF_EXIT_INSN(),
6354		},
6355		INTERNAL,
6356		{ },
6357		{ { 0, 0xffffffff } },
6358	},
	/*
	 * Immediate-operand 32-bit AND, including JIT-relevant cases:
	 * a small immediate, a large (sign-bit-set) immediate, and a
	 * check that the 32-bit op zero-extends the upper half of R0.
	 */
6359	/* BPF_ALU | BPF_AND | BPF_K */
6360	{
6361		"ALU_AND_K: 3 & 2 = 2",
6362		.u.insns_int = {
6363			BPF_LD_IMM64(R0, 3),
6364			BPF_ALU32_IMM(BPF_AND, R0, 2),
6365			BPF_EXIT_INSN(),
6366		},
6367		INTERNAL,
6368		{ },
6369		{ { 0, 2 } },
6370	},
6371	{
6372		"ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6373		.u.insns_int = {
6374			BPF_LD_IMM64(R0, 0xffffffff),
6375			BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
6376			BPF_EXIT_INSN(),
6377		},
6378		INTERNAL,
6379		{ },
6380		{ { 0, 0xffffffff } },
6381	},
6382	{
6383		"ALU_AND_K: Small immediate",
6384		.u.insns_int = {
6385			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6386			BPF_ALU32_IMM(BPF_AND, R0, 15),
6387			BPF_EXIT_INSN(),
6388		},
6389		INTERNAL,
6390		{ },
6391		{ { 0, 4 } }
6392	},
6393	{
6394		"ALU_AND_K: Large immediate",
6395		.u.insns_int = {
6396			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6397			BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
6398			BPF_EXIT_INSN(),
6399		},
6400		INTERNAL,
6401		{ },
6402		{ { 0, 0xa1b2c3d4 } }
6403	},
6404	{
6405		"ALU_AND_K: Zero extension",
6406		.u.insns_int = {
6407			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6408			BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
6409			BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
6410			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6411			BPF_MOV32_IMM(R0, 2),
6412			BPF_EXIT_INSN(),
6413			BPF_MOV32_IMM(R0, 1),
6414			BPF_EXIT_INSN(),
6415		},
6416		INTERNAL,
6417		{ },
6418		{ { 0, 1 } }
6419	},
	/*
	 * Immediate-operand 64-bit AND. The 64-bit compare-and-branch
	 * cases (JEQ against a precomputed R3/R1) verify that the
	 * immediate is sign-extended to 64 bits (0xffffffff acts as -1).
	 */
6420	{
6421		"ALU64_AND_K: 3 & 2 = 2",
6422		.u.insns_int = {
6423			BPF_LD_IMM64(R0, 3),
6424			BPF_ALU64_IMM(BPF_AND, R0, 2),
6425			BPF_EXIT_INSN(),
6426		},
6427		INTERNAL,
6428		{ },
6429		{ { 0, 2 } },
6430	},
6431	{
6432		"ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6433		.u.insns_int = {
6434			BPF_LD_IMM64(R0, 0xffffffff),
6435			BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
6436			BPF_EXIT_INSN(),
6437		},
6438		INTERNAL,
6439		{ },
6440		{ { 0, 0xffffffff } },
6441	},
6442	{
6443		"ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6444		.u.insns_int = {
6445			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6446			BPF_LD_IMM64(R3, 0x0000000000000000LL),
6447			BPF_ALU64_IMM(BPF_AND, R2, 0x0),
6448			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6449			BPF_MOV32_IMM(R0, 2),
6450			BPF_EXIT_INSN(),
6451			BPF_MOV32_IMM(R0, 1),
6452			BPF_EXIT_INSN(),
6453		},
6454		INTERNAL,
6455		{ },
6456		{ { 0, 0x1 } },
6457	},
6458	{
6459		"ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6460		.u.insns_int = {
6461			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6462			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6463			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6464			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6465			BPF_MOV32_IMM(R0, 2),
6466			BPF_EXIT_INSN(),
6467			BPF_MOV32_IMM(R0, 1),
6468			BPF_EXIT_INSN(),
6469		},
6470		INTERNAL,
6471		{ },
6472		{ { 0, 0x1 } },
6473	},
6474	{
6475		"ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6476		.u.insns_int = {
6477			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6478			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6479			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6480			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6481			BPF_MOV32_IMM(R0, 2),
6482			BPF_EXIT_INSN(),
6483			BPF_MOV32_IMM(R0, 1),
6484			BPF_EXIT_INSN(),
6485		},
6486		INTERNAL,
6487		{ },
6488		{ { 0, 0x1 } },
6489	},
6490	{
6491		"ALU64_AND_K: Sign extension 1",
6492		.u.insns_int = {
6493			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6494			BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
6495			BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
6496			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6497			BPF_MOV32_IMM(R0, 2),
6498			BPF_EXIT_INSN(),
6499			BPF_MOV32_IMM(R0, 1),
6500			BPF_EXIT_INSN(),
6501		},
6502		INTERNAL,
6503		{ },
6504		{ { 0, 1 } }
6505	},
6506	{
6507		"ALU64_AND_K: Sign extension 2",
6508		.u.insns_int = {
6509			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6510			BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
6511			BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
6512			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6513			BPF_MOV32_IMM(R0, 2),
6514			BPF_EXIT_INSN(),
6515			BPF_MOV32_IMM(R0, 1),
6516			BPF_EXIT_INSN(),
6517		},
6518		INTERNAL,
6519		{ },
6520		{ { 0, 1 } }
6521	},
	/*
	 * Register-operand OR: 32-bit (ALU_OR_X) and 64-bit (ALU64_OR_X)
	 * variants with a small value and an all-ones 32-bit value.
	 */
6522	/* BPF_ALU | BPF_OR | BPF_X */
6523	{
6524		"ALU_OR_X: 1 | 2 = 3",
6525		.u.insns_int = {
6526			BPF_LD_IMM64(R0, 1),
6527			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6528			BPF_ALU32_REG(BPF_OR, R0, R1),
6529			BPF_EXIT_INSN(),
6530		},
6531		INTERNAL,
6532		{ },
6533		{ { 0, 3 } },
6534	},
6535	{
6536		"ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6537		.u.insns_int = {
6538			BPF_LD_IMM64(R0, 0),
6539			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6540			BPF_ALU32_REG(BPF_OR, R0, R1),
6541			BPF_EXIT_INSN(),
6542		},
6543		INTERNAL,
6544		{ },
6545		{ { 0, 0xffffffff } },
6546	},
6547	{
6548		"ALU64_OR_X: 1 | 2 = 3",
6549		.u.insns_int = {
6550			BPF_LD_IMM64(R0, 1),
6551			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6552			BPF_ALU64_REG(BPF_OR, R0, R1),
6553			BPF_EXIT_INSN(),
6554		},
6555		INTERNAL,
6556		{ },
6557		{ { 0, 3 } },
6558	},
6559	{
6560		"ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6561		.u.insns_int = {
6562			BPF_LD_IMM64(R0, 0),
6563			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6564			BPF_ALU64_REG(BPF_OR, R0, R1),
6565			BPF_EXIT_INSN(),
6566		},
6567		INTERNAL,
6568		{ },
6569		{ { 0, 0xffffffff } },
6570	},
	/* Immediate-operand 32-bit OR. */
6571	/* BPF_ALU | BPF_OR | BPF_K */
6572	{
6573		"ALU_OR_K: 1 | 2 = 3",
6574		.u.insns_int = {
6575			BPF_LD_IMM64(R0, 1),
6576			BPF_ALU32_IMM(BPF_OR, R0, 2),
6577			BPF_EXIT_INSN(),
6578		},
6579		INTERNAL,
6580		{ },
6581		{ { 0, 3 } },
6582	},
6583 {
6584 "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
6585 .u.insns_int = {
6586 BPF_LD_IMM64(R0, 0),
6587 BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
6588 BPF_EXIT_INSN(),
6589 },
6590 INTERNAL,
6591 { },
6592 { { 0, 0xffffffff } },
6593 },
	/*
	 * Immediate-operand OR, JIT-relevant cases: small and large
	 * (sign-bit-set) immediates, zero extension of the 32-bit op,
	 * and the first 64-bit immediate case.
	 */
6594	{
6595		"ALU_OR_K: Small immediate",
6596		.u.insns_int = {
6597			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6598			BPF_ALU32_IMM(BPF_OR, R0, 1),
6599			BPF_EXIT_INSN(),
6600		},
6601		INTERNAL,
6602		{ },
6603		{ { 0, 0x01020305 } }
6604	},
6605	{
6606		"ALU_OR_K: Large immediate",
6607		.u.insns_int = {
6608			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6609			BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
6610			BPF_EXIT_INSN(),
6611		},
6612		INTERNAL,
6613		{ },
6614		{ { 0, 0xa1b2c3d4 } }
6615	},
6616	{
6617		"ALU_OR_K: Zero extension",
6618		.u.insns_int = {
6619			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6620			BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
6621			BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
6622			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6623			BPF_MOV32_IMM(R0, 2),
6624			BPF_EXIT_INSN(),
6625			BPF_MOV32_IMM(R0, 1),
6626			BPF_EXIT_INSN(),
6627		},
6628		INTERNAL,
6629		{ },
6630		{ { 0, 1 } }
6631	},
6632	{
6633		"ALU64_OR_K: 1 | 2 = 3",
6634		.u.insns_int = {
6635			BPF_LD_IMM64(R0, 1),
6636			BPF_ALU64_IMM(BPF_OR, R0, 2),
6637			BPF_EXIT_INSN(),
6638		},
6639		INTERNAL,
6640		{ },
6641		{ { 0, 3 } },
6642	},
6643 {
6644 "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
6645 .u.insns_int = {
6646 BPF_LD_IMM64(R0, 0),
6647 BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
6648 BPF_EXIT_INSN(),
6649 },
6650 INTERNAL,
6651 { },
6652 { { 0, 0xffffffff } },
6653 },
	/*
	 * 64-bit OR immediate cases with full-width compares: verify
	 * that OR with 0 is a no-op and that the 0xffffffff immediate is
	 * sign-extended to -1 (setting all 64 bits of R2).
	 */
6654	{
6655		"ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6656		.u.insns_int = {
6657			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6658			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6659			BPF_ALU64_IMM(BPF_OR, R2, 0x0),
6660			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6661			BPF_MOV32_IMM(R0, 2),
6662			BPF_EXIT_INSN(),
6663			BPF_MOV32_IMM(R0, 1),
6664			BPF_EXIT_INSN(),
6665		},
6666		INTERNAL,
6667		{ },
6668		{ { 0, 0x1 } },
6669	},
6670	{
6671		"ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6672		.u.insns_int = {
6673			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6674			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6675			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6676			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6677			BPF_MOV32_IMM(R0, 2),
6678			BPF_EXIT_INSN(),
6679			BPF_MOV32_IMM(R0, 1),
6680			BPF_EXIT_INSN(),
6681		},
6682		INTERNAL,
6683		{ },
6684		{ { 0, 0x1 } },
6685	},
6686 {
6687 "ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
6688 .u.insns_int = {
6689 BPF_LD_IMM64(R2, 0x0000000000000000LL),
6690 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6691 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6692 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6693 BPF_MOV32_IMM(R0, 2),
6694 BPF_EXIT_INSN(),
6695 BPF_MOV32_IMM(R0, 1),
6696 BPF_EXIT_INSN(),
6697 },
6698 INTERNAL,
6699 { },
6700 { { 0, 0x1 } },
6701 },
	/*
	 * OR immediate sign-extension checks (positive vs negative
	 * immediates), followed by register-operand XOR in 32- and
	 * 64-bit variants.
	 */
6702	{
6703		"ALU64_OR_K: Sign extension 1",
6704		.u.insns_int = {
6705			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6706			BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
6707			BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
6708			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6709			BPF_MOV32_IMM(R0, 2),
6710			BPF_EXIT_INSN(),
6711			BPF_MOV32_IMM(R0, 1),
6712			BPF_EXIT_INSN(),
6713		},
6714		INTERNAL,
6715		{ },
6716		{ { 0, 1 } }
6717	},
6718	{
6719		"ALU64_OR_K: Sign extension 2",
6720		.u.insns_int = {
6721			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6722			BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
6723			BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
6724			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6725			BPF_MOV32_IMM(R0, 2),
6726			BPF_EXIT_INSN(),
6727			BPF_MOV32_IMM(R0, 1),
6728			BPF_EXIT_INSN(),
6729		},
6730		INTERNAL,
6731		{ },
6732		{ { 0, 1 } }
6733	},
6734	/* BPF_ALU | BPF_XOR | BPF_X */
6735	{
6736		"ALU_XOR_X: 5 ^ 6 = 3",
6737		.u.insns_int = {
6738			BPF_LD_IMM64(R0, 5),
6739			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6740			BPF_ALU32_REG(BPF_XOR, R0, R1),
6741			BPF_EXIT_INSN(),
6742		},
6743		INTERNAL,
6744		{ },
6745		{ { 0, 3 } },
6746	},
6747	{
6748		"ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6749		.u.insns_int = {
6750			BPF_LD_IMM64(R0, 1),
6751			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6752			BPF_ALU32_REG(BPF_XOR, R0, R1),
6753			BPF_EXIT_INSN(),
6754		},
6755		INTERNAL,
6756		{ },
6757		{ { 0, 0xfffffffe } },
6758	},
6759	{
6760		"ALU64_XOR_X: 5 ^ 6 = 3",
6761		.u.insns_int = {
6762			BPF_LD_IMM64(R0, 5),
6763			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6764			BPF_ALU64_REG(BPF_XOR, R0, R1),
6765			BPF_EXIT_INSN(),
6766		},
6767		INTERNAL,
6768		{ },
6769		{ { 0, 3 } },
6770	},
6771	{
6772		"ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6773		.u.insns_int = {
6774			BPF_LD_IMM64(R0, 1),
6775			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6776			BPF_ALU64_REG(BPF_XOR, R0, R1),
6777			BPF_EXIT_INSN(),
6778		},
6779		INTERNAL,
6780		{ },
6781		{ { 0, 0xfffffffe } },
6782	},
	/*
	 * Immediate-operand XOR: 32-bit cases (including small/large
	 * immediates and zero extension) and 64-bit cases with
	 * full-width compares for the zero and sign-extended -1
	 * immediates.
	 */
6783	/* BPF_ALU | BPF_XOR | BPF_K */
6784	{
6785		"ALU_XOR_K: 5 ^ 6 = 3",
6786		.u.insns_int = {
6787			BPF_LD_IMM64(R0, 5),
6788			BPF_ALU32_IMM(BPF_XOR, R0, 6),
6789			BPF_EXIT_INSN(),
6790		},
6791		INTERNAL,
6792		{ },
6793		{ { 0, 3 } },
6794	},
6795	{
6796		"ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6797		.u.insns_int = {
6798			BPF_LD_IMM64(R0, 1),
6799			BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
6800			BPF_EXIT_INSN(),
6801		},
6802		INTERNAL,
6803		{ },
6804		{ { 0, 0xfffffffe } },
6805	},
6806	{
6807		"ALU_XOR_K: Small immediate",
6808		.u.insns_int = {
6809			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6810			BPF_ALU32_IMM(BPF_XOR, R0, 15),
6811			BPF_EXIT_INSN(),
6812		},
6813		INTERNAL,
6814		{ },
6815		{ { 0, 0x0102030b } }
6816	},
6817	{
6818		"ALU_XOR_K: Large immediate",
6819		.u.insns_int = {
6820			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6821			BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
6822			BPF_EXIT_INSN(),
6823		},
6824		INTERNAL,
6825		{ },
6826		{ { 0, 0x5e4d3c2b } }
6827	},
6828	{
6829		"ALU_XOR_K: Zero extension",
6830		.u.insns_int = {
6831			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6832			BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
6833			BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6834			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6835			BPF_MOV32_IMM(R0, 2),
6836			BPF_EXIT_INSN(),
6837			BPF_MOV32_IMM(R0, 1),
6838			BPF_EXIT_INSN(),
6839		},
6840		INTERNAL,
6841		{ },
6842		{ { 0, 1 } }
6843	},
6844	{
6845		"ALU64_XOR_K: 5 ^ 6 = 3",
6846		.u.insns_int = {
6847			BPF_LD_IMM64(R0, 5),
6848			BPF_ALU64_IMM(BPF_XOR, R0, 6),
6849			BPF_EXIT_INSN(),
6850		},
6851		INTERNAL,
6852		{ },
6853		{ { 0, 3 } },
6854	},
6855	{
6856		"ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6857		.u.insns_int = {
6858			BPF_LD_IMM64(R0, 1),
6859			BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
6860			BPF_EXIT_INSN(),
6861		},
6862		INTERNAL,
6863		{ },
6864		{ { 0, 0xfffffffe } },
6865	},
6866	{
6867		"ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6868		.u.insns_int = {
6869			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6870			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6871			BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
6872			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6873			BPF_MOV32_IMM(R0, 2),
6874			BPF_EXIT_INSN(),
6875			BPF_MOV32_IMM(R0, 1),
6876			BPF_EXIT_INSN(),
6877		},
6878		INTERNAL,
6879		{ },
6880		{ { 0, 0x1 } },
6881	},
6882	{
6883		"ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6884		.u.insns_int = {
6885			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6886			BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
6887			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6888			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6889			BPF_MOV32_IMM(R0, 2),
6890			BPF_EXIT_INSN(),
6891			BPF_MOV32_IMM(R0, 1),
6892			BPF_EXIT_INSN(),
6893		},
6894		INTERNAL,
6895		{ },
6896		{ { 0, 0x1 } },
6897	},
6898 {
6899 "ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
6900 .u.insns_int = {
6901 BPF_LD_IMM64(R2, 0x0000000000000000LL),
6902 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6903 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6904 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6905 BPF_MOV32_IMM(R0, 2),
6906 BPF_EXIT_INSN(),
6907 BPF_MOV32_IMM(R0, 1),
6908 BPF_EXIT_INSN(),
6909 },
6910 INTERNAL,
6911 { },
6912 { { 0, 0x1 } },
6913 },
	/*
	 * XOR immediate sign-extension checks: a positive immediate
	 * (upper half unchanged) and a negative one (upper half
	 * inverted by the sign-extended operand).
	 */
6914	{
6915		"ALU64_XOR_K: Sign extension 1",
6916		.u.insns_int = {
6917			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6918			BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
6919			BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
6920			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6921			BPF_MOV32_IMM(R0, 2),
6922			BPF_EXIT_INSN(),
6923			BPF_MOV32_IMM(R0, 1),
6924			BPF_EXIT_INSN(),
6925		},
6926		INTERNAL,
6927		{ },
6928		{ { 0, 1 } }
6929	},
6930	{
6931		"ALU64_XOR_K: Sign extension 2",
6932		.u.insns_int = {
6933			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6934			BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
6935			BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6936			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6937			BPF_MOV32_IMM(R0, 2),
6938			BPF_EXIT_INSN(),
6939			BPF_MOV32_IMM(R0, 1),
6940			BPF_EXIT_INSN(),
6941		},
6942		INTERNAL,
6943		{ },
6944		{ { 0, 1 } }
6945	},
	/*
	 * Register-operand left shift. The 64-bit cases cover each
	 * shift-amount class JITs handle differently (< 32, == 32,
	 * > 32, and zero) and check the low and high result words
	 * separately (the "high word" cases add an RSH by 32 so the
	 * 32-bit test result captures the upper half).
	 */
6946	/* BPF_ALU | BPF_LSH | BPF_X */
6947	{
6948		"ALU_LSH_X: 1 << 1 = 2",
6949		.u.insns_int = {
6950			BPF_LD_IMM64(R0, 1),
6951			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6952			BPF_ALU32_REG(BPF_LSH, R0, R1),
6953			BPF_EXIT_INSN(),
6954		},
6955		INTERNAL,
6956		{ },
6957		{ { 0, 2 } },
6958	},
6959	{
6960		"ALU_LSH_X: 1 << 31 = 0x80000000",
6961		.u.insns_int = {
6962			BPF_LD_IMM64(R0, 1),
6963			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6964			BPF_ALU32_REG(BPF_LSH, R0, R1),
6965			BPF_EXIT_INSN(),
6966		},
6967		INTERNAL,
6968		{ },
6969		{ { 0, 0x80000000 } },
6970	},
6971	{
6972		"ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6973		.u.insns_int = {
6974			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6975			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6976			BPF_ALU32_REG(BPF_LSH, R0, R1),
6977			BPF_EXIT_INSN(),
6978		},
6979		INTERNAL,
6980		{ },
6981		{ { 0, 0x45678000 } }
6982	},
6983	{
6984		"ALU64_LSH_X: 1 << 1 = 2",
6985		.u.insns_int = {
6986			BPF_LD_IMM64(R0, 1),
6987			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6988			BPF_ALU64_REG(BPF_LSH, R0, R1),
6989			BPF_EXIT_INSN(),
6990		},
6991		INTERNAL,
6992		{ },
6993		{ { 0, 2 } },
6994	},
6995	{
6996		"ALU64_LSH_X: 1 << 31 = 0x80000000",
6997		.u.insns_int = {
6998			BPF_LD_IMM64(R0, 1),
6999			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7000			BPF_ALU64_REG(BPF_LSH, R0, R1),
7001			BPF_EXIT_INSN(),
7002		},
7003		INTERNAL,
7004		{ },
7005		{ { 0, 0x80000000 } },
7006	},
7007	{
7008		"ALU64_LSH_X: Shift < 32, low word",
7009		.u.insns_int = {
7010			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7011			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7012			BPF_ALU64_REG(BPF_LSH, R0, R1),
7013			BPF_EXIT_INSN(),
7014		},
7015		INTERNAL,
7016		{ },
7017		{ { 0, 0xbcdef000 } }
7018	},
7019	{
7020		"ALU64_LSH_X: Shift < 32, high word",
7021		.u.insns_int = {
7022			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7023			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7024			BPF_ALU64_REG(BPF_LSH, R0, R1),
7025			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7026			BPF_EXIT_INSN(),
7027		},
7028		INTERNAL,
7029		{ },
7030		{ { 0, 0x3456789a } }
7031	},
7032	{
7033		"ALU64_LSH_X: Shift > 32, low word",
7034		.u.insns_int = {
7035			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7036			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7037			BPF_ALU64_REG(BPF_LSH, R0, R1),
7038			BPF_EXIT_INSN(),
7039		},
7040		INTERNAL,
7041		{ },
7042		{ { 0, 0 } }
7043	},
7044	{
7045		"ALU64_LSH_X: Shift > 32, high word",
7046		.u.insns_int = {
7047			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7048			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7049			BPF_ALU64_REG(BPF_LSH, R0, R1),
7050			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7051			BPF_EXIT_INSN(),
7052		},
7053		INTERNAL,
7054		{ },
7055		{ { 0, 0x9abcdef0 } }
7056	},
7057	{
7058		"ALU64_LSH_X: Shift == 32, low word",
7059		.u.insns_int = {
7060			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7061			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7062			BPF_ALU64_REG(BPF_LSH, R0, R1),
7063			BPF_EXIT_INSN(),
7064		},
7065		INTERNAL,
7066		{ },
7067		{ { 0, 0 } }
7068	},
7069	{
7070		"ALU64_LSH_X: Shift == 32, high word",
7071		.u.insns_int = {
7072			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7073			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7074			BPF_ALU64_REG(BPF_LSH, R0, R1),
7075			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7076			BPF_EXIT_INSN(),
7077		},
7078		INTERNAL,
7079		{ },
7080		{ { 0, 0x89abcdef } }
7081	},
7082	{
7083		"ALU64_LSH_X: Zero shift, low word",
7084		.u.insns_int = {
7085			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7086			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7087			BPF_ALU64_REG(BPF_LSH, R0, R1),
7088			BPF_EXIT_INSN(),
7089		},
7090		INTERNAL,
7091		{ },
7092		{ { 0, 0x89abcdef } }
7093	},
7094	{
7095		"ALU64_LSH_X: Zero shift, high word",
7096		.u.insns_int = {
7097			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7098			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7099			BPF_ALU64_REG(BPF_LSH, R0, R1),
7100			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7101			BPF_EXIT_INSN(),
7102		},
7103		INTERNAL,
7104		{ },
7105		{ { 0, 0x01234567 } }
7106	},
	/*
	 * Immediate-operand left shift, same shift-amount classes as
	 * the register variant above (< 32, == 32, > 32, zero), with
	 * separate low-/high-word checks for the 64-bit cases.
	 */
7107	/* BPF_ALU | BPF_LSH | BPF_K */
7108	{
7109		"ALU_LSH_K: 1 << 1 = 2",
7110		.u.insns_int = {
7111			BPF_LD_IMM64(R0, 1),
7112			BPF_ALU32_IMM(BPF_LSH, R0, 1),
7113			BPF_EXIT_INSN(),
7114		},
7115		INTERNAL,
7116		{ },
7117		{ { 0, 2 } },
7118	},
7119	{
7120		"ALU_LSH_K: 1 << 31 = 0x80000000",
7121		.u.insns_int = {
7122			BPF_LD_IMM64(R0, 1),
7123			BPF_ALU32_IMM(BPF_LSH, R0, 31),
7124			BPF_EXIT_INSN(),
7125		},
7126		INTERNAL,
7127		{ },
7128		{ { 0, 0x80000000 } },
7129	},
7130	{
7131		"ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
7132		.u.insns_int = {
7133			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7134			BPF_ALU32_IMM(BPF_LSH, R0, 12),
7135			BPF_EXIT_INSN(),
7136		},
7137		INTERNAL,
7138		{ },
7139		{ { 0, 0x45678000 } }
7140	},
7141	{
7142		"ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
7143		.u.insns_int = {
7144			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7145			BPF_ALU32_IMM(BPF_LSH, R0, 0),
7146			BPF_EXIT_INSN(),
7147		},
7148		INTERNAL,
7149		{ },
7150		{ { 0, 0x12345678 } }
7151	},
7152	{
7153		"ALU64_LSH_K: 1 << 1 = 2",
7154		.u.insns_int = {
7155			BPF_LD_IMM64(R0, 1),
7156			BPF_ALU64_IMM(BPF_LSH, R0, 1),
7157			BPF_EXIT_INSN(),
7158		},
7159		INTERNAL,
7160		{ },
7161		{ { 0, 2 } },
7162	},
7163	{
7164		"ALU64_LSH_K: 1 << 31 = 0x80000000",
7165		.u.insns_int = {
7166			BPF_LD_IMM64(R0, 1),
7167			BPF_ALU64_IMM(BPF_LSH, R0, 31),
7168			BPF_EXIT_INSN(),
7169		},
7170		INTERNAL,
7171		{ },
7172		{ { 0, 0x80000000 } },
7173	},
7174	{
7175		"ALU64_LSH_K: Shift < 32, low word",
7176		.u.insns_int = {
7177			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7178			BPF_ALU64_IMM(BPF_LSH, R0, 12),
7179			BPF_EXIT_INSN(),
7180		},
7181		INTERNAL,
7182		{ },
7183		{ { 0, 0xbcdef000 } }
7184	},
7185	{
7186		"ALU64_LSH_K: Shift < 32, high word",
7187		.u.insns_int = {
7188			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7189			BPF_ALU64_IMM(BPF_LSH, R0, 12),
7190			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7191			BPF_EXIT_INSN(),
7192		},
7193		INTERNAL,
7194		{ },
7195		{ { 0, 0x3456789a } }
7196	},
7197	{
7198		"ALU64_LSH_K: Shift > 32, low word",
7199		.u.insns_int = {
7200			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7201			BPF_ALU64_IMM(BPF_LSH, R0, 36),
7202			BPF_EXIT_INSN(),
7203		},
7204		INTERNAL,
7205		{ },
7206		{ { 0, 0 } }
7207	},
7208	{
7209		"ALU64_LSH_K: Shift > 32, high word",
7210		.u.insns_int = {
7211			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7212			BPF_ALU64_IMM(BPF_LSH, R0, 36),
7213			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7214			BPF_EXIT_INSN(),
7215		},
7216		INTERNAL,
7217		{ },
7218		{ { 0, 0x9abcdef0 } }
7219	},
7220	{
7221		"ALU64_LSH_K: Shift == 32, low word",
7222		.u.insns_int = {
7223			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7224			BPF_ALU64_IMM(BPF_LSH, R0, 32),
7225			BPF_EXIT_INSN(),
7226		},
7227		INTERNAL,
7228		{ },
7229		{ { 0, 0 } }
7230	},
7231	{
7232		"ALU64_LSH_K: Shift == 32, high word",
7233		.u.insns_int = {
7234			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7235			BPF_ALU64_IMM(BPF_LSH, R0, 32),
7236			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7237			BPF_EXIT_INSN(),
7238		},
7239		INTERNAL,
7240		{ },
7241		{ { 0, 0x89abcdef } }
7242	},
7243	{
7244		"ALU64_LSH_K: Zero shift",
7245		.u.insns_int = {
7246			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7247			BPF_ALU64_IMM(BPF_LSH, R0, 0),
7248			BPF_EXIT_INSN(),
7249		},
7250		INTERNAL,
7251		{ },
7252		{ { 0, 0x89abcdef } }
7253	},
	/*
	 * Register-operand logical right shift, covering the < 32,
	 * == 32, > 32 and zero shift-amount classes on a value with the
	 * top bit set (0x8123...), so any accidental arithmetic shift
	 * would change the high-word results.
	 */
7254	/* BPF_ALU | BPF_RSH | BPF_X */
7255	{
7256		"ALU_RSH_X: 2 >> 1 = 1",
7257		.u.insns_int = {
7258			BPF_LD_IMM64(R0, 2),
7259			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7260			BPF_ALU32_REG(BPF_RSH, R0, R1),
7261			BPF_EXIT_INSN(),
7262		},
7263		INTERNAL,
7264		{ },
7265		{ { 0, 1 } },
7266	},
7267	{
7268		"ALU_RSH_X: 0x80000000 >> 31 = 1",
7269		.u.insns_int = {
7270			BPF_LD_IMM64(R0, 0x80000000),
7271			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7272			BPF_ALU32_REG(BPF_RSH, R0, R1),
7273			BPF_EXIT_INSN(),
7274		},
7275		INTERNAL,
7276		{ },
7277		{ { 0, 1 } },
7278	},
7279	{
7280		"ALU_RSH_X: 0x12345678 >> 20 = 0x123",
7281		.u.insns_int = {
7282			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7283			BPF_ALU32_IMM(BPF_MOV, R1, 20),
7284			BPF_ALU32_REG(BPF_RSH, R0, R1),
7285			BPF_EXIT_INSN(),
7286		},
7287		INTERNAL,
7288		{ },
7289		{ { 0, 0x123 } }
7290	},
7291	{
7292		"ALU64_RSH_X: 2 >> 1 = 1",
7293		.u.insns_int = {
7294			BPF_LD_IMM64(R0, 2),
7295			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7296			BPF_ALU64_REG(BPF_RSH, R0, R1),
7297			BPF_EXIT_INSN(),
7298		},
7299		INTERNAL,
7300		{ },
7301		{ { 0, 1 } },
7302	},
7303	{
7304		"ALU64_RSH_X: 0x80000000 >> 31 = 1",
7305		.u.insns_int = {
7306			BPF_LD_IMM64(R0, 0x80000000),
7307			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7308			BPF_ALU64_REG(BPF_RSH, R0, R1),
7309			BPF_EXIT_INSN(),
7310		},
7311		INTERNAL,
7312		{ },
7313		{ { 0, 1 } },
7314	},
7315	{
7316		"ALU64_RSH_X: Shift < 32, low word",
7317		.u.insns_int = {
7318			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7319			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7320			BPF_ALU64_REG(BPF_RSH, R0, R1),
7321			BPF_EXIT_INSN(),
7322		},
7323		INTERNAL,
7324		{ },
7325		{ { 0, 0x56789abc } }
7326	},
7327	{
7328		"ALU64_RSH_X: Shift < 32, high word",
7329		.u.insns_int = {
7330			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7331			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7332			BPF_ALU64_REG(BPF_RSH, R0, R1),
7333			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7334			BPF_EXIT_INSN(),
7335		},
7336		INTERNAL,
7337		{ },
7338		{ { 0, 0x00081234 } }
7339	},
7340	{
7341		"ALU64_RSH_X: Shift > 32, low word",
7342		.u.insns_int = {
7343			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7344			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7345			BPF_ALU64_REG(BPF_RSH, R0, R1),
7346			BPF_EXIT_INSN(),
7347		},
7348		INTERNAL,
7349		{ },
7350		{ { 0, 0x08123456 } }
7351	},
7352	{
7353		"ALU64_RSH_X: Shift > 32, high word",
7354		.u.insns_int = {
7355			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7356			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7357			BPF_ALU64_REG(BPF_RSH, R0, R1),
7358			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7359			BPF_EXIT_INSN(),
7360		},
7361		INTERNAL,
7362		{ },
7363		{ { 0, 0 } }
7364	},
7365	{
7366		"ALU64_RSH_X: Shift == 32, low word",
7367		.u.insns_int = {
7368			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7369			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7370			BPF_ALU64_REG(BPF_RSH, R0, R1),
7371			BPF_EXIT_INSN(),
7372		},
7373		INTERNAL,
7374		{ },
7375		{ { 0, 0x81234567 } }
7376	},
7377	{
7378		"ALU64_RSH_X: Shift == 32, high word",
7379		.u.insns_int = {
7380			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7381			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7382			BPF_ALU64_REG(BPF_RSH, R0, R1),
7383			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7384			BPF_EXIT_INSN(),
7385		},
7386		INTERNAL,
7387		{ },
7388		{ { 0, 0 } }
7389	},
7390	{
7391		"ALU64_RSH_X: Zero shift, low word",
7392		.u.insns_int = {
7393			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7394			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7395			BPF_ALU64_REG(BPF_RSH, R0, R1),
7396			BPF_EXIT_INSN(),
7397		},
7398		INTERNAL,
7399		{ },
7400		{ { 0, 0x89abcdef } }
7401	},
7402	{
7403		"ALU64_RSH_X: Zero shift, high word",
7404		.u.insns_int = {
7405			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7406			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7407			BPF_ALU64_REG(BPF_RSH, R0, R1),
7408			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7409			BPF_EXIT_INSN(),
7410		},
7411		INTERNAL,
7412		{ },
7413		{ { 0, 0x81234567 } }
7414	},
	/*
	 * Immediate-operand logical right shift, same shift-amount
	 * classes as the register variant above, again on a
	 * top-bit-set value so a wrong arithmetic shift would be
	 * detected.
	 */
7415	/* BPF_ALU | BPF_RSH | BPF_K */
7416	{
7417		"ALU_RSH_K: 2 >> 1 = 1",
7418		.u.insns_int = {
7419			BPF_LD_IMM64(R0, 2),
7420			BPF_ALU32_IMM(BPF_RSH, R0, 1),
7421			BPF_EXIT_INSN(),
7422		},
7423		INTERNAL,
7424		{ },
7425		{ { 0, 1 } },
7426	},
7427	{
7428		"ALU_RSH_K: 0x80000000 >> 31 = 1",
7429		.u.insns_int = {
7430			BPF_LD_IMM64(R0, 0x80000000),
7431			BPF_ALU32_IMM(BPF_RSH, R0, 31),
7432			BPF_EXIT_INSN(),
7433		},
7434		INTERNAL,
7435		{ },
7436		{ { 0, 1 } },
7437	},
7438	{
7439		"ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7440		.u.insns_int = {
7441			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7442			BPF_ALU32_IMM(BPF_RSH, R0, 20),
7443			BPF_EXIT_INSN(),
7444		},
7445		INTERNAL,
7446		{ },
7447		{ { 0, 0x123 } }
7448	},
7449	{
7450		"ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7451		.u.insns_int = {
7452			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7453			BPF_ALU32_IMM(BPF_RSH, R0, 0),
7454			BPF_EXIT_INSN(),
7455		},
7456		INTERNAL,
7457		{ },
7458		{ { 0, 0x12345678 } }
7459	},
7460	{
7461		"ALU64_RSH_K: 2 >> 1 = 1",
7462		.u.insns_int = {
7463			BPF_LD_IMM64(R0, 2),
7464			BPF_ALU64_IMM(BPF_RSH, R0, 1),
7465			BPF_EXIT_INSN(),
7466		},
7467		INTERNAL,
7468		{ },
7469		{ { 0, 1 } },
7470	},
7471	{
7472		"ALU64_RSH_K: 0x80000000 >> 31 = 1",
7473		.u.insns_int = {
7474			BPF_LD_IMM64(R0, 0x80000000),
7475			BPF_ALU64_IMM(BPF_RSH, R0, 31),
7476			BPF_EXIT_INSN(),
7477		},
7478		INTERNAL,
7479		{ },
7480		{ { 0, 1 } },
7481	},
7482	{
7483		"ALU64_RSH_K: Shift < 32, low word",
7484		.u.insns_int = {
7485			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7486			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7487			BPF_EXIT_INSN(),
7488		},
7489		INTERNAL,
7490		{ },
7491		{ { 0, 0x56789abc } }
7492	},
7493	{
7494		"ALU64_RSH_K: Shift < 32, high word",
7495		.u.insns_int = {
7496			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7497			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7498			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7499			BPF_EXIT_INSN(),
7500		},
7501		INTERNAL,
7502		{ },
7503		{ { 0, 0x00081234 } }
7504	},
7505	{
7506		"ALU64_RSH_K: Shift > 32, low word",
7507		.u.insns_int = {
7508			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7509			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7510			BPF_EXIT_INSN(),
7511		},
7512		INTERNAL,
7513		{ },
7514		{ { 0, 0x08123456 } }
7515	},
7516	{
7517		"ALU64_RSH_K: Shift > 32, high word",
7518		.u.insns_int = {
7519			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7520			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7521			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7522			BPF_EXIT_INSN(),
7523		},
7524		INTERNAL,
7525		{ },
7526		{ { 0, 0 } }
7527	},
7528	{
7529		"ALU64_RSH_K: Shift == 32, low word",
7530		.u.insns_int = {
7531			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7532			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7533			BPF_EXIT_INSN(),
7534		},
7535		INTERNAL,
7536		{ },
7537		{ { 0, 0x81234567 } }
7538	},
7539	{
7540		"ALU64_RSH_K: Shift == 32, high word",
7541		.u.insns_int = {
7542			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7543			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7544			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7545			BPF_EXIT_INSN(),
7546		},
7547		INTERNAL,
7548		{ },
7549		{ { 0, 0 } }
7550	},
7551	{
7552		"ALU64_RSH_K: Zero shift",
7553		.u.insns_int = {
7554			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7555			BPF_ALU64_IMM(BPF_RSH, R0, 0),
7556			BPF_EXIT_INSN(),
7557		},
7558		INTERNAL,
7559		{ },
7560		{ { 0, 0x89abcdef } }
7561	},
	/*
	 * Register-operand arithmetic right shift on negative values;
	 * the top-bit-set operand checks that vacated high bits are
	 * filled with the sign bit across the < 32, == 32, > 32 and
	 * zero shift-amount classes.
	 */
7562	/* BPF_ALU | BPF_ARSH | BPF_X */
7563	{
7564		"ALU32_ARSH_X: -1234 >> 7 = -10",
7565		.u.insns_int = {
7566			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7567			BPF_ALU32_IMM(BPF_MOV, R1, 7),
7568			BPF_ALU32_REG(BPF_ARSH, R0, R1),
7569			BPF_EXIT_INSN(),
7570		},
7571		INTERNAL,
7572		{ },
7573		{ { 0, -10 } }
7574	},
7575	{
7576		"ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7577		.u.insns_int = {
7578			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7579			BPF_ALU32_IMM(BPF_MOV, R1, 40),
7580			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7581			BPF_EXIT_INSN(),
7582		},
7583		INTERNAL,
7584		{ },
7585		{ { 0, 0xffff00ff } },
7586	},
7587	{
7588		"ALU64_ARSH_X: Shift < 32, low word",
7589		.u.insns_int = {
7590			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7591			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7592			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7593			BPF_EXIT_INSN(),
7594		},
7595		INTERNAL,
7596		{ },
7597		{ { 0, 0x56789abc } }
7598	},
7599	{
7600		"ALU64_ARSH_X: Shift < 32, high word",
7601		.u.insns_int = {
7602			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7603			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7604			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7605			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7606			BPF_EXIT_INSN(),
7607		},
7608		INTERNAL,
7609		{ },
7610		{ { 0, 0xfff81234 } }
7611	},
7612	{
7613		"ALU64_ARSH_X: Shift > 32, low word",
7614		.u.insns_int = {
7615			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7616			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7617			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7618			BPF_EXIT_INSN(),
7619		},
7620		INTERNAL,
7621		{ },
7622		{ { 0, 0xf8123456 } }
7623	},
7624	{
7625		"ALU64_ARSH_X: Shift > 32, high word",
7626		.u.insns_int = {
7627			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7628			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7629			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7630			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7631			BPF_EXIT_INSN(),
7632		},
7633		INTERNAL,
7634		{ },
7635		{ { 0, -1 } }
7636	},
7637	{
7638		"ALU64_ARSH_X: Shift == 32, low word",
7639		.u.insns_int = {
7640			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7641			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7642			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7643			BPF_EXIT_INSN(),
7644		},
7645		INTERNAL,
7646		{ },
7647		{ { 0, 0x81234567 } }
7648	},
7649	{
7650		"ALU64_ARSH_X: Shift == 32, high word",
7651		.u.insns_int = {
7652			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7653			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7654			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7655			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7656			BPF_EXIT_INSN(),
7657		},
7658		INTERNAL,
7659		{ },
7660		{ { 0, -1 } }
7661	},
7662	{
7663		"ALU64_ARSH_X: Zero shift, low word",
7664		.u.insns_int = {
7665			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7666			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7667			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7668			BPF_EXIT_INSN(),
7669		},
7670		INTERNAL,
7671		{ },
7672		{ { 0, 0x89abcdef } }
7673	},
7674	{
7675		"ALU64_ARSH_X: Zero shift, high word",
7676		.u.insns_int = {
7677			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7678			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7679			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7680			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7681			BPF_EXIT_INSN(),
7682		},
7683		INTERNAL,
7684		{ },
7685		{ { 0, 0x81234567 } }
7686	},
	/* Immediate-operand arithmetic right shift: 32-bit cases and
	 * the 64-bit sign-fill check at shift 40.
	 */
7687	/* BPF_ALU | BPF_ARSH | BPF_K */
7688	{
7689		"ALU32_ARSH_K: -1234 >> 7 = -10",
7690		.u.insns_int = {
7691			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7692			BPF_ALU32_IMM(BPF_ARSH, R0, 7),
7693			BPF_EXIT_INSN(),
7694		},
7695		INTERNAL,
7696		{ },
7697		{ { 0, -10 } }
7698	},
7699	{
7700		"ALU32_ARSH_K: -1234 >> 0 = -1234",
7701		.u.insns_int = {
7702			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7703			BPF_ALU32_IMM(BPF_ARSH, R0, 0),
7704			BPF_EXIT_INSN(),
7705		},
7706		INTERNAL,
7707		{ },
7708		{ { 0, -1234 } }
7709	},
7710	{
7711		"ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7712		.u.insns_int = {
7713			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7714			BPF_ALU64_IMM(BPF_ARSH, R0, 40),
7715			BPF_EXIT_INSN(),
7716		},
7717		INTERNAL,
7718		{ },
7719		{ { 0, 0xffff00ff } },
7720	},
7721 {
7722 "ALU64_ARSH_K: Shift < 32, low word",
7723 .u.insns_int = {
7724 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7725 BPF_ALU64_IMM(BPF_RSH, R0, 12),
7726 BPF_EXIT_INSN(),
7727 },
7728 INTERNAL,
7729 { },
7730 { { 0, 0x56789abc } }
7731 },
7732 {
7733 "ALU64_ARSH_K: Shift < 32, high word",
7734 .u.insns_int = {
7735 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7736 BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7737 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7738 BPF_EXIT_INSN(),
7739 },
7740 INTERNAL,
7741 { },
7742 { { 0, 0xfff81234 } }
7743 },
7744 {
7745 "ALU64_ARSH_K: Shift > 32, low word",
7746 .u.insns_int = {
7747 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7748 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7749 BPF_EXIT_INSN(),
7750 },
7751 INTERNAL,
7752 { },
7753 { { 0, 0xf8123456 } }
7754 },
7755 {
7756 "ALU64_ARSH_K: Shift > 32, high word",
7757 .u.insns_int = {
7758 BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
7759 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7760 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7761 BPF_EXIT_INSN(),
7762 },
7763 INTERNAL,
7764 { },
7765 { { 0, -1 } }
7766 },
7767 {
7768 "ALU64_ARSH_K: Shift == 32, low word",
7769 .u.insns_int = {
7770 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7771 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7772 BPF_EXIT_INSN(),
7773 },
7774 INTERNAL,
7775 { },
7776 { { 0, 0x81234567 } }
7777 },
7778 {
7779 "ALU64_ARSH_K: Shift == 32, high word",
7780 .u.insns_int = {
7781 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7782 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7783 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7784 BPF_EXIT_INSN(),
7785 },
7786 INTERNAL,
7787 { },
7788 { { 0, -1 } }
7789 },
7790 {
7791 "ALU64_ARSH_K: Zero shift",
7792 .u.insns_int = {
7793 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7794 BPF_ALU64_IMM(BPF_ARSH, R0, 0),
7795 BPF_EXIT_INSN(),
7796 },
7797 INTERNAL,
7798 { },
7799 { { 0, 0x89abcdef } }
7800 },
7801 /* BPF_ALU | BPF_NEG */
7802 {
7803 "ALU_NEG: -(3) = -3",
7804 .u.insns_int = {
7805 BPF_ALU32_IMM(BPF_MOV, R0, 3),
7806 BPF_ALU32_IMM(BPF_NEG, R0, 0),
7807 BPF_EXIT_INSN(),
7808 },
7809 INTERNAL,
7810 { },
7811 { { 0, -3 } },
7812 },
7813 {
7814 "ALU_NEG: -(-3) = 3",
7815 .u.insns_int = {
7816 BPF_ALU32_IMM(BPF_MOV, R0, -3),
7817 BPF_ALU32_IMM(BPF_NEG, R0, 0),
7818 BPF_EXIT_INSN(),
7819 },
7820 INTERNAL,
7821 { },
7822 { { 0, 3 } },
7823 },
7824 {
7825 "ALU64_NEG: -(3) = -3",
7826 .u.insns_int = {
7827 BPF_LD_IMM64(R0, 3),
7828 BPF_ALU64_IMM(BPF_NEG, R0, 0),
7829 BPF_EXIT_INSN(),
7830 },
7831 INTERNAL,
7832 { },
7833 { { 0, -3 } },
7834 },
7835 {
7836 "ALU64_NEG: -(-3) = 3",
7837 .u.insns_int = {
7838 BPF_LD_IMM64(R0, -3),
7839 BPF_ALU64_IMM(BPF_NEG, R0, 0),
7840 BPF_EXIT_INSN(),
7841 },
7842 INTERNAL,
7843 { },
7844 { { 0, 3 } },
7845 },
7846 /* BPF_ALU | BPF_END | BPF_FROM_BE */
7847 {
7848 "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7849 .u.insns_int = {
7850 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7851 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7852 BPF_EXIT_INSN(),
7853 },
7854 INTERNAL,
7855 { },
7856 { { 0, cpu_to_be16(0xcdef) } },
7857 },
7858 {
7859 "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7860 .u.insns_int = {
7861 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7862 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7863 BPF_ALU64_REG(BPF_MOV, R1, R0),
7864 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7865 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7866 BPF_EXIT_INSN(),
7867 },
7868 INTERNAL,
7869 { },
7870 { { 0, cpu_to_be32(0x89abcdef) } },
7871 },
7872 {
7873 "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7874 .u.insns_int = {
7875 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7876 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7877 BPF_EXIT_INSN(),
7878 },
7879 INTERNAL,
7880 { },
7881 { { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
7882 },
7883 {
7884 "ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7885 .u.insns_int = {
7886 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7887 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7888 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7889 BPF_EXIT_INSN(),
7890 },
7891 INTERNAL,
7892 { },
7893 { { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
7894 },
7895 /* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7896 {
7897 "ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7898 .u.insns_int = {
7899 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7900 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7901 BPF_EXIT_INSN(),
7902 },
7903 INTERNAL,
7904 { },
7905 { { 0, cpu_to_be16(0x3210) } },
7906 },
7907 {
7908 "ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7909 .u.insns_int = {
7910 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7911 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7912 BPF_ALU64_REG(BPF_MOV, R1, R0),
7913 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7914 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7915 BPF_EXIT_INSN(),
7916 },
7917 INTERNAL,
7918 { },
7919 { { 0, cpu_to_be32(0x76543210) } },
7920 },
7921 {
7922 "ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7923 .u.insns_int = {
7924 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7925 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7926 BPF_EXIT_INSN(),
7927 },
7928 INTERNAL,
7929 { },
7930 { { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
7931 },
7932 {
7933 "ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7934 .u.insns_int = {
7935 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7936 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7937 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7938 BPF_EXIT_INSN(),
7939 },
7940 INTERNAL,
7941 { },
7942 { { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
7943 },
7944 /* BPF_ALU | BPF_END | BPF_FROM_LE */
7945 {
7946 "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7947 .u.insns_int = {
7948 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7949 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7950 BPF_EXIT_INSN(),
7951 },
7952 INTERNAL,
7953 { },
7954 { { 0, cpu_to_le16(0xcdef) } },
7955 },
7956 {
7957 "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7958 .u.insns_int = {
7959 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7960 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7961 BPF_ALU64_REG(BPF_MOV, R1, R0),
7962 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7963 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7964 BPF_EXIT_INSN(),
7965 },
7966 INTERNAL,
7967 { },
7968 { { 0, cpu_to_le32(0x89abcdef) } },
7969 },
7970 {
7971 "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7972 .u.insns_int = {
7973 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7974 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7975 BPF_EXIT_INSN(),
7976 },
7977 INTERNAL,
7978 { },
7979 { { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
7980 },
7981 {
7982 "ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7983 .u.insns_int = {
7984 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7985 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7986 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7987 BPF_EXIT_INSN(),
7988 },
7989 INTERNAL,
7990 { },
7991 { { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
7992 },
7993 /* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7994 {
7995 "ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7996 .u.insns_int = {
7997 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7998 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7999 BPF_EXIT_INSN(),
8000 },
8001 INTERNAL,
8002 { },
8003 { { 0, cpu_to_le16(0x3210) } },
8004 },
8005 {
8006 "ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
8007 .u.insns_int = {
8008 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8009 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
8010 BPF_ALU64_REG(BPF_MOV, R1, R0),
8011 BPF_ALU64_IMM(BPF_RSH, R1, 32),
8012 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8013 BPF_EXIT_INSN(),
8014 },
8015 INTERNAL,
8016 { },
8017 { { 0, cpu_to_le32(0x76543210) } },
8018 },
8019 {
8020 "ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
8021 .u.insns_int = {
8022 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8023 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
8024 BPF_EXIT_INSN(),
8025 },
8026 INTERNAL,
8027 { },
8028 { { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
8029 },
8030 {
8031 "ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
8032 .u.insns_int = {
8033 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8034 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
8035 BPF_ALU64_IMM(BPF_RSH, R0, 32),
8036 BPF_EXIT_INSN(),
8037 },
8038 INTERNAL,
8039 { },
8040 { { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
8041 },
8042 /* BSWAP */
8043 {
8044 "BSWAP 16: 0x0123456789abcdef -> 0xefcd",
8045 .u.insns_int = {
8046 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8047 BPF_BSWAP(R0, 16),
8048 BPF_EXIT_INSN(),
8049 },
8050 INTERNAL,
8051 { },
8052 { { 0, 0xefcd } },
8053 },
8054 {
8055 "BSWAP 32: 0x0123456789abcdef -> 0xefcdab89",
8056 .u.insns_int = {
8057 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8058 BPF_BSWAP(R0, 32),
8059 BPF_ALU64_REG(BPF_MOV, R1, R0),
8060 BPF_ALU64_IMM(BPF_RSH, R1, 32),
8061 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8062 BPF_EXIT_INSN(),
8063 },
8064 INTERNAL,
8065 { },
8066 { { 0, 0xefcdab89 } },
8067 },
8068 {
8069 "BSWAP 64: 0x0123456789abcdef -> 0x67452301",
8070 .u.insns_int = {
8071 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8072 BPF_BSWAP(R0, 64),
8073 BPF_EXIT_INSN(),
8074 },
8075 INTERNAL,
8076 { },
8077 { { 0, 0x67452301 } },
8078 },
8079 {
8080 "BSWAP 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
8081 .u.insns_int = {
8082 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8083 BPF_BSWAP(R0, 64),
8084 BPF_ALU64_IMM(BPF_RSH, R0, 32),
8085 BPF_EXIT_INSN(),
8086 },
8087 INTERNAL,
8088 { },
8089 { { 0, 0xefcdab89 } },
8090 },
8091 /* BSWAP, reversed */
8092 {
8093 "BSWAP 16: 0xfedcba9876543210 -> 0x1032",
8094 .u.insns_int = {
8095 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8096 BPF_BSWAP(R0, 16),
8097 BPF_EXIT_INSN(),
8098 },
8099 INTERNAL,
8100 { },
8101 { { 0, 0x1032 } },
8102 },
8103 {
8104 "BSWAP 32: 0xfedcba9876543210 -> 0x10325476",
8105 .u.insns_int = {
8106 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8107 BPF_BSWAP(R0, 32),
8108 BPF_ALU64_REG(BPF_MOV, R1, R0),
8109 BPF_ALU64_IMM(BPF_RSH, R1, 32),
8110 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8111 BPF_EXIT_INSN(),
8112 },
8113 INTERNAL,
8114 { },
8115 { { 0, 0x10325476 } },
8116 },
8117 {
8118 "BSWAP 64: 0xfedcba9876543210 -> 0x98badcfe",
8119 .u.insns_int = {
8120 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8121 BPF_BSWAP(R0, 64),
8122 BPF_EXIT_INSN(),
8123 },
8124 INTERNAL,
8125 { },
8126 { { 0, 0x98badcfe } },
8127 },
8128 {
8129 "BSWAP 64: 0xfedcba9876543210 >> 32 -> 0x10325476",
8130 .u.insns_int = {
8131 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8132 BPF_BSWAP(R0, 64),
8133 BPF_ALU64_IMM(BPF_RSH, R0, 32),
8134 BPF_EXIT_INSN(),
8135 },
8136 INTERNAL,
8137 { },
8138 { { 0, 0x10325476 } },
8139 },
8140 /* BPF_LDX_MEM B/H/W/DW */
8141 {
8142 "BPF_LDX_MEM | BPF_B, base",
8143 .u.insns_int = {
8144 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8145 BPF_LD_IMM64(R2, 0x0000000000000008ULL),
8146 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8147 #ifdef __BIG_ENDIAN
8148 BPF_LDX_MEM(BPF_B, R0, R10, -1),
8149 #else
8150 BPF_LDX_MEM(BPF_B, R0, R10, -8),
8151 #endif
8152 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8153 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8154 BPF_EXIT_INSN(),
8155 },
8156 INTERNAL,
8157 { },
8158 { { 0, 0 } },
8159 .stack_depth = 8,
8160 },
8161 {
8162 "BPF_LDX_MEM | BPF_B, MSB set",
8163 .u.insns_int = {
8164 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8165 BPF_LD_IMM64(R2, 0x0000000000000088ULL),
8166 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8167 #ifdef __BIG_ENDIAN
8168 BPF_LDX_MEM(BPF_B, R0, R10, -1),
8169 #else
8170 BPF_LDX_MEM(BPF_B, R0, R10, -8),
8171 #endif
8172 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8173 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8174 BPF_EXIT_INSN(),
8175 },
8176 INTERNAL,
8177 { },
8178 { { 0, 0 } },
8179 .stack_depth = 8,
8180 },
8181 {
8182 "BPF_LDX_MEM | BPF_B, negative offset",
8183 .u.insns_int = {
8184 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8185 BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8186 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8187 BPF_STX_MEM(BPF_B, R1, R2, -256),
8188 BPF_LDX_MEM(BPF_B, R0, R1, -256),
8189 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8190 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8191 BPF_EXIT_INSN(),
8192 },
8193 INTERNAL | FLAG_LARGE_MEM,
8194 { },
8195 { { 512, 0 } },
8196 .stack_depth = 0,
8197 },
8198 {
8199 "BPF_LDX_MEM | BPF_B, small positive offset",
8200 .u.insns_int = {
8201 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8202 BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8203 BPF_STX_MEM(BPF_B, R1, R2, 256),
8204 BPF_LDX_MEM(BPF_B, R0, R1, 256),
8205 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8206 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8207 BPF_EXIT_INSN(),
8208 },
8209 INTERNAL | FLAG_LARGE_MEM,
8210 { },
8211 { { 512, 0 } },
8212 .stack_depth = 0,
8213 },
8214 {
8215 "BPF_LDX_MEM | BPF_B, large positive offset",
8216 .u.insns_int = {
8217 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8218 BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8219 BPF_STX_MEM(BPF_B, R1, R2, 4096),
8220 BPF_LDX_MEM(BPF_B, R0, R1, 4096),
8221 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8222 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8223 BPF_EXIT_INSN(),
8224 },
8225 INTERNAL | FLAG_LARGE_MEM,
8226 { },
8227 { { 4096 + 16, 0 } },
8228 .stack_depth = 0,
8229 },
8230 {
8231 "BPF_LDX_MEM | BPF_H, base",
8232 .u.insns_int = {
8233 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8234 BPF_LD_IMM64(R2, 0x0000000000000708ULL),
8235 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8236 #ifdef __BIG_ENDIAN
8237 BPF_LDX_MEM(BPF_H, R0, R10, -2),
8238 #else
8239 BPF_LDX_MEM(BPF_H, R0, R10, -8),
8240 #endif
8241 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8242 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8243 BPF_EXIT_INSN(),
8244 },
8245 INTERNAL,
8246 { },
8247 { { 0, 0 } },
8248 .stack_depth = 8,
8249 },
8250 {
8251 "BPF_LDX_MEM | BPF_H, MSB set",
8252 .u.insns_int = {
8253 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8254 BPF_LD_IMM64(R2, 0x0000000000008788ULL),
8255 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8256 #ifdef __BIG_ENDIAN
8257 BPF_LDX_MEM(BPF_H, R0, R10, -2),
8258 #else
8259 BPF_LDX_MEM(BPF_H, R0, R10, -8),
8260 #endif
8261 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8262 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8263 BPF_EXIT_INSN(),
8264 },
8265 INTERNAL,
8266 { },
8267 { { 0, 0 } },
8268 .stack_depth = 8,
8269 },
8270 {
8271 "BPF_LDX_MEM | BPF_H, negative offset",
8272 .u.insns_int = {
8273 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8274 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8275 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8276 BPF_STX_MEM(BPF_H, R1, R2, -256),
8277 BPF_LDX_MEM(BPF_H, R0, R1, -256),
8278 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8279 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8280 BPF_EXIT_INSN(),
8281 },
8282 INTERNAL | FLAG_LARGE_MEM,
8283 { },
8284 { { 512, 0 } },
8285 .stack_depth = 0,
8286 },
8287 {
8288 "BPF_LDX_MEM | BPF_H, small positive offset",
8289 .u.insns_int = {
8290 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8291 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8292 BPF_STX_MEM(BPF_H, R1, R2, 256),
8293 BPF_LDX_MEM(BPF_H, R0, R1, 256),
8294 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8295 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8296 BPF_EXIT_INSN(),
8297 },
8298 INTERNAL | FLAG_LARGE_MEM,
8299 { },
8300 { { 512, 0 } },
8301 .stack_depth = 0,
8302 },
8303 {
8304 "BPF_LDX_MEM | BPF_H, large positive offset",
8305 .u.insns_int = {
8306 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8307 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8308 BPF_STX_MEM(BPF_H, R1, R2, 8192),
8309 BPF_LDX_MEM(BPF_H, R0, R1, 8192),
8310 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8311 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8312 BPF_EXIT_INSN(),
8313 },
8314 INTERNAL | FLAG_LARGE_MEM,
8315 { },
8316 { { 8192 + 16, 0 } },
8317 .stack_depth = 0,
8318 },
8319 {
8320 "BPF_LDX_MEM | BPF_H, unaligned positive offset",
8321 .u.insns_int = {
8322 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8323 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8324 BPF_STX_MEM(BPF_H, R1, R2, 13),
8325 BPF_LDX_MEM(BPF_H, R0, R1, 13),
8326 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8327 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8328 BPF_EXIT_INSN(),
8329 },
8330 INTERNAL | FLAG_LARGE_MEM,
8331 { },
8332 { { 32, 0 } },
8333 .stack_depth = 0,
8334 },
8335 {
8336 "BPF_LDX_MEM | BPF_W, base",
8337 .u.insns_int = {
8338 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8339 BPF_LD_IMM64(R2, 0x0000000005060708ULL),
8340 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8341 #ifdef __BIG_ENDIAN
8342 BPF_LDX_MEM(BPF_W, R0, R10, -4),
8343 #else
8344 BPF_LDX_MEM(BPF_W, R0, R10, -8),
8345 #endif
8346 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8347 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8348 BPF_EXIT_INSN(),
8349 },
8350 INTERNAL,
8351 { },
8352 { { 0, 0 } },
8353 .stack_depth = 8,
8354 },
8355 {
8356 "BPF_LDX_MEM | BPF_W, MSB set",
8357 .u.insns_int = {
8358 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8359 BPF_LD_IMM64(R2, 0x0000000085868788ULL),
8360 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8361 #ifdef __BIG_ENDIAN
8362 BPF_LDX_MEM(BPF_W, R0, R10, -4),
8363 #else
8364 BPF_LDX_MEM(BPF_W, R0, R10, -8),
8365 #endif
8366 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8367 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8368 BPF_EXIT_INSN(),
8369 },
8370 INTERNAL,
8371 { },
8372 { { 0, 0 } },
8373 .stack_depth = 8,
8374 },
8375 {
8376 "BPF_LDX_MEM | BPF_W, negative offset",
8377 .u.insns_int = {
8378 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8379 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8380 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8381 BPF_STX_MEM(BPF_W, R1, R2, -256),
8382 BPF_LDX_MEM(BPF_W, R0, R1, -256),
8383 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8384 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8385 BPF_EXIT_INSN(),
8386 },
8387 INTERNAL | FLAG_LARGE_MEM,
8388 { },
8389 { { 512, 0 } },
8390 .stack_depth = 0,
8391 },
8392 {
8393 "BPF_LDX_MEM | BPF_W, small positive offset",
8394 .u.insns_int = {
8395 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8396 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8397 BPF_STX_MEM(BPF_W, R1, R2, 256),
8398 BPF_LDX_MEM(BPF_W, R0, R1, 256),
8399 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8400 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8401 BPF_EXIT_INSN(),
8402 },
8403 INTERNAL | FLAG_LARGE_MEM,
8404 { },
8405 { { 512, 0 } },
8406 .stack_depth = 0,
8407 },
8408 {
8409 "BPF_LDX_MEM | BPF_W, large positive offset",
8410 .u.insns_int = {
8411 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8412 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8413 BPF_STX_MEM(BPF_W, R1, R2, 16384),
8414 BPF_LDX_MEM(BPF_W, R0, R1, 16384),
8415 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8416 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8417 BPF_EXIT_INSN(),
8418 },
8419 INTERNAL | FLAG_LARGE_MEM,
8420 { },
8421 { { 16384 + 16, 0 } },
8422 .stack_depth = 0,
8423 },
8424 {
8425 "BPF_LDX_MEM | BPF_W, unaligned positive offset",
8426 .u.insns_int = {
8427 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8428 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8429 BPF_STX_MEM(BPF_W, R1, R2, 13),
8430 BPF_LDX_MEM(BPF_W, R0, R1, 13),
8431 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8432 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8433 BPF_EXIT_INSN(),
8434 },
8435 INTERNAL | FLAG_LARGE_MEM,
8436 { },
8437 { { 32, 0 } },
8438 .stack_depth = 0,
8439 },
8440 {
8441 "BPF_LDX_MEM | BPF_DW, base",
8442 .u.insns_int = {
8443 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8444 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8445 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8446 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8447 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8448 BPF_EXIT_INSN(),
8449 },
8450 INTERNAL,
8451 { },
8452 { { 0, 0 } },
8453 .stack_depth = 8,
8454 },
8455 {
8456 "BPF_LDX_MEM | BPF_DW, MSB set",
8457 .u.insns_int = {
8458 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8459 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8460 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8461 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8462 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8463 BPF_EXIT_INSN(),
8464 },
8465 INTERNAL,
8466 { },
8467 { { 0, 0 } },
8468 .stack_depth = 8,
8469 },
8470 {
8471 "BPF_LDX_MEM | BPF_DW, negative offset",
8472 .u.insns_int = {
8473 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8474 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8475 BPF_STX_MEM(BPF_DW, R1, R2, -256),
8476 BPF_LDX_MEM(BPF_DW, R0, R1, -256),
8477 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8478 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8479 BPF_EXIT_INSN(),
8480 },
8481 INTERNAL | FLAG_LARGE_MEM,
8482 { },
8483 { { 512, 0 } },
8484 .stack_depth = 0,
8485 },
8486 {
8487 "BPF_LDX_MEM | BPF_DW, small positive offset",
8488 .u.insns_int = {
8489 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8490 BPF_STX_MEM(BPF_DW, R1, R2, 256),
8491 BPF_LDX_MEM(BPF_DW, R0, R1, 256),
8492 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8493 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8494 BPF_EXIT_INSN(),
8495 },
8496 INTERNAL | FLAG_LARGE_MEM,
8497 { },
8498 { { 512, 0 } },
8499 .stack_depth = 8,
8500 },
8501 {
8502 "BPF_LDX_MEM | BPF_DW, large positive offset",
8503 .u.insns_int = {
8504 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8505 BPF_STX_MEM(BPF_DW, R1, R2, 32760),
8506 BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
8507 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8508 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8509 BPF_EXIT_INSN(),
8510 },
8511 INTERNAL | FLAG_LARGE_MEM,
8512 { },
8513 { { 32768, 0 } },
8514 .stack_depth = 0,
8515 },
8516 {
8517 "BPF_LDX_MEM | BPF_DW, unaligned positive offset",
8518 .u.insns_int = {
8519 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8520 BPF_STX_MEM(BPF_DW, R1, R2, 13),
8521 BPF_LDX_MEM(BPF_DW, R0, R1, 13),
8522 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8523 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8524 BPF_EXIT_INSN(),
8525 },
8526 INTERNAL | FLAG_LARGE_MEM,
8527 { },
8528 { { 32, 0 } },
8529 .stack_depth = 0,
8530 },
8531 /* BPF_LDX_MEMSX B/H/W */
8532 {
8533 "BPF_LDX_MEMSX | BPF_B",
8534 .u.insns_int = {
8535 BPF_LD_IMM64(R1, 0xdead0000000000f0ULL),
8536 BPF_LD_IMM64(R2, 0xfffffffffffffff0ULL),
8537 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8538 #ifdef __BIG_ENDIAN
8539 BPF_LDX_MEMSX(BPF_B, R0, R10, -1),
8540 #else
8541 BPF_LDX_MEMSX(BPF_B, R0, R10, -8),
8542 #endif
8543 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8544 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8545 BPF_EXIT_INSN(),
8546 },
8547 INTERNAL,
8548 { },
8549 { { 0, 0 } },
8550 .stack_depth = 8,
8551 },
8552 {
8553 "BPF_LDX_MEMSX | BPF_H",
8554 .u.insns_int = {
8555 BPF_LD_IMM64(R1, 0xdead00000000f123ULL),
8556 BPF_LD_IMM64(R2, 0xfffffffffffff123ULL),
8557 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8558 #ifdef __BIG_ENDIAN
8559 BPF_LDX_MEMSX(BPF_H, R0, R10, -2),
8560 #else
8561 BPF_LDX_MEMSX(BPF_H, R0, R10, -8),
8562 #endif
8563 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8564 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8565 BPF_EXIT_INSN(),
8566 },
8567 INTERNAL,
8568 { },
8569 { { 0, 0 } },
8570 .stack_depth = 8,
8571 },
8572 {
8573 "BPF_LDX_MEMSX | BPF_W",
8574 .u.insns_int = {
8575 BPF_LD_IMM64(R1, 0x00000000deadbeefULL),
8576 BPF_LD_IMM64(R2, 0xffffffffdeadbeefULL),
8577 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8578 #ifdef __BIG_ENDIAN
8579 BPF_LDX_MEMSX(BPF_W, R0, R10, -4),
8580 #else
8581 BPF_LDX_MEMSX(BPF_W, R0, R10, -8),
8582 #endif
8583 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8584 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8585 BPF_EXIT_INSN(),
8586 },
8587 INTERNAL,
8588 { },
8589 { { 0, 0 } },
8590 .stack_depth = 8,
8591 },
8592 /* BPF_STX_MEM B/H/W/DW */
8593 {
8594 "BPF_STX_MEM | BPF_B",
8595 .u.insns_int = {
8596 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8597 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8598 BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
8599 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8600 #ifdef __BIG_ENDIAN
8601 BPF_STX_MEM(BPF_B, R10, R2, -1),
8602 #else
8603 BPF_STX_MEM(BPF_B, R10, R2, -8),
8604 #endif
8605 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8606 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8607 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8608 BPF_EXIT_INSN(),
8609 },
8610 INTERNAL,
8611 { },
8612 { { 0, 0 } },
8613 .stack_depth = 8,
8614 },
8615 {
8616 "BPF_STX_MEM | BPF_B, MSB set",
8617 .u.insns_int = {
8618 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8619 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8620 BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
8621 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8622 #ifdef __BIG_ENDIAN
8623 BPF_STX_MEM(BPF_B, R10, R2, -1),
8624 #else
8625 BPF_STX_MEM(BPF_B, R10, R2, -8),
8626 #endif
8627 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8628 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8629 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8630 BPF_EXIT_INSN(),
8631 },
8632 INTERNAL,
8633 { },
8634 { { 0, 0 } },
8635 .stack_depth = 8,
8636 },
8637 {
8638 "BPF_STX_MEM | BPF_H",
8639 .u.insns_int = {
8640 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8641 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8642 BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
8643 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8644 #ifdef __BIG_ENDIAN
8645 BPF_STX_MEM(BPF_H, R10, R2, -2),
8646 #else
8647 BPF_STX_MEM(BPF_H, R10, R2, -8),
8648 #endif
8649 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8650 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8651 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8652 BPF_EXIT_INSN(),
8653 },
8654 INTERNAL,
8655 { },
8656 { { 0, 0 } },
8657 .stack_depth = 8,
8658 },
8659 {
8660 "BPF_STX_MEM | BPF_H, MSB set",
8661 .u.insns_int = {
8662 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8663 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8664 BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
8665 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8666 #ifdef __BIG_ENDIAN
8667 BPF_STX_MEM(BPF_H, R10, R2, -2),
8668 #else
8669 BPF_STX_MEM(BPF_H, R10, R2, -8),
8670 #endif
8671 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8672 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8673 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8674 BPF_EXIT_INSN(),
8675 },
8676 INTERNAL,
8677 { },
8678 { { 0, 0 } },
8679 .stack_depth = 8,
8680 },
8681 {
8682 "BPF_STX_MEM | BPF_W",
8683 .u.insns_int = {
8684 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8685 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8686 BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
8687 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8688 #ifdef __BIG_ENDIAN
8689 BPF_STX_MEM(BPF_W, R10, R2, -4),
8690 #else
8691 BPF_STX_MEM(BPF_W, R10, R2, -8),
8692 #endif
8693 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8694 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8695 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8696 BPF_EXIT_INSN(),
8697 },
8698 INTERNAL,
8699 { },
8700 { { 0, 0 } },
8701 .stack_depth = 8,
8702 },
8703 {
8704 "BPF_STX_MEM | BPF_W, MSB set",
8705 .u.insns_int = {
8706 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8707 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8708 BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
8709 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8710 #ifdef __BIG_ENDIAN
8711 BPF_STX_MEM(BPF_W, R10, R2, -4),
8712 #else
8713 BPF_STX_MEM(BPF_W, R10, R2, -8),
8714 #endif
8715 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8716 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8717 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8718 BPF_EXIT_INSN(),
8719 },
8720 INTERNAL,
8721 { },
8722 { { 0, 0 } },
8723 .stack_depth = 8,
8724 },
8725 /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
8726 {
8727 "ST_MEM_B: Store/Load byte: max negative",
8728 .u.insns_int = {
8729 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8730 BPF_ST_MEM(BPF_B, R10, -40, 0xff),
8731 BPF_LDX_MEM(BPF_B, R0, R10, -40),
8732 BPF_EXIT_INSN(),
8733 },
8734 INTERNAL,
8735 { },
8736 { { 0, 0xff } },
8737 .stack_depth = 40,
8738 },
8739 {
8740 "ST_MEM_B: Store/Load byte: max positive",
8741 .u.insns_int = {
8742 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8743 BPF_ST_MEM(BPF_H, R10, -40, 0x7f),
8744 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8745 BPF_EXIT_INSN(),
8746 },
8747 INTERNAL,
8748 { },
8749 { { 0, 0x7f } },
8750 .stack_depth = 40,
8751 },
8752 {
8753 "STX_MEM_B: Store/Load byte: max negative",
8754 .u.insns_int = {
8755 BPF_LD_IMM64(R0, 0),
8756 BPF_LD_IMM64(R1, 0xffLL),
8757 BPF_STX_MEM(BPF_B, R10, R1, -40),
8758 BPF_LDX_MEM(BPF_B, R0, R10, -40),
8759 BPF_EXIT_INSN(),
8760 },
8761 INTERNAL,
8762 { },
8763 { { 0, 0xff } },
8764 .stack_depth = 40,
8765 },
8766 {
8767 "ST_MEM_H: Store/Load half word: max negative",
8768 .u.insns_int = {
8769 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8770 BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
8771 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8772 BPF_EXIT_INSN(),
8773 },
8774 INTERNAL,
8775 { },
8776 { { 0, 0xffff } },
8777 .stack_depth = 40,
8778 },
8779 {
8780 "ST_MEM_H: Store/Load half word: max positive",
8781 .u.insns_int = {
8782 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8783 BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
8784 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8785 BPF_EXIT_INSN(),
8786 },
8787 INTERNAL,
8788 { },
8789 { { 0, 0x7fff } },
8790 .stack_depth = 40,
8791 },
8792 {
8793 "STX_MEM_H: Store/Load half word: max negative",
8794 .u.insns_int = {
8795 BPF_LD_IMM64(R0, 0),
8796 BPF_LD_IMM64(R1, 0xffffLL),
8797 BPF_STX_MEM(BPF_H, R10, R1, -40),
8798 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8799 BPF_EXIT_INSN(),
8800 },
8801 INTERNAL,
8802 { },
8803 { { 0, 0xffff } },
8804 .stack_depth = 40,
8805 },
8806 {
8807 "ST_MEM_W: Store/Load word: max negative",
8808 .u.insns_int = {
8809 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8810 BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
8811 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8812 BPF_EXIT_INSN(),
8813 },
8814 INTERNAL,
8815 { },
8816 { { 0, 0xffffffff } },
8817 .stack_depth = 40,
8818 },
8819 {
8820 "ST_MEM_W: Store/Load word: max positive",
8821 .u.insns_int = {
8822 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8823 BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
8824 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8825 BPF_EXIT_INSN(),
8826 },
8827 INTERNAL,
8828 { },
8829 { { 0, 0x7fffffff } },
8830 .stack_depth = 40,
8831 },
8832 {
8833 "STX_MEM_W: Store/Load word: max negative",
8834 .u.insns_int = {
8835 BPF_LD_IMM64(R0, 0),
8836 BPF_LD_IMM64(R1, 0xffffffffLL),
8837 BPF_STX_MEM(BPF_W, R10, R1, -40),
8838 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8839 BPF_EXIT_INSN(),
8840 },
8841 INTERNAL,
8842 { },
8843 { { 0, 0xffffffff } },
8844 .stack_depth = 40,
8845 },
8846 {
8847 "ST_MEM_DW: Store/Load double word: max negative",
8848 .u.insns_int = {
8849 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8850 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8851 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8852 BPF_EXIT_INSN(),
8853 },
8854 INTERNAL,
8855 { },
8856 { { 0, 0xffffffff } },
8857 .stack_depth = 40,
8858 },
8859 {
8860 "ST_MEM_DW: Store/Load double word: max negative 2",
8861 .u.insns_int = {
8862 BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
8863 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
8864 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8865 BPF_LDX_MEM(BPF_DW, R2, R10, -40),
8866 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
8867 BPF_MOV32_IMM(R0, 2),
8868 BPF_EXIT_INSN(),
8869 BPF_MOV32_IMM(R0, 1),
8870 BPF_EXIT_INSN(),
8871 },
8872 INTERNAL,
8873 { },
8874 { { 0, 0x1 } },
8875 .stack_depth = 40,
8876 },
8877 {
8878 "ST_MEM_DW: Store/Load double word: max positive",
8879 .u.insns_int = {
8880 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8881 BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
8882 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8883 BPF_EXIT_INSN(),
8884 },
8885 INTERNAL,
8886 { },
8887 { { 0, 0x7fffffff } },
8888 .stack_depth = 40,
8889 },
8890 {
8891 "STX_MEM_DW: Store/Load double word: max negative",
8892 .u.insns_int = {
8893 BPF_LD_IMM64(R0, 0),
8894 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8895 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8896 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8897 BPF_EXIT_INSN(),
8898 },
8899 INTERNAL,
8900 { },
8901 { { 0, 0xffffffff } },
8902 .stack_depth = 40,
8903 },
8904 {
8905 "STX_MEM_DW: Store double word: first word in memory",
8906 .u.insns_int = {
8907 BPF_LD_IMM64(R0, 0),
8908 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8909 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8910 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8911 BPF_EXIT_INSN(),
8912 },
8913 INTERNAL,
8914 { },
8915 #ifdef __BIG_ENDIAN
8916 { { 0, 0x01234567 } },
8917 #else
8918 { { 0, 0x89abcdef } },
8919 #endif
8920 .stack_depth = 40,
8921 },
8922 {
8923 "STX_MEM_DW: Store double word: second word in memory",
8924 .u.insns_int = {
8925 BPF_LD_IMM64(R0, 0),
8926 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8927 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8928 BPF_LDX_MEM(BPF_W, R0, R10, -36),
8929 BPF_EXIT_INSN(),
8930 },
8931 INTERNAL,
8932 { },
8933 #ifdef __BIG_ENDIAN
8934 { { 0, 0x89abcdef } },
8935 #else
8936 { { 0, 0x01234567 } },
8937 #endif
8938 .stack_depth = 40,
8939 },
8940 /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
8941 {
8942 "STX_XADD_W: X + 1 + 1 + 1 + ...",
8943 { },
8944 INTERNAL,
8945 { },
8946 { { 0, 4134 } },
8947 .fill_helper = bpf_fill_stxw,
8948 },
8949 {
8950 "STX_XADD_DW: X + 1 + 1 + 1 + ...",
8951 { },
8952 INTERNAL,
8953 { },
8954 { { 0, 4134 } },
8955 .fill_helper = bpf_fill_stxdw,
8956 },
8957 /*
8958 * Exhaustive tests of atomic operation variants.
8959 * Individual tests are expanded from template macros for all
8960 * combinations of ALU operation, word size and fetching.
8961 */
/*
 * For 32-bit (BPF_W) atomic tests, poison the unused upper 32 bits of the
 * operand so that an implementation that incorrectly operates on the full
 * 64 bits is detected. 64-bit (BPF_DW) tests need no poison value.
 */
#define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8963
/*
 * Template: verify the memory result of an atomic 'op'. A 'width'-sized
 * word holding 'old' is updated atomically with operand 'update' and the
 * word is then expected to contain 'result'. The loaded value is folded
 * (high word OR'ed into low word) so that leaked poison bits in the upper
 * half also fail the test.
 */
#define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result)	\
{									\
	"BPF_ATOMIC | " #width ", " #op ": Test: "			\
		#old " " #logic " " #update " = " #result,		\
	.u.insns_int = {						\
		BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)),	\
		BPF_ST_MEM(width, R10, -40, old),			\
		BPF_ATOMIC_OP(width, op, R10, R5, -40),			\
		BPF_LDX_MEM(width, R0, R10, -40),			\
		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
		BPF_ALU64_REG(BPF_OR, R0, R1),				\
		BPF_EXIT_INSN(),					\
	},								\
	INTERNAL,							\
	{ },								\
	{ { 0, result } },						\
	.stack_depth = 40,						\
}
8983 #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
8984 { \
8985 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
8986 #old " " #logic " " #update " = " #result, \
8987 .u.insns_int = { \
8988 BPF_ALU64_REG(BPF_MOV, R1, R10), \
8989 BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)), \
8990 BPF_ST_MEM(BPF_W, R10, -40, old), \
8991 BPF_ATOMIC_OP(width, op, R10, R0, -40), \
8992 BPF_ALU64_REG(BPF_MOV, R0, R10), \
8993 BPF_ALU64_REG(BPF_SUB, R0, R1), \
8994 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8995 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8996 BPF_ALU64_REG(BPF_OR, R0, R1), \
8997 BPF_EXIT_INSN(), \
8998 }, \
8999 INTERNAL, \
9000 { }, \
9001 { { 0, 0 } }, \
9002 .stack_depth = 40, \
9003 }
9004 #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result) \
9005 { \
9006 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: " \
9007 #old " " #logic " " #update " = " #result, \
9008 .u.insns_int = { \
9009 BPF_ALU64_REG(BPF_MOV, R0, R10), \
9010 BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)), \
9011 BPF_ST_MEM(width, R10, -40, old), \
9012 BPF_ATOMIC_OP(width, op, R10, R1, -40), \
9013 BPF_ALU64_REG(BPF_SUB, R0, R10), \
9014 BPF_ALU64_REG(BPF_MOV, R1, R0), \
9015 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
9016 BPF_ALU64_REG(BPF_OR, R0, R1), \
9017 BPF_EXIT_INSN(), \
9018 }, \
9019 INTERNAL, \
9020 { }, \
9021 { { 0, 0 } }, \
9022 .stack_depth = 40, \
9023 }
9024 #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result) \
9025 { \
9026 "BPF_ATOMIC | " #width ", " #op ": Test fetch: " \
9027 #old " " #logic " " #update " = " #result, \
9028 .u.insns_int = { \
9029 BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)), \
9030 BPF_ST_MEM(width, R10, -40, old), \
9031 BPF_ATOMIC_OP(width, op, R10, R3, -40), \
9032 BPF_ALU32_REG(BPF_MOV, R0, R3), \
9033 BPF_EXIT_INSN(), \
9034 }, \
9035 INTERNAL, \
9036 { }, \
9037 { { 0, (op) & BPF_FETCH ? old : update } }, \
9038 .stack_depth = 40, \
9039 }
9040 /* BPF_ATOMIC | BPF_W: BPF_ADD */
9041 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9042 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9043 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9044 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9045 /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
9046 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9047 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9048 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9049 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9050 /* BPF_ATOMIC | BPF_DW: BPF_ADD */
9051 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9052 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9053 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9054 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9055 /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
9056 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9057 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9058 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9059 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9060 /* BPF_ATOMIC | BPF_W: BPF_AND */
9061 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9062 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9063 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9064 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9065 /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
9066 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9067 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9068 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9069 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9070 /* BPF_ATOMIC | BPF_DW: BPF_AND */
9071 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9072 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9073 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9074 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9075 /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
9076 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9077 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9078 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9079 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9080 /* BPF_ATOMIC | BPF_W: BPF_OR */
9081 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9082 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9083 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9084 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9085 /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
9086 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9087 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9088 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9089 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9090 /* BPF_ATOMIC | BPF_DW: BPF_OR */
9091 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9092 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9093 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9094 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9095 /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
9096 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9097 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9098 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9099 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9100 /* BPF_ATOMIC | BPF_W: BPF_XOR */
9101 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9102 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9103 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9104 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9105 /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
9106 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9107 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9108 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9109 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9110 /* BPF_ATOMIC | BPF_DW: BPF_XOR */
9111 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9112 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9113 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9114 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9115 /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
9116 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9117 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9118 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9119 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9120 /* BPF_ATOMIC | BPF_W: BPF_XCHG */
9121 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9122 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9123 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9124 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9125 /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
9126 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9127 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9128 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9129 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9130 #undef BPF_ATOMIC_POISON
9131 #undef BPF_ATOMIC_OP_TEST1
9132 #undef BPF_ATOMIC_OP_TEST2
9133 #undef BPF_ATOMIC_OP_TEST3
9134 #undef BPF_ATOMIC_OP_TEST4
	/*
	 * BPF_ATOMIC | BPF_W, BPF_CMPXCHG
	 *
	 * CMPXCHG compares R0 (the implicit compare operand) with the
	 * memory word; on match it stores the source register. In all
	 * cases R0 receives the value that was in memory beforehand.
	 */
	{
		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
		.u.insns_int = {
			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		/* R0 must hold the old memory value. */
		{ { 0, 0x01234567 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
		.u.insns_int = {
			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
			BPF_LDX_MEM(BPF_W, R0, R10, -40),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		/* Memory must now hold the new value. */
		{ { 0, 0x89abcdef } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
		.u.insns_int = {
			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
			/* R0 deliberately mismatches memory. */
			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x01234567 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
		.u.insns_int = {
			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
			BPF_LDX_MEM(BPF_W, R0, R10, -40),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		/* Failed compare must leave memory untouched. */
		{ { 0, 0x01234567 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
		.u.insns_int = {
			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
			/* First exchange succeeds, second fails; R3 must survive both. */
			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
			BPF_ALU32_REG(BPF_MOV, R0, R3),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x89abcdef } },
		.stack_depth = 40,
	},
	/* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
	{
		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
		.u.insns_int = {
			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_STX_MEM(BPF_DW, R10, R1, -40),
			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
			/* R0 must equal the old value; reduce to 0 for the check. */
			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
		.u.insns_int = {
			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_STX_MEM(BPF_DW, R10, R0, -40),
			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
		.u.insns_int = {
			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			/* Make R0 differ from memory so the exchange fails. */
			BPF_ALU64_IMM(BPF_ADD, R0, 1),
			BPF_STX_MEM(BPF_DW, R10, R1, -40),
			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
		.u.insns_int = {
			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_ALU64_IMM(BPF_ADD, R0, 1),
			BPF_STX_MEM(BPF_DW, R10, R1, -40),
			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } },
		.stack_depth = 40,
	},
	{
		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
		.u.insns_int = {
			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_STX_MEM(BPF_DW, R10, R1, -40),
			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
			/* R2 (source operand) must be unchanged by the op. */
			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } },
		.stack_depth = 40,
	},
	/*
	 * BPF_JMP32 | BPF_JEQ | BPF_K
	 *
	 * Common pattern in the conditional-jump tests below: the first
	 * branch must NOT be taken (else R0 is zeroed and the test
	 * fails), the second MUST be taken (skipping the zeroing).
	 */
	{
		"JMP32_JEQ_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 123),
			BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
			BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 123 } }
	},
	{
		"JMP32_JEQ_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 12345678 } }
	},
	{
		"JMP32_JEQ_K: negative immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
			BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	/* BPF_JMP32 | BPF_JEQ | BPF_X */
	{
		"JMP32_JEQ_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
			BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
			BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1234 } }
	},
	/* BPF_JMP32 | BPF_JNE | BPF_K */
	{
		"JMP32_JNE_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 123),
			BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
			BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 123 } }
	},
	{
		"JMP32_JNE_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
			BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
			BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 12345678 } }
	},
	{
		"JMP32_JNE_K: negative immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
			BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	/* BPF_JMP32 | BPF_JNE | BPF_X */
	{
		"JMP32_JNE_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
			BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1234 } }
	},
	/* BPF_JMP32 | BPF_JSET | BPF_K: jump if (dst & imm) != 0. */
	{
		"JMP32_JSET_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
			BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{
		"JMP32_JSET_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
			BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
			BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x40000000 } }
	},
	{
		"JMP32_JSET_K: negative immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			/* -1 has all bits set, so the test always matches. */
			BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	/* BPF_JMP32 | BPF_JSET | BPF_X */
	{
		"JMP32_JSET_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 8),
			BPF_ALU32_IMM(BPF_MOV, R1, 7),
			/* 8 & 7 == 0: must not branch. */
			BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 8 } }
	},
	/* BPF_JMP32 | BPF_JGT | BPF_K: unsigned 32-bit greater-than. */
	{
		"JMP32_JGT_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 123),
			BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
			BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 123 } }
	},
	{
		"JMP32_JGT_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
			BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JGT | BPF_X */
	{
		"JMP32_JGT_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
			BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
			BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JGE | BPF_K: unsigned 32-bit greater-or-equal. */
	{
		"JMP32_JGE_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 123),
			BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
			BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 123 } }
	},
	{
		"JMP32_JGE_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
			BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JGE | BPF_X */
	{
		"JMP32_JGE_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
			BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
			BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JLT | BPF_K: unsigned 32-bit less-than. */
	{
		"JMP32_JLT_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 123),
			BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
			BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 123 } }
	},
	{
		"JMP32_JLT_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
			BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JLT | BPF_X */
	{
		"JMP32_JLT_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
			BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
			BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JLE | BPF_K: unsigned 32-bit less-or-equal. */
	{
		"JMP32_JLE_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 123),
			BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
			BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 123 } }
	},
	{
		"JMP32_JLE_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JLE | BPF_X */
	{
		"JMP32_JLE_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
			BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
			BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffe } }
	},
	/* BPF_JMP32 | BPF_JSGT | BPF_K: signed 32-bit greater-than. */
	{
		"JMP32_JSGT_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
			BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	{
		"JMP32_JSGT_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
			BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSGT | BPF_X */
	{
		"JMP32_JSGT_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
			BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
			BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSGE | BPF_K: signed 32-bit greater-or-equal. */
	{
		"JMP32_JSGE_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
			BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	{
		"JMP32_JSGE_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
			BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSGE | BPF_X */
	{
		"JMP32_JSGE_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
			BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
			BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSLT | BPF_K: signed 32-bit less-than. */
	{
		"JMP32_JSLT_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
			BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	{
		"JMP32_JSLT_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
			BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSLT | BPF_X */
	{
		"JMP32_JSLT_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
			BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
			BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSLE | BPF_K: signed 32-bit less-or-equal. */
	{
		"JMP32_JSLE_K: Small immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
			BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	{
		"JMP32_JSLE_K: Large immediate",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
			BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
			BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -12345678 } }
	},
	/* BPF_JMP32 | BPF_JSLE | BPF_X */
9797 {
9798 "JMP32_JSLE_X",
9799 .u.insns_int = {
9800 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9801 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9802 BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
9803 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9804 BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
9805 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9806 BPF_EXIT_INSN(),
9807 },
9808 INTERNAL,
9809 { },
9810 { { 0, -12345678 } }
9811 },
	/* BPF_JMP | BPF_EXIT: the instruction after EXIT must never run. */
	{
		"JMP_EXIT",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
		},
		INTERNAL,
		{ },
		{ { 0, 0x4711 } },
	},
	/* BPF_JMP | BPF_JA: offset carried in the off field. */
	{
		"JMP_JA: Unconditional jump: if (true) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JA, 0, 0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/* BPF_JMP32 | BPF_JA: wide jump, offset carried in the imm field. */
	{
		"JMP32_JA: Unconditional jump: if (true) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_JMP32_IMM(BPF_JA, 0, 1, 0),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/*
	 * BPF_JMP | BPF_JSLT | BPF_K
	 *
	 * 64-bit signed compares against an immediate; the all-ones
	 * LD_IMM64 patterns are -1/-2 when interpreted as signed.
	 */
	{
		"JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/* BPF_JMP | BPF_JSGT | BPF_K */
	{
		"JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
			BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
			BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/* BPF_JMP | BPF_JSLE | BPF_K */
	{
		"JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		/* Count R1 down from 3; JSLE must fire exactly when R1 hits 0. */
		"JMP_JSLE_K: Signed jump: value walk 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 3),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
			BPF_ALU64_IMM(BPF_SUB, R1, 1),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
			BPF_ALU64_IMM(BPF_SUB, R1, 1),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
			BPF_ALU64_IMM(BPF_SUB, R1, 1),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
			BPF_EXIT_INSN(),		/* bad exit */
			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		/* Same walk with step 2; final value is -1, still <= 0. */
		"JMP_JSLE_K: Signed jump: value walk 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 3),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
			BPF_ALU64_IMM(BPF_SUB, R1, 2),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
			BPF_ALU64_IMM(BPF_SUB, R1, 2),
			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
			BPF_EXIT_INSN(),		/* bad exit */
			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/* BPF_JMP | BPF_JSGE | BPF_K */
	{
		"JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
			BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
			BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		/* Count R1 up from -3; JSGE must fire exactly when R1 hits 0. */
		"JMP_JSGE_K: Signed jump: value walk 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, -3),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
			BPF_ALU64_IMM(BPF_ADD, R1, 1),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
			BPF_ALU64_IMM(BPF_ADD, R1, 1),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
			BPF_ALU64_IMM(BPF_ADD, R1, 1),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
			BPF_EXIT_INSN(),		/* bad exit */
			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSGE_K: Signed jump: value walk 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, -3),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
			BPF_EXIT_INSN(),		/* bad exit */
			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
10044 /* BPF_JMP | BPF_JGT | BPF_K */
10045 {
10046 "JMP_JGT_K: if (3 > 2) return 1",
10047 .u.insns_int = {
10048 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10049 BPF_LD_IMM64(R1, 3),
10050 BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
10051 BPF_EXIT_INSN(),
10052 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10053 BPF_EXIT_INSN(),
10054 },
10055 INTERNAL,
10056 { },
10057 { { 0, 1 } },
10058 },
10059 {
10060 "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
10061 .u.insns_int = {
10062 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10063 BPF_LD_IMM64(R1, -1),
10064 BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
10065 BPF_EXIT_INSN(),
10066 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10067 BPF_EXIT_INSN(),
10068 },
10069 INTERNAL,
10070 { },
10071 { { 0, 1 } },
10072 },
10073 /* BPF_JMP | BPF_JLT | BPF_K */
10074 {
10075 "JMP_JLT_K: if (2 < 3) return 1",
10076 .u.insns_int = {
10077 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10078 BPF_LD_IMM64(R1, 2),
10079 BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
10080 BPF_EXIT_INSN(),
10081 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10082 BPF_EXIT_INSN(),
10083 },
10084 INTERNAL,
10085 { },
10086 { { 0, 1 } },
10087 },
10088 {
10089 "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
10090 .u.insns_int = {
10091 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10092 BPF_LD_IMM64(R1, 1),
10093 BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
10094 BPF_EXIT_INSN(),
10095 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10096 BPF_EXIT_INSN(),
10097 },
10098 INTERNAL,
10099 { },
10100 { { 0, 1 } },
10101 },
10102 /* BPF_JMP | BPF_JGE | BPF_K */
10103 {
10104 "JMP_JGE_K: if (3 >= 2) return 1",
10105 .u.insns_int = {
10106 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10107 BPF_LD_IMM64(R1, 3),
10108 BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
10109 BPF_EXIT_INSN(),
10110 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10111 BPF_EXIT_INSN(),
10112 },
10113 INTERNAL,
10114 { },
10115 { { 0, 1 } },
10116 },
10117 /* BPF_JMP | BPF_JLE | BPF_K */
10118 {
10119 "JMP_JLE_K: if (2 <= 3) return 1",
10120 .u.insns_int = {
10121 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10122 BPF_LD_IMM64(R1, 2),
10123 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
10124 BPF_EXIT_INSN(),
10125 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10126 BPF_EXIT_INSN(),
10127 },
10128 INTERNAL,
10129 { },
10130 { { 0, 1 } },
10131 },
10132 /* BPF_JMP | BPF_JGT | BPF_K jump backwards */
10133 {
10134 "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
10135 .u.insns_int = {
10136 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
10137 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
10138 BPF_EXIT_INSN(),
10139 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
10140 BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
10141 BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
10142 BPF_EXIT_INSN(),
10143 },
10144 INTERNAL,
10145 { },
10146 { { 0, 1 } },
10147 },
10148 {
10149 "JMP_JGE_K: if (3 >= 3) return 1",
10150 .u.insns_int = {
10151 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10152 BPF_LD_IMM64(R1, 3),
10153 BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
10154 BPF_EXIT_INSN(),
10155 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10156 BPF_EXIT_INSN(),
10157 },
10158 INTERNAL,
10159 { },
10160 { { 0, 1 } },
10161 },
10162 /* BPF_JMP | BPF_JLT | BPF_K jump backwards */
10163 {
10164 "JMP_JGT_K: if (2 < 3) return 1 (jump backwards)",
10165 .u.insns_int = {
10166 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
10167 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
10168 BPF_EXIT_INSN(),
10169 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
10170 BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
10171 BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
10172 BPF_EXIT_INSN(),
10173 },
10174 INTERNAL,
10175 { },
10176 { { 0, 1 } },
10177 },
10178 {
10179 "JMP_JLE_K: if (3 <= 3) return 1",
10180 .u.insns_int = {
10181 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10182 BPF_LD_IMM64(R1, 3),
10183 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
10184 BPF_EXIT_INSN(),
10185 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10186 BPF_EXIT_INSN(),
10187 },
10188 INTERNAL,
10189 { },
10190 { { 0, 1 } },
10191 },
10192 /* BPF_JMP | BPF_JNE | BPF_K */
10193 {
10194 "JMP_JNE_K: if (3 != 2) return 1",
10195 .u.insns_int = {
10196 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10197 BPF_LD_IMM64(R1, 3),
10198 BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
10199 BPF_EXIT_INSN(),
10200 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10201 BPF_EXIT_INSN(),
10202 },
10203 INTERNAL,
10204 { },
10205 { { 0, 1 } },
10206 },
10207 /* BPF_JMP | BPF_JEQ | BPF_K */
10208 {
10209 "JMP_JEQ_K: if (3 == 3) return 1",
10210 .u.insns_int = {
10211 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10212 BPF_LD_IMM64(R1, 3),
10213 BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
10214 BPF_EXIT_INSN(),
10215 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10216 BPF_EXIT_INSN(),
10217 },
10218 INTERNAL,
10219 { },
10220 { { 0, 1 } },
10221 },
10222 /* BPF_JMP | BPF_JSET | BPF_K */
10223 {
10224 "JMP_JSET_K: if (0x3 & 0x2) return 1",
10225 .u.insns_int = {
10226 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10227 BPF_LD_IMM64(R1, 3),
10228 BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
10229 BPF_EXIT_INSN(),
10230 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10231 BPF_EXIT_INSN(),
10232 },
10233 INTERNAL,
10234 { },
10235 { { 0, 1 } },
10236 },
10237 {
10238 "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
10239 .u.insns_int = {
10240 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10241 BPF_LD_IMM64(R1, 3),
10242 BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
10243 BPF_EXIT_INSN(),
10244 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10245 BPF_EXIT_INSN(),
10246 },
10247 INTERNAL,
10248 { },
10249 { { 0, 1 } },
10250 },
10251 /* BPF_JMP | BPF_JSGT | BPF_X */
10252 {
10253 "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
10254 .u.insns_int = {
10255 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10256 BPF_LD_IMM64(R1, -1),
10257 BPF_LD_IMM64(R2, -2),
10258 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
10259 BPF_EXIT_INSN(),
10260 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10261 BPF_EXIT_INSN(),
10262 },
10263 INTERNAL,
10264 { },
10265 { { 0, 1 } },
10266 },
10267 {
10268 "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
10269 .u.insns_int = {
10270 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10271 BPF_LD_IMM64(R1, -1),
10272 BPF_LD_IMM64(R2, -1),
10273 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
10274 BPF_EXIT_INSN(),
10275 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10276 BPF_EXIT_INSN(),
10277 },
10278 INTERNAL,
10279 { },
10280 { { 0, 1 } },
10281 },
10282 /* BPF_JMP | BPF_JSLT | BPF_X */
10283 {
10284 "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
10285 .u.insns_int = {
10286 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10287 BPF_LD_IMM64(R1, -1),
10288 BPF_LD_IMM64(R2, -2),
10289 BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
10290 BPF_EXIT_INSN(),
10291 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10292 BPF_EXIT_INSN(),
10293 },
10294 INTERNAL,
10295 { },
10296 { { 0, 1 } },
10297 },
10298 {
10299 "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
10300 .u.insns_int = {
10301 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10302 BPF_LD_IMM64(R1, -1),
10303 BPF_LD_IMM64(R2, -1),
10304 BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
10305 BPF_EXIT_INSN(),
10306 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10307 BPF_EXIT_INSN(),
10308 },
10309 INTERNAL,
10310 { },
10311 { { 0, 1 } },
10312 },
10313 /* BPF_JMP | BPF_JSGE | BPF_X */
10314 {
10315 "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
10316 .u.insns_int = {
10317 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10318 BPF_LD_IMM64(R1, -1),
10319 BPF_LD_IMM64(R2, -2),
10320 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
10321 BPF_EXIT_INSN(),
10322 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10323 BPF_EXIT_INSN(),
10324 },
10325 INTERNAL,
10326 { },
10327 { { 0, 1 } },
10328 },
10329 {
10330 "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
10331 .u.insns_int = {
10332 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10333 BPF_LD_IMM64(R1, -1),
10334 BPF_LD_IMM64(R2, -1),
10335 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
10336 BPF_EXIT_INSN(),
10337 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10338 BPF_EXIT_INSN(),
10339 },
10340 INTERNAL,
10341 { },
10342 { { 0, 1 } },
10343 },
10344 /* BPF_JMP | BPF_JSLE | BPF_X */
10345 {
10346 "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
10347 .u.insns_int = {
10348 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10349 BPF_LD_IMM64(R1, -1),
10350 BPF_LD_IMM64(R2, -2),
10351 BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
10352 BPF_EXIT_INSN(),
10353 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10354 BPF_EXIT_INSN(),
10355 },
10356 INTERNAL,
10357 { },
10358 { { 0, 1 } },
10359 },
10360 {
10361 "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
10362 .u.insns_int = {
10363 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10364 BPF_LD_IMM64(R1, -1),
10365 BPF_LD_IMM64(R2, -1),
10366 BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
10367 BPF_EXIT_INSN(),
10368 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10369 BPF_EXIT_INSN(),
10370 },
10371 INTERNAL,
10372 { },
10373 { { 0, 1 } },
10374 },
10375 /* BPF_JMP | BPF_JGT | BPF_X */
10376 {
10377 "JMP_JGT_X: if (3 > 2) return 1",
10378 .u.insns_int = {
10379 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10380 BPF_LD_IMM64(R1, 3),
10381 BPF_LD_IMM64(R2, 2),
10382 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
10383 BPF_EXIT_INSN(),
10384 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10385 BPF_EXIT_INSN(),
10386 },
10387 INTERNAL,
10388 { },
10389 { { 0, 1 } },
10390 },
10391 {
10392 "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
10393 .u.insns_int = {
10394 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10395 BPF_LD_IMM64(R1, -1),
10396 BPF_LD_IMM64(R2, 1),
10397 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
10398 BPF_EXIT_INSN(),
10399 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10400 BPF_EXIT_INSN(),
10401 },
10402 INTERNAL,
10403 { },
10404 { { 0, 1 } },
10405 },
10406 /* BPF_JMP | BPF_JLT | BPF_X */
10407 {
10408 "JMP_JLT_X: if (2 < 3) return 1",
10409 .u.insns_int = {
10410 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10411 BPF_LD_IMM64(R1, 3),
10412 BPF_LD_IMM64(R2, 2),
10413 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
10414 BPF_EXIT_INSN(),
10415 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10416 BPF_EXIT_INSN(),
10417 },
10418 INTERNAL,
10419 { },
10420 { { 0, 1 } },
10421 },
10422 {
10423 "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
10424 .u.insns_int = {
10425 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10426 BPF_LD_IMM64(R1, -1),
10427 BPF_LD_IMM64(R2, 1),
10428 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
10429 BPF_EXIT_INSN(),
10430 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10431 BPF_EXIT_INSN(),
10432 },
10433 INTERNAL,
10434 { },
10435 { { 0, 1 } },
10436 },
10437 /* BPF_JMP | BPF_JGE | BPF_X */
10438 {
10439 "JMP_JGE_X: if (3 >= 2) return 1",
10440 .u.insns_int = {
10441 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10442 BPF_LD_IMM64(R1, 3),
10443 BPF_LD_IMM64(R2, 2),
10444 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
10445 BPF_EXIT_INSN(),
10446 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10447 BPF_EXIT_INSN(),
10448 },
10449 INTERNAL,
10450 { },
10451 { { 0, 1 } },
10452 },
10453 {
10454 "JMP_JGE_X: if (3 >= 3) return 1",
10455 .u.insns_int = {
10456 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10457 BPF_LD_IMM64(R1, 3),
10458 BPF_LD_IMM64(R2, 3),
10459 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
10460 BPF_EXIT_INSN(),
10461 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10462 BPF_EXIT_INSN(),
10463 },
10464 INTERNAL,
10465 { },
10466 { { 0, 1 } },
10467 },
10468 /* BPF_JMP | BPF_JLE | BPF_X */
10469 {
10470 "JMP_JLE_X: if (2 <= 3) return 1",
10471 .u.insns_int = {
10472 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10473 BPF_LD_IMM64(R1, 3),
10474 BPF_LD_IMM64(R2, 2),
10475 BPF_JMP_REG(BPF_JLE, R2, R1, 1),
10476 BPF_EXIT_INSN(),
10477 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10478 BPF_EXIT_INSN(),
10479 },
10480 INTERNAL,
10481 { },
10482 { { 0, 1 } },
10483 },
10484 {
10485 "JMP_JLE_X: if (3 <= 3) return 1",
10486 .u.insns_int = {
10487 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10488 BPF_LD_IMM64(R1, 3),
10489 BPF_LD_IMM64(R2, 3),
10490 BPF_JMP_REG(BPF_JLE, R1, R2, 1),
10491 BPF_EXIT_INSN(),
10492 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10493 BPF_EXIT_INSN(),
10494 },
10495 INTERNAL,
10496 { },
10497 { { 0, 1 } },
10498 },
10499 {
10500 /* Mainly testing JIT + imm64 here. */
10501 "JMP_JGE_X: ldimm64 test 1",
10502 .u.insns_int = {
10503 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10504 BPF_LD_IMM64(R1, 3),
10505 BPF_LD_IMM64(R2, 2),
10506 BPF_JMP_REG(BPF_JGE, R1, R2, 2),
10507 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10508 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10509 BPF_EXIT_INSN(),
10510 },
10511 INTERNAL,
10512 { },
10513 { { 0, 0xeeeeeeeeU } },
10514 },
10515 {
10516 "JMP_JGE_X: ldimm64 test 2",
10517 .u.insns_int = {
10518 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10519 BPF_LD_IMM64(R1, 3),
10520 BPF_LD_IMM64(R2, 2),
10521 BPF_JMP_REG(BPF_JGE, R1, R2, 0),
10522 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10523 BPF_EXIT_INSN(),
10524 },
10525 INTERNAL,
10526 { },
10527 { { 0, 0xffffffffU } },
10528 },
10529 {
10530 "JMP_JGE_X: ldimm64 test 3",
10531 .u.insns_int = {
10532 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10533 BPF_LD_IMM64(R1, 3),
10534 BPF_LD_IMM64(R2, 2),
10535 BPF_JMP_REG(BPF_JGE, R1, R2, 4),
10536 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10537 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10538 BPF_EXIT_INSN(),
10539 },
10540 INTERNAL,
10541 { },
10542 { { 0, 1 } },
10543 },
10544 {
10545 "JMP_JLE_X: ldimm64 test 1",
10546 .u.insns_int = {
10547 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10548 BPF_LD_IMM64(R1, 3),
10549 BPF_LD_IMM64(R2, 2),
10550 BPF_JMP_REG(BPF_JLE, R2, R1, 2),
10551 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10552 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10553 BPF_EXIT_INSN(),
10554 },
10555 INTERNAL,
10556 { },
10557 { { 0, 0xeeeeeeeeU } },
10558 },
10559 {
10560 "JMP_JLE_X: ldimm64 test 2",
10561 .u.insns_int = {
10562 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10563 BPF_LD_IMM64(R1, 3),
10564 BPF_LD_IMM64(R2, 2),
10565 BPF_JMP_REG(BPF_JLE, R2, R1, 0),
10566 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10567 BPF_EXIT_INSN(),
10568 },
10569 INTERNAL,
10570 { },
10571 { { 0, 0xffffffffU } },
10572 },
10573 {
10574 "JMP_JLE_X: ldimm64 test 3",
10575 .u.insns_int = {
10576 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10577 BPF_LD_IMM64(R1, 3),
10578 BPF_LD_IMM64(R2, 2),
10579 BPF_JMP_REG(BPF_JLE, R2, R1, 4),
10580 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10581 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10582 BPF_EXIT_INSN(),
10583 },
10584 INTERNAL,
10585 { },
10586 { { 0, 1 } },
10587 },
10588 /* BPF_JMP | BPF_JNE | BPF_X */
10589 {
10590 "JMP_JNE_X: if (3 != 2) return 1",
10591 .u.insns_int = {
10592 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10593 BPF_LD_IMM64(R1, 3),
10594 BPF_LD_IMM64(R2, 2),
10595 BPF_JMP_REG(BPF_JNE, R1, R2, 1),
10596 BPF_EXIT_INSN(),
10597 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10598 BPF_EXIT_INSN(),
10599 },
10600 INTERNAL,
10601 { },
10602 { { 0, 1 } },
10603 },
10604 /* BPF_JMP | BPF_JEQ | BPF_X */
10605 {
10606 "JMP_JEQ_X: if (3 == 3) return 1",
10607 .u.insns_int = {
10608 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10609 BPF_LD_IMM64(R1, 3),
10610 BPF_LD_IMM64(R2, 3),
10611 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
10612 BPF_EXIT_INSN(),
10613 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10614 BPF_EXIT_INSN(),
10615 },
10616 INTERNAL,
10617 { },
10618 { { 0, 1 } },
10619 },
10620 /* BPF_JMP | BPF_JSET | BPF_X */
10621 {
10622 "JMP_JSET_X: if (0x3 & 0x2) return 1",
10623 .u.insns_int = {
10624 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10625 BPF_LD_IMM64(R1, 3),
10626 BPF_LD_IMM64(R2, 2),
10627 BPF_JMP_REG(BPF_JSET, R1, R2, 1),
10628 BPF_EXIT_INSN(),
10629 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10630 BPF_EXIT_INSN(),
10631 },
10632 INTERNAL,
10633 { },
10634 { { 0, 1 } },
10635 },
10636 {
10637 "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
10638 .u.insns_int = {
10639 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10640 BPF_LD_IMM64(R1, 3),
10641 BPF_LD_IMM64(R2, 0xffffffff),
10642 BPF_JMP_REG(BPF_JSET, R1, R2, 1),
10643 BPF_EXIT_INSN(),
10644 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10645 BPF_EXIT_INSN(),
10646 },
10647 INTERNAL,
10648 { },
10649 { { 0, 1 } },
10650 },
10651 {
10652 "JMP_JA: Jump, gap, jump, ...",
10653 { },
10654 CLASSIC | FLAG_NO_DATA,
10655 { },
10656 { { 0, 0xababcbac } },
10657 .fill_helper = bpf_fill_ja,
10658 },
10659 { /* Mainly checking JIT here. */
10660 "BPF_MAXINSNS: Maximum possible literals",
10661 { },
10662 CLASSIC | FLAG_NO_DATA,
10663 { },
10664 { { 0, 0xffffffff } },
10665 .fill_helper = bpf_fill_maxinsns1,
10666 },
10667 { /* Mainly checking JIT here. */
10668 "BPF_MAXINSNS: Single literal",
10669 { },
10670 CLASSIC | FLAG_NO_DATA,
10671 { },
10672 { { 0, 0xfefefefe } },
10673 .fill_helper = bpf_fill_maxinsns2,
10674 },
10675 { /* Mainly checking JIT here. */
10676 "BPF_MAXINSNS: Run/add until end",
10677 { },
10678 CLASSIC | FLAG_NO_DATA,
10679 { },
10680 { { 0, 0x947bf368 } },
10681 .fill_helper = bpf_fill_maxinsns3,
10682 },
10683 {
10684 "BPF_MAXINSNS: Too many instructions",
10685 { },
10686 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
10687 { },
10688 { },
10689 .fill_helper = bpf_fill_maxinsns4,
10690 .expected_errcode = -EINVAL,
10691 },
10692 { /* Mainly checking JIT here. */
10693 "BPF_MAXINSNS: Very long jump",
10694 { },
10695 CLASSIC | FLAG_NO_DATA,
10696 { },
10697 { { 0, 0xabababab } },
10698 .fill_helper = bpf_fill_maxinsns5,
10699 },
10700 { /* Mainly checking JIT here. */
10701 "BPF_MAXINSNS: Ctx heavy transformations",
10702 { },
10703 CLASSIC,
10704 { },
10705 {
10706 { 1, SKB_VLAN_PRESENT },
10707 { 10, SKB_VLAN_PRESENT }
10708 },
10709 .fill_helper = bpf_fill_maxinsns6,
10710 },
10711 { /* Mainly checking JIT here. */
10712 "BPF_MAXINSNS: Call heavy transformations",
10713 { },
10714 CLASSIC | FLAG_NO_DATA,
10715 { },
10716 { { 1, 0 }, { 10, 0 } },
10717 .fill_helper = bpf_fill_maxinsns7,
10718 },
10719 { /* Mainly checking JIT here. */
10720 "BPF_MAXINSNS: Jump heavy test",
10721 { },
10722 CLASSIC | FLAG_NO_DATA,
10723 { },
10724 { { 0, 0xffffffff } },
10725 .fill_helper = bpf_fill_maxinsns8,
10726 },
10727 { /* Mainly checking JIT here. */
10728 "BPF_MAXINSNS: Very long jump backwards",
10729 { },
10730 INTERNAL | FLAG_NO_DATA,
10731 { },
10732 { { 0, 0xcbababab } },
10733 .fill_helper = bpf_fill_maxinsns9,
10734 },
10735 { /* Mainly checking JIT here. */
10736 "BPF_MAXINSNS: Edge hopping nuthouse",
10737 { },
10738 INTERNAL | FLAG_NO_DATA,
10739 { },
10740 { { 0, 0xabababac } },
10741 .fill_helper = bpf_fill_maxinsns10,
10742 },
10743 {
10744 "BPF_MAXINSNS: Jump, gap, jump, ...",
10745 { },
10746 CLASSIC | FLAG_NO_DATA,
10747 { },
10748 { { 0, 0xababcbac } },
10749 .fill_helper = bpf_fill_maxinsns11,
10750 },
10751 {
10752 "BPF_MAXINSNS: jump over MSH",
10753 { },
10754 CLASSIC | FLAG_EXPECTED_FAIL,
10755 { 0xfa, 0xfb, 0xfc, 0xfd, },
10756 { { 4, 0xabababab } },
10757 .fill_helper = bpf_fill_maxinsns12,
10758 .expected_errcode = -EINVAL,
10759 },
10760 {
10761 "BPF_MAXINSNS: exec all MSH",
10762 { },
10763 CLASSIC,
10764 { 0xfa, 0xfb, 0xfc, 0xfd, },
10765 { { 4, 0xababab83 } },
10766 .fill_helper = bpf_fill_maxinsns13,
10767 },
10768 {
10769 "BPF_MAXINSNS: ld_abs+get_processor_id",
10770 { },
10771 CLASSIC,
10772 { },
10773 { { 1, 0xbee } },
10774 .fill_helper = bpf_fill_ld_abs_get_processor_id,
10775 },
10776 /*
10777 * LD_IND / LD_ABS on fragmented SKBs
10778 */
10779 {
10780 "LD_IND byte frag",
10781 .u.insns = {
10782 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10783 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
10784 BPF_STMT(BPF_RET | BPF_A, 0x0),
10785 },
10786 CLASSIC | FLAG_SKB_FRAG,
10787 { },
10788 { {0x40, 0x42} },
10789 .frag_data = {
10790 0x42, 0x00, 0x00, 0x00,
10791 0x43, 0x44, 0x00, 0x00,
10792 0x21, 0x07, 0x19, 0x83,
10793 },
10794 },
10795 {
10796 "LD_IND halfword frag",
10797 .u.insns = {
10798 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10799 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
10800 BPF_STMT(BPF_RET | BPF_A, 0x0),
10801 },
10802 CLASSIC | FLAG_SKB_FRAG,
10803 { },
10804 { {0x40, 0x4344} },
10805 .frag_data = {
10806 0x42, 0x00, 0x00, 0x00,
10807 0x43, 0x44, 0x00, 0x00,
10808 0x21, 0x07, 0x19, 0x83,
10809 },
10810 },
10811 {
10812 "LD_IND word frag",
10813 .u.insns = {
10814 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10815 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
10816 BPF_STMT(BPF_RET | BPF_A, 0x0),
10817 },
10818 CLASSIC | FLAG_SKB_FRAG,
10819 { },
10820 { {0x40, 0x21071983} },
10821 .frag_data = {
10822 0x42, 0x00, 0x00, 0x00,
10823 0x43, 0x44, 0x00, 0x00,
10824 0x21, 0x07, 0x19, 0x83,
10825 },
10826 },
10827 {
10828 "LD_IND halfword mixed head/frag",
10829 .u.insns = {
10830 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10831 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10832 BPF_STMT(BPF_RET | BPF_A, 0x0),
10833 },
10834 CLASSIC | FLAG_SKB_FRAG,
10835 { [0x3e] = 0x25, [0x3f] = 0x05, },
10836 { {0x40, 0x0519} },
10837 .frag_data = { 0x19, 0x82 },
10838 },
10839 {
10840 "LD_IND word mixed head/frag",
10841 .u.insns = {
10842 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10843 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10844 BPF_STMT(BPF_RET | BPF_A, 0x0),
10845 },
10846 CLASSIC | FLAG_SKB_FRAG,
10847 { [0x3e] = 0x25, [0x3f] = 0x05, },
10848 { {0x40, 0x25051982} },
10849 .frag_data = { 0x19, 0x82 },
10850 },
10851 {
10852 "LD_ABS byte frag",
10853 .u.insns = {
10854 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
10855 BPF_STMT(BPF_RET | BPF_A, 0x0),
10856 },
10857 CLASSIC | FLAG_SKB_FRAG,
10858 { },
10859 { {0x40, 0x42} },
10860 .frag_data = {
10861 0x42, 0x00, 0x00, 0x00,
10862 0x43, 0x44, 0x00, 0x00,
10863 0x21, 0x07, 0x19, 0x83,
10864 },
10865 },
10866 {
10867 "LD_ABS halfword frag",
10868 .u.insns = {
10869 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
10870 BPF_STMT(BPF_RET | BPF_A, 0x0),
10871 },
10872 CLASSIC | FLAG_SKB_FRAG,
10873 { },
10874 { {0x40, 0x4344} },
10875 .frag_data = {
10876 0x42, 0x00, 0x00, 0x00,
10877 0x43, 0x44, 0x00, 0x00,
10878 0x21, 0x07, 0x19, 0x83,
10879 },
10880 },
10881 {
10882 "LD_ABS word frag",
10883 .u.insns = {
10884 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
10885 BPF_STMT(BPF_RET | BPF_A, 0x0),
10886 },
10887 CLASSIC | FLAG_SKB_FRAG,
10888 { },
10889 { {0x40, 0x21071983} },
10890 .frag_data = {
10891 0x42, 0x00, 0x00, 0x00,
10892 0x43, 0x44, 0x00, 0x00,
10893 0x21, 0x07, 0x19, 0x83,
10894 },
10895 },
10896 {
10897 "LD_ABS halfword mixed head/frag",
10898 .u.insns = {
10899 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10900 BPF_STMT(BPF_RET | BPF_A, 0x0),
10901 },
10902 CLASSIC | FLAG_SKB_FRAG,
10903 { [0x3e] = 0x25, [0x3f] = 0x05, },
10904 { {0x40, 0x0519} },
10905 .frag_data = { 0x19, 0x82 },
10906 },
10907 {
10908 "LD_ABS word mixed head/frag",
10909 .u.insns = {
10910 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
10911 BPF_STMT(BPF_RET | BPF_A, 0x0),
10912 },
10913 CLASSIC | FLAG_SKB_FRAG,
10914 { [0x3e] = 0x25, [0x3f] = 0x05, },
10915 { {0x40, 0x25051982} },
10916 .frag_data = { 0x19, 0x82 },
10917 },
10918 /*
10919 * LD_IND / LD_ABS on non fragmented SKBs
10920 */
10921 {
10922 /*
10923 * this tests that the JIT/interpreter correctly resets X
10924 * before using it in an LD_IND instruction.
10925 */
10926 "LD_IND byte default X",
10927 .u.insns = {
10928 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10929 BPF_STMT(BPF_RET | BPF_A, 0x0),
10930 },
10931 CLASSIC,
10932 { [0x1] = 0x42 },
10933 { {0x40, 0x42 } },
10934 },
10935 {
10936 "LD_IND byte positive offset",
10937 .u.insns = {
10938 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10939 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10940 BPF_STMT(BPF_RET | BPF_A, 0x0),
10941 },
10942 CLASSIC,
10943 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10944 { {0x40, 0x82 } },
10945 },
10946 {
10947 "LD_IND byte negative offset",
10948 .u.insns = {
10949 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10950 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
10951 BPF_STMT(BPF_RET | BPF_A, 0x0),
10952 },
10953 CLASSIC,
10954 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10955 { {0x40, 0x05 } },
10956 },
10957 {
10958 "LD_IND byte positive offset, all ff",
10959 .u.insns = {
10960 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10961 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10962 BPF_STMT(BPF_RET | BPF_A, 0x0),
10963 },
10964 CLASSIC,
10965 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10966 { {0x40, 0xff } },
10967 },
10968 {
10969 "LD_IND byte positive offset, out of bounds",
10970 .u.insns = {
10971 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10972 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10973 BPF_STMT(BPF_RET | BPF_A, 0x0),
10974 },
10975 CLASSIC,
10976 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10977 { {0x3f, 0 }, },
10978 },
10979 {
10980 "LD_IND byte negative offset, out of bounds",
10981 .u.insns = {
10982 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10983 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
10984 BPF_STMT(BPF_RET | BPF_A, 0x0),
10985 },
10986 CLASSIC,
10987 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10988 { {0x3f, 0 } },
10989 },
10990 {
10991 "LD_IND byte negative offset, multiple calls",
10992 .u.insns = {
10993 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10994 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
10995 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
10996 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
10997 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
10998 BPF_STMT(BPF_RET | BPF_A, 0x0),
10999 },
11000 CLASSIC,
11001 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11002 { {0x40, 0x82 }, },
11003 },
11004 {
11005 "LD_IND halfword positive offset",
11006 .u.insns = {
11007 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11008 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
11009 BPF_STMT(BPF_RET | BPF_A, 0x0),
11010 },
11011 CLASSIC,
11012 {
11013 [0x1c] = 0xaa, [0x1d] = 0x55,
11014 [0x1e] = 0xbb, [0x1f] = 0x66,
11015 [0x20] = 0xcc, [0x21] = 0x77,
11016 [0x22] = 0xdd, [0x23] = 0x88,
11017 },
11018 { {0x40, 0xdd88 } },
11019 },
11020 {
11021 "LD_IND halfword negative offset",
11022 .u.insns = {
11023 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11024 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
11025 BPF_STMT(BPF_RET | BPF_A, 0x0),
11026 },
11027 CLASSIC,
11028 {
11029 [0x1c] = 0xaa, [0x1d] = 0x55,
11030 [0x1e] = 0xbb, [0x1f] = 0x66,
11031 [0x20] = 0xcc, [0x21] = 0x77,
11032 [0x22] = 0xdd, [0x23] = 0x88,
11033 },
11034 { {0x40, 0xbb66 } },
11035 },
11036 {
11037 "LD_IND halfword unaligned",
11038 .u.insns = {
11039 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11040 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
11041 BPF_STMT(BPF_RET | BPF_A, 0x0),
11042 },
11043 CLASSIC,
11044 {
11045 [0x1c] = 0xaa, [0x1d] = 0x55,
11046 [0x1e] = 0xbb, [0x1f] = 0x66,
11047 [0x20] = 0xcc, [0x21] = 0x77,
11048 [0x22] = 0xdd, [0x23] = 0x88,
11049 },
11050 { {0x40, 0x66cc } },
11051 },
11052 {
11053 "LD_IND halfword positive offset, all ff",
11054 .u.insns = {
11055 BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
11056 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
11057 BPF_STMT(BPF_RET | BPF_A, 0x0),
11058 },
11059 CLASSIC,
11060 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11061 { {0x40, 0xffff } },
11062 },
11063 {
11064 "LD_IND halfword positive offset, out of bounds",
11065 .u.insns = {
11066 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11067 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
11068 BPF_STMT(BPF_RET | BPF_A, 0x0),
11069 },
11070 CLASSIC,
11071 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11072 { {0x3f, 0 }, },
11073 },
11074 {
11075 "LD_IND halfword negative offset, out of bounds",
11076 .u.insns = {
11077 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11078 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
11079 BPF_STMT(BPF_RET | BPF_A, 0x0),
11080 },
11081 CLASSIC,
11082 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11083 { {0x3f, 0 } },
11084 },
11085 {
11086 "LD_IND word positive offset",
11087 .u.insns = {
11088 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11089 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
11090 BPF_STMT(BPF_RET | BPF_A, 0x0),
11091 },
11092 CLASSIC,
11093 {
11094 [0x1c] = 0xaa, [0x1d] = 0x55,
11095 [0x1e] = 0xbb, [0x1f] = 0x66,
11096 [0x20] = 0xcc, [0x21] = 0x77,
11097 [0x22] = 0xdd, [0x23] = 0x88,
11098 [0x24] = 0xee, [0x25] = 0x99,
11099 [0x26] = 0xff, [0x27] = 0xaa,
11100 },
11101 { {0x40, 0xee99ffaa } },
11102 },
11103 {
11104 "LD_IND word negative offset",
11105 .u.insns = {
11106 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11107 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
11108 BPF_STMT(BPF_RET | BPF_A, 0x0),
11109 },
11110 CLASSIC,
11111 {
11112 [0x1c] = 0xaa, [0x1d] = 0x55,
11113 [0x1e] = 0xbb, [0x1f] = 0x66,
11114 [0x20] = 0xcc, [0x21] = 0x77,
11115 [0x22] = 0xdd, [0x23] = 0x88,
11116 [0x24] = 0xee, [0x25] = 0x99,
11117 [0x26] = 0xff, [0x27] = 0xaa,
11118 },
11119 { {0x40, 0xaa55bb66 } },
11120 },
11121 {
11122 "LD_IND word unaligned (addr & 3 == 2)",
11123 .u.insns = {
11124 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11125 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
11126 BPF_STMT(BPF_RET | BPF_A, 0x0),
11127 },
11128 CLASSIC,
11129 {
11130 [0x1c] = 0xaa, [0x1d] = 0x55,
11131 [0x1e] = 0xbb, [0x1f] = 0x66,
11132 [0x20] = 0xcc, [0x21] = 0x77,
11133 [0x22] = 0xdd, [0x23] = 0x88,
11134 [0x24] = 0xee, [0x25] = 0x99,
11135 [0x26] = 0xff, [0x27] = 0xaa,
11136 },
11137 { {0x40, 0xbb66cc77 } },
11138 },
11139 {
11140 "LD_IND word unaligned (addr & 3 == 1)",
11141 .u.insns = {
11142 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11143 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
11144 BPF_STMT(BPF_RET | BPF_A, 0x0),
11145 },
11146 CLASSIC,
11147 {
11148 [0x1c] = 0xaa, [0x1d] = 0x55,
11149 [0x1e] = 0xbb, [0x1f] = 0x66,
11150 [0x20] = 0xcc, [0x21] = 0x77,
11151 [0x22] = 0xdd, [0x23] = 0x88,
11152 [0x24] = 0xee, [0x25] = 0x99,
11153 [0x26] = 0xff, [0x27] = 0xaa,
11154 },
11155 { {0x40, 0x55bb66cc } },
11156 },
11157 {
11158 "LD_IND word unaligned (addr & 3 == 3)",
11159 .u.insns = {
11160 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11161 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
11162 BPF_STMT(BPF_RET | BPF_A, 0x0),
11163 },
11164 CLASSIC,
11165 {
11166 [0x1c] = 0xaa, [0x1d] = 0x55,
11167 [0x1e] = 0xbb, [0x1f] = 0x66,
11168 [0x20] = 0xcc, [0x21] = 0x77,
11169 [0x22] = 0xdd, [0x23] = 0x88,
11170 [0x24] = 0xee, [0x25] = 0x99,
11171 [0x26] = 0xff, [0x27] = 0xaa,
11172 },
11173 { {0x40, 0x66cc77dd } },
11174 },
11175 {
11176 "LD_IND word positive offset, all ff",
11177 .u.insns = {
11178 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
11179 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
11180 BPF_STMT(BPF_RET | BPF_A, 0x0),
11181 },
11182 CLASSIC,
11183 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11184 { {0x40, 0xffffffff } },
11185 },
11186 {
11187 "LD_IND word positive offset, out of bounds",
11188 .u.insns = {
11189 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11190 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
11191 BPF_STMT(BPF_RET | BPF_A, 0x0),
11192 },
11193 CLASSIC,
11194 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11195 { {0x3f, 0 }, },
11196 },
11197 {
11198 "LD_IND word negative offset, out of bounds",
11199 .u.insns = {
11200 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11201 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
11202 BPF_STMT(BPF_RET | BPF_A, 0x0),
11203 },
11204 CLASSIC,
11205 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11206 { {0x3f, 0 } },
11207 },
11208 {
11209 "LD_ABS byte",
11210 .u.insns = {
11211 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
11212 BPF_STMT(BPF_RET | BPF_A, 0x0),
11213 },
11214 CLASSIC,
11215 {
11216 [0x1c] = 0xaa, [0x1d] = 0x55,
11217 [0x1e] = 0xbb, [0x1f] = 0x66,
11218 [0x20] = 0xcc, [0x21] = 0x77,
11219 [0x22] = 0xdd, [0x23] = 0x88,
11220 [0x24] = 0xee, [0x25] = 0x99,
11221 [0x26] = 0xff, [0x27] = 0xaa,
11222 },
11223 { {0x40, 0xcc } },
11224 },
11225 {
11226 "LD_ABS byte positive offset, all ff",
11227 .u.insns = {
11228 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
11229 BPF_STMT(BPF_RET | BPF_A, 0x0),
11230 },
11231 CLASSIC,
11232 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11233 { {0x40, 0xff } },
11234 },
11235 {
11236 "LD_ABS byte positive offset, out of bounds",
11237 .u.insns = {
11238 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
11239 BPF_STMT(BPF_RET | BPF_A, 0x0),
11240 },
11241 CLASSIC,
11242 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11243 { {0x3f, 0 }, },
11244 },
11245 {
11246 "LD_ABS byte negative offset, out of bounds load",
11247 .u.insns = {
11248 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
11249 BPF_STMT(BPF_RET | BPF_A, 0x0),
11250 },
11251 CLASSIC | FLAG_EXPECTED_FAIL,
11252 .expected_errcode = -EINVAL,
11253 },
11254 {
11255 "LD_ABS byte negative offset, in bounds",
11256 .u.insns = {
11257 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11258 BPF_STMT(BPF_RET | BPF_A, 0x0),
11259 },
11260 CLASSIC,
11261 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11262 { {0x40, 0x82 }, },
11263 },
11264 {
11265 "LD_ABS byte negative offset, out of bounds",
11266 .u.insns = {
11267 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11268 BPF_STMT(BPF_RET | BPF_A, 0x0),
11269 },
11270 CLASSIC,
11271 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11272 { {0x3f, 0 }, },
11273 },
11274 {
11275 "LD_ABS byte negative offset, multiple calls",
11276 .u.insns = {
11277 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
11278 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
11279 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
11280 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11281 BPF_STMT(BPF_RET | BPF_A, 0x0),
11282 },
11283 CLASSIC,
11284 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11285 { {0x40, 0x82 }, },
11286 },
11287 {
11288 "LD_ABS halfword",
11289 .u.insns = {
11290 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
11291 BPF_STMT(BPF_RET | BPF_A, 0x0),
11292 },
11293 CLASSIC,
11294 {
11295 [0x1c] = 0xaa, [0x1d] = 0x55,
11296 [0x1e] = 0xbb, [0x1f] = 0x66,
11297 [0x20] = 0xcc, [0x21] = 0x77,
11298 [0x22] = 0xdd, [0x23] = 0x88,
11299 [0x24] = 0xee, [0x25] = 0x99,
11300 [0x26] = 0xff, [0x27] = 0xaa,
11301 },
11302 { {0x40, 0xdd88 } },
11303 },
11304 {
11305 "LD_ABS halfword unaligned",
11306 .u.insns = {
11307 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
11308 BPF_STMT(BPF_RET | BPF_A, 0x0),
11309 },
11310 CLASSIC,
11311 {
11312 [0x1c] = 0xaa, [0x1d] = 0x55,
11313 [0x1e] = 0xbb, [0x1f] = 0x66,
11314 [0x20] = 0xcc, [0x21] = 0x77,
11315 [0x22] = 0xdd, [0x23] = 0x88,
11316 [0x24] = 0xee, [0x25] = 0x99,
11317 [0x26] = 0xff, [0x27] = 0xaa,
11318 },
11319 { {0x40, 0x99ff } },
11320 },
11321 {
11322 "LD_ABS halfword positive offset, all ff",
11323 .u.insns = {
11324 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
11325 BPF_STMT(BPF_RET | BPF_A, 0x0),
11326 },
11327 CLASSIC,
11328 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11329 { {0x40, 0xffff } },
11330 },
11331 {
11332 "LD_ABS halfword positive offset, out of bounds",
11333 .u.insns = {
11334 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
11335 BPF_STMT(BPF_RET | BPF_A, 0x0),
11336 },
11337 CLASSIC,
11338 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11339 { {0x3f, 0 }, },
11340 },
11341 {
11342 "LD_ABS halfword negative offset, out of bounds load",
11343 .u.insns = {
11344 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
11345 BPF_STMT(BPF_RET | BPF_A, 0x0),
11346 },
11347 CLASSIC | FLAG_EXPECTED_FAIL,
11348 .expected_errcode = -EINVAL,
11349 },
11350 {
11351 "LD_ABS halfword negative offset, in bounds",
11352 .u.insns = {
11353 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
11354 BPF_STMT(BPF_RET | BPF_A, 0x0),
11355 },
11356 CLASSIC,
11357 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11358 { {0x40, 0x1982 }, },
11359 },
11360 {
11361 "LD_ABS halfword negative offset, out of bounds",
11362 .u.insns = {
11363 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
11364 BPF_STMT(BPF_RET | BPF_A, 0x0),
11365 },
11366 CLASSIC,
11367 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11368 { {0x3f, 0 }, },
11369 },
11370 {
11371 "LD_ABS word",
11372 .u.insns = {
11373 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
11374 BPF_STMT(BPF_RET | BPF_A, 0x0),
11375 },
11376 CLASSIC,
11377 {
11378 [0x1c] = 0xaa, [0x1d] = 0x55,
11379 [0x1e] = 0xbb, [0x1f] = 0x66,
11380 [0x20] = 0xcc, [0x21] = 0x77,
11381 [0x22] = 0xdd, [0x23] = 0x88,
11382 [0x24] = 0xee, [0x25] = 0x99,
11383 [0x26] = 0xff, [0x27] = 0xaa,
11384 },
11385 { {0x40, 0xaa55bb66 } },
11386 },
11387 {
11388 "LD_ABS word unaligned (addr & 3 == 2)",
11389 .u.insns = {
11390 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
11391 BPF_STMT(BPF_RET | BPF_A, 0x0),
11392 },
11393 CLASSIC,
11394 {
11395 [0x1c] = 0xaa, [0x1d] = 0x55,
11396 [0x1e] = 0xbb, [0x1f] = 0x66,
11397 [0x20] = 0xcc, [0x21] = 0x77,
11398 [0x22] = 0xdd, [0x23] = 0x88,
11399 [0x24] = 0xee, [0x25] = 0x99,
11400 [0x26] = 0xff, [0x27] = 0xaa,
11401 },
11402 { {0x40, 0xdd88ee99 } },
11403 },
11404 {
11405 "LD_ABS word unaligned (addr & 3 == 1)",
11406 .u.insns = {
11407 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
11408 BPF_STMT(BPF_RET | BPF_A, 0x0),
11409 },
11410 CLASSIC,
11411 {
11412 [0x1c] = 0xaa, [0x1d] = 0x55,
11413 [0x1e] = 0xbb, [0x1f] = 0x66,
11414 [0x20] = 0xcc, [0x21] = 0x77,
11415 [0x22] = 0xdd, [0x23] = 0x88,
11416 [0x24] = 0xee, [0x25] = 0x99,
11417 [0x26] = 0xff, [0x27] = 0xaa,
11418 },
11419 { {0x40, 0x77dd88ee } },
11420 },
11421 {
11422 "LD_ABS word unaligned (addr & 3 == 3)",
11423 .u.insns = {
11424 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
11425 BPF_STMT(BPF_RET | BPF_A, 0x0),
11426 },
11427 CLASSIC,
11428 {
11429 [0x1c] = 0xaa, [0x1d] = 0x55,
11430 [0x1e] = 0xbb, [0x1f] = 0x66,
11431 [0x20] = 0xcc, [0x21] = 0x77,
11432 [0x22] = 0xdd, [0x23] = 0x88,
11433 [0x24] = 0xee, [0x25] = 0x99,
11434 [0x26] = 0xff, [0x27] = 0xaa,
11435 },
11436 { {0x40, 0x88ee99ff } },
11437 },
11438 {
11439 "LD_ABS word positive offset, all ff",
11440 .u.insns = {
11441 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
11442 BPF_STMT(BPF_RET | BPF_A, 0x0),
11443 },
11444 CLASSIC,
11445 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11446 { {0x40, 0xffffffff } },
11447 },
11448 {
11449 "LD_ABS word positive offset, out of bounds",
11450 .u.insns = {
11451 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
11452 BPF_STMT(BPF_RET | BPF_A, 0x0),
11453 },
11454 CLASSIC,
11455 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11456 { {0x3f, 0 }, },
11457 },
11458 {
11459 "LD_ABS word negative offset, out of bounds load",
11460 .u.insns = {
11461 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
11462 BPF_STMT(BPF_RET | BPF_A, 0x0),
11463 },
11464 CLASSIC | FLAG_EXPECTED_FAIL,
11465 .expected_errcode = -EINVAL,
11466 },
11467 {
11468 "LD_ABS word negative offset, in bounds",
11469 .u.insns = {
11470 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
11471 BPF_STMT(BPF_RET | BPF_A, 0x0),
11472 },
11473 CLASSIC,
11474 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11475 { {0x40, 0x25051982 }, },
11476 },
11477 {
11478 "LD_ABS word negative offset, out of bounds",
11479 .u.insns = {
11480 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
11481 BPF_STMT(BPF_RET | BPF_A, 0x0),
11482 },
11483 CLASSIC,
11484 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11485 { {0x3f, 0 }, },
11486 },
11487 {
11488 "LDX_MSH standalone, preserved A",
11489 .u.insns = {
11490 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11491 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11492 BPF_STMT(BPF_RET | BPF_A, 0x0),
11493 },
11494 CLASSIC,
11495 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11496 { {0x40, 0xffeebbaa }, },
11497 },
11498 {
11499 "LDX_MSH standalone, preserved A 2",
11500 .u.insns = {
11501 BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
11502 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11503 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
11504 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
11505 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
11506 BPF_STMT(BPF_RET | BPF_A, 0x0),
11507 },
11508 CLASSIC,
11509 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11510 { {0x40, 0x175e9d63 }, },
11511 },
11512 {
11513 "LDX_MSH standalone, test result 1",
11514 .u.insns = {
11515 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11516 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11517 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11518 BPF_STMT(BPF_RET | BPF_A, 0x0),
11519 },
11520 CLASSIC,
11521 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11522 { {0x40, 0x14 }, },
11523 },
11524 {
11525 "LDX_MSH standalone, test result 2",
11526 .u.insns = {
11527 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11528 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
11529 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11530 BPF_STMT(BPF_RET | BPF_A, 0x0),
11531 },
11532 CLASSIC,
11533 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11534 { {0x40, 0x24 }, },
11535 },
11536 {
11537 "LDX_MSH standalone, negative offset",
11538 .u.insns = {
11539 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11540 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
11541 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11542 BPF_STMT(BPF_RET | BPF_A, 0x0),
11543 },
11544 CLASSIC,
11545 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11546 { {0x40, 0 }, },
11547 },
11548 {
11549 "LDX_MSH standalone, negative offset 2",
11550 .u.insns = {
11551 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11552 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
11553 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11554 BPF_STMT(BPF_RET | BPF_A, 0x0),
11555 },
11556 CLASSIC,
11557 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11558 { {0x40, 0x24 }, },
11559 },
11560 {
11561 "LDX_MSH standalone, out of bounds",
11562 .u.insns = {
11563 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11564 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
11565 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11566 BPF_STMT(BPF_RET | BPF_A, 0x0),
11567 },
11568 CLASSIC,
11569 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11570 { {0x40, 0 }, },
11571 },
11572 /*
11573 * verify that the interpreter or JIT correctly sets A and X
11574 * to 0.
11575 */
11576 {
11577 "ADD default X",
11578 .u.insns = {
11579 /*
11580 * A = 0x42
11581 * A = A + X
11582 * ret A
11583 */
11584 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11585 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
11586 BPF_STMT(BPF_RET | BPF_A, 0x0),
11587 },
11588 CLASSIC | FLAG_NO_DATA,
11589 {},
11590 { {0x1, 0x42 } },
11591 },
11592 {
11593 "ADD default A",
11594 .u.insns = {
11595 /*
11596 * A = A + 0x42
11597 * ret A
11598 */
11599 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
11600 BPF_STMT(BPF_RET | BPF_A, 0x0),
11601 },
11602 CLASSIC | FLAG_NO_DATA,
11603 {},
11604 { {0x1, 0x42 } },
11605 },
11606 {
11607 "SUB default X",
11608 .u.insns = {
11609 /*
11610 * A = 0x66
11611 * A = A - X
11612 * ret A
11613 */
11614 BPF_STMT(BPF_LD | BPF_IMM, 0x66),
11615 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
11616 BPF_STMT(BPF_RET | BPF_A, 0x0),
11617 },
11618 CLASSIC | FLAG_NO_DATA,
11619 {},
11620 { {0x1, 0x66 } },
11621 },
11622 {
11623 "SUB default A",
11624 .u.insns = {
11625 /*
11626 * A = A - -0x66
11627 * ret A
11628 */
11629 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
11630 BPF_STMT(BPF_RET | BPF_A, 0x0),
11631 },
11632 CLASSIC | FLAG_NO_DATA,
11633 {},
11634 { {0x1, 0x66 } },
11635 },
11636 {
11637 "MUL default X",
11638 .u.insns = {
11639 /*
11640 * A = 0x42
11641 * A = A * X
11642 * ret A
11643 */
11644 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11645 BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
11646 BPF_STMT(BPF_RET | BPF_A, 0x0),
11647 },
11648 CLASSIC | FLAG_NO_DATA,
11649 {},
11650 { {0x1, 0x0 } },
11651 },
11652 {
11653 "MUL default A",
11654 .u.insns = {
11655 /*
11656 * A = A * 0x66
11657 * ret A
11658 */
11659 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
11660 BPF_STMT(BPF_RET | BPF_A, 0x0),
11661 },
11662 CLASSIC | FLAG_NO_DATA,
11663 {},
11664 { {0x1, 0x0 } },
11665 },
11666 {
11667 "DIV default X",
11668 .u.insns = {
11669 /*
11670 * A = 0x42
11671 * A = A / X ; this halt the filter execution if X is 0
11672 * ret 0x42
11673 */
11674 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11675 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
11676 BPF_STMT(BPF_RET | BPF_K, 0x42),
11677 },
11678 CLASSIC | FLAG_NO_DATA,
11679 {},
11680 { {0x1, 0x0 } },
11681 },
11682 {
11683 "DIV default A",
11684 .u.insns = {
11685 /*
11686 * A = A / 1
11687 * ret A
11688 */
11689 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
11690 BPF_STMT(BPF_RET | BPF_A, 0x0),
11691 },
11692 CLASSIC | FLAG_NO_DATA,
11693 {},
11694 { {0x1, 0x0 } },
11695 },
11696 {
11697 "MOD default X",
11698 .u.insns = {
11699 /*
11700 * A = 0x42
11701 * A = A mod X ; this halt the filter execution if X is 0
11702 * ret 0x42
11703 */
11704 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11705 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
11706 BPF_STMT(BPF_RET | BPF_K, 0x42),
11707 },
11708 CLASSIC | FLAG_NO_DATA,
11709 {},
11710 { {0x1, 0x0 } },
11711 },
11712 {
11713 "MOD default A",
11714 .u.insns = {
11715 /*
11716 * A = A mod 1
11717 * ret A
11718 */
11719 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
11720 BPF_STMT(BPF_RET | BPF_A, 0x0),
11721 },
11722 CLASSIC | FLAG_NO_DATA,
11723 {},
11724 { {0x1, 0x0 } },
11725 },
11726 {
11727 "JMP EQ default A",
11728 .u.insns = {
11729 /*
11730 * cmp A, 0x0, 0, 1
11731 * ret 0x42
11732 * ret 0x66
11733 */
11734 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
11735 BPF_STMT(BPF_RET | BPF_K, 0x42),
11736 BPF_STMT(BPF_RET | BPF_K, 0x66),
11737 },
11738 CLASSIC | FLAG_NO_DATA,
11739 {},
11740 { {0x1, 0x42 } },
11741 },
11742 {
11743 "JMP EQ default X",
11744 .u.insns = {
11745 /*
11746 * A = 0x0
11747 * cmp A, X, 0, 1
11748 * ret 0x42
11749 * ret 0x66
11750 */
11751 BPF_STMT(BPF_LD | BPF_IMM, 0x0),
11752 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
11753 BPF_STMT(BPF_RET | BPF_K, 0x42),
11754 BPF_STMT(BPF_RET | BPF_K, 0x66),
11755 },
11756 CLASSIC | FLAG_NO_DATA,
11757 {},
11758 { {0x1, 0x42 } },
11759 },
11760 /* Checking interpreter vs JIT wrt signed extended imms. */
11761 {
11762 "JNE signed compare, test 1",
11763 .u.insns_int = {
11764 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11765 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11766 BPF_MOV64_REG(R2, R1),
11767 BPF_ALU64_REG(BPF_AND, R2, R3),
11768 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11769 BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
11770 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11771 BPF_EXIT_INSN(),
11772 },
11773 INTERNAL,
11774 { },
11775 { { 0, 1 } },
11776 },
11777 {
11778 "JNE signed compare, test 2",
11779 .u.insns_int = {
11780 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11781 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11782 BPF_MOV64_REG(R2, R1),
11783 BPF_ALU64_REG(BPF_AND, R2, R3),
11784 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11785 BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
11786 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11787 BPF_EXIT_INSN(),
11788 },
11789 INTERNAL,
11790 { },
11791 { { 0, 1 } },
11792 },
11793 {
11794 "JNE signed compare, test 3",
11795 .u.insns_int = {
11796 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11797 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11798 BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
11799 BPF_MOV64_REG(R2, R1),
11800 BPF_ALU64_REG(BPF_AND, R2, R3),
11801 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11802 BPF_JMP_REG(BPF_JNE, R2, R4, 1),
11803 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11804 BPF_EXIT_INSN(),
11805 },
11806 INTERNAL,
11807 { },
11808 { { 0, 2 } },
11809 },
11810 {
11811 "JNE signed compare, test 4",
11812 .u.insns_int = {
11813 BPF_LD_IMM64(R1, -17104896),
11814 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11815 BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
11816 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11817 BPF_EXIT_INSN(),
11818 },
11819 INTERNAL,
11820 { },
11821 { { 0, 2 } },
11822 },
11823 {
11824 "JNE signed compare, test 5",
11825 .u.insns_int = {
11826 BPF_LD_IMM64(R1, 0xfefb0000),
11827 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11828 BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
11829 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11830 BPF_EXIT_INSN(),
11831 },
11832 INTERNAL,
11833 { },
11834 { { 0, 1 } },
11835 },
11836 {
11837 "JNE signed compare, test 6",
11838 .u.insns_int = {
11839 BPF_LD_IMM64(R1, 0x7efb0000),
11840 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11841 BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
11842 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11843 BPF_EXIT_INSN(),
11844 },
11845 INTERNAL,
11846 { },
11847 { { 0, 2 } },
11848 },
11849 {
11850 "JNE signed compare, test 7",
11851 .u.insns = {
11852 BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
11853 BPF_STMT(BPF_MISC | BPF_TAX, 0),
11854 BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
11855 BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
11856 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
11857 BPF_STMT(BPF_RET | BPF_K, 1),
11858 BPF_STMT(BPF_RET | BPF_K, 2),
11859 },
11860 CLASSIC | FLAG_NO_DATA,
11861 {},
11862 { { 0, 2 } },
11863 },
11864 /* BPF_LDX_MEM with operand aliasing */
11865 {
11866 "LDX_MEM_B: operand register aliasing",
11867 .u.insns_int = {
11868 BPF_ST_MEM(BPF_B, R10, -8, 123),
11869 BPF_MOV64_REG(R0, R10),
11870 BPF_LDX_MEM(BPF_B, R0, R0, -8),
11871 BPF_EXIT_INSN(),
11872 },
11873 INTERNAL,
11874 { },
11875 { { 0, 123 } },
11876 .stack_depth = 8,
11877 },
11878 {
11879 "LDX_MEM_H: operand register aliasing",
11880 .u.insns_int = {
11881 BPF_ST_MEM(BPF_H, R10, -8, 12345),
11882 BPF_MOV64_REG(R0, R10),
11883 BPF_LDX_MEM(BPF_H, R0, R0, -8),
11884 BPF_EXIT_INSN(),
11885 },
11886 INTERNAL,
11887 { },
11888 { { 0, 12345 } },
11889 .stack_depth = 8,
11890 },
11891 {
11892 "LDX_MEM_W: operand register aliasing",
11893 .u.insns_int = {
11894 BPF_ST_MEM(BPF_W, R10, -8, 123456789),
11895 BPF_MOV64_REG(R0, R10),
11896 BPF_LDX_MEM(BPF_W, R0, R0, -8),
11897 BPF_EXIT_INSN(),
11898 },
11899 INTERNAL,
11900 { },
11901 { { 0, 123456789 } },
11902 .stack_depth = 8,
11903 },
11904 {
11905 "LDX_MEM_DW: operand register aliasing",
11906 .u.insns_int = {
11907 BPF_LD_IMM64(R1, 0x123456789abcdefULL),
11908 BPF_STX_MEM(BPF_DW, R10, R1, -8),
11909 BPF_MOV64_REG(R0, R10),
11910 BPF_LDX_MEM(BPF_DW, R0, R0, -8),
11911 BPF_ALU64_REG(BPF_SUB, R0, R1),
11912 BPF_MOV64_REG(R1, R0),
11913 BPF_ALU64_IMM(BPF_RSH, R1, 32),
11914 BPF_ALU64_REG(BPF_OR, R0, R1),
11915 BPF_EXIT_INSN(),
11916 },
11917 INTERNAL,
11918 { },
11919 { { 0, 0 } },
11920 .stack_depth = 8,
11921 },
11922 /*
11923 * Register (non-)clobbering tests for the case where a JIT implements
11924 * complex ALU or ATOMIC operations via function calls. If so, the
11925 * function call must be transparent to the eBPF registers. The JIT
11926 * must therefore save and restore relevant registers across the call.
11927 * The following tests check that the eBPF registers retain their
11928 * values after such an operation. Mainly intended for complex ALU
11929 * and atomic operation, but we run it for all. You never know...
11930 *
11931 * Note that each operations should be tested twice with different
11932 * destinations, to check preservation for all registers.
11933 */
/*
 * Expand to one "no clobbering" test case for ALU operation 'alu'/'op'.
 *
 * Trick: BPF_ALU64_IMM(BPF_MOV, Rn, Rn) uses the register macro as the
 * *immediate*, i.e. Rn = n (R0 == BPF_REG_0 == 0, ... R9 == 9). So each
 * register is seeded with its own index. After the operation under test,
 * BPF_ALU32_IMM(BPF_MOV, dst, dst) reloads dst with its index so the
 * common check sequence below also holds for the destination register.
 * Each BPF_JMP_IMM(BPF_JNE, Rn, Rn, k) then compares register n against
 * immediate n, jumping past the success exit (returning 0) on mismatch.
 */
#define BPF_TEST_CLOBBER_ALU(alu, op, dst, src)			\
	{							\
		#alu "_" #op " to " #dst ": no clobbering",	\
		.u.insns_int = {				\
			BPF_ALU64_IMM(BPF_MOV, R0, R0),		\
			BPF_ALU64_IMM(BPF_MOV, R1, R1),		\
			BPF_ALU64_IMM(BPF_MOV, R2, R2),		\
			BPF_ALU64_IMM(BPF_MOV, R3, R3),		\
			BPF_ALU64_IMM(BPF_MOV, R4, R4),		\
			BPF_ALU64_IMM(BPF_MOV, R5, R5),		\
			BPF_ALU64_IMM(BPF_MOV, R6, R6),		\
			BPF_ALU64_IMM(BPF_MOV, R7, R7),		\
			BPF_ALU64_IMM(BPF_MOV, R8, R8),		\
			BPF_ALU64_IMM(BPF_MOV, R9, R9),		\
			BPF_##alu(BPF_ ##op, dst, src),		\
			BPF_ALU32_IMM(BPF_MOV, dst, dst),	\
			BPF_JMP_IMM(BPF_JNE, R0, R0, 10),	\
			BPF_JMP_IMM(BPF_JNE, R1, R1, 9),	\
			BPF_JMP_IMM(BPF_JNE, R2, R2, 8),	\
			BPF_JMP_IMM(BPF_JNE, R3, R3, 7),	\
			BPF_JMP_IMM(BPF_JNE, R4, R4, 6),	\
			BPF_JMP_IMM(BPF_JNE, R5, R5, 5),	\
			BPF_JMP_IMM(BPF_JNE, R6, R6, 4),	\
			BPF_JMP_IMM(BPF_JNE, R7, R7, 3),	\
			BPF_JMP_IMM(BPF_JNE, R8, R8, 2),	\
			BPF_JMP_IMM(BPF_JNE, R9, R9, 1),	\
			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
			BPF_EXIT_INSN(),			\
		},						\
		INTERNAL,					\
		{ },						\
		{ { 0, 1 } }					\
	}
11967 /* ALU64 operations, register clobbering */
11968 BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
11969 BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
11970 BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
11971 BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
11972 BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
11973 BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
11974 BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
11975 BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
11976 BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
11977 BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
11978 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
11979 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
11980 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
11981 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
11982 BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
11983 BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
11984 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
11985 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
11986 BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
11987 BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
11988 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
11989 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
11990 /* ALU32 immediate operations, register clobbering */
11991 BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
11992 BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
11993 BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
11994 BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
11995 BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
11996 BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
11997 BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
11998 BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
11999 BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
12000 BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
12001 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
12002 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
12003 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
12004 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
12005 BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
12006 BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
12007 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
12008 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
12009 BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
12010 BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
12011 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
12012 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
12013 /* ALU64 register operations, register clobbering */
12014 BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
12015 BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
12016 BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
12017 BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
12018 BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
12019 BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
12020 BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
12021 BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
12022 BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
12023 BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
12024 BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
12025 BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
12026 BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
12027 BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
12028 BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
12029 BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
12030 BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
12031 BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
12032 BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
12033 BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
12034 BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
12035 BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
12036 /* ALU32 register operations, register clobbering */
12037 BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
12038 BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
12039 BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
12040 BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
12041 BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
12042 BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
12043 BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
12044 BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
12045 BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
12046 BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
12047 BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
12048 BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
12049 BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
12050 BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
12051 BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
12052 BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
12053 BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
12054 BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
12055 BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
12056 BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
12057 BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
12058 BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
12059 #undef BPF_TEST_CLOBBER_ALU
/*
 * Expand to one "no clobbering" test case for atomic operation 'op' at
 * operand size 'width'. R0-R9 are seeded with known values, the atomic
 * operation runs on a stack slot, and every register is then checked.
 *
 * The stack slot's initial value is chosen so that operations that write
 * back into a register still leave it holding its expected value:
 *  - BPF_CMPXCHG writes the old memory value into R0; memory starts at 0
 *    so the R0 == 0 check still passes.
 *  - BPF_FETCH variants write the old memory value into the src register
 *    R1; memory starts at 1 so the R1 == 1 check still passes.
 *  - Plain (non-fetch) operations write no register; 0 is fine.
 */
#define BPF_TEST_CLOBBER_ATOMIC(width, op)			\
	{							\
		"Atomic_" #width " " #op ": no clobbering",	\
		.u.insns_int = {				\
			BPF_ALU64_IMM(BPF_MOV, R0, 0),		\
			BPF_ALU64_IMM(BPF_MOV, R1, 1),		\
			BPF_ALU64_IMM(BPF_MOV, R2, 2),		\
			BPF_ALU64_IMM(BPF_MOV, R3, 3),		\
			BPF_ALU64_IMM(BPF_MOV, R4, 4),		\
			BPF_ALU64_IMM(BPF_MOV, R5, 5),		\
			BPF_ALU64_IMM(BPF_MOV, R6, 6),		\
			BPF_ALU64_IMM(BPF_MOV, R7, 7),		\
			BPF_ALU64_IMM(BPF_MOV, R8, 8),		\
			BPF_ALU64_IMM(BPF_MOV, R9, 9),		\
			BPF_ST_MEM(width, R10, -8,		\
				   (op) == BPF_CMPXCHG ? 0 :	\
				   (op) & BPF_FETCH ? 1 : 0),	\
			BPF_ATOMIC_OP(width, op, R10, R1, -8),	\
			BPF_JMP_IMM(BPF_JNE, R0, 0, 10),	\
			BPF_JMP_IMM(BPF_JNE, R1, 1, 9),		\
			BPF_JMP_IMM(BPF_JNE, R2, 2, 8),		\
			BPF_JMP_IMM(BPF_JNE, R3, 3, 7),		\
			BPF_JMP_IMM(BPF_JNE, R4, 4, 6),		\
			BPF_JMP_IMM(BPF_JNE, R5, 5, 5),		\
			BPF_JMP_IMM(BPF_JNE, R6, 6, 4),		\
			BPF_JMP_IMM(BPF_JNE, R7, 7, 3),		\
			BPF_JMP_IMM(BPF_JNE, R8, 8, 2),		\
			BPF_JMP_IMM(BPF_JNE, R9, 9, 1),		\
			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
			BPF_EXIT_INSN(),			\
		},						\
		INTERNAL,					\
		{ },						\
		{ { 0, 1 } },					\
		.stack_depth = 8,				\
	}
12096 /* 64-bit atomic operations, register clobbering */
12097 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
12098 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
12099 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
12100 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
12101 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
12102 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
12103 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
12104 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
12105 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
12106 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
12107 /* 32-bit atomic operations, register clobbering */
12108 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
12109 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
12110 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
12111 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
12112 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
12113 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
12114 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
12115 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
12116 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
12117 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
12118 #undef BPF_TEST_CLOBBER_ATOMIC
12119 /* Checking that ALU32 src is not zero extended in place */
/*
 * Expand to one test verifying that a 32-bit ALU operation zero-extends
 * only its destination, never its source register. R1 (src) is loaded
 * with a 64-bit value, R2 is the destination of the 32-bit op, and the
 * test then folds (R1's current value - original R1) to 32 bits: a
 * nonzero result means the upper half of the src register was damaged.
 */
#define BPF_ALU32_SRC_ZEXT(op)					\
	{							\
		"ALU32_" #op "_X: src preserved in zext",	\
		.u.insns_int = {				\
			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
			BPF_ALU64_REG(BPF_MOV, R0, R1),		\
			BPF_ALU32_REG(BPF_##op, R2, R1),	\
			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
			BPF_ALU64_REG(BPF_OR, R0, R1),		\
			BPF_EXIT_INSN(),			\
		},						\
		INTERNAL,					\
		{ },						\
		{ { 0, 0 } },					\
	}
	BPF_ALU32_SRC_ZEXT(MOV),
	BPF_ALU32_SRC_ZEXT(AND),
	BPF_ALU32_SRC_ZEXT(OR),
	BPF_ALU32_SRC_ZEXT(XOR),
	BPF_ALU32_SRC_ZEXT(ADD),
	BPF_ALU32_SRC_ZEXT(SUB),
	BPF_ALU32_SRC_ZEXT(MUL),
	BPF_ALU32_SRC_ZEXT(DIV),
	BPF_ALU32_SRC_ZEXT(MOD),
#undef BPF_ALU32_SRC_ZEXT
12148 /* Checking that ATOMIC32 src is not zero extended in place */
/*
 * Expand to one test verifying that a 32-bit atomic operation does not
 * zero-extend its source register in place. R1 (src) is a copy of the
 * 64-bit value in R0; after the atomic op on a stack word, (R0 - R1)
 * folded to 32 bits must be zero, i.e. R1 is still the full 64-bit value.
 */
#define BPF_ATOMIC32_SRC_ZEXT(op)					\
	{								\
		"ATOMIC_W_" #op ": src preserved in zext",		\
		.u.insns_int = {					\
			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),	\
			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
			BPF_ST_MEM(BPF_W, R10, -4, 0),			\
			BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4),	\
			BPF_ALU64_REG(BPF_SUB, R0, R1),			\
			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
			BPF_ALU64_IMM(BPF_RSH, R1, 32),			\
			BPF_ALU64_REG(BPF_OR, R0, R1),			\
			BPF_EXIT_INSN(),				\
		},							\
		INTERNAL,						\
		{ },							\
		{ { 0, 0 } },						\
		.stack_depth = 8,					\
	}
	BPF_ATOMIC32_SRC_ZEXT(ADD),
	BPF_ATOMIC32_SRC_ZEXT(AND),
	BPF_ATOMIC32_SRC_ZEXT(OR),
	BPF_ATOMIC32_SRC_ZEXT(XOR),
#undef BPF_ATOMIC32_SRC_ZEXT
12173 /* Checking that CMPXCHG32 src is not zero extended in place */
12174 {
12175 "ATOMIC_W_CMPXCHG: src preserved in zext",
12176 .u.insns_int = {
12177 BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
12178 BPF_ALU64_REG(BPF_MOV, R2, R1),
12179 BPF_ALU64_REG(BPF_MOV, R0, 0),
12180 BPF_ST_MEM(BPF_W, R10, -4, 0),
12181 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
12182 BPF_ALU64_REG(BPF_SUB, R1, R2),
12183 BPF_ALU64_REG(BPF_MOV, R2, R1),
12184 BPF_ALU64_IMM(BPF_RSH, R2, 32),
12185 BPF_ALU64_REG(BPF_OR, R1, R2),
12186 BPF_ALU64_REG(BPF_MOV, R0, R1),
12187 BPF_EXIT_INSN(),
12188 },
12189 INTERNAL,
12190 { },
12191 { { 0, 0 } },
12192 .stack_depth = 8,
12193 },
12194 /* Checking that JMP32 immediate src is not zero extended in place */
12195 #define BPF_JMP32_IMM_ZEXT(op) \
12196 { \
12197 "JMP32_" #op "_K: operand preserved in zext", \
12198 .u.insns_int = { \
12199 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12200 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12201 BPF_JMP32_IMM(BPF_##op, R0, 1234, 1), \
12202 BPF_JMP_A(0), /* Nop */ \
12203 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12204 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12205 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12206 BPF_ALU64_REG(BPF_OR, R0, R1), \
12207 BPF_EXIT_INSN(), \
12208 }, \
12209 INTERNAL, \
12210 { }, \
12211 { { 0, 0 } }, \
12212 }
12213 BPF_JMP32_IMM_ZEXT(JEQ),
12214 BPF_JMP32_IMM_ZEXT(JNE),
12215 BPF_JMP32_IMM_ZEXT(JSET),
12216 BPF_JMP32_IMM_ZEXT(JGT),
12217 BPF_JMP32_IMM_ZEXT(JGE),
12218 BPF_JMP32_IMM_ZEXT(JLT),
12219 BPF_JMP32_IMM_ZEXT(JLE),
12220 BPF_JMP32_IMM_ZEXT(JSGT),
12221 BPF_JMP32_IMM_ZEXT(JSGE),
12222 BPF_JMP32_IMM_ZEXT(JSLT),
12223 BPF_JMP32_IMM_ZEXT(JSLE),
12224 #undef BPF_JMP2_IMM_ZEXT
12225 /* Checking that JMP32 dst & src are not zero extended in place */
12226 #define BPF_JMP32_REG_ZEXT(op) \
12227 { \
12228 "JMP32_" #op "_X: operands preserved in zext", \
12229 .u.insns_int = { \
12230 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12231 BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
12232 BPF_ALU64_REG(BPF_MOV, R2, R0), \
12233 BPF_ALU64_REG(BPF_MOV, R3, R1), \
12234 BPF_JMP32_IMM(BPF_##op, R0, R1, 1), \
12235 BPF_JMP_A(0), /* Nop */ \
12236 BPF_ALU64_REG(BPF_SUB, R0, R2), \
12237 BPF_ALU64_REG(BPF_SUB, R1, R3), \
12238 BPF_ALU64_REG(BPF_OR, R0, R1), \
12239 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12240 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12241 BPF_ALU64_REG(BPF_OR, R0, R1), \
12242 BPF_EXIT_INSN(), \
12243 }, \
12244 INTERNAL, \
12245 { }, \
12246 { { 0, 0 } }, \
12247 }
12248 BPF_JMP32_REG_ZEXT(JEQ),
12249 BPF_JMP32_REG_ZEXT(JNE),
12250 BPF_JMP32_REG_ZEXT(JSET),
12251 BPF_JMP32_REG_ZEXT(JGT),
12252 BPF_JMP32_REG_ZEXT(JGE),
12253 BPF_JMP32_REG_ZEXT(JLT),
12254 BPF_JMP32_REG_ZEXT(JLE),
12255 BPF_JMP32_REG_ZEXT(JSGT),
12256 BPF_JMP32_REG_ZEXT(JSGE),
12257 BPF_JMP32_REG_ZEXT(JSLT),
12258 BPF_JMP32_REG_ZEXT(JSLE),
12259 #undef BPF_JMP2_REG_ZEXT
12260 /* ALU64 K register combinations */
12261 {
12262 "ALU64_MOV_K: registers",
12263 { },
12264 INTERNAL,
12265 { },
12266 { { 0, 1 } },
12267 .fill_helper = bpf_fill_alu64_mov_imm_regs,
12268 },
12269 {
12270 "ALU64_AND_K: registers",
12271 { },
12272 INTERNAL,
12273 { },
12274 { { 0, 1 } },
12275 .fill_helper = bpf_fill_alu64_and_imm_regs,
12276 },
12277 {
12278 "ALU64_OR_K: registers",
12279 { },
12280 INTERNAL,
12281 { },
12282 { { 0, 1 } },
12283 .fill_helper = bpf_fill_alu64_or_imm_regs,
12284 },
12285 {
12286 "ALU64_XOR_K: registers",
12287 { },
12288 INTERNAL,
12289 { },
12290 { { 0, 1 } },
12291 .fill_helper = bpf_fill_alu64_xor_imm_regs,
12292 },
12293 {
12294 "ALU64_LSH_K: registers",
12295 { },
12296 INTERNAL,
12297 { },
12298 { { 0, 1 } },
12299 .fill_helper = bpf_fill_alu64_lsh_imm_regs,
12300 },
12301 {
12302 "ALU64_RSH_K: registers",
12303 { },
12304 INTERNAL,
12305 { },
12306 { { 0, 1 } },
12307 .fill_helper = bpf_fill_alu64_rsh_imm_regs,
12308 },
12309 {
12310 "ALU64_ARSH_K: registers",
12311 { },
12312 INTERNAL,
12313 { },
12314 { { 0, 1 } },
12315 .fill_helper = bpf_fill_alu64_arsh_imm_regs,
12316 },
12317 {
12318 "ALU64_ADD_K: registers",
12319 { },
12320 INTERNAL,
12321 { },
12322 { { 0, 1 } },
12323 .fill_helper = bpf_fill_alu64_add_imm_regs,
12324 },
12325 {
12326 "ALU64_SUB_K: registers",
12327 { },
12328 INTERNAL,
12329 { },
12330 { { 0, 1 } },
12331 .fill_helper = bpf_fill_alu64_sub_imm_regs,
12332 },
12333 {
12334 "ALU64_MUL_K: registers",
12335 { },
12336 INTERNAL,
12337 { },
12338 { { 0, 1 } },
12339 .fill_helper = bpf_fill_alu64_mul_imm_regs,
12340 },
12341 {
12342 "ALU64_DIV_K: registers",
12343 { },
12344 INTERNAL,
12345 { },
12346 { { 0, 1 } },
12347 .fill_helper = bpf_fill_alu64_div_imm_regs,
12348 },
12349 {
12350 "ALU64_MOD_K: registers",
12351 { },
12352 INTERNAL,
12353 { },
12354 { { 0, 1 } },
12355 .fill_helper = bpf_fill_alu64_mod_imm_regs,
12356 },
	/* ALU32 K register combinations */
12358 {
12359 "ALU32_MOV_K: registers",
12360 { },
12361 INTERNAL,
12362 { },
12363 { { 0, 1 } },
12364 .fill_helper = bpf_fill_alu32_mov_imm_regs,
12365 },
12366 {
12367 "ALU32_AND_K: registers",
12368 { },
12369 INTERNAL,
12370 { },
12371 { { 0, 1 } },
12372 .fill_helper = bpf_fill_alu32_and_imm_regs,
12373 },
12374 {
12375 "ALU32_OR_K: registers",
12376 { },
12377 INTERNAL,
12378 { },
12379 { { 0, 1 } },
12380 .fill_helper = bpf_fill_alu32_or_imm_regs,
12381 },
12382 {
12383 "ALU32_XOR_K: registers",
12384 { },
12385 INTERNAL,
12386 { },
12387 { { 0, 1 } },
12388 .fill_helper = bpf_fill_alu32_xor_imm_regs,
12389 },
12390 {
12391 "ALU32_LSH_K: registers",
12392 { },
12393 INTERNAL,
12394 { },
12395 { { 0, 1 } },
12396 .fill_helper = bpf_fill_alu32_lsh_imm_regs,
12397 },
12398 {
12399 "ALU32_RSH_K: registers",
12400 { },
12401 INTERNAL,
12402 { },
12403 { { 0, 1 } },
12404 .fill_helper = bpf_fill_alu32_rsh_imm_regs,
12405 },
12406 {
12407 "ALU32_ARSH_K: registers",
12408 { },
12409 INTERNAL,
12410 { },
12411 { { 0, 1 } },
12412 .fill_helper = bpf_fill_alu32_arsh_imm_regs,
12413 },
12414 {
12415 "ALU32_ADD_K: registers",
12416 { },
12417 INTERNAL,
12418 { },
12419 { { 0, 1 } },
12420 .fill_helper = bpf_fill_alu32_add_imm_regs,
12421 },
12422 {
12423 "ALU32_SUB_K: registers",
12424 { },
12425 INTERNAL,
12426 { },
12427 { { 0, 1 } },
12428 .fill_helper = bpf_fill_alu32_sub_imm_regs,
12429 },
12430 {
12431 "ALU32_MUL_K: registers",
12432 { },
12433 INTERNAL,
12434 { },
12435 { { 0, 1 } },
12436 .fill_helper = bpf_fill_alu32_mul_imm_regs,
12437 },
12438 {
12439 "ALU32_DIV_K: registers",
12440 { },
12441 INTERNAL,
12442 { },
12443 { { 0, 1 } },
12444 .fill_helper = bpf_fill_alu32_div_imm_regs,
12445 },
12446 {
12447 "ALU32_MOD_K: registers",
12448 { },
12449 INTERNAL,
12450 { },
12451 { { 0, 1 } },
12452 .fill_helper = bpf_fill_alu32_mod_imm_regs,
12453 },
12454 /* ALU64 X register combinations */
12455 {
12456 "ALU64_MOV_X: register combinations",
12457 { },
12458 INTERNAL,
12459 { },
12460 { { 0, 1 } },
12461 .fill_helper = bpf_fill_alu64_mov_reg_pairs,
12462 },
12463 {
12464 "ALU64_AND_X: register combinations",
12465 { },
12466 INTERNAL,
12467 { },
12468 { { 0, 1 } },
12469 .fill_helper = bpf_fill_alu64_and_reg_pairs,
12470 },
12471 {
12472 "ALU64_OR_X: register combinations",
12473 { },
12474 INTERNAL,
12475 { },
12476 { { 0, 1 } },
12477 .fill_helper = bpf_fill_alu64_or_reg_pairs,
12478 },
12479 {
12480 "ALU64_XOR_X: register combinations",
12481 { },
12482 INTERNAL,
12483 { },
12484 { { 0, 1 } },
12485 .fill_helper = bpf_fill_alu64_xor_reg_pairs,
12486 },
12487 {
12488 "ALU64_LSH_X: register combinations",
12489 { },
12490 INTERNAL,
12491 { },
12492 { { 0, 1 } },
12493 .fill_helper = bpf_fill_alu64_lsh_reg_pairs,
12494 },
12495 {
12496 "ALU64_RSH_X: register combinations",
12497 { },
12498 INTERNAL,
12499 { },
12500 { { 0, 1 } },
12501 .fill_helper = bpf_fill_alu64_rsh_reg_pairs,
12502 },
12503 {
12504 "ALU64_ARSH_X: register combinations",
12505 { },
12506 INTERNAL,
12507 { },
12508 { { 0, 1 } },
12509 .fill_helper = bpf_fill_alu64_arsh_reg_pairs,
12510 },
12511 {
12512 "ALU64_ADD_X: register combinations",
12513 { },
12514 INTERNAL,
12515 { },
12516 { { 0, 1 } },
12517 .fill_helper = bpf_fill_alu64_add_reg_pairs,
12518 },
12519 {
12520 "ALU64_SUB_X: register combinations",
12521 { },
12522 INTERNAL,
12523 { },
12524 { { 0, 1 } },
12525 .fill_helper = bpf_fill_alu64_sub_reg_pairs,
12526 },
12527 {
12528 "ALU64_MUL_X: register combinations",
12529 { },
12530 INTERNAL,
12531 { },
12532 { { 0, 1 } },
12533 .fill_helper = bpf_fill_alu64_mul_reg_pairs,
12534 },
12535 {
12536 "ALU64_DIV_X: register combinations",
12537 { },
12538 INTERNAL,
12539 { },
12540 { { 0, 1 } },
12541 .fill_helper = bpf_fill_alu64_div_reg_pairs,
12542 },
12543 {
12544 "ALU64_MOD_X: register combinations",
12545 { },
12546 INTERNAL,
12547 { },
12548 { { 0, 1 } },
12549 .fill_helper = bpf_fill_alu64_mod_reg_pairs,
12550 },
12551 /* ALU32 X register combinations */
12552 {
12553 "ALU32_MOV_X: register combinations",
12554 { },
12555 INTERNAL,
12556 { },
12557 { { 0, 1 } },
12558 .fill_helper = bpf_fill_alu32_mov_reg_pairs,
12559 },
12560 {
12561 "ALU32_AND_X: register combinations",
12562 { },
12563 INTERNAL,
12564 { },
12565 { { 0, 1 } },
12566 .fill_helper = bpf_fill_alu32_and_reg_pairs,
12567 },
12568 {
12569 "ALU32_OR_X: register combinations",
12570 { },
12571 INTERNAL,
12572 { },
12573 { { 0, 1 } },
12574 .fill_helper = bpf_fill_alu32_or_reg_pairs,
12575 },
12576 {
12577 "ALU32_XOR_X: register combinations",
12578 { },
12579 INTERNAL,
12580 { },
12581 { { 0, 1 } },
12582 .fill_helper = bpf_fill_alu32_xor_reg_pairs,
12583 },
12584 {
12585 "ALU32_LSH_X: register combinations",
12586 { },
12587 INTERNAL,
12588 { },
12589 { { 0, 1 } },
12590 .fill_helper = bpf_fill_alu32_lsh_reg_pairs,
12591 },
12592 {
12593 "ALU32_RSH_X: register combinations",
12594 { },
12595 INTERNAL,
12596 { },
12597 { { 0, 1 } },
12598 .fill_helper = bpf_fill_alu32_rsh_reg_pairs,
12599 },
12600 {
12601 "ALU32_ARSH_X: register combinations",
12602 { },
12603 INTERNAL,
12604 { },
12605 { { 0, 1 } },
12606 .fill_helper = bpf_fill_alu32_arsh_reg_pairs,
12607 },
12608 {
12609 "ALU32_ADD_X: register combinations",
12610 { },
12611 INTERNAL,
12612 { },
12613 { { 0, 1 } },
12614 .fill_helper = bpf_fill_alu32_add_reg_pairs,
12615 },
12616 {
12617 "ALU32_SUB_X: register combinations",
12618 { },
12619 INTERNAL,
12620 { },
12621 { { 0, 1 } },
12622 .fill_helper = bpf_fill_alu32_sub_reg_pairs,
12623 },
12624 {
12625 "ALU32_MUL_X: register combinations",
12626 { },
12627 INTERNAL,
12628 { },
12629 { { 0, 1 } },
12630 .fill_helper = bpf_fill_alu32_mul_reg_pairs,
12631 },
12632 {
12633 "ALU32_DIV_X: register combinations",
12634 { },
12635 INTERNAL,
12636 { },
12637 { { 0, 1 } },
12638 .fill_helper = bpf_fill_alu32_div_reg_pairs,
12639 },
12640 {
12641 "ALU32_MOD_X register combinations",
12642 { },
12643 INTERNAL,
12644 { },
12645 { { 0, 1 } },
12646 .fill_helper = bpf_fill_alu32_mod_reg_pairs,
12647 },
12648 /* Exhaustive test of ALU64 shift operations */
12649 {
12650 "ALU64_LSH_K: all shift values",
12651 { },
12652 INTERNAL | FLAG_NO_DATA,
12653 { },
12654 { { 0, 1 } },
12655 .fill_helper = bpf_fill_alu64_lsh_imm,
12656 },
12657 {
12658 "ALU64_RSH_K: all shift values",
12659 { },
12660 INTERNAL | FLAG_NO_DATA,
12661 { },
12662 { { 0, 1 } },
12663 .fill_helper = bpf_fill_alu64_rsh_imm,
12664 },
12665 {
12666 "ALU64_ARSH_K: all shift values",
12667 { },
12668 INTERNAL | FLAG_NO_DATA,
12669 { },
12670 { { 0, 1 } },
12671 .fill_helper = bpf_fill_alu64_arsh_imm,
12672 },
12673 {
12674 "ALU64_LSH_X: all shift values",
12675 { },
12676 INTERNAL | FLAG_NO_DATA,
12677 { },
12678 { { 0, 1 } },
12679 .fill_helper = bpf_fill_alu64_lsh_reg,
12680 },
12681 {
12682 "ALU64_RSH_X: all shift values",
12683 { },
12684 INTERNAL | FLAG_NO_DATA,
12685 { },
12686 { { 0, 1 } },
12687 .fill_helper = bpf_fill_alu64_rsh_reg,
12688 },
12689 {
12690 "ALU64_ARSH_X: all shift values",
12691 { },
12692 INTERNAL | FLAG_NO_DATA,
12693 { },
12694 { { 0, 1 } },
12695 .fill_helper = bpf_fill_alu64_arsh_reg,
12696 },
12697 /* Exhaustive test of ALU32 shift operations */
12698 {
12699 "ALU32_LSH_K: all shift values",
12700 { },
12701 INTERNAL | FLAG_NO_DATA,
12702 { },
12703 { { 0, 1 } },
12704 .fill_helper = bpf_fill_alu32_lsh_imm,
12705 },
12706 {
12707 "ALU32_RSH_K: all shift values",
12708 { },
12709 INTERNAL | FLAG_NO_DATA,
12710 { },
12711 { { 0, 1 } },
12712 .fill_helper = bpf_fill_alu32_rsh_imm,
12713 },
12714 {
12715 "ALU32_ARSH_K: all shift values",
12716 { },
12717 INTERNAL | FLAG_NO_DATA,
12718 { },
12719 { { 0, 1 } },
12720 .fill_helper = bpf_fill_alu32_arsh_imm,
12721 },
12722 {
12723 "ALU32_LSH_X: all shift values",
12724 { },
12725 INTERNAL | FLAG_NO_DATA,
12726 { },
12727 { { 0, 1 } },
12728 .fill_helper = bpf_fill_alu32_lsh_reg,
12729 },
12730 {
12731 "ALU32_RSH_X: all shift values",
12732 { },
12733 INTERNAL | FLAG_NO_DATA,
12734 { },
12735 { { 0, 1 } },
12736 .fill_helper = bpf_fill_alu32_rsh_reg,
12737 },
12738 {
12739 "ALU32_ARSH_X: all shift values",
12740 { },
12741 INTERNAL | FLAG_NO_DATA,
12742 { },
12743 { { 0, 1 } },
12744 .fill_helper = bpf_fill_alu32_arsh_reg,
12745 },
12746 /*
12747 * Exhaustive test of ALU64 shift operations when
12748 * source and destination register are the same.
12749 */
12750 {
12751 "ALU64_LSH_X: all shift values with the same register",
12752 { },
12753 INTERNAL | FLAG_NO_DATA,
12754 { },
12755 { { 0, 1 } },
12756 .fill_helper = bpf_fill_alu64_lsh_same_reg,
12757 },
12758 {
12759 "ALU64_RSH_X: all shift values with the same register",
12760 { },
12761 INTERNAL | FLAG_NO_DATA,
12762 { },
12763 { { 0, 1 } },
12764 .fill_helper = bpf_fill_alu64_rsh_same_reg,
12765 },
12766 {
12767 "ALU64_ARSH_X: all shift values with the same register",
12768 { },
12769 INTERNAL | FLAG_NO_DATA,
12770 { },
12771 { { 0, 1 } },
12772 .fill_helper = bpf_fill_alu64_arsh_same_reg,
12773 },
12774 /*
12775 * Exhaustive test of ALU32 shift operations when
12776 * source and destination register are the same.
12777 */
12778 {
12779 "ALU32_LSH_X: all shift values with the same register",
12780 { },
12781 INTERNAL | FLAG_NO_DATA,
12782 { },
12783 { { 0, 1 } },
12784 .fill_helper = bpf_fill_alu32_lsh_same_reg,
12785 },
12786 {
12787 "ALU32_RSH_X: all shift values with the same register",
12788 { },
12789 INTERNAL | FLAG_NO_DATA,
12790 { },
12791 { { 0, 1 } },
12792 .fill_helper = bpf_fill_alu32_rsh_same_reg,
12793 },
12794 {
12795 "ALU32_ARSH_X: all shift values with the same register",
12796 { },
12797 INTERNAL | FLAG_NO_DATA,
12798 { },
12799 { { 0, 1 } },
12800 .fill_helper = bpf_fill_alu32_arsh_same_reg,
12801 },
12802 /* ALU64 immediate magnitudes */
12803 {
12804 "ALU64_MOV_K: all immediate value magnitudes",
12805 { },
12806 INTERNAL | FLAG_NO_DATA,
12807 { },
12808 { { 0, 1 } },
12809 .fill_helper = bpf_fill_alu64_mov_imm,
12810 .nr_testruns = NR_PATTERN_RUNS,
12811 },
12812 {
12813 "ALU64_AND_K: all immediate value magnitudes",
12814 { },
12815 INTERNAL | FLAG_NO_DATA,
12816 { },
12817 { { 0, 1 } },
12818 .fill_helper = bpf_fill_alu64_and_imm,
12819 .nr_testruns = NR_PATTERN_RUNS,
12820 },
12821 {
12822 "ALU64_OR_K: all immediate value magnitudes",
12823 { },
12824 INTERNAL | FLAG_NO_DATA,
12825 { },
12826 { { 0, 1 } },
12827 .fill_helper = bpf_fill_alu64_or_imm,
12828 .nr_testruns = NR_PATTERN_RUNS,
12829 },
12830 {
12831 "ALU64_XOR_K: all immediate value magnitudes",
12832 { },
12833 INTERNAL | FLAG_NO_DATA,
12834 { },
12835 { { 0, 1 } },
12836 .fill_helper = bpf_fill_alu64_xor_imm,
12837 .nr_testruns = NR_PATTERN_RUNS,
12838 },
12839 {
12840 "ALU64_ADD_K: all immediate value magnitudes",
12841 { },
12842 INTERNAL | FLAG_NO_DATA,
12843 { },
12844 { { 0, 1 } },
12845 .fill_helper = bpf_fill_alu64_add_imm,
12846 .nr_testruns = NR_PATTERN_RUNS,
12847 },
12848 {
12849 "ALU64_SUB_K: all immediate value magnitudes",
12850 { },
12851 INTERNAL | FLAG_NO_DATA,
12852 { },
12853 { { 0, 1 } },
12854 .fill_helper = bpf_fill_alu64_sub_imm,
12855 .nr_testruns = NR_PATTERN_RUNS,
12856 },
12857 {
12858 "ALU64_MUL_K: all immediate value magnitudes",
12859 { },
12860 INTERNAL | FLAG_NO_DATA,
12861 { },
12862 { { 0, 1 } },
12863 .fill_helper = bpf_fill_alu64_mul_imm,
12864 .nr_testruns = NR_PATTERN_RUNS,
12865 },
12866 {
12867 "ALU64_DIV_K: all immediate value magnitudes",
12868 { },
12869 INTERNAL | FLAG_NO_DATA,
12870 { },
12871 { { 0, 1 } },
12872 .fill_helper = bpf_fill_alu64_div_imm,
12873 .nr_testruns = NR_PATTERN_RUNS,
12874 },
12875 {
12876 "ALU64_MOD_K: all immediate value magnitudes",
12877 { },
12878 INTERNAL | FLAG_NO_DATA,
12879 { },
12880 { { 0, 1 } },
12881 .fill_helper = bpf_fill_alu64_mod_imm,
12882 .nr_testruns = NR_PATTERN_RUNS,
12883 },
12884 /* ALU32 immediate magnitudes */
12885 {
12886 "ALU32_MOV_K: all immediate value magnitudes",
12887 { },
12888 INTERNAL | FLAG_NO_DATA,
12889 { },
12890 { { 0, 1 } },
12891 .fill_helper = bpf_fill_alu32_mov_imm,
12892 .nr_testruns = NR_PATTERN_RUNS,
12893 },
12894 {
12895 "ALU32_AND_K: all immediate value magnitudes",
12896 { },
12897 INTERNAL | FLAG_NO_DATA,
12898 { },
12899 { { 0, 1 } },
12900 .fill_helper = bpf_fill_alu32_and_imm,
12901 .nr_testruns = NR_PATTERN_RUNS,
12902 },
12903 {
12904 "ALU32_OR_K: all immediate value magnitudes",
12905 { },
12906 INTERNAL | FLAG_NO_DATA,
12907 { },
12908 { { 0, 1 } },
12909 .fill_helper = bpf_fill_alu32_or_imm,
12910 .nr_testruns = NR_PATTERN_RUNS,
12911 },
12912 {
12913 "ALU32_XOR_K: all immediate value magnitudes",
12914 { },
12915 INTERNAL | FLAG_NO_DATA,
12916 { },
12917 { { 0, 1 } },
12918 .fill_helper = bpf_fill_alu32_xor_imm,
12919 .nr_testruns = NR_PATTERN_RUNS,
12920 },
12921 {
12922 "ALU32_ADD_K: all immediate value magnitudes",
12923 { },
12924 INTERNAL | FLAG_NO_DATA,
12925 { },
12926 { { 0, 1 } },
12927 .fill_helper = bpf_fill_alu32_add_imm,
12928 .nr_testruns = NR_PATTERN_RUNS,
12929 },
12930 {
12931 "ALU32_SUB_K: all immediate value magnitudes",
12932 { },
12933 INTERNAL | FLAG_NO_DATA,
12934 { },
12935 { { 0, 1 } },
12936 .fill_helper = bpf_fill_alu32_sub_imm,
12937 .nr_testruns = NR_PATTERN_RUNS,
12938 },
12939 {
12940 "ALU32_MUL_K: all immediate value magnitudes",
12941 { },
12942 INTERNAL | FLAG_NO_DATA,
12943 { },
12944 { { 0, 1 } },
12945 .fill_helper = bpf_fill_alu32_mul_imm,
12946 .nr_testruns = NR_PATTERN_RUNS,
12947 },
12948 {
12949 "ALU32_DIV_K: all immediate value magnitudes",
12950 { },
12951 INTERNAL | FLAG_NO_DATA,
12952 { },
12953 { { 0, 1 } },
12954 .fill_helper = bpf_fill_alu32_div_imm,
12955 .nr_testruns = NR_PATTERN_RUNS,
12956 },
12957 {
12958 "ALU32_MOD_K: all immediate value magnitudes",
12959 { },
12960 INTERNAL | FLAG_NO_DATA,
12961 { },
12962 { { 0, 1 } },
12963 .fill_helper = bpf_fill_alu32_mod_imm,
12964 .nr_testruns = NR_PATTERN_RUNS,
12965 },
12966 /* ALU64 register magnitudes */
12967 {
12968 "ALU64_MOV_X: all register value magnitudes",
12969 { },
12970 INTERNAL | FLAG_NO_DATA,
12971 { },
12972 { { 0, 1 } },
12973 .fill_helper = bpf_fill_alu64_mov_reg,
12974 .nr_testruns = NR_PATTERN_RUNS,
12975 },
12976 {
12977 "ALU64_AND_X: all register value magnitudes",
12978 { },
12979 INTERNAL | FLAG_NO_DATA,
12980 { },
12981 { { 0, 1 } },
12982 .fill_helper = bpf_fill_alu64_and_reg,
12983 .nr_testruns = NR_PATTERN_RUNS,
12984 },
12985 {
12986 "ALU64_OR_X: all register value magnitudes",
12987 { },
12988 INTERNAL | FLAG_NO_DATA,
12989 { },
12990 { { 0, 1 } },
12991 .fill_helper = bpf_fill_alu64_or_reg,
12992 .nr_testruns = NR_PATTERN_RUNS,
12993 },
12994 {
12995 "ALU64_XOR_X: all register value magnitudes",
12996 { },
12997 INTERNAL | FLAG_NO_DATA,
12998 { },
12999 { { 0, 1 } },
13000 .fill_helper = bpf_fill_alu64_xor_reg,
13001 .nr_testruns = NR_PATTERN_RUNS,
13002 },
13003 {
13004 "ALU64_ADD_X: all register value magnitudes",
13005 { },
13006 INTERNAL | FLAG_NO_DATA,
13007 { },
13008 { { 0, 1 } },
13009 .fill_helper = bpf_fill_alu64_add_reg,
13010 .nr_testruns = NR_PATTERN_RUNS,
13011 },
13012 {
13013 "ALU64_SUB_X: all register value magnitudes",
13014 { },
13015 INTERNAL | FLAG_NO_DATA,
13016 { },
13017 { { 0, 1 } },
13018 .fill_helper = bpf_fill_alu64_sub_reg,
13019 .nr_testruns = NR_PATTERN_RUNS,
13020 },
13021 {
13022 "ALU64_MUL_X: all register value magnitudes",
13023 { },
13024 INTERNAL | FLAG_NO_DATA,
13025 { },
13026 { { 0, 1 } },
13027 .fill_helper = bpf_fill_alu64_mul_reg,
13028 .nr_testruns = NR_PATTERN_RUNS,
13029 },
13030 {
13031 "ALU64_DIV_X: all register value magnitudes",
13032 { },
13033 INTERNAL | FLAG_NO_DATA,
13034 { },
13035 { { 0, 1 } },
13036 .fill_helper = bpf_fill_alu64_div_reg,
13037 .nr_testruns = NR_PATTERN_RUNS,
13038 },
13039 {
13040 "ALU64_MOD_X: all register value magnitudes",
13041 { },
13042 INTERNAL | FLAG_NO_DATA,
13043 { },
13044 { { 0, 1 } },
13045 .fill_helper = bpf_fill_alu64_mod_reg,
13046 .nr_testruns = NR_PATTERN_RUNS,
13047 },
13048 /* ALU32 register magnitudes */
13049 {
13050 "ALU32_MOV_X: all register value magnitudes",
13051 { },
13052 INTERNAL | FLAG_NO_DATA,
13053 { },
13054 { { 0, 1 } },
13055 .fill_helper = bpf_fill_alu32_mov_reg,
13056 .nr_testruns = NR_PATTERN_RUNS,
13057 },
13058 {
13059 "ALU32_AND_X: all register value magnitudes",
13060 { },
13061 INTERNAL | FLAG_NO_DATA,
13062 { },
13063 { { 0, 1 } },
13064 .fill_helper = bpf_fill_alu32_and_reg,
13065 .nr_testruns = NR_PATTERN_RUNS,
13066 },
13067 {
13068 "ALU32_OR_X: all register value magnitudes",
13069 { },
13070 INTERNAL | FLAG_NO_DATA,
13071 { },
13072 { { 0, 1 } },
13073 .fill_helper = bpf_fill_alu32_or_reg,
13074 .nr_testruns = NR_PATTERN_RUNS,
13075 },
13076 {
13077 "ALU32_XOR_X: all register value magnitudes",
13078 { },
13079 INTERNAL | FLAG_NO_DATA,
13080 { },
13081 { { 0, 1 } },
13082 .fill_helper = bpf_fill_alu32_xor_reg,
13083 .nr_testruns = NR_PATTERN_RUNS,
13084 },
13085 {
13086 "ALU32_ADD_X: all register value magnitudes",
13087 { },
13088 INTERNAL | FLAG_NO_DATA,
13089 { },
13090 { { 0, 1 } },
13091 .fill_helper = bpf_fill_alu32_add_reg,
13092 .nr_testruns = NR_PATTERN_RUNS,
13093 },
13094 {
13095 "ALU32_SUB_X: all register value magnitudes",
13096 { },
13097 INTERNAL | FLAG_NO_DATA,
13098 { },
13099 { { 0, 1 } },
13100 .fill_helper = bpf_fill_alu32_sub_reg,
13101 .nr_testruns = NR_PATTERN_RUNS,
13102 },
13103 {
13104 "ALU32_MUL_X: all register value magnitudes",
13105 { },
13106 INTERNAL | FLAG_NO_DATA,
13107 { },
13108 { { 0, 1 } },
13109 .fill_helper = bpf_fill_alu32_mul_reg,
13110 .nr_testruns = NR_PATTERN_RUNS,
13111 },
13112 {
13113 "ALU32_DIV_X: all register value magnitudes",
13114 { },
13115 INTERNAL | FLAG_NO_DATA,
13116 { },
13117 { { 0, 1 } },
13118 .fill_helper = bpf_fill_alu32_div_reg,
13119 .nr_testruns = NR_PATTERN_RUNS,
13120 },
13121 {
13122 "ALU32_MOD_X: all register value magnitudes",
13123 { },
13124 INTERNAL | FLAG_NO_DATA,
13125 { },
13126 { { 0, 1 } },
13127 .fill_helper = bpf_fill_alu32_mod_reg,
13128 .nr_testruns = NR_PATTERN_RUNS,
13129 },
13130 /* LD_IMM64 immediate magnitudes and byte patterns */
13131 {
13132 "LD_IMM64: all immediate value magnitudes",
13133 { },
13134 INTERNAL | FLAG_NO_DATA,
13135 { },
13136 { { 0, 1 } },
13137 .fill_helper = bpf_fill_ld_imm64_magn,
13138 },
13139 {
13140 "LD_IMM64: checker byte patterns",
13141 { },
13142 INTERNAL | FLAG_NO_DATA,
13143 { },
13144 { { 0, 1 } },
13145 .fill_helper = bpf_fill_ld_imm64_checker,
13146 },
13147 {
13148 "LD_IMM64: random positive and zero byte patterns",
13149 { },
13150 INTERNAL | FLAG_NO_DATA,
13151 { },
13152 { { 0, 1 } },
13153 .fill_helper = bpf_fill_ld_imm64_pos_zero,
13154 },
13155 {
13156 "LD_IMM64: random negative and zero byte patterns",
13157 { },
13158 INTERNAL | FLAG_NO_DATA,
13159 { },
13160 { { 0, 1 } },
13161 .fill_helper = bpf_fill_ld_imm64_neg_zero,
13162 },
13163 {
13164 "LD_IMM64: random positive and negative byte patterns",
13165 { },
13166 INTERNAL | FLAG_NO_DATA,
13167 { },
13168 { { 0, 1 } },
13169 .fill_helper = bpf_fill_ld_imm64_pos_neg,
13170 },
13171 /* 64-bit ATOMIC register combinations */
13172 {
13173 "ATOMIC_DW_ADD: register combinations",
13174 { },
13175 INTERNAL,
13176 { },
13177 { { 0, 1 } },
13178 .fill_helper = bpf_fill_atomic64_add_reg_pairs,
13179 .stack_depth = 8,
13180 },
13181 {
13182 "ATOMIC_DW_AND: register combinations",
13183 { },
13184 INTERNAL,
13185 { },
13186 { { 0, 1 } },
13187 .fill_helper = bpf_fill_atomic64_and_reg_pairs,
13188 .stack_depth = 8,
13189 },
13190 {
13191 "ATOMIC_DW_OR: register combinations",
13192 { },
13193 INTERNAL,
13194 { },
13195 { { 0, 1 } },
13196 .fill_helper = bpf_fill_atomic64_or_reg_pairs,
13197 .stack_depth = 8,
13198 },
13199 {
13200 "ATOMIC_DW_XOR: register combinations",
13201 { },
13202 INTERNAL,
13203 { },
13204 { { 0, 1 } },
13205 .fill_helper = bpf_fill_atomic64_xor_reg_pairs,
13206 .stack_depth = 8,
13207 },
13208 {
13209 "ATOMIC_DW_ADD_FETCH: register combinations",
13210 { },
13211 INTERNAL,
13212 { },
13213 { { 0, 1 } },
13214 .fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
13215 .stack_depth = 8,
13216 },
13217 {
13218 "ATOMIC_DW_AND_FETCH: register combinations",
13219 { },
13220 INTERNAL,
13221 { },
13222 { { 0, 1 } },
13223 .fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
13224 .stack_depth = 8,
13225 },
13226 {
13227 "ATOMIC_DW_OR_FETCH: register combinations",
13228 { },
13229 INTERNAL,
13230 { },
13231 { { 0, 1 } },
13232 .fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
13233 .stack_depth = 8,
13234 },
13235 {
13236 "ATOMIC_DW_XOR_FETCH: register combinations",
13237 { },
13238 INTERNAL,
13239 { },
13240 { { 0, 1 } },
13241 .fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
13242 .stack_depth = 8,
13243 },
13244 {
13245 "ATOMIC_DW_XCHG: register combinations",
13246 { },
13247 INTERNAL,
13248 { },
13249 { { 0, 1 } },
13250 .fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
13251 .stack_depth = 8,
13252 },
13253 {
13254 "ATOMIC_DW_CMPXCHG: register combinations",
13255 { },
13256 INTERNAL,
13257 { },
13258 { { 0, 1 } },
13259 .fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
13260 .stack_depth = 8,
13261 },
13262 /* 32-bit ATOMIC register combinations */
13263 {
13264 "ATOMIC_W_ADD: register combinations",
13265 { },
13266 INTERNAL,
13267 { },
13268 { { 0, 1 } },
13269 .fill_helper = bpf_fill_atomic32_add_reg_pairs,
13270 .stack_depth = 8,
13271 },
13272 {
13273 "ATOMIC_W_AND: register combinations",
13274 { },
13275 INTERNAL,
13276 { },
13277 { { 0, 1 } },
13278 .fill_helper = bpf_fill_atomic32_and_reg_pairs,
13279 .stack_depth = 8,
13280 },
13281 {
13282 "ATOMIC_W_OR: register combinations",
13283 { },
13284 INTERNAL,
13285 { },
13286 { { 0, 1 } },
13287 .fill_helper = bpf_fill_atomic32_or_reg_pairs,
13288 .stack_depth = 8,
13289 },
13290 {
13291 "ATOMIC_W_XOR: register combinations",
13292 { },
13293 INTERNAL,
13294 { },
13295 { { 0, 1 } },
13296 .fill_helper = bpf_fill_atomic32_xor_reg_pairs,
13297 .stack_depth = 8,
13298 },
13299 {
13300 "ATOMIC_W_ADD_FETCH: register combinations",
13301 { },
13302 INTERNAL,
13303 { },
13304 { { 0, 1 } },
13305 .fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
13306 .stack_depth = 8,
13307 },
13308 {
13309 "ATOMIC_W_AND_FETCH: register combinations",
13310 { },
13311 INTERNAL,
13312 { },
13313 { { 0, 1 } },
13314 .fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
13315 .stack_depth = 8,
13316 },
13317 {
13318 "ATOMIC_W_OR_FETCH: register combinations",
13319 { },
13320 INTERNAL,
13321 { },
13322 { { 0, 1 } },
13323 .fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
13324 .stack_depth = 8,
13325 },
13326 {
13327 "ATOMIC_W_XOR_FETCH: register combinations",
13328 { },
13329 INTERNAL,
13330 { },
13331 { { 0, 1 } },
13332 .fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
13333 .stack_depth = 8,
13334 },
13335 {
13336 "ATOMIC_W_XCHG: register combinations",
13337 { },
13338 INTERNAL,
13339 { },
13340 { { 0, 1 } },
13341 .fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
13342 .stack_depth = 8,
13343 },
13344 {
13345 "ATOMIC_W_CMPXCHG: register combinations",
13346 { },
13347 INTERNAL,
13348 { },
13349 { { 0, 1 } },
13350 .fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
13351 .stack_depth = 8,
13352 },
13353 /* 64-bit ATOMIC magnitudes */
13354 {
13355 "ATOMIC_DW_ADD: all operand magnitudes",
13356 { },
13357 INTERNAL | FLAG_NO_DATA,
13358 { },
13359 { { 0, 1 } },
13360 .fill_helper = bpf_fill_atomic64_add,
13361 .stack_depth = 8,
13362 .nr_testruns = NR_PATTERN_RUNS,
13363 },
13364 {
13365 "ATOMIC_DW_AND: all operand magnitudes",
13366 { },
13367 INTERNAL | FLAG_NO_DATA,
13368 { },
13369 { { 0, 1 } },
13370 .fill_helper = bpf_fill_atomic64_and,
13371 .stack_depth = 8,
13372 .nr_testruns = NR_PATTERN_RUNS,
13373 },
13374 {
13375 "ATOMIC_DW_OR: all operand magnitudes",
13376 { },
13377 INTERNAL | FLAG_NO_DATA,
13378 { },
13379 { { 0, 1 } },
13380 .fill_helper = bpf_fill_atomic64_or,
13381 .stack_depth = 8,
13382 .nr_testruns = NR_PATTERN_RUNS,
13383 },
13384 {
13385 "ATOMIC_DW_XOR: all operand magnitudes",
13386 { },
13387 INTERNAL | FLAG_NO_DATA,
13388 { },
13389 { { 0, 1 } },
13390 .fill_helper = bpf_fill_atomic64_xor,
13391 .stack_depth = 8,
13392 .nr_testruns = NR_PATTERN_RUNS,
13393 },
13394 {
13395 "ATOMIC_DW_ADD_FETCH: all operand magnitudes",
13396 { },
13397 INTERNAL | FLAG_NO_DATA,
13398 { },
13399 { { 0, 1 } },
13400 .fill_helper = bpf_fill_atomic64_add_fetch,
13401 .stack_depth = 8,
13402 .nr_testruns = NR_PATTERN_RUNS,
13403 },
13404 {
13405 "ATOMIC_DW_AND_FETCH: all operand magnitudes",
13406 { },
13407 INTERNAL | FLAG_NO_DATA,
13408 { },
13409 { { 0, 1 } },
13410 .fill_helper = bpf_fill_atomic64_and_fetch,
13411 .stack_depth = 8,
13412 .nr_testruns = NR_PATTERN_RUNS,
13413 },
13414 {
13415 "ATOMIC_DW_OR_FETCH: all operand magnitudes",
13416 { },
13417 INTERNAL | FLAG_NO_DATA,
13418 { },
13419 { { 0, 1 } },
13420 .fill_helper = bpf_fill_atomic64_or_fetch,
13421 .stack_depth = 8,
13422 .nr_testruns = NR_PATTERN_RUNS,
13423 },
13424 {
13425 "ATOMIC_DW_XOR_FETCH: all operand magnitudes",
13426 { },
13427 INTERNAL | FLAG_NO_DATA,
13428 { },
13429 { { 0, 1 } },
13430 .fill_helper = bpf_fill_atomic64_xor_fetch,
13431 .stack_depth = 8,
13432 .nr_testruns = NR_PATTERN_RUNS,
13433 },
13434 {
13435 "ATOMIC_DW_XCHG: all operand magnitudes",
13436 { },
13437 INTERNAL | FLAG_NO_DATA,
13438 { },
13439 { { 0, 1 } },
13440 .fill_helper = bpf_fill_atomic64_xchg,
13441 .stack_depth = 8,
13442 .nr_testruns = NR_PATTERN_RUNS,
13443 },
13444 {
13445 "ATOMIC_DW_CMPXCHG: all operand magnitudes",
13446 { },
13447 INTERNAL | FLAG_NO_DATA,
13448 { },
13449 { { 0, 1 } },
13450 .fill_helper = bpf_fill_cmpxchg64,
13451 .stack_depth = 8,
13452 .nr_testruns = NR_PATTERN_RUNS,
13453 },
	/* 32-bit ATOMIC magnitudes */
13455 {
13456 "ATOMIC_W_ADD: all operand magnitudes",
13457 { },
13458 INTERNAL | FLAG_NO_DATA,
13459 { },
13460 { { 0, 1 } },
13461 .fill_helper = bpf_fill_atomic32_add,
13462 .stack_depth = 8,
13463 .nr_testruns = NR_PATTERN_RUNS,
13464 },
13465 {
13466 "ATOMIC_W_AND: all operand magnitudes",
13467 { },
13468 INTERNAL | FLAG_NO_DATA,
13469 { },
13470 { { 0, 1 } },
13471 .fill_helper = bpf_fill_atomic32_and,
13472 .stack_depth = 8,
13473 .nr_testruns = NR_PATTERN_RUNS,
13474 },
13475 {
13476 "ATOMIC_W_OR: all operand magnitudes",
13477 { },
13478 INTERNAL | FLAG_NO_DATA,
13479 { },
13480 { { 0, 1 } },
13481 .fill_helper = bpf_fill_atomic32_or,
13482 .stack_depth = 8,
13483 .nr_testruns = NR_PATTERN_RUNS,
13484 },
13485 {
13486 "ATOMIC_W_XOR: all operand magnitudes",
13487 { },
13488 INTERNAL | FLAG_NO_DATA,
13489 { },
13490 { { 0, 1 } },
13491 .fill_helper = bpf_fill_atomic32_xor,
13492 .stack_depth = 8,
13493 .nr_testruns = NR_PATTERN_RUNS,
13494 },
13495 {
13496 "ATOMIC_W_ADD_FETCH: all operand magnitudes",
13497 { },
13498 INTERNAL | FLAG_NO_DATA,
13499 { },
13500 { { 0, 1 } },
13501 .fill_helper = bpf_fill_atomic32_add_fetch,
13502 .stack_depth = 8,
13503 .nr_testruns = NR_PATTERN_RUNS,
13504 },
13505 {
13506 "ATOMIC_W_AND_FETCH: all operand magnitudes",
13507 { },
13508 INTERNAL | FLAG_NO_DATA,
13509 { },
13510 { { 0, 1 } },
13511 .fill_helper = bpf_fill_atomic32_and_fetch,
13512 .stack_depth = 8,
13513 .nr_testruns = NR_PATTERN_RUNS,
13514 },
13515 {
13516 "ATOMIC_W_OR_FETCH: all operand magnitudes",
13517 { },
13518 INTERNAL | FLAG_NO_DATA,
13519 { },
13520 { { 0, 1 } },
13521 .fill_helper = bpf_fill_atomic32_or_fetch,
13522 .stack_depth = 8,
13523 .nr_testruns = NR_PATTERN_RUNS,
13524 },
13525 {
13526 "ATOMIC_W_XOR_FETCH: all operand magnitudes",
13527 { },
13528 INTERNAL | FLAG_NO_DATA,
13529 { },
13530 { { 0, 1 } },
13531 .fill_helper = bpf_fill_atomic32_xor_fetch,
13532 .stack_depth = 8,
13533 .nr_testruns = NR_PATTERN_RUNS,
13534 },
13535 {
13536 "ATOMIC_W_XCHG: all operand magnitudes",
13537 { },
13538 INTERNAL | FLAG_NO_DATA,
13539 { },
13540 { { 0, 1 } },
13541 .fill_helper = bpf_fill_atomic32_xchg,
13542 .stack_depth = 8,
13543 .nr_testruns = NR_PATTERN_RUNS,
13544 },
13545 {
13546 "ATOMIC_W_CMPXCHG: all operand magnitudes",
13547 { },
13548 INTERNAL | FLAG_NO_DATA,
13549 { },
13550 { { 0, 1 } },
13551 .fill_helper = bpf_fill_cmpxchg32,
13552 .stack_depth = 8,
13553 .nr_testruns = NR_PATTERN_RUNS,
13554 },
13555 /* JMP immediate magnitudes */
13556 {
13557 "JMP_JSET_K: all immediate value magnitudes",
13558 { },
13559 INTERNAL | FLAG_NO_DATA,
13560 { },
13561 { { 0, 1 } },
13562 .fill_helper = bpf_fill_jmp_jset_imm,
13563 .nr_testruns = NR_PATTERN_RUNS,
13564 },
13565 {
13566 "JMP_JEQ_K: all immediate value magnitudes",
13567 { },
13568 INTERNAL | FLAG_NO_DATA,
13569 { },
13570 { { 0, 1 } },
13571 .fill_helper = bpf_fill_jmp_jeq_imm,
13572 .nr_testruns = NR_PATTERN_RUNS,
13573 },
13574 {
13575 "JMP_JNE_K: all immediate value magnitudes",
13576 { },
13577 INTERNAL | FLAG_NO_DATA,
13578 { },
13579 { { 0, 1 } },
13580 .fill_helper = bpf_fill_jmp_jne_imm,
13581 .nr_testruns = NR_PATTERN_RUNS,
13582 },
13583 {
13584 "JMP_JGT_K: all immediate value magnitudes",
13585 { },
13586 INTERNAL | FLAG_NO_DATA,
13587 { },
13588 { { 0, 1 } },
13589 .fill_helper = bpf_fill_jmp_jgt_imm,
13590 .nr_testruns = NR_PATTERN_RUNS,
13591 },
13592 {
13593 "JMP_JGE_K: all immediate value magnitudes",
13594 { },
13595 INTERNAL | FLAG_NO_DATA,
13596 { },
13597 { { 0, 1 } },
13598 .fill_helper = bpf_fill_jmp_jge_imm,
13599 .nr_testruns = NR_PATTERN_RUNS,
13600 },
13601 {
13602 "JMP_JLT_K: all immediate value magnitudes",
13603 { },
13604 INTERNAL | FLAG_NO_DATA,
13605 { },
13606 { { 0, 1 } },
13607 .fill_helper = bpf_fill_jmp_jlt_imm,
13608 .nr_testruns = NR_PATTERN_RUNS,
13609 },
13610 {
13611 "JMP_JLE_K: all immediate value magnitudes",
13612 { },
13613 INTERNAL | FLAG_NO_DATA,
13614 { },
13615 { { 0, 1 } },
13616 .fill_helper = bpf_fill_jmp_jle_imm,
13617 .nr_testruns = NR_PATTERN_RUNS,
13618 },
13619 {
13620 "JMP_JSGT_K: all immediate value magnitudes",
13621 { },
13622 INTERNAL | FLAG_NO_DATA,
13623 { },
13624 { { 0, 1 } },
13625 .fill_helper = bpf_fill_jmp_jsgt_imm,
13626 .nr_testruns = NR_PATTERN_RUNS,
13627 },
13628 {
13629 "JMP_JSGE_K: all immediate value magnitudes",
13630 { },
13631 INTERNAL | FLAG_NO_DATA,
13632 { },
13633 { { 0, 1 } },
13634 .fill_helper = bpf_fill_jmp_jsge_imm,
13635 .nr_testruns = NR_PATTERN_RUNS,
13636 },
13637 {
13638 "JMP_JSLT_K: all immediate value magnitudes",
13639 { },
13640 INTERNAL | FLAG_NO_DATA,
13641 { },
13642 { { 0, 1 } },
13643 .fill_helper = bpf_fill_jmp_jslt_imm,
13644 .nr_testruns = NR_PATTERN_RUNS,
13645 },
13646 {
13647 "JMP_JSLE_K: all immediate value magnitudes",
13648 { },
13649 INTERNAL | FLAG_NO_DATA,
13650 { },
13651 { { 0, 1 } },
13652 .fill_helper = bpf_fill_jmp_jsle_imm,
13653 .nr_testruns = NR_PATTERN_RUNS,
13654 },
13655 /* JMP register magnitudes */
13656 {
13657 "JMP_JSET_X: all register value magnitudes",
13658 { },
13659 INTERNAL | FLAG_NO_DATA,
13660 { },
13661 { { 0, 1 } },
13662 .fill_helper = bpf_fill_jmp_jset_reg,
13663 .nr_testruns = NR_PATTERN_RUNS,
13664 },
13665 {
13666 "JMP_JEQ_X: all register value magnitudes",
13667 { },
13668 INTERNAL | FLAG_NO_DATA,
13669 { },
13670 { { 0, 1 } },
13671 .fill_helper = bpf_fill_jmp_jeq_reg,
13672 .nr_testruns = NR_PATTERN_RUNS,
13673 },
13674 {
13675 "JMP_JNE_X: all register value magnitudes",
13676 { },
13677 INTERNAL | FLAG_NO_DATA,
13678 { },
13679 { { 0, 1 } },
13680 .fill_helper = bpf_fill_jmp_jne_reg,
13681 .nr_testruns = NR_PATTERN_RUNS,
13682 },
13683 {
13684 "JMP_JGT_X: all register value magnitudes",
13685 { },
13686 INTERNAL | FLAG_NO_DATA,
13687 { },
13688 { { 0, 1 } },
13689 .fill_helper = bpf_fill_jmp_jgt_reg,
13690 .nr_testruns = NR_PATTERN_RUNS,
13691 },
13692 {
13693 "JMP_JGE_X: all register value magnitudes",
13694 { },
13695 INTERNAL | FLAG_NO_DATA,
13696 { },
13697 { { 0, 1 } },
13698 .fill_helper = bpf_fill_jmp_jge_reg,
13699 .nr_testruns = NR_PATTERN_RUNS,
13700 },
13701 {
13702 "JMP_JLT_X: all register value magnitudes",
13703 { },
13704 INTERNAL | FLAG_NO_DATA,
13705 { },
13706 { { 0, 1 } },
13707 .fill_helper = bpf_fill_jmp_jlt_reg,
13708 .nr_testruns = NR_PATTERN_RUNS,
13709 },
13710 {
13711 "JMP_JLE_X: all register value magnitudes",
13712 { },
13713 INTERNAL | FLAG_NO_DATA,
13714 { },
13715 { { 0, 1 } },
13716 .fill_helper = bpf_fill_jmp_jle_reg,
13717 .nr_testruns = NR_PATTERN_RUNS,
13718 },
13719 {
13720 "JMP_JSGT_X: all register value magnitudes",
13721 { },
13722 INTERNAL | FLAG_NO_DATA,
13723 { },
13724 { { 0, 1 } },
13725 .fill_helper = bpf_fill_jmp_jsgt_reg,
13726 .nr_testruns = NR_PATTERN_RUNS,
13727 },
13728 {
13729 "JMP_JSGE_X: all register value magnitudes",
13730 { },
13731 INTERNAL | FLAG_NO_DATA,
13732 { },
13733 { { 0, 1 } },
13734 .fill_helper = bpf_fill_jmp_jsge_reg,
13735 .nr_testruns = NR_PATTERN_RUNS,
13736 },
13737 {
13738 "JMP_JSLT_X: all register value magnitudes",
13739 { },
13740 INTERNAL | FLAG_NO_DATA,
13741 { },
13742 { { 0, 1 } },
13743 .fill_helper = bpf_fill_jmp_jslt_reg,
13744 .nr_testruns = NR_PATTERN_RUNS,
13745 },
13746 {
13747 "JMP_JSLE_X: all register value magnitudes",
13748 { },
13749 INTERNAL | FLAG_NO_DATA,
13750 { },
13751 { { 0, 1 } },
13752 .fill_helper = bpf_fill_jmp_jsle_reg,
13753 .nr_testruns = NR_PATTERN_RUNS,
13754 },
13755 /* JMP32 immediate magnitudes */
13756 {
13757 "JMP32_JSET_K: all immediate value magnitudes",
13758 { },
13759 INTERNAL | FLAG_NO_DATA,
13760 { },
13761 { { 0, 1 } },
13762 .fill_helper = bpf_fill_jmp32_jset_imm,
13763 .nr_testruns = NR_PATTERN_RUNS,
13764 },
13765 {
13766 "JMP32_JEQ_K: all immediate value magnitudes",
13767 { },
13768 INTERNAL | FLAG_NO_DATA,
13769 { },
13770 { { 0, 1 } },
13771 .fill_helper = bpf_fill_jmp32_jeq_imm,
13772 .nr_testruns = NR_PATTERN_RUNS,
13773 },
13774 {
13775 "JMP32_JNE_K: all immediate value magnitudes",
13776 { },
13777 INTERNAL | FLAG_NO_DATA,
13778 { },
13779 { { 0, 1 } },
13780 .fill_helper = bpf_fill_jmp32_jne_imm,
13781 .nr_testruns = NR_PATTERN_RUNS,
13782 },
13783 {
13784 "JMP32_JGT_K: all immediate value magnitudes",
13785 { },
13786 INTERNAL | FLAG_NO_DATA,
13787 { },
13788 { { 0, 1 } },
13789 .fill_helper = bpf_fill_jmp32_jgt_imm,
13790 .nr_testruns = NR_PATTERN_RUNS,
13791 },
13792 {
13793 "JMP32_JGE_K: all immediate value magnitudes",
13794 { },
13795 INTERNAL | FLAG_NO_DATA,
13796 { },
13797 { { 0, 1 } },
13798 .fill_helper = bpf_fill_jmp32_jge_imm,
13799 .nr_testruns = NR_PATTERN_RUNS,
13800 },
13801 {
13802 "JMP32_JLT_K: all immediate value magnitudes",
13803 { },
13804 INTERNAL | FLAG_NO_DATA,
13805 { },
13806 { { 0, 1 } },
13807 .fill_helper = bpf_fill_jmp32_jlt_imm,
13808 .nr_testruns = NR_PATTERN_RUNS,
13809 },
13810 {
13811 "JMP32_JLE_K: all immediate value magnitudes",
13812 { },
13813 INTERNAL | FLAG_NO_DATA,
13814 { },
13815 { { 0, 1 } },
13816 .fill_helper = bpf_fill_jmp32_jle_imm,
13817 .nr_testruns = NR_PATTERN_RUNS,
13818 },
13819 {
13820 "JMP32_JSGT_K: all immediate value magnitudes",
13821 { },
13822 INTERNAL | FLAG_NO_DATA,
13823 { },
13824 { { 0, 1 } },
13825 .fill_helper = bpf_fill_jmp32_jsgt_imm,
13826 .nr_testruns = NR_PATTERN_RUNS,
13827 },
13828 {
13829 "JMP32_JSGE_K: all immediate value magnitudes",
13830 { },
13831 INTERNAL | FLAG_NO_DATA,
13832 { },
13833 { { 0, 1 } },
13834 .fill_helper = bpf_fill_jmp32_jsge_imm,
13835 .nr_testruns = NR_PATTERN_RUNS,
13836 },
13837 {
13838 "JMP32_JSLT_K: all immediate value magnitudes",
13839 { },
13840 INTERNAL | FLAG_NO_DATA,
13841 { },
13842 { { 0, 1 } },
13843 .fill_helper = bpf_fill_jmp32_jslt_imm,
13844 .nr_testruns = NR_PATTERN_RUNS,
13845 },
13846 {
13847 "JMP32_JSLE_K: all immediate value magnitudes",
13848 { },
13849 INTERNAL | FLAG_NO_DATA,
13850 { },
13851 { { 0, 1 } },
13852 .fill_helper = bpf_fill_jmp32_jsle_imm,
13853 .nr_testruns = NR_PATTERN_RUNS,
13854 },
13855 /* JMP32 register magnitudes */
13856 {
13857 "JMP32_JSET_X: all register value magnitudes",
13858 { },
13859 INTERNAL | FLAG_NO_DATA,
13860 { },
13861 { { 0, 1 } },
13862 .fill_helper = bpf_fill_jmp32_jset_reg,
13863 .nr_testruns = NR_PATTERN_RUNS,
13864 },
13865 {
13866 "JMP32_JEQ_X: all register value magnitudes",
13867 { },
13868 INTERNAL | FLAG_NO_DATA,
13869 { },
13870 { { 0, 1 } },
13871 .fill_helper = bpf_fill_jmp32_jeq_reg,
13872 .nr_testruns = NR_PATTERN_RUNS,
13873 },
13874 {
13875 "JMP32_JNE_X: all register value magnitudes",
13876 { },
13877 INTERNAL | FLAG_NO_DATA,
13878 { },
13879 { { 0, 1 } },
13880 .fill_helper = bpf_fill_jmp32_jne_reg,
13881 .nr_testruns = NR_PATTERN_RUNS,
13882 },
13883 {
13884 "JMP32_JGT_X: all register value magnitudes",
13885 { },
13886 INTERNAL | FLAG_NO_DATA,
13887 { },
13888 { { 0, 1 } },
13889 .fill_helper = bpf_fill_jmp32_jgt_reg,
13890 .nr_testruns = NR_PATTERN_RUNS,
13891 },
13892 {
13893 "JMP32_JGE_X: all register value magnitudes",
13894 { },
13895 INTERNAL | FLAG_NO_DATA,
13896 { },
13897 { { 0, 1 } },
13898 .fill_helper = bpf_fill_jmp32_jge_reg,
13899 .nr_testruns = NR_PATTERN_RUNS,
13900 },
13901 {
13902 "JMP32_JLT_X: all register value magnitudes",
13903 { },
13904 INTERNAL | FLAG_NO_DATA,
13905 { },
13906 { { 0, 1 } },
13907 .fill_helper = bpf_fill_jmp32_jlt_reg,
13908 .nr_testruns = NR_PATTERN_RUNS,
13909 },
13910 {
13911 "JMP32_JLE_X: all register value magnitudes",
13912 { },
13913 INTERNAL | FLAG_NO_DATA,
13914 { },
13915 { { 0, 1 } },
13916 .fill_helper = bpf_fill_jmp32_jle_reg,
13917 .nr_testruns = NR_PATTERN_RUNS,
13918 },
13919 {
13920 "JMP32_JSGT_X: all register value magnitudes",
13921 { },
13922 INTERNAL | FLAG_NO_DATA,
13923 { },
13924 { { 0, 1 } },
13925 .fill_helper = bpf_fill_jmp32_jsgt_reg,
13926 .nr_testruns = NR_PATTERN_RUNS,
13927 },
13928 {
13929 "JMP32_JSGE_X: all register value magnitudes",
13930 { },
13931 INTERNAL | FLAG_NO_DATA,
13932 { },
13933 { { 0, 1 } },
13934 .fill_helper = bpf_fill_jmp32_jsge_reg,
13935 .nr_testruns = NR_PATTERN_RUNS,
13936 },
13937 {
13938 "JMP32_JSLT_X: all register value magnitudes",
13939 { },
13940 INTERNAL | FLAG_NO_DATA,
13941 { },
13942 { { 0, 1 } },
13943 .fill_helper = bpf_fill_jmp32_jslt_reg,
13944 .nr_testruns = NR_PATTERN_RUNS,
13945 },
13946 {
13947 "JMP32_JSLE_X: all register value magnitudes",
13948 { },
13949 INTERNAL | FLAG_NO_DATA,
13950 { },
13951 { { 0, 1 } },
13952 .fill_helper = bpf_fill_jmp32_jsle_reg,
13953 .nr_testruns = NR_PATTERN_RUNS,
13954 },
13955 /* Conditional jumps with constant decision */
13956 {
13957 "JMP_JSET_K: imm = 0 -> never taken",
13958 .u.insns_int = {
13959 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13960 BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
13961 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13962 BPF_EXIT_INSN(),
13963 },
13964 INTERNAL | FLAG_NO_DATA,
13965 { },
13966 { { 0, 0 } },
13967 },
13968 {
13969 "JMP_JLT_K: imm = 0 -> never taken",
13970 .u.insns_int = {
13971 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13972 BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
13973 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13974 BPF_EXIT_INSN(),
13975 },
13976 INTERNAL | FLAG_NO_DATA,
13977 { },
13978 { { 0, 0 } },
13979 },
13980 {
13981 "JMP_JGE_K: imm = 0 -> always taken",
13982 .u.insns_int = {
13983 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13984 BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
13985 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13986 BPF_EXIT_INSN(),
13987 },
13988 INTERNAL | FLAG_NO_DATA,
13989 { },
13990 { { 0, 1 } },
13991 },
13992 {
13993 "JMP_JGT_K: imm = 0xffffffff -> never taken",
13994 .u.insns_int = {
13995 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13996 BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
13997 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13998 BPF_EXIT_INSN(),
13999 },
14000 INTERNAL | FLAG_NO_DATA,
14001 { },
14002 { { 0, 0 } },
14003 },
14004 {
14005 "JMP_JLE_K: imm = 0xffffffff -> always taken",
14006 .u.insns_int = {
14007 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14008 BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
14009 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14010 BPF_EXIT_INSN(),
14011 },
14012 INTERNAL | FLAG_NO_DATA,
14013 { },
14014 { { 0, 1 } },
14015 },
14016 {
14017 "JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
14018 .u.insns_int = {
14019 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14020 BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
14021 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14022 BPF_EXIT_INSN(),
14023 },
14024 INTERNAL | FLAG_NO_DATA,
14025 { },
14026 { { 0, 0 } },
14027 },
14028 {
14029 "JMP32_JSGE_K: imm = -0x80000000 -> always taken",
14030 .u.insns_int = {
14031 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14032 BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
14033 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14034 BPF_EXIT_INSN(),
14035 },
14036 INTERNAL | FLAG_NO_DATA,
14037 { },
14038 { { 0, 1 } },
14039 },
14040 {
14041 "JMP32_JSLT_K: imm = -0x80000000 -> never taken",
14042 .u.insns_int = {
14043 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14044 BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
14045 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14046 BPF_EXIT_INSN(),
14047 },
14048 INTERNAL | FLAG_NO_DATA,
14049 { },
14050 { { 0, 0 } },
14051 },
14052 {
14053 "JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
14054 .u.insns_int = {
14055 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14056 BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
14057 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14058 BPF_EXIT_INSN(),
14059 },
14060 INTERNAL | FLAG_NO_DATA,
14061 { },
14062 { { 0, 1 } },
14063 },
14064 {
14065 "JMP_JEQ_X: dst = src -> always taken",
14066 .u.insns_int = {
14067 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14068 BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
14069 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14070 BPF_EXIT_INSN(),
14071 },
14072 INTERNAL | FLAG_NO_DATA,
14073 { },
14074 { { 0, 1 } },
14075 },
14076 {
14077 "JMP_JGE_X: dst = src -> always taken",
14078 .u.insns_int = {
14079 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14080 BPF_JMP_REG(BPF_JGE, R1, R1, 1),
14081 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14082 BPF_EXIT_INSN(),
14083 },
14084 INTERNAL | FLAG_NO_DATA,
14085 { },
14086 { { 0, 1 } },
14087 },
14088 {
14089 "JMP_JLE_X: dst = src -> always taken",
14090 .u.insns_int = {
14091 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14092 BPF_JMP_REG(BPF_JLE, R1, R1, 1),
14093 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14094 BPF_EXIT_INSN(),
14095 },
14096 INTERNAL | FLAG_NO_DATA,
14097 { },
14098 { { 0, 1 } },
14099 },
14100 {
14101 "JMP_JSGE_X: dst = src -> always taken",
14102 .u.insns_int = {
14103 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14104 BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
14105 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14106 BPF_EXIT_INSN(),
14107 },
14108 INTERNAL | FLAG_NO_DATA,
14109 { },
14110 { { 0, 1 } },
14111 },
14112 {
14113 "JMP_JSLE_X: dst = src -> always taken",
14114 .u.insns_int = {
14115 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14116 BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
14117 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14118 BPF_EXIT_INSN(),
14119 },
14120 INTERNAL | FLAG_NO_DATA,
14121 { },
14122 { { 0, 1 } },
14123 },
14124 {
14125 "JMP_JNE_X: dst = src -> never taken",
14126 .u.insns_int = {
14127 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14128 BPF_JMP_REG(BPF_JNE, R1, R1, 1),
14129 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14130 BPF_EXIT_INSN(),
14131 },
14132 INTERNAL | FLAG_NO_DATA,
14133 { },
14134 { { 0, 0 } },
14135 },
14136 {
14137 "JMP_JGT_X: dst = src -> never taken",
14138 .u.insns_int = {
14139 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14140 BPF_JMP_REG(BPF_JGT, R1, R1, 1),
14141 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14142 BPF_EXIT_INSN(),
14143 },
14144 INTERNAL | FLAG_NO_DATA,
14145 { },
14146 { { 0, 0 } },
14147 },
14148 {
14149 "JMP_JLT_X: dst = src -> never taken",
14150 .u.insns_int = {
14151 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14152 BPF_JMP_REG(BPF_JLT, R1, R1, 1),
14153 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14154 BPF_EXIT_INSN(),
14155 },
14156 INTERNAL | FLAG_NO_DATA,
14157 { },
14158 { { 0, 0 } },
14159 },
14160 {
14161 "JMP_JSGT_X: dst = src -> never taken",
14162 .u.insns_int = {
14163 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14164 BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
14165 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14166 BPF_EXIT_INSN(),
14167 },
14168 INTERNAL | FLAG_NO_DATA,
14169 { },
14170 { { 0, 0 } },
14171 },
14172 {
14173 "JMP_JSLT_X: dst = src -> never taken",
14174 .u.insns_int = {
14175 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14176 BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
14177 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14178 BPF_EXIT_INSN(),
14179 },
14180 INTERNAL | FLAG_NO_DATA,
14181 { },
14182 { { 0, 0 } },
14183 },
14184 /* Short relative jumps */
14185 {
14186 "Short relative jump: offset=0",
14187 .u.insns_int = {
14188 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14189 BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
14190 BPF_EXIT_INSN(),
14191 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14192 },
14193 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14194 { },
14195 { { 0, 0 } },
14196 },
14197 {
14198 "Short relative jump: offset=1",
14199 .u.insns_int = {
14200 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14201 BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
14202 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14203 BPF_EXIT_INSN(),
14204 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14205 },
14206 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14207 { },
14208 { { 0, 0 } },
14209 },
14210 {
14211 "Short relative jump: offset=2",
14212 .u.insns_int = {
14213 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14214 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
14215 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14216 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14217 BPF_EXIT_INSN(),
14218 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14219 },
14220 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14221 { },
14222 { { 0, 0 } },
14223 },
14224 {
14225 "Short relative jump: offset=3",
14226 .u.insns_int = {
14227 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14228 BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
14229 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14230 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14231 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14232 BPF_EXIT_INSN(),
14233 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14234 },
14235 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14236 { },
14237 { { 0, 0 } },
14238 },
14239 {
14240 "Short relative jump: offset=4",
14241 .u.insns_int = {
14242 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14243 BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
14244 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14245 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14246 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14247 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14248 BPF_EXIT_INSN(),
14249 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14250 },
14251 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14252 { },
14253 { { 0, 0 } },
14254 },
14255 /* Conditional branch conversions */
14256 {
14257 "Long conditional jump: taken at runtime (32 bits)",
14258 { },
14259 INTERNAL | FLAG_NO_DATA,
14260 { },
14261 { { 0, 1 } },
14262 .fill_helper = bpf_fill_max_jmp_taken_32,
14263 },
14264 {
14265 "Long conditional jump: not taken at runtime (32 bits)",
14266 { },
14267 INTERNAL | FLAG_NO_DATA,
14268 { },
14269 { { 0, 2 } },
14270 .fill_helper = bpf_fill_max_jmp_not_taken_32,
14271 },
14272 {
14273 "Long conditional jump: always taken, known at JIT time (32 bits)",
14274 { },
14275 INTERNAL | FLAG_NO_DATA,
14276 { },
14277 { { 0, 1 } },
14278 .fill_helper = bpf_fill_max_jmp_always_taken_32,
14279 },
14280 {
14281 "Long conditional jump: never taken, known at JIT time (32 bits)",
14282 { },
14283 INTERNAL | FLAG_NO_DATA,
14284 { },
14285 { { 0, 2 } },
14286 .fill_helper = bpf_fill_max_jmp_never_taken_32,
14287 },
14288 {
14289 "Long conditional jump: taken at runtime",
14290 { },
14291 INTERNAL | FLAG_NO_DATA,
14292 { },
14293 { { 0, 1 } },
14294 .fill_helper = bpf_fill_max_jmp_taken,
14295 },
14296 {
14297 "Long conditional jump: not taken at runtime",
14298 { },
14299 INTERNAL | FLAG_NO_DATA,
14300 { },
14301 { { 0, 2 } },
14302 .fill_helper = bpf_fill_max_jmp_not_taken,
14303 },
14304 {
14305 "Long conditional jump: always taken, known at JIT time",
14306 { },
14307 INTERNAL | FLAG_NO_DATA,
14308 { },
14309 { { 0, 1 } },
14310 .fill_helper = bpf_fill_max_jmp_always_taken,
14311 },
14312 {
14313 "Long conditional jump: never taken, known at JIT time",
14314 { },
14315 INTERNAL | FLAG_NO_DATA,
14316 { },
14317 { { 0, 2 } },
14318 .fill_helper = bpf_fill_max_jmp_never_taken,
14319 },
14320 /* Staggered jump sequences, immediate */
14321 {
14322 "Staggered jumps: JMP_JA",
14323 { },
14324 INTERNAL | FLAG_NO_DATA,
14325 { },
14326 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14327 .fill_helper = bpf_fill_staggered_ja,
14328 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14329 },
14330 {
14331 "Staggered jumps: JMP_JEQ_K",
14332 { },
14333 INTERNAL | FLAG_NO_DATA,
14334 { },
14335 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14336 .fill_helper = bpf_fill_staggered_jeq_imm,
14337 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14338 },
14339 {
14340 "Staggered jumps: JMP_JNE_K",
14341 { },
14342 INTERNAL | FLAG_NO_DATA,
14343 { },
14344 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14345 .fill_helper = bpf_fill_staggered_jne_imm,
14346 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14347 },
14348 {
14349 "Staggered jumps: JMP_JSET_K",
14350 { },
14351 INTERNAL | FLAG_NO_DATA,
14352 { },
14353 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14354 .fill_helper = bpf_fill_staggered_jset_imm,
14355 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14356 },
14357 {
14358 "Staggered jumps: JMP_JGT_K",
14359 { },
14360 INTERNAL | FLAG_NO_DATA,
14361 { },
14362 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14363 .fill_helper = bpf_fill_staggered_jgt_imm,
14364 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14365 },
14366 {
14367 "Staggered jumps: JMP_JGE_K",
14368 { },
14369 INTERNAL | FLAG_NO_DATA,
14370 { },
14371 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14372 .fill_helper = bpf_fill_staggered_jge_imm,
14373 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14374 },
14375 {
14376 "Staggered jumps: JMP_JLT_K",
14377 { },
14378 INTERNAL | FLAG_NO_DATA,
14379 { },
14380 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14381 .fill_helper = bpf_fill_staggered_jlt_imm,
14382 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14383 },
14384 {
14385 "Staggered jumps: JMP_JLE_K",
14386 { },
14387 INTERNAL | FLAG_NO_DATA,
14388 { },
14389 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14390 .fill_helper = bpf_fill_staggered_jle_imm,
14391 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14392 },
14393 {
14394 "Staggered jumps: JMP_JSGT_K",
14395 { },
14396 INTERNAL | FLAG_NO_DATA,
14397 { },
14398 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14399 .fill_helper = bpf_fill_staggered_jsgt_imm,
14400 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14401 },
14402 {
14403 "Staggered jumps: JMP_JSGE_K",
14404 { },
14405 INTERNAL | FLAG_NO_DATA,
14406 { },
14407 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14408 .fill_helper = bpf_fill_staggered_jsge_imm,
14409 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14410 },
14411 {
14412 "Staggered jumps: JMP_JSLT_K",
14413 { },
14414 INTERNAL | FLAG_NO_DATA,
14415 { },
14416 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14417 .fill_helper = bpf_fill_staggered_jslt_imm,
14418 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14419 },
14420 {
14421 "Staggered jumps: JMP_JSLE_K",
14422 { },
14423 INTERNAL | FLAG_NO_DATA,
14424 { },
14425 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14426 .fill_helper = bpf_fill_staggered_jsle_imm,
14427 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14428 },
14429 /* Staggered jump sequences, register */
14430 {
14431 "Staggered jumps: JMP_JEQ_X",
14432 { },
14433 INTERNAL | FLAG_NO_DATA,
14434 { },
14435 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14436 .fill_helper = bpf_fill_staggered_jeq_reg,
14437 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14438 },
14439 {
14440 "Staggered jumps: JMP_JNE_X",
14441 { },
14442 INTERNAL | FLAG_NO_DATA,
14443 { },
14444 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14445 .fill_helper = bpf_fill_staggered_jne_reg,
14446 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14447 },
14448 {
14449 "Staggered jumps: JMP_JSET_X",
14450 { },
14451 INTERNAL | FLAG_NO_DATA,
14452 { },
14453 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14454 .fill_helper = bpf_fill_staggered_jset_reg,
14455 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14456 },
14457 {
14458 "Staggered jumps: JMP_JGT_X",
14459 { },
14460 INTERNAL | FLAG_NO_DATA,
14461 { },
14462 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14463 .fill_helper = bpf_fill_staggered_jgt_reg,
14464 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14465 },
14466 {
14467 "Staggered jumps: JMP_JGE_X",
14468 { },
14469 INTERNAL | FLAG_NO_DATA,
14470 { },
14471 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14472 .fill_helper = bpf_fill_staggered_jge_reg,
14473 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14474 },
14475 {
14476 "Staggered jumps: JMP_JLT_X",
14477 { },
14478 INTERNAL | FLAG_NO_DATA,
14479 { },
14480 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14481 .fill_helper = bpf_fill_staggered_jlt_reg,
14482 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14483 },
14484 {
14485 "Staggered jumps: JMP_JLE_X",
14486 { },
14487 INTERNAL | FLAG_NO_DATA,
14488 { },
14489 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14490 .fill_helper = bpf_fill_staggered_jle_reg,
14491 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14492 },
14493 {
14494 "Staggered jumps: JMP_JSGT_X",
14495 { },
14496 INTERNAL | FLAG_NO_DATA,
14497 { },
14498 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14499 .fill_helper = bpf_fill_staggered_jsgt_reg,
14500 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14501 },
14502 {
14503 "Staggered jumps: JMP_JSGE_X",
14504 { },
14505 INTERNAL | FLAG_NO_DATA,
14506 { },
14507 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14508 .fill_helper = bpf_fill_staggered_jsge_reg,
14509 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14510 },
14511 {
14512 "Staggered jumps: JMP_JSLT_X",
14513 { },
14514 INTERNAL | FLAG_NO_DATA,
14515 { },
14516 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14517 .fill_helper = bpf_fill_staggered_jslt_reg,
14518 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14519 },
14520 {
14521 "Staggered jumps: JMP_JSLE_X",
14522 { },
14523 INTERNAL | FLAG_NO_DATA,
14524 { },
14525 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14526 .fill_helper = bpf_fill_staggered_jsle_reg,
14527 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14528 },
14529 /* Staggered jump sequences, JMP32 immediate */
14530 {
14531 "Staggered jumps: JMP32_JEQ_K",
14532 { },
14533 INTERNAL | FLAG_NO_DATA,
14534 { },
14535 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14536 .fill_helper = bpf_fill_staggered_jeq32_imm,
14537 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14538 },
14539 {
14540 "Staggered jumps: JMP32_JNE_K",
14541 { },
14542 INTERNAL | FLAG_NO_DATA,
14543 { },
14544 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14545 .fill_helper = bpf_fill_staggered_jne32_imm,
14546 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14547 },
14548 {
14549 "Staggered jumps: JMP32_JSET_K",
14550 { },
14551 INTERNAL | FLAG_NO_DATA,
14552 { },
14553 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14554 .fill_helper = bpf_fill_staggered_jset32_imm,
14555 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14556 },
14557 {
14558 "Staggered jumps: JMP32_JGT_K",
14559 { },
14560 INTERNAL | FLAG_NO_DATA,
14561 { },
14562 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14563 .fill_helper = bpf_fill_staggered_jgt32_imm,
14564 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14565 },
14566 {
14567 "Staggered jumps: JMP32_JGE_K",
14568 { },
14569 INTERNAL | FLAG_NO_DATA,
14570 { },
14571 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14572 .fill_helper = bpf_fill_staggered_jge32_imm,
14573 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14574 },
14575 {
14576 "Staggered jumps: JMP32_JLT_K",
14577 { },
14578 INTERNAL | FLAG_NO_DATA,
14579 { },
14580 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14581 .fill_helper = bpf_fill_staggered_jlt32_imm,
14582 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14583 },
14584 {
14585 "Staggered jumps: JMP32_JLE_K",
14586 { },
14587 INTERNAL | FLAG_NO_DATA,
14588 { },
14589 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14590 .fill_helper = bpf_fill_staggered_jle32_imm,
14591 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14592 },
14593 {
14594 "Staggered jumps: JMP32_JSGT_K",
14595 { },
14596 INTERNAL | FLAG_NO_DATA,
14597 { },
14598 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14599 .fill_helper = bpf_fill_staggered_jsgt32_imm,
14600 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14601 },
14602 {
14603 "Staggered jumps: JMP32_JSGE_K",
14604 { },
14605 INTERNAL | FLAG_NO_DATA,
14606 { },
14607 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14608 .fill_helper = bpf_fill_staggered_jsge32_imm,
14609 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14610 },
14611 {
14612 "Staggered jumps: JMP32_JSLT_K",
14613 { },
14614 INTERNAL | FLAG_NO_DATA,
14615 { },
14616 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14617 .fill_helper = bpf_fill_staggered_jslt32_imm,
14618 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14619 },
14620 {
14621 "Staggered jumps: JMP32_JSLE_K",
14622 { },
14623 INTERNAL | FLAG_NO_DATA,
14624 { },
14625 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14626 .fill_helper = bpf_fill_staggered_jsle32_imm,
14627 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14628 },
14629 /* Staggered jump sequences, JMP32 register */
14630 {
14631 "Staggered jumps: JMP32_JEQ_X",
14632 { },
14633 INTERNAL | FLAG_NO_DATA,
14634 { },
14635 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14636 .fill_helper = bpf_fill_staggered_jeq32_reg,
14637 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14638 },
14639 {
14640 "Staggered jumps: JMP32_JNE_X",
14641 { },
14642 INTERNAL | FLAG_NO_DATA,
14643 { },
14644 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14645 .fill_helper = bpf_fill_staggered_jne32_reg,
14646 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14647 },
14648 {
14649 "Staggered jumps: JMP32_JSET_X",
14650 { },
14651 INTERNAL | FLAG_NO_DATA,
14652 { },
14653 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14654 .fill_helper = bpf_fill_staggered_jset32_reg,
14655 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14656 },
14657 {
14658 "Staggered jumps: JMP32_JGT_X",
14659 { },
14660 INTERNAL | FLAG_NO_DATA,
14661 { },
14662 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14663 .fill_helper = bpf_fill_staggered_jgt32_reg,
14664 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14665 },
14666 {
14667 "Staggered jumps: JMP32_JGE_X",
14668 { },
14669 INTERNAL | FLAG_NO_DATA,
14670 { },
14671 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14672 .fill_helper = bpf_fill_staggered_jge32_reg,
14673 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14674 },
14675 {
14676 "Staggered jumps: JMP32_JLT_X",
14677 { },
14678 INTERNAL | FLAG_NO_DATA,
14679 { },
14680 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14681 .fill_helper = bpf_fill_staggered_jlt32_reg,
14682 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14683 },
14684 {
14685 "Staggered jumps: JMP32_JLE_X",
14686 { },
14687 INTERNAL | FLAG_NO_DATA,
14688 { },
14689 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14690 .fill_helper = bpf_fill_staggered_jle32_reg,
14691 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14692 },
14693 {
14694 "Staggered jumps: JMP32_JSGT_X",
14695 { },
14696 INTERNAL | FLAG_NO_DATA,
14697 { },
14698 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14699 .fill_helper = bpf_fill_staggered_jsgt32_reg,
14700 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14701 },
14702 {
14703 "Staggered jumps: JMP32_JSGE_X",
14704 { },
14705 INTERNAL | FLAG_NO_DATA,
14706 { },
14707 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14708 .fill_helper = bpf_fill_staggered_jsge32_reg,
14709 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14710 },
14711 {
14712 "Staggered jumps: JMP32_JSLT_X",
14713 { },
14714 INTERNAL | FLAG_NO_DATA,
14715 { },
14716 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14717 .fill_helper = bpf_fill_staggered_jslt32_reg,
14718 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14719 },
14720 {
14721 "Staggered jumps: JMP32_JSLE_X",
14722 { },
14723 INTERNAL | FLAG_NO_DATA,
14724 { },
14725 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14726 .fill_helper = bpf_fill_staggered_jsle32_reg,
14727 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14728 },
14729 };
14730
14731 static struct net_device dev;
14732
/* Build a fake skb carrying @size bytes of @buf as linear data, with all
 * metadata fields set to the fixed SKB_* test constants defined at the top
 * of this file, so that test programs probing mark/hash/queue/VLAN/device
 * fields see known values. Returns NULL if @size does not fit in MAX_DATA
 * or the allocation fails; caller owns the skb and frees it via kfree_skb().
 */
static struct sk_buff *populate_skb(char *buf, int size)
{
	struct sk_buff *skb;

	/* Payload must leave room within the fixed MAX_DATA allocation. */
	if (size >= MAX_DATA)
		return NULL;

	skb = alloc_skb(MAX_DATA, GFP_KERNEL);
	if (!skb)
		return NULL;

	__skb_put_data(skb, buf, size);

	/* Initialize a fake skb with test pattern. */
	skb_reset_mac_header(skb);
	skb->protocol = htons(ETH_P_IP);
	skb->pkt_type = SKB_TYPE;
	skb->mark = SKB_MARK;
	skb->hash = SKB_HASH;
	skb->queue_mapping = SKB_QUEUE_MAP;
	skb->vlan_tci = SKB_VLAN_TCI;
	skb->vlan_proto = htons(ETH_P_IP);
	/* Attach the file-level dummy net_device so device-field loads work. */
	dev_net_set(&dev, &init_net);
	skb->dev = &dev;
	skb->dev->ifindex = SKB_DEV_IFINDEX;
	skb->dev->type = SKB_DEV_TYPE;
	/* Network header sits after the Ethernet header, clamped to the data. */
	skb_set_network_header(skb, min(size, ETH_HLEN));

	return skb;
}
14763
generate_test_data(struct bpf_test * test,int sub)14764 static void *generate_test_data(struct bpf_test *test, int sub)
14765 {
14766 struct sk_buff *skb;
14767 struct page *page;
14768
14769 if (test->aux & FLAG_NO_DATA)
14770 return NULL;
14771
14772 if (test->aux & FLAG_LARGE_MEM)
14773 return kmalloc(test->test[sub].data_size, GFP_KERNEL);
14774
14775 /* Test case expects an skb, so populate one. Various
14776 * subtests generate skbs of different sizes based on
14777 * the same data.
14778 */
14779 skb = populate_skb(test->data, test->test[sub].data_size);
14780 if (!skb)
14781 return NULL;
14782
14783 if (test->aux & FLAG_SKB_FRAG) {
14784 /*
14785 * when the test requires a fragmented skb, add a
14786 * single fragment to the skb, filled with
14787 * test->frag_data.
14788 */
14789 page = alloc_page(GFP_KERNEL);
14790 if (!page)
14791 goto err_kfree_skb;
14792
14793 memcpy(page_address(page), test->frag_data, MAX_DATA);
14794 skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
14795 }
14796
14797 return skb;
14798 err_kfree_skb:
14799 kfree_skb(skb);
14800 return NULL;
14801 }
14802
release_test_data(const struct bpf_test * test,void * data)14803 static void release_test_data(const struct bpf_test *test, void *data)
14804 {
14805 if (test->aux & FLAG_NO_DATA)
14806 return;
14807
14808 if (test->aux & FLAG_LARGE_MEM)
14809 kfree(data);
14810 else
14811 kfree_skb(data);
14812 }
14813
filter_length(int which)14814 static int filter_length(int which)
14815 {
14816 struct sock_filter *fp;
14817 int len;
14818
14819 if (tests[which].fill_helper)
14820 return tests[which].u.ptr.len;
14821
14822 fp = tests[which].u.insns;
14823 for (len = MAX_INSNS - 1; len > 0; --len)
14824 if (fp[len].code != 0 || fp[len].k != 0)
14825 break;
14826
14827 return len + 1;
14828 }
14829
filter_pointer(int which)14830 static void *filter_pointer(int which)
14831 {
14832 if (tests[which].fill_helper)
14833 return tests[which].u.ptr.insns;
14834 else
14835 return tests[which].u.insns;
14836 }
14837
/* Build a runnable bpf_prog for test case @which.
 *
 * CLASSIC tests go through bpf_prog_create() and hence the checker;
 * INTERNAL (eBPF) tests are copied verbatim and handed straight to
 * bpf_prog_select_runtime().
 *
 * Returns the program on success, or NULL with *err holding the status;
 * *err == 0 together with a NULL return means an expected (and thus
 * passing) rejection.
 */
static struct bpf_prog *generate_filter(int which, int *err)
{
	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
	unsigned int flen = filter_length(which);
	void *fptr = filter_pointer(which);
	struct sock_fprog_kern fprog;
	struct bpf_prog *fp;

	/* NOTE(review): every test is expected to set CLASSIC or INTERNAL
	 * in ->aux; if neither were set, fp would be returned
	 * uninitialized — confirm new tests always carry a type bit.
	 */
	switch (test_type) {
	case CLASSIC:
		fprog.filter = fptr;
		fprog.len = flen;

		*err = bpf_prog_create(&fp, &fprog);
		if (tests[which].aux & FLAG_EXPECTED_FAIL) {
			if (*err == tests[which].expected_errcode) {
				pr_cont("PASS\n");
				/* Verifier rejected filter as expected. */
				*err = 0;
				return NULL;
			} else {
				pr_cont("UNEXPECTED_PASS\n");
				/* Verifier didn't reject the test that's
				 * bad enough, just return!
				 */
				*err = -EINVAL;
				return NULL;
			}
		}
		if (*err) {
			pr_cont("FAIL to prog_create err=%d len=%d\n",
				*err, fprog.len);
			return NULL;
		}
		break;

	case INTERNAL:
		fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
		if (fp == NULL) {
			pr_cont("UNEXPECTED_FAIL no memory left\n");
			*err = -ENOMEM;
			return NULL;
		}

		fp->len = flen;
		/* Type doesn't really matter here as long as it's not unspec. */
		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
		memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
		fp->aux->stack_depth = tests[which].stack_depth;
		fp->aux->verifier_zext = !!(tests[which].aux &
					    FLAG_VERIFIER_ZEXT);

		/* We cannot error here as we don't need type compatibility
		 * checks.
		 */
		fp = bpf_prog_select_runtime(fp, err);
		if (*err) {
			pr_cont("FAIL to select_runtime err=%d\n", *err);
			return NULL;
		}
		break;
	}

	*err = 0;
	return fp;
}
14904
release_filter(struct bpf_prog * fp,int which)14905 static void release_filter(struct bpf_prog *fp, int which)
14906 {
14907 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14908
14909 switch (test_type) {
14910 case CLASSIC:
14911 bpf_prog_destroy(fp);
14912 break;
14913 case INTERNAL:
14914 bpf_prog_free(fp);
14915 break;
14916 }
14917 }
14918
/* Execute @fp over @data for @runs iterations with migration disabled,
 * reporting the mean per-run duration in nanoseconds through @duration.
 * Returns the value produced by the final program run.
 */
static int __run_one(const struct bpf_prog *fp, const void *data,
		     int runs, u64 *duration)
{
	u64 t_begin, t_end;
	int i, ret = 0;

	migrate_disable();
	t_begin = ktime_get_ns();

	for (i = 0; i < runs; i++)
		ret = bpf_prog_run(fp, data);

	t_end = ktime_get_ns();
	migrate_enable();

	*duration = t_end - t_begin;
	do_div(*duration, runs);

	return ret;
}
14939
/* Execute program @fp against every sub-test of @test and compare each
 * run's return value with the expected result. Returns the number of
 * failing sub-tests (0 on full success).
 */
static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
{
	int err_cnt = 0, i, runs = MAX_TESTRUNS;

	/* A test may cap its own run count below the global default. */
	if (test->nr_testruns)
		runs = min(test->nr_testruns, MAX_TESTRUNS);

	for (i = 0; i < MAX_SUBTESTS; i++) {
		void *data;
		u64 duration;
		u32 ret;

		/*
		 * NOTE: Several sub-tests may be present, in which case
		 * a zero {data_size, result} tuple indicates the end of
		 * the sub-test array. The first test is always run,
		 * even if both data_size and result happen to be zero.
		 */
		if (i > 0 &&
		    test->test[i].data_size == 0 &&
		    test->test[i].result == 0)
			break;

		data = generate_test_data(test, i);
		if (!data && !(test->aux & FLAG_NO_DATA)) {
			pr_cont("data generation failed ");
			err_cnt++;
			break;
		}
		ret = __run_one(fp, data, runs, &duration);
		release_test_data(test, data);

		if (ret == test->test[i].result) {
			/* On success, log the mean per-run duration (ns). */
			pr_cont("%lld ", duration);
		} else {
			s32 res = test->test[i].result;

			pr_cont("ret %d != %d (%#x != %#x)",
				ret, res, ret, res);
			err_cnt++;
		}
	}

	return err_cnt;
}
14985
/* Run only the test whose description matches test_name (if non-empty). */
static char test_name[64];
module_param_string(test_name, test_name, sizeof(test_name), 0);

/* Run only the test with this index (if >= 0). */
static int test_id = -1;
module_param(test_id, int, 0);

/* Run only tests with index in [test_range[0], test_range[1]]. */
static int test_range[2] = { 0, INT_MAX };
module_param_array(test_range, int, NULL, 0);
14994
exclude_test(int test_id)14995 static bool exclude_test(int test_id)
14996 {
14997 return test_id < test_range[0] || test_id > test_range[1];
14998 }
14999
/* Construct the input skb for the "gso_with_rx_frags" segment test:
 * two small skbs, each with one 64-byte page frag, linked via frag_list
 * on skb[0] with dodgy TCPv4 GSO metadata. Returns the head skb, or
 * NULL on allocation failure.
 */
static __init struct sk_buff *build_test_skb(void)
{
	u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
	struct sk_buff *skb[2];
	struct page *page[2];
	int i, data_size = 8;

	for (i = 0; i < 2; i++) {
		page[i] = alloc_page(GFP_KERNEL);
		if (!page[i]) {
			if (i == 0)
				goto err_page0;
			else
				goto err_page1;
		}

		/* this will set skb[i]->head_frag */
		skb[i] = dev_alloc_skb(headroom + data_size);
		if (!skb[i]) {
			if (i == 0)
				goto err_skb0;
			else
				goto err_skb1;
		}

		skb_reserve(skb[i], headroom);
		skb_put(skb[i], data_size);
		skb[i]->protocol = htons(ETH_P_IP);
		skb_reset_network_header(skb[i]);
		skb_set_mac_header(skb[i], -ETH_HLEN);

		skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
		// skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
	}

	/* setup shinfo */
	skb_shinfo(skb[0])->gso_size = 1448;
	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
	skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
	skb_shinfo(skb[0])->gso_segs = 0;
	skb_shinfo(skb[0])->frag_list = skb[1];
	skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;

	/* adjust skb[0]'s len */
	skb[0]->len += skb[1]->len;
	skb[0]->data_len += skb[1]->data_len;
	skb[0]->truesize += skb[1]->truesize;

	return skb[0];

	/* Unwind in reverse allocation order. NOTE(review): on the
	 * err_skb1/err_page1 paths, page[0] was already attached to
	 * skb[0] by skb_add_rx_frag() in the first loop pass, so
	 * kfree_skb(skb[0]) releases it; the subsequent
	 * __free_page(page[0]) looks like a possible double free on
	 * those paths — confirm against skb_add_rx_frag() ownership.
	 */
err_skb1:
	__free_page(page[1]);
err_page1:
	kfree_skb(skb[0]);
err_skb0:
	__free_page(page[0]);
err_page0:
	return NULL;
}
15059
/* Construct the input skb for the "gso_linear_no_head_frag" test: two
 * skbs with kmalloc'ed linear data (head_frag == 0) linked via a
 * frag_list, carrying TCPv6-over-dodgy-GSO metadata. Returns the head
 * skb, or NULL on allocation failure.
 */
static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
{
	unsigned int alloc_size = 2000;
	unsigned int headroom = 102, doffset = 72, data_size = 1308;
	struct sk_buff *skb[2];
	int i;

	/* skbs linked in a frag_list, both with linear data, with head_frag=0
	 * (data allocated by kmalloc), both have tcp data of 1308 bytes
	 * (total payload is 2616 bytes).
	 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
	 */
	for (i = 0; i < 2; i++) {
		skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
		if (!skb[i]) {
			if (i == 0)
				goto err_skb0;
			else
				goto err_skb1;
		}

		skb[i]->protocol = htons(ETH_P_IPV6);
		skb_reserve(skb[i], headroom);
		skb_put(skb[i], doffset + data_size);
		skb_reset_network_header(skb[i]);
		if (i == 0)
			skb_reset_mac_header(skb[i]);
		else
			skb_set_mac_header(skb[i], -ETH_HLEN);
		/* Advance ->data past the l3/l4 headers. */
		__skb_pull(skb[i], doffset);
	}

	/* setup shinfo.
	 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
	 * reduced gso_size.
	 */
	skb_shinfo(skb[0])->gso_size = 1288;
	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
	skb_shinfo(skb[0])->gso_segs = 0;
	skb_shinfo(skb[0])->frag_list = skb[1];

	/* adjust skb[0]'s len */
	skb[0]->len += skb[1]->len;
	skb[0]->data_len += skb[1]->len;
	skb[0]->truesize += skb[1]->truesize;

	return skb[0];

err_skb1:
	kfree_skb(skb[0]);
err_skb0:
	return NULL;
}
15113
/* One skb_segment() test case: how to build the input skb and which
 * netdev feature flags to segment it with.
 */
struct skb_segment_test {
	const char *descr;			/* test description for logs */
	struct sk_buff *(*build_skb)(void);	/* constructs the input skb */
	netdev_features_t features;		/* features for skb_segment() */
};
15119
/* Table of skb_segment() test cases: an skb builder paired with the
 * netdev feature set to segment it with.
 */
static struct skb_segment_test skb_segment_tests[] __initconst = {
	{
		.descr = "gso_with_rx_frags",
		.build_skb = build_test_skb,
		.features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
	},
	{
		.descr = "gso_linear_no_head_frag",
		.build_skb = build_test_skb_linear_no_head_frag,
		.features = NETIF_F_SG | NETIF_F_FRAGLIST |
			    NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
			    NETIF_F_GRO | NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
			    NETIF_F_HW_VLAN_STAG_TX
	}
};
15136
test_skb_segment_single(const struct skb_segment_test * test)15137 static __init int test_skb_segment_single(const struct skb_segment_test *test)
15138 {
15139 struct sk_buff *skb, *segs;
15140 int ret = -1;
15141
15142 skb = test->build_skb();
15143 if (!skb) {
15144 pr_info("%s: failed to build_test_skb", __func__);
15145 goto done;
15146 }
15147
15148 segs = skb_segment(skb, test->features);
15149 if (!IS_ERR(segs)) {
15150 kfree_skb_list(segs);
15151 ret = 0;
15152 }
15153 kfree_skb(skb);
15154 done:
15155 return ret;
15156 }
15157
test_skb_segment(void)15158 static __init int test_skb_segment(void)
15159 {
15160 int i, err_cnt = 0, pass_cnt = 0;
15161
15162 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
15163 const struct skb_segment_test *test = &skb_segment_tests[i];
15164
15165 cond_resched();
15166 if (exclude_test(i))
15167 continue;
15168
15169 pr_info("#%d %s ", i, test->descr);
15170
15171 if (test_skb_segment_single(test)) {
15172 pr_cont("FAIL\n");
15173 err_cnt++;
15174 } else {
15175 pr_cont("PASS\n");
15176 pass_cnt++;
15177 }
15178 }
15179
15180 pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
15181 pass_cnt, err_cnt);
15182 return err_cnt ? -EINVAL : 0;
15183 }
15184
/* Run the main BPF suite: build, execute and release each entry of
 * tests[], honouring the test_range selection. Returns 0 when all
 * selected tests pass, -EINVAL otherwise.
 */
static __init int test_bpf(void)
{
	int i, err_cnt = 0, pass_cnt = 0;
	int jit_cnt = 0, run_cnt = 0;

	for (i = 0; i < ARRAY_SIZE(tests); i++) {
		struct bpf_prog *fp;
		int err;

		cond_resched();
		if (exclude_test(i))
			continue;

		pr_info("#%d %s ", i, tests[i].descr);

		/* Some tests generate their instructions at runtime.
		 * NOTE(review): a fill failure is logged but not counted
		 * in err_cnt — confirm that is intentional.
		 */
		if (tests[i].fill_helper &&
		    tests[i].fill_helper(&tests[i]) < 0) {
			pr_cont("FAIL to prog_fill\n");
			continue;
		}

		fp = generate_filter(i, &err);

		/* Generated instructions are no longer needed once the
		 * program has been created (or creation failed).
		 */
		if (tests[i].fill_helper) {
			kfree(tests[i].u.ptr.insns);
			tests[i].u.ptr.insns = NULL;
		}

		if (fp == NULL) {
			/* err == 0 means an expected verifier rejection. */
			if (err == 0) {
				pass_cnt++;
				continue;
			}
			err_cnt++;
			continue;
		}

		pr_cont("jited:%u ", fp->jited);

		run_cnt++;
		if (fp->jited)
			jit_cnt++;

		err = run_one(fp, &tests[i]);
		release_filter(fp, i);

		if (err) {
			pr_cont("FAIL (%d times)\n", err);
			err_cnt++;
		} else {
			pr_cont("PASS\n");
			pass_cnt++;
		}
	}

	pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
		pass_cnt, err_cnt, jit_cnt, run_cnt);

	return err_cnt ? -EINVAL : 0;
}
15245
/* One tail call test program together with its expected outcome. */
struct tail_call_test {
	const char *descr;		/* test description for logs */
	struct bpf_insn insns[MAX_INSNS]; /* program text, zero-terminated */
	int flags;			/* FLAG_NEED_STATE / FLAG_RESULT_IN_STATE */
	int result;			/* expected return value (or state) */
	int stack_depth;		/* stack usage declared to the JIT */
	bool has_tail_call;		/* program performs a tail call */
};
15254
/* Flags that can be passed to tail call test cases */
#define FLAG_NEED_STATE BIT(0)
#define FLAG_RESULT_IN_STATE BIT(1)

/*
 * Magic marker used in test snippets for tail calls below.
 * BPF_LD/MOV to R2 and R2 with this immediate value is replaced
 * with the proper values by the test runner.
 */
#define TAIL_CALL_MARKER 0x7a11ca11

/* Special offset to indicate a NULL call target */
#define TAIL_CALL_NULL 0x7fff

/* Special offset to indicate an out-of-range index */
#define TAIL_CALL_INVALID 0x7ffe

/* Emit a tail call to the test at relative index @offset: load the
 * program-array and index placeholders (patched later by
 * prepare_tail_call_tests()) and issue BPF_TAIL_CALL.
 */
#define TAIL_CALL(offset) \
	BPF_LD_IMM64(R2, TAIL_CALL_MARKER), \
	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
		     offset, TAIL_CALL_MARKER), \
	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
15277
15278 /*
15279 * A test function to be called from a BPF program, clobbering a lot of
15280 * CPU registers in the process. A JITed BPF program calling this function
15281 * must save and restore any caller-saved registers it uses for internal
15282 * state, for example the current tail call count.
15283 */
BPF_CALL_1(bpf_test_func, u64, arg)
{
	char buf[64];
	long a = 0;
	long b = 1;
	long c = 2;
	long d = 3;
	long e = 4;
	long f = 5;
	long g = 6;
	long h = 7;

	/* snprintf() with many arguments forces a JITed caller to spill
	 * and restore any caller-saved registers holding live state.
	 */
	return snprintf(buf, sizeof(buf),
			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
			a, b, c, d, e, f, g, h, (int)arg);
}
/* Expose the helper under an ID just past the last real BPF helper. */
#define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
15301
/*
 * Tail call tests. Each test case may call any other test in the table,
 * including itself, specified as a relative index offset from the calling
 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
 * results in a target index that is out of range.
 */
static struct tail_call_test tail_call_tests[] = {
	{
		"Tail call leaf",
		.insns = {
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_ALU64_IMM(BPF_ADD, R0, 1),
			BPF_EXIT_INSN(),
		},
		.result = 1,
	},
	{
		/* Chains into "Tail call leaf": R1 + 2 + 1 = 3. */
		"Tail call 2",
		.insns = {
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 3,
		.has_tail_call = true,
	},
	{
		/* Chains into "Tail call 2": R1 + 3 + 2 + 1 = 6. */
		"Tail call 3",
		.insns = {
			BPF_ALU64_IMM(BPF_ADD, R1, 3),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 6,
		.has_tail_call = true,
	},
	{
		/* Chains into "Tail call 3": R1 + 4 + 3 + 2 + 1 = 10. */
		"Tail call 4",
		.insns = {
			BPF_ALU64_IMM(BPF_ADD, R1, 4),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 10,
		.has_tail_call = true,
	},
	{
		"Tail call load/store leaf",
		.insns = {
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_REG(BPF_MOV, R3, BPF_REG_FP),
			BPF_STX_MEM(BPF_DW, R3, R1, -8),
			BPF_STX_MEM(BPF_DW, R3, R2, -16),
			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -8),
			BPF_JMP_REG(BPF_JNE, R0, R1, 3),
			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -16),
			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		.result = 0,
		.stack_depth = 32,
	},
	{
		"Tail call load/store",
		.insns = {
			BPF_ALU64_IMM(BPF_MOV, R0, 3),
			BPF_STX_MEM(BPF_DW, BPF_REG_FP, R0, -8),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 0,
		.stack_depth = 16,
		.has_tail_call = true,
	},
	{
		/* Tail-calls itself until the tail call limit kicks in. */
		"Tail call error path, max count reached",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			TAIL_CALL(0),
			BPF_EXIT_INSN(),
		},
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
		.has_tail_call = true,
	},
	{
		"Tail call count preserved across function calls",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			BPF_STX_MEM(BPF_DW, R10, R1, -8),
			BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
			BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
			BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
			BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
			BPF_CALL_REL(BPF_FUNC_jiffies64),
			BPF_CALL_REL(BPF_FUNC_test_func),
			BPF_LDX_MEM(BPF_DW, R1, R10, -8),
			BPF_ALU32_REG(BPF_MOV, R0, R1),
			TAIL_CALL(0),
			BPF_EXIT_INSN(),
		},
		.stack_depth = 8,
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
		.has_tail_call = true,
	},
	{
		"Tail call error path, NULL target",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			TAIL_CALL(TAIL_CALL_NULL),
			BPF_EXIT_INSN(),
		},
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = MAX_TESTRUNS,
		.has_tail_call = true,
	},
	{
		"Tail call error path, index out of range",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			TAIL_CALL(TAIL_CALL_INVALID),
			BPF_EXIT_INSN(),
		},
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = MAX_TESTRUNS,
		.has_tail_call = true,
	},
};
15446
destroy_tail_call_tests(struct bpf_array * progs)15447 static void __init destroy_tail_call_tests(struct bpf_array *progs)
15448 {
15449 int i;
15450
15451 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
15452 if (progs->ptrs[i])
15453 bpf_prog_free(progs->ptrs[i]);
15454 kfree(progs);
15455 }
15456
/* Build the bpf_array of tail call test programs.
 *
 * Allocates a program array with one extra NULL slot, translates every
 * entry of tail_call_tests[] into a runnable eBPF program, patching the
 * TAIL_CALL_MARKER placeholders with the real array pointer / indices
 * and resolving BPF_PSEUDO_CALL targets to kernel functions.
 *
 * On success *pprogs holds the table and 0 is returned; on failure all
 * partially created programs are freed and a negative errno is returned.
 */
static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
{
	int ntests = ARRAY_SIZE(tail_call_tests);
	struct bpf_array *progs;
	int which, err;

	/* Allocate the table of programs to be used for tail calls */
	progs = kzalloc(struct_size(progs, ptrs, ntests + 1), GFP_KERNEL);
	if (!progs)
		goto out_nomem;

	/* Create all eBPF programs and populate the table */
	for (which = 0; which < ntests; which++) {
		struct tail_call_test *test = &tail_call_tests[which];
		struct bpf_prog *fp;
		int len, i;

		/* Compute the number of program instructions */
		for (len = 0; len < MAX_INSNS; len++) {
			struct bpf_insn *insn = &test->insns[len];

			/* A 64-bit immediate load takes two slots. */
			if (len < MAX_INSNS - 1 &&
			    insn->code == (BPF_LD | BPF_DW | BPF_IMM))
				len++;
			if (insn->code == 0)
				break;
		}

		/* Allocate and initialize the program */
		fp = bpf_prog_alloc(bpf_prog_size(len), 0);
		if (!fp)
			goto out_nomem;

		fp->len = len;
		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
		fp->aux->stack_depth = test->stack_depth;
		fp->aux->tail_call_reachable = test->has_tail_call;
		memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));

		/* Relocate runtime tail call offsets and addresses */
		for (i = 0; i < len; i++) {
			struct bpf_insn *insn = &fp->insnsi[i];
			long addr = 0;

			switch (insn->code) {
			case BPF_LD | BPF_DW | BPF_IMM:
				if (insn->imm != TAIL_CALL_MARKER)
					break;
				/* Patch in the program array's address. */
				insn[0].imm = (u32)(long)progs;
				insn[1].imm = ((u64)(long)progs) >> 32;
				break;

			case BPF_ALU | BPF_MOV | BPF_K:
				if (insn->imm != TAIL_CALL_MARKER)
					break;
				/* Resolve the relative or special index. */
				if (insn->off == TAIL_CALL_NULL)
					insn->imm = ntests;
				else if (insn->off == TAIL_CALL_INVALID)
					insn->imm = ntests + 1;
				else
					insn->imm = which + insn->off;
				insn->off = 0;
				break;

			case BPF_JMP | BPF_CALL:
				if (insn->src_reg != BPF_PSEUDO_CALL)
					break;
				switch (insn->imm) {
				case BPF_FUNC_get_numa_node_id:
					addr = (long)&numa_node_id;
					break;
				case BPF_FUNC_ktime_get_ns:
					addr = (long)&ktime_get_ns;
					break;
				case BPF_FUNC_ktime_get_boot_ns:
					addr = (long)&ktime_get_boot_fast_ns;
					break;
				case BPF_FUNC_ktime_get_coarse_ns:
					addr = (long)&ktime_get_coarse_ns;
					break;
				case BPF_FUNC_jiffies64:
					addr = (long)&get_jiffies_64;
					break;
				case BPF_FUNC_test_func:
					addr = (long)&bpf_test_func;
					break;
				default:
					err = -EFAULT;
					goto out_err;
				}
				*insn = BPF_EMIT_CALL(addr);
				/* A target beyond BPF_EMIT_CALL's 32-bit
				 * relative reach is replaced with a NOP.
				 */
				if ((long)__bpf_call_base + insn->imm != addr)
					*insn = BPF_JMP_A(0); /* Skip: NOP */
				break;
			}
		}

		fp = bpf_prog_select_runtime(fp, &err);
		if (err)
			goto out_err;

		progs->ptrs[which] = fp;
	}

	/* The last entry contains a NULL program pointer */
	progs->map.max_entries = ntests + 1;
	*pprogs = progs;
	return 0;

out_nomem:
	err = -ENOMEM;

out_err:
	if (progs)
		destroy_tail_call_tests(progs);
	return err;
}
15574
/* Run every program of @progs, optionally routing its result through
 * the shared state word, and compare against the expected outcome.
 * Returns 0 when all selected tests pass, -EINVAL otherwise.
 */
static __init int test_tail_calls(struct bpf_array *progs)
{
	int i, err_cnt = 0, pass_cnt = 0;
	int jit_cnt = 0, run_cnt = 0;

	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
		struct tail_call_test *test = &tail_call_tests[i];
		struct bpf_prog *fp = progs->ptrs[i];
		int *data = NULL;
		int state = 0;
		u64 duration;
		int ret;

		cond_resched();
		if (exclude_test(i))
			continue;

		pr_info("#%d %s ", i, test->descr);
		if (!fp) {
			err_cnt++;
			continue;
		}
		pr_cont("jited:%u ", fp->jited);

		run_cnt++;
		if (fp->jited)
			jit_cnt++;

		/* Some tests count invocations in a shared state word. */
		if (test->flags & FLAG_NEED_STATE)
			data = &state;
		ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
		if (test->flags & FLAG_RESULT_IN_STATE)
			ret = state;
		if (ret == test->result) {
			pr_cont("%lld PASS", duration);
			pass_cnt++;
		} else {
			pr_cont("ret %d != %d FAIL", ret, test->result);
			err_cnt++;
		}
	}

	pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
		__func__, pass_cnt, err_cnt, jit_cnt, run_cnt);

	return err_cnt ? -EINVAL : 0;
}
15622
/* Suite selector: "test_bpf", "test_tail_calls" or "test_skb_segment";
 * an empty string runs all suites.
 */
static char test_suite[32];
module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
15625
find_test_index(const char * test_name)15626 static __init int find_test_index(const char *test_name)
15627 {
15628 int i;
15629
15630 if (!strcmp(test_suite, "test_bpf")) {
15631 for (i = 0; i < ARRAY_SIZE(tests); i++) {
15632 if (!strcmp(tests[i].descr, test_name))
15633 return i;
15634 }
15635 }
15636
15637 if (!strcmp(test_suite, "test_tail_calls")) {
15638 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
15639 if (!strcmp(tail_call_tests[i].descr, test_name))
15640 return i;
15641 }
15642 }
15643
15644 if (!strcmp(test_suite, "test_skb_segment")) {
15645 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
15646 if (!strcmp(skb_segment_tests[i].descr, test_name))
15647 return i;
15648 }
15649 }
15650
15651 return -1;
15652 }
15653
/* Translate the test_id / test_name / test_range module parameters into
 * a validated [test_range[0], test_range[1]] window for the selected
 * suite. Returns 0 on success or -EINVAL on an invalid selection.
 */
static __init int prepare_test_range(void)
{
	int valid_range;

	if (!strcmp(test_suite, "test_bpf"))
		valid_range = ARRAY_SIZE(tests);
	else if (!strcmp(test_suite, "test_tail_calls"))
		valid_range = ARRAY_SIZE(tail_call_tests);
	else if (!strcmp(test_suite, "test_skb_segment"))
		valid_range = ARRAY_SIZE(skb_segment_tests);
	else
		/* No suite selected: leave the default range untouched. */
		return 0;

	if (test_id >= 0) {
		/*
		 * if a test_id was specified, use test_range to
		 * cover only that test.
		 */
		if (test_id >= valid_range) {
			pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
			       test_suite);
			return -EINVAL;
		}

		test_range[0] = test_id;
		test_range[1] = test_id;
	} else if (*test_name) {
		/*
		 * if a test_name was specified, find it and setup
		 * test_range to cover only that test.
		 */
		int idx = find_test_index(test_name);

		if (idx < 0) {
			pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
			       test_name, test_suite);
			return -EINVAL;
		}
		test_range[0] = idx;
		test_range[1] = idx;
	} else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
		/*
		 * check that the supplied test_range is valid.
		 */
		if (test_range[0] < 0 || test_range[1] >= valid_range) {
			pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
			       test_suite);
			return -EINVAL;
		}

		if (test_range[1] < test_range[0]) {
			pr_err("test_bpf: test_range is ending before it starts.\n");
			return -EINVAL;
		}
	}

	return 0;
}
15712
/* Module entry point: validate the test_suite/test_id/test_name/
 * test_range parameters, then run the selected suite(s) — all of them
 * when no suite is given. Returns 0 so the module stays loaded on
 * success, or a negative errno on any failure.
 */
static int __init test_bpf_init(void)
{
	struct bpf_array *progs = NULL;
	int ret;

	if (strlen(test_suite) &&
	    strcmp(test_suite, "test_bpf") &&
	    strcmp(test_suite, "test_tail_calls") &&
	    strcmp(test_suite, "test_skb_segment")) {
		pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
		return -EINVAL;
	}

	/*
	 * if test_suite is not specified, but test_id, test_name or test_range
	 * is specified, set 'test_bpf' as the default test suite.
	 */
	if (!strlen(test_suite) &&
	    (test_id != -1 || strlen(test_name) ||
	     (test_range[0] != 0 || test_range[1] != INT_MAX))) {
		pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
		strscpy(test_suite, "test_bpf", sizeof(test_suite));
	}

	ret = prepare_test_range();
	if (ret < 0)
		return ret;

	if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
		ret = test_bpf();
		if (ret)
			return ret;
	}

	if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
		ret = prepare_tail_call_tests(&progs);
		if (ret)
			return ret;
		ret = test_tail_calls(progs);
		destroy_tail_call_tests(progs);
		if (ret)
			return ret;
	}

	if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
		return test_skb_segment();

	return 0;
}
15762
/* Nothing to tear down on unload; all test resources are released
 * during init.
 */
static void __exit test_bpf_exit(void)
{
}
15766
/* Module registration: the whole suite runs from the init hook. */
module_init(test_bpf_init);
module_exit(test_bpf_exit);

MODULE_DESCRIPTION("Testsuite for BPF interpreter and BPF JIT compiler");
MODULE_LICENSE("GPL");
15772