xref: /linux/lib/test_bpf.c (revision b73119222f2dd6b1c294cc6ef4b1fb7f9ff3adf5)
1  // SPDX-License-Identifier: GPL-2.0-only
2  /*
3   * Testsuite for BPF interpreter and BPF JIT compiler
4   *
5   * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
6   */
7  
8  #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
9  
10  #include <linux/init.h>
11  #include <linux/module.h>
12  #include <linux/filter.h>
13  #include <linux/bpf.h>
14  #include <linux/skbuff.h>
15  #include <linux/netdevice.h>
16  #include <linux/if_vlan.h>
17  #include <linux/random.h>
18  #include <linux/highmem.h>
19  #include <linux/sched.h>
20  
21  /* General test specific settings */
22  #define MAX_SUBTESTS	3
23  #define MAX_TESTRUNS	1000
24  #define MAX_DATA	128
25  #define MAX_INSNS	512
26  #define MAX_K		0xffffFFFF
27  
28  /* A few constants used to init the test 'skb' */
29  #define SKB_TYPE	3
30  #define SKB_MARK	0x1234aaaa
31  #define SKB_HASH	0x1234aaab
32  #define SKB_QUEUE_MAP	123
33  #define SKB_VLAN_TCI	0xffff
34  #define SKB_VLAN_PRESENT	1
35  #define SKB_DEV_IFINDEX	577
36  #define SKB_DEV_TYPE	588
37  
38  /* Redefine REGs to make tests less verbose */
39  #define R0		BPF_REG_0
40  #define R1		BPF_REG_1
41  #define R2		BPF_REG_2
42  #define R3		BPF_REG_3
43  #define R4		BPF_REG_4
44  #define R5		BPF_REG_5
45  #define R6		BPF_REG_6
46  #define R7		BPF_REG_7
47  #define R8		BPF_REG_8
48  #define R9		BPF_REG_9
49  #define R10		BPF_REG_10
50  
51  /* Flags that can be passed to test cases */
52  #define FLAG_NO_DATA		BIT(0)
53  #define FLAG_EXPECTED_FAIL	BIT(1)
54  #define FLAG_SKB_FRAG		BIT(2)
55  #define FLAG_VERIFIER_ZEXT	BIT(3)
56  
57  enum {
58  	CLASSIC  = BIT(6),	/* Old BPF instructions only. */
59  	INTERNAL = BIT(7),	/* Extended instruction set.  */
60  };
61  
62  #define TEST_TYPE_MASK		(CLASSIC | INTERNAL)
63  
64  struct bpf_test {
65  	const char *descr;
66  	union {
67  		struct sock_filter insns[MAX_INSNS];
68  		struct bpf_insn insns_int[MAX_INSNS];
69  		struct {
70  			void *insns;
71  			unsigned int len;
72  		} ptr;
73  	} u;
74  	__u8 aux;
75  	__u8 data[MAX_DATA];
76  	struct {
77  		int data_size;
78  		__u32 result;
79  	} test[MAX_SUBTESTS];
80  	int (*fill_helper)(struct bpf_test *self);
81  	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
82  	__u8 frag_data[MAX_DATA];
83  	int stack_depth; /* for eBPF only, since tests don't call verifier */
84  	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
85  };
86  
87  /* Large test cases need separate allocation and fill handler. */
88  
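/*
 * Fill the program with BPF_MAXINSNS unconditional returns carrying
 * decreasing constants; only the first one (RET 0xffffffff) ever runs.
 */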
89  static int bpf_fill_maxinsns1(struct bpf_test *self)
90  {
91  	unsigned int len = BPF_MAXINSNS;
92  	struct sock_filter *insn;
93  	__u32 k = ~0;
94  	int i;
95  
96  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
97  	if (!insn)
98  		return -ENOMEM;
99  
100  	for (i = 0; i < len; i++, k--)
101  		insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
102  
103  	self->u.ptr.insns = insn;
104  	self->u.ptr.len = len;
105  
106  	return 0;
107  }
108  
109  static int bpf_fill_maxinsns2(struct bpf_test *self)
110  {
111  	unsigned int len = BPF_MAXINSNS;
112  	struct sock_filter *insn;
113  	int i;
114  
115  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
116  	if (!insn)
117  		return -ENOMEM;
118  
119  	for (i = 0; i < len; i++)
120  		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
121  
122  	self->u.ptr.insns = insn;
123  	self->u.ptr.len = len;
124  
125  	return 0;
126  }
127  
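/*
 * A long chain of ALU adds with pseudo-random immediates, terminated by
 * RET A, so the accumulator ends up holding the 32-bit sum of all the
 * generated constants.
 */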
128  static int bpf_fill_maxinsns3(struct bpf_test *self)
129  {
130  	unsigned int len = BPF_MAXINSNS;
131  	struct sock_filter *insn;
132  	struct rnd_state rnd;
133  	int i;
134  
135  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
136  	if (!insn)
137  		return -ENOMEM;
138  
139  	prandom_seed_state(&rnd, 3141592653589793238ULL);
140  
141  	for (i = 0; i < len - 1; i++) {
142  		__u32 k = prandom_u32_state(&rnd);
143  
144  		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
145  	}
146  
147  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
148  
149  	self->u.ptr.insns = insn;
150  	self->u.ptr.len = len;
151  
152  	return 0;
153  }
154  
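/* One instruction more than the classic BPF limit of BPF_MAXINSNS. */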
155  static int bpf_fill_maxinsns4(struct bpf_test *self)
156  {
157  	unsigned int len = BPF_MAXINSNS + 1;
158  	struct sock_filter *insn;
159  	int i;
160  
161  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
162  	if (!insn)
163  		return -ENOMEM;
164  
165  	for (i = 0; i < len; i++)
166  		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
167  
168  	self->u.ptr.insns = insn;
169  	self->u.ptr.len = len;
170  
171  	return 0;
172  }
173  
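/*
 * An initial unconditional jump skips over the filler returns straight
 * to the final RET 0xabababab.
 */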
174  static int bpf_fill_maxinsns5(struct bpf_test *self)
175  {
176  	unsigned int len = BPF_MAXINSNS;
177  	struct sock_filter *insn;
178  	int i;
179  
180  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
181  	if (!insn)
182  		return -ENOMEM;
183  
184  	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
185  
186  	for (i = 1; i < len - 1; i++)
187  		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
188  
189  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
190  
191  	self->u.ptr.insns = insn;
192  	self->u.ptr.len = len;
193  
194  	return 0;
195  }
196  
197  static int bpf_fill_maxinsns6(struct bpf_test *self)
198  {
199  	unsigned int len = BPF_MAXINSNS;
200  	struct sock_filter *insn;
201  	int i;
202  
203  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
204  	if (!insn)
205  		return -ENOMEM;
206  
207  	for (i = 0; i < len - 1; i++)
208  		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
209  				     SKF_AD_VLAN_TAG_PRESENT);
210  
211  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
212  
213  	self->u.ptr.insns = insn;
214  	self->u.ptr.len = len;
215  
216  	return 0;
217  }
218  
219  static int bpf_fill_maxinsns7(struct bpf_test *self)
220  {
221  	unsigned int len = BPF_MAXINSNS;
222  	struct sock_filter *insn;
223  	int i;
224  
225  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
226  	if (!insn)
227  		return -ENOMEM;
228  
229  	for (i = 0; i < len - 4; i++)
230  		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
231  				     SKF_AD_CPU);
232  
233  	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
234  	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
235  				   SKF_AD_CPU);
236  	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
237  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
238  
239  	self->u.ptr.insns = insn;
240  	self->u.ptr.len = len;
241  
242  	return 0;
243  }
244  
245  static int bpf_fill_maxinsns8(struct bpf_test *self)
246  {
247  	unsigned int len = BPF_MAXINSNS;
248  	struct sock_filter *insn;
249  	int i, jmp_off = len - 3;
250  
251  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
252  	if (!insn)
253  		return -ENOMEM;
254  
255  	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
256  
257  	for (i = 1; i < len - 1; i++)
258  		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
259  
260  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
261  
262  	self->u.ptr.insns = insn;
263  	self->u.ptr.len = len;
264  
265  	return 0;
266  }
267  
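/*
 * eBPF: a long forward jump to the tail, which jumps back to the second
 * instruction; that one sets R0 = 0xcbababab and exits. Exercises long
 * forward and backward jump offsets in one program.
 */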
268  static int bpf_fill_maxinsns9(struct bpf_test *self)
269  {
270  	unsigned int len = BPF_MAXINSNS;
271  	struct bpf_insn *insn;
272  	int i;
273  
274  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
275  	if (!insn)
276  		return -ENOMEM;
277  
278  	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
279  	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
280  	insn[2] = BPF_EXIT_INSN();
281  
282  	for (i = 3; i < len - 2; i++)
283  		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
284  
285  	insn[len - 2] = BPF_EXIT_INSN();
286  	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
287  
288  	self->u.ptr.insns = insn;
289  	self->u.ptr.len = len;
290  
291  	return 0;
292  }
293  
294  static int bpf_fill_maxinsns10(struct bpf_test *self)
295  {
296  	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
297  	struct bpf_insn *insn;
298  	int i;
299  
300  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
301  	if (!insn)
302  		return -ENOMEM;
303  
304  	for (i = 0; i < hlen / 2; i++)
305  		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
306  	for (i = hlen - 1; i > hlen / 2; i--)
307  		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
308  
309  	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
310  	insn[hlen]     = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
311  	insn[hlen + 1] = BPF_EXIT_INSN();
312  
313  	self->u.ptr.insns = insn;
314  	self->u.ptr.len = len;
315  
316  	return 0;
317  }
318  
319  static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
320  			 unsigned int plen)
321  {
322  	struct sock_filter *insn;
323  	unsigned int rlen;
324  	int i, j;
325  
326  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
327  	if (!insn)
328  		return -ENOMEM;
329  
330  	rlen = (len % plen) - 1;
331  
332  	for (i = 0; i + plen < len; i += plen)
333  		for (j = 0; j < plen; j++)
334  			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
335  						 plen - 1 - j, 0, 0);
336  	for (j = 0; j < rlen; j++)
337  		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
338  					 0, 0);
339  
340  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
341  
342  	self->u.ptr.insns = insn;
343  	self->u.ptr.len = len;
344  
345  	return 0;
346  }
347  
348  static int bpf_fill_maxinsns11(struct bpf_test *self)
349  {
350  	/* Hits 70 passes on x86_64 and triggers NOP padding. */
351  	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
352  }
353  
354  static int bpf_fill_maxinsns12(struct bpf_test *self)
355  {
356  	unsigned int len = BPF_MAXINSNS;
357  	struct sock_filter *insn;
358  	int i = 0;
359  
360  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
361  	if (!insn)
362  		return -ENOMEM;
363  
364  	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
365  
366  	for (i = 1; i < len - 1; i++)
367  		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
368  
369  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
370  
371  	self->u.ptr.insns = insn;
372  	self->u.ptr.len = len;
373  
374  	return 0;
375  }
376  
377  static int bpf_fill_maxinsns13(struct bpf_test *self)
378  {
379  	unsigned int len = BPF_MAXINSNS;
380  	struct sock_filter *insn;
381  	int i = 0;
382  
383  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
384  	if (!insn)
385  		return -ENOMEM;
386  
387  	for (i = 0; i < len - 3; i++)
388  		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
389  
390  	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
391  	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
392  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
393  
394  	self->u.ptr.insns = insn;
395  	self->u.ptr.len = len;
396  
397  	return 0;
398  }
399  
400  static int bpf_fill_ja(struct bpf_test *self)
401  {
402  	/* Hits exactly 11 passes on x86_64 JIT. */
403  	return __bpf_fill_ja(self, 12, 9);
404  }
405  
406  static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
407  {
408  	unsigned int len = BPF_MAXINSNS;
409  	struct sock_filter *insn;
410  	int i;
411  
412  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
413  	if (!insn)
414  		return -ENOMEM;
415  
416  	for (i = 0; i < len - 1; i += 2) {
417  		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
418  		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
419  					 SKF_AD_OFF + SKF_AD_CPU);
420  	}
421  
422  	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
423  
424  	self->u.ptr.insns = insn;
425  	self->u.ptr.len = len;
426  
427  	return 0;
428  }
429  
430  static int __bpf_fill_stxdw(struct bpf_test *self, int size)
431  {
432  	unsigned int len = BPF_MAXINSNS;
433  	struct bpf_insn *insn;
434  	int i;
435  
436  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
437  	if (!insn)
438  		return -ENOMEM;
439  
440  	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
441  	insn[1] = BPF_ST_MEM(size, R10, -40, 42);
442  
443  	for (i = 2; i < len - 2; i++)
444  		insn[i] = BPF_STX_XADD(size, R10, R0, -40);
445  
446  	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
447  	insn[len - 1] = BPF_EXIT_INSN();
448  
449  	self->u.ptr.insns = insn;
450  	self->u.ptr.len = len;
451  	self->stack_depth = 40;
452  
453  	return 0;
454  }
455  
456  static int bpf_fill_stxw(struct bpf_test *self)
457  {
458  	return __bpf_fill_stxdw(self, BPF_W);
459  }
460  
461  static int bpf_fill_stxdw(struct bpf_test *self)
462  {
463  	return __bpf_fill_stxdw(self, BPF_DW);
464  }
465  
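/*
 * BPF_LD_IMM64 is encoded as two struct bpf_insn slots, hence the fixed
 * return value of 2 that callers use to advance their instruction index.
 */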
466  static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
467  {
468  	struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
469  
470  	memcpy(insns, tmp, sizeof(tmp));
471  	return 2;
472  }
473  
474  /*
475   * Branch conversion tests. Complex operations can expand to a lot
476   * of instructions when JITed. This in turn may cause jump offsets
477   * to overflow the field size of the native instruction, triggering
478   * a branch conversion mechanism in some JITs.
479   */
480  static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
481  {
482  	struct bpf_insn *insns;
483  	int len = S16_MAX + 5;
484  	int i;
485  
486  	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
487  	if (!insns)
488  		return -ENOMEM;
489  
490  	i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
491  	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
492  	insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
493  	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
494  	insns[i++] = BPF_EXIT_INSN();
495  
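	/*
	 * Filler body: alternating 32- and 64-bit ALU operations, including
	 * ones that a JIT may expand into several native instructions or
	 * helper calls, so the S16_MAX branch above can end up spanning more
	 * than a short native branch reaches.
	 */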
496  	while (i < len - 1) {
497  		static const int ops[] = {
498  			BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
499  			BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
500  		};
501  		int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
502  
503  		if (i & 1)
504  			insns[i++] = BPF_ALU32_REG(op, R0, R1);
505  		else
506  			insns[i++] = BPF_ALU64_REG(op, R0, R1);
507  	}
508  
509  	insns[i++] = BPF_EXIT_INSN();
510  	self->u.ptr.insns = insns;
511  	self->u.ptr.len = len;
512  	BUG_ON(i != len);
513  
514  	return 0;
515  }
516  
517  /* Branch taken by runtime decision */
518  static int bpf_fill_max_jmp_taken(struct bpf_test *self)
519  {
520  	return __bpf_fill_max_jmp(self, BPF_JEQ, 1);
521  }
522  
523  /* Branch not taken by runtime decision */
524  static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
525  {
526  	return __bpf_fill_max_jmp(self, BPF_JEQ, 0);
527  }
528  
529  /* Branch always taken, known at JIT time */
530  static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
531  {
532  	return __bpf_fill_max_jmp(self, BPF_JGE, 0);
533  }
534  
535  /* Branch never taken, known at JIT time */
536  static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
537  {
538  	return __bpf_fill_max_jmp(self, BPF_JLT, 0);
539  }
540  
541  /* ALU result computation used in tests */
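/*
 * Returns false for division or modulo by zero; the pattern emitters
 * skip generating a test for such operand pairs.
 */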
542  static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
543  {
544  	*res = 0;
545  	switch (op) {
546  	case BPF_MOV:
547  		*res = v2;
548  		break;
549  	case BPF_AND:
550  		*res = v1 & v2;
551  		break;
552  	case BPF_OR:
553  		*res = v1 | v2;
554  		break;
555  	case BPF_XOR:
556  		*res = v1 ^ v2;
557  		break;
558  	case BPF_LSH:
559  		*res = v1 << v2;
560  		break;
561  	case BPF_RSH:
562  		*res = v1 >> v2;
563  		break;
564  	case BPF_ARSH:
565  		*res = v1 >> v2;
566  		if (v2 > 0 && v1 > S64_MAX)
567  			*res |= ~0ULL << (64 - v2);
568  		break;
569  	case BPF_ADD:
570  		*res = v1 + v2;
571  		break;
572  	case BPF_SUB:
573  		*res = v1 - v2;
574  		break;
575  	case BPF_MUL:
576  		*res = v1 * v2;
577  		break;
578  	case BPF_DIV:
579  		if (v2 == 0)
580  			return false;
581  		*res = div64_u64(v1, v2);
582  		break;
583  	case BPF_MOD:
584  		if (v2 == 0)
585  			return false;
586  		div64_u64_rem(v1, v2, res);
587  		break;
588  	}
589  	return true;
590  }
591  
592  /* Test an ALU shift operation for all valid shift values */
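/*
 * Instruction budget per source value: two insns to load the 64-bit
 * operand, plus seven per shift count (two moves, the shift, a two-insn
 * reference load, a compare and a conditional exit). The remaining three
 * are the R0 prologue and the success epilogue.
 */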
593  static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
594  				u8 mode, bool alu32)
595  {
596  	static const s64 regs[] = {
597  		0x0123456789abcdefLL, /* dword > 0, word < 0 */
598  		0xfedcba9876543210LL, /* dword < 0, word > 0 */
599  		0xfedcba0198765432LL, /* dword < 0, word < 0 */
600  		0x0123458967abcdefLL, /* dword > 0, word > 0 */
601  	};
602  	int bits = alu32 ? 32 : 64;
603  	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
604  	struct bpf_insn *insn;
605  	int imm, k;
606  	int i = 0;
607  
608  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
609  	if (!insn)
610  		return -ENOMEM;
611  
612  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
613  
614  	for (k = 0; k < ARRAY_SIZE(regs); k++) {
615  		s64 reg = regs[k];
616  
617  		i += __bpf_ld_imm64(&insn[i], R3, reg);
618  
619  		for (imm = 0; imm < bits; imm++) {
620  			u64 val;
621  
622  			/* Perform operation */
623  			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
624  			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
625  			if (alu32) {
626  				if (mode == BPF_K)
627  					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
628  				else
629  					insn[i++] = BPF_ALU32_REG(op, R1, R2);
630  
631  				if (op == BPF_ARSH)
632  					reg = (s32)reg;
633  				else
634  					reg = (u32)reg;
635  				__bpf_alu_result(&val, reg, imm, op);
636  				val = (u32)val;
637  			} else {
638  				if (mode == BPF_K)
639  					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
640  				else
641  					insn[i++] = BPF_ALU64_REG(op, R1, R2);
642  				__bpf_alu_result(&val, reg, imm, op);
643  			}
644  
645  			/*
646  			 * When debugging a JIT that fails this test, one
647  			 * can write the immediate value to R0 here to find
648  			 * out which operand values fail.
649  			 */
650  
651  			/* Load reference and check the result */
652  			i += __bpf_ld_imm64(&insn[i], R4, val);
653  			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
654  			insn[i++] = BPF_EXIT_INSN();
655  		}
656  	}
657  
658  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
659  	insn[i++] = BPF_EXIT_INSN();
660  
661  	self->u.ptr.insns = insn;
662  	self->u.ptr.len = len;
663  	BUG_ON(i != len);
664  
665  	return 0;
666  }
667  
668  static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
669  {
670  	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
671  }
672  
673  static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
674  {
675  	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
676  }
677  
678  static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
679  {
680  	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
681  }
682  
683  static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
684  {
685  	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
686  }
687  
688  static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
689  {
690  	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
691  }
692  
693  static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
694  {
695  	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
696  }
697  
698  static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
699  {
700  	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
701  }
702  
703  static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
704  {
705  	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
706  }
707  
708  static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
709  {
710  	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
711  }
712  
713  static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
714  {
715  	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
716  }
717  
718  static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
719  {
720  	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
721  }
722  
723  static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
724  {
725  	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
726  }
727  
728  /*
729   * Test an ALU register shift operation for all valid shift values
730   * for the case when the source and destination are the same.
731   */
732  static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
733  					 bool alu32)
734  {
735  	int bits = alu32 ? 32 : 64;
736  	int len = 3 + 6 * bits;
737  	struct bpf_insn *insn;
738  	int i = 0;
739  	u64 val;
740  
741  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
742  	if (!insn)
743  		return -ENOMEM;
744  
745  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
746  
747  	for (val = 0; val < bits; val++) {
748  		u64 res;
749  
750  		/* Perform operation */
751  		insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
752  		if (alu32)
753  			insn[i++] = BPF_ALU32_REG(op, R1, R1);
754  		else
755  			insn[i++] = BPF_ALU64_REG(op, R1, R1);
756  
757  		/* Compute the reference result */
758  		__bpf_alu_result(&res, val, val, op);
759  		if (alu32)
760  			res = (u32)res;
761  		i += __bpf_ld_imm64(&insn[i], R2, res);
762  
763  		/* Check the actual result */
764  		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
765  		insn[i++] = BPF_EXIT_INSN();
766  	}
767  
768  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
769  	insn[i++] = BPF_EXIT_INSN();
770  
771  	self->u.ptr.insns = insn;
772  	self->u.ptr.len = len;
773  	BUG_ON(i != len);
774  
775  	return 0;
776  }
777  
778  static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
779  {
780  	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
781  }
782  
783  static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
784  {
785  	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
786  }
787  
788  static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
789  {
790  	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
791  }
792  
793  static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
794  {
795  	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
796  }
797  
798  static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
799  {
800  	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
801  }
802  
803  static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
804  {
805  	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
806  }
807  
808  /*
809   * Common operand pattern generator for exhaustive power-of-two magnitudes
810   * tests. The block size parameters can be adjusted to increase/reduce the
811   * number of combinations tested and thereby the execution speed and memory
812   * footprint.
813   */
814  
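/* Operand generator: sign * 2^msb + delta, e.g. value(3, 1, -1) == -7. */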
815  static inline s64 value(int msb, int delta, int sign)
816  {
817  	return sign * (1LL << msb) + delta;
818  }
819  
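/*
 * The emit callback is first invoked with insns == NULL and must then
 * only report its worst-case instruction count, which is used to size
 * the allocation. Fewer instructions may actually be emitted (e.g.
 * division by zero cases are skipped), so the final program length is
 * the running index rather than the allocated length.
 */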
820  static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
821  			      int dbits, int sbits, int block1, int block2,
822  			      int (*emit)(struct bpf_test*, void*,
823  					  struct bpf_insn*, s64, s64))
824  {
825  	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
826  	struct bpf_insn *insns;
827  	int di, si, bt, db, sb;
828  	int count, len, k;
829  	int extra = 1 + 2;
830  	int i = 0;
831  
832  	/* Total number of iterations for the two patterns */
833  	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
834  	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
835  
836  	/* Compute the maximum number of insns and allocate the buffer */
837  	len = extra + count * (*emit)(self, arg, NULL, 0, 0);
838  	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
839  	if (!insns)
840  		return -ENOMEM;
841  
842  	/* Add head instruction(s) */
843  	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
844  
845  	/*
846  	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
847  	 * and with a block of contiguous values around each magnitude.
848  	 */
849  	for (di = 0; di < dbits - 1; di++)                 /* Dst magnitudes */
850  		for (si = 0; si < sbits - 1; si++)         /* Src magnitudes */
851  			for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
852  				for (db = -(block1 / 2);
853  				     db < (block1 + 1) / 2; db++)
854  					for (sb = -(block1 / 2);
855  					     sb < (block1 + 1) / 2; sb++) {
856  						s64 dst, src;
857  
858  						dst = value(di, db, sgn[k][0]);
859  						src = value(si, sb, sgn[k][1]);
860  						i += (*emit)(self, arg,
861  							     &insns[i],
862  							     dst, src);
863  					}
864  	/*
865  	 * Pattern 2: all combinations for a larger block of values
866  	 * for each power-of-two magnitude and sign, where the magnitude is
867  	 * the same for both operands.
868  	 */
869  	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)        /* Magnitude   */
870  		for (k = 0; k < ARRAY_SIZE(sgn); k++)         /* Sign combos */
871  			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
872  				for (sb = -(block2 / 2);
873  				     sb < (block2 + 1) / 2; sb++) {
874  					s64 dst, src;
875  
876  					dst = value(bt % dbits, db, sgn[k][0]);
877  					src = value(bt % sbits, sb, sgn[k][1]);
878  					i += (*emit)(self, arg, &insns[i],
879  						     dst, src);
880  				}
881  
882  	/* Append tail instructions */
883  	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
884  	insns[i++] = BPF_EXIT_INSN();
885  	BUG_ON(i > len);
886  
887  	self->u.ptr.insns = insns;
888  	self->u.ptr.len = i;
889  
890  	return 0;
891  }
892  
893  /*
894   * Block size parameters used in pattern tests below. Tune as needed to
895   * increase/reduce the number of combinations tested; see the examples below.
896   *        block   values per operand MSB
897   * ----------------------------------------
898   *           0     none
899   *           1     (1 << MSB)
900   *           2     (1 << MSB) + [-1, 0]
901   *           3     (1 << MSB) + [-1, 0, 1]
902   */
903  #define PATTERN_BLOCK1 1
904  #define PATTERN_BLOCK2 5
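/*
 * With the values above, pattern 1 tests only the exact power-of-two
 * magnitudes, while pattern 2 adds deltas in the range [-2, 2] around
 * each magnitude.
 */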
905  
906  /* Number of test runs for a pattern test */
907  #define NR_PATTERN_RUNS 1
908  
909  /*
910   * Exhaustive tests of ALU operations for all combinations of power-of-two
911   * magnitudes of the operands, both for positive and negative values. The
912   * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
913   * emit different code depending on the magnitude of the immediate value.
914   */
915  static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
916  				struct bpf_insn *insns, s64 dst, s64 imm)
917  {
918  	int op = *(int *)arg;
919  	int i = 0;
920  	u64 res;
921  
922  	if (!insns)
923  		return 7;
924  
925  	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
926  		i += __bpf_ld_imm64(&insns[i], R1, dst);
927  		i += __bpf_ld_imm64(&insns[i], R3, res);
928  		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
929  		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
930  		insns[i++] = BPF_EXIT_INSN();
931  	}
932  
933  	return i;
934  }
935  
936  static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
937  				struct bpf_insn *insns, s64 dst, s64 imm)
938  {
939  	int op = *(int *)arg;
940  	int i = 0;
941  	u64 res;
942  
943  	if (!insns)
944  		return 7;
945  
946  	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
947  		i += __bpf_ld_imm64(&insns[i], R1, dst);
948  		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
949  		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
950  		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
951  		insns[i++] = BPF_EXIT_INSN();
952  	}
953  
954  	return i;
955  }
956  
957  static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
958  				struct bpf_insn *insns, s64 dst, s64 src)
959  {
960  	int op = *(int *)arg;
961  	int i = 0;
962  	u64 res;
963  
964  	if (!insns)
965  		return 9;
966  
967  	if (__bpf_alu_result(&res, dst, src, op)) {
968  		i += __bpf_ld_imm64(&insns[i], R1, dst);
969  		i += __bpf_ld_imm64(&insns[i], R2, src);
970  		i += __bpf_ld_imm64(&insns[i], R3, res);
971  		insns[i++] = BPF_ALU64_REG(op, R1, R2);
972  		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
973  		insns[i++] = BPF_EXIT_INSN();
974  	}
975  
976  	return i;
977  }
978  
979  static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
980  				struct bpf_insn *insns, s64 dst, s64 src)
981  {
982  	int op = *(int *)arg;
983  	int i = 0;
984  	u64 res;
985  
986  	if (!insns)
987  		return 9;
988  
989  	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
990  		i += __bpf_ld_imm64(&insns[i], R1, dst);
991  		i += __bpf_ld_imm64(&insns[i], R2, src);
992  		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
993  		insns[i++] = BPF_ALU32_REG(op, R1, R2);
994  		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
995  		insns[i++] = BPF_EXIT_INSN();
996  	}
997  
998  	return i;
999  }
1000  
1001  static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
1002  {
1003  	return __bpf_fill_pattern(self, &op, 64, 32,
1004  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1005  				  &__bpf_emit_alu64_imm);
1006  }
1007  
1008  static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
1009  {
1010  	return __bpf_fill_pattern(self, &op, 64, 32,
1011  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1012  				  &__bpf_emit_alu32_imm);
1013  }
1014  
1015  static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
1016  {
1017  	return __bpf_fill_pattern(self, &op, 64, 64,
1018  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1019  				  &__bpf_emit_alu64_reg);
1020  }
1021  
1022  static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
1023  {
1024  	return __bpf_fill_pattern(self, &op, 64, 64,
1025  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1026  				  &__bpf_emit_alu32_reg);
1027  }
1028  
1029  /* ALU64 immediate operations */
1030  static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
1031  {
1032  	return __bpf_fill_alu64_imm(self, BPF_MOV);
1033  }
1034  
1035  static int bpf_fill_alu64_and_imm(struct bpf_test *self)
1036  {
1037  	return __bpf_fill_alu64_imm(self, BPF_AND);
1038  }
1039  
1040  static int bpf_fill_alu64_or_imm(struct bpf_test *self)
1041  {
1042  	return __bpf_fill_alu64_imm(self, BPF_OR);
1043  }
1044  
1045  static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
1046  {
1047  	return __bpf_fill_alu64_imm(self, BPF_XOR);
1048  }
1049  
1050  static int bpf_fill_alu64_add_imm(struct bpf_test *self)
1051  {
1052  	return __bpf_fill_alu64_imm(self, BPF_ADD);
1053  }
1054  
1055  static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
1056  {
1057  	return __bpf_fill_alu64_imm(self, BPF_SUB);
1058  }
1059  
1060  static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
1061  {
1062  	return __bpf_fill_alu64_imm(self, BPF_MUL);
1063  }
1064  
1065  static int bpf_fill_alu64_div_imm(struct bpf_test *self)
1066  {
1067  	return __bpf_fill_alu64_imm(self, BPF_DIV);
1068  }
1069  
1070  static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
1071  {
1072  	return __bpf_fill_alu64_imm(self, BPF_MOD);
1073  }
1074  
1075  /* ALU32 immediate operations */
1076  static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
1077  {
1078  	return __bpf_fill_alu32_imm(self, BPF_MOV);
1079  }
1080  
1081  static int bpf_fill_alu32_and_imm(struct bpf_test *self)
1082  {
1083  	return __bpf_fill_alu32_imm(self, BPF_AND);
1084  }
1085  
1086  static int bpf_fill_alu32_or_imm(struct bpf_test *self)
1087  {
1088  	return __bpf_fill_alu32_imm(self, BPF_OR);
1089  }
1090  
1091  static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
1092  {
1093  	return __bpf_fill_alu32_imm(self, BPF_XOR);
1094  }
1095  
1096  static int bpf_fill_alu32_add_imm(struct bpf_test *self)
1097  {
1098  	return __bpf_fill_alu32_imm(self, BPF_ADD);
1099  }
1100  
1101  static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
1102  {
1103  	return __bpf_fill_alu32_imm(self, BPF_SUB);
1104  }
1105  
1106  static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
1107  {
1108  	return __bpf_fill_alu32_imm(self, BPF_MUL);
1109  }
1110  
1111  static int bpf_fill_alu32_div_imm(struct bpf_test *self)
1112  {
1113  	return __bpf_fill_alu32_imm(self, BPF_DIV);
1114  }
1115  
1116  static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
1117  {
1118  	return __bpf_fill_alu32_imm(self, BPF_MOD);
1119  }
1120  
1121  /* ALU64 register operations */
1122  static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
1123  {
1124  	return __bpf_fill_alu64_reg(self, BPF_MOV);
1125  }
1126  
1127  static int bpf_fill_alu64_and_reg(struct bpf_test *self)
1128  {
1129  	return __bpf_fill_alu64_reg(self, BPF_AND);
1130  }
1131  
1132  static int bpf_fill_alu64_or_reg(struct bpf_test *self)
1133  {
1134  	return __bpf_fill_alu64_reg(self, BPF_OR);
1135  }
1136  
1137  static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
1138  {
1139  	return __bpf_fill_alu64_reg(self, BPF_XOR);
1140  }
1141  
1142  static int bpf_fill_alu64_add_reg(struct bpf_test *self)
1143  {
1144  	return __bpf_fill_alu64_reg(self, BPF_ADD);
1145  }
1146  
1147  static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
1148  {
1149  	return __bpf_fill_alu64_reg(self, BPF_SUB);
1150  }
1151  
1152  static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
1153  {
1154  	return __bpf_fill_alu64_reg(self, BPF_MUL);
1155  }
1156  
1157  static int bpf_fill_alu64_div_reg(struct bpf_test *self)
1158  {
1159  	return __bpf_fill_alu64_reg(self, BPF_DIV);
1160  }
1161  
1162  static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
1163  {
1164  	return __bpf_fill_alu64_reg(self, BPF_MOD);
1165  }
1166  
1167  /* ALU32 register operations */
1168  static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
1169  {
1170  	return __bpf_fill_alu32_reg(self, BPF_MOV);
1171  }
1172  
1173  static int bpf_fill_alu32_and_reg(struct bpf_test *self)
1174  {
1175  	return __bpf_fill_alu32_reg(self, BPF_AND);
1176  }
1177  
1178  static int bpf_fill_alu32_or_reg(struct bpf_test *self)
1179  {
1180  	return __bpf_fill_alu32_reg(self, BPF_OR);
1181  }
1182  
1183  static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
1184  {
1185  	return __bpf_fill_alu32_reg(self, BPF_XOR);
1186  }
1187  
1188  static int bpf_fill_alu32_add_reg(struct bpf_test *self)
1189  {
1190  	return __bpf_fill_alu32_reg(self, BPF_ADD);
1191  }
1192  
1193  static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
1194  {
1195  	return __bpf_fill_alu32_reg(self, BPF_SUB);
1196  }
1197  
1198  static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
1199  {
1200  	return __bpf_fill_alu32_reg(self, BPF_MUL);
1201  }
1202  
1203  static int bpf_fill_alu32_div_reg(struct bpf_test *self)
1204  {
1205  	return __bpf_fill_alu32_reg(self, BPF_DIV);
1206  }
1207  
1208  static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
1209  {
1210  	return __bpf_fill_alu32_reg(self, BPF_MOD);
1211  }
1212  
1213  /*
1214   * Test JITs that implement complex ALU operations as function
1215   * calls, and must re-arrange operands for argument passing.
1216   */
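/*
 * Instruction budget per register under test: a two-insn immediate load,
 * the operation, a check/exit triple for the low 32 bits, a shift, and a
 * check/exit triple for the high 32 bits (10 insns), plus the two-insn
 * epilogue.
 */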
1217  static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
1218  {
1219  	int len = 2 + 10 * 10;
1220  	struct bpf_insn *insns;
1221  	u64 dst, res;
1222  	int i = 0;
1223  	u32 imm;
1224  	int rd;
1225  
1226  	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1227  	if (!insns)
1228  		return -ENOMEM;
1229  
1230  	/* Operand and result values according to operation */
1231  	if (alu32)
1232  		dst = 0x76543210U;
1233  	else
1234  		dst = 0x7edcba9876543210ULL;
1235  	imm = 0x01234567U;
1236  
1237  	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1238  		imm &= 31;
1239  
1240  	__bpf_alu_result(&res, dst, imm, op);
1241  
1242  	if (alu32)
1243  		res = (u32)res;
1244  
1245  	/* Check all operand registers */
1246  	for (rd = R0; rd <= R9; rd++) {
1247  		i += __bpf_ld_imm64(&insns[i], rd, dst);
1248  
1249  		if (alu32)
1250  			insns[i++] = BPF_ALU32_IMM(op, rd, imm);
1251  		else
1252  			insns[i++] = BPF_ALU64_IMM(op, rd, imm);
1253  
1254  		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
1255  		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1256  		insns[i++] = BPF_EXIT_INSN();
1257  
1258  		insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1259  		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
1260  		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1261  		insns[i++] = BPF_EXIT_INSN();
1262  	}
1263  
1264  	insns[i++] = BPF_MOV64_IMM(R0, 1);
1265  	insns[i++] = BPF_EXIT_INSN();
1266  
1267  	self->u.ptr.insns = insns;
1268  	self->u.ptr.len = len;
1269  	BUG_ON(i != len);
1270  
1271  	return 0;
1272  }
1273  
1274  /* ALU64 K registers */
1275  static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
1276  {
1277  	return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
1278  }
1279  
1280  static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
1281  {
1282  	return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
1283  }
1284  
1285  static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
1286  {
1287  	return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
1288  }
1289  
1290  static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
1291  {
1292  	return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
1293  }
1294  
1295  static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
1296  {
1297  	return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
1298  }
1299  
1300  static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
1301  {
1302  	return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
1303  }
1304  
1305  static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
1306  {
1307  	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
1308  }
1309  
1310  static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
1311  {
1312  	return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
1313  }
1314  
1315  static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
1316  {
1317  	return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
1318  }
1319  
1320  static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
1321  {
1322  	return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
1323  }
1324  
1325  static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
1326  {
1327  	return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
1328  }
1329  
1330  static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
1331  {
1332  	return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
1333  }
1334  
1335  /* ALU32 K registers */
1336  static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
1337  {
1338  	return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
1339  }
1340  
1341  static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
1342  {
1343  	return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
1344  }
1345  
1346  static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
1347  {
1348  	return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
1349  }
1350  
1351  static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
1352  {
1353  	return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
1354  }
1355  
1356  static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
1357  {
1358  	return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
1359  }
1360  
1361  static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
1362  {
1363  	return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
1364  }
1365  
1366  static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
1367  {
1368  	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
1369  }
1370  
1371  static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
1372  {
1373  	return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
1374  }
1375  
1376  static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
1377  {
1378  	return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
1379  }
1380  
1381  static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
1382  {
1383  	return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
1384  }
1385  
1386  static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
1387  {
1388  	return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
1389  }
1390  
1391  static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
1392  {
1393  	return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
1394  }
1395  
1396  /*
1397   * Test JITs that implement complex ALU operations as function
1398   * calls, and must re-arrange operands for argument passing.
1399   */
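/*
 * Note on aliasing: when rd == rs, the source load overwrites the
 * destination value, so the expected result is the operation applied to
 * src twice ("same" below) rather than to dst and src.
 */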
1400  static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
1401  {
1402  	int len = 2 + 10 * 10 * 12;
1403  	u64 dst, src, res, same;
1404  	struct bpf_insn *insns;
1405  	int rd, rs;
1406  	int i = 0;
1407  
1408  	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1409  	if (!insns)
1410  		return -ENOMEM;
1411  
1412  	/* Operand and result values according to operation */
1413  	if (alu32) {
1414  		dst = 0x76543210U;
1415  		src = 0x01234567U;
1416  	} else {
1417  		dst = 0x7edcba9876543210ULL;
1418  		src = 0x0123456789abcdefULL;
1419  	}
1420  
1421  	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1422  		src &= 31;
1423  
1424  	__bpf_alu_result(&res, dst, src, op);
1425  	__bpf_alu_result(&same, src, src, op);
1426  
1427  	if (alu32) {
1428  		res = (u32)res;
1429  		same = (u32)same;
1430  	}
1431  
1432  	/* Check all combinations of operand registers */
1433  	for (rd = R0; rd <= R9; rd++) {
1434  		for (rs = R0; rs <= R9; rs++) {
1435  			u64 val = rd == rs ? same : res;
1436  
1437  			i += __bpf_ld_imm64(&insns[i], rd, dst);
1438  			i += __bpf_ld_imm64(&insns[i], rs, src);
1439  
1440  			if (alu32)
1441  				insns[i++] = BPF_ALU32_REG(op, rd, rs);
1442  			else
1443  				insns[i++] = BPF_ALU64_REG(op, rd, rs);
1444  
1445  			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
1446  			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1447  			insns[i++] = BPF_EXIT_INSN();
1448  
1449  			insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1450  			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
1451  			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1452  			insns[i++] = BPF_EXIT_INSN();
1453  		}
1454  	}
1455  
1456  	insns[i++] = BPF_MOV64_IMM(R0, 1);
1457  	insns[i++] = BPF_EXIT_INSN();
1458  
1459  	self->u.ptr.insns = insns;
1460  	self->u.ptr.len = len;
1461  	BUG_ON(i != len);
1462  
1463  	return 0;
1464  }
1465  
1466  /* ALU64 X register combinations */
1467  static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
1468  {
1469  	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
1470  }
1471  
1472  static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
1473  {
1474  	return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
1475  }
1476  
1477  static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
1478  {
1479  	return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
1480  }
1481  
1482  static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
1483  {
1484  	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
1485  }
1486  
1487  static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
1488  {
1489  	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
1490  }
1491  
1492  static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
1493  {
1494  	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
1495  }
1496  
1497  static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
1498  {
1499  	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
1500  }
1501  
1502  static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
1503  {
1504  	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
1505  }
1506  
1507  static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
1508  {
1509  	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
1510  }
1511  
1512  static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
1513  {
1514  	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
1515  }
1516  
1517  static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
1518  {
1519  	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
1520  }
1521  
1522  static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
1523  {
1524  	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
1525  }
1526  
1527  /* ALU32 X register combinations */
1528  static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
1529  {
1530  	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
1531  }
1532  
1533  static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
1534  {
1535  	return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
1536  }
1537  
1538  static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
1539  {
1540  	return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
1541  }
1542  
1543  static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
1544  {
1545  	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
1546  }
1547  
1548  static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
1549  {
1550  	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
1551  }
1552  
1553  static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
1554  {
1555  	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
1556  }
1557  
1558  static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
1559  {
1560  	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
1561  }
1562  
1563  static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
1564  {
1565  	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
1566  }
1567  
1568  static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
1569  {
1570  	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
1571  }
1572  
1573  static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
1574  {
1575  	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
1576  }
1577  
1578  static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
1579  {
1580  	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
1581  }
1582  
1583  static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
1584  {
1585  	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
1586  }
1587  
1588  /*
1589   * Exhaustive tests of atomic operations for all power-of-two operand
1590   * magnitudes, both for positive and negative values.
1591   */
1592  
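/*
 * Each emitted block runs the atomic operation on a stack slot and then
 * checks three things: the resulting memory value, the source register
 * (which must hold the fetched old value for BPF_FETCH/BPF_XCHG and be
 * preserved otherwise), and that R0 still holds its sentinel value.
 */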
1593  static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
1594  			       struct bpf_insn *insns, s64 dst, s64 src)
1595  {
1596  	int op = *(int *)arg;
1597  	u64 keep, fetch, res;
1598  	int i = 0;
1599  
1600  	if (!insns)
1601  		return 21;
1602  
1603  	switch (op) {
1604  	case BPF_XCHG:
1605  		res = src;
1606  		break;
1607  	default:
1608  		__bpf_alu_result(&res, dst, src, BPF_OP(op));
1609  	}
1610  
1611  	keep = 0x0123456789abcdefULL;
1612  	if (op & BPF_FETCH)
1613  		fetch = dst;
1614  	else
1615  		fetch = src;
1616  
1617  	i += __bpf_ld_imm64(&insns[i], R0, keep);
1618  	i += __bpf_ld_imm64(&insns[i], R1, dst);
1619  	i += __bpf_ld_imm64(&insns[i], R2, src);
1620  	i += __bpf_ld_imm64(&insns[i], R3, res);
1621  	i += __bpf_ld_imm64(&insns[i], R4, fetch);
1622  	i += __bpf_ld_imm64(&insns[i], R5, keep);
1623  
1624  	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1625  	insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
1626  	insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
1627  
1628  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1629  	insns[i++] = BPF_EXIT_INSN();
1630  
1631  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1632  	insns[i++] = BPF_EXIT_INSN();
1633  
1634  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1635  	insns[i++] = BPF_EXIT_INSN();
1636  
1637  	return i;
1638  }
1639  
1640  static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
1641  			       struct bpf_insn *insns, s64 dst, s64 src)
1642  {
1643  	int op = *(int *)arg;
1644  	u64 keep, fetch, res;
1645  	int i = 0;
1646  
1647  	if (!insns)
1648  		return 21;
1649  
1650  	switch (op) {
1651  	case BPF_XCHG:
1652  		res = src;
1653  		break;
1654  	default:
1655  		__bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
1656  	}
1657  
1658  	keep = 0x0123456789abcdefULL;
1659  	if (op & BPF_FETCH)
1660  		fetch = (u32)dst;
1661  	else
1662  		fetch = src;
1663  
1664  	i += __bpf_ld_imm64(&insns[i], R0, keep);
1665  	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1666  	i += __bpf_ld_imm64(&insns[i], R2, src);
1667  	i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
1668  	i += __bpf_ld_imm64(&insns[i], R4, fetch);
1669  	i += __bpf_ld_imm64(&insns[i], R5, keep);
1670  
1671  	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1672  	insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
1673  	insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
1674  
1675  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1676  	insns[i++] = BPF_EXIT_INSN();
1677  
1678  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1679  	insns[i++] = BPF_EXIT_INSN();
1680  
1681  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1682  	insns[i++] = BPF_EXIT_INSN();
1683  
1684  	return i;
1685  }
1686  
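/*
 * Two-phase check: the first BPF_CMPXCHG runs with R0 != the value in
 * memory, so it must fail, leave memory unchanged and return the old
 * value in R0. R0 then equals the memory value, so the second
 * BPF_CMPXCHG must succeed and store src.
 */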
1687  static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
1688  				struct bpf_insn *insns, s64 dst, s64 src)
1689  {
1690  	int i = 0;
1691  
1692  	if (!insns)
1693  		return 23;
1694  
1695  	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1696  	i += __bpf_ld_imm64(&insns[i], R1, dst);
1697  	i += __bpf_ld_imm64(&insns[i], R2, src);
1698  
1699  	/* Result unsuccessful */
1700  	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1701  	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1702  	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1703  
1704  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
1705  	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1706  	insns[i++] = BPF_EXIT_INSN();
1707  
1708  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1709  	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1710  	insns[i++] = BPF_EXIT_INSN();
1711  
1712  	/* Result successful */
1713  	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1714  	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1715  
1716  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
1717  	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1718  	insns[i++] = BPF_EXIT_INSN();
1719  
1720  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1721  	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1722  	insns[i++] = BPF_EXIT_INSN();
1723  
1724  	return i;
1725  }
1726  
1727  static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
1728  				struct bpf_insn *insns, s64 dst, s64 src)
1729  {
1730  	int i = 0;
1731  
1732  	if (!insns)
1733  		return 27;
1734  
1735  	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1736  	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1737  	i += __bpf_ld_imm64(&insns[i], R2, src);
1738  
1739  	/* Result unsuccessful */
1740  	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1741  	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1742  	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
1743  	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1744  
1745  	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
1746  	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1747  	insns[i++] = BPF_EXIT_INSN();
1748  
1749  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1750  	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1751  	insns[i++] = BPF_EXIT_INSN();
1752  
1753  	/* Result successful */
1754  	i += __bpf_ld_imm64(&insns[i], R0, dst);
1755  	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1756  	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
1757  	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1758  
1759  	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
1760  	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1761  	insns[i++] = BPF_EXIT_INSN();
1762  
1763  	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1764  	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1765  	insns[i++] = BPF_EXIT_INSN();
1766  
1767  	return i;
1768  }
1769  
1770  static int __bpf_fill_atomic64(struct bpf_test *self, int op)
1771  {
1772  	return __bpf_fill_pattern(self, &op, 64, 64,
1773  				  0, PATTERN_BLOCK2,
1774  				  &__bpf_emit_atomic64);
1775  }
1776  
1777  static int __bpf_fill_atomic32(struct bpf_test *self, int op)
1778  {
1779  	return __bpf_fill_pattern(self, &op, 64, 64,
1780  				  0, PATTERN_BLOCK2,
1781  				  &__bpf_emit_atomic32);
1782  }
1783  
1784  /* 64-bit atomic operations */
1785  static int bpf_fill_atomic64_add(struct bpf_test *self)
1786  {
1787  	return __bpf_fill_atomic64(self, BPF_ADD);
1788  }
1789  
1790  static int bpf_fill_atomic64_and(struct bpf_test *self)
1791  {
1792  	return __bpf_fill_atomic64(self, BPF_AND);
1793  }
1794  
1795  static int bpf_fill_atomic64_or(struct bpf_test *self)
1796  {
1797  	return __bpf_fill_atomic64(self, BPF_OR);
1798  }
1799  
1800  static int bpf_fill_atomic64_xor(struct bpf_test *self)
1801  {
1802  	return __bpf_fill_atomic64(self, BPF_XOR);
1803  }
1804  
1805  static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
1806  {
1807  	return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
1808  }
1809  
1810  static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
1811  {
1812  	return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
1813  }
1814  
1815  static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
1816  {
1817  	return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
1818  }
1819  
1820  static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
1821  {
1822  	return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
1823  }
1824  
1825  static int bpf_fill_atomic64_xchg(struct bpf_test *self)
1826  {
1827  	return __bpf_fill_atomic64(self, BPF_XCHG);
1828  }
1829  
1830  static int bpf_fill_cmpxchg64(struct bpf_test *self)
1831  {
1832  	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1833  				  &__bpf_emit_cmpxchg64);
1834  }
1835  
1836  /* 32-bit atomic operations */
1837  static int bpf_fill_atomic32_add(struct bpf_test *self)
1838  {
1839  	return __bpf_fill_atomic32(self, BPF_ADD);
1840  }
1841  
1842  static int bpf_fill_atomic32_and(struct bpf_test *self)
1843  {
1844  	return __bpf_fill_atomic32(self, BPF_AND);
1845  }
1846  
1847  static int bpf_fill_atomic32_or(struct bpf_test *self)
1848  {
1849  	return __bpf_fill_atomic32(self, BPF_OR);
1850  }
1851  
1852  static int bpf_fill_atomic32_xor(struct bpf_test *self)
1853  {
1854  	return __bpf_fill_atomic32(self, BPF_XOR);
1855  }
1856  
1857  static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
1858  {
1859  	return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
1860  }
1861  
1862  static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
1863  {
1864  	return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
1865  }
1866  
1867  static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
1868  {
1869  	return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
1870  }
1871  
1872  static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
1873  {
1874  	return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
1875  }
1876  
1877  static int bpf_fill_atomic32_xchg(struct bpf_test *self)
1878  {
1879  	return __bpf_fill_atomic32(self, BPF_XCHG);
1880  }
1881  
1882  static int bpf_fill_cmpxchg32(struct bpf_test *self)
1883  {
1884  	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1885  				  &__bpf_emit_cmpxchg32);
1886  }
1887  
1888  /*
1889   * Test JITs that implement ATOMIC operations as function calls or
1890   * other primitives, and must re-arrange operands for argument passing.
1891   */
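/*
 * At most 34 instructions are emitted per register pair; the BUG_ON()
 * at the end only guards against overrunning the allocation, and the
 * final program length is the running index.
 */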
1892  static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
1893  {
1894  	struct bpf_insn *insn;
1895  	int len = 2 + 34 * 10 * 10;
1896  	u64 mem, upd, res;
1897  	int rd, rs, i = 0;
1898  
1899  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
1900  	if (!insn)
1901  		return -ENOMEM;
1902  
1903  	/* Operand and memory values */
1904  	if (width == BPF_DW) {
1905  		mem = 0x0123456789abcdefULL;
1906  		upd = 0xfedcba9876543210ULL;
1907  	} else { /* BPF_W */
1908  		mem = 0x01234567U;
1909  		upd = 0x76543210U;
1910  	}
1911  
1912  	/* Memory updated according to operation */
1913  	switch (op) {
1914  	case BPF_XCHG:
1915  		res = upd;
1916  		break;
1917  	case BPF_CMPXCHG:
1918  		res = mem;
1919  		break;
1920  	default:
1921  		__bpf_alu_result(&res, mem, upd, BPF_OP(op));
1922  	}
1923  
1924  	/* Test all operand registers */
1925  	for (rd = R0; rd <= R9; rd++) {
1926  		for (rs = R0; rs <= R9; rs++) {
1927  			u64 cmp, src;
1928  
1929  			/* Initialize value in memory */
1930  			i += __bpf_ld_imm64(&insn[i], R0, mem);
1931  			insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
1932  
1933  			/* Initialize registers in order */
1934  			i += __bpf_ld_imm64(&insn[i], R0, ~mem);
1935  			i += __bpf_ld_imm64(&insn[i], rs, upd);
1936  			insn[i++] = BPF_MOV64_REG(rd, R10);
1937  
1938  			/* Perform atomic operation */
1939  			insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
1940  			if (op == BPF_CMPXCHG && width == BPF_W)
1941  				insn[i++] = BPF_ZEXT_REG(R0);
1942  
1943  			/* Check R0 register value */
1944  			if (op == BPF_CMPXCHG)
1945  				cmp = mem;  /* Expect value from memory */
1946  			else if (R0 == rd || R0 == rs)
1947  				cmp = 0;    /* Aliased, checked below */
1948  			else
1949  				cmp = ~mem; /* Expect value to be preserved */
1950  			if (cmp) {
1951  				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1952  							   (u32)cmp, 2);
1953  				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1954  				insn[i++] = BPF_EXIT_INSN();
1955  				insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
1956  				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1957  							   cmp >> 32, 2);
1958  				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1959  				insn[i++] = BPF_EXIT_INSN();
1960  			}
1961  
1962  			/* Check source register value */
1963  			if (rs == R0 && op == BPF_CMPXCHG)
1964  				src = 0;   /* Aliased with R0, checked above */
1965  			else if (rs == rd && (op == BPF_CMPXCHG ||
1966  					      !(op & BPF_FETCH)))
1967  				src = 0;   /* Aliased with rd, checked below */
1968  			else if (op == BPF_CMPXCHG)
1969  				src = upd; /* Expect value to be preserved */
1970  			else if (op & BPF_FETCH)
1971  				src = mem; /* Expect fetched value from mem */
1972  			else /* no fetch */
1973  				src = upd; /* Expect value to be preserved */
1974  			if (src) {
1975  				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1976  							   (u32)src, 2);
1977  				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1978  				insn[i++] = BPF_EXIT_INSN();
1979  				insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
1980  				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1981  							   src >> 32, 2);
1982  				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1983  				insn[i++] = BPF_EXIT_INSN();
1984  			}
1985  
1986  			/* Check destination register value */
1987  			if (!(rd == R0 && op == BPF_CMPXCHG) &&
1988  			    !(rd == rs && (op & BPF_FETCH))) {
1989  				insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
1990  				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1991  				insn[i++] = BPF_EXIT_INSN();
1992  			}
1993  
1994  			/* Check value in memory */
1995  			if (rs != rd) {                  /* No aliasing */
1996  				i += __bpf_ld_imm64(&insn[i], R1, res);
1997  			} else if (op == BPF_XCHG) {     /* Aliased, XCHG */
1998  				insn[i++] = BPF_MOV64_REG(R1, R10);
1999  			} else if (op == BPF_CMPXCHG) {  /* Aliased, CMPXCHG */
2000  				i += __bpf_ld_imm64(&insn[i], R1, mem);
2001  			} else {                        /* Aliased, ALU oper */
2002  				i += __bpf_ld_imm64(&insn[i], R1, mem);
2003  				insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
2004  			}
2005  
2006  			insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
2007  			if (width == BPF_DW)
2008  				insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
2009  			else /* width == BPF_W */
2010  				insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
2011  			insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
2012  			insn[i++] = BPF_EXIT_INSN();
2013  		}
2014  	}
2015  
2016  	insn[i++] = BPF_MOV64_IMM(R0, 1);
2017  	insn[i++] = BPF_EXIT_INSN();
2018  
2019  	self->u.ptr.insns = insn;
2020  	self->u.ptr.len = i;
2021  	BUG_ON(i > len);
2022  
2023  	return 0;
2024  }
2025  
2026  /* 64-bit atomic register tests */
2027  static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
2028  {
2029  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
2030  }
2031  
2032  static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
2033  {
2034  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
2035  }
2036  
2037  static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
2038  {
2039  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
2040  }
2041  
2042  static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
2043  {
2044  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
2045  }
2046  
2047  static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
2048  {
2049  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
2050  }
2051  
2052  static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
2053  {
2054  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
2055  }
2056  
2057  static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
2058  {
2059  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
2060  }
2061  
2062  static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
2063  {
2064  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
2065  }
2066  
2067  static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
2068  {
2069  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
2070  }
2071  
2072  static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
2073  {
2074  	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
2075  }
2076  
2077  /* 32-bit atomic register tests */
2078  static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
2079  {
2080  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
2081  }
2082  
2083  static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
2084  {
2085  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
2086  }
2087  
2088  static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
2089  {
2090  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
2091  }
2092  
2093  static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
2094  {
2095  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
2096  }
2097  
2098  static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
2099  {
2100  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
2101  }
2102  
2103  static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
2104  {
2105  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
2106  }
2107  
2108  static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
2109  {
2110  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
2111  }
2112  
2113  static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
2114  {
2115  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
2116  }
2117  
2118  static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
2119  {
2120  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
2121  }
2122  
2123  static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
2124  {
2125  	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
2126  }
2127  
2128  /*
2129   * Test the two-instruction 64-bit immediate load operation for all
2130   * power-of-two magnitudes of the immediate operand. For each MSB, a block
2131   * of immediate values centered around the power-of-two MSB is tested,
2132   * both for positive and negative values. The test is designed to verify
2133   * the operation for JITs that emit different code depending on the magnitude
2134   * of the immediate value. This is often the case if the native instruction
2135   * immediate field width is narrower than 32 bits.
2136   */
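/*
 * For example, with bit = 31, adj = 0 and sign = 1 the immediate under
 * test is 0x80000000. The reference value is built without BPF_LD_IMM64:
 * the low half with a 32-bit move (which zero-extends), the high half
 * with a 32-bit move shifted left by 32, ORed together and then compared
 * against the result of the two-instruction load. R0 starts out as 0, so
 * a mismatch exits with a failing result; the epilogue sets R0 = 1.
 */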
2137  static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
2138  {
2139  	int block = 64; /* Increase for more tests per MSB position */
2140  	int len = 3 + 8 * 63 * block * 2;
2141  	struct bpf_insn *insn;
2142  	int bit, adj, sign;
2143  	int i = 0;
2144  
2145  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2146  	if (!insn)
2147  		return -ENOMEM;
2148  
2149  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2150  
2151  	for (bit = 0; bit <= 62; bit++) {
2152  		for (adj = -block / 2; adj < block / 2; adj++) {
2153  			for (sign = -1; sign <= 1; sign += 2) {
2154  				s64 imm = sign * ((1LL << bit) + adj);
2155  
2156  				/* Perform operation */
2157  				i += __bpf_ld_imm64(&insn[i], R1, imm);
2158  
2159  				/* Load reference */
2160  				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2161  				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
2162  							  (u32)(imm >> 32));
2163  				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2164  				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2165  
2166  				/* Check result */
2167  				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2168  				insn[i++] = BPF_EXIT_INSN();
2169  			}
2170  		}
2171  	}
2172  
2173  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2174  	insn[i++] = BPF_EXIT_INSN();
2175  
2176  	self->u.ptr.insns = insn;
2177  	self->u.ptr.len = len;
2178  	BUG_ON(i != len);
2179  
2180  	return 0;
2181  }
2182  
2183  /*
2184   * Test the two-instruction 64-bit immediate load operation for different
2185   * combinations of bytes. Each byte in the 64-bit word is constructed as
2186   * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
2187   * All patterns of (base1, mask1) and (base2, mask2) bytes are tested.
2188   */
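/*
 * The wrappers below pick a few interesting generator pairs: "checker"
 * pairs an all-zeros base with an all-ones base under a full byte mask,
 * "pos_neg" mixes bytes with the sign bit forced clear and set, and the
 * "pos_zero"/"neg_zero" variants mix a positive or negative byte
 * generator with constant zero bytes. As in the magnitude test above,
 * the reference value is assembled from two 32-bit halves and compared
 * against the two-instruction load.
 */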
2189  static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
2190  				     u8 base1, u8 mask1,
2191  				     u8 base2, u8 mask2)
2192  {
2193  	struct bpf_insn *insn;
2194  	int len = 3 + 8 * BIT(8);
2195  	int pattern, index;
2196  	u32 rand = 1;
2197  	int i = 0;
2198  
2199  	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2200  	if (!insn)
2201  		return -ENOMEM;
2202  
2203  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2204  
2205  	for (pattern = 0; pattern < BIT(8); pattern++) {
2206  		u64 imm = 0;
2207  
2208  		for (index = 0; index < 8; index++) {
2209  			int byte;
2210  
2211  			if (pattern & BIT(index))
2212  				byte = (base1 & mask1) | (rand & ~mask1);
2213  			else
2214  				byte = (base2 & mask2) | (rand & ~mask2);
2215  			imm = (imm << 8) | byte;
2216  		}
2217  
2218  		/* Update our LCG */
2219  		rand = rand * 1664525 + 1013904223;
2220  
2221  		/* Perform operation */
2222  		i += __bpf_ld_imm64(&insn[i], R1, imm);
2223  
2224  		/* Load reference */
2225  		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2226  		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
2227  		insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2228  		insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2229  
2230  		/* Check result */
2231  		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2232  		insn[i++] = BPF_EXIT_INSN();
2233  	}
2234  
2235  	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2236  	insn[i++] = BPF_EXIT_INSN();
2237  
2238  	self->u.ptr.insns = insn;
2239  	self->u.ptr.len = len;
2240  	BUG_ON(i != len);
2241  
2242  	return 0;
2243  }
2244  
2245  static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
2246  {
2247  	return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
2248  }
2249  
2250  static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
2251  {
2252  	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
2253  }
2254  
2255  static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
2256  {
2257  	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
2258  }
2259  
2260  static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
2261  {
2262  	return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
2263  }
2264  
2265  /*
2266   * Exhaustive tests of JMP operations for all combinations of power-of-two
2267   * magnitudes of the operands, both for positive and negative values. The
2268   * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2269   * emit different code depending on the magnitude of the immediate value.
2270   */
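/*
 * Each (dst, src/imm) operand pair below expands to a short block: set
 * R0 to the expected branch outcome, load the register operands with the
 * 64-bit immediate load, execute the jump under test with an offset of
 * one instruction, and exit early if the branch goes the wrong way. The
 * expected outcome is precomputed by __bpf_match_jmp_cond() on the same
 * operand values.
 */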
2271  
2272  static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
2273  {
2274  	switch (op) {
2275  	case BPF_JSET:
2276  		return !!(v1 & v2);
2277  	case BPF_JEQ:
2278  		return v1 == v2;
2279  	case BPF_JNE:
2280  		return v1 != v2;
2281  	case BPF_JGT:
2282  		return (u64)v1 > (u64)v2;
2283  	case BPF_JGE:
2284  		return (u64)v1 >= (u64)v2;
2285  	case BPF_JLT:
2286  		return (u64)v1 < (u64)v2;
2287  	case BPF_JLE:
2288  		return (u64)v1 <= (u64)v2;
2289  	case BPF_JSGT:
2290  		return v1 > v2;
2291  	case BPF_JSGE:
2292  		return v1 >= v2;
2293  	case BPF_JSLT:
2294  		return v1 < v2;
2295  	case BPF_JSLE:
2296  		return v1 <= v2;
2297  	}
2298  	return false;
2299  }
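
/*
 * The unsigned conditions compare the operands as u64 and the signed
 * conditions as s64. The JMP32 emitters below pass operands truncated to
 * s32; sign extension preserves both the signed and the unsigned 32-bit
 * orderings, so the same helper also computes the expected JMP32 outcome.
 */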
2300  
2301  static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
2302  			      struct bpf_insn *insns, s64 dst, s64 imm)
2303  {
2304  	int op = *(int *)arg;
2305  
2306  	if (insns) {
2307  		bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
2308  		int i = 0;
2309  
2310  		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
2311  
2312  		i += __bpf_ld_imm64(&insns[i], R1, dst);
2313  		insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
2314  		if (!match)
2315  			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2316  		insns[i++] = BPF_EXIT_INSN();
2317  
2318  		return i;
2319  	}
2320  
2321  	return 5 + 1;
2322  }
2323  
2324  static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
2325  				struct bpf_insn *insns, s64 dst, s64 imm)
2326  {
2327  	int op = *(int *)arg;
2328  
2329  	if (insns) {
2330  		bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
2331  		int i = 0;
2332  
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

2333  		i += __bpf_ld_imm64(&insns[i], R1, dst);
2334  		insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
2335  		if (!match)
2336  			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2337  		insns[i++] = BPF_EXIT_INSN();
2338  
2339  		return i;
2340  	}
2341  
2342  	return 5 + 1;
2343  }
2344  
2345  static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
2346  			      struct bpf_insn *insns, s64 dst, s64 src)
2347  {
2348  	int op = *(int *)arg;
2349  
2350  	if (insns) {
2351  		bool match = __bpf_match_jmp_cond(dst, src, op);
2352  		int i = 0;
2353  
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

2354  		i += __bpf_ld_imm64(&insns[i], R1, dst);
2355  		i += __bpf_ld_imm64(&insns[i], R2, src);
2356  		insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
2357  		if (!match)
2358  			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2359  		insns[i++] = BPF_EXIT_INSN();
2360  
2361  		return i;
2362  	}
2363  
2364  	return 7 + 1;
2365  }
2366  
2367  static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
2368  				struct bpf_insn *insns, s64 dst, s64 src)
2369  {
2370  	int op = *(int *)arg;
2371  
2372  	if (insns) {
2373  		bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
2374  		int i = 0;
2375  
		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

2376  		i += __bpf_ld_imm64(&insns[i], R1, dst);
2377  		i += __bpf_ld_imm64(&insns[i], R2, src);
2378  		insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
2379  		if (!match)
2380  			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2381  		insns[i++] = BPF_EXIT_INSN();
2382  
2383  		return i;
2384  	}
2385  
2386  	return 7 + 1;
2387  }
2388  
2389  static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
2390  {
2391  	return __bpf_fill_pattern(self, &op, 64, 32,
2392  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2393  				  &__bpf_emit_jmp_imm);
2394  }
2395  
2396  static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
2397  {
2398  	return __bpf_fill_pattern(self, &op, 64, 32,
2399  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2400  				  &__bpf_emit_jmp32_imm);
2401  }
2402  
2403  static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
2404  {
2405  	return __bpf_fill_pattern(self, &op, 64, 64,
2406  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2407  				  &__bpf_emit_jmp_reg);
2408  }
2409  
2410  static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
2411  {
2412  	return __bpf_fill_pattern(self, &op, 64, 64,
2413  				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2414  				  &__bpf_emit_jmp32_reg);
2415  }
2416  
2417  /* JMP immediate tests */
2418  static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
2419  {
2420  	return __bpf_fill_jmp_imm(self, BPF_JSET);
2421  }
2422  
2423  static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
2424  {
2425  	return __bpf_fill_jmp_imm(self, BPF_JEQ);
2426  }
2427  
2428  static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
2429  {
2430  	return __bpf_fill_jmp_imm(self, BPF_JNE);
2431  }
2432  
2433  static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
2434  {
2435  	return __bpf_fill_jmp_imm(self, BPF_JGT);
2436  }
2437  
2438  static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
2439  {
2440  	return __bpf_fill_jmp_imm(self, BPF_JGE);
2441  }
2442  
2443  static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
2444  {
2445  	return __bpf_fill_jmp_imm(self, BPF_JLT);
2446  }
2447  
2448  static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
2449  {
2450  	return __bpf_fill_jmp_imm(self, BPF_JLE);
2451  }
2452  
2453  static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
2454  {
2455  	return __bpf_fill_jmp_imm(self, BPF_JSGT);
2456  }
2457  
2458  static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
2459  {
2460  	return __bpf_fill_jmp_imm(self, BPF_JSGE);
2461  }
2462  
2463  static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
2464  {
2465  	return __bpf_fill_jmp_imm(self, BPF_JSLT);
2466  }
2467  
2468  static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
2469  {
2470  	return __bpf_fill_jmp_imm(self, BPF_JSLE);
2471  }
2472  
2473  /* JMP32 immediate tests */
2474  static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
2475  {
2476  	return __bpf_fill_jmp32_imm(self, BPF_JSET);
2477  }
2478  
2479  static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
2480  {
2481  	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
2482  }
2483  
2484  static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
2485  {
2486  	return __bpf_fill_jmp32_imm(self, BPF_JNE);
2487  }
2488  
2489  static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
2490  {
2491  	return __bpf_fill_jmp32_imm(self, BPF_JGT);
2492  }
2493  
2494  static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
2495  {
2496  	return __bpf_fill_jmp32_imm(self, BPF_JGE);
2497  }
2498  
2499  static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
2500  {
2501  	return __bpf_fill_jmp32_imm(self, BPF_JLT);
2502  }
2503  
2504  static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
2505  {
2506  	return __bpf_fill_jmp32_imm(self, BPF_JLE);
2507  }
2508  
2509  static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
2510  {
2511  	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
2512  }
2513  
2514  static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
2515  {
2516  	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
2517  }
2518  
2519  static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
2520  {
2521  	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
2522  }
2523  
2524  static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
2525  {
2526  	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
2527  }
2528  
2529  /* JMP register tests */
2530  static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
2531  {
2532  	return __bpf_fill_jmp_reg(self, BPF_JSET);
2533  }
2534  
2535  static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
2536  {
2537  	return __bpf_fill_jmp_reg(self, BPF_JEQ);
2538  }
2539  
2540  static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
2541  {
2542  	return __bpf_fill_jmp_reg(self, BPF_JNE);
2543  }
2544  
2545  static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
2546  {
2547  	return __bpf_fill_jmp_reg(self, BPF_JGT);
2548  }
2549  
2550  static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
2551  {
2552  	return __bpf_fill_jmp_reg(self, BPF_JGE);
2553  }
2554  
2555  static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
2556  {
2557  	return __bpf_fill_jmp_reg(self, BPF_JLT);
2558  }
2559  
2560  static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
2561  {
2562  	return __bpf_fill_jmp_reg(self, BPF_JLE);
2563  }
2564  
2565  static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
2566  {
2567  	return __bpf_fill_jmp_reg(self, BPF_JSGT);
2568  }
2569  
2570  static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
2571  {
2572  	return __bpf_fill_jmp_reg(self, BPF_JSGE);
2573  }
2574  
2575  static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
2576  {
2577  	return __bpf_fill_jmp_reg(self, BPF_JSLT);
2578  }
2579  
2580  static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
2581  {
2582  	return __bpf_fill_jmp_reg(self, BPF_JSLE);
2583  }
2584  
2585  /* JMP32 register tests */
2586  static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
2587  {
2588  	return __bpf_fill_jmp32_reg(self, BPF_JSET);
2589  }
2590  
2591  static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
2592  {
2593  	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
2594  }
2595  
2596  static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
2597  {
2598  	return __bpf_fill_jmp32_reg(self, BPF_JNE);
2599  }
2600  
2601  static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
2602  {
2603  	return __bpf_fill_jmp32_reg(self, BPF_JGT);
2604  }
2605  
2606  static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
2607  {
2608  	return __bpf_fill_jmp32_reg(self, BPF_JGE);
2609  }
2610  
2611  static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
2612  {
2613  	return __bpf_fill_jmp32_reg(self, BPF_JLT);
2614  }
2615  
2616  static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
2617  {
2618  	return __bpf_fill_jmp32_reg(self, BPF_JLE);
2619  }
2620  
2621  static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
2622  {
2623  	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
2624  }
2625  
2626  static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
2627  {
2628  	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
2629  }
2630  
2631  static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
2632  {
2633  	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
2634  }
2635  
2636  static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
2637  {
2638  	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
2639  }
2640  
2641  /*
2642   * Set up a sequence of staggered jumps, forwards and backwards with
2643   * increasing offset. This tests the conversion of relative jumps to
2644   * JITed native jumps. On some architectures, for example MIPS, a large
2645   * PC-relative jump offset may overflow the immediate field of the native
2646   * conditional branch instruction, triggering a conversion to use an
2647   * absolute jump instead. Since this changes the jump offsets, another
2648   * offset computation pass is necessary, and that may in turn trigger
2649   * another branch conversion. This jump sequence is particularly nasty
2650   * in that regard.
2651   *
2652   * The sequence generation is parameterized by size and jump type.
2653   * The size must be even, and the expected result is always size + 1.
2654   * Below is an example with size=8 and result=9.
2655   *
2656   *                     ________________________Start
2657   *                     R0 = 0
2658   *                     R1 = r1
2659   *                     R2 = r2
2660   *            ,------- JMP +4 * 3______________Preamble: 4 insns
2661   * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2662   * |          |        R0 = 8                                        |
2663   * |          |        JMP +7 * 3               ------------------------.
2664   * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------.     |  |
2665   * | |        |        R0 = 6                                  |     |  |
2666   * | |        |        JMP +5 * 3               ------------------.  |  |
2667   * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------.     |  |  |  |
2668   * | | |      |        R0 = 4                            |     |  |  |  |
2669   * | | |      |        JMP +3 * 3               ------------.  |  |  |  |
2670   * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--.     |  |  |  |  |  |
2671   * | | | |    |        R0 = 2                      |     |  |  |  |  |  |
2672   * | | | |    |        JMP +1 * 3               ------.  |  |  |  |  |  |
2673   * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1    1  2  3  4  5  6  7  8 loc
2674   * | | | | |           R0 = 1                     -1 +2 -3 +4 -5 +6 -7 +8 off
2675   * | | | | |           JMP -2 * 3               ---'  |  |  |  |  |  |  |
2676   * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----'  |  |  |  |  |  |
2677   * | | | | | |         R0 = 3                            |  |  |  |  |  |
2678   * | | | | | |         JMP -4 * 3               ---------'  |  |  |  |  |
2679   * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------'  |  |  |  |
2680   * | | | | | | |       R0 = 5                                  |  |  |  |
2681   * | | | | | | |       JMP -6 * 3               ---------------'  |  |  |
2682   * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------'  |  |
2683   * | | | | | | | |     R0 = 7                                        |  |
2684   * | | Error | | |     JMP -8 * 3               ---------------------'  |
2685   * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2686   * | | | | | | | | |   R0 = 9__________________Sequence: 3 * size + 2 insns
2687   * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2688   *
2689   */
2690  
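/*
 * With size = 8 as above, the program is 4 + 3 * 9 = 31 instructions
 * long. The preamble jump enters the sequence in the middle, at location
 * 1, and each visited location first checks that R0 holds the previous
 * location number before storing its own and jumping on to the next,
 * increasingly distant, location. A jump that lands in the wrong place
 * fails that check and branches straight to the final exit, returning a
 * value smaller than the expected size + 1.
 */
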
2691  /*
 * The maximum size parameter: the largest jump offset used, 3 * size + 1,
 * must still fit in the 16-bit signed offset field of a jump instruction.
 */
2692  #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2693  
2694  /* We use a reduced number of iterations to get a reasonable execution time */
2695  #define NR_STAGGERED_JMP_RUNS 10
2696  
2697  static int __bpf_fill_staggered_jumps(struct bpf_test *self,
2698  				      const struct bpf_insn *jmp,
2699  				      u64 r1, u64 r2)
2700  {
2701  	int size = self->test[0].result - 1;
2702  	int len = 4 + 3 * (size + 1);
2703  	struct bpf_insn *insns;
2704  	int off, ind;
2705  
2706  	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
2707  	if (!insns)
2708  		return -ENOMEM;
2709  
2710  	/* Preamble */
2711  	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2712  	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
2713  	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
2714  	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
2715  
2716  	/* Sequence */
2717  	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
2718  		struct bpf_insn *ins = &insns[4 + 3 * ind];
2719  		int loc;
2720  
2721  		if (off == 0)
2722  			off--;
2723  
2724  		loc = abs(off);
2725  		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
2726  				     3 * (size - ind) + 1);
2727  		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
2728  		ins[2] = *jmp;
2729  		ins[2].off = 3 * (off - 1);
2730  	}
2731  
2732  	/* Return */
2733  	insns[len - 1] = BPF_EXIT_INSN();
2734  
2735  	self->u.ptr.insns = insns;
2736  	self->u.ptr.len = len;
2737  
2738  	return 0;
2739  }
2740  
2741  /* 64-bit unconditional jump */
2742  static int bpf_fill_staggered_ja(struct bpf_test *self)
2743  {
2744  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
2745  
2746  	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
2747  }
2748  
2749  /* 64-bit immediate jumps */
2750  static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
2751  {
2752  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
2753  
2754  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2755  }
2756  
2757  static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
2758  {
2759  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
2760  
2761  	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2762  }
2763  
2764  static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
2765  {
2766  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
2767  
2768  	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2769  }
2770  
2771  static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
2772  {
2773  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
2774  
2775  	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2776  }
2777  
2778  static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
2779  {
2780  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
2781  
2782  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2783  }
2784  
2785  static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
2786  {
2787  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
2788  
2789  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2790  }
2791  
2792  static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
2793  {
2794  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
2795  
2796  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2797  }
2798  
2799  static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
2800  {
2801  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
2802  
2803  	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2804  }
2805  
2806  static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
2807  {
2808  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
2809  
2810  	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2811  }
2812  
2813  static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
2814  {
2815  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
2816  
2817  	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2818  }
2819  
2820  static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
2821  {
2822  	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
2823  
2824  	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2825  }
2826  
2827  /* 64-bit register jumps */
2828  static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
2829  {
2830  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
2831  
2832  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2833  }
2834  
2835  static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
2836  {
2837  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
2838  
2839  	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2840  }
2841  
2842  static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
2843  {
2844  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
2845  
2846  	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
2847  }
2848  
2849  static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
2850  {
2851  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
2852  
2853  	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
2854  }
2855  
2856  static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
2857  {
2858  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
2859  
2860  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2861  }
2862  
2863  static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
2864  {
2865  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
2866  
2867  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
2868  }
2869  
2870  static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
2871  {
2872  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
2873  
2874  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2875  }
2876  
2877  static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
2878  {
2879  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
2880  
2881  	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
2882  }
2883  
2884  static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
2885  {
2886  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
2887  
2888  	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
2889  }
2890  
2891  static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
2892  {
2893  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
2894  
2895  	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
2896  }
2897  
2898  static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
2899  {
2900  	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
2901  
2902  	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
2903  }
2904  
2905  /* 32-bit immediate jumps */
2906  static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
2907  {
2908  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
2909  
2910  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2911  }
2912  
2913  static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
2914  {
2915  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
2916  
2917  	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2918  }
2919  
2920  static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
2921  {
2922  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
2923  
2924  	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2925  }
2926  
2927  static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
2928  {
2929  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
2930  
2931  	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2932  }
2933  
2934  static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
2935  {
2936  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
2937  
2938  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2939  }
2940  
2941  static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
2942  {
2943  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
2944  
2945  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2946  }
2947  
2948  static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
2949  {
2950  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
2951  
2952  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2953  }
2954  
2955  static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
2956  {
2957  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
2958  
2959  	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2960  }
2961  
2962  static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
2963  {
2964  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
2965  
2966  	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2967  }
2968  
2969  static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
2970  {
2971  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
2972  
2973  	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2974  }
2975  
2976  static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
2977  {
2978  	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
2979  
2980  	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2981  }
2982  
2983  /* 32-bit register jumps */
2984  static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
2985  {
2986  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
2987  
2988  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2989  }
2990  
2991  static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
2992  {
2993  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
2994  
2995  	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2996  }
2997  
2998  static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
2999  {
3000  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
3001  
3002  	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
3003  }
3004  
3005  static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
3006  {
3007  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
3008  
3009  	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
3010  }
3011  
3012  static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
3013  {
3014  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
3015  
3016  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3017  }
3018  
3019  static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
3020  {
3021  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
3022  
3023  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
3024  }
3025  
3026  static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
3027  {
3028  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
3029  
3030  	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3031  }
3032  
3033  static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
3034  {
3035  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
3036  
3037  	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
3038  }
3039  
3040  static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
3041  {
3042  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
3043  
3044  	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
3045  }
3046  
3047  static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
3048  {
3049  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
3050  
3051  	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
3052  }
3053  
3054  static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
3055  {
3056  	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
3057  
3058  	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
3059  }
3060  
3061  
3062  static struct bpf_test tests[] = {
3063  	{
3064  		"TAX",
3065  		.u.insns = {
3066  			BPF_STMT(BPF_LD | BPF_IMM, 1),
3067  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3068  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3069  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3070  			BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
3071  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3072  			BPF_STMT(BPF_LD | BPF_LEN, 0),
3073  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3074  			BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
3075  			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
3076  			BPF_STMT(BPF_RET | BPF_A, 0)
3077  		},
3078  		CLASSIC,
3079  		{ 10, 20, 30, 40, 50 },
3080  		{ { 2, 10 }, { 3, 20 }, { 4, 30 } },
3081  	},
3082  	{
3083  		"TXA",
3084  		.u.insns = {
3085  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3086  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3087  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3088  			BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
3089  		},
3090  		CLASSIC,
3091  		{ 10, 20, 30, 40, 50 },
3092  		{ { 1, 2 }, { 3, 6 }, { 4, 8 } },
3093  	},
3094  	{
3095  		"ADD_SUB_MUL_K",
3096  		.u.insns = {
3097  			BPF_STMT(BPF_LD | BPF_IMM, 1),
3098  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
3099  			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3100  			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3101  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
3102  			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
3103  			BPF_STMT(BPF_RET | BPF_A, 0)
3104  		},
3105  		CLASSIC | FLAG_NO_DATA,
3106  		{ },
3107  		{ { 0, 0xfffffffd } }
3108  	},
3109  	{
3110  		"DIV_MOD_KX",
3111  		.u.insns = {
3112  			BPF_STMT(BPF_LD | BPF_IMM, 8),
3113  			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
3114  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3115  			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3116  			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
3117  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3118  			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3119  			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
3120  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3121  			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3122  			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
3123  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3124  			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3125  			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
3126  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3127  			BPF_STMT(BPF_RET | BPF_A, 0)
3128  		},
3129  		CLASSIC | FLAG_NO_DATA,
3130  		{ },
3131  		{ { 0, 0x20000000 } }
3132  	},
3133  	{
3134  		"AND_OR_LSH_K",
3135  		.u.insns = {
3136  			BPF_STMT(BPF_LD | BPF_IMM, 0xff),
3137  			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3138  			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
3139  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3140  			BPF_STMT(BPF_LD | BPF_IMM, 0xf),
3141  			BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
3142  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3143  			BPF_STMT(BPF_RET | BPF_A, 0)
3144  		},
3145  		CLASSIC | FLAG_NO_DATA,
3146  		{ },
3147  		{ { 0, 0x800000ff }, { 1, 0x800000ff } },
3148  	},
3149  	{
3150  		"LD_IMM_0",
3151  		.u.insns = {
3152  			BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
3153  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
3154  			BPF_STMT(BPF_RET | BPF_K, 0),
3155  			BPF_STMT(BPF_RET | BPF_K, 1),
3156  		},
3157  		CLASSIC,
3158  		{ },
3159  		{ { 1, 1 } },
3160  	},
3161  	{
3162  		"LD_IND",
3163  		.u.insns = {
3164  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3165  			BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
3166  			BPF_STMT(BPF_RET | BPF_K, 1)
3167  		},
3168  		CLASSIC,
3169  		{ },
3170  		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
3171  	},
3172  	{
3173  		"LD_ABS",
3174  		.u.insns = {
3175  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
3176  			BPF_STMT(BPF_RET | BPF_K, 1)
3177  		},
3178  		CLASSIC,
3179  		{ },
3180  		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
3181  	},
3182  	{
3183  		"LD_ABS_LL",
3184  		.u.insns = {
3185  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
3186  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3187  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
3188  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3189  			BPF_STMT(BPF_RET | BPF_A, 0)
3190  		},
3191  		CLASSIC,
3192  		{ 1, 2, 3 },
3193  		{ { 1, 0 }, { 2, 3 } },
3194  	},
3195  	{
3196  		"LD_IND_LL",
3197  		.u.insns = {
3198  			BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
3199  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3200  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3201  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3202  			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3203  			BPF_STMT(BPF_RET | BPF_A, 0)
3204  		},
3205  		CLASSIC,
3206  		{ 1, 2, 3, 0xff },
3207  		{ { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3208  	},
3209  	{
3210  		"LD_ABS_NET",
3211  		.u.insns = {
3212  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
3213  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3214  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
3215  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3216  			BPF_STMT(BPF_RET | BPF_A, 0)
3217  		},
3218  		CLASSIC,
3219  		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3220  		{ { 15, 0 }, { 16, 3 } },
3221  	},
3222  	{
3223  		"LD_IND_NET",
3224  		.u.insns = {
3225  			BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
3226  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3227  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3228  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3229  			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3230  			BPF_STMT(BPF_RET | BPF_A, 0)
3231  		},
3232  		CLASSIC,
3233  		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3234  		{ { 14, 0 }, { 15, 1 }, { 17, 3 } },
3235  	},
3236  	{
3237  		"LD_PKTTYPE",
3238  		.u.insns = {
3239  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3240  				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3241  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3242  			BPF_STMT(BPF_RET | BPF_K, 1),
3243  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3244  				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3245  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3246  			BPF_STMT(BPF_RET | BPF_K, 1),
3247  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3248  				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3249  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3250  			BPF_STMT(BPF_RET | BPF_K, 1),
3251  			BPF_STMT(BPF_RET | BPF_A, 0)
3252  		},
3253  		CLASSIC,
3254  		{ },
3255  		{ { 1, 3 }, { 10, 3 } },
3256  	},
3257  	{
3258  		"LD_MARK",
3259  		.u.insns = {
3260  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3261  				 SKF_AD_OFF + SKF_AD_MARK),
3262  			BPF_STMT(BPF_RET | BPF_A, 0)
3263  		},
3264  		CLASSIC,
3265  		{ },
3266  		{ { 1, SKB_MARK}, { 10, SKB_MARK} },
3267  	},
3268  	{
3269  		"LD_RXHASH",
3270  		.u.insns = {
3271  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3272  				 SKF_AD_OFF + SKF_AD_RXHASH),
3273  			BPF_STMT(BPF_RET | BPF_A, 0)
3274  		},
3275  		CLASSIC,
3276  		{ },
3277  		{ { 1, SKB_HASH}, { 10, SKB_HASH} },
3278  	},
3279  	{
3280  		"LD_QUEUE",
3281  		.u.insns = {
3282  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3283  				 SKF_AD_OFF + SKF_AD_QUEUE),
3284  			BPF_STMT(BPF_RET | BPF_A, 0)
3285  		},
3286  		CLASSIC,
3287  		{ },
3288  		{ { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
3289  	},
3290  	{
3291  		"LD_PROTOCOL",
3292  		.u.insns = {
3293  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
3294  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
3295  			BPF_STMT(BPF_RET | BPF_K, 0),
3296  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3297  				 SKF_AD_OFF + SKF_AD_PROTOCOL),
3298  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3299  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3300  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
3301  			BPF_STMT(BPF_RET | BPF_K, 0),
3302  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3303  			BPF_STMT(BPF_RET | BPF_A, 0)
3304  		},
3305  		CLASSIC,
3306  		{ 10, 20, 30 },
3307  		{ { 10, ETH_P_IP }, { 100, ETH_P_IP } },
3308  	},
3309  	{
3310  		"LD_VLAN_TAG",
3311  		.u.insns = {
3312  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3313  				 SKF_AD_OFF + SKF_AD_VLAN_TAG),
3314  			BPF_STMT(BPF_RET | BPF_A, 0)
3315  		},
3316  		CLASSIC,
3317  		{ },
3318  		{
3319  			{ 1, SKB_VLAN_TCI },
3320  			{ 10, SKB_VLAN_TCI }
3321  		},
3322  	},
3323  	{
3324  		"LD_VLAN_TAG_PRESENT",
3325  		.u.insns = {
3326  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3327  				 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
3328  			BPF_STMT(BPF_RET | BPF_A, 0)
3329  		},
3330  		CLASSIC,
3331  		{ },
3332  		{
3333  			{ 1, SKB_VLAN_PRESENT },
3334  			{ 10, SKB_VLAN_PRESENT }
3335  		},
3336  	},
3337  	{
3338  		"LD_IFINDEX",
3339  		.u.insns = {
3340  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3341  				 SKF_AD_OFF + SKF_AD_IFINDEX),
3342  			BPF_STMT(BPF_RET | BPF_A, 0)
3343  		},
3344  		CLASSIC,
3345  		{ },
3346  		{ { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
3347  	},
3348  	{
3349  		"LD_HATYPE",
3350  		.u.insns = {
3351  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3352  				 SKF_AD_OFF + SKF_AD_HATYPE),
3353  			BPF_STMT(BPF_RET | BPF_A, 0)
3354  		},
3355  		CLASSIC,
3356  		{ },
3357  		{ { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
3358  	},
3359  	{
3360  		"LD_CPU",
3361  		.u.insns = {
3362  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3363  				 SKF_AD_OFF + SKF_AD_CPU),
3364  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3365  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3366  				 SKF_AD_OFF + SKF_AD_CPU),
3367  			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3368  			BPF_STMT(BPF_RET | BPF_A, 0)
3369  		},
3370  		CLASSIC,
3371  		{ },
3372  		{ { 1, 0 }, { 10, 0 } },
3373  	},
3374  	{
3375  		"LD_NLATTR",
3376  		.u.insns = {
3377  			BPF_STMT(BPF_LDX | BPF_IMM, 2),
3378  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3379  			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3380  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3381  				 SKF_AD_OFF + SKF_AD_NLATTR),
3382  			BPF_STMT(BPF_RET | BPF_A, 0)
3383  		},
3384  		CLASSIC,
3385  #ifdef __BIG_ENDIAN
3386  		{ 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3387  #else
3388  		{ 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3389  #endif
3390  		{ { 4, 0 }, { 20, 6 } },
3391  	},
3392  	{
3393  		"LD_NLATTR_NEST",
3394  		.u.insns = {
3395  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3396  			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3397  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3398  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3399  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3400  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3401  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3402  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3403  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3404  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3405  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3406  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3407  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3408  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3409  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3410  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3411  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3412  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3413  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3414  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3415  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3416  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3417  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3418  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3419  				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3420  			BPF_STMT(BPF_RET | BPF_A, 0)
3421  		},
3422  		CLASSIC,
3423  #ifdef __BIG_ENDIAN
3424  		{ 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3425  #else
3426  		{ 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3427  #endif
3428  		{ { 4, 0 }, { 20, 10 } },
3429  	},
3430  	{
3431  		"LD_PAYLOAD_OFF",
3432  		.u.insns = {
3433  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3434  				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3435  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3436  				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3437  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3438  				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3439  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3440  				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3441  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3442  				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3443  			BPF_STMT(BPF_RET | BPF_A, 0)
3444  		},
3445  		CLASSIC,
3446  		/* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
3447  		 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3448  		 * id 9737, seq 1, length 64
3449  		 */
3450  		{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3451  		  0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3452  		  0x08, 0x00,
3453  		  0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3454  		  0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3455  		{ { 30, 0 }, { 100, 42 } },
3456  	},
3457  	{
3458  		"LD_ANC_XOR",
3459  		.u.insns = {
3460  			BPF_STMT(BPF_LD | BPF_IMM, 10),
3461  			BPF_STMT(BPF_LDX | BPF_IMM, 300),
3462  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3463  				 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
3464  			BPF_STMT(BPF_RET | BPF_A, 0)
3465  		},
3466  		CLASSIC,
3467  		{ },
3468  		{ { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3469  	},
3470  	{
3471  		"SPILL_FILL",
3472  		.u.insns = {
3473  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3474  			BPF_STMT(BPF_LD | BPF_IMM, 2),
3475  			BPF_STMT(BPF_ALU | BPF_RSH, 1),
3476  			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3477  			BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
3478  			BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
3479  			BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3480  			BPF_STMT(BPF_STX, 15), /* M3 = len */
3481  			BPF_STMT(BPF_LDX | BPF_MEM, 1),
3482  			BPF_STMT(BPF_LD | BPF_MEM, 2),
3483  			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3484  			BPF_STMT(BPF_LDX | BPF_MEM, 15),
3485  			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3486  			BPF_STMT(BPF_RET | BPF_A, 0)
3487  		},
3488  		CLASSIC,
3489  		{ },
3490  		{ { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3491  	},
3492  	{
3493  		"JEQ",
3494  		.u.insns = {
3495  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3496  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3497  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
3498  			BPF_STMT(BPF_RET | BPF_K, 1),
3499  			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3500  		},
3501  		CLASSIC,
3502  		{ 3, 3, 3, 3, 3 },
3503  		{ { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
3504  	},
3505  	{
3506  		"JGT",
3507  		.u.insns = {
3508  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3509  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3510  			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
3511  			BPF_STMT(BPF_RET | BPF_K, 1),
3512  			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3513  		},
3514  		CLASSIC,
3515  		{ 4, 4, 4, 3, 3 },
3516  		{ { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
3517  	},
3518  	{
3519  		"JGE (jt 0), test 1",
3520  		.u.insns = {
3521  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3522  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3523  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3524  			BPF_STMT(BPF_RET | BPF_K, 1),
3525  			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3526  		},
3527  		CLASSIC,
3528  		{ 4, 4, 4, 3, 3 },
3529  		{ { 2, 0 }, { 3, 1 }, { 4, 1 } },
3530  	},
3531  	{
3532  		"JGE (jt 0), test 2",
3533  		.u.insns = {
3534  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3535  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3536  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3537  			BPF_STMT(BPF_RET | BPF_K, 1),
3538  			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3539  		},
3540  		CLASSIC,
3541  		{ 4, 4, 5, 3, 3 },
3542  		{ { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
3543  	},
3544  	{
3545  		"JGE",
3546  		.u.insns = {
3547  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3548  			BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
3549  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
3550  			BPF_STMT(BPF_RET | BPF_K, 10),
3551  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
3552  			BPF_STMT(BPF_RET | BPF_K, 20),
3553  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
3554  			BPF_STMT(BPF_RET | BPF_K, 30),
3555  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
3556  			BPF_STMT(BPF_RET | BPF_K, 40),
3557  			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3558  		},
3559  		CLASSIC,
3560  		{ 1, 2, 3, 4, 5 },
3561  		{ { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
3562  	},
3563  	{
3564  		"JSET",
3565  		.u.insns = {
3566  			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3567  			BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
3568  			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3569  			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3570  			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3571  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3572  			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
3573  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3574  			BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
3575  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
3576  			BPF_STMT(BPF_RET | BPF_K, 10),
3577  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
3578  			BPF_STMT(BPF_RET | BPF_K, 20),
3579  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3580  			BPF_STMT(BPF_RET | BPF_K, 30),
3581  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3582  			BPF_STMT(BPF_RET | BPF_K, 30),
3583  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3584  			BPF_STMT(BPF_RET | BPF_K, 30),
3585  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3586  			BPF_STMT(BPF_RET | BPF_K, 30),
3587  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3588  			BPF_STMT(BPF_RET | BPF_K, 30),
3589  			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3590  		},
3591  		CLASSIC,
3592  		{ 0, 0xAA, 0x55, 1 },
3593  		{ { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
3594  	},
3595  	{
3596  		"tcpdump port 22",
3597  		.u.insns = {
3598  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3599  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
3600  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
3601  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3602  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3603  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
3604  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
3605  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
3606  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
3607  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
3608  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
3609  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3610  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3611  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3612  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
3613  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3614  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
3615  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3616  			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3617  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3618  			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3619  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
3620  			BPF_STMT(BPF_RET | BPF_K, 0xffff),
3621  			BPF_STMT(BPF_RET | BPF_K, 0),
3622  		},
3623  		CLASSIC,
3624  		/* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
3625  		 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3626  		 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3627  		 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3628  		 */
3629  		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3630  		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3631  		  0x08, 0x00,
3632  		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3633  		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3634  		  0x0a, 0x01, 0x01, 0x95, /* ip src */
3635  		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3636  		  0xc2, 0x24,
3637  		  0x00, 0x16 /* dst port */ },
3638  		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3639  	},
3640  	{
3641  		"tcpdump complex",
3642  		.u.insns = {
3643  			/* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3644  			 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3645  			 * (len > 115 or len < 30000000000)' -d
3646  			 */
3647  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3648  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
3649  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
3650  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3651  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
3652  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3653  			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
3654  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3655  			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3656  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3657  			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3658  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
3659  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
3660  			BPF_STMT(BPF_ST, 1),
3661  			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
3662  			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
3663  			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
3664  			BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
3665  			BPF_STMT(BPF_LD | BPF_MEM, 1),
3666  			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3667  			BPF_STMT(BPF_ST, 5),
3668  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3669  			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
3670  			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3671  			BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
3672  			BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
3673  			BPF_STMT(BPF_LD | BPF_MEM, 5),
3674  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
3675  			BPF_STMT(BPF_LD | BPF_LEN, 0),
3676  			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
3677  			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
3678  			BPF_STMT(BPF_RET | BPF_K, 0xffff),
3679  			BPF_STMT(BPF_RET | BPF_K, 0),
3680  		},
3681  		CLASSIC,
3682  		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3683  		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3684  		  0x08, 0x00,
3685  		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3686  		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3687  		  0x0a, 0x01, 0x01, 0x95, /* ip src */
3688  		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3689  		  0xc2, 0x24,
3690  		  0x00, 0x16 /* dst port */ },
3691  		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3692  	},
3693  	{
3694  		"RET_A",
3695  		.u.insns = {
3696  			/* check that uninitialized X and A contain zeros */
3697  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3698  			BPF_STMT(BPF_RET | BPF_A, 0)
3699  		},
3700  		CLASSIC,
3701  		{ },
3702  		{ {1, 0}, {2, 0} },
3703  	},
3704  	{
3705  		"INT: ADD trivial",
3706  		.u.insns_int = {
3707  			BPF_ALU64_IMM(BPF_MOV, R1, 1),
3708  			BPF_ALU64_IMM(BPF_ADD, R1, 2),
3709  			BPF_ALU64_IMM(BPF_MOV, R2, 3),
3710  			BPF_ALU64_REG(BPF_SUB, R1, R2),
3711  			BPF_ALU64_IMM(BPF_ADD, R1, -1),
3712  			BPF_ALU64_IMM(BPF_MUL, R1, 3),
3713  			BPF_ALU64_REG(BPF_MOV, R0, R1),
3714  			BPF_EXIT_INSN(),
3715  		},
3716  		INTERNAL,
3717  		{ },
3718  		{ { 0, 0xfffffffd } }
3719  	},
3720  	{
3721  		"INT: MUL_X",
3722  		.u.insns_int = {
3723  			BPF_ALU64_IMM(BPF_MOV, R0, -1),
3724  			BPF_ALU64_IMM(BPF_MOV, R1, -1),
3725  			BPF_ALU64_IMM(BPF_MOV, R2, 3),
3726  			BPF_ALU64_REG(BPF_MUL, R1, R2),
3727  			BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
3728  			BPF_EXIT_INSN(),
3729  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
3730  			BPF_EXIT_INSN(),
3731  		},
3732  		INTERNAL,
3733  		{ },
3734  		{ { 0, 1 } }
3735  	},
3736  	{
3737  		"INT: MUL_X2",
3738  		.u.insns_int = {
3739  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
3740  			BPF_ALU32_IMM(BPF_MOV, R1, -1),
3741  			BPF_ALU32_IMM(BPF_MOV, R2, 3),
3742  			BPF_ALU64_REG(BPF_MUL, R1, R2),
3743  			BPF_ALU64_IMM(BPF_RSH, R1, 8),
3744  			BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
3745  			BPF_EXIT_INSN(),
3746  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
3747  			BPF_EXIT_INSN(),
3748  		},
3749  		INTERNAL,
3750  		{ },
3751  		{ { 0, 1 } }
3752  	},
3753  	{
3754  		"INT: MUL32_X",
3755  		.u.insns_int = {
3756  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
3757  			BPF_ALU64_IMM(BPF_MOV, R1, -1),
3758  			BPF_ALU32_IMM(BPF_MOV, R2, 3),
3759  			BPF_ALU32_REG(BPF_MUL, R1, R2),
3760  			BPF_ALU64_IMM(BPF_RSH, R1, 8),
3761  			BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
3762  			BPF_EXIT_INSN(),
3763  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
3764  			BPF_EXIT_INSN(),
3765  		},
3766  		INTERNAL,
3767  		{ },
3768  		{ { 0, 1 } }
3769  	},
3770  	{
3771  		/* Have to test all register combinations, since
3772  		 * JITing of different registers will produce
3773  		 * different asm code.
3774  		 */
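		/*
		 * Each Rn is set to n, bumped to n + 20, then back down to
		 * n + 10 before the accumulation starts; the JEQ constants
		 * (155, 456, 1358, ..., 2957380) are the running totals as
		 * every register is folded into R0, R1, ..., R9 in turn.
		 */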
3775  		"INT: ADD 64-bit",
3776  		.u.insns_int = {
3777  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
3778  			BPF_ALU64_IMM(BPF_MOV, R1, 1),
3779  			BPF_ALU64_IMM(BPF_MOV, R2, 2),
3780  			BPF_ALU64_IMM(BPF_MOV, R3, 3),
3781  			BPF_ALU64_IMM(BPF_MOV, R4, 4),
3782  			BPF_ALU64_IMM(BPF_MOV, R5, 5),
3783  			BPF_ALU64_IMM(BPF_MOV, R6, 6),
3784  			BPF_ALU64_IMM(BPF_MOV, R7, 7),
3785  			BPF_ALU64_IMM(BPF_MOV, R8, 8),
3786  			BPF_ALU64_IMM(BPF_MOV, R9, 9),
3787  			BPF_ALU64_IMM(BPF_ADD, R0, 20),
3788  			BPF_ALU64_IMM(BPF_ADD, R1, 20),
3789  			BPF_ALU64_IMM(BPF_ADD, R2, 20),
3790  			BPF_ALU64_IMM(BPF_ADD, R3, 20),
3791  			BPF_ALU64_IMM(BPF_ADD, R4, 20),
3792  			BPF_ALU64_IMM(BPF_ADD, R5, 20),
3793  			BPF_ALU64_IMM(BPF_ADD, R6, 20),
3794  			BPF_ALU64_IMM(BPF_ADD, R7, 20),
3795  			BPF_ALU64_IMM(BPF_ADD, R8, 20),
3796  			BPF_ALU64_IMM(BPF_ADD, R9, 20),
3797  			BPF_ALU64_IMM(BPF_SUB, R0, 10),
3798  			BPF_ALU64_IMM(BPF_SUB, R1, 10),
3799  			BPF_ALU64_IMM(BPF_SUB, R2, 10),
3800  			BPF_ALU64_IMM(BPF_SUB, R3, 10),
3801  			BPF_ALU64_IMM(BPF_SUB, R4, 10),
3802  			BPF_ALU64_IMM(BPF_SUB, R5, 10),
3803  			BPF_ALU64_IMM(BPF_SUB, R6, 10),
3804  			BPF_ALU64_IMM(BPF_SUB, R7, 10),
3805  			BPF_ALU64_IMM(BPF_SUB, R8, 10),
3806  			BPF_ALU64_IMM(BPF_SUB, R9, 10),
3807  			BPF_ALU64_REG(BPF_ADD, R0, R0),
3808  			BPF_ALU64_REG(BPF_ADD, R0, R1),
3809  			BPF_ALU64_REG(BPF_ADD, R0, R2),
3810  			BPF_ALU64_REG(BPF_ADD, R0, R3),
3811  			BPF_ALU64_REG(BPF_ADD, R0, R4),
3812  			BPF_ALU64_REG(BPF_ADD, R0, R5),
3813  			BPF_ALU64_REG(BPF_ADD, R0, R6),
3814  			BPF_ALU64_REG(BPF_ADD, R0, R7),
3815  			BPF_ALU64_REG(BPF_ADD, R0, R8),
3816  			BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3817  			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3818  			BPF_EXIT_INSN(),
3819  			BPF_ALU64_REG(BPF_ADD, R1, R0),
3820  			BPF_ALU64_REG(BPF_ADD, R1, R1),
3821  			BPF_ALU64_REG(BPF_ADD, R1, R2),
3822  			BPF_ALU64_REG(BPF_ADD, R1, R3),
3823  			BPF_ALU64_REG(BPF_ADD, R1, R4),
3824  			BPF_ALU64_REG(BPF_ADD, R1, R5),
3825  			BPF_ALU64_REG(BPF_ADD, R1, R6),
3826  			BPF_ALU64_REG(BPF_ADD, R1, R7),
3827  			BPF_ALU64_REG(BPF_ADD, R1, R8),
3828  			BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3829  			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3830  			BPF_EXIT_INSN(),
3831  			BPF_ALU64_REG(BPF_ADD, R2, R0),
3832  			BPF_ALU64_REG(BPF_ADD, R2, R1),
3833  			BPF_ALU64_REG(BPF_ADD, R2, R2),
3834  			BPF_ALU64_REG(BPF_ADD, R2, R3),
3835  			BPF_ALU64_REG(BPF_ADD, R2, R4),
3836  			BPF_ALU64_REG(BPF_ADD, R2, R5),
3837  			BPF_ALU64_REG(BPF_ADD, R2, R6),
3838  			BPF_ALU64_REG(BPF_ADD, R2, R7),
3839  			BPF_ALU64_REG(BPF_ADD, R2, R8),
3840  			BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3841  			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3842  			BPF_EXIT_INSN(),
3843  			BPF_ALU64_REG(BPF_ADD, R3, R0),
3844  			BPF_ALU64_REG(BPF_ADD, R3, R1),
3845  			BPF_ALU64_REG(BPF_ADD, R3, R2),
3846  			BPF_ALU64_REG(BPF_ADD, R3, R3),
3847  			BPF_ALU64_REG(BPF_ADD, R3, R4),
3848  			BPF_ALU64_REG(BPF_ADD, R3, R5),
3849  			BPF_ALU64_REG(BPF_ADD, R3, R6),
3850  			BPF_ALU64_REG(BPF_ADD, R3, R7),
3851  			BPF_ALU64_REG(BPF_ADD, R3, R8),
3852  			BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3853  			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3854  			BPF_EXIT_INSN(),
3855  			BPF_ALU64_REG(BPF_ADD, R4, R0),
3856  			BPF_ALU64_REG(BPF_ADD, R4, R1),
3857  			BPF_ALU64_REG(BPF_ADD, R4, R2),
3858  			BPF_ALU64_REG(BPF_ADD, R4, R3),
3859  			BPF_ALU64_REG(BPF_ADD, R4, R4),
3860  			BPF_ALU64_REG(BPF_ADD, R4, R5),
3861  			BPF_ALU64_REG(BPF_ADD, R4, R6),
3862  			BPF_ALU64_REG(BPF_ADD, R4, R7),
3863  			BPF_ALU64_REG(BPF_ADD, R4, R8),
3864  			BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3865  			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3866  			BPF_EXIT_INSN(),
3867  			BPF_ALU64_REG(BPF_ADD, R5, R0),
3868  			BPF_ALU64_REG(BPF_ADD, R5, R1),
3869  			BPF_ALU64_REG(BPF_ADD, R5, R2),
3870  			BPF_ALU64_REG(BPF_ADD, R5, R3),
3871  			BPF_ALU64_REG(BPF_ADD, R5, R4),
3872  			BPF_ALU64_REG(BPF_ADD, R5, R5),
3873  			BPF_ALU64_REG(BPF_ADD, R5, R6),
3874  			BPF_ALU64_REG(BPF_ADD, R5, R7),
3875  			BPF_ALU64_REG(BPF_ADD, R5, R8),
3876  			BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3877  			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3878  			BPF_EXIT_INSN(),
3879  			BPF_ALU64_REG(BPF_ADD, R6, R0),
3880  			BPF_ALU64_REG(BPF_ADD, R6, R1),
3881  			BPF_ALU64_REG(BPF_ADD, R6, R2),
3882  			BPF_ALU64_REG(BPF_ADD, R6, R3),
3883  			BPF_ALU64_REG(BPF_ADD, R6, R4),
3884  			BPF_ALU64_REG(BPF_ADD, R6, R5),
3885  			BPF_ALU64_REG(BPF_ADD, R6, R6),
3886  			BPF_ALU64_REG(BPF_ADD, R6, R7),
3887  			BPF_ALU64_REG(BPF_ADD, R6, R8),
3888  			BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3889  			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3890  			BPF_EXIT_INSN(),
3891  			BPF_ALU64_REG(BPF_ADD, R7, R0),
3892  			BPF_ALU64_REG(BPF_ADD, R7, R1),
3893  			BPF_ALU64_REG(BPF_ADD, R7, R2),
3894  			BPF_ALU64_REG(BPF_ADD, R7, R3),
3895  			BPF_ALU64_REG(BPF_ADD, R7, R4),
3896  			BPF_ALU64_REG(BPF_ADD, R7, R5),
3897  			BPF_ALU64_REG(BPF_ADD, R7, R6),
3898  			BPF_ALU64_REG(BPF_ADD, R7, R7),
3899  			BPF_ALU64_REG(BPF_ADD, R7, R8),
3900  			BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3901  			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3902  			BPF_EXIT_INSN(),
3903  			BPF_ALU64_REG(BPF_ADD, R8, R0),
3904  			BPF_ALU64_REG(BPF_ADD, R8, R1),
3905  			BPF_ALU64_REG(BPF_ADD, R8, R2),
3906  			BPF_ALU64_REG(BPF_ADD, R8, R3),
3907  			BPF_ALU64_REG(BPF_ADD, R8, R4),
3908  			BPF_ALU64_REG(BPF_ADD, R8, R5),
3909  			BPF_ALU64_REG(BPF_ADD, R8, R6),
3910  			BPF_ALU64_REG(BPF_ADD, R8, R7),
3911  			BPF_ALU64_REG(BPF_ADD, R8, R8),
3912  			BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3913  			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3914  			BPF_EXIT_INSN(),
3915  			BPF_ALU64_REG(BPF_ADD, R9, R0),
3916  			BPF_ALU64_REG(BPF_ADD, R9, R1),
3917  			BPF_ALU64_REG(BPF_ADD, R9, R2),
3918  			BPF_ALU64_REG(BPF_ADD, R9, R3),
3919  			BPF_ALU64_REG(BPF_ADD, R9, R4),
3920  			BPF_ALU64_REG(BPF_ADD, R9, R5),
3921  			BPF_ALU64_REG(BPF_ADD, R9, R6),
3922  			BPF_ALU64_REG(BPF_ADD, R9, R7),
3923  			BPF_ALU64_REG(BPF_ADD, R9, R8),
3924  			BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3925  			BPF_ALU64_REG(BPF_MOV, R0, R9),
3926  			BPF_EXIT_INSN(),
3927  		},
3928  		INTERNAL,
3929  		{ },
3930  		{ { 0, 2957380 } }
3931  	},
3932  	{
3933  		"INT: ADD 32-bit",
3934  		.u.insns_int = {
3935  			BPF_ALU32_IMM(BPF_MOV, R0, 20),
3936  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
3937  			BPF_ALU32_IMM(BPF_MOV, R2, 2),
3938  			BPF_ALU32_IMM(BPF_MOV, R3, 3),
3939  			BPF_ALU32_IMM(BPF_MOV, R4, 4),
3940  			BPF_ALU32_IMM(BPF_MOV, R5, 5),
3941  			BPF_ALU32_IMM(BPF_MOV, R6, 6),
3942  			BPF_ALU32_IMM(BPF_MOV, R7, 7),
3943  			BPF_ALU32_IMM(BPF_MOV, R8, 8),
3944  			BPF_ALU32_IMM(BPF_MOV, R9, 9),
3945  			BPF_ALU64_IMM(BPF_ADD, R1, 10),
3946  			BPF_ALU64_IMM(BPF_ADD, R2, 10),
3947  			BPF_ALU64_IMM(BPF_ADD, R3, 10),
3948  			BPF_ALU64_IMM(BPF_ADD, R4, 10),
3949  			BPF_ALU64_IMM(BPF_ADD, R5, 10),
3950  			BPF_ALU64_IMM(BPF_ADD, R6, 10),
3951  			BPF_ALU64_IMM(BPF_ADD, R7, 10),
3952  			BPF_ALU64_IMM(BPF_ADD, R8, 10),
3953  			BPF_ALU64_IMM(BPF_ADD, R9, 10),
3954  			BPF_ALU32_REG(BPF_ADD, R0, R1),
3955  			BPF_ALU32_REG(BPF_ADD, R0, R2),
3956  			BPF_ALU32_REG(BPF_ADD, R0, R3),
3957  			BPF_ALU32_REG(BPF_ADD, R0, R4),
3958  			BPF_ALU32_REG(BPF_ADD, R0, R5),
3959  			BPF_ALU32_REG(BPF_ADD, R0, R6),
3960  			BPF_ALU32_REG(BPF_ADD, R0, R7),
3961  			BPF_ALU32_REG(BPF_ADD, R0, R8),
3962  			BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3963  			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3964  			BPF_EXIT_INSN(),
3965  			BPF_ALU32_REG(BPF_ADD, R1, R0),
3966  			BPF_ALU32_REG(BPF_ADD, R1, R1),
3967  			BPF_ALU32_REG(BPF_ADD, R1, R2),
3968  			BPF_ALU32_REG(BPF_ADD, R1, R3),
3969  			BPF_ALU32_REG(BPF_ADD, R1, R4),
3970  			BPF_ALU32_REG(BPF_ADD, R1, R5),
3971  			BPF_ALU32_REG(BPF_ADD, R1, R6),
3972  			BPF_ALU32_REG(BPF_ADD, R1, R7),
3973  			BPF_ALU32_REG(BPF_ADD, R1, R8),
3974  			BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3975  			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3976  			BPF_EXIT_INSN(),
3977  			BPF_ALU32_REG(BPF_ADD, R2, R0),
3978  			BPF_ALU32_REG(BPF_ADD, R2, R1),
3979  			BPF_ALU32_REG(BPF_ADD, R2, R2),
3980  			BPF_ALU32_REG(BPF_ADD, R2, R3),
3981  			BPF_ALU32_REG(BPF_ADD, R2, R4),
3982  			BPF_ALU32_REG(BPF_ADD, R2, R5),
3983  			BPF_ALU32_REG(BPF_ADD, R2, R6),
3984  			BPF_ALU32_REG(BPF_ADD, R2, R7),
3985  			BPF_ALU32_REG(BPF_ADD, R2, R8),
3986  			BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3987  			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3988  			BPF_EXIT_INSN(),
3989  			BPF_ALU32_REG(BPF_ADD, R3, R0),
3990  			BPF_ALU32_REG(BPF_ADD, R3, R1),
3991  			BPF_ALU32_REG(BPF_ADD, R3, R2),
3992  			BPF_ALU32_REG(BPF_ADD, R3, R3),
3993  			BPF_ALU32_REG(BPF_ADD, R3, R4),
3994  			BPF_ALU32_REG(BPF_ADD, R3, R5),
3995  			BPF_ALU32_REG(BPF_ADD, R3, R6),
3996  			BPF_ALU32_REG(BPF_ADD, R3, R7),
3997  			BPF_ALU32_REG(BPF_ADD, R3, R8),
3998  			BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3999  			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
4000  			BPF_EXIT_INSN(),
4001  			BPF_ALU32_REG(BPF_ADD, R4, R0),
4002  			BPF_ALU32_REG(BPF_ADD, R4, R1),
4003  			BPF_ALU32_REG(BPF_ADD, R4, R2),
4004  			BPF_ALU32_REG(BPF_ADD, R4, R3),
4005  			BPF_ALU32_REG(BPF_ADD, R4, R4),
4006  			BPF_ALU32_REG(BPF_ADD, R4, R5),
4007  			BPF_ALU32_REG(BPF_ADD, R4, R6),
4008  			BPF_ALU32_REG(BPF_ADD, R4, R7),
4009  			BPF_ALU32_REG(BPF_ADD, R4, R8),
4010  			BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
4011  			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
4012  			BPF_EXIT_INSN(),
4013  			BPF_ALU32_REG(BPF_ADD, R5, R0),
4014  			BPF_ALU32_REG(BPF_ADD, R5, R1),
4015  			BPF_ALU32_REG(BPF_ADD, R5, R2),
4016  			BPF_ALU32_REG(BPF_ADD, R5, R3),
4017  			BPF_ALU32_REG(BPF_ADD, R5, R4),
4018  			BPF_ALU32_REG(BPF_ADD, R5, R5),
4019  			BPF_ALU32_REG(BPF_ADD, R5, R6),
4020  			BPF_ALU32_REG(BPF_ADD, R5, R7),
4021  			BPF_ALU32_REG(BPF_ADD, R5, R8),
4022  			BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
4023  			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
4024  			BPF_EXIT_INSN(),
4025  			BPF_ALU32_REG(BPF_ADD, R6, R0),
4026  			BPF_ALU32_REG(BPF_ADD, R6, R1),
4027  			BPF_ALU32_REG(BPF_ADD, R6, R2),
4028  			BPF_ALU32_REG(BPF_ADD, R6, R3),
4029  			BPF_ALU32_REG(BPF_ADD, R6, R4),
4030  			BPF_ALU32_REG(BPF_ADD, R6, R5),
4031  			BPF_ALU32_REG(BPF_ADD, R6, R6),
4032  			BPF_ALU32_REG(BPF_ADD, R6, R7),
4033  			BPF_ALU32_REG(BPF_ADD, R6, R8),
4034  			BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
4035  			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
4036  			BPF_EXIT_INSN(),
4037  			BPF_ALU32_REG(BPF_ADD, R7, R0),
4038  			BPF_ALU32_REG(BPF_ADD, R7, R1),
4039  			BPF_ALU32_REG(BPF_ADD, R7, R2),
4040  			BPF_ALU32_REG(BPF_ADD, R7, R3),
4041  			BPF_ALU32_REG(BPF_ADD, R7, R4),
4042  			BPF_ALU32_REG(BPF_ADD, R7, R5),
4043  			BPF_ALU32_REG(BPF_ADD, R7, R6),
4044  			BPF_ALU32_REG(BPF_ADD, R7, R7),
4045  			BPF_ALU32_REG(BPF_ADD, R7, R8),
4046  			BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
4047  			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
4048  			BPF_EXIT_INSN(),
4049  			BPF_ALU32_REG(BPF_ADD, R8, R0),
4050  			BPF_ALU32_REG(BPF_ADD, R8, R1),
4051  			BPF_ALU32_REG(BPF_ADD, R8, R2),
4052  			BPF_ALU32_REG(BPF_ADD, R8, R3),
4053  			BPF_ALU32_REG(BPF_ADD, R8, R4),
4054  			BPF_ALU32_REG(BPF_ADD, R8, R5),
4055  			BPF_ALU32_REG(BPF_ADD, R8, R6),
4056  			BPF_ALU32_REG(BPF_ADD, R8, R7),
4057  			BPF_ALU32_REG(BPF_ADD, R8, R8),
4058  			BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
4059  			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
4060  			BPF_EXIT_INSN(),
4061  			BPF_ALU32_REG(BPF_ADD, R9, R0),
4062  			BPF_ALU32_REG(BPF_ADD, R9, R1),
4063  			BPF_ALU32_REG(BPF_ADD, R9, R2),
4064  			BPF_ALU32_REG(BPF_ADD, R9, R3),
4065  			BPF_ALU32_REG(BPF_ADD, R9, R4),
4066  			BPF_ALU32_REG(BPF_ADD, R9, R5),
4067  			BPF_ALU32_REG(BPF_ADD, R9, R6),
4068  			BPF_ALU32_REG(BPF_ADD, R9, R7),
4069  			BPF_ALU32_REG(BPF_ADD, R9, R8),
4070  			BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
4071  			BPF_ALU32_REG(BPF_MOV, R0, R9),
4072  			BPF_EXIT_INSN(),
4073  		},
4074  		INTERNAL,
4075  		{ },
4076  		{ { 0, 2957380 } }
4077  	},
4078  	{	/* Mainly checking JIT here. */
4079  		"INT: SUB",
4080  		.u.insns_int = {
4081  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4082  			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4083  			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4084  			BPF_ALU64_IMM(BPF_MOV, R3, 3),
4085  			BPF_ALU64_IMM(BPF_MOV, R4, 4),
4086  			BPF_ALU64_IMM(BPF_MOV, R5, 5),
4087  			BPF_ALU64_IMM(BPF_MOV, R6, 6),
4088  			BPF_ALU64_IMM(BPF_MOV, R7, 7),
4089  			BPF_ALU64_IMM(BPF_MOV, R8, 8),
4090  			BPF_ALU64_IMM(BPF_MOV, R9, 9),
4091  			BPF_ALU64_REG(BPF_SUB, R0, R0),
4092  			BPF_ALU64_REG(BPF_SUB, R0, R1),
4093  			BPF_ALU64_REG(BPF_SUB, R0, R2),
4094  			BPF_ALU64_REG(BPF_SUB, R0, R3),
4095  			BPF_ALU64_REG(BPF_SUB, R0, R4),
4096  			BPF_ALU64_REG(BPF_SUB, R0, R5),
4097  			BPF_ALU64_REG(BPF_SUB, R0, R6),
4098  			BPF_ALU64_REG(BPF_SUB, R0, R7),
4099  			BPF_ALU64_REG(BPF_SUB, R0, R8),
4100  			BPF_ALU64_REG(BPF_SUB, R0, R9),
4101  			BPF_ALU64_IMM(BPF_SUB, R0, 10),
4102  			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
4103  			BPF_EXIT_INSN(),
4104  			BPF_ALU64_REG(BPF_SUB, R1, R0),
4105  			BPF_ALU64_REG(BPF_SUB, R1, R2),
4106  			BPF_ALU64_REG(BPF_SUB, R1, R3),
4107  			BPF_ALU64_REG(BPF_SUB, R1, R4),
4108  			BPF_ALU64_REG(BPF_SUB, R1, R5),
4109  			BPF_ALU64_REG(BPF_SUB, R1, R6),
4110  			BPF_ALU64_REG(BPF_SUB, R1, R7),
4111  			BPF_ALU64_REG(BPF_SUB, R1, R8),
4112  			BPF_ALU64_REG(BPF_SUB, R1, R9),
4113  			BPF_ALU64_IMM(BPF_SUB, R1, 10),
4114  			BPF_ALU64_REG(BPF_SUB, R2, R0),
4115  			BPF_ALU64_REG(BPF_SUB, R2, R1),
4116  			BPF_ALU64_REG(BPF_SUB, R2, R3),
4117  			BPF_ALU64_REG(BPF_SUB, R2, R4),
4118  			BPF_ALU64_REG(BPF_SUB, R2, R5),
4119  			BPF_ALU64_REG(BPF_SUB, R2, R6),
4120  			BPF_ALU64_REG(BPF_SUB, R2, R7),
4121  			BPF_ALU64_REG(BPF_SUB, R2, R8),
4122  			BPF_ALU64_REG(BPF_SUB, R2, R9),
4123  			BPF_ALU64_IMM(BPF_SUB, R2, 10),
4124  			BPF_ALU64_REG(BPF_SUB, R3, R0),
4125  			BPF_ALU64_REG(BPF_SUB, R3, R1),
4126  			BPF_ALU64_REG(BPF_SUB, R3, R2),
4127  			BPF_ALU64_REG(BPF_SUB, R3, R4),
4128  			BPF_ALU64_REG(BPF_SUB, R3, R5),
4129  			BPF_ALU64_REG(BPF_SUB, R3, R6),
4130  			BPF_ALU64_REG(BPF_SUB, R3, R7),
4131  			BPF_ALU64_REG(BPF_SUB, R3, R8),
4132  			BPF_ALU64_REG(BPF_SUB, R3, R9),
4133  			BPF_ALU64_IMM(BPF_SUB, R3, 10),
4134  			BPF_ALU64_REG(BPF_SUB, R4, R0),
4135  			BPF_ALU64_REG(BPF_SUB, R4, R1),
4136  			BPF_ALU64_REG(BPF_SUB, R4, R2),
4137  			BPF_ALU64_REG(BPF_SUB, R4, R3),
4138  			BPF_ALU64_REG(BPF_SUB, R4, R5),
4139  			BPF_ALU64_REG(BPF_SUB, R4, R6),
4140  			BPF_ALU64_REG(BPF_SUB, R4, R7),
4141  			BPF_ALU64_REG(BPF_SUB, R4, R8),
4142  			BPF_ALU64_REG(BPF_SUB, R4, R9),
4143  			BPF_ALU64_IMM(BPF_SUB, R4, 10),
4144  			BPF_ALU64_REG(BPF_SUB, R5, R0),
4145  			BPF_ALU64_REG(BPF_SUB, R5, R1),
4146  			BPF_ALU64_REG(BPF_SUB, R5, R2),
4147  			BPF_ALU64_REG(BPF_SUB, R5, R3),
4148  			BPF_ALU64_REG(BPF_SUB, R5, R4),
4149  			BPF_ALU64_REG(BPF_SUB, R5, R6),
4150  			BPF_ALU64_REG(BPF_SUB, R5, R7),
4151  			BPF_ALU64_REG(BPF_SUB, R5, R8),
4152  			BPF_ALU64_REG(BPF_SUB, R5, R9),
4153  			BPF_ALU64_IMM(BPF_SUB, R5, 10),
4154  			BPF_ALU64_REG(BPF_SUB, R6, R0),
4155  			BPF_ALU64_REG(BPF_SUB, R6, R1),
4156  			BPF_ALU64_REG(BPF_SUB, R6, R2),
4157  			BPF_ALU64_REG(BPF_SUB, R6, R3),
4158  			BPF_ALU64_REG(BPF_SUB, R6, R4),
4159  			BPF_ALU64_REG(BPF_SUB, R6, R5),
4160  			BPF_ALU64_REG(BPF_SUB, R6, R7),
4161  			BPF_ALU64_REG(BPF_SUB, R6, R8),
4162  			BPF_ALU64_REG(BPF_SUB, R6, R9),
4163  			BPF_ALU64_IMM(BPF_SUB, R6, 10),
4164  			BPF_ALU64_REG(BPF_SUB, R7, R0),
4165  			BPF_ALU64_REG(BPF_SUB, R7, R1),
4166  			BPF_ALU64_REG(BPF_SUB, R7, R2),
4167  			BPF_ALU64_REG(BPF_SUB, R7, R3),
4168  			BPF_ALU64_REG(BPF_SUB, R7, R4),
4169  			BPF_ALU64_REG(BPF_SUB, R7, R5),
4170  			BPF_ALU64_REG(BPF_SUB, R7, R6),
4171  			BPF_ALU64_REG(BPF_SUB, R7, R8),
4172  			BPF_ALU64_REG(BPF_SUB, R7, R9),
4173  			BPF_ALU64_IMM(BPF_SUB, R7, 10),
4174  			BPF_ALU64_REG(BPF_SUB, R8, R0),
4175  			BPF_ALU64_REG(BPF_SUB, R8, R1),
4176  			BPF_ALU64_REG(BPF_SUB, R8, R2),
4177  			BPF_ALU64_REG(BPF_SUB, R8, R3),
4178  			BPF_ALU64_REG(BPF_SUB, R8, R4),
4179  			BPF_ALU64_REG(BPF_SUB, R8, R5),
4180  			BPF_ALU64_REG(BPF_SUB, R8, R6),
4181  			BPF_ALU64_REG(BPF_SUB, R8, R7),
4182  			BPF_ALU64_REG(BPF_SUB, R8, R9),
4183  			BPF_ALU64_IMM(BPF_SUB, R8, 10),
4184  			BPF_ALU64_REG(BPF_SUB, R9, R0),
4185  			BPF_ALU64_REG(BPF_SUB, R9, R1),
4186  			BPF_ALU64_REG(BPF_SUB, R9, R2),
4187  			BPF_ALU64_REG(BPF_SUB, R9, R3),
4188  			BPF_ALU64_REG(BPF_SUB, R9, R4),
4189  			BPF_ALU64_REG(BPF_SUB, R9, R5),
4190  			BPF_ALU64_REG(BPF_SUB, R9, R6),
4191  			BPF_ALU64_REG(BPF_SUB, R9, R7),
4192  			BPF_ALU64_REG(BPF_SUB, R9, R8),
4193  			BPF_ALU64_IMM(BPF_SUB, R9, 10),
4194  			BPF_ALU64_IMM(BPF_SUB, R0, 10),
4195  			BPF_ALU64_IMM(BPF_NEG, R0, 0),
4196  			BPF_ALU64_REG(BPF_SUB, R0, R1),
4197  			BPF_ALU64_REG(BPF_SUB, R0, R2),
4198  			BPF_ALU64_REG(BPF_SUB, R0, R3),
4199  			BPF_ALU64_REG(BPF_SUB, R0, R4),
4200  			BPF_ALU64_REG(BPF_SUB, R0, R5),
4201  			BPF_ALU64_REG(BPF_SUB, R0, R6),
4202  			BPF_ALU64_REG(BPF_SUB, R0, R7),
4203  			BPF_ALU64_REG(BPF_SUB, R0, R8),
4204  			BPF_ALU64_REG(BPF_SUB, R0, R9),
4205  			BPF_EXIT_INSN(),
4206  		},
4207  		INTERNAL,
4208  		{ },
4209  		{ { 0, 11 } }
4210  	},
4211  	{	/* Mainly checking JIT here. */
4212  		"INT: XOR",
4213  		.u.insns_int = {
4214  			BPF_ALU64_REG(BPF_SUB, R0, R0),
4215  			BPF_ALU64_REG(BPF_XOR, R1, R1),
4216  			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
4217  			BPF_EXIT_INSN(),
4218  			BPF_ALU64_IMM(BPF_MOV, R0, 10),
4219  			BPF_ALU64_IMM(BPF_MOV, R1, -1),
4220  			BPF_ALU64_REG(BPF_SUB, R1, R1),
4221  			BPF_ALU64_REG(BPF_XOR, R2, R2),
4222  			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
4223  			BPF_EXIT_INSN(),
4224  			BPF_ALU64_REG(BPF_SUB, R2, R2),
4225  			BPF_ALU64_REG(BPF_XOR, R3, R3),
4226  			BPF_ALU64_IMM(BPF_MOV, R0, 10),
4227  			BPF_ALU64_IMM(BPF_MOV, R1, -1),
4228  			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
4229  			BPF_EXIT_INSN(),
4230  			BPF_ALU64_REG(BPF_SUB, R3, R3),
4231  			BPF_ALU64_REG(BPF_XOR, R4, R4),
4232  			BPF_ALU64_IMM(BPF_MOV, R2, 1),
4233  			BPF_ALU64_IMM(BPF_MOV, R5, -1),
4234  			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
4235  			BPF_EXIT_INSN(),
4236  			BPF_ALU64_REG(BPF_SUB, R4, R4),
4237  			BPF_ALU64_REG(BPF_XOR, R5, R5),
4238  			BPF_ALU64_IMM(BPF_MOV, R3, 1),
4239  			BPF_ALU64_IMM(BPF_MOV, R7, -1),
4240  			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
4241  			BPF_EXIT_INSN(),
4242  			BPF_ALU64_IMM(BPF_MOV, R5, 1),
4243  			BPF_ALU64_REG(BPF_SUB, R5, R5),
4244  			BPF_ALU64_REG(BPF_XOR, R6, R6),
4245  			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4246  			BPF_ALU64_IMM(BPF_MOV, R8, -1),
4247  			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
4248  			BPF_EXIT_INSN(),
4249  			BPF_ALU64_REG(BPF_SUB, R6, R6),
4250  			BPF_ALU64_REG(BPF_XOR, R7, R7),
4251  			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
4252  			BPF_EXIT_INSN(),
4253  			BPF_ALU64_REG(BPF_SUB, R7, R7),
4254  			BPF_ALU64_REG(BPF_XOR, R8, R8),
4255  			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
4256  			BPF_EXIT_INSN(),
4257  			BPF_ALU64_REG(BPF_SUB, R8, R8),
4258  			BPF_ALU64_REG(BPF_XOR, R9, R9),
4259  			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
4260  			BPF_EXIT_INSN(),
4261  			BPF_ALU64_REG(BPF_SUB, R9, R9),
4262  			BPF_ALU64_REG(BPF_XOR, R0, R0),
4263  			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
4264  			BPF_EXIT_INSN(),
4265  			BPF_ALU64_REG(BPF_SUB, R1, R1),
4266  			BPF_ALU64_REG(BPF_XOR, R0, R0),
4267  			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
4268  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4269  			BPF_EXIT_INSN(),
4270  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
4271  			BPF_EXIT_INSN(),
4272  		},
4273  		INTERNAL,
4274  		{ },
4275  		{ { 0, 1 } }
4276  	},
4277  	{	/* Mainly checking JIT here. */
4278  		"INT: MUL",
4279  		.u.insns_int = {
4280  			BPF_ALU64_IMM(BPF_MOV, R0, 11),
4281  			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4282  			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4283  			BPF_ALU64_IMM(BPF_MOV, R3, 3),
4284  			BPF_ALU64_IMM(BPF_MOV, R4, 4),
4285  			BPF_ALU64_IMM(BPF_MOV, R5, 5),
4286  			BPF_ALU64_IMM(BPF_MOV, R6, 6),
4287  			BPF_ALU64_IMM(BPF_MOV, R7, 7),
4288  			BPF_ALU64_IMM(BPF_MOV, R8, 8),
4289  			BPF_ALU64_IMM(BPF_MOV, R9, 9),
4290  			BPF_ALU64_REG(BPF_MUL, R0, R0),
4291  			BPF_ALU64_REG(BPF_MUL, R0, R1),
4292  			BPF_ALU64_REG(BPF_MUL, R0, R2),
4293  			BPF_ALU64_REG(BPF_MUL, R0, R3),
4294  			BPF_ALU64_REG(BPF_MUL, R0, R4),
4295  			BPF_ALU64_REG(BPF_MUL, R0, R5),
4296  			BPF_ALU64_REG(BPF_MUL, R0, R6),
4297  			BPF_ALU64_REG(BPF_MUL, R0, R7),
4298  			BPF_ALU64_REG(BPF_MUL, R0, R8),
4299  			BPF_ALU64_REG(BPF_MUL, R0, R9),
4300  			BPF_ALU64_IMM(BPF_MUL, R0, 10),
4301  			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
4302  			BPF_EXIT_INSN(),
4303  			BPF_ALU64_REG(BPF_MUL, R1, R0),
4304  			BPF_ALU64_REG(BPF_MUL, R1, R2),
4305  			BPF_ALU64_REG(BPF_MUL, R1, R3),
4306  			BPF_ALU64_REG(BPF_MUL, R1, R4),
4307  			BPF_ALU64_REG(BPF_MUL, R1, R5),
4308  			BPF_ALU64_REG(BPF_MUL, R1, R6),
4309  			BPF_ALU64_REG(BPF_MUL, R1, R7),
4310  			BPF_ALU64_REG(BPF_MUL, R1, R8),
4311  			BPF_ALU64_REG(BPF_MUL, R1, R9),
4312  			BPF_ALU64_IMM(BPF_MUL, R1, 10),
4313  			BPF_ALU64_REG(BPF_MOV, R2, R1),
4314  			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4315  			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
4316  			BPF_EXIT_INSN(),
4317  			BPF_ALU64_IMM(BPF_LSH, R1, 32),
4318  			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
4319  			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
4320  			BPF_EXIT_INSN(),
4321  			BPF_ALU64_REG(BPF_MUL, R2, R0),
4322  			BPF_ALU64_REG(BPF_MUL, R2, R1),
4323  			BPF_ALU64_REG(BPF_MUL, R2, R3),
4324  			BPF_ALU64_REG(BPF_MUL, R2, R4),
4325  			BPF_ALU64_REG(BPF_MUL, R2, R5),
4326  			BPF_ALU64_REG(BPF_MUL, R2, R6),
4327  			BPF_ALU64_REG(BPF_MUL, R2, R7),
4328  			BPF_ALU64_REG(BPF_MUL, R2, R8),
4329  			BPF_ALU64_REG(BPF_MUL, R2, R9),
4330  			BPF_ALU64_IMM(BPF_MUL, R2, 10),
4331  			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4332  			BPF_ALU64_REG(BPF_MOV, R0, R2),
4333  			BPF_EXIT_INSN(),
4334  		},
4335  		INTERNAL,
4336  		{ },
4337  		{ { 0, 0x35d97ef2 } }
4338  	},
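	/*
	 * The next three cases seed every register with all-ones, clear them
	 * through three different encodings (64-bit MOV, 32-bit MOV which
	 * must zero-extend, and the two-slot LD_IMM64), then sum the
	 * registers and add 0xfefe. Any stale upper bits would corrupt the
	 * expected 0xfefe result.
	 */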
4339  	{	/* Mainly checking JIT here. */
4340  		"MOV REG64",
4341  		.u.insns_int = {
4342  			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4343  			BPF_MOV64_REG(R1, R0),
4344  			BPF_MOV64_REG(R2, R1),
4345  			BPF_MOV64_REG(R3, R2),
4346  			BPF_MOV64_REG(R4, R3),
4347  			BPF_MOV64_REG(R5, R4),
4348  			BPF_MOV64_REG(R6, R5),
4349  			BPF_MOV64_REG(R7, R6),
4350  			BPF_MOV64_REG(R8, R7),
4351  			BPF_MOV64_REG(R9, R8),
4352  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4353  			BPF_ALU64_IMM(BPF_MOV, R1, 0),
4354  			BPF_ALU64_IMM(BPF_MOV, R2, 0),
4355  			BPF_ALU64_IMM(BPF_MOV, R3, 0),
4356  			BPF_ALU64_IMM(BPF_MOV, R4, 0),
4357  			BPF_ALU64_IMM(BPF_MOV, R5, 0),
4358  			BPF_ALU64_IMM(BPF_MOV, R6, 0),
4359  			BPF_ALU64_IMM(BPF_MOV, R7, 0),
4360  			BPF_ALU64_IMM(BPF_MOV, R8, 0),
4361  			BPF_ALU64_IMM(BPF_MOV, R9, 0),
4362  			BPF_ALU64_REG(BPF_ADD, R0, R0),
4363  			BPF_ALU64_REG(BPF_ADD, R0, R1),
4364  			BPF_ALU64_REG(BPF_ADD, R0, R2),
4365  			BPF_ALU64_REG(BPF_ADD, R0, R3),
4366  			BPF_ALU64_REG(BPF_ADD, R0, R4),
4367  			BPF_ALU64_REG(BPF_ADD, R0, R5),
4368  			BPF_ALU64_REG(BPF_ADD, R0, R6),
4369  			BPF_ALU64_REG(BPF_ADD, R0, R7),
4370  			BPF_ALU64_REG(BPF_ADD, R0, R8),
4371  			BPF_ALU64_REG(BPF_ADD, R0, R9),
4372  			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4373  			BPF_EXIT_INSN(),
4374  		},
4375  		INTERNAL,
4376  		{ },
4377  		{ { 0, 0xfefe } }
4378  	},
4379  	{	/* Mainly checking JIT here. */
4380  		"MOV REG32",
4381  		.u.insns_int = {
4382  			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4383  			BPF_MOV64_REG(R1, R0),
4384  			BPF_MOV64_REG(R2, R1),
4385  			BPF_MOV64_REG(R3, R2),
4386  			BPF_MOV64_REG(R4, R3),
4387  			BPF_MOV64_REG(R5, R4),
4388  			BPF_MOV64_REG(R6, R5),
4389  			BPF_MOV64_REG(R7, R6),
4390  			BPF_MOV64_REG(R8, R7),
4391  			BPF_MOV64_REG(R9, R8),
4392  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
4393  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
4394  			BPF_ALU32_IMM(BPF_MOV, R2, 0),
4395  			BPF_ALU32_IMM(BPF_MOV, R3, 0),
4396  			BPF_ALU32_IMM(BPF_MOV, R4, 0),
4397  			BPF_ALU32_IMM(BPF_MOV, R5, 0),
4398  			BPF_ALU32_IMM(BPF_MOV, R6, 0),
4399  			BPF_ALU32_IMM(BPF_MOV, R7, 0),
4400  			BPF_ALU32_IMM(BPF_MOV, R8, 0),
4401  			BPF_ALU32_IMM(BPF_MOV, R9, 0),
4402  			BPF_ALU64_REG(BPF_ADD, R0, R0),
4403  			BPF_ALU64_REG(BPF_ADD, R0, R1),
4404  			BPF_ALU64_REG(BPF_ADD, R0, R2),
4405  			BPF_ALU64_REG(BPF_ADD, R0, R3),
4406  			BPF_ALU64_REG(BPF_ADD, R0, R4),
4407  			BPF_ALU64_REG(BPF_ADD, R0, R5),
4408  			BPF_ALU64_REG(BPF_ADD, R0, R6),
4409  			BPF_ALU64_REG(BPF_ADD, R0, R7),
4410  			BPF_ALU64_REG(BPF_ADD, R0, R8),
4411  			BPF_ALU64_REG(BPF_ADD, R0, R9),
4412  			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4413  			BPF_EXIT_INSN(),
4414  		},
4415  		INTERNAL,
4416  		{ },
4417  		{ { 0, 0xfefe } }
4418  	},
4419  	{	/* Mainly checking JIT here. */
4420  		"LD IMM64",
4421  		.u.insns_int = {
4422  			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4423  			BPF_MOV64_REG(R1, R0),
4424  			BPF_MOV64_REG(R2, R1),
4425  			BPF_MOV64_REG(R3, R2),
4426  			BPF_MOV64_REG(R4, R3),
4427  			BPF_MOV64_REG(R5, R4),
4428  			BPF_MOV64_REG(R6, R5),
4429  			BPF_MOV64_REG(R7, R6),
4430  			BPF_MOV64_REG(R8, R7),
4431  			BPF_MOV64_REG(R9, R8),
4432  			BPF_LD_IMM64(R0, 0x0LL),
4433  			BPF_LD_IMM64(R1, 0x0LL),
4434  			BPF_LD_IMM64(R2, 0x0LL),
4435  			BPF_LD_IMM64(R3, 0x0LL),
4436  			BPF_LD_IMM64(R4, 0x0LL),
4437  			BPF_LD_IMM64(R5, 0x0LL),
4438  			BPF_LD_IMM64(R6, 0x0LL),
4439  			BPF_LD_IMM64(R7, 0x0LL),
4440  			BPF_LD_IMM64(R8, 0x0LL),
4441  			BPF_LD_IMM64(R9, 0x0LL),
4442  			BPF_ALU64_REG(BPF_ADD, R0, R0),
4443  			BPF_ALU64_REG(BPF_ADD, R0, R1),
4444  			BPF_ALU64_REG(BPF_ADD, R0, R2),
4445  			BPF_ALU64_REG(BPF_ADD, R0, R3),
4446  			BPF_ALU64_REG(BPF_ADD, R0, R4),
4447  			BPF_ALU64_REG(BPF_ADD, R0, R5),
4448  			BPF_ALU64_REG(BPF_ADD, R0, R6),
4449  			BPF_ALU64_REG(BPF_ADD, R0, R7),
4450  			BPF_ALU64_REG(BPF_ADD, R0, R8),
4451  			BPF_ALU64_REG(BPF_ADD, R0, R9),
4452  			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4453  			BPF_EXIT_INSN(),
4454  		},
4455  		INTERNAL,
4456  		{ },
4457  		{ { 0, 0xfefe } }
4458  	},
4459  	{
4460  		"INT: ALU MIX",
4461  		.u.insns_int = {
4462  			BPF_ALU64_IMM(BPF_MOV, R0, 11),
4463  			BPF_ALU64_IMM(BPF_ADD, R0, -1),
4464  			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4465  			BPF_ALU64_IMM(BPF_XOR, R2, 3),
4466  			BPF_ALU64_REG(BPF_DIV, R0, R2),
4467  			BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
4468  			BPF_EXIT_INSN(),
4469  			BPF_ALU64_IMM(BPF_MOD, R0, 3),
4470  			BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
4471  			BPF_EXIT_INSN(),
4472  			BPF_ALU64_IMM(BPF_MOV, R0, -1),
4473  			BPF_EXIT_INSN(),
4474  		},
4475  		INTERNAL,
4476  		{ },
4477  		{ { 0, -1 } }
4478  	},
4479  	{
4480  		"INT: shifts by register",
4481  		.u.insns_int = {
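			/*
			 * -1234 is 0xfffffb2e as a u32, so the 32-bit logical
			 * right shift by one yields 0x7ffffd97, and the 64-bit
			 * left shift by one restores the zero-extended
			 * 0xfffffb2e for the compare against R4.
			 */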
4482  			BPF_MOV64_IMM(R0, -1234),
4483  			BPF_MOV64_IMM(R1, 1),
4484  			BPF_ALU32_REG(BPF_RSH, R0, R1),
4485  			BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
4486  			BPF_EXIT_INSN(),
4487  			BPF_MOV64_IMM(R2, 1),
4488  			BPF_ALU64_REG(BPF_LSH, R0, R2),
4489  			BPF_MOV32_IMM(R4, -1234),
4490  			BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
4491  			BPF_EXIT_INSN(),
4492  			BPF_ALU64_IMM(BPF_AND, R4, 63),
4493  			BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <<= 46 */
4494  			BPF_MOV64_IMM(R3, 47),
4495  			BPF_ALU64_REG(BPF_ARSH, R0, R3),
4496  			BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
4497  			BPF_EXIT_INSN(),
4498  			BPF_MOV64_IMM(R2, 1),
4499  			BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
4500  			BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
4501  			BPF_EXIT_INSN(),
4502  			BPF_MOV64_IMM(R4, 4),
4503  			BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
4504  			BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
4505  			BPF_EXIT_INSN(),
4506  			BPF_MOV64_IMM(R4, 5),
4507  			BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
4508  			BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
4509  			BPF_EXIT_INSN(),
4510  			BPF_MOV64_IMM(R0, -1),
4511  			BPF_EXIT_INSN(),
4512  		},
4513  		INTERNAL,
4514  		{ },
4515  		{ { 0, -1 } }
4516  	},
4517  #ifdef CONFIG_32BIT
4518  	{
4519  		"INT: 32-bit context pointer word order and zero-extension",
4520  		.u.insns_int = {
4521  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
4522  			BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
4523  			BPF_ALU64_IMM(BPF_RSH, R1, 32),
4524  			BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
4525  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
4526  			BPF_EXIT_INSN(),
4527  		},
4528  		INTERNAL,
4529  		{ },
4530  		{ { 0, 1 } }
4531  	},
4532  #endif
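	/*
	 * The "check:" cases below are programs the classic BPF checker must
	 * reject: a missing RET at the end, division by a constant zero, an
	 * opcode that is only valid for seccomp, and a scratch-memory index
	 * outside M[0..15]. The expected outcome is the -EINVAL failure from
	 * filter creation rather than a run result.
	 */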
4533  	{
4534  		"check: missing ret",
4535  		.u.insns = {
4536  			BPF_STMT(BPF_LD | BPF_IMM, 1),
4537  		},
4538  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4539  		{ },
4540  		{ },
4541  		.fill_helper = NULL,
4542  		.expected_errcode = -EINVAL,
4543  	},
4544  	{
4545  		"check: div_k_0",
4546  		.u.insns = {
4547  			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
4548  			BPF_STMT(BPF_RET | BPF_K, 0)
4549  		},
4550  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4551  		{ },
4552  		{ },
4553  		.fill_helper = NULL,
4554  		.expected_errcode = -EINVAL,
4555  	},
4556  	{
4557  		"check: unknown insn",
4558  		.u.insns = {
4559  			/* seccomp insn, rejected in socket filter */
4560  			BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
4561  			BPF_STMT(BPF_RET | BPF_K, 0)
4562  		},
4563  		CLASSIC | FLAG_EXPECTED_FAIL,
4564  		{ },
4565  		{ },
4566  		.fill_helper = NULL,
4567  		.expected_errcode = -EINVAL,
4568  	},
4569  	{
4570  		"check: out of range spill/fill",
4571  		.u.insns = {
4572  			BPF_STMT(BPF_STX, 16),
4573  			BPF_STMT(BPF_RET | BPF_K, 0)
4574  		},
4575  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4576  		{ },
4577  		{ },
4578  		.fill_helper = NULL,
4579  		.expected_errcode = -EINVAL,
4580  	},
4581  	{
4582  		"JUMPS + HOLES",
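		/*
		 * Stresses jump offsets across long runs of padding loads
		 * ("holes"). Every load in the body is BPF_LD|BPF_H|BPF_ABS at
		 * offset 0 and none of the JEQ constants match this frame, so
		 * whichever RET A is reached returns the frame's first
		 * half-word, 0x001b (the start of the destination MAC).
		 */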
4583  		.u.insns = {
4584  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4585  			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
4586  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4587  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4588  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4589  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4590  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4591  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4592  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4593  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4594  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4595  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4596  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4597  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4598  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4599  			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
4600  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4601  			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
4602  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4603  			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4604  			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4605  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4606  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4607  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4608  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4609  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4610  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4611  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4612  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4613  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4614  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4615  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4616  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4617  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4618  			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
4619  			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
4620  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4621  			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4622  			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4623  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4624  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4625  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4626  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4627  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4628  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4629  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4630  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4631  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4632  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4633  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4634  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4635  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4636  			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
4637  			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
4638  			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4639  			BPF_STMT(BPF_RET | BPF_A, 0),
4640  			BPF_STMT(BPF_RET | BPF_A, 0),
4641  		},
4642  		CLASSIC,
4643  		{ 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4644  		  0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4645  		  0x08, 0x00,
4646  		  0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4647  		  0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4648  		  0xc0, 0xa8, 0x33, 0x01,
4649  		  0xc0, 0xa8, 0x33, 0x02,
4650  		  0xbb, 0xb6,
4651  		  0xa9, 0xfa,
4652  		  0x00, 0x14, 0x00, 0x00,
4653  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4654  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4655  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4656  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4657  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4658  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4659  		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4660  		  0xcc, 0xcc, 0xcc, 0xcc },
4661  		{ { 88, 0x001b } }
4662  	},
4663  	{
4664  		"check: RET X",
4665  		.u.insns = {
4666  			BPF_STMT(BPF_RET | BPF_X, 0),
4667  		},
4668  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4669  		{ },
4670  		{ },
4671  		.fill_helper = NULL,
4672  		.expected_errcode = -EINVAL,
4673  	},
4674  	{
4675  		"check: LDX + RET X",
4676  		.u.insns = {
4677  			BPF_STMT(BPF_LDX | BPF_IMM, 42),
4678  			BPF_STMT(BPF_RET | BPF_X, 0),
4679  		},
4680  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4681  		{ },
4682  		{ },
4683  		.fill_helper = NULL,
4684  		.expected_errcode = -EINVAL,
4685  	},
4686  	{	/* Mainly checking JIT here. */
4687  		"M[]: alt STX + LDX",
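		/*
		 * Walks all 16 classic BPF scratch slots M[0]..M[15], bouncing
		 * an incrementing counter through STX/LDX/TXA/TAX at each
		 * step: starting at 100 and adding one per slot yields the
		 * expected 116.
		 */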
4688  		.u.insns = {
4689  			BPF_STMT(BPF_LDX | BPF_IMM, 100),
4690  			BPF_STMT(BPF_STX, 0),
4691  			BPF_STMT(BPF_LDX | BPF_MEM, 0),
4692  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4693  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4694  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4695  			BPF_STMT(BPF_STX, 1),
4696  			BPF_STMT(BPF_LDX | BPF_MEM, 1),
4697  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4698  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4699  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4700  			BPF_STMT(BPF_STX, 2),
4701  			BPF_STMT(BPF_LDX | BPF_MEM, 2),
4702  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4703  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4704  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4705  			BPF_STMT(BPF_STX, 3),
4706  			BPF_STMT(BPF_LDX | BPF_MEM, 3),
4707  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4708  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4709  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4710  			BPF_STMT(BPF_STX, 4),
4711  			BPF_STMT(BPF_LDX | BPF_MEM, 4),
4712  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4713  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4714  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4715  			BPF_STMT(BPF_STX, 5),
4716  			BPF_STMT(BPF_LDX | BPF_MEM, 5),
4717  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4718  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4719  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4720  			BPF_STMT(BPF_STX, 6),
4721  			BPF_STMT(BPF_LDX | BPF_MEM, 6),
4722  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4723  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4724  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4725  			BPF_STMT(BPF_STX, 7),
4726  			BPF_STMT(BPF_LDX | BPF_MEM, 7),
4727  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4728  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4729  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4730  			BPF_STMT(BPF_STX, 8),
4731  			BPF_STMT(BPF_LDX | BPF_MEM, 8),
4732  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4733  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4734  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4735  			BPF_STMT(BPF_STX, 9),
4736  			BPF_STMT(BPF_LDX | BPF_MEM, 9),
4737  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4738  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4739  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4740  			BPF_STMT(BPF_STX, 10),
4741  			BPF_STMT(BPF_LDX | BPF_MEM, 10),
4742  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4743  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4744  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4745  			BPF_STMT(BPF_STX, 11),
4746  			BPF_STMT(BPF_LDX | BPF_MEM, 11),
4747  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4748  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4749  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4750  			BPF_STMT(BPF_STX, 12),
4751  			BPF_STMT(BPF_LDX | BPF_MEM, 12),
4752  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4753  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4754  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4755  			BPF_STMT(BPF_STX, 13),
4756  			BPF_STMT(BPF_LDX | BPF_MEM, 13),
4757  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4758  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4759  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4760  			BPF_STMT(BPF_STX, 14),
4761  			BPF_STMT(BPF_LDX | BPF_MEM, 14),
4762  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4763  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4764  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4765  			BPF_STMT(BPF_STX, 15),
4766  			BPF_STMT(BPF_LDX | BPF_MEM, 15),
4767  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4768  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4769  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4770  			BPF_STMT(BPF_RET | BPF_A, 0),
4771  		},
4772  		CLASSIC | FLAG_NO_DATA,
4773  		{ },
4774  		{ { 0, 116 } },
4775  	},
4776  	{	/* Mainly checking JIT here. */
4777  		"M[]: full STX + full LDX",
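		/*
		 * Fills every scratch slot with a distinct constant, then
		 * reloads and sums them all in A; the 32-bit sum wraps to the
		 * expected 0x2a5a5e5.
		 */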
4778  		.u.insns = {
4779  			BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
4780  			BPF_STMT(BPF_STX, 0),
4781  			BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
4782  			BPF_STMT(BPF_STX, 1),
4783  			BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
4784  			BPF_STMT(BPF_STX, 2),
4785  			BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
4786  			BPF_STMT(BPF_STX, 3),
4787  			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
4788  			BPF_STMT(BPF_STX, 4),
4789  			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
4790  			BPF_STMT(BPF_STX, 5),
4791  			BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
4792  			BPF_STMT(BPF_STX, 6),
4793  			BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
4794  			BPF_STMT(BPF_STX, 7),
4795  			BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
4796  			BPF_STMT(BPF_STX, 8),
4797  			BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
4798  			BPF_STMT(BPF_STX, 9),
4799  			BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
4800  			BPF_STMT(BPF_STX, 10),
4801  			BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
4802  			BPF_STMT(BPF_STX, 11),
4803  			BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
4804  			BPF_STMT(BPF_STX, 12),
4805  			BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
4806  			BPF_STMT(BPF_STX, 13),
4807  			BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
4808  			BPF_STMT(BPF_STX, 14),
4809  			BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
4810  			BPF_STMT(BPF_STX, 15),
4811  			BPF_STMT(BPF_LDX | BPF_MEM, 0),
4812  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4813  			BPF_STMT(BPF_LDX | BPF_MEM, 1),
4814  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4815  			BPF_STMT(BPF_LDX | BPF_MEM, 2),
4816  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4817  			BPF_STMT(BPF_LDX | BPF_MEM, 3),
4818  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4819  			BPF_STMT(BPF_LDX | BPF_MEM, 4),
4820  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4821  			BPF_STMT(BPF_LDX | BPF_MEM, 5),
4822  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4823  			BPF_STMT(BPF_LDX | BPF_MEM, 6),
4824  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4825  			BPF_STMT(BPF_LDX | BPF_MEM, 7),
4826  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4827  			BPF_STMT(BPF_LDX | BPF_MEM, 8),
4828  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4829  			BPF_STMT(BPF_LDX | BPF_MEM, 9),
4830  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4831  			BPF_STMT(BPF_LDX | BPF_MEM, 10),
4832  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4833  			BPF_STMT(BPF_LDX | BPF_MEM, 11),
4834  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4835  			BPF_STMT(BPF_LDX | BPF_MEM, 12),
4836  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4837  			BPF_STMT(BPF_LDX | BPF_MEM, 13),
4838  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4839  			BPF_STMT(BPF_LDX | BPF_MEM, 14),
4840  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4841  			BPF_STMT(BPF_LDX | BPF_MEM, 15),
4842  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4843  			BPF_STMT(BPF_RET | BPF_A, 0),
4844  		},
4845  		CLASSIC | FLAG_NO_DATA,
4846  		{ },
4847  		{ { 0, 0x2a5a5e5 } },
4848  	},
4849  	{
4850  		"check: SKF_AD_MAX",
4851  		.u.insns = {
4852  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4853  				 SKF_AD_OFF + SKF_AD_MAX),
4854  			BPF_STMT(BPF_RET | BPF_A, 0),
4855  		},
4856  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4857  		{ },
4858  		{ },
4859  		.fill_helper = NULL,
4860  		.expected_errcode = -EINVAL,
4861  	},
4862  	{	/* Passes checker but fails during runtime. */
4863  		"LD [SKF_AD_OFF-1]",
4864  		.u.insns = {
4865  			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4866  				 SKF_AD_OFF - 1),
4867  			BPF_STMT(BPF_RET | BPF_K, 1),
4868  		},
4869  		CLASSIC,
4870  		{ },
4871  		{ { 1, 0 } },
4872  	},
4873  	{
4874  		"load 64-bit immediate",
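		/*
		 * BPF_LD_IMM64 is a two-slot instruction; the shifts below
		 * isolate the upper word (0x5678) and the lower word (0x1234)
		 * of the loaded immediate and check each half separately.
		 */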
4875  		.u.insns_int = {
4876  			BPF_LD_IMM64(R1, 0x567800001234LL),
4877  			BPF_MOV64_REG(R2, R1),
4878  			BPF_MOV64_REG(R3, R2),
4879  			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4880  			BPF_ALU64_IMM(BPF_LSH, R3, 32),
4881  			BPF_ALU64_IMM(BPF_RSH, R3, 32),
4882  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4883  			BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
4884  			BPF_EXIT_INSN(),
4885  			BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
4886  			BPF_EXIT_INSN(),
4887  			BPF_LD_IMM64(R0, 0x1ffffffffLL),
4888  			BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
4889  			BPF_EXIT_INSN(),
4890  		},
4891  		INTERNAL,
4892  		{ },
4893  		{ { 0, 1 } }
4894  	},
4895  	/* BPF_ALU | BPF_MOV | BPF_X */
4896  	{
4897  		"ALU_MOV_X: dst = 2",
4898  		.u.insns_int = {
4899  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
4900  			BPF_ALU32_REG(BPF_MOV, R0, R1),
4901  			BPF_EXIT_INSN(),
4902  		},
4903  		INTERNAL,
4904  		{ },
4905  		{ { 0, 2 } },
4906  	},
4907  	{
4908  		"ALU_MOV_X: dst = 4294967295",
4909  		.u.insns_int = {
4910  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4911  			BPF_ALU32_REG(BPF_MOV, R0, R1),
4912  			BPF_EXIT_INSN(),
4913  		},
4914  		INTERNAL,
4915  		{ },
4916  		{ { 0, 4294967295U } },
4917  	},
4918  	{
4919  		"ALU64_MOV_X: dst = 2",
4920  		.u.insns_int = {
4921  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
4922  			BPF_ALU64_REG(BPF_MOV, R0, R1),
4923  			BPF_EXIT_INSN(),
4924  		},
4925  		INTERNAL,
4926  		{ },
4927  		{ { 0, 2 } },
4928  	},
4929  	{
4930  		"ALU64_MOV_X: dst = 4294967295",
4931  		.u.insns_int = {
4932  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4933  			BPF_ALU64_REG(BPF_MOV, R0, R1),
4934  			BPF_EXIT_INSN(),
4935  		},
4936  		INTERNAL,
4937  		{ },
4938  		{ { 0, 4294967295U } },
4939  	},
4940  	/* BPF_ALU | BPF_MOV | BPF_K */
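	/*
	 * The "zero extension" and "sign extension" cases below pin down the
	 * difference between the 32-bit and 64-bit forms: ALU32 MOV clears
	 * the upper half of the destination, while ALU64 MOV sign-extends
	 * the 32-bit immediate, so (-123) >> 32 is 0 in one case and
	 * 0xffffffff in the other.
	 */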
4941  	{
4942  		"ALU_MOV_K: dst = 2",
4943  		.u.insns_int = {
4944  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
4945  			BPF_EXIT_INSN(),
4946  		},
4947  		INTERNAL,
4948  		{ },
4949  		{ { 0, 2 } },
4950  	},
4951  	{
4952  		"ALU_MOV_K: dst = 4294967295",
4953  		.u.insns_int = {
4954  			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
4955  			BPF_EXIT_INSN(),
4956  		},
4957  		INTERNAL,
4958  		{ },
4959  		{ { 0, 4294967295U } },
4960  	},
4961  	{
4962  		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4963  		.u.insns_int = {
4964  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4965  			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
4966  			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4967  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4968  			BPF_MOV32_IMM(R0, 2),
4969  			BPF_EXIT_INSN(),
4970  			BPF_MOV32_IMM(R0, 1),
4971  			BPF_EXIT_INSN(),
4972  		},
4973  		INTERNAL,
4974  		{ },
4975  		{ { 0, 0x1 } },
4976  	},
4977  	{
4978  		"ALU_MOV_K: small negative",
4979  		.u.insns_int = {
4980  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
4981  			BPF_EXIT_INSN(),
4982  		},
4983  		INTERNAL,
4984  		{ },
4985  		{ { 0, -123 } }
4986  	},
4987  	{
4988  		"ALU_MOV_K: small negative zero extension",
4989  		.u.insns_int = {
4990  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
4991  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
4992  			BPF_EXIT_INSN(),
4993  		},
4994  		INTERNAL,
4995  		{ },
4996  		{ { 0, 0 } }
4997  	},
4998  	{
4999  		"ALU_MOV_K: large negative",
5000  		.u.insns_int = {
5001  			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5002  			BPF_EXIT_INSN(),
5003  		},
5004  		INTERNAL,
5005  		{ },
5006  		{ { 0, -123456789 } }
5007  	},
5008  	{
5009  		"ALU_MOV_K: large negative zero extension",
5010  		.u.insns_int = {
5011  			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5012  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5013  			BPF_EXIT_INSN(),
5014  		},
5015  		INTERNAL,
5016  		{ },
5017  		{ { 0, 0 } }
5018  	},
5019  	{
5020  		"ALU64_MOV_K: dst = 2",
5021  		.u.insns_int = {
5022  			BPF_ALU64_IMM(BPF_MOV, R0, 2),
5023  			BPF_EXIT_INSN(),
5024  		},
5025  		INTERNAL,
5026  		{ },
5027  		{ { 0, 2 } },
5028  	},
5029  	{
5030  		"ALU64_MOV_K: dst = 2147483647",
5031  		.u.insns_int = {
5032  			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
5033  			BPF_EXIT_INSN(),
5034  		},
5035  		INTERNAL,
5036  		{ },
5037  		{ { 0, 2147483647 } },
5038  	},
5039  	{
5040  		"ALU64_MOV_K: dst = 0x0",
5041  		.u.insns_int = {
5042  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5043  			BPF_LD_IMM64(R3, 0x0),
5044  			BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
5045  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5046  			BPF_MOV32_IMM(R0, 2),
5047  			BPF_EXIT_INSN(),
5048  			BPF_MOV32_IMM(R0, 1),
5049  			BPF_EXIT_INSN(),
5050  		},
5051  		INTERNAL,
5052  		{ },
5053  		{ { 0, 0x1 } },
5054  	},
5055  	{
5056  		"ALU64_MOV_K: dst = -1",
5057  		.u.insns_int = {
5058  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5059  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5060  			BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
5061  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5062  			BPF_MOV32_IMM(R0, 2),
5063  			BPF_EXIT_INSN(),
5064  			BPF_MOV32_IMM(R0, 1),
5065  			BPF_EXIT_INSN(),
5066  		},
5067  		INTERNAL,
5068  		{ },
5069  		{ { 0, 0x1 } },
5070  	},
5071  	{
5072  		"ALU64_MOV_K: small negative",
5073  		.u.insns_int = {
5074  			BPF_ALU64_IMM(BPF_MOV, R0, -123),
5075  			BPF_EXIT_INSN(),
5076  		},
5077  		INTERNAL,
5078  		{ },
5079  		{ { 0, -123 } }
5080  	},
5081  	{
5082  		"ALU64_MOV_K: small negative sign extension",
5083  		.u.insns_int = {
5084  			BPF_ALU64_IMM(BPF_MOV, R0, -123),
5085  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5086  			BPF_EXIT_INSN(),
5087  		},
5088  		INTERNAL,
5089  		{ },
5090  		{ { 0, 0xffffffff } }
5091  	},
5092  	{
5093  		"ALU64_MOV_K: large negative",
5094  		.u.insns_int = {
5095  			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5096  			BPF_EXIT_INSN(),
5097  		},
5098  		INTERNAL,
5099  		{ },
5100  		{ { 0, -123456789 } }
5101  	},
5102  	{
5103  		"ALU64_MOV_K: large negative sign extension",
5104  		.u.insns_int = {
5105  			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5106  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5107  			BPF_EXIT_INSN(),
5108  		},
5109  		INTERNAL,
5110  		{ },
5111  		{ { 0, 0xffffffff } }
5112  	},
5113  	/* BPF_ALU | BPF_ADD | BPF_X */
5114  	{
5115  		"ALU_ADD_X: 1 + 2 = 3",
5116  		.u.insns_int = {
5117  			BPF_LD_IMM64(R0, 1),
5118  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5119  			BPF_ALU32_REG(BPF_ADD, R0, R1),
5120  			BPF_EXIT_INSN(),
5121  		},
5122  		INTERNAL,
5123  		{ },
5124  		{ { 0, 3 } },
5125  	},
5126  	{
5127  		"ALU_ADD_X: 1 + 4294967294 = 4294967295",
5128  		.u.insns_int = {
5129  			BPF_LD_IMM64(R0, 1),
5130  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5131  			BPF_ALU32_REG(BPF_ADD, R0, R1),
5132  			BPF_EXIT_INSN(),
5133  		},
5134  		INTERNAL,
5135  		{ },
5136  		{ { 0, 4294967295U } },
5137  	},
5138  	{
5139  		"ALU_ADD_X: 2 + 4294967294 = 0",
5140  		.u.insns_int = {
5141  			BPF_LD_IMM64(R0, 2),
5142  			BPF_LD_IMM64(R1, 4294967294U),
5143  			BPF_ALU32_REG(BPF_ADD, R0, R1),
5144  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5145  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5146  			BPF_EXIT_INSN(),
5147  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5148  			BPF_EXIT_INSN(),
5149  		},
5150  		INTERNAL,
5151  		{ },
5152  		{ { 0, 1 } },
5153  	},
5154  	{
5155  		"ALU64_ADD_X: 1 + 2 = 3",
5156  		.u.insns_int = {
5157  			BPF_LD_IMM64(R0, 1),
5158  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5159  			BPF_ALU64_REG(BPF_ADD, R0, R1),
5160  			BPF_EXIT_INSN(),
5161  		},
5162  		INTERNAL,
5163  		{ },
5164  		{ { 0, 3 } },
5165  	},
5166  	{
5167  		"ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5168  		.u.insns_int = {
5169  			BPF_LD_IMM64(R0, 1),
5170  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5171  			BPF_ALU64_REG(BPF_ADD, R0, R1),
5172  			BPF_EXIT_INSN(),
5173  		},
5174  		INTERNAL,
5175  		{ },
5176  		{ { 0, 4294967295U } },
5177  	},
5178  	{
5179  		"ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5180  		.u.insns_int = {
5181  			BPF_LD_IMM64(R0, 2),
5182  			BPF_LD_IMM64(R1, 4294967294U),
5183  			BPF_LD_IMM64(R2, 4294967296ULL),
5184  			BPF_ALU64_REG(BPF_ADD, R0, R1),
5185  			BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
5186  			BPF_MOV32_IMM(R0, 0),
5187  			BPF_EXIT_INSN(),
5188  			BPF_MOV32_IMM(R0, 1),
5189  			BPF_EXIT_INSN(),
5190  		},
5191  		INTERNAL,
5192  		{ },
5193  		{ { 0, 1 } },
5194  	},
5195  	/* BPF_ALU | BPF_ADD | BPF_K */
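	/*
	 * As with MOV, only the 64-bit form sign-extends the immediate:
	 * ALU32 ADD of 0x80000000 to zero gives 0x80000000, while ALU64 ADD
	 * of the same immediate gives 0xffffffff80000000.
	 */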
5196  	{
5197  		"ALU_ADD_K: 1 + 2 = 3",
5198  		.u.insns_int = {
5199  			BPF_LD_IMM64(R0, 1),
5200  			BPF_ALU32_IMM(BPF_ADD, R0, 2),
5201  			BPF_EXIT_INSN(),
5202  		},
5203  		INTERNAL,
5204  		{ },
5205  		{ { 0, 3 } },
5206  	},
5207  	{
5208  		"ALU_ADD_K: 3 + 0 = 3",
5209  		.u.insns_int = {
5210  			BPF_LD_IMM64(R0, 3),
5211  			BPF_ALU32_IMM(BPF_ADD, R0, 0),
5212  			BPF_EXIT_INSN(),
5213  		},
5214  		INTERNAL,
5215  		{ },
5216  		{ { 0, 3 } },
5217  	},
5218  	{
5219  		"ALU_ADD_K: 1 + 4294967294 = 4294967295",
5220  		.u.insns_int = {
5221  			BPF_LD_IMM64(R0, 1),
5222  			BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
5223  			BPF_EXIT_INSN(),
5224  		},
5225  		INTERNAL,
5226  		{ },
5227  		{ { 0, 4294967295U } },
5228  	},
5229  	{
5230  		"ALU_ADD_K: 4294967294 + 2 = 0",
5231  		.u.insns_int = {
5232  			BPF_LD_IMM64(R0, 4294967294U),
5233  			BPF_ALU32_IMM(BPF_ADD, R0, 2),
5234  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5235  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5236  			BPF_EXIT_INSN(),
5237  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5238  			BPF_EXIT_INSN(),
5239  		},
5240  		INTERNAL,
5241  		{ },
5242  		{ { 0, 1 } },
5243  	},
5244  	{
5245  		"ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5246  		.u.insns_int = {
5247  			BPF_LD_IMM64(R2, 0x0),
5248  			BPF_LD_IMM64(R3, 0x00000000ffffffff),
5249  			BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
5250  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5251  			BPF_MOV32_IMM(R0, 2),
5252  			BPF_EXIT_INSN(),
5253  			BPF_MOV32_IMM(R0, 1),
5254  			BPF_EXIT_INSN(),
5255  		},
5256  		INTERNAL,
5257  		{ },
5258  		{ { 0, 0x1 } },
5259  	},
5260  	{
5261  		"ALU_ADD_K: 0 + 0xffff = 0xffff",
5262  		.u.insns_int = {
5263  			BPF_LD_IMM64(R2, 0x0),
5264  			BPF_LD_IMM64(R3, 0xffff),
5265  			BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
5266  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5267  			BPF_MOV32_IMM(R0, 2),
5268  			BPF_EXIT_INSN(),
5269  			BPF_MOV32_IMM(R0, 1),
5270  			BPF_EXIT_INSN(),
5271  		},
5272  		INTERNAL,
5273  		{ },
5274  		{ { 0, 0x1 } },
5275  	},
5276  	{
5277  		"ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5278  		.u.insns_int = {
5279  			BPF_LD_IMM64(R2, 0x0),
5280  			BPF_LD_IMM64(R3, 0x7fffffff),
5281  			BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
5282  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5283  			BPF_MOV32_IMM(R0, 2),
5284  			BPF_EXIT_INSN(),
5285  			BPF_MOV32_IMM(R0, 1),
5286  			BPF_EXIT_INSN(),
5287  		},
5288  		INTERNAL,
5289  		{ },
5290  		{ { 0, 0x1 } },
5291  	},
5292  	{
5293  		"ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5294  		.u.insns_int = {
5295  			BPF_LD_IMM64(R2, 0x0),
5296  			BPF_LD_IMM64(R3, 0x80000000),
5297  			BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
5298  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5299  			BPF_MOV32_IMM(R0, 2),
5300  			BPF_EXIT_INSN(),
5301  			BPF_MOV32_IMM(R0, 1),
5302  			BPF_EXIT_INSN(),
5303  		},
5304  		INTERNAL,
5305  		{ },
5306  		{ { 0, 0x1 } },
5307  	},
5308  	{
5309  		"ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5310  		.u.insns_int = {
5311  			BPF_LD_IMM64(R2, 0x0),
5312  			BPF_LD_IMM64(R3, 0x80008000),
5313  			BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
5314  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5315  			BPF_MOV32_IMM(R0, 2),
5316  			BPF_EXIT_INSN(),
5317  			BPF_MOV32_IMM(R0, 1),
5318  			BPF_EXIT_INSN(),
5319  		},
5320  		INTERNAL,
5321  		{ },
5322  		{ { 0, 0x1 } },
5323  	},
5324  	{
5325  		"ALU64_ADD_K: 1 + 2 = 3",
5326  		.u.insns_int = {
5327  			BPF_LD_IMM64(R0, 1),
5328  			BPF_ALU64_IMM(BPF_ADD, R0, 2),
5329  			BPF_EXIT_INSN(),
5330  		},
5331  		INTERNAL,
5332  		{ },
5333  		{ { 0, 3 } },
5334  	},
5335  	{
5336  		"ALU64_ADD_K: 3 + 0 = 3",
5337  		.u.insns_int = {
5338  			BPF_LD_IMM64(R0, 3),
5339  			BPF_ALU64_IMM(BPF_ADD, R0, 0),
5340  			BPF_EXIT_INSN(),
5341  		},
5342  		INTERNAL,
5343  		{ },
5344  		{ { 0, 3 } },
5345  	},
5346  	{
5347  		"ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5348  		.u.insns_int = {
5349  			BPF_LD_IMM64(R0, 1),
5350  			BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
5351  			BPF_EXIT_INSN(),
5352  		},
5353  		INTERNAL,
5354  		{ },
5355  		{ { 0, 2147483647 } },
5356  	},
5357  	{
5358  		"ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5359  		.u.insns_int = {
5360  			BPF_LD_IMM64(R0, 4294967294U),
5361  			BPF_LD_IMM64(R1, 4294967296ULL),
5362  			BPF_ALU64_IMM(BPF_ADD, R0, 2),
5363  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5364  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5365  			BPF_EXIT_INSN(),
5366  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5367  			BPF_EXIT_INSN(),
5368  		},
5369  		INTERNAL,
5370  		{ },
5371  		{ { 0, 1 } },
5372  	},
5373  	{
5374  		"ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5375  		.u.insns_int = {
5376  			BPF_LD_IMM64(R0, 2147483646),
5377  			BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
5378  			BPF_EXIT_INSN(),
5379  		},
5380  		INTERNAL,
5381  		{ },
5382  		{ { 0, -1 } },
5383  	},
5384  	{
5385  		"ALU64_ADD_K: 1 + 0 = 1",
5386  		.u.insns_int = {
5387  			BPF_LD_IMM64(R2, 0x1),
5388  			BPF_LD_IMM64(R3, 0x1),
5389  			BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
5390  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5391  			BPF_MOV32_IMM(R0, 2),
5392  			BPF_EXIT_INSN(),
5393  			BPF_MOV32_IMM(R0, 1),
5394  			BPF_EXIT_INSN(),
5395  		},
5396  		INTERNAL,
5397  		{ },
5398  		{ { 0, 0x1 } },
5399  	},
5400  	{
5401  		"ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5402  		.u.insns_int = {
5403  			BPF_LD_IMM64(R2, 0x0),
5404  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5405  			BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
5406  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5407  			BPF_MOV32_IMM(R0, 2),
5408  			BPF_EXIT_INSN(),
5409  			BPF_MOV32_IMM(R0, 1),
5410  			BPF_EXIT_INSN(),
5411  		},
5412  		INTERNAL,
5413  		{ },
5414  		{ { 0, 0x1 } },
5415  	},
5416  	{
5417  		"ALU64_ADD_K: 0 + 0xffff = 0xffff",
5418  		.u.insns_int = {
5419  			BPF_LD_IMM64(R2, 0x0),
5420  			BPF_LD_IMM64(R3, 0xffff),
5421  			BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
5422  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5423  			BPF_MOV32_IMM(R0, 2),
5424  			BPF_EXIT_INSN(),
5425  			BPF_MOV32_IMM(R0, 1),
5426  			BPF_EXIT_INSN(),
5427  		},
5428  		INTERNAL,
5429  		{ },
5430  		{ { 0, 0x1 } },
5431  	},
5432  	{
5433  		"ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5434  		.u.insns_int = {
5435  			BPF_LD_IMM64(R2, 0x0),
5436  			BPF_LD_IMM64(R3, 0x7fffffff),
5437  			BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
5438  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5439  			BPF_MOV32_IMM(R0, 2),
5440  			BPF_EXIT_INSN(),
5441  			BPF_MOV32_IMM(R0, 1),
5442  			BPF_EXIT_INSN(),
5443  		},
5444  		INTERNAL,
5445  		{ },
5446  		{ { 0, 0x1 } },
5447  	},
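	/*
	 * For ALU64 operations the 32-bit immediate is sign-extended to
	 * 64 bits, so adding 0x80000000 or 0x80008000 produces a negative
	 * 64-bit value in the cases below.
	 */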
5448  	{
5449  		"ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5450  		.u.insns_int = {
5451  			BPF_LD_IMM64(R2, 0x0),
5452  			BPF_LD_IMM64(R3, 0xffffffff80000000LL),
5453  			BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
5454  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5455  			BPF_MOV32_IMM(R0, 2),
5456  			BPF_EXIT_INSN(),
5457  			BPF_MOV32_IMM(R0, 1),
5458  			BPF_EXIT_INSN(),
5459  		},
5460  		INTERNAL,
5461  		{ },
5462  		{ { 0, 0x1 } },
5463  	},
5464  	{
5465  		"ALU64_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5466  		.u.insns_int = {
5467  			BPF_LD_IMM64(R2, 0x0),
5468  			BPF_LD_IMM64(R3, 0xffffffff80008000LL),
5469  			BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
5470  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5471  			BPF_MOV32_IMM(R0, 2),
5472  			BPF_EXIT_INSN(),
5473  			BPF_MOV32_IMM(R0, 1),
5474  			BPF_EXIT_INSN(),
5475  		},
5476  		INTERNAL,
5477  		{ },
5478  		{ { 0, 0x1 } },
5479  	},
5480  	/* BPF_ALU | BPF_SUB | BPF_X */
5481  	{
5482  		"ALU_SUB_X: 3 - 1 = 2",
5483  		.u.insns_int = {
5484  			BPF_LD_IMM64(R0, 3),
5485  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
5486  			BPF_ALU32_REG(BPF_SUB, R0, R1),
5487  			BPF_EXIT_INSN(),
5488  		},
5489  		INTERNAL,
5490  		{ },
5491  		{ { 0, 2 } },
5492  	},
5493  	{
5494  		"ALU_SUB_X: 4294967295 - 4294967294 = 1",
5495  		.u.insns_int = {
5496  			BPF_LD_IMM64(R0, 4294967295U),
5497  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5498  			BPF_ALU32_REG(BPF_SUB, R0, R1),
5499  			BPF_EXIT_INSN(),
5500  		},
5501  		INTERNAL,
5502  		{ },
5503  		{ { 0, 1 } },
5504  	},
5505  	{
5506  		"ALU64_SUB_X: 3 - 1 = 2",
5507  		.u.insns_int = {
5508  			BPF_LD_IMM64(R0, 3),
5509  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
5510  			BPF_ALU64_REG(BPF_SUB, R0, R1),
5511  			BPF_EXIT_INSN(),
5512  		},
5513  		INTERNAL,
5514  		{ },
5515  		{ { 0, 2 } },
5516  	},
5517  	{
5518  		"ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5519  		.u.insns_int = {
5520  			BPF_LD_IMM64(R0, 4294967295U),
5521  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5522  			BPF_ALU64_REG(BPF_SUB, R0, R1),
5523  			BPF_EXIT_INSN(),
5524  		},
5525  		INTERNAL,
5526  		{ },
5527  		{ { 0, 1 } },
5528  	},
5529  	/* BPF_ALU | BPF_SUB | BPF_K */
5530  	{
5531  		"ALU_SUB_K: 3 - 1 = 2",
5532  		.u.insns_int = {
5533  			BPF_LD_IMM64(R0, 3),
5534  			BPF_ALU32_IMM(BPF_SUB, R0, 1),
5535  			BPF_EXIT_INSN(),
5536  		},
5537  		INTERNAL,
5538  		{ },
5539  		{ { 0, 2 } },
5540  	},
5541  	{
5542  		"ALU_SUB_K: 3 - 0 = 3",
5543  		.u.insns_int = {
5544  			BPF_LD_IMM64(R0, 3),
5545  			BPF_ALU32_IMM(BPF_SUB, R0, 0),
5546  			BPF_EXIT_INSN(),
5547  		},
5548  		INTERNAL,
5549  		{ },
5550  		{ { 0, 3 } },
5551  	},
5552  	{
5553  		"ALU_SUB_K: 4294967295 - 4294967294 = 1",
5554  		.u.insns_int = {
5555  			BPF_LD_IMM64(R0, 4294967295U),
5556  			BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
5557  			BPF_EXIT_INSN(),
5558  		},
5559  		INTERNAL,
5560  		{ },
5561  		{ { 0, 1 } },
5562  	},
5563  	{
5564  		"ALU64_SUB_K: 3 - 1 = 2",
5565  		.u.insns_int = {
5566  			BPF_LD_IMM64(R0, 3),
5567  			BPF_ALU64_IMM(BPF_SUB, R0, 1),
5568  			BPF_EXIT_INSN(),
5569  		},
5570  		INTERNAL,
5571  		{ },
5572  		{ { 0, 2 } },
5573  	},
5574  	{
5575  		"ALU64_SUB_K: 3 - 0 = 3",
5576  		.u.insns_int = {
5577  			BPF_LD_IMM64(R0, 3),
5578  			BPF_ALU64_IMM(BPF_SUB, R0, 0),
5579  			BPF_EXIT_INSN(),
5580  		},
5581  		INTERNAL,
5582  		{ },
5583  		{ { 0, 3 } },
5584  	},
5585  	{
5586  		"ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5587  		.u.insns_int = {
5588  			BPF_LD_IMM64(R0, 4294967294U),
5589  			BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
5590  			BPF_EXIT_INSN(),
5591  		},
5592  		INTERNAL,
5593  		{ },
5594  		{ { 0, -1 } },
5595  	},
5596  	{
5597  		"ALU64_SUB_K: 2147483646 - 2147483647 = -1",
5598  		.u.insns_int = {
5599  			BPF_LD_IMM64(R0, 2147483646),
5600  			BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
5601  			BPF_EXIT_INSN(),
5602  		},
5603  		INTERNAL,
5604  		{ },
5605  		{ { 0, -1 } },
5606  	},
5607  	/* BPF_ALU | BPF_MUL | BPF_X */
5608  	{
5609  		"ALU_MUL_X: 2 * 3 = 6",
5610  		.u.insns_int = {
5611  			BPF_LD_IMM64(R0, 2),
5612  			BPF_ALU32_IMM(BPF_MOV, R1, 3),
5613  			BPF_ALU32_REG(BPF_MUL, R0, R1),
5614  			BPF_EXIT_INSN(),
5615  		},
5616  		INTERNAL,
5617  		{ },
5618  		{ { 0, 6 } },
5619  	},
5620  	{
5621  		"ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5622  		.u.insns_int = {
5623  			BPF_LD_IMM64(R0, 2),
5624  			BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
5625  			BPF_ALU32_REG(BPF_MUL, R0, R1),
5626  			BPF_EXIT_INSN(),
5627  		},
5628  		INTERNAL,
5629  		{ },
5630  		{ { 0, 0xFFFFFFF0 } },
5631  	},
5632  	{
5633  		"ALU_MUL_X: -1 * -1 = 1",
5634  		.u.insns_int = {
5635  			BPF_LD_IMM64(R0, -1),
5636  			BPF_ALU32_IMM(BPF_MOV, R1, -1),
5637  			BPF_ALU32_REG(BPF_MUL, R0, R1),
5638  			BPF_EXIT_INSN(),
5639  		},
5640  		INTERNAL,
5641  		{ },
5642  		{ { 0, 1 } },
5643  	},
5644  	{
5645  		"ALU64_MUL_X: 2 * 3 = 6",
5646  		.u.insns_int = {
5647  			BPF_LD_IMM64(R0, 2),
5648  			BPF_ALU32_IMM(BPF_MOV, R1, 3),
5649  			BPF_ALU64_REG(BPF_MUL, R0, R1),
5650  			BPF_EXIT_INSN(),
5651  		},
5652  		INTERNAL,
5653  		{ },
5654  		{ { 0, 6 } },
5655  	},
5656  	{
5657  		"ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5658  		.u.insns_int = {
5659  			BPF_LD_IMM64(R0, 1),
5660  			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5661  			BPF_ALU64_REG(BPF_MUL, R0, R1),
5662  			BPF_EXIT_INSN(),
5663  		},
5664  		INTERNAL,
5665  		{ },
5666  		{ { 0, 2147483647 } },
5667  	},
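	/*
	 * The 64x64 multiply cases check the full 64-bit product: the low
	 * word directly in R0, and the high word after a right shift by 32.
	 */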
5668  	{
5669  		"ALU64_MUL_X: 64x64 multiply, low word",
5670  		.u.insns_int = {
5671  			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5672  			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5673  			BPF_ALU64_REG(BPF_MUL, R0, R1),
5674  			BPF_EXIT_INSN(),
5675  		},
5676  		INTERNAL,
5677  		{ },
5678  		{ { 0, 0xe5618cf0 } }
5679  	},
5680  	{
5681  		"ALU64_MUL_X: 64x64 multiply, high word",
5682  		.u.insns_int = {
5683  			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5684  			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5685  			BPF_ALU64_REG(BPF_MUL, R0, R1),
5686  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5687  			BPF_EXIT_INSN(),
5688  		},
5689  		INTERNAL,
5690  		{ },
5691  		{ { 0, 0x2236d88f } }
5692  	},
5693  	/* BPF_ALU | BPF_MUL | BPF_K */
5694  	{
5695  		"ALU_MUL_K: 2 * 3 = 6",
5696  		.u.insns_int = {
5697  			BPF_LD_IMM64(R0, 2),
5698  			BPF_ALU32_IMM(BPF_MUL, R0, 3),
5699  			BPF_EXIT_INSN(),
5700  		},
5701  		INTERNAL,
5702  		{ },
5703  		{ { 0, 6 } },
5704  	},
5705  	{
5706  		"ALU_MUL_K: 3 * 1 = 3",
5707  		.u.insns_int = {
5708  			BPF_LD_IMM64(R0, 3),
5709  			BPF_ALU32_IMM(BPF_MUL, R0, 1),
5710  			BPF_EXIT_INSN(),
5711  		},
5712  		INTERNAL,
5713  		{ },
5714  		{ { 0, 3 } },
5715  	},
5716  	{
5717  		"ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5718  		.u.insns_int = {
5719  			BPF_LD_IMM64(R0, 2),
5720  			BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
5721  			BPF_EXIT_INSN(),
5722  		},
5723  		INTERNAL,
5724  		{ },
5725  		{ { 0, 0xFFFFFFF0 } },
5726  	},
5727  	{
5728  		"ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5729  		.u.insns_int = {
5730  			BPF_LD_IMM64(R2, 0x1),
5731  			BPF_LD_IMM64(R3, 0x00000000ffffffff),
5732  			BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
5733  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5734  			BPF_MOV32_IMM(R0, 2),
5735  			BPF_EXIT_INSN(),
5736  			BPF_MOV32_IMM(R0, 1),
5737  			BPF_EXIT_INSN(),
5738  		},
5739  		INTERNAL,
5740  		{ },
5741  		{ { 0, 0x1 } },
5742  	},
5743  	{
5744  		"ALU64_MUL_K: 2 * 3 = 6",
5745  		.u.insns_int = {
5746  			BPF_LD_IMM64(R0, 2),
5747  			BPF_ALU64_IMM(BPF_MUL, R0, 3),
5748  			BPF_EXIT_INSN(),
5749  		},
5750  		INTERNAL,
5751  		{ },
5752  		{ { 0, 6 } },
5753  	},
5754  	{
5755  		"ALU64_MUL_K: 3 * 1 = 3",
5756  		.u.insns_int = {
5757  			BPF_LD_IMM64(R0, 3),
5758  			BPF_ALU64_IMM(BPF_MUL, R0, 1),
5759  			BPF_EXIT_INSN(),
5760  		},
5761  		INTERNAL,
5762  		{ },
5763  		{ { 0, 3 } },
5764  	},
5765  	{
5766  		"ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5767  		.u.insns_int = {
5768  			BPF_LD_IMM64(R0, 1),
5769  			BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
5770  			BPF_EXIT_INSN(),
5771  		},
5772  		INTERNAL,
5773  		{ },
5774  		{ { 0, 2147483647 } },
5775  	},
5776  	{
5777  		"ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5778  		.u.insns_int = {
5779  			BPF_LD_IMM64(R0, 1),
5780  			BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
5781  			BPF_EXIT_INSN(),
5782  		},
5783  		INTERNAL,
5784  		{ },
5785  		{ { 0, -2147483647 } },
5786  	},
5787  	{
5788  		"ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5789  		.u.insns_int = {
5790  			BPF_LD_IMM64(R2, 0x1),
5791  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5792  			BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
5793  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5794  			BPF_MOV32_IMM(R0, 2),
5795  			BPF_EXIT_INSN(),
5796  			BPF_MOV32_IMM(R0, 1),
5797  			BPF_EXIT_INSN(),
5798  		},
5799  		INTERNAL,
5800  		{ },
5801  		{ { 0, 0x1 } },
5802  	},
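	/*
	 * Same pattern for 64x32 MUL_K: the low and high words of the
	 * 64-bit product are checked separately.
	 */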
5803  	{
5804  		"ALU64_MUL_K: 64x32 multiply, low word",
5805  		.u.insns_int = {
5806  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5807  			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5808  			BPF_EXIT_INSN(),
5809  		},
5810  		INTERNAL,
5811  		{ },
5812  		{ { 0, 0xe242d208 } }
5813  	},
5814  	{
5815  		"ALU64_MUL_K: 64x32 multiply, high word",
5816  		.u.insns_int = {
5817  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5818  			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5819  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5820  			BPF_EXIT_INSN(),
5821  		},
5822  		INTERNAL,
5823  		{ },
5824  		{ { 0, 0xc28f5c28 } }
5825  	},
5826  	/* BPF_ALU | BPF_DIV | BPF_X */
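	/* Note: eBPF BPF_DIV and BPF_MOD are unsigned operations. */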
5827  	{
5828  		"ALU_DIV_X: 6 / 2 = 3",
5829  		.u.insns_int = {
5830  			BPF_LD_IMM64(R0, 6),
5831  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5832  			BPF_ALU32_REG(BPF_DIV, R0, R1),
5833  			BPF_EXIT_INSN(),
5834  		},
5835  		INTERNAL,
5836  		{ },
5837  		{ { 0, 3 } },
5838  	},
5839  	{
5840  		"ALU_DIV_X: 4294967295 / 4294967295 = 1",
5841  		.u.insns_int = {
5842  			BPF_LD_IMM64(R0, 4294967295U),
5843  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
5844  			BPF_ALU32_REG(BPF_DIV, R0, R1),
5845  			BPF_EXIT_INSN(),
5846  		},
5847  		INTERNAL,
5848  		{ },
5849  		{ { 0, 1 } },
5850  	},
5851  	{
5852  		"ALU64_DIV_X: 6 / 2 = 3",
5853  		.u.insns_int = {
5854  			BPF_LD_IMM64(R0, 6),
5855  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5856  			BPF_ALU64_REG(BPF_DIV, R0, R1),
5857  			BPF_EXIT_INSN(),
5858  		},
5859  		INTERNAL,
5860  		{ },
5861  		{ { 0, 3 } },
5862  	},
5863  	{
5864  		"ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5865  		.u.insns_int = {
5866  			BPF_LD_IMM64(R0, 2147483647),
5867  			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5868  			BPF_ALU64_REG(BPF_DIV, R0, R1),
5869  			BPF_EXIT_INSN(),
5870  		},
5871  		INTERNAL,
5872  		{ },
5873  		{ { 0, 1 } },
5874  	},
5875  	{
5876  		"ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5877  		.u.insns_int = {
5878  			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5879  			BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
5880  			BPF_LD_IMM64(R3, 0x0000000000000001LL),
5881  			BPF_ALU64_REG(BPF_DIV, R2, R4),
5882  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5883  			BPF_MOV32_IMM(R0, 2),
5884  			BPF_EXIT_INSN(),
5885  			BPF_MOV32_IMM(R0, 1),
5886  			BPF_EXIT_INSN(),
5887  		},
5888  		INTERNAL,
5889  		{ },
5890  		{ { 0, 0x1 } },
5891  	},
5892  	/* BPF_ALU | BPF_DIV | BPF_K */
5893  	{
5894  		"ALU_DIV_K: 6 / 2 = 3",
5895  		.u.insns_int = {
5896  			BPF_LD_IMM64(R0, 6),
5897  			BPF_ALU32_IMM(BPF_DIV, R0, 2),
5898  			BPF_EXIT_INSN(),
5899  		},
5900  		INTERNAL,
5901  		{ },
5902  		{ { 0, 3 } },
5903  	},
5904  	{
5905  		"ALU_DIV_K: 3 / 1 = 3",
5906  		.u.insns_int = {
5907  			BPF_LD_IMM64(R0, 3),
5908  			BPF_ALU32_IMM(BPF_DIV, R0, 1),
5909  			BPF_EXIT_INSN(),
5910  		},
5911  		INTERNAL,
5912  		{ },
5913  		{ { 0, 3 } },
5914  	},
5915  	{
5916  		"ALU_DIV_K: 4294967295 / 4294967295 = 1",
5917  		.u.insns_int = {
5918  			BPF_LD_IMM64(R0, 4294967295U),
5919  			BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
5920  			BPF_EXIT_INSN(),
5921  		},
5922  		INTERNAL,
5923  		{ },
5924  		{ { 0, 1 } },
5925  	},
5926  	{
5927  		"ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
5928  		.u.insns_int = {
5929  			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5930  			BPF_LD_IMM64(R3, 0x1UL),
5931  			BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
5932  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5933  			BPF_MOV32_IMM(R0, 2),
5934  			BPF_EXIT_INSN(),
5935  			BPF_MOV32_IMM(R0, 1),
5936  			BPF_EXIT_INSN(),
5937  		},
5938  		INTERNAL,
5939  		{ },
5940  		{ { 0, 0x1 } },
5941  	},
5942  	{
5943  		"ALU64_DIV_K: 6 / 2 = 3",
5944  		.u.insns_int = {
5945  			BPF_LD_IMM64(R0, 6),
5946  			BPF_ALU64_IMM(BPF_DIV, R0, 2),
5947  			BPF_EXIT_INSN(),
5948  		},
5949  		INTERNAL,
5950  		{ },
5951  		{ { 0, 3 } },
5952  	},
5953  	{
5954  		"ALU64_DIV_K: 3 / 1 = 3",
5955  		.u.insns_int = {
5956  			BPF_LD_IMM64(R0, 3),
5957  			BPF_ALU64_IMM(BPF_DIV, R0, 1),
5958  			BPF_EXIT_INSN(),
5959  		},
5960  		INTERNAL,
5961  		{ },
5962  		{ { 0, 3 } },
5963  	},
5964  	{
5965  		"ALU64_DIV_K: 2147483647 / 2147483647 = 1",
5966  		.u.insns_int = {
5967  			BPF_LD_IMM64(R0, 2147483647),
5968  			BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
5969  			BPF_EXIT_INSN(),
5970  		},
5971  		INTERNAL,
5972  		{ },
5973  		{ { 0, 1 } },
5974  	},
5975  	{
5976  		"ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5977  		.u.insns_int = {
5978  			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5979  			BPF_LD_IMM64(R3, 0x0000000000000001LL),
5980  			BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
5981  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5982  			BPF_MOV32_IMM(R0, 2),
5983  			BPF_EXIT_INSN(),
5984  			BPF_MOV32_IMM(R0, 1),
5985  			BPF_EXIT_INSN(),
5986  		},
5987  		INTERNAL,
5988  		{ },
5989  		{ { 0, 0x1 } },
5990  	},
5991  	/* BPF_ALU | BPF_MOD | BPF_X */
5992  	{
5993  		"ALU_MOD_X: 3 % 2 = 1",
5994  		.u.insns_int = {
5995  			BPF_LD_IMM64(R0, 3),
5996  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5997  			BPF_ALU32_REG(BPF_MOD, R0, R1),
5998  			BPF_EXIT_INSN(),
5999  		},
6000  		INTERNAL,
6001  		{ },
6002  		{ { 0, 1 } },
6003  	},
6004  	{
6005  		"ALU_MOD_X: 4294967295 % 4294967293 = 2",
6006  		.u.insns_int = {
6007  			BPF_LD_IMM64(R0, 4294967295U),
6008  			BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
6009  			BPF_ALU32_REG(BPF_MOD, R0, R1),
6010  			BPF_EXIT_INSN(),
6011  		},
6012  		INTERNAL,
6013  		{ },
6014  		{ { 0, 2 } },
6015  	},
6016  	{
6017  		"ALU64_MOD_X: 3 % 2 = 1",
6018  		.u.insns_int = {
6019  			BPF_LD_IMM64(R0, 3),
6020  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6021  			BPF_ALU64_REG(BPF_MOD, R0, R1),
6022  			BPF_EXIT_INSN(),
6023  		},
6024  		INTERNAL,
6025  		{ },
6026  		{ { 0, 1 } },
6027  	},
6028  	{
6029  		"ALU64_MOD_X: 2147483647 % 2147483645 = 2",
6030  		.u.insns_int = {
6031  			BPF_LD_IMM64(R0, 2147483647),
6032  			BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
6033  			BPF_ALU64_REG(BPF_MOD, R0, R1),
6034  			BPF_EXIT_INSN(),
6035  		},
6036  		INTERNAL,
6037  		{ },
6038  		{ { 0, 2 } },
6039  	},
6040  	/* BPF_ALU | BPF_MOD | BPF_K */
6041  	{
6042  		"ALU_MOD_K: 3 % 2 = 1",
6043  		.u.insns_int = {
6044  			BPF_LD_IMM64(R0, 3),
6045  			BPF_ALU32_IMM(BPF_MOD, R0, 2),
6046  			BPF_EXIT_INSN(),
6047  		},
6048  		INTERNAL,
6049  		{ },
6050  		{ { 0, 1 } },
6051  	},
6052  	{
6053  		"ALU_MOD_K: 3 % 1 = 0",
6054  		.u.insns_int = {
6055  			BPF_LD_IMM64(R0, 3),
6056  			BPF_ALU32_IMM(BPF_MOD, R0, 1),
6057  			BPF_EXIT_INSN(),
6058  		},
6059  		INTERNAL,
6060  		{ },
6061  		{ { 0, 0 } },
6062  	},
6063  	{
6064  		"ALU_MOD_K: 4294967295 % 4294967293 = 2",
6065  		.u.insns_int = {
6066  			BPF_LD_IMM64(R0, 4294967295U),
6067  			BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
6068  			BPF_EXIT_INSN(),
6069  		},
6070  		INTERNAL,
6071  		{ },
6072  		{ { 0, 2 } },
6073  	},
6074  	{
6075  		"ALU64_MOD_K: 3 % 2 = 1",
6076  		.u.insns_int = {
6077  			BPF_LD_IMM64(R0, 3),
6078  			BPF_ALU64_IMM(BPF_MOD, R0, 2),
6079  			BPF_EXIT_INSN(),
6080  		},
6081  		INTERNAL,
6082  		{ },
6083  		{ { 0, 1 } },
6084  	},
6085  	{
6086  		"ALU64_MOD_K: 3 % 1 = 0",
6087  		.u.insns_int = {
6088  			BPF_LD_IMM64(R0, 3),
6089  			BPF_ALU64_IMM(BPF_MOD, R0, 1),
6090  			BPF_EXIT_INSN(),
6091  		},
6092  		INTERNAL,
6093  		{ },
6094  		{ { 0, 0 } },
6095  	},
6096  	{
6097  		"ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6098  		.u.insns_int = {
6099  			BPF_LD_IMM64(R0, 2147483647),
6100  			BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
6101  			BPF_EXIT_INSN(),
6102  		},
6103  		INTERNAL,
6104  		{ },
6105  		{ { 0, 2 } },
6106  	},
6107  	/* BPF_ALU | BPF_AND | BPF_X */
6108  	{
6109  		"ALU_AND_X: 3 & 2 = 2",
6110  		.u.insns_int = {
6111  			BPF_LD_IMM64(R0, 3),
6112  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6113  			BPF_ALU32_REG(BPF_AND, R0, R1),
6114  			BPF_EXIT_INSN(),
6115  		},
6116  		INTERNAL,
6117  		{ },
6118  		{ { 0, 2 } },
6119  	},
6120  	{
6121  		"ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6122  		.u.insns_int = {
6123  			BPF_LD_IMM64(R0, 0xffffffff),
6124  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6125  			BPF_ALU32_REG(BPF_AND, R0, R1),
6126  			BPF_EXIT_INSN(),
6127  		},
6128  		INTERNAL,
6129  		{ },
6130  		{ { 0, 0xffffffff } },
6131  	},
6132  	{
6133  		"ALU64_AND_X: 3 & 2 = 2",
6134  		.u.insns_int = {
6135  			BPF_LD_IMM64(R0, 3),
6136  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6137  			BPF_ALU64_REG(BPF_AND, R0, R1),
6138  			BPF_EXIT_INSN(),
6139  		},
6140  		INTERNAL,
6141  		{ },
6142  		{ { 0, 2 } },
6143  	},
6144  	{
6145  		"ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6146  		.u.insns_int = {
6147  			BPF_LD_IMM64(R0, 0xffffffff),
6148  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6149  			BPF_ALU64_REG(BPF_AND, R0, R1),
6150  			BPF_EXIT_INSN(),
6151  		},
6152  		INTERNAL,
6153  		{ },
6154  		{ { 0, 0xffffffff } },
6155  	},
6156  	/* BPF_ALU | BPF_AND | BPF_K */
6157  	{
6158  		"ALU_AND_K: 3 & 2 = 2",
6159  		.u.insns_int = {
6160  			BPF_LD_IMM64(R0, 3),
6161  			BPF_ALU32_IMM(BPF_AND, R0, 2),
6162  			BPF_EXIT_INSN(),
6163  		},
6164  		INTERNAL,
6165  		{ },
6166  		{ { 0, 2 } },
6167  	},
6168  	{
6169  		"ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6170  		.u.insns_int = {
6171  			BPF_LD_IMM64(R0, 0xffffffff),
6172  			BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
6173  			BPF_EXIT_INSN(),
6174  		},
6175  		INTERNAL,
6176  		{ },
6177  		{ { 0, 0xffffffff } },
6178  	},
6179  	{
6180  		"ALU_AND_K: Small immediate",
6181  		.u.insns_int = {
6182  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6183  			BPF_ALU32_IMM(BPF_AND, R0, 15),
6184  			BPF_EXIT_INSN(),
6185  		},
6186  		INTERNAL,
6187  		{ },
6188  		{ { 0, 4 } }
6189  	},
6190  	{
6191  		"ALU_AND_K: Large immediate",
6192  		.u.insns_int = {
6193  			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6194  			BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
6195  			BPF_EXIT_INSN(),
6196  		},
6197  		INTERNAL,
6198  		{ },
6199  		{ { 0, 0xa1b2c3d4 } }
6200  	},
6201  	{
6202  		"ALU_AND_K: Zero extension",
6203  		.u.insns_int = {
6204  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6205  			BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
6206  			BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
6207  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6208  			BPF_MOV32_IMM(R0, 2),
6209  			BPF_EXIT_INSN(),
6210  			BPF_MOV32_IMM(R0, 1),
6211  			BPF_EXIT_INSN(),
6212  		},
6213  		INTERNAL,
6214  		{ },
6215  		{ { 0, 1 } }
6216  	},
6217  	{
6218  		"ALU64_AND_K: 3 & 2 = 2",
6219  		.u.insns_int = {
6220  			BPF_LD_IMM64(R0, 3),
6221  			BPF_ALU64_IMM(BPF_AND, R0, 2),
6222  			BPF_EXIT_INSN(),
6223  		},
6224  		INTERNAL,
6225  		{ },
6226  		{ { 0, 2 } },
6227  	},
6228  	{
6229  		"ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6230  		.u.insns_int = {
6231  			BPF_LD_IMM64(R0, 0xffffffff),
6232  			BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
6233  			BPF_EXIT_INSN(),
6234  		},
6235  		INTERNAL,
6236  		{ },
6237  		{ { 0, 0xffffffff } },
6238  	},
6239  	{
6240  		"ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6241  		.u.insns_int = {
6242  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6243  			BPF_LD_IMM64(R3, 0x0000000000000000LL),
6244  			BPF_ALU64_IMM(BPF_AND, R2, 0x0),
6245  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6246  			BPF_MOV32_IMM(R0, 2),
6247  			BPF_EXIT_INSN(),
6248  			BPF_MOV32_IMM(R0, 1),
6249  			BPF_EXIT_INSN(),
6250  		},
6251  		INTERNAL,
6252  		{ },
6253  		{ { 0, 0x1 } },
6254  	},
6255  	{
6256  		"ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6257  		.u.insns_int = {
6258  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6259  			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6260  			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6261  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6262  			BPF_MOV32_IMM(R0, 2),
6263  			BPF_EXIT_INSN(),
6264  			BPF_MOV32_IMM(R0, 1),
6265  			BPF_EXIT_INSN(),
6266  		},
6267  		INTERNAL,
6268  		{ },
6269  		{ { 0, 0x1 } },
6270  	},
6271  	{
6272  		"ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6273  		.u.insns_int = {
6274  			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6275  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6276  			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6277  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6278  			BPF_MOV32_IMM(R0, 2),
6279  			BPF_EXIT_INSN(),
6280  			BPF_MOV32_IMM(R0, 1),
6281  			BPF_EXIT_INSN(),
6282  		},
6283  		INTERNAL,
6284  		{ },
6285  		{ { 0, 0x1 } },
6286  	},
6287  	{
6288  		"ALU64_AND_K: Sign extension 1",
6289  		.u.insns_int = {
6290  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6291  			BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
6292  			BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
6293  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6294  			BPF_MOV32_IMM(R0, 2),
6295  			BPF_EXIT_INSN(),
6296  			BPF_MOV32_IMM(R0, 1),
6297  			BPF_EXIT_INSN(),
6298  		},
6299  		INTERNAL,
6300  		{ },
6301  		{ { 0, 1 } }
6302  	},
6303  	{
6304  		"ALU64_AND_K: Sign extension 2",
6305  		.u.insns_int = {
6306  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6307  			BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
6308  			BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
6309  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6310  			BPF_MOV32_IMM(R0, 2),
6311  			BPF_EXIT_INSN(),
6312  			BPF_MOV32_IMM(R0, 1),
6313  			BPF_EXIT_INSN(),
6314  		},
6315  		INTERNAL,
6316  		{ },
6317  		{ { 0, 1 } }
6318  	},
6319  	/* BPF_ALU | BPF_OR | BPF_X */
6320  	{
6321  		"ALU_OR_X: 1 | 2 = 3",
6322  		.u.insns_int = {
6323  			BPF_LD_IMM64(R0, 1),
6324  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6325  			BPF_ALU32_REG(BPF_OR, R0, R1),
6326  			BPF_EXIT_INSN(),
6327  		},
6328  		INTERNAL,
6329  		{ },
6330  		{ { 0, 3 } },
6331  	},
6332  	{
6333  		"ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6334  		.u.insns_int = {
6335  			BPF_LD_IMM64(R0, 0),
6336  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6337  			BPF_ALU32_REG(BPF_OR, R0, R1),
6338  			BPF_EXIT_INSN(),
6339  		},
6340  		INTERNAL,
6341  		{ },
6342  		{ { 0, 0xffffffff } },
6343  	},
6344  	{
6345  		"ALU64_OR_X: 1 | 2 = 3",
6346  		.u.insns_int = {
6347  			BPF_LD_IMM64(R0, 1),
6348  			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6349  			BPF_ALU64_REG(BPF_OR, R0, R1),
6350  			BPF_EXIT_INSN(),
6351  		},
6352  		INTERNAL,
6353  		{ },
6354  		{ { 0, 3 } },
6355  	},
6356  	{
6357  		"ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6358  		.u.insns_int = {
6359  			BPF_LD_IMM64(R0, 0),
6360  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6361  			BPF_ALU64_REG(BPF_OR, R0, R1),
6362  			BPF_EXIT_INSN(),
6363  		},
6364  		INTERNAL,
6365  		{ },
6366  		{ { 0, 0xffffffff } },
6367  	},
6368  	/* BPF_ALU | BPF_OR | BPF_K */
6369  	{
6370  		"ALU_OR_K: 1 | 2 = 3",
6371  		.u.insns_int = {
6372  			BPF_LD_IMM64(R0, 1),
6373  			BPF_ALU32_IMM(BPF_OR, R0, 2),
6374  			BPF_EXIT_INSN(),
6375  		},
6376  		INTERNAL,
6377  		{ },
6378  		{ { 0, 3 } },
6379  	},
6380  	{
6381  		"ALU_OR_K: 0 | 0xffffffff = 0xffffffff",
6382  		.u.insns_int = {
6383  			BPF_LD_IMM64(R0, 0),
6384  			BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
6385  			BPF_EXIT_INSN(),
6386  		},
6387  		INTERNAL,
6388  		{ },
6389  		{ { 0, 0xffffffff } },
6390  	},
6391  	{
6392  		"ALU_OR_K: Small immediate",
6393  		.u.insns_int = {
6394  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6395  			BPF_ALU32_IMM(BPF_OR, R0, 1),
6396  			BPF_EXIT_INSN(),
6397  		},
6398  		INTERNAL,
6399  		{ },
6400  		{ { 0, 0x01020305 } }
6401  	},
6402  	{
6403  		"ALU_OR_K: Large immediate",
6404  		.u.insns_int = {
6405  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6406  			BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
6407  			BPF_EXIT_INSN(),
6408  		},
6409  		INTERNAL,
6410  		{ },
6411  		{ { 0, 0xa1b2c3d4 } }
6412  	},
6413  	{
6414  		"ALU_OR_K: Zero extension",
6415  		.u.insns_int = {
6416  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6417  			BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
6418  			BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
6419  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6420  			BPF_MOV32_IMM(R0, 2),
6421  			BPF_EXIT_INSN(),
6422  			BPF_MOV32_IMM(R0, 1),
6423  			BPF_EXIT_INSN(),
6424  		},
6425  		INTERNAL,
6426  		{ },
6427  		{ { 0, 1 } }
6428  	},
6429  	{
6430  		"ALU64_OR_K: 1 | 2 = 3",
6431  		.u.insns_int = {
6432  			BPF_LD_IMM64(R0, 1),
6433  			BPF_ALU64_IMM(BPF_OR, R0, 2),
6434  			BPF_EXIT_INSN(),
6435  		},
6436  		INTERNAL,
6437  		{ },
6438  		{ { 0, 3 } },
6439  	},
6440  	{
6441  		"ALU64_OR_K: 0 | 0xffffffff = 0xffffffff",
6442  		.u.insns_int = {
6443  			BPF_LD_IMM64(R0, 0),
6444  			BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
6445  			BPF_EXIT_INSN(),
6446  		},
6447  		INTERNAL,
6448  		{ },
6449  		{ { 0, 0xffffffff } },
6450  	},
6451  	{
6452  		"ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6453  		.u.insns_int = {
6454  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6455  			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6456  			BPF_ALU64_IMM(BPF_OR, R2, 0x0),
6457  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6458  			BPF_MOV32_IMM(R0, 2),
6459  			BPF_EXIT_INSN(),
6460  			BPF_MOV32_IMM(R0, 1),
6461  			BPF_EXIT_INSN(),
6462  		},
6463  		INTERNAL,
6464  		{ },
6465  		{ { 0, 0x1 } },
6466  	},
6467  	{
6468  		"ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6469  		.u.insns_int = {
6470  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6471  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6472  			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6473  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6474  			BPF_MOV32_IMM(R0, 2),
6475  			BPF_EXIT_INSN(),
6476  			BPF_MOV32_IMM(R0, 1),
6477  			BPF_EXIT_INSN(),
6478  		},
6479  		INTERNAL,
6480  		{ },
6481  		{ { 0, 0x1 } },
6482  	},
6483  	{
6484  		"ALU64_OR_K: 0x0000000000000000 | -1 = 0xffffffffffffffff",
6485  		.u.insns_int = {
6486  			BPF_LD_IMM64(R2, 0x0000000000000000LL),
6487  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6488  			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6489  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6490  			BPF_MOV32_IMM(R0, 2),
6491  			BPF_EXIT_INSN(),
6492  			BPF_MOV32_IMM(R0, 1),
6493  			BPF_EXIT_INSN(),
6494  		},
6495  		INTERNAL,
6496  		{ },
6497  		{ { 0, 0x1 } },
6498  	},
6499  	{
6500  		"ALU64_OR_K: Sign extension 1",
6501  		.u.insns_int = {
6502  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6503  			BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
6504  			BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
6505  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6506  			BPF_MOV32_IMM(R0, 2),
6507  			BPF_EXIT_INSN(),
6508  			BPF_MOV32_IMM(R0, 1),
6509  			BPF_EXIT_INSN(),
6510  		},
6511  		INTERNAL,
6512  		{ },
6513  		{ { 0, 1 } }
6514  	},
6515  	{
6516  		"ALU64_OR_K: Sign extension 2",
6517  		.u.insns_int = {
6518  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6519  			BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
6520  			BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
6521  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6522  			BPF_MOV32_IMM(R0, 2),
6523  			BPF_EXIT_INSN(),
6524  			BPF_MOV32_IMM(R0, 1),
6525  			BPF_EXIT_INSN(),
6526  		},
6527  		INTERNAL,
6528  		{ },
6529  		{ { 0, 1 } }
6530  	},
6531  	/* BPF_ALU | BPF_XOR | BPF_X */
6532  	{
6533  		"ALU_XOR_X: 5 ^ 6 = 3",
6534  		.u.insns_int = {
6535  			BPF_LD_IMM64(R0, 5),
6536  			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6537  			BPF_ALU32_REG(BPF_XOR, R0, R1),
6538  			BPF_EXIT_INSN(),
6539  		},
6540  		INTERNAL,
6541  		{ },
6542  		{ { 0, 3 } },
6543  	},
6544  	{
6545  		"ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6546  		.u.insns_int = {
6547  			BPF_LD_IMM64(R0, 1),
6548  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6549  			BPF_ALU32_REG(BPF_XOR, R0, R1),
6550  			BPF_EXIT_INSN(),
6551  		},
6552  		INTERNAL,
6553  		{ },
6554  		{ { 0, 0xfffffffe } },
6555  	},
6556  	{
6557  		"ALU64_XOR_X: 5 ^ 6 = 3",
6558  		.u.insns_int = {
6559  			BPF_LD_IMM64(R0, 5),
6560  			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6561  			BPF_ALU64_REG(BPF_XOR, R0, R1),
6562  			BPF_EXIT_INSN(),
6563  		},
6564  		INTERNAL,
6565  		{ },
6566  		{ { 0, 3 } },
6567  	},
6568  	{
6569  		"ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6570  		.u.insns_int = {
6571  			BPF_LD_IMM64(R0, 1),
6572  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6573  			BPF_ALU64_REG(BPF_XOR, R0, R1),
6574  			BPF_EXIT_INSN(),
6575  		},
6576  		INTERNAL,
6577  		{ },
6578  		{ { 0, 0xfffffffe } },
6579  	},
6580  	/* BPF_ALU | BPF_XOR | BPF_K */
6581  	{
6582  		"ALU_XOR_K: 5 ^ 6 = 3",
6583  		.u.insns_int = {
6584  			BPF_LD_IMM64(R0, 5),
6585  			BPF_ALU32_IMM(BPF_XOR, R0, 6),
6586  			BPF_EXIT_INSN(),
6587  		},
6588  		INTERNAL,
6589  		{ },
6590  		{ { 0, 3 } },
6591  	},
6592  	{
6593  		"ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6594  		.u.insns_int = {
6595  			BPF_LD_IMM64(R0, 1),
6596  			BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
6597  			BPF_EXIT_INSN(),
6598  		},
6599  		INTERNAL,
6600  		{ },
6601  		{ { 0, 0xfffffffe } },
6602  	},
6603  	{
6604  		"ALU_XOR_K: Small immediate",
6605  		.u.insns_int = {
6606  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6607  			BPF_ALU32_IMM(BPF_XOR, R0, 15),
6608  			BPF_EXIT_INSN(),
6609  		},
6610  		INTERNAL,
6611  		{ },
6612  		{ { 0, 0x0102030b } }
6613  	},
6614  	{
6615  		"ALU_XOR_K: Large immediate",
6616  		.u.insns_int = {
6617  			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6618  			BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
6619  			BPF_EXIT_INSN(),
6620  		},
6621  		INTERNAL,
6622  		{ },
6623  		{ { 0, 0x5e4d3c2b } }
6624  	},
6625  	{
6626  		"ALU_XOR_K: Zero extension",
6627  		.u.insns_int = {
6628  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6629  			BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
6630  			BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6631  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6632  			BPF_MOV32_IMM(R0, 2),
6633  			BPF_EXIT_INSN(),
6634  			BPF_MOV32_IMM(R0, 1),
6635  			BPF_EXIT_INSN(),
6636  		},
6637  		INTERNAL,
6638  		{ },
6639  		{ { 0, 1 } }
6640  	},
6641  	{
6642  		"ALU64_XOR_K: 5 ^ 6 = 3",
6643  		.u.insns_int = {
6644  			BPF_LD_IMM64(R0, 5),
6645  			BPF_ALU64_IMM(BPF_XOR, R0, 6),
6646  			BPF_EXIT_INSN(),
6647  		},
6648  		INTERNAL,
6649  		{ },
6650  		{ { 0, 3 } },
6651  	},
6652  	{
6653  		"ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6654  		.u.insns_int = {
6655  			BPF_LD_IMM64(R0, 1),
6656  			BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
6657  			BPF_EXIT_INSN(),
6658  		},
6659  		INTERNAL,
6660  		{ },
6661  		{ { 0, 0xfffffffe } },
6662  	},
6663  	{
6664  		"ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6665  		.u.insns_int = {
6666  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6667  			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6668  			BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
6669  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6670  			BPF_MOV32_IMM(R0, 2),
6671  			BPF_EXIT_INSN(),
6672  			BPF_MOV32_IMM(R0, 1),
6673  			BPF_EXIT_INSN(),
6674  		},
6675  		INTERNAL,
6676  		{ },
6677  		{ { 0, 0x1 } },
6678  	},
6679  	{
6680  		"ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6681  		.u.insns_int = {
6682  			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6683  			BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
6684  			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6685  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6686  			BPF_MOV32_IMM(R0, 2),
6687  			BPF_EXIT_INSN(),
6688  			BPF_MOV32_IMM(R0, 1),
6689  			BPF_EXIT_INSN(),
6690  		},
6691  		INTERNAL,
6692  		{ },
6693  		{ { 0, 0x1 } },
6694  	},
6695  	{
6696  		"ALU64_XOR_K: 0x0000000000000000 ^ -1 = 0xffffffffffffffff",
6697  		.u.insns_int = {
6698  			BPF_LD_IMM64(R2, 0x0000000000000000LL),
6699  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6700  			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6701  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6702  			BPF_MOV32_IMM(R0, 2),
6703  			BPF_EXIT_INSN(),
6704  			BPF_MOV32_IMM(R0, 1),
6705  			BPF_EXIT_INSN(),
6706  		},
6707  		INTERNAL,
6708  		{ },
6709  		{ { 0, 0x1 } },
6710  	},
6711  	{
6712  		"ALU64_XOR_K: Sign extension 1",
6713  		.u.insns_int = {
6714  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6715  			BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
6716  			BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
6717  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6718  			BPF_MOV32_IMM(R0, 2),
6719  			BPF_EXIT_INSN(),
6720  			BPF_MOV32_IMM(R0, 1),
6721  			BPF_EXIT_INSN(),
6722  		},
6723  		INTERNAL,
6724  		{ },
6725  		{ { 0, 1 } }
6726  	},
6727  	{
6728  		"ALU64_XOR_K: Sign extension 2",
6729  		.u.insns_int = {
6730  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6731  			BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
6732  			BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6733  			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6734  			BPF_MOV32_IMM(R0, 2),
6735  			BPF_EXIT_INSN(),
6736  			BPF_MOV32_IMM(R0, 1),
6737  			BPF_EXIT_INSN(),
6738  		},
6739  		INTERNAL,
6740  		{ },
6741  		{ { 0, 1 } }
6742  	},
6743  	/* BPF_ALU | BPF_LSH | BPF_X */
6744  	{
6745  		"ALU_LSH_X: 1 << 1 = 2",
6746  		.u.insns_int = {
6747  			BPF_LD_IMM64(R0, 1),
6748  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6749  			BPF_ALU32_REG(BPF_LSH, R0, R1),
6750  			BPF_EXIT_INSN(),
6751  		},
6752  		INTERNAL,
6753  		{ },
6754  		{ { 0, 2 } },
6755  	},
6756  	{
6757  		"ALU_LSH_X: 1 << 31 = 0x80000000",
6758  		.u.insns_int = {
6759  			BPF_LD_IMM64(R0, 1),
6760  			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6761  			BPF_ALU32_REG(BPF_LSH, R0, R1),
6762  			BPF_EXIT_INSN(),
6763  		},
6764  		INTERNAL,
6765  		{ },
6766  		{ { 0, 0x80000000 } },
6767  	},
6768  	{
6769  		"ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6770  		.u.insns_int = {
6771  			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6772  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6773  			BPF_ALU32_REG(BPF_LSH, R0, R1),
6774  			BPF_EXIT_INSN(),
6775  		},
6776  		INTERNAL,
6777  		{ },
6778  		{ { 0, 0x45678000 } }
6779  	},
6780  	{
6781  		"ALU64_LSH_X: 1 << 1 = 2",
6782  		.u.insns_int = {
6783  			BPF_LD_IMM64(R0, 1),
6784  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6785  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6786  			BPF_EXIT_INSN(),
6787  		},
6788  		INTERNAL,
6789  		{ },
6790  		{ { 0, 2 } },
6791  	},
6792  	{
6793  		"ALU64_LSH_X: 1 << 31 = 0x80000000",
6794  		.u.insns_int = {
6795  			BPF_LD_IMM64(R0, 1),
6796  			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6797  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6798  			BPF_EXIT_INSN(),
6799  		},
6800  		INTERNAL,
6801  		{ },
6802  		{ { 0, 0x80000000 } },
6803  	},
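	/*
	 * The "low word"/"high word" variants below check both halves of
	 * the 64-bit shift result; the high word is extracted with an
	 * additional right shift by 32.
	 */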
6804  	{
6805  		"ALU64_LSH_X: Shift < 32, low word",
6806  		.u.insns_int = {
6807  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6808  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6809  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6810  			BPF_EXIT_INSN(),
6811  		},
6812  		INTERNAL,
6813  		{ },
6814  		{ { 0, 0xbcdef000 } }
6815  	},
6816  	{
6817  		"ALU64_LSH_X: Shift < 32, high word",
6818  		.u.insns_int = {
6819  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6820  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6821  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6822  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6823  			BPF_EXIT_INSN(),
6824  		},
6825  		INTERNAL,
6826  		{ },
6827  		{ { 0, 0x3456789a } }
6828  	},
6829  	{
6830  		"ALU64_LSH_X: Shift > 32, low word",
6831  		.u.insns_int = {
6832  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6833  			BPF_ALU32_IMM(BPF_MOV, R1, 36),
6834  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6835  			BPF_EXIT_INSN(),
6836  		},
6837  		INTERNAL,
6838  		{ },
6839  		{ { 0, 0 } }
6840  	},
6841  	{
6842  		"ALU64_LSH_X: Shift > 32, high word",
6843  		.u.insns_int = {
6844  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6845  			BPF_ALU32_IMM(BPF_MOV, R1, 36),
6846  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6847  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6848  			BPF_EXIT_INSN(),
6849  		},
6850  		INTERNAL,
6851  		{ },
6852  		{ { 0, 0x9abcdef0 } }
6853  	},
6854  	{
6855  		"ALU64_LSH_X: Shift == 32, low word",
6856  		.u.insns_int = {
6857  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6858  			BPF_ALU32_IMM(BPF_MOV, R1, 32),
6859  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6860  			BPF_EXIT_INSN(),
6861  		},
6862  		INTERNAL,
6863  		{ },
6864  		{ { 0, 0 } }
6865  	},
6866  	{
6867  		"ALU64_LSH_X: Shift == 32, high word",
6868  		.u.insns_int = {
6869  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6870  			BPF_ALU32_IMM(BPF_MOV, R1, 32),
6871  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6872  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6873  			BPF_EXIT_INSN(),
6874  		},
6875  		INTERNAL,
6876  		{ },
6877  		{ { 0, 0x89abcdef } }
6878  	},
6879  	{
6880  		"ALU64_LSH_X: Zero shift, low word",
6881  		.u.insns_int = {
6882  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6883  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
6884  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6885  			BPF_EXIT_INSN(),
6886  		},
6887  		INTERNAL,
6888  		{ },
6889  		{ { 0, 0x89abcdef } }
6890  	},
6891  	{
6892  		"ALU64_LSH_X: Zero shift, high word",
6893  		.u.insns_int = {
6894  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6895  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
6896  			BPF_ALU64_REG(BPF_LSH, R0, R1),
6897  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6898  			BPF_EXIT_INSN(),
6899  		},
6900  		INTERNAL,
6901  		{ },
6902  		{ { 0, 0x01234567 } }
6903  	},
6904  	/* BPF_ALU | BPF_LSH | BPF_K */
6905  	{
6906  		"ALU_LSH_K: 1 << 1 = 2",
6907  		.u.insns_int = {
6908  			BPF_LD_IMM64(R0, 1),
6909  			BPF_ALU32_IMM(BPF_LSH, R0, 1),
6910  			BPF_EXIT_INSN(),
6911  		},
6912  		INTERNAL,
6913  		{ },
6914  		{ { 0, 2 } },
6915  	},
6916  	{
6917  		"ALU_LSH_K: 1 << 31 = 0x80000000",
6918  		.u.insns_int = {
6919  			BPF_LD_IMM64(R0, 1),
6920  			BPF_ALU32_IMM(BPF_LSH, R0, 31),
6921  			BPF_EXIT_INSN(),
6922  		},
6923  		INTERNAL,
6924  		{ },
6925  		{ { 0, 0x80000000 } },
6926  	},
6927  	{
6928  		"ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
6929  		.u.insns_int = {
6930  			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6931  			BPF_ALU32_IMM(BPF_LSH, R0, 12),
6932  			BPF_EXIT_INSN(),
6933  		},
6934  		INTERNAL,
6935  		{ },
6936  		{ { 0, 0x45678000 } }
6937  	},
6938  	{
6939  		"ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
6940  		.u.insns_int = {
6941  			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6942  			BPF_ALU32_IMM(BPF_LSH, R0, 0),
6943  			BPF_EXIT_INSN(),
6944  		},
6945  		INTERNAL,
6946  		{ },
6947  		{ { 0, 0x12345678 } }
6948  	},
6949  	{
6950  		"ALU64_LSH_K: 1 << 1 = 2",
6951  		.u.insns_int = {
6952  			BPF_LD_IMM64(R0, 1),
6953  			BPF_ALU64_IMM(BPF_LSH, R0, 1),
6954  			BPF_EXIT_INSN(),
6955  		},
6956  		INTERNAL,
6957  		{ },
6958  		{ { 0, 2 } },
6959  	},
6960  	{
6961  		"ALU64_LSH_K: 1 << 31 = 0x80000000",
6962  		.u.insns_int = {
6963  			BPF_LD_IMM64(R0, 1),
6964  			BPF_ALU64_IMM(BPF_LSH, R0, 31),
6965  			BPF_EXIT_INSN(),
6966  		},
6967  		INTERNAL,
6968  		{ },
6969  		{ { 0, 0x80000000 } },
6970  	},
6971  	{
6972  		"ALU64_LSH_K: Shift < 32, low word",
6973  		.u.insns_int = {
6974  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6975  			BPF_ALU64_IMM(BPF_LSH, R0, 12),
6976  			BPF_EXIT_INSN(),
6977  		},
6978  		INTERNAL,
6979  		{ },
6980  		{ { 0, 0xbcdef000 } }
6981  	},
6982  	{
6983  		"ALU64_LSH_K: Shift < 32, high word",
6984  		.u.insns_int = {
6985  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6986  			BPF_ALU64_IMM(BPF_LSH, R0, 12),
6987  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6988  			BPF_EXIT_INSN(),
6989  		},
6990  		INTERNAL,
6991  		{ },
6992  		{ { 0, 0x3456789a } }
6993  	},
6994  	{
6995  		"ALU64_LSH_K: Shift > 32, low word",
6996  		.u.insns_int = {
6997  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6998  			BPF_ALU64_IMM(BPF_LSH, R0, 36),
6999  			BPF_EXIT_INSN(),
7000  		},
7001  		INTERNAL,
7002  		{ },
7003  		{ { 0, 0 } }
7004  	},
7005  	{
7006  		"ALU64_LSH_K: Shift > 32, high word",
7007  		.u.insns_int = {
7008  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7009  			BPF_ALU64_IMM(BPF_LSH, R0, 36),
7010  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7011  			BPF_EXIT_INSN(),
7012  		},
7013  		INTERNAL,
7014  		{ },
7015  		{ { 0, 0x9abcdef0 } }
7016  	},
7017  	{
7018  		"ALU64_LSH_K: Shift == 32, low word",
7019  		.u.insns_int = {
7020  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7021  			BPF_ALU64_IMM(BPF_LSH, R0, 32),
7022  			BPF_EXIT_INSN(),
7023  		},
7024  		INTERNAL,
7025  		{ },
7026  		{ { 0, 0 } }
7027  	},
7028  	{
7029  		"ALU64_LSH_K: Shift == 32, high word",
7030  		.u.insns_int = {
7031  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7032  			BPF_ALU64_IMM(BPF_LSH, R0, 32),
7033  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7034  			BPF_EXIT_INSN(),
7035  		},
7036  		INTERNAL,
7037  		{ },
7038  		{ { 0, 0x89abcdef } }
7039  	},
7040  	{
7041  		"ALU64_LSH_K: Zero shift",
7042  		.u.insns_int = {
7043  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7044  			BPF_ALU64_IMM(BPF_LSH, R0, 0),
7045  			BPF_EXIT_INSN(),
7046  		},
7047  		INTERNAL,
7048  		{ },
7049  		{ { 0, 0x89abcdef } }
7050  	},
7051  	/* BPF_ALU | BPF_RSH | BPF_X */
7052  	{
7053  		"ALU_RSH_X: 2 >> 1 = 1",
7054  		.u.insns_int = {
7055  			BPF_LD_IMM64(R0, 2),
7056  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7057  			BPF_ALU32_REG(BPF_RSH, R0, R1),
7058  			BPF_EXIT_INSN(),
7059  		},
7060  		INTERNAL,
7061  		{ },
7062  		{ { 0, 1 } },
7063  	},
7064  	{
7065  		"ALU_RSH_X: 0x80000000 >> 31 = 1",
7066  		.u.insns_int = {
7067  			BPF_LD_IMM64(R0, 0x80000000),
7068  			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7069  			BPF_ALU32_REG(BPF_RSH, R0, R1),
7070  			BPF_EXIT_INSN(),
7071  		},
7072  		INTERNAL,
7073  		{ },
7074  		{ { 0, 1 } },
7075  	},
7076  	{
7077  		"ALU_RSH_X: 0x12345678 >> 20 = 0x123",
7078  		.u.insns_int = {
7079  			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7080  			BPF_ALU32_IMM(BPF_MOV, R1, 20),
7081  			BPF_ALU32_REG(BPF_RSH, R0, R1),
7082  			BPF_EXIT_INSN(),
7083  		},
7084  		INTERNAL,
7085  		{ },
7086  		{ { 0, 0x123 } }
7087  	},
7088  	{
7089  		"ALU64_RSH_X: 2 >> 1 = 1",
7090  		.u.insns_int = {
7091  			BPF_LD_IMM64(R0, 2),
7092  			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7093  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7094  			BPF_EXIT_INSN(),
7095  		},
7096  		INTERNAL,
7097  		{ },
7098  		{ { 0, 1 } },
7099  	},
7100  	{
7101  		"ALU64_RSH_X: 0x80000000 >> 31 = 1",
7102  		.u.insns_int = {
7103  			BPF_LD_IMM64(R0, 0x80000000),
7104  			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7105  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7106  			BPF_EXIT_INSN(),
7107  		},
7108  		INTERNAL,
7109  		{ },
7110  		{ { 0, 1 } },
7111  	},
7112  	{
7113  		"ALU64_RSH_X: Shift < 32, low word",
7114  		.u.insns_int = {
7115  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7116  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7117  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7118  			BPF_EXIT_INSN(),
7119  		},
7120  		INTERNAL,
7121  		{ },
7122  		{ { 0, 0x56789abc } }
7123  	},
7124  	{
7125  		"ALU64_RSH_X: Shift < 32, high word",
7126  		.u.insns_int = {
7127  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7128  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7129  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7130  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7131  			BPF_EXIT_INSN(),
7132  		},
7133  		INTERNAL,
7134  		{ },
7135  		{ { 0, 0x00081234 } }
7136  	},
7137  	{
7138  		"ALU64_RSH_X: Shift > 32, low word",
7139  		.u.insns_int = {
7140  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7141  			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7142  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7143  			BPF_EXIT_INSN(),
7144  		},
7145  		INTERNAL,
7146  		{ },
7147  		{ { 0, 0x08123456 } }
7148  	},
7149  	{
7150  		"ALU64_RSH_X: Shift > 32, high word",
7151  		.u.insns_int = {
7152  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7153  			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7154  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7155  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7156  			BPF_EXIT_INSN(),
7157  		},
7158  		INTERNAL,
7159  		{ },
7160  		{ { 0, 0 } }
7161  	},
7162  	{
7163  		"ALU64_RSH_X: Shift == 32, low word",
7164  		.u.insns_int = {
7165  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7166  			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7167  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7168  			BPF_EXIT_INSN(),
7169  		},
7170  		INTERNAL,
7171  		{ },
7172  		{ { 0, 0x81234567 } }
7173  	},
7174  	{
7175  		"ALU64_RSH_X: Shift == 32, high word",
7176  		.u.insns_int = {
7177  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7178  			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7179  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7180  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7181  			BPF_EXIT_INSN(),
7182  		},
7183  		INTERNAL,
7184  		{ },
7185  		{ { 0, 0 } }
7186  	},
7187  	{
7188  		"ALU64_RSH_X: Zero shift, low word",
7189  		.u.insns_int = {
7190  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7191  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7192  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7193  			BPF_EXIT_INSN(),
7194  		},
7195  		INTERNAL,
7196  		{ },
7197  		{ { 0, 0x89abcdef } }
7198  	},
7199  	{
7200  		"ALU64_RSH_X: Zero shift, high word",
7201  		.u.insns_int = {
7202  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7203  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7204  			BPF_ALU64_REG(BPF_RSH, R0, R1),
7205  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7206  			BPF_EXIT_INSN(),
7207  		},
7208  		INTERNAL,
7209  		{ },
7210  		{ { 0, 0x81234567 } }
7211  	},
7212  	/* BPF_ALU | BPF_RSH | BPF_K */
7213  	{
7214  		"ALU_RSH_K: 2 >> 1 = 1",
7215  		.u.insns_int = {
7216  			BPF_LD_IMM64(R0, 2),
7217  			BPF_ALU32_IMM(BPF_RSH, R0, 1),
7218  			BPF_EXIT_INSN(),
7219  		},
7220  		INTERNAL,
7221  		{ },
7222  		{ { 0, 1 } },
7223  	},
7224  	{
7225  		"ALU_RSH_K: 0x80000000 >> 31 = 1",
7226  		.u.insns_int = {
7227  			BPF_LD_IMM64(R0, 0x80000000),
7228  			BPF_ALU32_IMM(BPF_RSH, R0, 31),
7229  			BPF_EXIT_INSN(),
7230  		},
7231  		INTERNAL,
7232  		{ },
7233  		{ { 0, 1 } },
7234  	},
7235  	{
7236  		"ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7237  		.u.insns_int = {
7238  			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7239  			BPF_ALU32_IMM(BPF_RSH, R0, 20),
7240  			BPF_EXIT_INSN(),
7241  		},
7242  		INTERNAL,
7243  		{ },
7244  		{ { 0, 0x123 } }
7245  	},
7246  	{
7247  		"ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7248  		.u.insns_int = {
7249  			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7250  			BPF_ALU32_IMM(BPF_RSH, R0, 0),
7251  			BPF_EXIT_INSN(),
7252  		},
7253  		INTERNAL,
7254  		{ },
7255  		{ { 0, 0x12345678 } }
7256  	},
7257  	{
7258  		"ALU64_RSH_K: 2 >> 1 = 1",
7259  		.u.insns_int = {
7260  			BPF_LD_IMM64(R0, 2),
7261  			BPF_ALU64_IMM(BPF_RSH, R0, 1),
7262  			BPF_EXIT_INSN(),
7263  		},
7264  		INTERNAL,
7265  		{ },
7266  		{ { 0, 1 } },
7267  	},
7268  	{
7269  		"ALU64_RSH_K: 0x80000000 >> 31 = 1",
7270  		.u.insns_int = {
7271  			BPF_LD_IMM64(R0, 0x80000000),
7272  			BPF_ALU64_IMM(BPF_RSH, R0, 31),
7273  			BPF_EXIT_INSN(),
7274  		},
7275  		INTERNAL,
7276  		{ },
7277  		{ { 0, 1 } },
7278  	},
7279  	{
7280  		"ALU64_RSH_K: Shift < 32, low word",
7281  		.u.insns_int = {
7282  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7283  			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7284  			BPF_EXIT_INSN(),
7285  		},
7286  		INTERNAL,
7287  		{ },
7288  		{ { 0, 0x56789abc } }
7289  	},
7290  	{
7291  		"ALU64_RSH_K: Shift < 32, high word",
7292  		.u.insns_int = {
7293  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7294  			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7295  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7296  			BPF_EXIT_INSN(),
7297  		},
7298  		INTERNAL,
7299  		{ },
7300  		{ { 0, 0x00081234 } }
7301  	},
7302  	{
7303  		"ALU64_RSH_K: Shift > 32, low word",
7304  		.u.insns_int = {
7305  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7306  			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7307  			BPF_EXIT_INSN(),
7308  		},
7309  		INTERNAL,
7310  		{ },
7311  		{ { 0, 0x08123456 } }
7312  	},
7313  	{
7314  		"ALU64_RSH_K: Shift > 32, high word",
7315  		.u.insns_int = {
7316  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7317  			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7318  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7319  			BPF_EXIT_INSN(),
7320  		},
7321  		INTERNAL,
7322  		{ },
7323  		{ { 0, 0 } }
7324  	},
7325  	{
7326  		"ALU64_RSH_K: Shift == 32, low word",
7327  		.u.insns_int = {
7328  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7329  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7330  			BPF_EXIT_INSN(),
7331  		},
7332  		INTERNAL,
7333  		{ },
7334  		{ { 0, 0x81234567 } }
7335  	},
7336  	{
7337  		"ALU64_RSH_K: Shift == 32, high word",
7338  		.u.insns_int = {
7339  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7340  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7341  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7342  			BPF_EXIT_INSN(),
7343  		},
7344  		INTERNAL,
7345  		{ },
7346  		{ { 0, 0 } }
7347  	},
7348  	{
7349  		"ALU64_RSH_K: Zero shift",
7350  		.u.insns_int = {
7351  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7352  			BPF_ALU64_IMM(BPF_RSH, R0, 0),
7353  			BPF_EXIT_INSN(),
7354  		},
7355  		INTERNAL,
7356  		{ },
7357  		{ { 0, 0x89abcdef } }
7358  	},
7359  	/* BPF_ALU | BPF_ARSH | BPF_X */
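	/*
	 * BPF_ARSH is an arithmetic (sign-preserving) right shift; the
	 * high-word cases verify that the sign bit is replicated into the
	 * upper 32 bits.
	 */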
7360  	{
7361  		"ALU32_ARSH_X: -1234 >> 7 = -10",
7362  		.u.insns_int = {
7363  			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7364  			BPF_ALU32_IMM(BPF_MOV, R1, 7),
7365  			BPF_ALU32_REG(BPF_ARSH, R0, R1),
7366  			BPF_EXIT_INSN(),
7367  		},
7368  		INTERNAL,
7369  		{ },
7370  		{ { 0, -10 } }
7371  	},
7372  	{
7373  		"ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7374  		.u.insns_int = {
7375  			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7376  			BPF_ALU32_IMM(BPF_MOV, R1, 40),
7377  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7378  			BPF_EXIT_INSN(),
7379  		},
7380  		INTERNAL,
7381  		{ },
7382  		{ { 0, 0xffff00ff } },
7383  	},
7384  	{
7385  		"ALU64_ARSH_X: Shift < 32, low word",
7386  		.u.insns_int = {
7387  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7388  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7389  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7390  			BPF_EXIT_INSN(),
7391  		},
7392  		INTERNAL,
7393  		{ },
7394  		{ { 0, 0x56789abc } }
7395  	},
7396  	{
7397  		"ALU64_ARSH_X: Shift < 32, high word",
7398  		.u.insns_int = {
7399  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7400  			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7401  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7402  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7403  			BPF_EXIT_INSN(),
7404  		},
7405  		INTERNAL,
7406  		{ },
7407  		{ { 0, 0xfff81234 } }
7408  	},
7409  	{
7410  		"ALU64_ARSH_X: Shift > 32, low word",
7411  		.u.insns_int = {
7412  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7413  			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7414  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7415  			BPF_EXIT_INSN(),
7416  		},
7417  		INTERNAL,
7418  		{ },
7419  		{ { 0, 0xf8123456 } }
7420  	},
7421  	{
7422  		"ALU64_ARSH_X: Shift > 32, high word",
7423  		.u.insns_int = {
7424  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7425  			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7426  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7427  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7428  			BPF_EXIT_INSN(),
7429  		},
7430  		INTERNAL,
7431  		{ },
7432  		{ { 0, -1 } }
7433  	},
7434  	{
7435  		"ALU64_ARSH_X: Shift == 32, low word",
7436  		.u.insns_int = {
7437  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7438  			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7439  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7440  			BPF_EXIT_INSN(),
7441  		},
7442  		INTERNAL,
7443  		{ },
7444  		{ { 0, 0x81234567 } }
7445  	},
7446  	{
7447  		"ALU64_ARSH_X: Shift == 32, high word",
7448  		.u.insns_int = {
7449  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7450  			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7451  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7452  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7453  			BPF_EXIT_INSN(),
7454  		},
7455  		INTERNAL,
7456  		{ },
7457  		{ { 0, -1 } }
7458  	},
7459  	{
7460  		"ALU64_ARSH_X: Zero shift, low word",
7461  		.u.insns_int = {
7462  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7463  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7464  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7465  			BPF_EXIT_INSN(),
7466  		},
7467  		INTERNAL,
7468  		{ },
7469  		{ { 0, 0x89abcdef } }
7470  	},
7471  	{
7472  		"ALU64_ARSH_X: Zero shift, high word",
7473  		.u.insns_int = {
7474  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7475  			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7476  			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7477  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7478  			BPF_EXIT_INSN(),
7479  		},
7480  		INTERNAL,
7481  		{ },
7482  		{ { 0, 0x81234567 } }
7483  	},
7484  	/* BPF_ALU | BPF_ARSH | BPF_K */
7485  	{
7486  		"ALU32_ARSH_K: -1234 >> 7 = -10",
7487  		.u.insns_int = {
7488  			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7489  			BPF_ALU32_IMM(BPF_ARSH, R0, 7),
7490  			BPF_EXIT_INSN(),
7491  		},
7492  		INTERNAL,
7493  		{ },
7494  		{ { 0, -10 } }
7495  	},
7496  	{
7497  		"ALU32_ARSH_K: -1234 >> 0 = -1234",
7498  		.u.insns_int = {
7499  			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7500  			BPF_ALU32_IMM(BPF_ARSH, R0, 0),
7501  			BPF_EXIT_INSN(),
7502  		},
7503  		INTERNAL,
7504  		{ },
7505  		{ { 0, -1234 } }
7506  	},
7507  	{
7508  		"ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7509  		.u.insns_int = {
7510  			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7511  			BPF_ALU64_IMM(BPF_ARSH, R0, 40),
7512  			BPF_EXIT_INSN(),
7513  		},
7514  		INTERNAL,
7515  		{ },
7516  		{ { 0, 0xffff00ff } },
7517  	},
7518  	{
7519  		"ALU64_ARSH_K: Shift < 32, low word",
7520  		.u.insns_int = {
7521  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7522  			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7523  			BPF_EXIT_INSN(),
7524  		},
7525  		INTERNAL,
7526  		{ },
7527  		{ { 0, 0x56789abc } }
7528  	},
7529  	{
7530  		"ALU64_ARSH_K: Shift < 32, high word",
7531  		.u.insns_int = {
7532  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7533  			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7534  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7535  			BPF_EXIT_INSN(),
7536  		},
7537  		INTERNAL,
7538  		{ },
7539  		{ { 0, 0xfff81234 } }
7540  	},
7541  	{
7542  		"ALU64_ARSH_K: Shift > 32, low word",
7543  		.u.insns_int = {
7544  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7545  			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7546  			BPF_EXIT_INSN(),
7547  		},
7548  		INTERNAL,
7549  		{ },
7550  		{ { 0, 0xf8123456 } }
7551  	},
7552  	{
7553  		"ALU64_ARSH_K: Shift > 32, high word",
7554  		.u.insns_int = {
7555  			BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
7556  			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7557  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7558  			BPF_EXIT_INSN(),
7559  		},
7560  		INTERNAL,
7561  		{ },
7562  		{ { 0, -1 } }
7563  	},
7564  	{
7565  		"ALU64_ARSH_K: Shift == 32, low word",
7566  		.u.insns_int = {
7567  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7568  			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7569  			BPF_EXIT_INSN(),
7570  		},
7571  		INTERNAL,
7572  		{ },
7573  		{ { 0, 0x81234567 } }
7574  	},
7575  	{
7576  		"ALU64_ARSH_K: Shift == 32, high word",
7577  		.u.insns_int = {
7578  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7579  			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7580  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7581  			BPF_EXIT_INSN(),
7582  		},
7583  		INTERNAL,
7584  		{ },
7585  		{ { 0, -1 } }
7586  	},
7587  	{
7588  		"ALU64_ARSH_K: Zero shift",
7589  		.u.insns_int = {
7590  			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7591  			BPF_ALU64_IMM(BPF_ARSH, R0, 0),
7592  			BPF_EXIT_INSN(),
7593  		},
7594  		INTERNAL,
7595  		{ },
7596  		{ { 0, 0x89abcdef } }
7597  	},
7598  	/* BPF_ALU | BPF_NEG */
7599  	{
7600  		"ALU_NEG: -(3) = -3",
7601  		.u.insns_int = {
7602  			BPF_ALU32_IMM(BPF_MOV, R0, 3),
7603  			BPF_ALU32_IMM(BPF_NEG, R0, 0),
7604  			BPF_EXIT_INSN(),
7605  		},
7606  		INTERNAL,
7607  		{ },
7608  		{ { 0, -3 } },
7609  	},
7610  	{
7611  		"ALU_NEG: -(-3) = 3",
7612  		.u.insns_int = {
7613  			BPF_ALU32_IMM(BPF_MOV, R0, -3),
7614  			BPF_ALU32_IMM(BPF_NEG, R0, 0),
7615  			BPF_EXIT_INSN(),
7616  		},
7617  		INTERNAL,
7618  		{ },
7619  		{ { 0, 3 } },
7620  	},
7621  	{
7622  		"ALU64_NEG: -(3) = -3",
7623  		.u.insns_int = {
7624  			BPF_LD_IMM64(R0, 3),
7625  			BPF_ALU64_IMM(BPF_NEG, R0, 0),
7626  			BPF_EXIT_INSN(),
7627  		},
7628  		INTERNAL,
7629  		{ },
7630  		{ { 0, -3 } },
7631  	},
7632  	{
7633  		"ALU64_NEG: -(-3) = 3",
7634  		.u.insns_int = {
7635  			BPF_LD_IMM64(R0, -3),
7636  			BPF_ALU64_IMM(BPF_NEG, R0, 0),
7637  			BPF_EXIT_INSN(),
7638  		},
7639  		INTERNAL,
7640  		{ },
7641  		{ { 0, 3 } },
7642  	},
7643  	/* BPF_ALU | BPF_END | BPF_FROM_BE */
7644  	{
7645  		"ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7646  		.u.insns_int = {
7647  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7648  			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7649  			BPF_EXIT_INSN(),
7650  		},
7651  		INTERNAL,
7652  		{ },
7653  		{ { 0,  cpu_to_be16(0xcdef) } },
7654  	},
7655  	{
7656  		"ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7657  		.u.insns_int = {
7658  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7659  			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7660  			BPF_ALU64_REG(BPF_MOV, R1, R0),
7661  			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7662  			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7663  			BPF_EXIT_INSN(),
7664  		},
7665  		INTERNAL,
7666  		{ },
7667  		{ { 0, cpu_to_be32(0x89abcdef) } },
7668  	},
7669  	{
7670  		"ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7671  		.u.insns_int = {
7672  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7673  			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7674  			BPF_EXIT_INSN(),
7675  		},
7676  		INTERNAL,
7677  		{ },
7678  		{ { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
7679  	},
7680  	{
7681  		"ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7682  		.u.insns_int = {
7683  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7684  			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7685  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7686  			BPF_EXIT_INSN(),
7687  		},
7688  		INTERNAL,
7689  		{ },
7690  		{ { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
7691  	},
7692  	/* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7693  	{
7694  		"ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7695  		.u.insns_int = {
7696  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7697  			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7698  			BPF_EXIT_INSN(),
7699  		},
7700  		INTERNAL,
7701  		{ },
7702  		{ { 0,  cpu_to_be16(0x3210) } },
7703  	},
7704  	{
7705  		"ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7706  		.u.insns_int = {
7707  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7708  			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7709  			BPF_ALU64_REG(BPF_MOV, R1, R0),
7710  			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7711  			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7712  			BPF_EXIT_INSN(),
7713  		},
7714  		INTERNAL,
7715  		{ },
7716  		{ { 0, cpu_to_be32(0x76543210) } },
7717  	},
7718  	{
7719  		"ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7720  		.u.insns_int = {
7721  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7722  			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7723  			BPF_EXIT_INSN(),
7724  		},
7725  		INTERNAL,
7726  		{ },
7727  		{ { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
7728  	},
7729  	{
7730  		"ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7731  		.u.insns_int = {
7732  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7733  			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7734  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7735  			BPF_EXIT_INSN(),
7736  		},
7737  		INTERNAL,
7738  		{ },
7739  		{ { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
7740  	},
7741  	/* BPF_ALU | BPF_END | BPF_FROM_LE */
7742  	{
7743  		"ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7744  		.u.insns_int = {
7745  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7746  			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7747  			BPF_EXIT_INSN(),
7748  		},
7749  		INTERNAL,
7750  		{ },
7751  		{ { 0, cpu_to_le16(0xcdef) } },
7752  	},
7753  	{
7754  		"ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7755  		.u.insns_int = {
7756  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7757  			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7758  			BPF_ALU64_REG(BPF_MOV, R1, R0),
7759  			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7760  			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7761  			BPF_EXIT_INSN(),
7762  		},
7763  		INTERNAL,
7764  		{ },
7765  		{ { 0, cpu_to_le32(0x89abcdef) } },
7766  	},
7767  	{
7768  		"ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7769  		.u.insns_int = {
7770  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7771  			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7772  			BPF_EXIT_INSN(),
7773  		},
7774  		INTERNAL,
7775  		{ },
7776  		{ { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
7777  	},
7778  	{
7779  		"ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7780  		.u.insns_int = {
7781  			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7782  			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7783  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7784  			BPF_EXIT_INSN(),
7785  		},
7786  		INTERNAL,
7787  		{ },
7788  		{ { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
7789  	},
7790  	/* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7791  	{
7792  		"ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7793  		.u.insns_int = {
7794  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7795  			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7796  			BPF_EXIT_INSN(),
7797  		},
7798  		INTERNAL,
7799  		{ },
7800  		{ { 0,  cpu_to_le16(0x3210) } },
7801  	},
7802  	{
7803  		"ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
7804  		.u.insns_int = {
7805  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7806  			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7807  			BPF_ALU64_REG(BPF_MOV, R1, R0),
7808  			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7809  			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7810  			BPF_EXIT_INSN(),
7811  		},
7812  		INTERNAL,
7813  		{ },
7814  		{ { 0, cpu_to_le32(0x76543210) } },
7815  	},
7816  	{
7817  		"ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
7818  		.u.insns_int = {
7819  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7820  			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7821  			BPF_EXIT_INSN(),
7822  		},
7823  		INTERNAL,
7824  		{ },
7825  		{ { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
7826  	},
7827  	{
7828  		"ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
7829  		.u.insns_int = {
7830  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7831  			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7832  			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7833  			BPF_EXIT_INSN(),
7834  		},
7835  		INTERNAL,
7836  		{ },
7837  		{ { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
7838  	},
7839  	/* BPF_LDX_MEM B/H/W/DW */
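	/*
	 * The tests below store a double word on the stack and load a
	 * sub-word back from it. The load offset is endian-dependent so
	 * that the least-significant bytes of the stored value are read
	 * on both little- and big-endian hosts.
	 */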
7840  	{
7841  		"BPF_LDX_MEM | BPF_B",
7842  		.u.insns_int = {
7843  			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7844  			BPF_LD_IMM64(R2, 0x0000000000000008ULL),
7845  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7846  #ifdef __BIG_ENDIAN
7847  			BPF_LDX_MEM(BPF_B, R0, R10, -1),
7848  #else
7849  			BPF_LDX_MEM(BPF_B, R0, R10, -8),
7850  #endif
7851  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7852  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7853  			BPF_EXIT_INSN(),
7854  		},
7855  		INTERNAL,
7856  		{ },
7857  		{ { 0, 0 } },
7858  		.stack_depth = 8,
7859  	},
7860  	{
7861  		"BPF_LDX_MEM | BPF_B, MSB set",
7862  		.u.insns_int = {
7863  			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7864  			BPF_LD_IMM64(R2, 0x0000000000000088ULL),
7865  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7866  #ifdef __BIG_ENDIAN
7867  			BPF_LDX_MEM(BPF_B, R0, R10, -1),
7868  #else
7869  			BPF_LDX_MEM(BPF_B, R0, R10, -8),
7870  #endif
7871  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7872  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7873  			BPF_EXIT_INSN(),
7874  		},
7875  		INTERNAL,
7876  		{ },
7877  		{ { 0, 0 } },
7878  		.stack_depth = 8,
7879  	},
7880  	{
7881  		"BPF_LDX_MEM | BPF_H",
7882  		.u.insns_int = {
7883  			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7884  			BPF_LD_IMM64(R2, 0x0000000000000708ULL),
7885  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7886  #ifdef __BIG_ENDIAN
7887  			BPF_LDX_MEM(BPF_H, R0, R10, -2),
7888  #else
7889  			BPF_LDX_MEM(BPF_H, R0, R10, -8),
7890  #endif
7891  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7892  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7893  			BPF_EXIT_INSN(),
7894  		},
7895  		INTERNAL,
7896  		{ },
7897  		{ { 0, 0 } },
7898  		.stack_depth = 8,
7899  	},
7900  	{
7901  		"BPF_LDX_MEM | BPF_H, MSB set",
7902  		.u.insns_int = {
7903  			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7904  			BPF_LD_IMM64(R2, 0x0000000000008788ULL),
7905  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7906  #ifdef __BIG_ENDIAN
7907  			BPF_LDX_MEM(BPF_H, R0, R10, -2),
7908  #else
7909  			BPF_LDX_MEM(BPF_H, R0, R10, -8),
7910  #endif
7911  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7912  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7913  			BPF_EXIT_INSN(),
7914  		},
7915  		INTERNAL,
7916  		{ },
7917  		{ { 0, 0 } },
7918  		.stack_depth = 8,
7919  	},
7920  	{
7921  		"BPF_LDX_MEM | BPF_W",
7922  		.u.insns_int = {
7923  			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7924  			BPF_LD_IMM64(R2, 0x0000000005060708ULL),
7925  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7926  #ifdef __BIG_ENDIAN
7927  			BPF_LDX_MEM(BPF_W, R0, R10, -4),
7928  #else
7929  			BPF_LDX_MEM(BPF_W, R0, R10, -8),
7930  #endif
7931  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7932  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7933  			BPF_EXIT_INSN(),
7934  		},
7935  		INTERNAL,
7936  		{ },
7937  		{ { 0, 0 } },
7938  		.stack_depth = 8,
7939  	},
7940  	{
7941  		"BPF_LDX_MEM | BPF_W, MSB set",
7942  		.u.insns_int = {
7943  			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7944  			BPF_LD_IMM64(R2, 0x0000000085868788ULL),
7945  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7946  #ifdef __BIG_ENDIAN
7947  			BPF_LDX_MEM(BPF_W, R0, R10, -4),
7948  #else
7949  			BPF_LDX_MEM(BPF_W, R0, R10, -8),
7950  #endif
7951  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7952  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7953  			BPF_EXIT_INSN(),
7954  		},
7955  		INTERNAL,
7956  		{ },
7957  		{ { 0, 0 } },
7958  		.stack_depth = 8,
7959  	},
7960  	/* BPF_STX_MEM B/H/W/DW */
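	/*
	 * As in the LDX tests above, the sub-word stores use an
	 * endian-dependent offset so that they overwrite the
	 * least-significant bytes of the stored double word on any host.
	 */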
7961  	{
7962  		"BPF_STX_MEM | BPF_B",
7963  		.u.insns_int = {
7964  			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7965  			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
7966  			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
7967  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7968  #ifdef __BIG_ENDIAN
7969  			BPF_STX_MEM(BPF_B, R10, R2, -1),
7970  #else
7971  			BPF_STX_MEM(BPF_B, R10, R2, -8),
7972  #endif
7973  			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7974  			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7975  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7976  			BPF_EXIT_INSN(),
7977  		},
7978  		INTERNAL,
7979  		{ },
7980  		{ { 0, 0 } },
7981  		.stack_depth = 8,
7982  	},
7983  	{
7984  		"BPF_STX_MEM | BPF_B, MSB set",
7985  		.u.insns_int = {
7986  			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7987  			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
7988  			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
7989  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7990  #ifdef __BIG_ENDIAN
7991  			BPF_STX_MEM(BPF_B, R10, R2, -1),
7992  #else
7993  			BPF_STX_MEM(BPF_B, R10, R2, -8),
7994  #endif
7995  			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7996  			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7997  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7998  			BPF_EXIT_INSN(),
7999  		},
8000  		INTERNAL,
8001  		{ },
8002  		{ { 0, 0 } },
8003  		.stack_depth = 8,
8004  	},
8005  	{
8006  		"BPF_STX_MEM | BPF_H",
8007  		.u.insns_int = {
8008  			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8009  			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8010  			BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
8011  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8012  #ifdef __BIG_ENDIAN
8013  			BPF_STX_MEM(BPF_H, R10, R2, -2),
8014  #else
8015  			BPF_STX_MEM(BPF_H, R10, R2, -8),
8016  #endif
8017  			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8018  			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8019  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8020  			BPF_EXIT_INSN(),
8021  		},
8022  		INTERNAL,
8023  		{ },
8024  		{ { 0, 0 } },
8025  		.stack_depth = 8,
8026  	},
8027  	{
8028  		"BPF_STX_MEM | BPF_H, MSB set",
8029  		.u.insns_int = {
8030  			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8031  			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8032  			BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
8033  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8034  #ifdef __BIG_ENDIAN
8035  			BPF_STX_MEM(BPF_H, R10, R2, -2),
8036  #else
8037  			BPF_STX_MEM(BPF_H, R10, R2, -8),
8038  #endif
8039  			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8040  			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8041  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8042  			BPF_EXIT_INSN(),
8043  		},
8044  		INTERNAL,
8045  		{ },
8046  		{ { 0, 0 } },
8047  		.stack_depth = 8,
8048  	},
8049  	{
8050  		"BPF_STX_MEM | BPF_W",
8051  		.u.insns_int = {
8052  			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8053  			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8054  			BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
8055  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8056  #ifdef __BIG_ENDIAN
8057  			BPF_STX_MEM(BPF_W, R10, R2, -4),
8058  #else
8059  			BPF_STX_MEM(BPF_W, R10, R2, -8),
8060  #endif
8061  			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8062  			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8063  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8064  			BPF_EXIT_INSN(),
8065  		},
8066  		INTERNAL,
8067  		{ },
8068  		{ { 0, 0 } },
8069  		.stack_depth = 8,
8070  	},
8071  	{
8072  		"BPF_STX_MEM | BPF_W, MSB set",
8073  		.u.insns_int = {
8074  			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8075  			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8076  			BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
8077  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8078  #ifdef __BIG_ENDIAN
8079  			BPF_STX_MEM(BPF_W, R10, R2, -4),
8080  #else
8081  			BPF_STX_MEM(BPF_W, R10, R2, -8),
8082  #endif
8083  			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8084  			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8085  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8086  			BPF_EXIT_INSN(),
8087  		},
8088  		INTERNAL,
8089  		{ },
8090  		{ { 0, 0 } },
8091  		.stack_depth = 8,
8092  	},
8093  	/* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
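	/*
	 * Store a maximum-negative or maximum-positive pattern of the
	 * given width and read it back. Sub-word loads must zero-extend,
	 * so the all-ones patterns are expected back without sign
	 * extension.
	 */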
8094  	{
8095  		"ST_MEM_B: Store/Load byte: max negative",
8096  		.u.insns_int = {
8097  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8098  			BPF_ST_MEM(BPF_B, R10, -40, 0xff),
8099  			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8100  			BPF_EXIT_INSN(),
8101  		},
8102  		INTERNAL,
8103  		{ },
8104  		{ { 0, 0xff } },
8105  		.stack_depth = 40,
8106  	},
8107  	{
8108  		"ST_MEM_B: Store/Load byte: max positive",
8109  		.u.insns_int = {
8110  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8111  			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
8112  			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8113  			BPF_EXIT_INSN(),
8114  		},
8115  		INTERNAL,
8116  		{ },
8117  		{ { 0, 0x7f } },
8118  		.stack_depth = 40,
8119  	},
8120  	{
8121  		"STX_MEM_B: Store/Load byte: max negative",
8122  		.u.insns_int = {
8123  			BPF_LD_IMM64(R0, 0),
8124  			BPF_LD_IMM64(R1, 0xffLL),
8125  			BPF_STX_MEM(BPF_B, R10, R1, -40),
8126  			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8127  			BPF_EXIT_INSN(),
8128  		},
8129  		INTERNAL,
8130  		{ },
8131  		{ { 0, 0xff } },
8132  		.stack_depth = 40,
8133  	},
8134  	{
8135  		"ST_MEM_H: Store/Load half word: max negative",
8136  		.u.insns_int = {
8137  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8138  			BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
8139  			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8140  			BPF_EXIT_INSN(),
8141  		},
8142  		INTERNAL,
8143  		{ },
8144  		{ { 0, 0xffff } },
8145  		.stack_depth = 40,
8146  	},
8147  	{
8148  		"ST_MEM_H: Store/Load half word: max positive",
8149  		.u.insns_int = {
8150  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8151  			BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
8152  			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8153  			BPF_EXIT_INSN(),
8154  		},
8155  		INTERNAL,
8156  		{ },
8157  		{ { 0, 0x7fff } },
8158  		.stack_depth = 40,
8159  	},
8160  	{
8161  		"STX_MEM_H: Store/Load half word: max negative",
8162  		.u.insns_int = {
8163  			BPF_LD_IMM64(R0, 0),
8164  			BPF_LD_IMM64(R1, 0xffffLL),
8165  			BPF_STX_MEM(BPF_H, R10, R1, -40),
8166  			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8167  			BPF_EXIT_INSN(),
8168  		},
8169  		INTERNAL,
8170  		{ },
8171  		{ { 0, 0xffff } },
8172  		.stack_depth = 40,
8173  	},
8174  	{
8175  		"ST_MEM_W: Store/Load word: max negative",
8176  		.u.insns_int = {
8177  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8178  			BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
8179  			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8180  			BPF_EXIT_INSN(),
8181  		},
8182  		INTERNAL,
8183  		{ },
8184  		{ { 0, 0xffffffff } },
8185  		.stack_depth = 40,
8186  	},
8187  	{
8188  		"ST_MEM_W: Store/Load word: max positive",
8189  		.u.insns_int = {
8190  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8191  			BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
8192  			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8193  			BPF_EXIT_INSN(),
8194  		},
8195  		INTERNAL,
8196  		{ },
8197  		{ { 0, 0x7fffffff } },
8198  		.stack_depth = 40,
8199  	},
8200  	{
8201  		"STX_MEM_W: Store/Load word: max negative",
8202  		.u.insns_int = {
8203  			BPF_LD_IMM64(R0, 0),
8204  			BPF_LD_IMM64(R1, 0xffffffffLL),
8205  			BPF_STX_MEM(BPF_W, R10, R1, -40),
8206  			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8207  			BPF_EXIT_INSN(),
8208  		},
8209  		INTERNAL,
8210  		{ },
8211  		{ { 0, 0xffffffff } },
8212  		.stack_depth = 40,
8213  	},
8214  	{
8215  		"ST_MEM_DW: Store/Load double word: max negative",
8216  		.u.insns_int = {
8217  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8218  			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8219  			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8220  			BPF_EXIT_INSN(),
8221  		},
8222  		INTERNAL,
8223  		{ },
8224  		{ { 0, 0xffffffff } },
8225  		.stack_depth = 40,
8226  	},
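	/*
	 * The next test verifies that the 32-bit immediate of a double
	 * word store is sign-extended to 64 bits.
	 */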
8227  	{
8228  		"ST_MEM_DW: Store/Load double word: max negative 2",
8229  		.u.insns_int = {
8230  			BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
8231  			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
8232  			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8233  			BPF_LDX_MEM(BPF_DW, R2, R10, -40),
8234  			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
8235  			BPF_MOV32_IMM(R0, 2),
8236  			BPF_EXIT_INSN(),
8237  			BPF_MOV32_IMM(R0, 1),
8238  			BPF_EXIT_INSN(),
8239  		},
8240  		INTERNAL,
8241  		{ },
8242  		{ { 0, 0x1 } },
8243  		.stack_depth = 40,
8244  	},
8245  	{
8246  		"ST_MEM_DW: Store/Load double word: max positive",
8247  		.u.insns_int = {
8248  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8249  			BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
8250  			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8251  			BPF_EXIT_INSN(),
8252  		},
8253  		INTERNAL,
8254  		{ },
8255  		{ { 0, 0x7fffffff } },
8256  		.stack_depth = 40,
8257  	},
8258  	{
8259  		"STX_MEM_DW: Store/Load double word: max negative",
8260  		.u.insns_int = {
8261  			BPF_LD_IMM64(R0, 0),
8262  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8263  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8264  			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8265  			BPF_EXIT_INSN(),
8266  		},
8267  		INTERNAL,
8268  		{ },
8269  		{ { 0, 0xffffffff } },
8270  		.stack_depth = 40,
8271  	},
8272  	{
8273  		"STX_MEM_DW: Store double word: first word in memory",
8274  		.u.insns_int = {
8275  			BPF_LD_IMM64(R0, 0),
8276  			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8277  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8278  			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8279  			BPF_EXIT_INSN(),
8280  		},
8281  		INTERNAL,
8282  		{ },
8283  #ifdef __BIG_ENDIAN
8284  		{ { 0, 0x01234567 } },
8285  #else
8286  		{ { 0, 0x89abcdef } },
8287  #endif
8288  		.stack_depth = 40,
8289  	},
8290  	{
8291  		"STX_MEM_DW: Store double word: second word in memory",
8292  		.u.insns_int = {
8293  			BPF_LD_IMM64(R0, 0),
8294  			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8295  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8296  			BPF_LDX_MEM(BPF_W, R0, R10, -36),
8297  			BPF_EXIT_INSN(),
8298  		},
8299  		INTERNAL,
8300  		{ },
8301  #ifdef __BIG_ENDIAN
8302  		{ { 0, 0x89abcdef } },
8303  #else
8304  		{ { 0, 0x01234567 } },
8305  #endif
8306  		.stack_depth = 40,
8307  	},
8308  	/* BPF_STX | BPF_ATOMIC | BPF_W/DW */
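	/*
	 * The following two programs are generated at runtime by their
	 * fill handlers rather than listed here.
	 */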
8309  	{
8310  		"STX_XADD_W: X + 1 + 1 + 1 + ...",
8311  		{ },
8312  		INTERNAL,
8313  		{ },
8314  		{ { 0, 4134 } },
8315  		.fill_helper = bpf_fill_stxw,
8316  	},
8317  	{
8318  		"STX_XADD_DW: X + 1 + 1 + 1 + ...",
8319  		{ },
8320  		INTERNAL,
8321  		{ },
8322  		{ { 0, 4134 } },
8323  		.fill_helper = bpf_fill_stxdw,
8324  	},
8325  	/*
8326  	 * Exhaustive tests of atomic operation variants.
8327  	 * Individual tests are expanded from template macros for all
8328  	 * combinations of ALU operation, word size and fetching.
8329  	 */
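/*
 * For 32-bit wide operations, the upper half of the source operand is
 * poisoned. The expected results assume that only the lower 32 bits
 * take part in the operation.
 */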
8330  #define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8331  
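/* Check the value left in memory by the atomic operation. */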
8332  #define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result)	\
8333  {									\
8334  	"BPF_ATOMIC | " #width ", " #op ": Test: "			\
8335  		#old " " #logic " " #update " = " #result,		\
8336  	.u.insns_int = {						\
8337  		BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)),	\
8338  		BPF_ST_MEM(width, R10, -40, old),			\
8339  		BPF_ATOMIC_OP(width, op, R10, R5, -40),			\
8340  		BPF_LDX_MEM(width, R0, R10, -40),			\
8341  		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8342  		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8343  		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8344  		BPF_EXIT_INSN(),					\
8345  	},								\
8346  	INTERNAL,							\
8347  	{ },								\
8348  	{ { 0, result } },						\
8349  	.stack_depth = 40,						\
8350  }
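/* Check that R10, used to address the memory operand, is not clobbered. */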
8351  #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result)	\
8352  {									\
8353  	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: "	\
8354  		#old " " #logic " " #update " = " #result,		\
8355  	.u.insns_int = {						\
8356  		BPF_ALU64_REG(BPF_MOV, R1, R10),			\
8357  		BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)),	\
8358  		BPF_ST_MEM(width, R10, -40, old),			\
8359  		BPF_ATOMIC_OP(width, op, R10, R0, -40),			\
8360  		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
8361  		BPF_ALU64_REG(BPF_SUB, R0, R1),				\
8362  		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8363  		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8364  		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8365  		BPF_EXIT_INSN(),					\
8366  	},								\
8367  	INTERNAL,							\
8368  	{ },								\
8369  	{ { 0, 0 } },							\
8370  	.stack_depth = 40,						\
8371  }
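/* Check that R0 is not clobbered when it takes no part in the operation. */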
8372  #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result)	\
8373  {									\
8374  	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: "	\
8375  		#old " " #logic " " #update " = " #result,		\
8376  	.u.insns_int = {						\
8377  		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
8378  		BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)),	\
8379  		BPF_ST_MEM(width, R10, -40, old),			\
8380  		BPF_ATOMIC_OP(width, op, R10, R1, -40),			\
8381  		BPF_ALU64_REG(BPF_SUB, R0, R10),			\
8382  		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8383  		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8384  		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8385  		BPF_EXIT_INSN(),					\
8386  	},								\
8387  	INTERNAL,                                                       \
8388  	{ },                                                            \
8389  	{ { 0, 0 } },                                                   \
8390  	.stack_depth = 40,                                              \
8391  }
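/*
 * Check the value left in the source register: the old memory value
 * with BPF_FETCH, the original update value otherwise.
 */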
8392  #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result)	\
8393  {									\
8394  	"BPF_ATOMIC | " #width ", " #op ": Test fetch: "		\
8395  		#old " " #logic " " #update " = " #result,		\
8396  	.u.insns_int = {						\
8397  		BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)),	\
8398  		BPF_ST_MEM(width, R10, -40, old),			\
8399  		BPF_ATOMIC_OP(width, op, R10, R3, -40),			\
8400  		BPF_ALU32_REG(BPF_MOV, R0, R3),                         \
8401  		BPF_EXIT_INSN(),					\
8402  	},								\
8403  	INTERNAL,                                                       \
8404  	{ },                                                            \
8405  	{ { 0, (op) & BPF_FETCH ? old : update } },			\
8406  	.stack_depth = 40,                                              \
8407  }
8408  	/* BPF_ATOMIC | BPF_W: BPF_ADD */
8409  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8410  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8411  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8412  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8413  	/* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
8414  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8415  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8416  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8417  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8418  	/* BPF_ATOMIC | BPF_DW: BPF_ADD */
8419  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8420  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8421  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8422  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8423  	/* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
8424  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8425  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8426  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8427  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8428  	/* BPF_ATOMIC | BPF_W: BPF_AND */
8429  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8430  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8431  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8432  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8433  	/* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
8434  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8435  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8436  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8437  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8438  	/* BPF_ATOMIC | BPF_DW: BPF_AND */
8439  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8440  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8441  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8442  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8443  	/* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
8444  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8445  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8446  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8447  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8448  	/* BPF_ATOMIC | BPF_W: BPF_OR */
8449  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8450  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8451  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8452  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8453  	/* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
8454  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8455  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8456  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8457  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8458  	/* BPF_ATOMIC | BPF_DW: BPF_OR */
8459  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8460  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8461  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8462  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8463  	/* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
8464  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8465  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8466  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8467  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8468  	/* BPF_ATOMIC | BPF_W: BPF_XOR */
8469  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8470  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8471  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8472  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8473  	/* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
8474  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8475  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8476  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8477  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8478  	/* BPF_ATOMIC | BPF_DW: BPF_XOR */
8479  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8480  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8481  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8482  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8483  	/* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
8484  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8485  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8486  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8487  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8488  	/* BPF_ATOMIC | BPF_W: BPF_XCHG */
8489  	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8490  	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8491  	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8492  	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8493  	/* BPF_ATOMIC | BPF_DW: BPF_XCHG */
8494  	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8495  	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8496  	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8497  	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8498  #undef BPF_ATOMIC_POISON
8499  #undef BPF_ATOMIC_OP_TEST1
8500  #undef BPF_ATOMIC_OP_TEST2
8501  #undef BPF_ATOMIC_OP_TEST3
8502  #undef BPF_ATOMIC_OP_TEST4
8503  	/* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
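	/*
	 * BPF_CMPXCHG compares R0 with the memory operand, stores the
	 * source register on a match, and always leaves the old memory
	 * value in R0.
	 */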
8504  	{
8505  		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
8506  		.u.insns_int = {
8507  			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8508  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8509  			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8510  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8511  			BPF_EXIT_INSN(),
8512  		},
8513  		INTERNAL,
8514  		{ },
8515  		{ { 0, 0x01234567 } },
8516  		.stack_depth = 40,
8517  	},
8518  	{
8519  		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
8520  		.u.insns_int = {
8521  			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8522  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8523  			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8524  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8525  			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8526  			BPF_EXIT_INSN(),
8527  		},
8528  		INTERNAL,
8529  		{ },
8530  		{ { 0, 0x89abcdef } },
8531  		.stack_depth = 40,
8532  	},
8533  	{
8534  		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
8535  		.u.insns_int = {
8536  			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8537  			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
8538  			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8539  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8540  			BPF_EXIT_INSN(),
8541  		},
8542  		INTERNAL,
8543  		{ },
8544  		{ { 0, 0x01234567 } },
8545  		.stack_depth = 40,
8546  	},
8547  	{
8548  		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
8549  		.u.insns_int = {
8550  			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8551  			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
8552  			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8553  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8554  			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8555  			BPF_EXIT_INSN(),
8556  		},
8557  		INTERNAL,
8558  		{ },
8559  		{ { 0, 0x01234567 } },
8560  		.stack_depth = 40,
8561  	},
8562  	{
8563  		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
8564  		.u.insns_int = {
8565  			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8566  			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8567  			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8568  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8569  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8570  			BPF_ALU32_REG(BPF_MOV, R0, R3),
8571  			BPF_EXIT_INSN(),
8572  		},
8573  		INTERNAL,
8574  		{ },
8575  		{ { 0, 0x89abcdef } },
8576  		.stack_depth = 40,
8577  	},
8578  	/* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
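	/*
	 * The 64-bit variants check the full 64-bit result: the JNE guard
	 * skips the final subtraction on a mismatch, so the test returns
	 * zero only when R0 holds the expected 64-bit value.
	 */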
8579  	{
8580  		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
8581  		.u.insns_int = {
8582  			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8583  			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8584  			BPF_ALU64_REG(BPF_MOV, R0, R1),
8585  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8586  			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8587  			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8588  			BPF_ALU64_REG(BPF_SUB, R0, R1),
8589  			BPF_EXIT_INSN(),
8590  		},
8591  		INTERNAL,
8592  		{ },
8593  		{ { 0, 0 } },
8594  		.stack_depth = 40,
8595  	},
8596  	{
8597  		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
8598  		.u.insns_int = {
8599  			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8600  			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8601  			BPF_ALU64_REG(BPF_MOV, R0, R1),
8602  			BPF_STX_MEM(BPF_DW, R10, R0, -40),
8603  			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8604  			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8605  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8606  			BPF_ALU64_REG(BPF_SUB, R0, R2),
8607  			BPF_EXIT_INSN(),
8608  		},
8609  		INTERNAL,
8610  		{ },
8611  		{ { 0, 0 } },
8612  		.stack_depth = 40,
8613  	},
8614  	{
8615  		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
8616  		.u.insns_int = {
8617  			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8618  			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8619  			BPF_ALU64_REG(BPF_MOV, R0, R1),
8620  			BPF_ALU64_IMM(BPF_ADD, R0, 1),
8621  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8622  			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8623  			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8624  			BPF_ALU64_REG(BPF_SUB, R0, R1),
8625  			BPF_EXIT_INSN(),
8626  		},
8627  		INTERNAL,
8628  		{ },
8629  		{ { 0, 0 } },
8630  		.stack_depth = 40,
8631  	},
8632  	{
8633  		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
8634  		.u.insns_int = {
8635  			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8636  			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8637  			BPF_ALU64_REG(BPF_MOV, R0, R1),
8638  			BPF_ALU64_IMM(BPF_ADD, R0, 1),
8639  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8640  			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8641  			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8642  			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8643  			BPF_ALU64_REG(BPF_SUB, R0, R1),
8644  			BPF_EXIT_INSN(),
8645  		},
8646  		INTERNAL,
8647  		{ },
8648  		{ { 0, 0 } },
8649  		.stack_depth = 40,
8650  	},
8651  	{
8652  		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
8653  		.u.insns_int = {
8654  			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8655  			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8656  			BPF_ALU64_REG(BPF_MOV, R0, R1),
8657  			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8658  			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8659  			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8660  			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8661  			BPF_ALU64_REG(BPF_SUB, R0, R2),
8662  			BPF_EXIT_INSN(),
8663  		},
8664  		INTERNAL,
8665  		{ },
8666  		{ { 0, 0 } },
8667  		.stack_depth = 40,
8668  	},
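	/*
	 * Most of the JMP32 tests below follow a common pattern: a first
	 * branch that must not be taken, then one that must be taken to
	 * skip the instruction that zeroes R0. A wrong decision in either
	 * direction changes the returned value.
	 */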
8669  	/* BPF_JMP32 | BPF_JEQ | BPF_K */
8670  	{
8671  		"JMP32_JEQ_K: Small immediate",
8672  		.u.insns_int = {
8673  			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8674  			BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
8675  			BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
8676  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8677  			BPF_EXIT_INSN(),
8678  		},
8679  		INTERNAL,
8680  		{ },
8681  		{ { 0, 123 } }
8682  	},
8683  	{
8684  		"JMP32_JEQ_K: Large immediate",
8685  		.u.insns_int = {
8686  			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
8687  			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
8688  			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
8689  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8690  			BPF_EXIT_INSN(),
8691  		},
8692  		INTERNAL,
8693  		{ },
8694  		{ { 0, 12345678 } }
8695  	},
8696  	{
8697  		"JMP32_JEQ_K: negative immediate",
8698  		.u.insns_int = {
8699  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8700  			BPF_JMP32_IMM(BPF_JEQ, R0,  123, 1),
8701  			BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
8702  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8703  			BPF_EXIT_INSN(),
8704  		},
8705  		INTERNAL,
8706  		{ },
8707  		{ { 0, -123 } }
8708  	},
8709  	/* BPF_JMP32 | BPF_JEQ | BPF_X */
8710  	{
8711  		"JMP32_JEQ_X",
8712  		.u.insns_int = {
8713  			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
8714  			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
8715  			BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
8716  			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
8717  			BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
8718  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8719  			BPF_EXIT_INSN(),
8720  		},
8721  		INTERNAL,
8722  		{ },
8723  		{ { 0, 1234 } }
8724  	},
8725  	/* BPF_JMP32 | BPF_JNE | BPF_K */
8726  	{
8727  		"JMP32_JNE_K: Small immediate",
8728  		.u.insns_int = {
8729  			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8730  			BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
8731  			BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
8732  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8733  			BPF_EXIT_INSN(),
8734  		},
8735  		INTERNAL,
8736  		{ },
8737  		{ { 0, 123 } }
8738  	},
8739  	{
8740  		"JMP32_JNE_K: Large immediate",
8741  		.u.insns_int = {
8742  			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
8743  			BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
8744  			BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
8745  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8746  			BPF_EXIT_INSN(),
8747  		},
8748  		INTERNAL,
8749  		{ },
8750  		{ { 0, 12345678 } }
8751  	},
8752  	{
8753  		"JMP32_JNE_K: negative immediate",
8754  		.u.insns_int = {
8755  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8756  			BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
8757  			BPF_JMP32_IMM(BPF_JNE, R0,  123, 1),
8758  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8759  			BPF_EXIT_INSN(),
8760  		},
8761  		INTERNAL,
8762  		{ },
8763  		{ { 0, -123 } }
8764  	},
8765  	/* BPF_JMP32 | BPF_JNE | BPF_X */
8766  	{
8767  		"JMP32_JNE_X",
8768  		.u.insns_int = {
8769  			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
8770  			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
8771  			BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
8772  			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
8773  			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
8774  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8775  			BPF_EXIT_INSN(),
8776  		},
8777  		INTERNAL,
8778  		{ },
8779  		{ { 0, 1234 } }
8780  	},
8781  	/* BPF_JMP32 | BPF_JSET | BPF_K */
8782  	{
8783  		"JMP32_JSET_K: Small immediate",
8784  		.u.insns_int = {
8785  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8786  			BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
8787  			BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
8788  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8789  			BPF_EXIT_INSN(),
8790  		},
8791  		INTERNAL,
8792  		{ },
8793  		{ { 0, 1 } }
8794  	},
8795  	{
8796  		"JMP32_JSET_K: Large immediate",
8797  		.u.insns_int = {
8798  			BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
8799  			BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
8800  			BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
8801  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8802  			BPF_EXIT_INSN(),
8803  		},
8804  		INTERNAL,
8805  		{ },
8806  		{ { 0, 0x40000000 } }
8807  	},
8808  	{
8809  		"JMP32_JSET_K: negative immediate",
8810  		.u.insns_int = {
8811  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8812  			BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
8813  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8814  			BPF_EXIT_INSN(),
8815  		},
8816  		INTERNAL,
8817  		{ },
8818  		{ { 0, -123 } }
8819  	},
8820  	/* BPF_JMP32 | BPF_JSET | BPF_X */
8821  	{
8822  		"JMP32_JSET_X",
8823  		.u.insns_int = {
8824  			BPF_ALU32_IMM(BPF_MOV, R0, 8),
8825  			BPF_ALU32_IMM(BPF_MOV, R1, 7),
8826  			BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
8827  			BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
8828  			BPF_JMP32_REG(BPF_JSET, R0, R1, 1),
8829  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8830  			BPF_EXIT_INSN(),
8831  		},
8832  		INTERNAL,
8833  		{ },
8834  		{ { 0, 8 } }
8835  	},
8836  	/* BPF_JMP32 | BPF_JGT | BPF_K */
8837  	{
8838  		"JMP32_JGT_K: Small immediate",
8839  		.u.insns_int = {
8840  			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8841  			BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
8842  			BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
8843  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8844  			BPF_EXIT_INSN(),
8845  		},
8846  		INTERNAL,
8847  		{ },
8848  		{ { 0, 123 } }
8849  	},
8850  	{
8851  		"JMP32_JGT_K: Large immediate",
8852  		.u.insns_int = {
8853  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8854  			BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
8855  			BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
8856  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8857  			BPF_EXIT_INSN(),
8858  		},
8859  		INTERNAL,
8860  		{ },
8861  		{ { 0, 0xfffffffe } }
8862  	},
8863  	/* BPF_JMP32 | BPF_JGT | BPF_X */
8864  	{
8865  		"JMP32_JGT_X",
8866  		.u.insns_int = {
8867  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8868  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8869  			BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
8870  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8871  			BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
8872  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8873  			BPF_EXIT_INSN(),
8874  		},
8875  		INTERNAL,
8876  		{ },
8877  		{ { 0, 0xfffffffe } }
8878  	},
8879  	/* BPF_JMP32 | BPF_JGE | BPF_K */
8880  	{
8881  		"JMP32_JGE_K: Small immediate",
8882  		.u.insns_int = {
8883  			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8884  			BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
8885  			BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
8886  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8887  			BPF_EXIT_INSN(),
8888  		},
8889  		INTERNAL,
8890  		{ },
8891  		{ { 0, 123 } }
8892  	},
8893  	{
8894  		"JMP32_JGE_K: Large immediate",
8895  		.u.insns_int = {
8896  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8897  			BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
8898  			BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
8899  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8900  			BPF_EXIT_INSN(),
8901  		},
8902  		INTERNAL,
8903  		{ },
8904  		{ { 0, 0xfffffffe } }
8905  	},
8906  	/* BPF_JMP32 | BPF_JGE | BPF_X */
8907  	{
8908  		"JMP32_JGE_X",
8909  		.u.insns_int = {
8910  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8911  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8912  			BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
8913  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
8914  			BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
8915  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8916  			BPF_EXIT_INSN(),
8917  		},
8918  		INTERNAL,
8919  		{ },
8920  		{ { 0, 0xfffffffe } }
8921  	},
8922  	/* BPF_JMP32 | BPF_JLT | BPF_K */
8923  	{
8924  		"JMP32_JLT_K: Small immediate",
8925  		.u.insns_int = {
8926  			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8927  			BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
8928  			BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
8929  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8930  			BPF_EXIT_INSN(),
8931  		},
8932  		INTERNAL,
8933  		{ },
8934  		{ { 0, 123 } }
8935  	},
8936  	{
8937  		"JMP32_JLT_K: Large immediate",
8938  		.u.insns_int = {
8939  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8940  			BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
8941  			BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
8942  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8943  			BPF_EXIT_INSN(),
8944  		},
8945  		INTERNAL,
8946  		{ },
8947  		{ { 0, 0xfffffffe } }
8948  	},
8949  	/* BPF_JMP32 | BPF_JLT | BPF_X */
8950  	{
8951  		"JMP32_JLT_X",
8952  		.u.insns_int = {
8953  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8954  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8955  			BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
8956  			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8957  			BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
8958  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8959  			BPF_EXIT_INSN(),
8960  		},
8961  		INTERNAL,
8962  		{ },
8963  		{ { 0, 0xfffffffe } }
8964  	},
8965  	/* BPF_JMP32 | BPF_JLE | BPF_K */
8966  	{
8967  		"JMP32_JLE_K: Small immediate",
8968  		.u.insns_int = {
8969  			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8970  			BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
8971  			BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
8972  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8973  			BPF_EXIT_INSN(),
8974  		},
8975  		INTERNAL,
8976  		{ },
8977  		{ { 0, 123 } }
8978  	},
8979  	{
8980  		"JMP32_JLE_K: Large immediate",
8981  		.u.insns_int = {
8982  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8983  			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
8984  			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
8985  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8986  			BPF_EXIT_INSN(),
8987  		},
8988  		INTERNAL,
8989  		{ },
8990  		{ { 0, 0xfffffffe } }
8991  	},
8992  	/* BPF_JMP32 | BPF_JLE | BPF_X */
8993  	{
8994  		"JMP32_JLE_X",
8995  		.u.insns_int = {
8996  			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8997  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8998  			BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
8999  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
9000  			BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
9001  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9002  			BPF_EXIT_INSN(),
9003  		},
9004  		INTERNAL,
9005  		{ },
9006  		{ { 0, 0xfffffffe } }
9007  	},
9008  	/* BPF_JMP32 | BPF_JSGT | BPF_K */
9009  	{
9010  		"JMP32_JSGT_K: Small immediate",
9011  		.u.insns_int = {
9012  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9013  			BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
9014  			BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
9015  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9016  			BPF_EXIT_INSN(),
9017  		},
9018  		INTERNAL,
9019  		{ },
9020  		{ { 0, -123 } }
9021  	},
9022  	{
9023  		"JMP32_JSGT_K: Large immediate",
9024  		.u.insns_int = {
9025  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9026  			BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
9027  			BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
9028  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9029  			BPF_EXIT_INSN(),
9030  		},
9031  		INTERNAL,
9032  		{ },
9033  		{ { 0, -12345678 } }
9034  	},
9035  	/* BPF_JMP32 | BPF_JSGT | BPF_X */
9036  	{
9037  		"JMP32_JSGT_X",
9038  		.u.insns_int = {
9039  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9040  			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9041  			BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
9042  			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9043  			BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
9044  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9045  			BPF_EXIT_INSN(),
9046  		},
9047  		INTERNAL,
9048  		{ },
9049  		{ { 0, -12345678 } }
9050  	},
9051  	/* BPF_JMP32 | BPF_JSGE | BPF_K */
9052  	{
9053  		"JMP32_JSGE_K: Small immediate",
9054  		.u.insns_int = {
9055  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9056  			BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
9057  			BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
9058  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9059  			BPF_EXIT_INSN(),
9060  		},
9061  		INTERNAL,
9062  		{ },
9063  		{ { 0, -123 } }
9064  	},
9065  	{
9066  		"JMP32_JSGE_K: Large immediate",
9067  		.u.insns_int = {
9068  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9069  			BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
9070  			BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
9071  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9072  			BPF_EXIT_INSN(),
9073  		},
9074  		INTERNAL,
9075  		{ },
9076  		{ { 0, -12345678 } }
9077  	},
9078  	/* BPF_JMP32 | BPF_JSGE | BPF_X */
9079  	{
9080  		"JMP32_JSGE_X",
9081  		.u.insns_int = {
9082  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9083  			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9084  			BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
9085  			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9086  			BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
9087  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9088  			BPF_EXIT_INSN(),
9089  		},
9090  		INTERNAL,
9091  		{ },
9092  		{ { 0, -12345678 } }
9093  	},
9094  	/* BPF_JMP32 | BPF_JSLT | BPF_K */
9095  	{
9096  		"JMP32_JSLT_K: Small immediate",
9097  		.u.insns_int = {
9098  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9099  			BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
9100  			BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
9101  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9102  			BPF_EXIT_INSN(),
9103  		},
9104  		INTERNAL,
9105  		{ },
9106  		{ { 0, -123 } }
9107  	},
9108  	{
9109  		"JMP32_JSLT_K: Large immediate",
9110  		.u.insns_int = {
9111  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9112  			BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
9113  			BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
9114  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9115  			BPF_EXIT_INSN(),
9116  		},
9117  		INTERNAL,
9118  		{ },
9119  		{ { 0, -12345678 } }
9120  	},
9121  	/* BPF_JMP32 | BPF_JSLT | BPF_X */
9122  	{
9123  		"JMP32_JSLT_X",
9124  		.u.insns_int = {
9125  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9126  			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9127  			BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
9128  			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9129  			BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
9130  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9131  			BPF_EXIT_INSN(),
9132  		},
9133  		INTERNAL,
9134  		{ },
9135  		{ { 0, -12345678 } }
9136  	},
9137  	/* BPF_JMP32 | BPF_JSLE | BPF_K */
9138  	{
9139  		"JMP32_JSLE_K: Small immediate",
9140  		.u.insns_int = {
9141  			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9142  			BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
9143  			BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
9144  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9145  			BPF_EXIT_INSN(),
9146  		},
9147  		INTERNAL,
9148  		{ },
9149  		{ { 0, -123 } }
9150  	},
9151  	{
9152  		"JMP32_JSLE_K: Large immediate",
9153  		.u.insns_int = {
9154  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9155  			BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
9156  			BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
9157  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9158  			BPF_EXIT_INSN(),
9159  		},
9160  		INTERNAL,
9161  		{ },
9162  		{ { 0, -12345678 } }
9163  	},
9164  	/* BPF_JMP32 | BPF_JSLE | BPF_X */
9165  	{
9166  		"JMP32_JSLE_X",
9167  		.u.insns_int = {
9168  			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9169  			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9170  			BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
9171  			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9172  			BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
9173  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9174  			BPF_EXIT_INSN(),
9175  		},
9176  		INTERNAL,
9177  		{ },
9178  		{ { 0, -12345678 } }
9179  	},
9180  	/* BPF_JMP | BPF_EXIT */
9181  	{
9182  		"JMP_EXIT",
9183  		.u.insns_int = {
9184  			BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
9185  			BPF_EXIT_INSN(),
9186  			BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
9187  		},
9188  		INTERNAL,
9189  		{ },
9190  		{ { 0, 0x4711 } },
9191  	},
9192  	/* BPF_JMP | BPF_JA */
9193  	{
9194  		"JMP_JA: Unconditional jump: if (true) return 1",
9195  		.u.insns_int = {
9196  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9197  			BPF_JMP_IMM(BPF_JA, 0, 0, 1),
9198  			BPF_EXIT_INSN(),
9199  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9200  			BPF_EXIT_INSN(),
9201  		},
9202  		INTERNAL,
9203  		{ },
9204  		{ { 0, 1 } },
9205  	},
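	/*
	 * The 64-bit signed jump tests load full-width negative constants
	 * with BPF_LD_IMM64 so that the comparison sees bit 63 as the
	 * sign bit.
	 */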
9206  	/* BPF_JMP | BPF_JSLT | BPF_K */
9207  	{
9208  		"JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
9209  		.u.insns_int = {
9210  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9211  			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9212  			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9213  			BPF_EXIT_INSN(),
9214  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9215  			BPF_EXIT_INSN(),
9216  		},
9217  		INTERNAL,
9218  		{ },
9219  		{ { 0, 1 } },
9220  	},
9221  	{
9222  		"JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
9223  		.u.insns_int = {
9224  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9225  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9226  			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9227  			BPF_EXIT_INSN(),
9228  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9229  			BPF_EXIT_INSN(),
9230  		},
9231  		INTERNAL,
9232  		{ },
9233  		{ { 0, 1 } },
9234  	},
9235  	/* BPF_JMP | BPF_JSGT | BPF_K */
9236  	{
9237  		"JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
9238  		.u.insns_int = {
9239  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9240  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9241  			BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
9242  			BPF_EXIT_INSN(),
9243  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9244  			BPF_EXIT_INSN(),
9245  		},
9246  		INTERNAL,
9247  		{ },
9248  		{ { 0, 1 } },
9249  	},
9250  	{
9251  		"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
9252  		.u.insns_int = {
9253  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9254  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9255  			BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
9256  			BPF_EXIT_INSN(),
9257  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9258  			BPF_EXIT_INSN(),
9259  		},
9260  		INTERNAL,
9261  		{ },
9262  		{ { 0, 1 } },
9263  	},
9264  	/* BPF_JMP | BPF_JSLE | BPF_K */
9265  	{
9266  		"JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
9267  		.u.insns_int = {
9268  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9269  			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9270  			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9271  			BPF_EXIT_INSN(),
9272  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9273  			BPF_EXIT_INSN(),
9274  		},
9275  		INTERNAL,
9276  		{ },
9277  		{ { 0, 1 } },
9278  	},
9279  	{
9280  		"JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
9281  		.u.insns_int = {
9282  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9283  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9284  			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9285  			BPF_EXIT_INSN(),
9286  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9287  			BPF_EXIT_INSN(),
9288  		},
9289  		INTERNAL,
9290  		{ },
9291  		{ { 0, 1 } },
9292  	},
9293  	{
9294  		"JMP_JSLE_K: Signed jump: value walk 1",
9295  		.u.insns_int = {
9296  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9297  			BPF_LD_IMM64(R1, 3),
9298  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
9299  			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9300  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9301  			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9302  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9303  			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9304  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9305  			BPF_EXIT_INSN(),		/* bad exit */
9306  			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9307  			BPF_EXIT_INSN(),
9308  		},
9309  		INTERNAL,
9310  		{ },
9311  		{ { 0, 1 } },
9312  	},
9313  	{
9314  		"JMP_JSLE_K: Signed jump: value walk 2",
9315  		.u.insns_int = {
9316  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9317  			BPF_LD_IMM64(R1, 3),
9318  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9319  			BPF_ALU64_IMM(BPF_SUB, R1, 2),
9320  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9321  			BPF_ALU64_IMM(BPF_SUB, R1, 2),
9322  			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9323  			BPF_EXIT_INSN(),		/* bad exit */
9324  			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9325  			BPF_EXIT_INSN(),
9326  		},
9327  		INTERNAL,
9328  		{ },
9329  		{ { 0, 1 } },
9330  	},
9331  	/* BPF_JMP | BPF_JSGE | BPF_K */
9332  	{
9333  		"JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
9334  		.u.insns_int = {
9335  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9336  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9337  			BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
9338  			BPF_EXIT_INSN(),
9339  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9340  			BPF_EXIT_INSN(),
9341  		},
9342  		INTERNAL,
9343  		{ },
9344  		{ { 0, 1 } },
9345  	},
9346  	{
9347  		"JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
9348  		.u.insns_int = {
9349  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9350  			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9351  			BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
9352  			BPF_EXIT_INSN(),
9353  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9354  			BPF_EXIT_INSN(),
9355  		},
9356  		INTERNAL,
9357  		{ },
9358  		{ { 0, 1 } },
9359  	},
9360  	{
9361  		"JMP_JSGE_K: Signed jump: value walk 1",
9362  		.u.insns_int = {
9363  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9364  			BPF_LD_IMM64(R1, -3),
9365  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
9366  			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9367  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9368  			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9369  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9370  			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9371  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9372  			BPF_EXIT_INSN(),		/* bad exit */
9373  			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9374  			BPF_EXIT_INSN(),
9375  		},
9376  		INTERNAL,
9377  		{ },
9378  		{ { 0, 1 } },
9379  	},
9380  	{
9381  		"JMP_JSGE_K: Signed jump: value walk 2",
9382  		.u.insns_int = {
9383  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9384  			BPF_LD_IMM64(R1, -3),
9385  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9386  			BPF_ALU64_IMM(BPF_ADD, R1, 2),
9387  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9388  			BPF_ALU64_IMM(BPF_ADD, R1, 2),
9389  			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9390  			BPF_EXIT_INSN(),		/* bad exit */
9391  			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9392  			BPF_EXIT_INSN(),
9393  		},
9394  		INTERNAL,
9395  		{ },
9396  		{ { 0, 1 } },
9397  	},
9398  	/* BPF_JMP | BPF_JGT | BPF_K */
9399  	{
9400  		"JMP_JGT_K: if (3 > 2) return 1",
9401  		.u.insns_int = {
9402  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9403  			BPF_LD_IMM64(R1, 3),
9404  			BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
9405  			BPF_EXIT_INSN(),
9406  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9407  			BPF_EXIT_INSN(),
9408  		},
9409  		INTERNAL,
9410  		{ },
9411  		{ { 0, 1 } },
9412  	},
9413  	{
9414  		"JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
9415  		.u.insns_int = {
9416  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9417  			BPF_LD_IMM64(R1, -1),
9418  			BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
9419  			BPF_EXIT_INSN(),
9420  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9421  			BPF_EXIT_INSN(),
9422  		},
9423  		INTERNAL,
9424  		{ },
9425  		{ { 0, 1 } },
9426  	},
9427  	/* BPF_JMP | BPF_JLT | BPF_K */
9428  	{
9429  		"JMP_JLT_K: if (2 < 3) return 1",
9430  		.u.insns_int = {
9431  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9432  			BPF_LD_IMM64(R1, 2),
9433  			BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
9434  			BPF_EXIT_INSN(),
9435  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9436  			BPF_EXIT_INSN(),
9437  		},
9438  		INTERNAL,
9439  		{ },
9440  		{ { 0, 1 } },
9441  	},
9442  	{
9443  		"JMP_JLT_K: Unsigned jump: if (1 < -1) return 1",
9444  		.u.insns_int = {
9445  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9446  			BPF_LD_IMM64(R1, 1),
9447  			BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
9448  			BPF_EXIT_INSN(),
9449  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9450  			BPF_EXIT_INSN(),
9451  		},
9452  		INTERNAL,
9453  		{ },
9454  		{ { 0, 1 } },
9455  	},
9456  	/* BPF_JMP | BPF_JGE | BPF_K */
9457  	{
9458  		"JMP_JGE_K: if (3 >= 2) return 1",
9459  		.u.insns_int = {
9460  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9461  			BPF_LD_IMM64(R1, 3),
9462  			BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
9463  			BPF_EXIT_INSN(),
9464  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9465  			BPF_EXIT_INSN(),
9466  		},
9467  		INTERNAL,
9468  		{ },
9469  		{ { 0, 1 } },
9470  	},
9471  	/* BPF_JMP | BPF_JLE | BPF_K */
9472  	{
9473  		"JMP_JLE_K: if (2 <= 3) return 1",
9474  		.u.insns_int = {
9475  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9476  			BPF_LD_IMM64(R1, 2),
9477  			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
9478  			BPF_EXIT_INSN(),
9479  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9480  			BPF_EXIT_INSN(),
9481  		},
9482  		INTERNAL,
9483  		{ },
9484  		{ { 0, 1 } },
9485  	},
9486  	/* BPF_JMP | BPF_JGT | BPF_K jump backwards */
9487  	{
9488  		"JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
9489  		.u.insns_int = {
9490  			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
9491  			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
9492  			BPF_EXIT_INSN(),
9493  			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
9494  			BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
9495  			BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
9496  			BPF_EXIT_INSN(),
9497  		},
9498  		INTERNAL,
9499  		{ },
9500  		{ { 0, 1 } },
9501  	},
9502  	{
9503  		"JMP_JGE_K: if (3 >= 3) return 1",
9504  		.u.insns_int = {
9505  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9506  			BPF_LD_IMM64(R1, 3),
9507  			BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
9508  			BPF_EXIT_INSN(),
9509  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9510  			BPF_EXIT_INSN(),
9511  		},
9512  		INTERNAL,
9513  		{ },
9514  		{ { 0, 1 } },
9515  	},
9516  	/* BPF_JMP | BPF_JLT | BPF_K jump backwards */
9517  	{
9518  		"JMP_JLT_K: if (2 < 3) return 1 (jump backwards)",
9519  		.u.insns_int = {
9520  			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
9521  			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
9522  			BPF_EXIT_INSN(),
9523  			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
9524  			BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
9525  			BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
9526  			BPF_EXIT_INSN(),
9527  		},
9528  		INTERNAL,
9529  		{ },
9530  		{ { 0, 1 } },
9531  	},
9532  	{
9533  		"JMP_JLE_K: if (3 <= 3) return 1",
9534  		.u.insns_int = {
9535  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9536  			BPF_LD_IMM64(R1, 3),
9537  			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
9538  			BPF_EXIT_INSN(),
9539  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9540  			BPF_EXIT_INSN(),
9541  		},
9542  		INTERNAL,
9543  		{ },
9544  		{ { 0, 1 } },
9545  	},
9546  	/* BPF_JMP | BPF_JNE | BPF_K */
9547  	{
9548  		"JMP_JNE_K: if (3 != 2) return 1",
9549  		.u.insns_int = {
9550  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9551  			BPF_LD_IMM64(R1, 3),
9552  			BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
9553  			BPF_EXIT_INSN(),
9554  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9555  			BPF_EXIT_INSN(),
9556  		},
9557  		INTERNAL,
9558  		{ },
9559  		{ { 0, 1 } },
9560  	},
9561  	/* BPF_JMP | BPF_JEQ | BPF_K */
9562  	{
9563  		"JMP_JEQ_K: if (3 == 3) return 1",
9564  		.u.insns_int = {
9565  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9566  			BPF_LD_IMM64(R1, 3),
9567  			BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
9568  			BPF_EXIT_INSN(),
9569  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9570  			BPF_EXIT_INSN(),
9571  		},
9572  		INTERNAL,
9573  		{ },
9574  		{ { 0, 1 } },
9575  	},
9576  	/* BPF_JMP | BPF_JSET | BPF_K */
9577  	{
9578  		"JMP_JSET_K: if (0x3 & 0x2) return 1",
9579  		.u.insns_int = {
9580  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9581  			BPF_LD_IMM64(R1, 3),
9582  			BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
9583  			BPF_EXIT_INSN(),
9584  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9585  			BPF_EXIT_INSN(),
9586  		},
9587  		INTERNAL,
9588  		{ },
9589  		{ { 0, 1 } },
9590  	},
9591  	{
9592  		"JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
9593  		.u.insns_int = {
9594  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9595  			BPF_LD_IMM64(R1, 3),
9596  			BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
9597  			BPF_EXIT_INSN(),
9598  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9599  			BPF_EXIT_INSN(),
9600  		},
9601  		INTERNAL,
9602  		{ },
9603  		{ { 0, 1 } },
9604  	},
9605  	/* BPF_JMP | BPF_JSGT | BPF_X */
9606  	{
9607  		"JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
9608  		.u.insns_int = {
9609  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9610  			BPF_LD_IMM64(R1, -1),
9611  			BPF_LD_IMM64(R2, -2),
9612  			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
9613  			BPF_EXIT_INSN(),
9614  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9615  			BPF_EXIT_INSN(),
9616  		},
9617  		INTERNAL,
9618  		{ },
9619  		{ { 0, 1 } },
9620  	},
9621  	{
9622  		"JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
9623  		.u.insns_int = {
9624  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9625  			BPF_LD_IMM64(R1, -1),
9626  			BPF_LD_IMM64(R2, -1),
9627  			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
9628  			BPF_EXIT_INSN(),
9629  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9630  			BPF_EXIT_INSN(),
9631  		},
9632  		INTERNAL,
9633  		{ },
9634  		{ { 0, 1 } },
9635  	},
9636  	/* BPF_JMP | BPF_JSLT | BPF_X */
9637  	{
9638  		"JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
9639  		.u.insns_int = {
9640  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9641  			BPF_LD_IMM64(R1, -1),
9642  			BPF_LD_IMM64(R2, -2),
9643  			BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
9644  			BPF_EXIT_INSN(),
9645  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9646  			BPF_EXIT_INSN(),
9647  		},
9648  		INTERNAL,
9649  		{ },
9650  		{ { 0, 1 } },
9651  	},
9652  	{
9653  		"JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
9654  		.u.insns_int = {
9655  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9656  			BPF_LD_IMM64(R1, -1),
9657  			BPF_LD_IMM64(R2, -1),
9658  			BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
9659  			BPF_EXIT_INSN(),
9660  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9661  			BPF_EXIT_INSN(),
9662  		},
9663  		INTERNAL,
9664  		{ },
9665  		{ { 0, 1 } },
9666  	},
9667  	/* BPF_JMP | BPF_JSGE | BPF_X */
9668  	{
9669  		"JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
9670  		.u.insns_int = {
9671  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9672  			BPF_LD_IMM64(R1, -1),
9673  			BPF_LD_IMM64(R2, -2),
9674  			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
9675  			BPF_EXIT_INSN(),
9676  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9677  			BPF_EXIT_INSN(),
9678  		},
9679  		INTERNAL,
9680  		{ },
9681  		{ { 0, 1 } },
9682  	},
9683  	{
9684  		"JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
9685  		.u.insns_int = {
9686  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9687  			BPF_LD_IMM64(R1, -1),
9688  			BPF_LD_IMM64(R2, -1),
9689  			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
9690  			BPF_EXIT_INSN(),
9691  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9692  			BPF_EXIT_INSN(),
9693  		},
9694  		INTERNAL,
9695  		{ },
9696  		{ { 0, 1 } },
9697  	},
9698  	/* BPF_JMP | BPF_JSLE | BPF_X */
9699  	{
9700  		"JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
9701  		.u.insns_int = {
9702  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9703  			BPF_LD_IMM64(R1, -1),
9704  			BPF_LD_IMM64(R2, -2),
9705  			BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
9706  			BPF_EXIT_INSN(),
9707  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9708  			BPF_EXIT_INSN(),
9709  		},
9710  		INTERNAL,
9711  		{ },
9712  		{ { 0, 1 } },
9713  	},
9714  	{
9715  		"JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
9716  		.u.insns_int = {
9717  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9718  			BPF_LD_IMM64(R1, -1),
9719  			BPF_LD_IMM64(R2, -1),
9720  			BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
9721  			BPF_EXIT_INSN(),
9722  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9723  			BPF_EXIT_INSN(),
9724  		},
9725  		INTERNAL,
9726  		{ },
9727  		{ { 0, 1 } },
9728  	},
9729  	/* BPF_JMP | BPF_JGT | BPF_X */
9730  	{
9731  		"JMP_JGT_X: if (3 > 2) return 1",
9732  		.u.insns_int = {
9733  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9734  			BPF_LD_IMM64(R1, 3),
9735  			BPF_LD_IMM64(R2, 2),
9736  			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
9737  			BPF_EXIT_INSN(),
9738  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9739  			BPF_EXIT_INSN(),
9740  		},
9741  		INTERNAL,
9742  		{ },
9743  		{ { 0, 1 } },
9744  	},
9745  	{
9746  		"JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
9747  		.u.insns_int = {
9748  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9749  			BPF_LD_IMM64(R1, -1),
9750  			BPF_LD_IMM64(R2, 1),
9751  			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
9752  			BPF_EXIT_INSN(),
9753  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9754  			BPF_EXIT_INSN(),
9755  		},
9756  		INTERNAL,
9757  		{ },
9758  		{ { 0, 1 } },
9759  	},
9760  	/* BPF_JMP | BPF_JLT | BPF_X */
9761  	{
9762  		"JMP_JLT_X: if (2 < 3) return 1",
9763  		.u.insns_int = {
9764  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9765  			BPF_LD_IMM64(R1, 3),
9766  			BPF_LD_IMM64(R2, 2),
9767  			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
9768  			BPF_EXIT_INSN(),
9769  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9770  			BPF_EXIT_INSN(),
9771  		},
9772  		INTERNAL,
9773  		{ },
9774  		{ { 0, 1 } },
9775  	},
9776  	{
9777  		"JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
9778  		.u.insns_int = {
9779  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9780  			BPF_LD_IMM64(R1, -1),
9781  			BPF_LD_IMM64(R2, 1),
9782  			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
9783  			BPF_EXIT_INSN(),
9784  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9785  			BPF_EXIT_INSN(),
9786  		},
9787  		INTERNAL,
9788  		{ },
9789  		{ { 0, 1 } },
9790  	},
9791  	/* BPF_JMP | BPF_JGE | BPF_X */
9792  	{
9793  		"JMP_JGE_X: if (3 >= 2) return 1",
9794  		.u.insns_int = {
9795  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9796  			BPF_LD_IMM64(R1, 3),
9797  			BPF_LD_IMM64(R2, 2),
9798  			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
9799  			BPF_EXIT_INSN(),
9800  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9801  			BPF_EXIT_INSN(),
9802  		},
9803  		INTERNAL,
9804  		{ },
9805  		{ { 0, 1 } },
9806  	},
9807  	{
9808  		"JMP_JGE_X: if (3 >= 3) return 1",
9809  		.u.insns_int = {
9810  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9811  			BPF_LD_IMM64(R1, 3),
9812  			BPF_LD_IMM64(R2, 3),
9813  			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
9814  			BPF_EXIT_INSN(),
9815  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9816  			BPF_EXIT_INSN(),
9817  		},
9818  		INTERNAL,
9819  		{ },
9820  		{ { 0, 1 } },
9821  	},
9822  	/* BPF_JMP | BPF_JLE | BPF_X */
9823  	{
9824  		"JMP_JLE_X: if (2 <= 3) return 1",
9825  		.u.insns_int = {
9826  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9827  			BPF_LD_IMM64(R1, 3),
9828  			BPF_LD_IMM64(R2, 2),
9829  			BPF_JMP_REG(BPF_JLE, R2, R1, 1),
9830  			BPF_EXIT_INSN(),
9831  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9832  			BPF_EXIT_INSN(),
9833  		},
9834  		INTERNAL,
9835  		{ },
9836  		{ { 0, 1 } },
9837  	},
9838  	{
9839  		"JMP_JLE_X: if (3 <= 3) return 1",
9840  		.u.insns_int = {
9841  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9842  			BPF_LD_IMM64(R1, 3),
9843  			BPF_LD_IMM64(R2, 3),
9844  			BPF_JMP_REG(BPF_JLE, R1, R2, 1),
9845  			BPF_EXIT_INSN(),
9846  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9847  			BPF_EXIT_INSN(),
9848  		},
9849  		INTERNAL,
9850  		{ },
9851  		{ { 0, 1 } },
9852  	},
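	/*
	 * The ldimm64 tests below exercise jump offsets across
	 * BPF_LD_IMM64, which occupies two instruction slots: jump
	 * offsets of 0, 2 and 4 land after zero, one or two of the
	 * 64-bit loads. The expected result is the low 32 bits of R0.
	 */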
9853  	{
9854  		/* Mainly testing JIT + imm64 here. */
9855  		"JMP_JGE_X: ldimm64 test 1",
9856  		.u.insns_int = {
9857  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9858  			BPF_LD_IMM64(R1, 3),
9859  			BPF_LD_IMM64(R2, 2),
9860  			BPF_JMP_REG(BPF_JGE, R1, R2, 2),
9861  			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9862  			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9863  			BPF_EXIT_INSN(),
9864  		},
9865  		INTERNAL,
9866  		{ },
9867  		{ { 0, 0xeeeeeeeeU } },
9868  	},
9869  	{
9870  		"JMP_JGE_X: ldimm64 test 2",
9871  		.u.insns_int = {
9872  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9873  			BPF_LD_IMM64(R1, 3),
9874  			BPF_LD_IMM64(R2, 2),
9875  			BPF_JMP_REG(BPF_JGE, R1, R2, 0),
9876  			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9877  			BPF_EXIT_INSN(),
9878  		},
9879  		INTERNAL,
9880  		{ },
9881  		{ { 0, 0xffffffffU } },
9882  	},
9883  	{
9884  		"JMP_JGE_X: ldimm64 test 3",
9885  		.u.insns_int = {
9886  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9887  			BPF_LD_IMM64(R1, 3),
9888  			BPF_LD_IMM64(R2, 2),
9889  			BPF_JMP_REG(BPF_JGE, R1, R2, 4),
9890  			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9891  			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9892  			BPF_EXIT_INSN(),
9893  		},
9894  		INTERNAL,
9895  		{ },
9896  		{ { 0, 1 } },
9897  	},
9898  	{
9899  		"JMP_JLE_X: ldimm64 test 1",
9900  		.u.insns_int = {
9901  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9902  			BPF_LD_IMM64(R1, 3),
9903  			BPF_LD_IMM64(R2, 2),
9904  			BPF_JMP_REG(BPF_JLE, R2, R1, 2),
9905  			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9906  			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9907  			BPF_EXIT_INSN(),
9908  		},
9909  		INTERNAL,
9910  		{ },
9911  		{ { 0, 0xeeeeeeeeU } },
9912  	},
9913  	{
9914  		"JMP_JLE_X: ldimm64 test 2",
9915  		.u.insns_int = {
9916  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9917  			BPF_LD_IMM64(R1, 3),
9918  			BPF_LD_IMM64(R2, 2),
9919  			BPF_JMP_REG(BPF_JLE, R2, R1, 0),
9920  			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9921  			BPF_EXIT_INSN(),
9922  		},
9923  		INTERNAL,
9924  		{ },
9925  		{ { 0, 0xffffffffU } },
9926  	},
9927  	{
9928  		"JMP_JLE_X: ldimm64 test 3",
9929  		.u.insns_int = {
9930  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9931  			BPF_LD_IMM64(R1, 3),
9932  			BPF_LD_IMM64(R2, 2),
9933  			BPF_JMP_REG(BPF_JLE, R2, R1, 4),
9934  			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9935  			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9936  			BPF_EXIT_INSN(),
9937  		},
9938  		INTERNAL,
9939  		{ },
9940  		{ { 0, 1 } },
9941  	},
9942  	/* BPF_JMP | BPF_JNE | BPF_X */
9943  	{
9944  		"JMP_JNE_X: if (3 != 2) return 1",
9945  		.u.insns_int = {
9946  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9947  			BPF_LD_IMM64(R1, 3),
9948  			BPF_LD_IMM64(R2, 2),
9949  			BPF_JMP_REG(BPF_JNE, R1, R2, 1),
9950  			BPF_EXIT_INSN(),
9951  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9952  			BPF_EXIT_INSN(),
9953  		},
9954  		INTERNAL,
9955  		{ },
9956  		{ { 0, 1 } },
9957  	},
9958  	/* BPF_JMP | BPF_JEQ | BPF_X */
9959  	{
9960  		"JMP_JEQ_X: if (3 == 3) return 1",
9961  		.u.insns_int = {
9962  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9963  			BPF_LD_IMM64(R1, 3),
9964  			BPF_LD_IMM64(R2, 3),
9965  			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
9966  			BPF_EXIT_INSN(),
9967  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9968  			BPF_EXIT_INSN(),
9969  		},
9970  		INTERNAL,
9971  		{ },
9972  		{ { 0, 1 } },
9973  	},
9974  	/* BPF_JMP | BPF_JSET | BPF_X */
9975  	{
9976  		"JMP_JSET_X: if (0x3 & 0x2) return 1",
9977  		.u.insns_int = {
9978  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9979  			BPF_LD_IMM64(R1, 3),
9980  			BPF_LD_IMM64(R2, 2),
9981  			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
9982  			BPF_EXIT_INSN(),
9983  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9984  			BPF_EXIT_INSN(),
9985  		},
9986  		INTERNAL,
9987  		{ },
9988  		{ { 0, 1 } },
9989  	},
9990  	{
9991  		"JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
9992  		.u.insns_int = {
9993  			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9994  			BPF_LD_IMM64(R1, 3),
9995  			BPF_LD_IMM64(R2, 0xffffffff),
9996  			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
9997  			BPF_EXIT_INSN(),
9998  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9999  			BPF_EXIT_INSN(),
10000  		},
10001  		INTERNAL,
10002  		{ },
10003  		{ { 0, 1 } },
10004  	},
10005  	{
10006  		"JMP_JA: Jump, gap, jump, ...",
10007  		{ },
10008  		CLASSIC | FLAG_NO_DATA,
10009  		{ },
10010  		{ { 0, 0xababcbac } },
10011  		.fill_helper = bpf_fill_ja,
10012  	},
10013  	{	/* Mainly checking JIT here. */
10014  		"BPF_MAXINSNS: Maximum possible literals",
10015  		{ },
10016  		CLASSIC | FLAG_NO_DATA,
10017  		{ },
10018  		{ { 0, 0xffffffff } },
10019  		.fill_helper = bpf_fill_maxinsns1,
10020  	},
10021  	{	/* Mainly checking JIT here. */
10022  		"BPF_MAXINSNS: Single literal",
10023  		{ },
10024  		CLASSIC | FLAG_NO_DATA,
10025  		{ },
10026  		{ { 0, 0xfefefefe } },
10027  		.fill_helper = bpf_fill_maxinsns2,
10028  	},
10029  	{	/* Mainly checking JIT here. */
10030  		"BPF_MAXINSNS: Run/add until end",
10031  		{ },
10032  		CLASSIC | FLAG_NO_DATA,
10033  		{ },
10034  		{ { 0, 0x947bf368 } },
10035  		.fill_helper = bpf_fill_maxinsns3,
10036  	},
10037  	{
10038  		"BPF_MAXINSNS: Too many instructions",
10039  		{ },
10040  		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
10041  		{ },
10042  		{ },
10043  		.fill_helper = bpf_fill_maxinsns4,
10044  		.expected_errcode = -EINVAL,
10045  	},
10046  	{	/* Mainly checking JIT here. */
10047  		"BPF_MAXINSNS: Very long jump",
10048  		{ },
10049  		CLASSIC | FLAG_NO_DATA,
10050  		{ },
10051  		{ { 0, 0xabababab } },
10052  		.fill_helper = bpf_fill_maxinsns5,
10053  	},
10054  	{	/* Mainly checking JIT here. */
10055  		"BPF_MAXINSNS: Ctx heavy transformations",
10056  		{ },
10057  		CLASSIC,
10058  		{ },
10059  		{
10060  			{  1, SKB_VLAN_PRESENT },
10061  			{ 10, SKB_VLAN_PRESENT }
10062  		},
10063  		.fill_helper = bpf_fill_maxinsns6,
10064  	},
10065  	{	/* Mainly checking JIT here. */
10066  		"BPF_MAXINSNS: Call heavy transformations",
10067  		{ },
10068  		CLASSIC | FLAG_NO_DATA,
10069  		{ },
10070  		{ { 1, 0 }, { 10, 0 } },
10071  		.fill_helper = bpf_fill_maxinsns7,
10072  	},
10073  	{	/* Mainly checking JIT here. */
10074  		"BPF_MAXINSNS: Jump heavy test",
10075  		{ },
10076  		CLASSIC | FLAG_NO_DATA,
10077  		{ },
10078  		{ { 0, 0xffffffff } },
10079  		.fill_helper = bpf_fill_maxinsns8,
10080  	},
10081  	{	/* Mainly checking JIT here. */
10082  		"BPF_MAXINSNS: Very long jump backwards",
10083  		{ },
10084  		INTERNAL | FLAG_NO_DATA,
10085  		{ },
10086  		{ { 0, 0xcbababab } },
10087  		.fill_helper = bpf_fill_maxinsns9,
10088  	},
10089  	{	/* Mainly checking JIT here. */
10090  		"BPF_MAXINSNS: Edge hopping nuthouse",
10091  		{ },
10092  		INTERNAL | FLAG_NO_DATA,
10093  		{ },
10094  		{ { 0, 0xabababac } },
10095  		.fill_helper = bpf_fill_maxinsns10,
10096  	},
10097  	{
10098  		"BPF_MAXINSNS: Jump, gap, jump, ...",
10099  		{ },
10100  		CLASSIC | FLAG_NO_DATA,
10101  		{ },
10102  		{ { 0, 0xababcbac } },
10103  		.fill_helper = bpf_fill_maxinsns11,
10104  	},
10105  	{
10106  		"BPF_MAXINSNS: jump over MSH",
10107  		{ },
10108  		CLASSIC | FLAG_EXPECTED_FAIL,
10109  		{ 0xfa, 0xfb, 0xfc, 0xfd, },
10110  		{ { 4, 0xabababab } },
10111  		.fill_helper = bpf_fill_maxinsns12,
10112  		.expected_errcode = -EINVAL,
10113  	},
10114  	{
10115  		"BPF_MAXINSNS: exec all MSH",
10116  		{ },
10117  		CLASSIC,
10118  		{ 0xfa, 0xfb, 0xfc, 0xfd, },
10119  		{ { 4, 0xababab83 } },
10120  		.fill_helper = bpf_fill_maxinsns13,
10121  	},
10122  	{
10123  		"BPF_MAXINSNS: ld_abs+get_processor_id",
10124  		{ },
10125  		CLASSIC,
10126  		{ },
10127  		{ { 1, 0xbee } },
10128  		.fill_helper = bpf_fill_ld_abs_get_processor_id,
10129  	},
10130  	/*
10131  	 * LD_IND / LD_ABS on fragmented SKBs
10132  	 */
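	/*
	 * The tests below keep the interesting bytes in skb fragment data
	 * (.frag_data) rather than in the linear head, so the LD_ABS and
	 * LD_IND loads must be satisfied from the fragment. The "mixed
	 * head/frag" variants start in the linear area and cross over
	 * into the fragment.
	 */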
10133  	{
10134  		"LD_IND byte frag",
10135  		.u.insns = {
10136  			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10137  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
10138  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10139  		},
10140  		CLASSIC | FLAG_SKB_FRAG,
10141  		{ },
10142  		{ {0x40, 0x42} },
10143  		.frag_data = {
10144  			0x42, 0x00, 0x00, 0x00,
10145  			0x43, 0x44, 0x00, 0x00,
10146  			0x21, 0x07, 0x19, 0x83,
10147  		},
10148  	},
10149  	{
10150  		"LD_IND halfword frag",
10151  		.u.insns = {
10152  			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10153  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
10154  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10155  		},
10156  		CLASSIC | FLAG_SKB_FRAG,
10157  		{ },
10158  		{ {0x40, 0x4344} },
10159  		.frag_data = {
10160  			0x42, 0x00, 0x00, 0x00,
10161  			0x43, 0x44, 0x00, 0x00,
10162  			0x21, 0x07, 0x19, 0x83,
10163  		},
10164  	},
10165  	{
10166  		"LD_IND word frag",
10167  		.u.insns = {
10168  			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10169  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
10170  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10171  		},
10172  		CLASSIC | FLAG_SKB_FRAG,
10173  		{ },
10174  		{ {0x40, 0x21071983} },
10175  		.frag_data = {
10176  			0x42, 0x00, 0x00, 0x00,
10177  			0x43, 0x44, 0x00, 0x00,
10178  			0x21, 0x07, 0x19, 0x83,
10179  		},
10180  	},
10181  	{
10182  		"LD_IND halfword mixed head/frag",
10183  		.u.insns = {
10184  			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10185  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10186  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10187  		},
10188  		CLASSIC | FLAG_SKB_FRAG,
10189  		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10190  		{ {0x40, 0x0519} },
10191  		.frag_data = { 0x19, 0x82 },
10192  	},
10193  	{
10194  		"LD_IND word mixed head/frag",
10195  		.u.insns = {
10196  			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10197  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10198  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10199  		},
10200  		CLASSIC | FLAG_SKB_FRAG,
10201  		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10202  		{ {0x40, 0x25051982} },
10203  		.frag_data = { 0x19, 0x82 },
10204  	},
10205  	{
10206  		"LD_ABS byte frag",
10207  		.u.insns = {
10208  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
10209  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10210  		},
10211  		CLASSIC | FLAG_SKB_FRAG,
10212  		{ },
10213  		{ {0x40, 0x42} },
10214  		.frag_data = {
10215  			0x42, 0x00, 0x00, 0x00,
10216  			0x43, 0x44, 0x00, 0x00,
10217  			0x21, 0x07, 0x19, 0x83,
10218  		},
10219  	},
10220  	{
10221  		"LD_ABS halfword frag",
10222  		.u.insns = {
10223  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
10224  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10225  		},
10226  		CLASSIC | FLAG_SKB_FRAG,
10227  		{ },
10228  		{ {0x40, 0x4344} },
10229  		.frag_data = {
10230  			0x42, 0x00, 0x00, 0x00,
10231  			0x43, 0x44, 0x00, 0x00,
10232  			0x21, 0x07, 0x19, 0x83,
10233  		},
10234  	},
10235  	{
10236  		"LD_ABS word frag",
10237  		.u.insns = {
10238  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
10239  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10240  		},
10241  		CLASSIC | FLAG_SKB_FRAG,
10242  		{ },
10243  		{ {0x40, 0x21071983} },
10244  		.frag_data = {
10245  			0x42, 0x00, 0x00, 0x00,
10246  			0x43, 0x44, 0x00, 0x00,
10247  			0x21, 0x07, 0x19, 0x83,
10248  		},
10249  	},
10250  	{
10251  		"LD_ABS halfword mixed head/frag",
10252  		.u.insns = {
10253  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10254  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10255  		},
10256  		CLASSIC | FLAG_SKB_FRAG,
10257  		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10258  		{ {0x40, 0x0519} },
10259  		.frag_data = { 0x19, 0x82 },
10260  	},
10261  	{
10262  		"LD_ABS word mixed head/frag",
10263  		.u.insns = {
10264  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
10265  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10266  		},
10267  		CLASSIC | FLAG_SKB_FRAG,
10268  		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10269  		{ {0x40, 0x25051982} },
10270  		.frag_data = { 0x19, 0x82 },
10271  	},
10272  	/*
10273  	 * LD_IND / LD_ABS on non fragmented SKBs
10274  	 */
10275  	{
10276  		/*
10277  		 * this tests that the JIT/interpreter correctly resets X
10278  		 * before using it in an LD_IND instruction.
10279  		 */
10280  		"LD_IND byte default X",
10281  		.u.insns = {
10282  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10283  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10284  		},
10285  		CLASSIC,
10286  		{ [0x1] = 0x42 },
10287  		{ {0x40, 0x42 } },
10288  	},
10289  	{
10290  		"LD_IND byte positive offset",
10291  		.u.insns = {
10292  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10293  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10294  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10295  		},
10296  		CLASSIC,
10297  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10298  		{ {0x40, 0x82 } },
10299  	},
10300  	{
10301  		"LD_IND byte negative offset",
10302  		.u.insns = {
10303  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10304  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
10305  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10306  		},
10307  		CLASSIC,
10308  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10309  		{ {0x40, 0x05 } },
10310  	},
10311  	{
10312  		"LD_IND byte positive offset, all ff",
10313  		.u.insns = {
10314  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10315  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10316  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10317  		},
10318  		CLASSIC,
10319  		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10320  		{ {0x40, 0xff } },
10321  	},
10322  	{
10323  		"LD_IND byte positive offset, out of bounds",
10324  		.u.insns = {
10325  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10326  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10327  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10328  		},
10329  		CLASSIC,
10330  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10331  		{ {0x3f, 0 }, },
10332  	},
10333  	{
10334  		"LD_IND byte negative offset, out of bounds",
10335  		.u.insns = {
10336  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10337  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
10338  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10339  		},
10340  		CLASSIC,
10341  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10342  		{ {0x3f, 0 } },
10343  	},
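	/*
	 * SKF_LL_OFF used below rebases the offset to the link-layer
	 * header, which for the test skb coincides with the start of the
	 * packet data: SKF_LL_OFF + 0x3c .. + 0x3f therefore address the
	 * last four bytes of the 0x40-byte packet.
	 */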
10344  	{
10345  		"LD_IND byte negative offset, multiple calls",
10346  		.u.insns = {
10347  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10348  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
10349  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
10350  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
10351  			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
10352  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10353  		},
10354  		CLASSIC,
10355  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10356  		{ {0x40, 0x82 }, },
10357  	},
10358  	{
10359  		"LD_IND halfword positive offset",
10360  		.u.insns = {
10361  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10362  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
10363  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10364  		},
10365  		CLASSIC,
10366  		{
10367  			[0x1c] = 0xaa, [0x1d] = 0x55,
10368  			[0x1e] = 0xbb, [0x1f] = 0x66,
10369  			[0x20] = 0xcc, [0x21] = 0x77,
10370  			[0x22] = 0xdd, [0x23] = 0x88,
10371  		},
10372  		{ {0x40, 0xdd88 } },
10373  	},
10374  	{
10375  		"LD_IND halfword negative offset",
10376  		.u.insns = {
10377  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10378  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
10379  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10380  		},
10381  		CLASSIC,
10382  		{
10383  			[0x1c] = 0xaa, [0x1d] = 0x55,
10384  			[0x1e] = 0xbb, [0x1f] = 0x66,
10385  			[0x20] = 0xcc, [0x21] = 0x77,
10386  			[0x22] = 0xdd, [0x23] = 0x88,
10387  		},
10388  		{ {0x40, 0xbb66 } },
10389  	},
10390  	{
10391  		"LD_IND halfword unaligned",
10392  		.u.insns = {
10393  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10394  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10395  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10396  		},
10397  		CLASSIC,
10398  		{
10399  			[0x1c] = 0xaa, [0x1d] = 0x55,
10400  			[0x1e] = 0xbb, [0x1f] = 0x66,
10401  			[0x20] = 0xcc, [0x21] = 0x77,
10402  			[0x22] = 0xdd, [0x23] = 0x88,
10403  		},
10404  		{ {0x40, 0x66cc } },
10405  	},
10406  	{
10407  		"LD_IND halfword positive offset, all ff",
10408  		.u.insns = {
10409  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
10410  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
10411  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10412  		},
10413  		CLASSIC,
10414  		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10415  		{ {0x40, 0xffff } },
10416  	},
10417  	{
10418  		"LD_IND halfword positive offset, out of bounds",
10419  		.u.insns = {
10420  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10421  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
10422  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10423  		},
10424  		CLASSIC,
10425  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10426  		{ {0x3f, 0 }, },
10427  	},
10428  	{
10429  		"LD_IND halfword negative offset, out of bounds",
10430  		.u.insns = {
10431  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10432  			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
10433  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10434  		},
10435  		CLASSIC,
10436  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10437  		{ {0x3f, 0 } },
10438  	},
10439  	{
10440  		"LD_IND word positive offset",
10441  		.u.insns = {
10442  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10443  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
10444  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10445  		},
10446  		CLASSIC,
10447  		{
10448  			[0x1c] = 0xaa, [0x1d] = 0x55,
10449  			[0x1e] = 0xbb, [0x1f] = 0x66,
10450  			[0x20] = 0xcc, [0x21] = 0x77,
10451  			[0x22] = 0xdd, [0x23] = 0x88,
10452  			[0x24] = 0xee, [0x25] = 0x99,
10453  			[0x26] = 0xff, [0x27] = 0xaa,
10454  		},
10455  		{ {0x40, 0xee99ffaa } },
10456  	},
10457  	{
10458  		"LD_IND word negative offset",
10459  		.u.insns = {
10460  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10461  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
10462  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10463  		},
10464  		CLASSIC,
10465  		{
10466  			[0x1c] = 0xaa, [0x1d] = 0x55,
10467  			[0x1e] = 0xbb, [0x1f] = 0x66,
10468  			[0x20] = 0xcc, [0x21] = 0x77,
10469  			[0x22] = 0xdd, [0x23] = 0x88,
10470  			[0x24] = 0xee, [0x25] = 0x99,
10471  			[0x26] = 0xff, [0x27] = 0xaa,
10472  		},
10473  		{ {0x40, 0xaa55bb66 } },
10474  	},
10475  	{
10476  		"LD_IND word unaligned (addr & 3 == 2)",
10477  		.u.insns = {
10478  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10479  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10480  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10481  		},
10482  		CLASSIC,
10483  		{
10484  			[0x1c] = 0xaa, [0x1d] = 0x55,
10485  			[0x1e] = 0xbb, [0x1f] = 0x66,
10486  			[0x20] = 0xcc, [0x21] = 0x77,
10487  			[0x22] = 0xdd, [0x23] = 0x88,
10488  			[0x24] = 0xee, [0x25] = 0x99,
10489  			[0x26] = 0xff, [0x27] = 0xaa,
10490  		},
10491  		{ {0x40, 0xbb66cc77 } },
10492  	},
10493  	{
10494  		"LD_IND word unaligned (addr & 3 == 1)",
10495  		.u.insns = {
10496  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10497  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
10498  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10499  		},
10500  		CLASSIC,
10501  		{
10502  			[0x1c] = 0xaa, [0x1d] = 0x55,
10503  			[0x1e] = 0xbb, [0x1f] = 0x66,
10504  			[0x20] = 0xcc, [0x21] = 0x77,
10505  			[0x22] = 0xdd, [0x23] = 0x88,
10506  			[0x24] = 0xee, [0x25] = 0x99,
10507  			[0x26] = 0xff, [0x27] = 0xaa,
10508  		},
10509  		{ {0x40, 0x55bb66cc } },
10510  	},
10511  	{
10512  		"LD_IND word unaligned (addr & 3 == 3)",
10513  		.u.insns = {
10514  			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10515  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
10516  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10517  		},
10518  		CLASSIC,
10519  		{
10520  			[0x1c] = 0xaa, [0x1d] = 0x55,
10521  			[0x1e] = 0xbb, [0x1f] = 0x66,
10522  			[0x20] = 0xcc, [0x21] = 0x77,
10523  			[0x22] = 0xdd, [0x23] = 0x88,
10524  			[0x24] = 0xee, [0x25] = 0x99,
10525  			[0x26] = 0xff, [0x27] = 0xaa,
10526  		},
10527  		{ {0x40, 0x66cc77dd } },
10528  	},
10529  	{
10530  		"LD_IND word positive offset, all ff",
10531  		.u.insns = {
10532  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10533  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
10534  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10535  		},
10536  		CLASSIC,
10537  		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10538  		{ {0x40, 0xffffffff } },
10539  	},
10540  	{
10541  		"LD_IND word positive offset, out of bounds",
10542  		.u.insns = {
10543  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10544  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
10545  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10546  		},
10547  		CLASSIC,
10548  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10549  		{ {0x3f, 0 }, },
10550  	},
10551  	{
10552  		"LD_IND word negative offset, out of bounds",
10553  		.u.insns = {
10554  			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10555  			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
10556  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10557  		},
10558  		CLASSIC,
10559  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10560  		{ {0x3f, 0 } },
10561  	},
10562  	{
10563  		"LD_ABS byte",
10564  		.u.insns = {
10565  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
10566  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10567  		},
10568  		CLASSIC,
10569  		{
10570  			[0x1c] = 0xaa, [0x1d] = 0x55,
10571  			[0x1e] = 0xbb, [0x1f] = 0x66,
10572  			[0x20] = 0xcc, [0x21] = 0x77,
10573  			[0x22] = 0xdd, [0x23] = 0x88,
10574  			[0x24] = 0xee, [0x25] = 0x99,
10575  			[0x26] = 0xff, [0x27] = 0xaa,
10576  		},
10577  		{ {0x40, 0xcc } },
10578  	},
10579  	{
10580  		"LD_ABS byte positive offset, all ff",
10581  		.u.insns = {
10582  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
10583  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10584  		},
10585  		CLASSIC,
10586  		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10587  		{ {0x40, 0xff } },
10588  	},
10589  	{
10590  		"LD_ABS byte positive offset, out of bounds",
10591  		.u.insns = {
10592  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
10593  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10594  		},
10595  		CLASSIC,
10596  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10597  		{ {0x3f, 0 }, },
10598  	},
10599  	{
10600  		"LD_ABS byte negative offset, out of bounds load",
10601  		.u.insns = {
10602  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
10603  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10604  		},
10605  		CLASSIC | FLAG_EXPECTED_FAIL,
10606  		.expected_errcode = -EINVAL,
10607  	},
10608  	{
10609  		"LD_ABS byte negative offset, in bounds",
10610  		.u.insns = {
10611  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10612  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10613  		},
10614  		CLASSIC,
10615  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10616  		{ {0x40, 0x82 }, },
10617  	},
10618  	{
10619  		"LD_ABS byte negative offset, out of bounds",
10620  		.u.insns = {
10621  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10622  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10623  		},
10624  		CLASSIC,
10625  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10626  		{ {0x3f, 0 }, },
10627  	},
10628  	{
10629  		"LD_ABS byte negative offset, multiple calls",
10630  		.u.insns = {
10631  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
10632  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
10633  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
10634  			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10635  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10636  		},
10637  		CLASSIC,
10638  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10639  		{ {0x40, 0x82 }, },
10640  	},
10641  	{
10642  		"LD_ABS halfword",
10643  		.u.insns = {
10644  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
10645  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10646  		},
10647  		CLASSIC,
10648  		{
10649  			[0x1c] = 0xaa, [0x1d] = 0x55,
10650  			[0x1e] = 0xbb, [0x1f] = 0x66,
10651  			[0x20] = 0xcc, [0x21] = 0x77,
10652  			[0x22] = 0xdd, [0x23] = 0x88,
10653  			[0x24] = 0xee, [0x25] = 0x99,
10654  			[0x26] = 0xff, [0x27] = 0xaa,
10655  		},
10656  		{ {0x40, 0xdd88 } },
10657  	},
10658  	{
10659  		"LD_ABS halfword unaligned",
10660  		.u.insns = {
10661  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
10662  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10663  		},
10664  		CLASSIC,
10665  		{
10666  			[0x1c] = 0xaa, [0x1d] = 0x55,
10667  			[0x1e] = 0xbb, [0x1f] = 0x66,
10668  			[0x20] = 0xcc, [0x21] = 0x77,
10669  			[0x22] = 0xdd, [0x23] = 0x88,
10670  			[0x24] = 0xee, [0x25] = 0x99,
10671  			[0x26] = 0xff, [0x27] = 0xaa,
10672  		},
10673  		{ {0x40, 0x99ff } },
10674  	},
10675  	{
10676  		"LD_ABS halfword positive offset, all ff",
10677  		.u.insns = {
10678  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
10679  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10680  		},
10681  		CLASSIC,
10682  		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10683  		{ {0x40, 0xffff } },
10684  	},
10685  	{
10686  		"LD_ABS halfword positive offset, out of bounds",
10687  		.u.insns = {
10688  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10689  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10690  		},
10691  		CLASSIC,
10692  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10693  		{ {0x3f, 0 }, },
10694  	},
10695  	{
10696  		"LD_ABS halfword negative offset, out of bounds load",
10697  		.u.insns = {
10698  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
10699  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10700  		},
10701  		CLASSIC | FLAG_EXPECTED_FAIL,
10702  		.expected_errcode = -EINVAL,
10703  	},
10704  	{
10705  		"LD_ABS halfword negative offset, in bounds",
10706  		.u.insns = {
10707  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
10708  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10709  		},
10710  		CLASSIC,
10711  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10712  		{ {0x40, 0x1982 }, },
10713  	},
10714  	{
10715  		"LD_ABS halfword negative offset, out of bounds",
10716  		.u.insns = {
10717  			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
10718  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10719  		},
10720  		CLASSIC,
10721  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10722  		{ {0x3f, 0 }, },
10723  	},
10724  	{
10725  		"LD_ABS word",
10726  		.u.insns = {
10727  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
10728  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10729  		},
10730  		CLASSIC,
10731  		{
10732  			[0x1c] = 0xaa, [0x1d] = 0x55,
10733  			[0x1e] = 0xbb, [0x1f] = 0x66,
10734  			[0x20] = 0xcc, [0x21] = 0x77,
10735  			[0x22] = 0xdd, [0x23] = 0x88,
10736  			[0x24] = 0xee, [0x25] = 0x99,
10737  			[0x26] = 0xff, [0x27] = 0xaa,
10738  		},
10739  		{ {0x40, 0xaa55bb66 } },
10740  	},
10741  	{
10742  		"LD_ABS word unaligned (addr & 3 == 2)",
10743  		.u.insns = {
10744  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
10745  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10746  		},
10747  		CLASSIC,
10748  		{
10749  			[0x1c] = 0xaa, [0x1d] = 0x55,
10750  			[0x1e] = 0xbb, [0x1f] = 0x66,
10751  			[0x20] = 0xcc, [0x21] = 0x77,
10752  			[0x22] = 0xdd, [0x23] = 0x88,
10753  			[0x24] = 0xee, [0x25] = 0x99,
10754  			[0x26] = 0xff, [0x27] = 0xaa,
10755  		},
10756  		{ {0x40, 0xdd88ee99 } },
10757  	},
10758  	{
10759  		"LD_ABS word unaligned (addr & 3 == 1)",
10760  		.u.insns = {
10761  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
10762  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10763  		},
10764  		CLASSIC,
10765  		{
10766  			[0x1c] = 0xaa, [0x1d] = 0x55,
10767  			[0x1e] = 0xbb, [0x1f] = 0x66,
10768  			[0x20] = 0xcc, [0x21] = 0x77,
10769  			[0x22] = 0xdd, [0x23] = 0x88,
10770  			[0x24] = 0xee, [0x25] = 0x99,
10771  			[0x26] = 0xff, [0x27] = 0xaa,
10772  		},
10773  		{ {0x40, 0x77dd88ee } },
10774  	},
10775  	{
10776  		"LD_ABS word unaligned (addr & 3 == 3)",
10777  		.u.insns = {
10778  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
10779  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10780  		},
10781  		CLASSIC,
10782  		{
10783  			[0x1c] = 0xaa, [0x1d] = 0x55,
10784  			[0x1e] = 0xbb, [0x1f] = 0x66,
10785  			[0x20] = 0xcc, [0x21] = 0x77,
10786  			[0x22] = 0xdd, [0x23] = 0x88,
10787  			[0x24] = 0xee, [0x25] = 0x99,
10788  			[0x26] = 0xff, [0x27] = 0xaa,
10789  		},
10790  		{ {0x40, 0x88ee99ff } },
10791  	},
10792  	{
10793  		"LD_ABS word positive offset, all ff",
10794  		.u.insns = {
10795  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
10796  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10797  		},
10798  		CLASSIC,
10799  		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10800  		{ {0x40, 0xffffffff } },
10801  	},
10802  	{
10803  		"LD_ABS word positive offset, out of bounds",
10804  		.u.insns = {
10805  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
10806  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10807  		},
10808  		CLASSIC,
10809  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10810  		{ {0x3f, 0 }, },
10811  	},
10812  	{
10813  		"LD_ABS word negative offset, out of bounds load",
10814  		.u.insns = {
10815  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
10816  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10817  		},
10818  		CLASSIC | FLAG_EXPECTED_FAIL,
10819  		.expected_errcode = -EINVAL,
10820  	},
10821  	{
10822  		"LD_ABS word negative offset, in bounds",
10823  		.u.insns = {
10824  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
10825  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10826  		},
10827  		CLASSIC,
10828  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10829  		{ {0x40, 0x25051982 }, },
10830  	},
10831  	{
10832  		"LD_ABS word negative offset, out of bounds",
10833  		.u.insns = {
10834  			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
10835  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10836  		},
10837  		CLASSIC,
10838  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10839  		{ {0x3f, 0 }, },
10840  	},
10841  	{
10842  		"LDX_MSH standalone, preserved A",
10843  		.u.insns = {
10844  			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10845  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10846  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10847  		},
10848  		CLASSIC,
10849  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10850  		{ {0x40, 0xffeebbaa }, },
10851  	},
10852  	{
10853  		"LDX_MSH standalone, preserved A 2",
10854  		.u.insns = {
10855  			BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
10856  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10857  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
10858  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
10859  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
10860  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10861  		},
10862  		CLASSIC,
10863  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10864  		{ {0x40, 0x175e9d63 }, },
10865  	},
10866  	{
10867  		"LDX_MSH standalone, test result 1",
10868  		.u.insns = {
10869  			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10870  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10871  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10872  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10873  		},
10874  		CLASSIC,
10875  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10876  		{ {0x40, 0x14 }, },
10877  	},
10878  	{
10879  		"LDX_MSH standalone, test result 2",
10880  		.u.insns = {
10881  			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10882  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
10883  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10884  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10885  		},
10886  		CLASSIC,
10887  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10888  		{ {0x40, 0x24 }, },
10889  	},
10890  	{
10891  		"LDX_MSH standalone, negative offset",
10892  		.u.insns = {
10893  			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10894  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
10895  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10896  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10897  		},
10898  		CLASSIC,
10899  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10900  		{ {0x40, 0 }, },
10901  	},
10902  	{
10903  		"LDX_MSH standalone, negative offset 2",
10904  		.u.insns = {
10905  			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10906  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
10907  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10908  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10909  		},
10910  		CLASSIC,
10911  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10912  		{ {0x40, 0x24 }, },
10913  	},
10914  	{
10915  		"LDX_MSH standalone, out of bounds",
10916  		.u.insns = {
10917  			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10918  			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
10919  			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10920  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10921  		},
10922  		CLASSIC,
10923  		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10924  		{ {0x40, 0 }, },
10925  	},
10926  	/*
10927  	 * verify that the interpreter or JIT correctly sets A and X
10928  	 * to 0.
10929  	 */
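	/*
	 * In classic BPF both A and X start out as 0. Each test below
	 * therefore uses the still-uninitialised register as an operand
	 * and checks the result against arithmetic with 0; note that
	 * DIV/MOD with X == 0 aborts the filter, which then returns 0.
	 */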
10930  	{
10931  		"ADD default X",
10932  		.u.insns = {
10933  			/*
10934  			 * A = 0x42
10935  			 * A = A + X
10936  			 * ret A
10937  			 */
10938  			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10939  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
10940  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10941  		},
10942  		CLASSIC | FLAG_NO_DATA,
10943  		{},
10944  		{ {0x1, 0x42 } },
10945  	},
10946  	{
10947  		"ADD default A",
10948  		.u.insns = {
10949  			/*
10950  			 * A = A + 0x42
10951  			 * ret A
10952  			 */
10953  			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
10954  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10955  		},
10956  		CLASSIC | FLAG_NO_DATA,
10957  		{},
10958  		{ {0x1, 0x42 } },
10959  	},
10960  	{
10961  		"SUB default X",
10962  		.u.insns = {
10963  			/*
10964  			 * A = 0x66
10965  			 * A = A - X
10966  			 * ret A
10967  			 */
10968  			BPF_STMT(BPF_LD | BPF_IMM, 0x66),
10969  			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
10970  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10971  		},
10972  		CLASSIC | FLAG_NO_DATA,
10973  		{},
10974  		{ {0x1, 0x66 } },
10975  	},
10976  	{
10977  		"SUB default A",
10978  		.u.insns = {
10979  			/*
10980  			 * A = A - -0x66
10981  			 * ret A
10982  			 */
10983  			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
10984  			BPF_STMT(BPF_RET | BPF_A, 0x0),
10985  		},
10986  		CLASSIC | FLAG_NO_DATA,
10987  		{},
10988  		{ {0x1, 0x66 } },
10989  	},
10990  	{
10991  		"MUL default X",
10992  		.u.insns = {
10993  			/*
10994  			 * A = 0x42
10995  			 * A = A * X
10996  			 * ret A
10997  			 */
10998  			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10999  			BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
11000  			BPF_STMT(BPF_RET | BPF_A, 0x0),
11001  		},
11002  		CLASSIC | FLAG_NO_DATA,
11003  		{},
11004  		{ {0x1, 0x0 } },
11005  	},
11006  	{
11007  		"MUL default A",
11008  		.u.insns = {
11009  			/*
11010  			 * A = A * 0x66
11011  			 * ret A
11012  			 */
11013  			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
11014  			BPF_STMT(BPF_RET | BPF_A, 0x0),
11015  		},
11016  		CLASSIC | FLAG_NO_DATA,
11017  		{},
11018  		{ {0x1, 0x0 } },
11019  	},
11020  	{
11021  		"DIV default X",
11022  		.u.insns = {
11023  			/*
11024  			 * A = 0x42
11025  			 * A = A / X ; this halts the filter execution if X is 0
11026  			 * ret 0x42
11027  			 */
11028  			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11029  			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
11030  			BPF_STMT(BPF_RET | BPF_K, 0x42),
11031  		},
11032  		CLASSIC | FLAG_NO_DATA,
11033  		{},
11034  		{ {0x1, 0x0 } },
11035  	},
11036  	{
11037  		"DIV default A",
11038  		.u.insns = {
11039  			/*
11040  			 * A = A / 1
11041  			 * ret A
11042  			 */
11043  			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
11044  			BPF_STMT(BPF_RET | BPF_A, 0x0),
11045  		},
11046  		CLASSIC | FLAG_NO_DATA,
11047  		{},
11048  		{ {0x1, 0x0 } },
11049  	},
11050  	{
11051  		"MOD default X",
11052  		.u.insns = {
11053  			/*
11054  			 * A = 0x42
11055  			 * A = A mod X ; this halts the filter execution if X is 0
11056  			 * ret 0x42
11057  			 */
11058  			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11059  			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
11060  			BPF_STMT(BPF_RET | BPF_K, 0x42),
11061  		},
11062  		CLASSIC | FLAG_NO_DATA,
11063  		{},
11064  		{ {0x1, 0x0 } },
11065  	},
11066  	{
11067  		"MOD default A",
11068  		.u.insns = {
11069  			/*
11070  			 * A = A mod 1
11071  			 * ret A
11072  			 */
11073  			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
11074  			BPF_STMT(BPF_RET | BPF_A, 0x0),
11075  		},
11076  		CLASSIC | FLAG_NO_DATA,
11077  		{},
11078  		{ {0x1, 0x0 } },
11079  	},
11080  	{
11081  		"JMP EQ default A",
11082  		.u.insns = {
11083  			/*
11084  			 * cmp A, 0x0, 0, 1
11085  			 * ret 0x42
11086  			 * ret 0x66
11087  			 */
11088  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
11089  			BPF_STMT(BPF_RET | BPF_K, 0x42),
11090  			BPF_STMT(BPF_RET | BPF_K, 0x66),
11091  		},
11092  		CLASSIC | FLAG_NO_DATA,
11093  		{},
11094  		{ {0x1, 0x42 } },
11095  	},
11096  	{
11097  		"JMP EQ default X",
11098  		.u.insns = {
11099  			/*
11100  			 * A = 0x0
11101  			 * cmp A, X, 0, 1
11102  			 * ret 0x42
11103  			 * ret 0x66
11104  			 */
11105  			BPF_STMT(BPF_LD | BPF_IMM, 0x0),
11106  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
11107  			BPF_STMT(BPF_RET | BPF_K, 0x42),
11108  			BPF_STMT(BPF_RET | BPF_K, 0x66),
11109  		},
11110  		CLASSIC | FLAG_NO_DATA,
11111  		{},
11112  		{ {0x1, 0x42 } },
11113  	},
11114  	/* Checking interpreter vs JIT wrt signed extended imms. */
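	/*
	 * The 32-bit immediate of a BPF_JMP comparison is sign-extended
	 * to 64 bits. A register holding the zero-extended 32-bit value
	 * 0xfefb0000 thus compares unequal to the immediate 0xfefb0000
	 * (really 0xfffffffffefb0000), whereas a register loaded with the
	 * sign-extended value via ldimm64 compares equal.
	 */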
11115  	{
11116  		"JNE signed compare, test 1",
11117  		.u.insns_int = {
11118  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11119  			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11120  			BPF_MOV64_REG(R2, R1),
11121  			BPF_ALU64_REG(BPF_AND, R2, R3),
11122  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11123  			BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
11124  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11125  			BPF_EXIT_INSN(),
11126  		},
11127  		INTERNAL,
11128  		{ },
11129  		{ { 0, 1 } },
11130  	},
11131  	{
11132  		"JNE signed compare, test 2",
11133  		.u.insns_int = {
11134  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11135  			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11136  			BPF_MOV64_REG(R2, R1),
11137  			BPF_ALU64_REG(BPF_AND, R2, R3),
11138  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11139  			BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
11140  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11141  			BPF_EXIT_INSN(),
11142  		},
11143  		INTERNAL,
11144  		{ },
11145  		{ { 0, 1 } },
11146  	},
11147  	{
11148  		"JNE signed compare, test 3",
11149  		.u.insns_int = {
11150  			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11151  			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11152  			BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
11153  			BPF_MOV64_REG(R2, R1),
11154  			BPF_ALU64_REG(BPF_AND, R2, R3),
11155  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11156  			BPF_JMP_REG(BPF_JNE, R2, R4, 1),
11157  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11158  			BPF_EXIT_INSN(),
11159  		},
11160  		INTERNAL,
11161  		{ },
11162  		{ { 0, 2 } },
11163  	},
11164  	{
11165  		"JNE signed compare, test 4",
11166  		.u.insns_int = {
11167  			BPF_LD_IMM64(R1, -17104896),
11168  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11169  			BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
11170  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11171  			BPF_EXIT_INSN(),
11172  		},
11173  		INTERNAL,
11174  		{ },
11175  		{ { 0, 2 } },
11176  	},
11177  	{
11178  		"JNE signed compare, test 5",
11179  		.u.insns_int = {
11180  			BPF_LD_IMM64(R1, 0xfefb0000),
11181  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11182  			BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
11183  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11184  			BPF_EXIT_INSN(),
11185  		},
11186  		INTERNAL,
11187  		{ },
11188  		{ { 0, 1 } },
11189  	},
11190  	{
11191  		"JNE signed compare, test 6",
11192  		.u.insns_int = {
11193  			BPF_LD_IMM64(R1, 0x7efb0000),
11194  			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11195  			BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
11196  			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11197  			BPF_EXIT_INSN(),
11198  		},
11199  		INTERNAL,
11200  		{ },
11201  		{ { 0, 2 } },
11202  	},
11203  	{
11204  		"JNE signed compare, test 7",
11205  		.u.insns = {
11206  			BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
11207  			BPF_STMT(BPF_MISC | BPF_TAX, 0),
11208  			BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
11209  			BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
11210  			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
11211  			BPF_STMT(BPF_RET | BPF_K, 1),
11212  			BPF_STMT(BPF_RET | BPF_K, 2),
11213  		},
11214  		CLASSIC | FLAG_NO_DATA,
11215  		{},
11216  		{ { 0, 2 } },
11217  	},
11218  	/* BPF_LDX_MEM with operand aliasing */
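	/*
	 * In these tests the destination of the load is also the register
	 * holding the base address (dst == src), so the address must be
	 * consumed before the destination is overwritten. The DW variant
	 * reduces the 64-bit result to a 32-bit return value by
	 * subtracting the stored pattern and OR-ing in the upper half,
	 * expecting 0 on success.
	 */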
11219  	{
11220  		"LDX_MEM_B: operand register aliasing",
11221  		.u.insns_int = {
11222  			BPF_ST_MEM(BPF_B, R10, -8, 123),
11223  			BPF_MOV64_REG(R0, R10),
11224  			BPF_LDX_MEM(BPF_B, R0, R0, -8),
11225  			BPF_EXIT_INSN(),
11226  		},
11227  		INTERNAL,
11228  		{ },
11229  		{ { 0, 123 } },
11230  		.stack_depth = 8,
11231  	},
11232  	{
11233  		"LDX_MEM_H: operand register aliasing",
11234  		.u.insns_int = {
11235  			BPF_ST_MEM(BPF_H, R10, -8, 12345),
11236  			BPF_MOV64_REG(R0, R10),
11237  			BPF_LDX_MEM(BPF_H, R0, R0, -8),
11238  			BPF_EXIT_INSN(),
11239  		},
11240  		INTERNAL,
11241  		{ },
11242  		{ { 0, 12345 } },
11243  		.stack_depth = 8,
11244  	},
11245  	{
11246  		"LDX_MEM_W: operand register aliasing",
11247  		.u.insns_int = {
11248  			BPF_ST_MEM(BPF_W, R10, -8, 123456789),
11249  			BPF_MOV64_REG(R0, R10),
11250  			BPF_LDX_MEM(BPF_W, R0, R0, -8),
11251  			BPF_EXIT_INSN(),
11252  		},
11253  		INTERNAL,
11254  		{ },
11255  		{ { 0, 123456789 } },
11256  		.stack_depth = 8,
11257  	},
11258  	{
11259  		"LDX_MEM_DW: operand register aliasing",
11260  		.u.insns_int = {
11261  			BPF_LD_IMM64(R1, 0x123456789abcdefULL),
11262  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
11263  			BPF_MOV64_REG(R0, R10),
11264  			BPF_LDX_MEM(BPF_DW, R0, R0, -8),
11265  			BPF_ALU64_REG(BPF_SUB, R0, R1),
11266  			BPF_MOV64_REG(R1, R0),
11267  			BPF_ALU64_IMM(BPF_RSH, R1, 32),
11268  			BPF_ALU64_REG(BPF_OR, R0, R1),
11269  			BPF_EXIT_INSN(),
11270  		},
11271  		INTERNAL,
11272  		{ },
11273  		{ { 0, 0 } },
11274  		.stack_depth = 8,
11275  	},
11276  	/*
11277  	 * Register (non-)clobbering tests for the case where a JIT implements
11278  	 * complex ALU or ATOMIC operations via function calls. If so, the
11279  	 * function call must be transparent to the eBPF registers. The JIT
11280  	 * must therefore save and restore relevant registers across the call.
11281  	 * The following tests check that the eBPF registers retain their
11282  	 * values after such an operation. Mainly intended for complex ALU
11283  	 * and atomic operations, but we run it for all. You never know...
11284  	 *
11285  	 * Note that each operation should be tested twice with different
11286  	 * destinations, to check preservation for all registers.
11287  	 */
11288  #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src)			\
11289  	{							\
11290  		#alu "_" #op " to " #dst ": no clobbering",	\
11291  		.u.insns_int = {				\
11292  			BPF_ALU64_IMM(BPF_MOV, R0, R0),		\
11293  			BPF_ALU64_IMM(BPF_MOV, R1, R1),		\
11294  			BPF_ALU64_IMM(BPF_MOV, R2, R2),		\
11295  			BPF_ALU64_IMM(BPF_MOV, R3, R3),		\
11296  			BPF_ALU64_IMM(BPF_MOV, R4, R4),		\
11297  			BPF_ALU64_IMM(BPF_MOV, R5, R5),		\
11298  			BPF_ALU64_IMM(BPF_MOV, R6, R6),		\
11299  			BPF_ALU64_IMM(BPF_MOV, R7, R7),		\
11300  			BPF_ALU64_IMM(BPF_MOV, R8, R8),		\
11301  			BPF_ALU64_IMM(BPF_MOV, R9, R9),		\
11302  			BPF_##alu(BPF_ ##op, dst, src),		\
11303  			BPF_ALU32_IMM(BPF_MOV, dst, dst),	\
11304  			BPF_JMP_IMM(BPF_JNE, R0, R0, 10),	\
11305  			BPF_JMP_IMM(BPF_JNE, R1, R1, 9),	\
11306  			BPF_JMP_IMM(BPF_JNE, R2, R2, 8),	\
11307  			BPF_JMP_IMM(BPF_JNE, R3, R3, 7),	\
11308  			BPF_JMP_IMM(BPF_JNE, R4, R4, 6),	\
11309  			BPF_JMP_IMM(BPF_JNE, R5, R5, 5),	\
11310  			BPF_JMP_IMM(BPF_JNE, R6, R6, 4),	\
11311  			BPF_JMP_IMM(BPF_JNE, R7, R7, 3),	\
11312  			BPF_JMP_IMM(BPF_JNE, R8, R8, 2),	\
11313  			BPF_JMP_IMM(BPF_JNE, R9, R9, 1),	\
11314  			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
11315  			BPF_EXIT_INSN(),			\
11316  		},						\
11317  		INTERNAL,					\
11318  		{ },						\
11319  		{ { 0, 1 } }					\
11320  	}
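	/*
	 * How the macro above works: R<n> expands to register number n,
	 * so BPF_ALU64_IMM(BPF_MOV, Rn, Rn) loads the immediate value n
	 * into register Rn.  After the operation under test, the
	 * destination is reset to its own number with a 32-bit move, and
	 * each register is then compared against its number; any mismatch
	 * branches straight to the exit, skipping the final "R0 = 1", so
	 * the expected result of 1 is only produced when no register was
	 * clobbered.  As an illustration, the invocation
	 * BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789) places
	 *
	 *	BPF_ALU64_IMM(BPF_AND, R8, 123456789),
	 *	BPF_ALU32_IMM(BPF_MOV, R8, R8),
	 *
	 * between the preamble and the checks.
	 */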
11321  	/* ALU64 immediate operations, register clobbering */
11322  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
11323  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
11324  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
11325  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
11326  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
11327  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
11328  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
11329  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
11330  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
11331  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
11332  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
11333  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
11334  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
11335  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
11336  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
11337  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
11338  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
11339  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
11340  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
11341  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
11342  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
11343  	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
11344  	/* ALU32 immediate operations, register clobbering */
11345  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
11346  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
11347  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
11348  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
11349  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
11350  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
11351  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
11352  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
11353  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
11354  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
11355  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
11356  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
11357  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
11358  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
11359  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
11360  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
11361  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
11362  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
11363  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
11364  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
11365  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
11366  	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
11367  	/* ALU64 register operations, register clobbering */
11368  	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
11369  	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
11370  	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
11371  	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
11372  	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
11373  	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
11374  	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
11375  	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
11376  	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
11377  	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
11378  	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
11379  	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
11380  	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
11381  	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
11382  	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
11383  	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
11384  	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
11385  	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
11386  	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
11387  	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
11388  	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
11389  	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
11390  	/* ALU32 register operations, register clobbering */
11391  	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
11392  	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
11393  	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
11394  	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
11395  	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
11396  	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
11397  	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
11398  	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
11399  	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
11400  	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
11401  	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
11402  	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
11403  	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
11404  	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
11405  	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
11406  	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
11407  	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
11408  	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
11409  	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
11410  	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
11411  	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
11412  	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
11413  #undef BPF_TEST_CLOBBER_ALU
11414  #define BPF_TEST_CLOBBER_ATOMIC(width, op)			\
11415  	{							\
11416  		"Atomic_" #width " " #op ": no clobbering",	\
11417  		.u.insns_int = {				\
11418  			BPF_ALU64_IMM(BPF_MOV, R0, 0),		\
11419  			BPF_ALU64_IMM(BPF_MOV, R1, 1),		\
11420  			BPF_ALU64_IMM(BPF_MOV, R2, 2),		\
11421  			BPF_ALU64_IMM(BPF_MOV, R3, 3),		\
11422  			BPF_ALU64_IMM(BPF_MOV, R4, 4),		\
11423  			BPF_ALU64_IMM(BPF_MOV, R5, 5),		\
11424  			BPF_ALU64_IMM(BPF_MOV, R6, 6),		\
11425  			BPF_ALU64_IMM(BPF_MOV, R7, 7),		\
11426  			BPF_ALU64_IMM(BPF_MOV, R8, 8),		\
11427  			BPF_ALU64_IMM(BPF_MOV, R9, 9),		\
11428  			BPF_ST_MEM(width, R10, -8,		\
11429  				   (op) == BPF_CMPXCHG ? 0 :	\
11430  				   (op) & BPF_FETCH ? 1 : 0),	\
11431  			BPF_ATOMIC_OP(width, op, R10, R1, -8),	\
11432  			BPF_JMP_IMM(BPF_JNE, R0, 0, 10),	\
11433  			BPF_JMP_IMM(BPF_JNE, R1, 1, 9),		\
11434  			BPF_JMP_IMM(BPF_JNE, R2, 2, 8),		\
11435  			BPF_JMP_IMM(BPF_JNE, R3, 3, 7),		\
11436  			BPF_JMP_IMM(BPF_JNE, R4, 4, 6),		\
11437  			BPF_JMP_IMM(BPF_JNE, R5, 5, 5),		\
11438  			BPF_JMP_IMM(BPF_JNE, R6, 6, 4),		\
11439  			BPF_JMP_IMM(BPF_JNE, R7, 7, 3),		\
11440  			BPF_JMP_IMM(BPF_JNE, R8, 8, 2),		\
11441  			BPF_JMP_IMM(BPF_JNE, R9, 9, 1),		\
11442  			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
11443  			BPF_EXIT_INSN(),			\
11444  		},						\
11445  		INTERNAL,					\
11446  		{ },						\
11447  		{ { 0, 1 } },					\
11448  		.stack_depth = 8,				\
11449  	}
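	/*
	 * The initial value stored to the stack above ensures that any
	 * register which legitimately receives a result from the atomic
	 * operation ends up holding exactly the value it is later checked
	 * against: BPF_CMPXCHG writes the old memory value (0) into R0,
	 * and the fetch variants, including BPF_XCHG, write the old
	 * memory value (1) into the source register R1.  The plain
	 * ADD/AND/OR/XOR forms write nothing back to a register, so the
	 * stored value does not matter for them.
	 */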
11450  	/* 64-bit atomic operations, register clobbering */
11451  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
11452  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
11453  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
11454  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
11455  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
11456  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
11457  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
11458  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
11459  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
11460  	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
11461  	/* 32-bit atomic operations, register clobbering */
11462  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
11463  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
11464  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
11465  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
11466  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
11467  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
11468  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
11469  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
11470  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
11471  	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
11472  #undef BPF_TEST_CLOBBER_ATOMIC
11473  	/* Checking that ALU32 src is not zero extended in place */
11474  #define BPF_ALU32_SRC_ZEXT(op)					\
11475  	{							\
11476  		"ALU32_" #op "_X: src preserved in zext",	\
11477  		.u.insns_int = {				\
11478  			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
11479  			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
11480  			BPF_ALU64_REG(BPF_MOV, R0, R1),		\
11481  			BPF_ALU32_REG(BPF_##op, R2, R1),	\
11482  			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
11483  			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11484  			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
11485  			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11486  			BPF_EXIT_INSN(),			\
11487  		},						\
11488  		INTERNAL,					\
11489  		{ },						\
11490  		{ { 0, 0 } },					\
11491  	}
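	/*
	 * The concern here is a JIT that zero extends the 32-bit source
	 * operand in place, wiping out the upper half of R1.  R0 holds a
	 * full 64-bit snapshot of R1 taken before the ALU32 operation; if
	 * R1 is intact afterwards, R0 - R1 is zero in both halves and the
	 * folded 32-bit result is 0, matching the expected value.
	 */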
11492  	BPF_ALU32_SRC_ZEXT(MOV),
11493  	BPF_ALU32_SRC_ZEXT(AND),
11494  	BPF_ALU32_SRC_ZEXT(OR),
11495  	BPF_ALU32_SRC_ZEXT(XOR),
11496  	BPF_ALU32_SRC_ZEXT(ADD),
11497  	BPF_ALU32_SRC_ZEXT(SUB),
11498  	BPF_ALU32_SRC_ZEXT(MUL),
11499  	BPF_ALU32_SRC_ZEXT(DIV),
11500  	BPF_ALU32_SRC_ZEXT(MOD),
11501  #undef BPF_ALU32_SRC_ZEXT
11502  	/* Checking that ATOMIC32 src is not zero extended in place */
11503  #define BPF_ATOMIC32_SRC_ZEXT(op)					\
11504  	{								\
11505  		"ATOMIC_W_" #op ": src preserved in zext",		\
11506  		.u.insns_int = {					\
11507  			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),	\
11508  			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
11509  			BPF_ST_MEM(BPF_W, R10, -4, 0),			\
11510  			BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4),	\
11511  			BPF_ALU64_REG(BPF_SUB, R0, R1),			\
11512  			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
11513  			BPF_ALU64_IMM(BPF_RSH, R1, 32),			\
11514  			BPF_ALU64_REG(BPF_OR, R0, R1),			\
11515  			BPF_EXIT_INSN(),				\
11516  		},							\
11517  		INTERNAL,						\
11518  		{ },							\
11519  		{ { 0, 0 } },						\
11520  		.stack_depth = 8,					\
11521  	}
11522  	BPF_ATOMIC32_SRC_ZEXT(ADD),
11523  	BPF_ATOMIC32_SRC_ZEXT(AND),
11524  	BPF_ATOMIC32_SRC_ZEXT(OR),
11525  	BPF_ATOMIC32_SRC_ZEXT(XOR),
11526  #undef BPF_ATOMIC32_SRC_ZEXT
11527  	/* Checking that CMPXCHG32 src is not zero extended in place */
11528  	{
11529  		"ATOMIC_W_CMPXCHG: src preserved in zext",
11530  		.u.insns_int = {
11531  			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
11532  			BPF_ALU64_REG(BPF_MOV, R2, R1),
11533  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
11534  			BPF_ST_MEM(BPF_W, R10, -4, 0),
11535  			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
11536  			BPF_ALU64_REG(BPF_SUB, R1, R2),
11537  			BPF_ALU64_REG(BPF_MOV, R2, R1),
11538  			BPF_ALU64_IMM(BPF_RSH, R2, 32),
11539  			BPF_ALU64_REG(BPF_OR, R1, R2),
11540  			BPF_ALU64_REG(BPF_MOV, R0, R1),
11541  			BPF_EXIT_INSN(),
11542  		},
11543  		INTERNAL,
11544  		{ },
11545  		{ { 0, 0 } },
11546  		.stack_depth = 8,
11547  	},
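	/*
	 * Unlike the BPF_ATOMIC32_SRC_ZEXT cases above, this test is
	 * written out explicitly: BPF_CMPXCHG writes the old memory value
	 * into R0, so R0 cannot double as the snapshot register.  The
	 * snapshot lives in R2 instead, the folded difference is computed
	 * in R1, and the result is only moved into R0 right before the
	 * exit.
	 */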
11548  	/* Checking that JMP32 immediate dst is not zero extended in place */
11549  #define BPF_JMP32_IMM_ZEXT(op)					\
11550  	{							\
11551  		"JMP32_" #op "_K: operand preserved in zext",	\
11552  		.u.insns_int = {				\
11553  			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
11554  			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11555  			BPF_JMP32_IMM(BPF_##op, R0, 1234, 1),	\
11556  			BPF_JMP_A(0), /* Nop */			\
11557  			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
11558  			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11559  			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
11560  			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11561  			BPF_EXIT_INSN(),			\
11562  		},						\
11563  		INTERNAL,					\
11564  		{ },						\
11565  		{ { 0, 0 } },					\
11566  	}
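	/*
	 * In the programs generated above, the branch offset of 1 only
	 * skips the following no-op jump, so execution reaches the check
	 * whether or not the 32-bit comparison is taken.  The check then
	 * verifies that evaluating the JMP32 condition did not zero
	 * extend R0 in place: R1 holds a 64-bit snapshot of R0, and the
	 * folded difference must be 0.
	 */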
11567  	BPF_JMP32_IMM_ZEXT(JEQ),
11568  	BPF_JMP32_IMM_ZEXT(JNE),
11569  	BPF_JMP32_IMM_ZEXT(JSET),
11570  	BPF_JMP32_IMM_ZEXT(JGT),
11571  	BPF_JMP32_IMM_ZEXT(JGE),
11572  	BPF_JMP32_IMM_ZEXT(JLT),
11573  	BPF_JMP32_IMM_ZEXT(JLE),
11574  	BPF_JMP32_IMM_ZEXT(JSGT),
11575  	BPF_JMP32_IMM_ZEXT(JSGE),
11577  	BPF_JMP32_IMM_ZEXT(JSLT),
11578  	BPF_JMP32_IMM_ZEXT(JSLE),
11579  #undef BPF_JMP32_IMM_ZEXT
11580  	/* Checking that JMP32 dst & src are not zero extended in place */
11581  #define BPF_JMP32_REG_ZEXT(op)					\
11582  	{							\
11583  		"JMP32_" #op "_X: operands preserved in zext",	\
11584  		.u.insns_int = {				\
11585  			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
11586  			BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
11587  			BPF_ALU64_REG(BPF_MOV, R2, R0),		\
11588  			BPF_ALU64_REG(BPF_MOV, R3, R1),		\
11589  			BPF_JMP32_REG(BPF_##op, R0, R1, 1),	\
11590  			BPF_JMP_A(0), /* Nop */			\
11591  			BPF_ALU64_REG(BPF_SUB, R0, R2),		\
11592  			BPF_ALU64_REG(BPF_SUB, R1, R3),		\
11593  			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11594  			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11595  			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
11596  			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11597  			BPF_EXIT_INSN(),			\
11598  		},						\
11599  		INTERNAL,					\
11600  		{ },						\
11601  		{ { 0, 0 } },					\
11602  	}
11603  	BPF_JMP32_REG_ZEXT(JEQ),
11604  	BPF_JMP32_REG_ZEXT(JNE),
11605  	BPF_JMP32_REG_ZEXT(JSET),
11606  	BPF_JMP32_REG_ZEXT(JGT),
11607  	BPF_JMP32_REG_ZEXT(JGE),
11608  	BPF_JMP32_REG_ZEXT(JLT),
11609  	BPF_JMP32_REG_ZEXT(JLE),
11610  	BPF_JMP32_REG_ZEXT(JSGT),
11611  	BPF_JMP32_REG_ZEXT(JSGE),
11613  	BPF_JMP32_REG_ZEXT(JSLT),
11614  	BPF_JMP32_REG_ZEXT(JSLE),
11615  #undef BPF_JMP32_REG_ZEXT
11616  	/* ALU64 K register combinations */
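	/*
	 * The entries below carry no inline instructions: each program is
	 * generated at run time by the .fill_helper routine named in the
	 * entry and is expected to return 1.  The same pattern is used by
	 * the remaining "register combinations", "magnitudes" and other
	 * generated tests that follow.
	 */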
11617  	{
11618  		"ALU64_MOV_K: registers",
11619  		{ },
11620  		INTERNAL,
11621  		{ },
11622  		{ { 0, 1 } },
11623  		.fill_helper = bpf_fill_alu64_mov_imm_regs,
11624  	},
11625  	{
11626  		"ALU64_AND_K: registers",
11627  		{ },
11628  		INTERNAL,
11629  		{ },
11630  		{ { 0, 1 } },
11631  		.fill_helper = bpf_fill_alu64_and_imm_regs,
11632  	},
11633  	{
11634  		"ALU64_OR_K: registers",
11635  		{ },
11636  		INTERNAL,
11637  		{ },
11638  		{ { 0, 1 } },
11639  		.fill_helper = bpf_fill_alu64_or_imm_regs,
11640  	},
11641  	{
11642  		"ALU64_XOR_K: registers",
11643  		{ },
11644  		INTERNAL,
11645  		{ },
11646  		{ { 0, 1 } },
11647  		.fill_helper = bpf_fill_alu64_xor_imm_regs,
11648  	},
11649  	{
11650  		"ALU64_LSH_K: registers",
11651  		{ },
11652  		INTERNAL,
11653  		{ },
11654  		{ { 0, 1 } },
11655  		.fill_helper = bpf_fill_alu64_lsh_imm_regs,
11656  	},
11657  	{
11658  		"ALU64_RSH_K: registers",
11659  		{ },
11660  		INTERNAL,
11661  		{ },
11662  		{ { 0, 1 } },
11663  		.fill_helper = bpf_fill_alu64_rsh_imm_regs,
11664  	},
11665  	{
11666  		"ALU64_ARSH_K: registers",
11667  		{ },
11668  		INTERNAL,
11669  		{ },
11670  		{ { 0, 1 } },
11671  		.fill_helper = bpf_fill_alu64_arsh_imm_regs,
11672  	},
11673  	{
11674  		"ALU64_ADD_K: registers",
11675  		{ },
11676  		INTERNAL,
11677  		{ },
11678  		{ { 0, 1 } },
11679  		.fill_helper = bpf_fill_alu64_add_imm_regs,
11680  	},
11681  	{
11682  		"ALU64_SUB_K: registers",
11683  		{ },
11684  		INTERNAL,
11685  		{ },
11686  		{ { 0, 1 } },
11687  		.fill_helper = bpf_fill_alu64_sub_imm_regs,
11688  	},
11689  	{
11690  		"ALU64_MUL_K: registers",
11691  		{ },
11692  		INTERNAL,
11693  		{ },
11694  		{ { 0, 1 } },
11695  		.fill_helper = bpf_fill_alu64_mul_imm_regs,
11696  	},
11697  	{
11698  		"ALU64_DIV_K: registers",
11699  		{ },
11700  		INTERNAL,
11701  		{ },
11702  		{ { 0, 1 } },
11703  		.fill_helper = bpf_fill_alu64_div_imm_regs,
11704  	},
11705  	{
11706  		"ALU64_MOD_K: registers",
11707  		{ },
11708  		INTERNAL,
11709  		{ },
11710  		{ { 0, 1 } },
11711  		.fill_helper = bpf_fill_alu64_mod_imm_regs,
11712  	},
11713  	/* ALU32 K register combinations */
11714  	{
11715  		"ALU32_MOV_K: registers",
11716  		{ },
11717  		INTERNAL,
11718  		{ },
11719  		{ { 0, 1 } },
11720  		.fill_helper = bpf_fill_alu32_mov_imm_regs,
11721  	},
11722  	{
11723  		"ALU32_AND_K: registers",
11724  		{ },
11725  		INTERNAL,
11726  		{ },
11727  		{ { 0, 1 } },
11728  		.fill_helper = bpf_fill_alu32_and_imm_regs,
11729  	},
11730  	{
11731  		"ALU32_OR_K: registers",
11732  		{ },
11733  		INTERNAL,
11734  		{ },
11735  		{ { 0, 1 } },
11736  		.fill_helper = bpf_fill_alu32_or_imm_regs,
11737  	},
11738  	{
11739  		"ALU32_XOR_K: registers",
11740  		{ },
11741  		INTERNAL,
11742  		{ },
11743  		{ { 0, 1 } },
11744  		.fill_helper = bpf_fill_alu32_xor_imm_regs,
11745  	},
11746  	{
11747  		"ALU32_LSH_K: registers",
11748  		{ },
11749  		INTERNAL,
11750  		{ },
11751  		{ { 0, 1 } },
11752  		.fill_helper = bpf_fill_alu32_lsh_imm_regs,
11753  	},
11754  	{
11755  		"ALU32_RSH_K: registers",
11756  		{ },
11757  		INTERNAL,
11758  		{ },
11759  		{ { 0, 1 } },
11760  		.fill_helper = bpf_fill_alu32_rsh_imm_regs,
11761  	},
11762  	{
11763  		"ALU32_ARSH_K: registers",
11764  		{ },
11765  		INTERNAL,
11766  		{ },
11767  		{ { 0, 1 } },
11768  		.fill_helper = bpf_fill_alu32_arsh_imm_regs,
11769  	},
11770  	{
11771  		"ALU32_ADD_K: registers",
11772  		{ },
11773  		INTERNAL,
11774  		{ },
11775  		{ { 0, 1 } },
11776  		.fill_helper = bpf_fill_alu32_add_imm_regs,
11777  	},
11778  	{
11779  		"ALU32_SUB_K: registers",
11780  		{ },
11781  		INTERNAL,
11782  		{ },
11783  		{ { 0, 1 } },
11784  		.fill_helper = bpf_fill_alu32_sub_imm_regs,
11785  	},
11786  	{
11787  		"ALU32_MUL_K: registers",
11788  		{ },
11789  		INTERNAL,
11790  		{ },
11791  		{ { 0, 1 } },
11792  		.fill_helper = bpf_fill_alu32_mul_imm_regs,
11793  	},
11794  	{
11795  		"ALU32_DIV_K: registers",
11796  		{ },
11797  		INTERNAL,
11798  		{ },
11799  		{ { 0, 1 } },
11800  		.fill_helper = bpf_fill_alu32_div_imm_regs,
11801  	},
11802  	{
11803  		"ALU32_MOD_K: registers",
11804  		{ },
11805  		INTERNAL,
11806  		{ },
11807  		{ { 0, 1 } },
11808  		.fill_helper = bpf_fill_alu32_mod_imm_regs,
11809  	},
11810  	/* ALU64 X register combinations */
11811  	{
11812  		"ALU64_MOV_X: register combinations",
11813  		{ },
11814  		INTERNAL,
11815  		{ },
11816  		{ { 0, 1 } },
11817  		.fill_helper = bpf_fill_alu64_mov_reg_pairs,
11818  	},
11819  	{
11820  		"ALU64_AND_X: register combinations",
11821  		{ },
11822  		INTERNAL,
11823  		{ },
11824  		{ { 0, 1 } },
11825  		.fill_helper = bpf_fill_alu64_and_reg_pairs,
11826  	},
11827  	{
11828  		"ALU64_OR_X: register combinations",
11829  		{ },
11830  		INTERNAL,
11831  		{ },
11832  		{ { 0, 1 } },
11833  		.fill_helper = bpf_fill_alu64_or_reg_pairs,
11834  	},
11835  	{
11836  		"ALU64_XOR_X: register combinations",
11837  		{ },
11838  		INTERNAL,
11839  		{ },
11840  		{ { 0, 1 } },
11841  		.fill_helper = bpf_fill_alu64_xor_reg_pairs,
11842  	},
11843  	{
11844  		"ALU64_LSH_X: register combinations",
11845  		{ },
11846  		INTERNAL,
11847  		{ },
11848  		{ { 0, 1 } },
11849  		.fill_helper = bpf_fill_alu64_lsh_reg_pairs,
11850  	},
11851  	{
11852  		"ALU64_RSH_X: register combinations",
11853  		{ },
11854  		INTERNAL,
11855  		{ },
11856  		{ { 0, 1 } },
11857  		.fill_helper = bpf_fill_alu64_rsh_reg_pairs,
11858  	},
11859  	{
11860  		"ALU64_ARSH_X: register combinations",
11861  		{ },
11862  		INTERNAL,
11863  		{ },
11864  		{ { 0, 1 } },
11865  		.fill_helper = bpf_fill_alu64_arsh_reg_pairs,
11866  	},
11867  	{
11868  		"ALU64_ADD_X: register combinations",
11869  		{ },
11870  		INTERNAL,
11871  		{ },
11872  		{ { 0, 1 } },
11873  		.fill_helper = bpf_fill_alu64_add_reg_pairs,
11874  	},
11875  	{
11876  		"ALU64_SUB_X: register combinations",
11877  		{ },
11878  		INTERNAL,
11879  		{ },
11880  		{ { 0, 1 } },
11881  		.fill_helper = bpf_fill_alu64_sub_reg_pairs,
11882  	},
11883  	{
11884  		"ALU64_MUL_X: register combinations",
11885  		{ },
11886  		INTERNAL,
11887  		{ },
11888  		{ { 0, 1 } },
11889  		.fill_helper = bpf_fill_alu64_mul_reg_pairs,
11890  	},
11891  	{
11892  		"ALU64_DIV_X: register combinations",
11893  		{ },
11894  		INTERNAL,
11895  		{ },
11896  		{ { 0, 1 } },
11897  		.fill_helper = bpf_fill_alu64_div_reg_pairs,
11898  	},
11899  	{
11900  		"ALU64_MOD_X: register combinations",
11901  		{ },
11902  		INTERNAL,
11903  		{ },
11904  		{ { 0, 1 } },
11905  		.fill_helper = bpf_fill_alu64_mod_reg_pairs,
11906  	},
11907  	/* ALU32 X register combinations */
11908  	{
11909  		"ALU32_MOV_X: register combinations",
11910  		{ },
11911  		INTERNAL,
11912  		{ },
11913  		{ { 0, 1 } },
11914  		.fill_helper = bpf_fill_alu32_mov_reg_pairs,
11915  	},
11916  	{
11917  		"ALU32_AND_X: register combinations",
11918  		{ },
11919  		INTERNAL,
11920  		{ },
11921  		{ { 0, 1 } },
11922  		.fill_helper = bpf_fill_alu32_and_reg_pairs,
11923  	},
11924  	{
11925  		"ALU32_OR_X: register combinations",
11926  		{ },
11927  		INTERNAL,
11928  		{ },
11929  		{ { 0, 1 } },
11930  		.fill_helper = bpf_fill_alu32_or_reg_pairs,
11931  	},
11932  	{
11933  		"ALU32_XOR_X: register combinations",
11934  		{ },
11935  		INTERNAL,
11936  		{ },
11937  		{ { 0, 1 } },
11938  		.fill_helper = bpf_fill_alu32_xor_reg_pairs,
11939  	},
11940  	{
11941  		"ALU32_LSH_X: register combinations",
11942  		{ },
11943  		INTERNAL,
11944  		{ },
11945  		{ { 0, 1 } },
11946  		.fill_helper = bpf_fill_alu32_lsh_reg_pairs,
11947  	},
11948  	{
11949  		"ALU32_RSH_X: register combinations",
11950  		{ },
11951  		INTERNAL,
11952  		{ },
11953  		{ { 0, 1 } },
11954  		.fill_helper = bpf_fill_alu32_rsh_reg_pairs,
11955  	},
11956  	{
11957  		"ALU32_ARSH_X: register combinations",
11958  		{ },
11959  		INTERNAL,
11960  		{ },
11961  		{ { 0, 1 } },
11962  		.fill_helper = bpf_fill_alu32_arsh_reg_pairs,
11963  	},
11964  	{
11965  		"ALU32_ADD_X: register combinations",
11966  		{ },
11967  		INTERNAL,
11968  		{ },
11969  		{ { 0, 1 } },
11970  		.fill_helper = bpf_fill_alu32_add_reg_pairs,
11971  	},
11972  	{
11973  		"ALU32_SUB_X: register combinations",
11974  		{ },
11975  		INTERNAL,
11976  		{ },
11977  		{ { 0, 1 } },
11978  		.fill_helper = bpf_fill_alu32_sub_reg_pairs,
11979  	},
11980  	{
11981  		"ALU32_MUL_X: register combinations",
11982  		{ },
11983  		INTERNAL,
11984  		{ },
11985  		{ { 0, 1 } },
11986  		.fill_helper = bpf_fill_alu32_mul_reg_pairs,
11987  	},
11988  	{
11989  		"ALU32_DIV_X: register combinations",
11990  		{ },
11991  		INTERNAL,
11992  		{ },
11993  		{ { 0, 1 } },
11994  		.fill_helper = bpf_fill_alu32_div_reg_pairs,
11995  	},
11996  	{
11997  		"ALU32_MOD_X: register combinations",
11998  		{ },
11999  		INTERNAL,
12000  		{ },
12001  		{ { 0, 1 } },
12002  		.fill_helper = bpf_fill_alu32_mod_reg_pairs,
12003  	},
12004  	/* Exhaustive test of ALU64 shift operations */
12005  	{
12006  		"ALU64_LSH_K: all shift values",
12007  		{ },
12008  		INTERNAL | FLAG_NO_DATA,
12009  		{ },
12010  		{ { 0, 1 } },
12011  		.fill_helper = bpf_fill_alu64_lsh_imm,
12012  	},
12013  	{
12014  		"ALU64_RSH_K: all shift values",
12015  		{ },
12016  		INTERNAL | FLAG_NO_DATA,
12017  		{ },
12018  		{ { 0, 1 } },
12019  		.fill_helper = bpf_fill_alu64_rsh_imm,
12020  	},
12021  	{
12022  		"ALU64_ARSH_K: all shift values",
12023  		{ },
12024  		INTERNAL | FLAG_NO_DATA,
12025  		{ },
12026  		{ { 0, 1 } },
12027  		.fill_helper = bpf_fill_alu64_arsh_imm,
12028  	},
12029  	{
12030  		"ALU64_LSH_X: all shift values",
12031  		{ },
12032  		INTERNAL | FLAG_NO_DATA,
12033  		{ },
12034  		{ { 0, 1 } },
12035  		.fill_helper = bpf_fill_alu64_lsh_reg,
12036  	},
12037  	{
12038  		"ALU64_RSH_X: all shift values",
12039  		{ },
12040  		INTERNAL | FLAG_NO_DATA,
12041  		{ },
12042  		{ { 0, 1 } },
12043  		.fill_helper = bpf_fill_alu64_rsh_reg,
12044  	},
12045  	{
12046  		"ALU64_ARSH_X: all shift values",
12047  		{ },
12048  		INTERNAL | FLAG_NO_DATA,
12049  		{ },
12050  		{ { 0, 1 } },
12051  		.fill_helper = bpf_fill_alu64_arsh_reg,
12052  	},
12053  	/* Exhaustive test of ALU32 shift operations */
12054  	{
12055  		"ALU32_LSH_K: all shift values",
12056  		{ },
12057  		INTERNAL | FLAG_NO_DATA,
12058  		{ },
12059  		{ { 0, 1 } },
12060  		.fill_helper = bpf_fill_alu32_lsh_imm,
12061  	},
12062  	{
12063  		"ALU32_RSH_K: all shift values",
12064  		{ },
12065  		INTERNAL | FLAG_NO_DATA,
12066  		{ },
12067  		{ { 0, 1 } },
12068  		.fill_helper = bpf_fill_alu32_rsh_imm,
12069  	},
12070  	{
12071  		"ALU32_ARSH_K: all shift values",
12072  		{ },
12073  		INTERNAL | FLAG_NO_DATA,
12074  		{ },
12075  		{ { 0, 1 } },
12076  		.fill_helper = bpf_fill_alu32_arsh_imm,
12077  	},
12078  	{
12079  		"ALU32_LSH_X: all shift values",
12080  		{ },
12081  		INTERNAL | FLAG_NO_DATA,
12082  		{ },
12083  		{ { 0, 1 } },
12084  		.fill_helper = bpf_fill_alu32_lsh_reg,
12085  	},
12086  	{
12087  		"ALU32_RSH_X: all shift values",
12088  		{ },
12089  		INTERNAL | FLAG_NO_DATA,
12090  		{ },
12091  		{ { 0, 1 } },
12092  		.fill_helper = bpf_fill_alu32_rsh_reg,
12093  	},
12094  	{
12095  		"ALU32_ARSH_X: all shift values",
12096  		{ },
12097  		INTERNAL | FLAG_NO_DATA,
12098  		{ },
12099  		{ { 0, 1 } },
12100  		.fill_helper = bpf_fill_alu32_arsh_reg,
12101  	},
12102  	/*
12103  	 * Exhaustive test of ALU64 shift operations when the
12104  	 * source and destination registers are the same.
12105  	 */
12106  	{
12107  		"ALU64_LSH_X: all shift values with the same register",
12108  		{ },
12109  		INTERNAL | FLAG_NO_DATA,
12110  		{ },
12111  		{ { 0, 1 } },
12112  		.fill_helper = bpf_fill_alu64_lsh_same_reg,
12113  	},
12114  	{
12115  		"ALU64_RSH_X: all shift values with the same register",
12116  		{ },
12117  		INTERNAL | FLAG_NO_DATA,
12118  		{ },
12119  		{ { 0, 1 } },
12120  		.fill_helper = bpf_fill_alu64_rsh_same_reg,
12121  	},
12122  	{
12123  		"ALU64_ARSH_X: all shift values with the same register",
12124  		{ },
12125  		INTERNAL | FLAG_NO_DATA,
12126  		{ },
12127  		{ { 0, 1 } },
12128  		.fill_helper = bpf_fill_alu64_arsh_same_reg,
12129  	},
12130  	/*
12131  	 * Exhaustive test of ALU32 shift operations when the
12132  	 * source and destination registers are the same.
12133  	 */
12134  	{
12135  		"ALU32_LSH_X: all shift values with the same register",
12136  		{ },
12137  		INTERNAL | FLAG_NO_DATA,
12138  		{ },
12139  		{ { 0, 1 } },
12140  		.fill_helper = bpf_fill_alu32_lsh_same_reg,
12141  	},
12142  	{
12143  		"ALU32_RSH_X: all shift values with the same register",
12144  		{ },
12145  		INTERNAL | FLAG_NO_DATA,
12146  		{ },
12147  		{ { 0, 1 } },
12148  		.fill_helper = bpf_fill_alu32_rsh_same_reg,
12149  	},
12150  	{
12151  		"ALU32_ARSH_X: all shift values with the same register",
12152  		{ },
12153  		INTERNAL | FLAG_NO_DATA,
12154  		{ },
12155  		{ { 0, 1 } },
12156  		.fill_helper = bpf_fill_alu32_arsh_same_reg,
12157  	},
12158  	/* ALU64 immediate magnitudes */
12159  	{
12160  		"ALU64_MOV_K: all immediate value magnitudes",
12161  		{ },
12162  		INTERNAL | FLAG_NO_DATA,
12163  		{ },
12164  		{ { 0, 1 } },
12165  		.fill_helper = bpf_fill_alu64_mov_imm,
12166  		.nr_testruns = NR_PATTERN_RUNS,
12167  	},
12168  	{
12169  		"ALU64_AND_K: all immediate value magnitudes",
12170  		{ },
12171  		INTERNAL | FLAG_NO_DATA,
12172  		{ },
12173  		{ { 0, 1 } },
12174  		.fill_helper = bpf_fill_alu64_and_imm,
12175  		.nr_testruns = NR_PATTERN_RUNS,
12176  	},
12177  	{
12178  		"ALU64_OR_K: all immediate value magnitudes",
12179  		{ },
12180  		INTERNAL | FLAG_NO_DATA,
12181  		{ },
12182  		{ { 0, 1 } },
12183  		.fill_helper = bpf_fill_alu64_or_imm,
12184  		.nr_testruns = NR_PATTERN_RUNS,
12185  	},
12186  	{
12187  		"ALU64_XOR_K: all immediate value magnitudes",
12188  		{ },
12189  		INTERNAL | FLAG_NO_DATA,
12190  		{ },
12191  		{ { 0, 1 } },
12192  		.fill_helper = bpf_fill_alu64_xor_imm,
12193  		.nr_testruns = NR_PATTERN_RUNS,
12194  	},
12195  	{
12196  		"ALU64_ADD_K: all immediate value magnitudes",
12197  		{ },
12198  		INTERNAL | FLAG_NO_DATA,
12199  		{ },
12200  		{ { 0, 1 } },
12201  		.fill_helper = bpf_fill_alu64_add_imm,
12202  		.nr_testruns = NR_PATTERN_RUNS,
12203  	},
12204  	{
12205  		"ALU64_SUB_K: all immediate value magnitudes",
12206  		{ },
12207  		INTERNAL | FLAG_NO_DATA,
12208  		{ },
12209  		{ { 0, 1 } },
12210  		.fill_helper = bpf_fill_alu64_sub_imm,
12211  		.nr_testruns = NR_PATTERN_RUNS,
12212  	},
12213  	{
12214  		"ALU64_MUL_K: all immediate value magnitudes",
12215  		{ },
12216  		INTERNAL | FLAG_NO_DATA,
12217  		{ },
12218  		{ { 0, 1 } },
12219  		.fill_helper = bpf_fill_alu64_mul_imm,
12220  		.nr_testruns = NR_PATTERN_RUNS,
12221  	},
12222  	{
12223  		"ALU64_DIV_K: all immediate value magnitudes",
12224  		{ },
12225  		INTERNAL | FLAG_NO_DATA,
12226  		{ },
12227  		{ { 0, 1 } },
12228  		.fill_helper = bpf_fill_alu64_div_imm,
12229  		.nr_testruns = NR_PATTERN_RUNS,
12230  	},
12231  	{
12232  		"ALU64_MOD_K: all immediate value magnitudes",
12233  		{ },
12234  		INTERNAL | FLAG_NO_DATA,
12235  		{ },
12236  		{ { 0, 1 } },
12237  		.fill_helper = bpf_fill_alu64_mod_imm,
12238  		.nr_testruns = NR_PATTERN_RUNS,
12239  	},
12240  	/* ALU32 immediate magnitudes */
12241  	{
12242  		"ALU32_MOV_K: all immediate value magnitudes",
12243  		{ },
12244  		INTERNAL | FLAG_NO_DATA,
12245  		{ },
12246  		{ { 0, 1 } },
12247  		.fill_helper = bpf_fill_alu32_mov_imm,
12248  		.nr_testruns = NR_PATTERN_RUNS,
12249  	},
12250  	{
12251  		"ALU32_AND_K: all immediate value magnitudes",
12252  		{ },
12253  		INTERNAL | FLAG_NO_DATA,
12254  		{ },
12255  		{ { 0, 1 } },
12256  		.fill_helper = bpf_fill_alu32_and_imm,
12257  		.nr_testruns = NR_PATTERN_RUNS,
12258  	},
12259  	{
12260  		"ALU32_OR_K: all immediate value magnitudes",
12261  		{ },
12262  		INTERNAL | FLAG_NO_DATA,
12263  		{ },
12264  		{ { 0, 1 } },
12265  		.fill_helper = bpf_fill_alu32_or_imm,
12266  		.nr_testruns = NR_PATTERN_RUNS,
12267  	},
12268  	{
12269  		"ALU32_XOR_K: all immediate value magnitudes",
12270  		{ },
12271  		INTERNAL | FLAG_NO_DATA,
12272  		{ },
12273  		{ { 0, 1 } },
12274  		.fill_helper = bpf_fill_alu32_xor_imm,
12275  		.nr_testruns = NR_PATTERN_RUNS,
12276  	},
12277  	{
12278  		"ALU32_ADD_K: all immediate value magnitudes",
12279  		{ },
12280  		INTERNAL | FLAG_NO_DATA,
12281  		{ },
12282  		{ { 0, 1 } },
12283  		.fill_helper = bpf_fill_alu32_add_imm,
12284  		.nr_testruns = NR_PATTERN_RUNS,
12285  	},
12286  	{
12287  		"ALU32_SUB_K: all immediate value magnitudes",
12288  		{ },
12289  		INTERNAL | FLAG_NO_DATA,
12290  		{ },
12291  		{ { 0, 1 } },
12292  		.fill_helper = bpf_fill_alu32_sub_imm,
12293  		.nr_testruns = NR_PATTERN_RUNS,
12294  	},
12295  	{
12296  		"ALU32_MUL_K: all immediate value magnitudes",
12297  		{ },
12298  		INTERNAL | FLAG_NO_DATA,
12299  		{ },
12300  		{ { 0, 1 } },
12301  		.fill_helper = bpf_fill_alu32_mul_imm,
12302  		.nr_testruns = NR_PATTERN_RUNS,
12303  	},
12304  	{
12305  		"ALU32_DIV_K: all immediate value magnitudes",
12306  		{ },
12307  		INTERNAL | FLAG_NO_DATA,
12308  		{ },
12309  		{ { 0, 1 } },
12310  		.fill_helper = bpf_fill_alu32_div_imm,
12311  		.nr_testruns = NR_PATTERN_RUNS,
12312  	},
12313  	{
12314  		"ALU32_MOD_K: all immediate value magnitudes",
12315  		{ },
12316  		INTERNAL | FLAG_NO_DATA,
12317  		{ },
12318  		{ { 0, 1 } },
12319  		.fill_helper = bpf_fill_alu32_mod_imm,
12320  		.nr_testruns = NR_PATTERN_RUNS,
12321  	},
12322  	/* ALU64 register magnitudes */
12323  	{
12324  		"ALU64_MOV_X: all register value magnitudes",
12325  		{ },
12326  		INTERNAL | FLAG_NO_DATA,
12327  		{ },
12328  		{ { 0, 1 } },
12329  		.fill_helper = bpf_fill_alu64_mov_reg,
12330  		.nr_testruns = NR_PATTERN_RUNS,
12331  	},
12332  	{
12333  		"ALU64_AND_X: all register value magnitudes",
12334  		{ },
12335  		INTERNAL | FLAG_NO_DATA,
12336  		{ },
12337  		{ { 0, 1 } },
12338  		.fill_helper = bpf_fill_alu64_and_reg,
12339  		.nr_testruns = NR_PATTERN_RUNS,
12340  	},
12341  	{
12342  		"ALU64_OR_X: all register value magnitudes",
12343  		{ },
12344  		INTERNAL | FLAG_NO_DATA,
12345  		{ },
12346  		{ { 0, 1 } },
12347  		.fill_helper = bpf_fill_alu64_or_reg,
12348  		.nr_testruns = NR_PATTERN_RUNS,
12349  	},
12350  	{
12351  		"ALU64_XOR_X: all register value magnitudes",
12352  		{ },
12353  		INTERNAL | FLAG_NO_DATA,
12354  		{ },
12355  		{ { 0, 1 } },
12356  		.fill_helper = bpf_fill_alu64_xor_reg,
12357  		.nr_testruns = NR_PATTERN_RUNS,
12358  	},
12359  	{
12360  		"ALU64_ADD_X: all register value magnitudes",
12361  		{ },
12362  		INTERNAL | FLAG_NO_DATA,
12363  		{ },
12364  		{ { 0, 1 } },
12365  		.fill_helper = bpf_fill_alu64_add_reg,
12366  		.nr_testruns = NR_PATTERN_RUNS,
12367  	},
12368  	{
12369  		"ALU64_SUB_X: all register value magnitudes",
12370  		{ },
12371  		INTERNAL | FLAG_NO_DATA,
12372  		{ },
12373  		{ { 0, 1 } },
12374  		.fill_helper = bpf_fill_alu64_sub_reg,
12375  		.nr_testruns = NR_PATTERN_RUNS,
12376  	},
12377  	{
12378  		"ALU64_MUL_X: all register value magnitudes",
12379  		{ },
12380  		INTERNAL | FLAG_NO_DATA,
12381  		{ },
12382  		{ { 0, 1 } },
12383  		.fill_helper = bpf_fill_alu64_mul_reg,
12384  		.nr_testruns = NR_PATTERN_RUNS,
12385  	},
12386  	{
12387  		"ALU64_DIV_X: all register value magnitudes",
12388  		{ },
12389  		INTERNAL | FLAG_NO_DATA,
12390  		{ },
12391  		{ { 0, 1 } },
12392  		.fill_helper = bpf_fill_alu64_div_reg,
12393  		.nr_testruns = NR_PATTERN_RUNS,
12394  	},
12395  	{
12396  		"ALU64_MOD_X: all register value magnitudes",
12397  		{ },
12398  		INTERNAL | FLAG_NO_DATA,
12399  		{ },
12400  		{ { 0, 1 } },
12401  		.fill_helper = bpf_fill_alu64_mod_reg,
12402  		.nr_testruns = NR_PATTERN_RUNS,
12403  	},
12404  	/* ALU32 register magnitudes */
12405  	{
12406  		"ALU32_MOV_X: all register value magnitudes",
12407  		{ },
12408  		INTERNAL | FLAG_NO_DATA,
12409  		{ },
12410  		{ { 0, 1 } },
12411  		.fill_helper = bpf_fill_alu32_mov_reg,
12412  		.nr_testruns = NR_PATTERN_RUNS,
12413  	},
12414  	{
12415  		"ALU32_AND_X: all register value magnitudes",
12416  		{ },
12417  		INTERNAL | FLAG_NO_DATA,
12418  		{ },
12419  		{ { 0, 1 } },
12420  		.fill_helper = bpf_fill_alu32_and_reg,
12421  		.nr_testruns = NR_PATTERN_RUNS,
12422  	},
12423  	{
12424  		"ALU32_OR_X: all register value magnitudes",
12425  		{ },
12426  		INTERNAL | FLAG_NO_DATA,
12427  		{ },
12428  		{ { 0, 1 } },
12429  		.fill_helper = bpf_fill_alu32_or_reg,
12430  		.nr_testruns = NR_PATTERN_RUNS,
12431  	},
12432  	{
12433  		"ALU32_XOR_X: all register value magnitudes",
12434  		{ },
12435  		INTERNAL | FLAG_NO_DATA,
12436  		{ },
12437  		{ { 0, 1 } },
12438  		.fill_helper = bpf_fill_alu32_xor_reg,
12439  		.nr_testruns = NR_PATTERN_RUNS,
12440  	},
12441  	{
12442  		"ALU32_ADD_X: all register value magnitudes",
12443  		{ },
12444  		INTERNAL | FLAG_NO_DATA,
12445  		{ },
12446  		{ { 0, 1 } },
12447  		.fill_helper = bpf_fill_alu32_add_reg,
12448  		.nr_testruns = NR_PATTERN_RUNS,
12449  	},
12450  	{
12451  		"ALU32_SUB_X: all register value magnitudes",
12452  		{ },
12453  		INTERNAL | FLAG_NO_DATA,
12454  		{ },
12455  		{ { 0, 1 } },
12456  		.fill_helper = bpf_fill_alu32_sub_reg,
12457  		.nr_testruns = NR_PATTERN_RUNS,
12458  	},
12459  	{
12460  		"ALU32_MUL_X: all register value magnitudes",
12461  		{ },
12462  		INTERNAL | FLAG_NO_DATA,
12463  		{ },
12464  		{ { 0, 1 } },
12465  		.fill_helper = bpf_fill_alu32_mul_reg,
12466  		.nr_testruns = NR_PATTERN_RUNS,
12467  	},
12468  	{
12469  		"ALU32_DIV_X: all register value magnitudes",
12470  		{ },
12471  		INTERNAL | FLAG_NO_DATA,
12472  		{ },
12473  		{ { 0, 1 } },
12474  		.fill_helper = bpf_fill_alu32_div_reg,
12475  		.nr_testruns = NR_PATTERN_RUNS,
12476  	},
12477  	{
12478  		"ALU32_MOD_X: all register value magnitudes",
12479  		{ },
12480  		INTERNAL | FLAG_NO_DATA,
12481  		{ },
12482  		{ { 0, 1 } },
12483  		.fill_helper = bpf_fill_alu32_mod_reg,
12484  		.nr_testruns = NR_PATTERN_RUNS,
12485  	},
12486  	/* LD_IMM64 immediate magnitudes and byte patterns */
12487  	{
12488  		"LD_IMM64: all immediate value magnitudes",
12489  		{ },
12490  		INTERNAL | FLAG_NO_DATA,
12491  		{ },
12492  		{ { 0, 1 } },
12493  		.fill_helper = bpf_fill_ld_imm64_magn,
12494  	},
12495  	{
12496  		"LD_IMM64: checker byte patterns",
12497  		{ },
12498  		INTERNAL | FLAG_NO_DATA,
12499  		{ },
12500  		{ { 0, 1 } },
12501  		.fill_helper = bpf_fill_ld_imm64_checker,
12502  	},
12503  	{
12504  		"LD_IMM64: random positive and zero byte patterns",
12505  		{ },
12506  		INTERNAL | FLAG_NO_DATA,
12507  		{ },
12508  		{ { 0, 1 } },
12509  		.fill_helper = bpf_fill_ld_imm64_pos_zero,
12510  	},
12511  	{
12512  		"LD_IMM64: random negative and zero byte patterns",
12513  		{ },
12514  		INTERNAL | FLAG_NO_DATA,
12515  		{ },
12516  		{ { 0, 1 } },
12517  		.fill_helper = bpf_fill_ld_imm64_neg_zero,
12518  	},
12519  	{
12520  		"LD_IMM64: random positive and negative byte patterns",
12521  		{ },
12522  		INTERNAL | FLAG_NO_DATA,
12523  		{ },
12524  		{ { 0, 1 } },
12525  		.fill_helper = bpf_fill_ld_imm64_pos_neg,
12526  	},
12527  	/* 64-bit ATOMIC register combinations */
12528  	{
12529  		"ATOMIC_DW_ADD: register combinations",
12530  		{ },
12531  		INTERNAL,
12532  		{ },
12533  		{ { 0, 1 } },
12534  		.fill_helper = bpf_fill_atomic64_add_reg_pairs,
12535  		.stack_depth = 8,
12536  	},
12537  	{
12538  		"ATOMIC_DW_AND: register combinations",
12539  		{ },
12540  		INTERNAL,
12541  		{ },
12542  		{ { 0, 1 } },
12543  		.fill_helper = bpf_fill_atomic64_and_reg_pairs,
12544  		.stack_depth = 8,
12545  	},
12546  	{
12547  		"ATOMIC_DW_OR: register combinations",
12548  		{ },
12549  		INTERNAL,
12550  		{ },
12551  		{ { 0, 1 } },
12552  		.fill_helper = bpf_fill_atomic64_or_reg_pairs,
12553  		.stack_depth = 8,
12554  	},
12555  	{
12556  		"ATOMIC_DW_XOR: register combinations",
12557  		{ },
12558  		INTERNAL,
12559  		{ },
12560  		{ { 0, 1 } },
12561  		.fill_helper = bpf_fill_atomic64_xor_reg_pairs,
12562  		.stack_depth = 8,
12563  	},
12564  	{
12565  		"ATOMIC_DW_ADD_FETCH: register combinations",
12566  		{ },
12567  		INTERNAL,
12568  		{ },
12569  		{ { 0, 1 } },
12570  		.fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
12571  		.stack_depth = 8,
12572  	},
12573  	{
12574  		"ATOMIC_DW_AND_FETCH: register combinations",
12575  		{ },
12576  		INTERNAL,
12577  		{ },
12578  		{ { 0, 1 } },
12579  		.fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
12580  		.stack_depth = 8,
12581  	},
12582  	{
12583  		"ATOMIC_DW_OR_FETCH: register combinations",
12584  		{ },
12585  		INTERNAL,
12586  		{ },
12587  		{ { 0, 1 } },
12588  		.fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
12589  		.stack_depth = 8,
12590  	},
12591  	{
12592  		"ATOMIC_DW_XOR_FETCH: register combinations",
12593  		{ },
12594  		INTERNAL,
12595  		{ },
12596  		{ { 0, 1 } },
12597  		.fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
12598  		.stack_depth = 8,
12599  	},
12600  	{
12601  		"ATOMIC_DW_XCHG: register combinations",
12602  		{ },
12603  		INTERNAL,
12604  		{ },
12605  		{ { 0, 1 } },
12606  		.fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
12607  		.stack_depth = 8,
12608  	},
12609  	{
12610  		"ATOMIC_DW_CMPXCHG: register combinations",
12611  		{ },
12612  		INTERNAL,
12613  		{ },
12614  		{ { 0, 1 } },
12615  		.fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
12616  		.stack_depth = 8,
12617  	},
12618  	/* 32-bit ATOMIC register combinations */
12619  	{
12620  		"ATOMIC_W_ADD: register combinations",
12621  		{ },
12622  		INTERNAL,
12623  		{ },
12624  		{ { 0, 1 } },
12625  		.fill_helper = bpf_fill_atomic32_add_reg_pairs,
12626  		.stack_depth = 8,
12627  	},
12628  	{
12629  		"ATOMIC_W_AND: register combinations",
12630  		{ },
12631  		INTERNAL,
12632  		{ },
12633  		{ { 0, 1 } },
12634  		.fill_helper = bpf_fill_atomic32_and_reg_pairs,
12635  		.stack_depth = 8,
12636  	},
12637  	{
12638  		"ATOMIC_W_OR: register combinations",
12639  		{ },
12640  		INTERNAL,
12641  		{ },
12642  		{ { 0, 1 } },
12643  		.fill_helper = bpf_fill_atomic32_or_reg_pairs,
12644  		.stack_depth = 8,
12645  	},
12646  	{
12647  		"ATOMIC_W_XOR: register combinations",
12648  		{ },
12649  		INTERNAL,
12650  		{ },
12651  		{ { 0, 1 } },
12652  		.fill_helper = bpf_fill_atomic32_xor_reg_pairs,
12653  		.stack_depth = 8,
12654  	},
12655  	{
12656  		"ATOMIC_W_ADD_FETCH: register combinations",
12657  		{ },
12658  		INTERNAL,
12659  		{ },
12660  		{ { 0, 1 } },
12661  		.fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
12662  		.stack_depth = 8,
12663  	},
12664  	{
12665  		"ATOMIC_W_AND_FETCH: register combinations",
12666  		{ },
12667  		INTERNAL,
12668  		{ },
12669  		{ { 0, 1 } },
12670  		.fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
12671  		.stack_depth = 8,
12672  	},
12673  	{
12674  		"ATOMIC_W_OR_FETCH: register combinations",
12675  		{ },
12676  		INTERNAL,
12677  		{ },
12678  		{ { 0, 1 } },
12679  		.fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
12680  		.stack_depth = 8,
12681  	},
12682  	{
12683  		"ATOMIC_W_XOR_FETCH: register combinations",
12684  		{ },
12685  		INTERNAL,
12686  		{ },
12687  		{ { 0, 1 } },
12688  		.fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
12689  		.stack_depth = 8,
12690  	},
12691  	{
12692  		"ATOMIC_W_XCHG: register combinations",
12693  		{ },
12694  		INTERNAL,
12695  		{ },
12696  		{ { 0, 1 } },
12697  		.fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
12698  		.stack_depth = 8,
12699  	},
12700  	{
12701  		"ATOMIC_W_CMPXCHG: register combinations",
12702  		{ },
12703  		INTERNAL,
12704  		{ },
12705  		{ { 0, 1 } },
12706  		.fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
12707  		.stack_depth = 8,
12708  	},
12709  	/* 64-bit ATOMIC magnitudes */
12710  	{
12711  		"ATOMIC_DW_ADD: all operand magnitudes",
12712  		{ },
12713  		INTERNAL | FLAG_NO_DATA,
12714  		{ },
12715  		{ { 0, 1 } },
12716  		.fill_helper = bpf_fill_atomic64_add,
12717  		.stack_depth = 8,
12718  		.nr_testruns = NR_PATTERN_RUNS,
12719  	},
12720  	{
12721  		"ATOMIC_DW_AND: all operand magnitudes",
12722  		{ },
12723  		INTERNAL | FLAG_NO_DATA,
12724  		{ },
12725  		{ { 0, 1 } },
12726  		.fill_helper = bpf_fill_atomic64_and,
12727  		.stack_depth = 8,
12728  		.nr_testruns = NR_PATTERN_RUNS,
12729  	},
12730  	{
12731  		"ATOMIC_DW_OR: all operand magnitudes",
12732  		{ },
12733  		INTERNAL | FLAG_NO_DATA,
12734  		{ },
12735  		{ { 0, 1 } },
12736  		.fill_helper = bpf_fill_atomic64_or,
12737  		.stack_depth = 8,
12738  		.nr_testruns = NR_PATTERN_RUNS,
12739  	},
12740  	{
12741  		"ATOMIC_DW_XOR: all operand magnitudes",
12742  		{ },
12743  		INTERNAL | FLAG_NO_DATA,
12744  		{ },
12745  		{ { 0, 1 } },
12746  		.fill_helper = bpf_fill_atomic64_xor,
12747  		.stack_depth = 8,
12748  		.nr_testruns = NR_PATTERN_RUNS,
12749  	},
12750  	{
12751  		"ATOMIC_DW_ADD_FETCH: all operand magnitudes",
12752  		{ },
12753  		INTERNAL | FLAG_NO_DATA,
12754  		{ },
12755  		{ { 0, 1 } },
12756  		.fill_helper = bpf_fill_atomic64_add_fetch,
12757  		.stack_depth = 8,
12758  		.nr_testruns = NR_PATTERN_RUNS,
12759  	},
12760  	{
12761  		"ATOMIC_DW_AND_FETCH: all operand magnitudes",
12762  		{ },
12763  		INTERNAL | FLAG_NO_DATA,
12764  		{ },
12765  		{ { 0, 1 } },
12766  		.fill_helper = bpf_fill_atomic64_and_fetch,
12767  		.stack_depth = 8,
12768  		.nr_testruns = NR_PATTERN_RUNS,
12769  	},
12770  	{
12771  		"ATOMIC_DW_OR_FETCH: all operand magnitudes",
12772  		{ },
12773  		INTERNAL | FLAG_NO_DATA,
12774  		{ },
12775  		{ { 0, 1 } },
12776  		.fill_helper = bpf_fill_atomic64_or_fetch,
12777  		.stack_depth = 8,
12778  		.nr_testruns = NR_PATTERN_RUNS,
12779  	},
12780  	{
12781  		"ATOMIC_DW_XOR_FETCH: all operand magnitudes",
12782  		{ },
12783  		INTERNAL | FLAG_NO_DATA,
12784  		{ },
12785  		{ { 0, 1 } },
12786  		.fill_helper = bpf_fill_atomic64_xor_fetch,
12787  		.stack_depth = 8,
12788  		.nr_testruns = NR_PATTERN_RUNS,
12789  	},
12790  	{
12791  		"ATOMIC_DW_XCHG: all operand magnitudes",
12792  		{ },
12793  		INTERNAL | FLAG_NO_DATA,
12794  		{ },
12795  		{ { 0, 1 } },
12796  		.fill_helper = bpf_fill_atomic64_xchg,
12797  		.stack_depth = 8,
12798  		.nr_testruns = NR_PATTERN_RUNS,
12799  	},
12800  	{
12801  		"ATOMIC_DW_CMPXCHG: all operand magnitudes",
12802  		{ },
12803  		INTERNAL | FLAG_NO_DATA,
12804  		{ },
12805  		{ { 0, 1 } },
12806  		.fill_helper = bpf_fill_cmpxchg64,
12807  		.stack_depth = 8,
12808  		.nr_testruns = NR_PATTERN_RUNS,
12809  	},
12810  	/* 32-bit ATOMIC magnitudes */
12811  	{
12812  		"ATOMIC_W_ADD: all operand magnitudes",
12813  		{ },
12814  		INTERNAL | FLAG_NO_DATA,
12815  		{ },
12816  		{ { 0, 1 } },
12817  		.fill_helper = bpf_fill_atomic32_add,
12818  		.stack_depth = 8,
12819  		.nr_testruns = NR_PATTERN_RUNS,
12820  	},
12821  	{
12822  		"ATOMIC_W_AND: all operand magnitudes",
12823  		{ },
12824  		INTERNAL | FLAG_NO_DATA,
12825  		{ },
12826  		{ { 0, 1 } },
12827  		.fill_helper = bpf_fill_atomic32_and,
12828  		.stack_depth = 8,
12829  		.nr_testruns = NR_PATTERN_RUNS,
12830  	},
12831  	{
12832  		"ATOMIC_W_OR: all operand magnitudes",
12833  		{ },
12834  		INTERNAL | FLAG_NO_DATA,
12835  		{ },
12836  		{ { 0, 1 } },
12837  		.fill_helper = bpf_fill_atomic32_or,
12838  		.stack_depth = 8,
12839  		.nr_testruns = NR_PATTERN_RUNS,
12840  	},
12841  	{
12842  		"ATOMIC_W_XOR: all operand magnitudes",
12843  		{ },
12844  		INTERNAL | FLAG_NO_DATA,
12845  		{ },
12846  		{ { 0, 1 } },
12847  		.fill_helper = bpf_fill_atomic32_xor,
12848  		.stack_depth = 8,
12849  		.nr_testruns = NR_PATTERN_RUNS,
12850  	},
12851  	{
12852  		"ATOMIC_W_ADD_FETCH: all operand magnitudes",
12853  		{ },
12854  		INTERNAL | FLAG_NO_DATA,
12855  		{ },
12856  		{ { 0, 1 } },
12857  		.fill_helper = bpf_fill_atomic32_add_fetch,
12858  		.stack_depth = 8,
12859  		.nr_testruns = NR_PATTERN_RUNS,
12860  	},
12861  	{
12862  		"ATOMIC_W_AND_FETCH: all operand magnitudes",
12863  		{ },
12864  		INTERNAL | FLAG_NO_DATA,
12865  		{ },
12866  		{ { 0, 1 } },
12867  		.fill_helper = bpf_fill_atomic32_and_fetch,
12868  		.stack_depth = 8,
12869  		.nr_testruns = NR_PATTERN_RUNS,
12870  	},
12871  	{
12872  		"ATOMIC_W_OR_FETCH: all operand magnitudes",
12873  		{ },
12874  		INTERNAL | FLAG_NO_DATA,
12875  		{ },
12876  		{ { 0, 1 } },
12877  		.fill_helper = bpf_fill_atomic32_or_fetch,
12878  		.stack_depth = 8,
12879  		.nr_testruns = NR_PATTERN_RUNS,
12880  	},
12881  	{
12882  		"ATOMIC_W_XOR_FETCH: all operand magnitudes",
12883  		{ },
12884  		INTERNAL | FLAG_NO_DATA,
12885  		{ },
12886  		{ { 0, 1 } },
12887  		.fill_helper = bpf_fill_atomic32_xor_fetch,
12888  		.stack_depth = 8,
12889  		.nr_testruns = NR_PATTERN_RUNS,
12890  	},
12891  	{
12892  		"ATOMIC_W_XCHG: all operand magnitudes",
12893  		{ },
12894  		INTERNAL | FLAG_NO_DATA,
12895  		{ },
12896  		{ { 0, 1 } },
12897  		.fill_helper = bpf_fill_atomic32_xchg,
12898  		.stack_depth = 8,
12899  		.nr_testruns = NR_PATTERN_RUNS,
12900  	},
12901  	{
12902  		"ATOMIC_W_CMPXCHG: all operand magnitudes",
12903  		{ },
12904  		INTERNAL | FLAG_NO_DATA,
12905  		{ },
12906  		{ { 0, 1 } },
12907  		.fill_helper = bpf_fill_cmpxchg32,
12908  		.stack_depth = 8,
12909  		.nr_testruns = NR_PATTERN_RUNS,
12910  	},
12911  	/* JMP immediate magnitudes */
12912  	{
12913  		"JMP_JSET_K: all immediate value magnitudes",
12914  		{ },
12915  		INTERNAL | FLAG_NO_DATA,
12916  		{ },
12917  		{ { 0, 1 } },
12918  		.fill_helper = bpf_fill_jmp_jset_imm,
12919  		.nr_testruns = NR_PATTERN_RUNS,
12920  	},
12921  	{
12922  		"JMP_JEQ_K: all immediate value magnitudes",
12923  		{ },
12924  		INTERNAL | FLAG_NO_DATA,
12925  		{ },
12926  		{ { 0, 1 } },
12927  		.fill_helper = bpf_fill_jmp_jeq_imm,
12928  		.nr_testruns = NR_PATTERN_RUNS,
12929  	},
12930  	{
12931  		"JMP_JNE_K: all immediate value magnitudes",
12932  		{ },
12933  		INTERNAL | FLAG_NO_DATA,
12934  		{ },
12935  		{ { 0, 1 } },
12936  		.fill_helper = bpf_fill_jmp_jne_imm,
12937  		.nr_testruns = NR_PATTERN_RUNS,
12938  	},
12939  	{
12940  		"JMP_JGT_K: all immediate value magnitudes",
12941  		{ },
12942  		INTERNAL | FLAG_NO_DATA,
12943  		{ },
12944  		{ { 0, 1 } },
12945  		.fill_helper = bpf_fill_jmp_jgt_imm,
12946  		.nr_testruns = NR_PATTERN_RUNS,
12947  	},
12948  	{
12949  		"JMP_JGE_K: all immediate value magnitudes",
12950  		{ },
12951  		INTERNAL | FLAG_NO_DATA,
12952  		{ },
12953  		{ { 0, 1 } },
12954  		.fill_helper = bpf_fill_jmp_jge_imm,
12955  		.nr_testruns = NR_PATTERN_RUNS,
12956  	},
12957  	{
12958  		"JMP_JLT_K: all immediate value magnitudes",
12959  		{ },
12960  		INTERNAL | FLAG_NO_DATA,
12961  		{ },
12962  		{ { 0, 1 } },
12963  		.fill_helper = bpf_fill_jmp_jlt_imm,
12964  		.nr_testruns = NR_PATTERN_RUNS,
12965  	},
12966  	{
12967  		"JMP_JLE_K: all immediate value magnitudes",
12968  		{ },
12969  		INTERNAL | FLAG_NO_DATA,
12970  		{ },
12971  		{ { 0, 1 } },
12972  		.fill_helper = bpf_fill_jmp_jle_imm,
12973  		.nr_testruns = NR_PATTERN_RUNS,
12974  	},
12975  	{
12976  		"JMP_JSGT_K: all immediate value magnitudes",
12977  		{ },
12978  		INTERNAL | FLAG_NO_DATA,
12979  		{ },
12980  		{ { 0, 1 } },
12981  		.fill_helper = bpf_fill_jmp_jsgt_imm,
12982  		.nr_testruns = NR_PATTERN_RUNS,
12983  	},
12984  	{
12985  		"JMP_JSGE_K: all immediate value magnitudes",
12986  		{ },
12987  		INTERNAL | FLAG_NO_DATA,
12988  		{ },
12989  		{ { 0, 1 } },
12990  		.fill_helper = bpf_fill_jmp_jsge_imm,
12991  		.nr_testruns = NR_PATTERN_RUNS,
12992  	},
12993  	{
12994  		"JMP_JSLT_K: all immediate value magnitudes",
12995  		{ },
12996  		INTERNAL | FLAG_NO_DATA,
12997  		{ },
12998  		{ { 0, 1 } },
12999  		.fill_helper = bpf_fill_jmp_jslt_imm,
13000  		.nr_testruns = NR_PATTERN_RUNS,
13001  	},
13002  	{
13003  		"JMP_JSLE_K: all immediate value magnitudes",
13004  		{ },
13005  		INTERNAL | FLAG_NO_DATA,
13006  		{ },
13007  		{ { 0, 1 } },
13008  		.fill_helper = bpf_fill_jmp_jsle_imm,
13009  		.nr_testruns = NR_PATTERN_RUNS,
13010  	},
13011  	/* JMP register magnitudes */
13012  	{
13013  		"JMP_JSET_X: all register value magnitudes",
13014  		{ },
13015  		INTERNAL | FLAG_NO_DATA,
13016  		{ },
13017  		{ { 0, 1 } },
13018  		.fill_helper = bpf_fill_jmp_jset_reg,
13019  		.nr_testruns = NR_PATTERN_RUNS,
13020  	},
13021  	{
13022  		"JMP_JEQ_X: all register value magnitudes",
13023  		{ },
13024  		INTERNAL | FLAG_NO_DATA,
13025  		{ },
13026  		{ { 0, 1 } },
13027  		.fill_helper = bpf_fill_jmp_jeq_reg,
13028  		.nr_testruns = NR_PATTERN_RUNS,
13029  	},
13030  	{
13031  		"JMP_JNE_X: all register value magnitudes",
13032  		{ },
13033  		INTERNAL | FLAG_NO_DATA,
13034  		{ },
13035  		{ { 0, 1 } },
13036  		.fill_helper = bpf_fill_jmp_jne_reg,
13037  		.nr_testruns = NR_PATTERN_RUNS,
13038  	},
13039  	{
13040  		"JMP_JGT_X: all register value magnitudes",
13041  		{ },
13042  		INTERNAL | FLAG_NO_DATA,
13043  		{ },
13044  		{ { 0, 1 } },
13045  		.fill_helper = bpf_fill_jmp_jgt_reg,
13046  		.nr_testruns = NR_PATTERN_RUNS,
13047  	},
13048  	{
13049  		"JMP_JGE_X: all register value magnitudes",
13050  		{ },
13051  		INTERNAL | FLAG_NO_DATA,
13052  		{ },
13053  		{ { 0, 1 } },
13054  		.fill_helper = bpf_fill_jmp_jge_reg,
13055  		.nr_testruns = NR_PATTERN_RUNS,
13056  	},
13057  	{
13058  		"JMP_JLT_X: all register value magnitudes",
13059  		{ },
13060  		INTERNAL | FLAG_NO_DATA,
13061  		{ },
13062  		{ { 0, 1 } },
13063  		.fill_helper = bpf_fill_jmp_jlt_reg,
13064  		.nr_testruns = NR_PATTERN_RUNS,
13065  	},
13066  	{
13067  		"JMP_JLE_X: all register value magnitudes",
13068  		{ },
13069  		INTERNAL | FLAG_NO_DATA,
13070  		{ },
13071  		{ { 0, 1 } },
13072  		.fill_helper = bpf_fill_jmp_jle_reg,
13073  		.nr_testruns = NR_PATTERN_RUNS,
13074  	},
13075  	{
13076  		"JMP_JSGT_X: all register value magnitudes",
13077  		{ },
13078  		INTERNAL | FLAG_NO_DATA,
13079  		{ },
13080  		{ { 0, 1 } },
13081  		.fill_helper = bpf_fill_jmp_jsgt_reg,
13082  		.nr_testruns = NR_PATTERN_RUNS,
13083  	},
13084  	{
13085  		"JMP_JSGE_X: all register value magnitudes",
13086  		{ },
13087  		INTERNAL | FLAG_NO_DATA,
13088  		{ },
13089  		{ { 0, 1 } },
13090  		.fill_helper = bpf_fill_jmp_jsge_reg,
13091  		.nr_testruns = NR_PATTERN_RUNS,
13092  	},
13093  	{
13094  		"JMP_JSLT_X: all register value magnitudes",
13095  		{ },
13096  		INTERNAL | FLAG_NO_DATA,
13097  		{ },
13098  		{ { 0, 1 } },
13099  		.fill_helper = bpf_fill_jmp_jslt_reg,
13100  		.nr_testruns = NR_PATTERN_RUNS,
13101  	},
13102  	{
13103  		"JMP_JSLE_X: all register value magnitudes",
13104  		{ },
13105  		INTERNAL | FLAG_NO_DATA,
13106  		{ },
13107  		{ { 0, 1 } },
13108  		.fill_helper = bpf_fill_jmp_jsle_reg,
13109  		.nr_testruns = NR_PATTERN_RUNS,
13110  	},
13111  	/* JMP32 immediate magnitudes */
13112  	{
13113  		"JMP32_JSET_K: all immediate value magnitudes",
13114  		{ },
13115  		INTERNAL | FLAG_NO_DATA,
13116  		{ },
13117  		{ { 0, 1 } },
13118  		.fill_helper = bpf_fill_jmp32_jset_imm,
13119  		.nr_testruns = NR_PATTERN_RUNS,
13120  	},
13121  	{
13122  		"JMP32_JEQ_K: all immediate value magnitudes",
13123  		{ },
13124  		INTERNAL | FLAG_NO_DATA,
13125  		{ },
13126  		{ { 0, 1 } },
13127  		.fill_helper = bpf_fill_jmp32_jeq_imm,
13128  		.nr_testruns = NR_PATTERN_RUNS,
13129  	},
13130  	{
13131  		"JMP32_JNE_K: all immediate value magnitudes",
13132  		{ },
13133  		INTERNAL | FLAG_NO_DATA,
13134  		{ },
13135  		{ { 0, 1 } },
13136  		.fill_helper = bpf_fill_jmp32_jne_imm,
13137  		.nr_testruns = NR_PATTERN_RUNS,
13138  	},
13139  	{
13140  		"JMP32_JGT_K: all immediate value magnitudes",
13141  		{ },
13142  		INTERNAL | FLAG_NO_DATA,
13143  		{ },
13144  		{ { 0, 1 } },
13145  		.fill_helper = bpf_fill_jmp32_jgt_imm,
13146  		.nr_testruns = NR_PATTERN_RUNS,
13147  	},
13148  	{
13149  		"JMP32_JGE_K: all immediate value magnitudes",
13150  		{ },
13151  		INTERNAL | FLAG_NO_DATA,
13152  		{ },
13153  		{ { 0, 1 } },
13154  		.fill_helper = bpf_fill_jmp32_jge_imm,
13155  		.nr_testruns = NR_PATTERN_RUNS,
13156  	},
13157  	{
13158  		"JMP32_JLT_K: all immediate value magnitudes",
13159  		{ },
13160  		INTERNAL | FLAG_NO_DATA,
13161  		{ },
13162  		{ { 0, 1 } },
13163  		.fill_helper = bpf_fill_jmp32_jlt_imm,
13164  		.nr_testruns = NR_PATTERN_RUNS,
13165  	},
13166  	{
13167  		"JMP32_JLE_K: all immediate value magnitudes",
13168  		{ },
13169  		INTERNAL | FLAG_NO_DATA,
13170  		{ },
13171  		{ { 0, 1 } },
13172  		.fill_helper = bpf_fill_jmp32_jle_imm,
13173  		.nr_testruns = NR_PATTERN_RUNS,
13174  	},
13175  	{
13176  		"JMP32_JSGT_K: all immediate value magnitudes",
13177  		{ },
13178  		INTERNAL | FLAG_NO_DATA,
13179  		{ },
13180  		{ { 0, 1 } },
13181  		.fill_helper = bpf_fill_jmp32_jsgt_imm,
13182  		.nr_testruns = NR_PATTERN_RUNS,
13183  	},
13184  	{
13185  		"JMP32_JSGE_K: all immediate value magnitudes",
13186  		{ },
13187  		INTERNAL | FLAG_NO_DATA,
13188  		{ },
13189  		{ { 0, 1 } },
13190  		.fill_helper = bpf_fill_jmp32_jsge_imm,
13191  		.nr_testruns = NR_PATTERN_RUNS,
13192  	},
13193  	{
13194  		"JMP32_JSLT_K: all immediate value magnitudes",
13195  		{ },
13196  		INTERNAL | FLAG_NO_DATA,
13197  		{ },
13198  		{ { 0, 1 } },
13199  		.fill_helper = bpf_fill_jmp32_jslt_imm,
13200  		.nr_testruns = NR_PATTERN_RUNS,
13201  	},
13202  	{
13203  		"JMP32_JSLE_K: all immediate value magnitudes",
13204  		{ },
13205  		INTERNAL | FLAG_NO_DATA,
13206  		{ },
13207  		{ { 0, 1 } },
13208  		.fill_helper = bpf_fill_jmp32_jsle_imm,
13209  		.nr_testruns = NR_PATTERN_RUNS,
13210  	},
13211  	/* JMP32 register magnitudes */
13212  	{
13213  		"JMP32_JSET_X: all register value magnitudes",
13214  		{ },
13215  		INTERNAL | FLAG_NO_DATA,
13216  		{ },
13217  		{ { 0, 1 } },
13218  		.fill_helper = bpf_fill_jmp32_jset_reg,
13219  		.nr_testruns = NR_PATTERN_RUNS,
13220  	},
13221  	{
13222  		"JMP32_JEQ_X: all register value magnitudes",
13223  		{ },
13224  		INTERNAL | FLAG_NO_DATA,
13225  		{ },
13226  		{ { 0, 1 } },
13227  		.fill_helper = bpf_fill_jmp32_jeq_reg,
13228  		.nr_testruns = NR_PATTERN_RUNS,
13229  	},
13230  	{
13231  		"JMP32_JNE_X: all register value magnitudes",
13232  		{ },
13233  		INTERNAL | FLAG_NO_DATA,
13234  		{ },
13235  		{ { 0, 1 } },
13236  		.fill_helper = bpf_fill_jmp32_jne_reg,
13237  		.nr_testruns = NR_PATTERN_RUNS,
13238  	},
13239  	{
13240  		"JMP32_JGT_X: all register value magnitudes",
13241  		{ },
13242  		INTERNAL | FLAG_NO_DATA,
13243  		{ },
13244  		{ { 0, 1 } },
13245  		.fill_helper = bpf_fill_jmp32_jgt_reg,
13246  		.nr_testruns = NR_PATTERN_RUNS,
13247  	},
13248  	{
13249  		"JMP32_JGE_X: all register value magnitudes",
13250  		{ },
13251  		INTERNAL | FLAG_NO_DATA,
13252  		{ },
13253  		{ { 0, 1 } },
13254  		.fill_helper = bpf_fill_jmp32_jge_reg,
13255  		.nr_testruns = NR_PATTERN_RUNS,
13256  	},
13257  	{
13258  		"JMP32_JLT_X: all register value magnitudes",
13259  		{ },
13260  		INTERNAL | FLAG_NO_DATA,
13261  		{ },
13262  		{ { 0, 1 } },
13263  		.fill_helper = bpf_fill_jmp32_jlt_reg,
13264  		.nr_testruns = NR_PATTERN_RUNS,
13265  	},
13266  	{
13267  		"JMP32_JLE_X: all register value magnitudes",
13268  		{ },
13269  		INTERNAL | FLAG_NO_DATA,
13270  		{ },
13271  		{ { 0, 1 } },
13272  		.fill_helper = bpf_fill_jmp32_jle_reg,
13273  		.nr_testruns = NR_PATTERN_RUNS,
13274  	},
13275  	{
13276  		"JMP32_JSGT_X: all register value magnitudes",
13277  		{ },
13278  		INTERNAL | FLAG_NO_DATA,
13279  		{ },
13280  		{ { 0, 1 } },
13281  		.fill_helper = bpf_fill_jmp32_jsgt_reg,
13282  		.nr_testruns = NR_PATTERN_RUNS,
13283  	},
13284  	{
13285  		"JMP32_JSGE_X: all register value magnitudes",
13286  		{ },
13287  		INTERNAL | FLAG_NO_DATA,
13288  		{ },
13289  		{ { 0, 1 } },
13290  		.fill_helper = bpf_fill_jmp32_jsge_reg,
13291  		.nr_testruns = NR_PATTERN_RUNS,
13292  	},
13293  	{
13294  		"JMP32_JSLT_X: all register value magnitudes",
13295  		{ },
13296  		INTERNAL | FLAG_NO_DATA,
13297  		{ },
13298  		{ { 0, 1 } },
13299  		.fill_helper = bpf_fill_jmp32_jslt_reg,
13300  		.nr_testruns = NR_PATTERN_RUNS,
13301  	},
13302  	{
13303  		"JMP32_JSLE_X: all register value magnitudes",
13304  		{ },
13305  		INTERNAL | FLAG_NO_DATA,
13306  		{ },
13307  		{ { 0, 1 } },
13308  		.fill_helper = bpf_fill_jmp32_jsle_reg,
13309  		.nr_testruns = NR_PATTERN_RUNS,
13310  	},
13311  	/* Conditional jumps with constant decision */
13312  	{
13313  		"JMP_JSET_K: imm = 0 -> never taken",
13314  		.u.insns_int = {
13315  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13316  			BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
13317  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13318  			BPF_EXIT_INSN(),
13319  		},
13320  		INTERNAL | FLAG_NO_DATA,
13321  		{ },
13322  		{ { 0, 0 } },
13323  	},
13324  	{
13325  		"JMP_JLT_K: imm = 0 -> never taken",
13326  		.u.insns_int = {
13327  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13328  			BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
13329  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13330  			BPF_EXIT_INSN(),
13331  		},
13332  		INTERNAL | FLAG_NO_DATA,
13333  		{ },
13334  		{ { 0, 0 } },
13335  	},
13336  	{
13337  		"JMP_JGE_K: imm = 0 -> always taken",
13338  		.u.insns_int = {
13339  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13340  			BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
13341  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13342  			BPF_EXIT_INSN(),
13343  		},
13344  		INTERNAL | FLAG_NO_DATA,
13345  		{ },
13346  		{ { 0, 1 } },
13347  	},
13348  	{
13349  		"JMP_JGT_K: imm = 0xffffffff -> never taken",
13350  		.u.insns_int = {
13351  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13352  			BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
13353  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13354  			BPF_EXIT_INSN(),
13355  		},
13356  		INTERNAL | FLAG_NO_DATA,
13357  		{ },
13358  		{ { 0, 0 } },
13359  	},
13360  	{
13361  		"JMP_JLE_K: imm = 0xffffffff -> always taken",
13362  		.u.insns_int = {
13363  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13364  			BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
13365  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13366  			BPF_EXIT_INSN(),
13367  		},
13368  		INTERNAL | FLAG_NO_DATA,
13369  		{ },
13370  		{ { 0, 1 } },
13371  	},
13372  	{
13373  		"JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
13374  		.u.insns_int = {
13375  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13376  			BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
13377  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13378  			BPF_EXIT_INSN(),
13379  		},
13380  		INTERNAL | FLAG_NO_DATA,
13381  		{ },
13382  		{ { 0, 0 } },
13383  	},
13384  	{
13385  		"JMP32_JSGE_K: imm = -0x80000000 -> always taken",
13386  		.u.insns_int = {
13387  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13388  			BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
13389  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13390  			BPF_EXIT_INSN(),
13391  		},
13392  		INTERNAL | FLAG_NO_DATA,
13393  		{ },
13394  		{ { 0, 1 } },
13395  	},
13396  	{
13397  		"JMP32_JSLT_K: imm = -0x80000000 -> never taken",
13398  		.u.insns_int = {
13399  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13400  			BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
13401  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13402  			BPF_EXIT_INSN(),
13403  		},
13404  		INTERNAL | FLAG_NO_DATA,
13405  		{ },
13406  		{ { 0, 0 } },
13407  	},
13408  	{
13409  		"JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
13410  		.u.insns_int = {
13411  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13412  			BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
13413  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13414  			BPF_EXIT_INSN(),
13415  		},
13416  		INTERNAL | FLAG_NO_DATA,
13417  		{ },
13418  		{ { 0, 1 } },
13419  	},
13420  	{
13421  		"JMP_JEQ_X: dst = src -> always taken",
13422  		.u.insns_int = {
13423  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13424  			BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
13425  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13426  			BPF_EXIT_INSN(),
13427  		},
13428  		INTERNAL | FLAG_NO_DATA,
13429  		{ },
13430  		{ { 0, 1 } },
13431  	},
13432  	{
13433  		"JMP_JGE_X: dst = src -> always taken",
13434  		.u.insns_int = {
13435  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13436  			BPF_JMP_REG(BPF_JGE, R1, R1, 1),
13437  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13438  			BPF_EXIT_INSN(),
13439  		},
13440  		INTERNAL | FLAG_NO_DATA,
13441  		{ },
13442  		{ { 0, 1 } },
13443  	},
13444  	{
13445  		"JMP_JLE_X: dst = src -> always taken",
13446  		.u.insns_int = {
13447  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13448  			BPF_JMP_REG(BPF_JLE, R1, R1, 1),
13449  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13450  			BPF_EXIT_INSN(),
13451  		},
13452  		INTERNAL | FLAG_NO_DATA,
13453  		{ },
13454  		{ { 0, 1 } },
13455  	},
13456  	{
13457  		"JMP_JSGE_X: dst = src -> always taken",
13458  		.u.insns_int = {
13459  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13460  			BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
13461  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13462  			BPF_EXIT_INSN(),
13463  		},
13464  		INTERNAL | FLAG_NO_DATA,
13465  		{ },
13466  		{ { 0, 1 } },
13467  	},
13468  	{
13469  		"JMP_JSLE_X: dst = src -> always taken",
13470  		.u.insns_int = {
13471  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13472  			BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
13473  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13474  			BPF_EXIT_INSN(),
13475  		},
13476  		INTERNAL | FLAG_NO_DATA,
13477  		{ },
13478  		{ { 0, 1 } },
13479  	},
13480  	{
13481  		"JMP_JNE_X: dst = src -> never taken",
13482  		.u.insns_int = {
13483  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13484  			BPF_JMP_REG(BPF_JNE, R1, R1, 1),
13485  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13486  			BPF_EXIT_INSN(),
13487  		},
13488  		INTERNAL | FLAG_NO_DATA,
13489  		{ },
13490  		{ { 0, 0 } },
13491  	},
13492  	{
13493  		"JMP_JGT_X: dst = src -> never taken",
13494  		.u.insns_int = {
13495  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13496  			BPF_JMP_REG(BPF_JGT, R1, R1, 1),
13497  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13498  			BPF_EXIT_INSN(),
13499  		},
13500  		INTERNAL | FLAG_NO_DATA,
13501  		{ },
13502  		{ { 0, 0 } },
13503  	},
13504  	{
13505  		"JMP_JLT_X: dst = src -> never taken",
13506  		.u.insns_int = {
13507  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13508  			BPF_JMP_REG(BPF_JLT, R1, R1, 1),
13509  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13510  			BPF_EXIT_INSN(),
13511  		},
13512  		INTERNAL | FLAG_NO_DATA,
13513  		{ },
13514  		{ { 0, 0 } },
13515  	},
13516  	{
13517  		"JMP_JSGT_X: dst = src -> never taken",
13518  		.u.insns_int = {
13519  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13520  			BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
13521  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13522  			BPF_EXIT_INSN(),
13523  		},
13524  		INTERNAL | FLAG_NO_DATA,
13525  		{ },
13526  		{ { 0, 0 } },
13527  	},
13528  	{
13529  		"JMP_JSLT_X: dst = src -> never taken",
13530  		.u.insns_int = {
13531  			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13532  			BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
13533  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13534  			BPF_EXIT_INSN(),
13535  		},
13536  		INTERNAL | FLAG_NO_DATA,
13537  		{ },
13538  		{ { 0, 0 } },
13539  	},
13540  	/* Short relative jumps */
13541  	{
13542  		"Short relative jump: offset=0",
13543  		.u.insns_int = {
13544  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13545  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
13546  			BPF_EXIT_INSN(),
13547  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13548  		},
13549  		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13550  		{ },
13551  		{ { 0, 0 } },
13552  	},
13553  	{
13554  		"Short relative jump: offset=1",
13555  		.u.insns_int = {
13556  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13557  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
13558  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13559  			BPF_EXIT_INSN(),
13560  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13561  		},
13562  		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13563  		{ },
13564  		{ { 0, 0 } },
13565  	},
13566  	{
13567  		"Short relative jump: offset=2",
13568  		.u.insns_int = {
13569  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13570  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
13571  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13572  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13573  			BPF_EXIT_INSN(),
13574  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13575  		},
13576  		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13577  		{ },
13578  		{ { 0, 0 } },
13579  	},
13580  	{
13581  		"Short relative jump: offset=3",
13582  		.u.insns_int = {
13583  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13584  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
13585  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13586  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13587  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13588  			BPF_EXIT_INSN(),
13589  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13590  		},
13591  		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13592  		{ },
13593  		{ { 0, 0 } },
13594  	},
13595  	{
13596  		"Short relative jump: offset=4",
13597  		.u.insns_int = {
13598  			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13599  			BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
13600  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13601  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13602  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13603  			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13604  			BPF_EXIT_INSN(),
13605  			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13606  		},
13607  		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13608  		{ },
13609  		{ { 0, 0 } },
13610  	},
13611  	/* Conditional branch conversions */
13612  	{
13613  		"Long conditional jump: taken at runtime",
13614  		{ },
13615  		INTERNAL | FLAG_NO_DATA,
13616  		{ },
13617  		{ { 0, 1 } },
13618  		.fill_helper = bpf_fill_max_jmp_taken,
13619  	},
13620  	{
13621  		"Long conditional jump: not taken at runtime",
13622  		{ },
13623  		INTERNAL | FLAG_NO_DATA,
13624  		{ },
13625  		{ { 0, 2 } },
13626  		.fill_helper = bpf_fill_max_jmp_not_taken,
13627  	},
13628  	{
13629  		"Long conditional jump: always taken, known at JIT time",
13630  		{ },
13631  		INTERNAL | FLAG_NO_DATA,
13632  		{ },
13633  		{ { 0, 1 } },
13634  		.fill_helper = bpf_fill_max_jmp_always_taken,
13635  	},
13636  	{
13637  		"Long conditional jump: never taken, known at JIT time",
13638  		{ },
13639  		INTERNAL | FLAG_NO_DATA,
13640  		{ },
13641  		{ { 0, 2 } },
13642  		.fill_helper = bpf_fill_max_jmp_never_taken,
13643  	},
13644  	/* Staggered jump sequences, immediate */
13645  	{
13646  		"Staggered jumps: JMP_JA",
13647  		{ },
13648  		INTERNAL | FLAG_NO_DATA,
13649  		{ },
13650  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13651  		.fill_helper = bpf_fill_staggered_ja,
13652  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13653  	},
13654  	{
13655  		"Staggered jumps: JMP_JEQ_K",
13656  		{ },
13657  		INTERNAL | FLAG_NO_DATA,
13658  		{ },
13659  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13660  		.fill_helper = bpf_fill_staggered_jeq_imm,
13661  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13662  	},
13663  	{
13664  		"Staggered jumps: JMP_JNE_K",
13665  		{ },
13666  		INTERNAL | FLAG_NO_DATA,
13667  		{ },
13668  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13669  		.fill_helper = bpf_fill_staggered_jne_imm,
13670  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13671  	},
13672  	{
13673  		"Staggered jumps: JMP_JSET_K",
13674  		{ },
13675  		INTERNAL | FLAG_NO_DATA,
13676  		{ },
13677  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13678  		.fill_helper = bpf_fill_staggered_jset_imm,
13679  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13680  	},
13681  	{
13682  		"Staggered jumps: JMP_JGT_K",
13683  		{ },
13684  		INTERNAL | FLAG_NO_DATA,
13685  		{ },
13686  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13687  		.fill_helper = bpf_fill_staggered_jgt_imm,
13688  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13689  	},
13690  	{
13691  		"Staggered jumps: JMP_JGE_K",
13692  		{ },
13693  		INTERNAL | FLAG_NO_DATA,
13694  		{ },
13695  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13696  		.fill_helper = bpf_fill_staggered_jge_imm,
13697  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13698  	},
13699  	{
13700  		"Staggered jumps: JMP_JLT_K",
13701  		{ },
13702  		INTERNAL | FLAG_NO_DATA,
13703  		{ },
13704  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13705  		.fill_helper = bpf_fill_staggered_jlt_imm,
13706  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13707  	},
13708  	{
13709  		"Staggered jumps: JMP_JLE_K",
13710  		{ },
13711  		INTERNAL | FLAG_NO_DATA,
13712  		{ },
13713  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13714  		.fill_helper = bpf_fill_staggered_jle_imm,
13715  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13716  	},
13717  	{
13718  		"Staggered jumps: JMP_JSGT_K",
13719  		{ },
13720  		INTERNAL | FLAG_NO_DATA,
13721  		{ },
13722  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13723  		.fill_helper = bpf_fill_staggered_jsgt_imm,
13724  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13725  	},
13726  	{
13727  		"Staggered jumps: JMP_JSGE_K",
13728  		{ },
13729  		INTERNAL | FLAG_NO_DATA,
13730  		{ },
13731  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13732  		.fill_helper = bpf_fill_staggered_jsge_imm,
13733  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13734  	},
13735  	{
13736  		"Staggered jumps: JMP_JSLT_K",
13737  		{ },
13738  		INTERNAL | FLAG_NO_DATA,
13739  		{ },
13740  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13741  		.fill_helper = bpf_fill_staggered_jslt_imm,
13742  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13743  	},
13744  	{
13745  		"Staggered jumps: JMP_JSLE_K",
13746  		{ },
13747  		INTERNAL | FLAG_NO_DATA,
13748  		{ },
13749  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13750  		.fill_helper = bpf_fill_staggered_jsle_imm,
13751  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13752  	},
13753  	/* Staggered jump sequences, register */
13754  	{
13755  		"Staggered jumps: JMP_JEQ_X",
13756  		{ },
13757  		INTERNAL | FLAG_NO_DATA,
13758  		{ },
13759  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13760  		.fill_helper = bpf_fill_staggered_jeq_reg,
13761  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13762  	},
13763  	{
13764  		"Staggered jumps: JMP_JNE_X",
13765  		{ },
13766  		INTERNAL | FLAG_NO_DATA,
13767  		{ },
13768  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13769  		.fill_helper = bpf_fill_staggered_jne_reg,
13770  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13771  	},
13772  	{
13773  		"Staggered jumps: JMP_JSET_X",
13774  		{ },
13775  		INTERNAL | FLAG_NO_DATA,
13776  		{ },
13777  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13778  		.fill_helper = bpf_fill_staggered_jset_reg,
13779  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13780  	},
13781  	{
13782  		"Staggered jumps: JMP_JGT_X",
13783  		{ },
13784  		INTERNAL | FLAG_NO_DATA,
13785  		{ },
13786  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13787  		.fill_helper = bpf_fill_staggered_jgt_reg,
13788  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13789  	},
13790  	{
13791  		"Staggered jumps: JMP_JGE_X",
13792  		{ },
13793  		INTERNAL | FLAG_NO_DATA,
13794  		{ },
13795  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13796  		.fill_helper = bpf_fill_staggered_jge_reg,
13797  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13798  	},
13799  	{
13800  		"Staggered jumps: JMP_JLT_X",
13801  		{ },
13802  		INTERNAL | FLAG_NO_DATA,
13803  		{ },
13804  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13805  		.fill_helper = bpf_fill_staggered_jlt_reg,
13806  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13807  	},
13808  	{
13809  		"Staggered jumps: JMP_JLE_X",
13810  		{ },
13811  		INTERNAL | FLAG_NO_DATA,
13812  		{ },
13813  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13814  		.fill_helper = bpf_fill_staggered_jle_reg,
13815  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13816  	},
13817  	{
13818  		"Staggered jumps: JMP_JSGT_X",
13819  		{ },
13820  		INTERNAL | FLAG_NO_DATA,
13821  		{ },
13822  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13823  		.fill_helper = bpf_fill_staggered_jsgt_reg,
13824  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13825  	},
13826  	{
13827  		"Staggered jumps: JMP_JSGE_X",
13828  		{ },
13829  		INTERNAL | FLAG_NO_DATA,
13830  		{ },
13831  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13832  		.fill_helper = bpf_fill_staggered_jsge_reg,
13833  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13834  	},
13835  	{
13836  		"Staggered jumps: JMP_JSLT_X",
13837  		{ },
13838  		INTERNAL | FLAG_NO_DATA,
13839  		{ },
13840  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13841  		.fill_helper = bpf_fill_staggered_jslt_reg,
13842  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13843  	},
13844  	{
13845  		"Staggered jumps: JMP_JSLE_X",
13846  		{ },
13847  		INTERNAL | FLAG_NO_DATA,
13848  		{ },
13849  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13850  		.fill_helper = bpf_fill_staggered_jsle_reg,
13851  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13852  	},
13853  	/* Staggered jump sequences, JMP32 immediate */
13854  	{
13855  		"Staggered jumps: JMP32_JEQ_K",
13856  		{ },
13857  		INTERNAL | FLAG_NO_DATA,
13858  		{ },
13859  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13860  		.fill_helper = bpf_fill_staggered_jeq32_imm,
13861  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13862  	},
13863  	{
13864  		"Staggered jumps: JMP32_JNE_K",
13865  		{ },
13866  		INTERNAL | FLAG_NO_DATA,
13867  		{ },
13868  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13869  		.fill_helper = bpf_fill_staggered_jne32_imm,
13870  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13871  	},
13872  	{
13873  		"Staggered jumps: JMP32_JSET_K",
13874  		{ },
13875  		INTERNAL | FLAG_NO_DATA,
13876  		{ },
13877  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13878  		.fill_helper = bpf_fill_staggered_jset32_imm,
13879  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13880  	},
13881  	{
13882  		"Staggered jumps: JMP32_JGT_K",
13883  		{ },
13884  		INTERNAL | FLAG_NO_DATA,
13885  		{ },
13886  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13887  		.fill_helper = bpf_fill_staggered_jgt32_imm,
13888  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13889  	},
13890  	{
13891  		"Staggered jumps: JMP32_JGE_K",
13892  		{ },
13893  		INTERNAL | FLAG_NO_DATA,
13894  		{ },
13895  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13896  		.fill_helper = bpf_fill_staggered_jge32_imm,
13897  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13898  	},
13899  	{
13900  		"Staggered jumps: JMP32_JLT_K",
13901  		{ },
13902  		INTERNAL | FLAG_NO_DATA,
13903  		{ },
13904  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13905  		.fill_helper = bpf_fill_staggered_jlt32_imm,
13906  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13907  	},
13908  	{
13909  		"Staggered jumps: JMP32_JLE_K",
13910  		{ },
13911  		INTERNAL | FLAG_NO_DATA,
13912  		{ },
13913  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13914  		.fill_helper = bpf_fill_staggered_jle32_imm,
13915  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13916  	},
13917  	{
13918  		"Staggered jumps: JMP32_JSGT_K",
13919  		{ },
13920  		INTERNAL | FLAG_NO_DATA,
13921  		{ },
13922  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13923  		.fill_helper = bpf_fill_staggered_jsgt32_imm,
13924  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13925  	},
13926  	{
13927  		"Staggered jumps: JMP32_JSGE_K",
13928  		{ },
13929  		INTERNAL | FLAG_NO_DATA,
13930  		{ },
13931  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13932  		.fill_helper = bpf_fill_staggered_jsge32_imm,
13933  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13934  	},
13935  	{
13936  		"Staggered jumps: JMP32_JSLT_K",
13937  		{ },
13938  		INTERNAL | FLAG_NO_DATA,
13939  		{ },
13940  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13941  		.fill_helper = bpf_fill_staggered_jslt32_imm,
13942  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13943  	},
13944  	{
13945  		"Staggered jumps: JMP32_JSLE_K",
13946  		{ },
13947  		INTERNAL | FLAG_NO_DATA,
13948  		{ },
13949  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13950  		.fill_helper = bpf_fill_staggered_jsle32_imm,
13951  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13952  	},
13953  	/* Staggered jump sequences, JMP32 register */
13954  	{
13955  		"Staggered jumps: JMP32_JEQ_X",
13956  		{ },
13957  		INTERNAL | FLAG_NO_DATA,
13958  		{ },
13959  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13960  		.fill_helper = bpf_fill_staggered_jeq32_reg,
13961  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13962  	},
13963  	{
13964  		"Staggered jumps: JMP32_JNE_X",
13965  		{ },
13966  		INTERNAL | FLAG_NO_DATA,
13967  		{ },
13968  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13969  		.fill_helper = bpf_fill_staggered_jne32_reg,
13970  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13971  	},
13972  	{
13973  		"Staggered jumps: JMP32_JSET_X",
13974  		{ },
13975  		INTERNAL | FLAG_NO_DATA,
13976  		{ },
13977  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13978  		.fill_helper = bpf_fill_staggered_jset32_reg,
13979  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13980  	},
13981  	{
13982  		"Staggered jumps: JMP32_JGT_X",
13983  		{ },
13984  		INTERNAL | FLAG_NO_DATA,
13985  		{ },
13986  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13987  		.fill_helper = bpf_fill_staggered_jgt32_reg,
13988  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13989  	},
13990  	{
13991  		"Staggered jumps: JMP32_JGE_X",
13992  		{ },
13993  		INTERNAL | FLAG_NO_DATA,
13994  		{ },
13995  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13996  		.fill_helper = bpf_fill_staggered_jge32_reg,
13997  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13998  	},
13999  	{
14000  		"Staggered jumps: JMP32_JLT_X",
14001  		{ },
14002  		INTERNAL | FLAG_NO_DATA,
14003  		{ },
14004  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14005  		.fill_helper = bpf_fill_staggered_jlt32_reg,
14006  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14007  	},
14008  	{
14009  		"Staggered jumps: JMP32_JLE_X",
14010  		{ },
14011  		INTERNAL | FLAG_NO_DATA,
14012  		{ },
14013  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14014  		.fill_helper = bpf_fill_staggered_jle32_reg,
14015  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14016  	},
14017  	{
14018  		"Staggered jumps: JMP32_JSGT_X",
14019  		{ },
14020  		INTERNAL | FLAG_NO_DATA,
14021  		{ },
14022  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14023  		.fill_helper = bpf_fill_staggered_jsgt32_reg,
14024  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14025  	},
14026  	{
14027  		"Staggered jumps: JMP32_JSGE_X",
14028  		{ },
14029  		INTERNAL | FLAG_NO_DATA,
14030  		{ },
14031  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14032  		.fill_helper = bpf_fill_staggered_jsge32_reg,
14033  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14034  	},
14035  	{
14036  		"Staggered jumps: JMP32_JSLT_X",
14037  		{ },
14038  		INTERNAL | FLAG_NO_DATA,
14039  		{ },
14040  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14041  		.fill_helper = bpf_fill_staggered_jslt32_reg,
14042  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14043  	},
14044  	{
14045  		"Staggered jumps: JMP32_JSLE_X",
14046  		{ },
14047  		INTERNAL | FLAG_NO_DATA,
14048  		{ },
14049  		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14050  		.fill_helper = bpf_fill_staggered_jsle32_reg,
14051  		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14052  	},
14053  };
14054  
14055  static struct net_device dev;
14056  
14057  static struct sk_buff *populate_skb(char *buf, int size)
14058  {
14059  	struct sk_buff *skb;
14060  
14061  	if (size >= MAX_DATA)
14062  		return NULL;
14063  
14064  	skb = alloc_skb(MAX_DATA, GFP_KERNEL);
14065  	if (!skb)
14066  		return NULL;
14067  
14068  	__skb_put_data(skb, buf, size);
14069  
14070  	/* Initialize a fake skb with test pattern. */
14071  	skb_reset_mac_header(skb);
14072  	skb->protocol = htons(ETH_P_IP);
14073  	skb->pkt_type = SKB_TYPE;
14074  	skb->mark = SKB_MARK;
14075  	skb->hash = SKB_HASH;
14076  	skb->queue_mapping = SKB_QUEUE_MAP;
14077  	skb->vlan_tci = SKB_VLAN_TCI;
14078  	skb->vlan_present = SKB_VLAN_PRESENT;
14079  	skb->vlan_proto = htons(ETH_P_IP);
14080  	dev_net_set(&dev, &init_net);
14081  	skb->dev = &dev;
14082  	skb->dev->ifindex = SKB_DEV_IFINDEX;
14083  	skb->dev->type = SKB_DEV_TYPE;
14084  	skb_set_network_header(skb, min(size, ETH_HLEN));
14085  
14086  	return skb;
14087  }
14088  
14089  static void *generate_test_data(struct bpf_test *test, int sub)
14090  {
14091  	struct sk_buff *skb;
14092  	struct page *page;
14093  
14094  	if (test->aux & FLAG_NO_DATA)
14095  		return NULL;
14096  
14097  	/* Test case expects an skb, so populate one. Various
14098  	 * subtests generate skbs of different sizes based on
14099  	 * the same data.
14100  	 */
14101  	skb = populate_skb(test->data, test->test[sub].data_size);
14102  	if (!skb)
14103  		return NULL;
14104  
14105  	if (test->aux & FLAG_SKB_FRAG) {
14106  		/*
14107  		 * when the test requires a fragmented skb, add a
14108  		 * single fragment to the skb, filled with
14109  		 * test->frag_data.
14110  		 */
14111  		void *ptr;
14112  
14113  		page = alloc_page(GFP_KERNEL);
14114  
14115  		if (!page)
14116  			goto err_kfree_skb;
14117  
14118  		ptr = kmap(page);
14119  		if (!ptr)
14120  			goto err_free_page;
14121  		memcpy(ptr, test->frag_data, MAX_DATA);
14122  		kunmap(page);
14123  		skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
14124  	}
14125  
14126  	return skb;
14127  
14128  err_free_page:
14129  	__free_page(page);
14130  err_kfree_skb:
14131  	kfree_skb(skb);
14132  	return NULL;
14133  }
14134  
14135  static void release_test_data(const struct bpf_test *test, void *data)
14136  {
14137  	if (test->aux & FLAG_NO_DATA)
14138  		return;
14139  
14140  	kfree_skb(data);
14141  }
14142  
14143  static int filter_length(int which)
14144  {
14145  	struct sock_filter *fp;
14146  	int len;
14147  
14148  	if (tests[which].fill_helper)
14149  		return tests[which].u.ptr.len;
14150  
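	/* Static insn arrays are zero-padded; scan back to the last used entry */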
14151  	fp = tests[which].u.insns;
14152  	for (len = MAX_INSNS - 1; len > 0; --len)
14153  		if (fp[len].code != 0 || fp[len].k != 0)
14154  			break;
14155  
14156  	return len + 1;
14157  }
14158  
14159  static void *filter_pointer(int which)
14160  {
14161  	if (tests[which].fill_helper)
14162  		return tests[which].u.ptr.insns;
14163  	else
14164  		return tests[which].u.insns;
14165  }
14166  
14167  static struct bpf_prog *generate_filter(int which, int *err)
14168  {
14169  	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14170  	unsigned int flen = filter_length(which);
14171  	void *fptr = filter_pointer(which);
14172  	struct sock_fprog_kern fprog;
14173  	struct bpf_prog *fp;
14174  
14175  	switch (test_type) {
14176  	case CLASSIC:
14177  		fprog.filter = fptr;
14178  		fprog.len = flen;
14179  
14180  		*err = bpf_prog_create(&fp, &fprog);
14181  		if (tests[which].aux & FLAG_EXPECTED_FAIL) {
14182  			if (*err == tests[which].expected_errcode) {
14183  				pr_cont("PASS\n");
14184  				/* Verifier rejected filter as expected. */
14185  				*err = 0;
14186  				return NULL;
14187  			} else {
14188  				pr_cont("UNEXPECTED_PASS\n");
14189  				pr_cont("UNEXPECTED_PASS\n");
14190  				/* Verifier did not reject the filter with the
14191  				 * expected error code; flag an error and return.
14192  				 */
14193  				return NULL;
14194  			}
14195  		}
14196  		if (*err) {
14197  			pr_cont("FAIL to prog_create err=%d len=%d\n",
14198  				*err, fprog.len);
14199  			return NULL;
14200  		}
14201  		break;
14202  
14203  	case INTERNAL:
14204  		fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
14205  		if (fp == NULL) {
14206  			pr_cont("UNEXPECTED_FAIL no memory left\n");
14207  			*err = -ENOMEM;
14208  			return NULL;
14209  		}
14210  
14211  		fp->len = flen;
14212  		/* Type doesn't really matter here as long as it's not unspec. */
14213  		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14214  		memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
14215  		fp->aux->stack_depth = tests[which].stack_depth;
14216  		fp->aux->verifier_zext = !!(tests[which].aux &
14217  					    FLAG_VERIFIER_ZEXT);
14218  
14219  		/* We cannot error here as we don't need type compatibility
14220  		 * checks.
14221  		 */
14222  		fp = bpf_prog_select_runtime(fp, err);
14223  		if (*err) {
14224  			pr_cont("FAIL to select_runtime err=%d\n", *err);
14225  			return NULL;
14226  		}
14227  		break;
14228  	}
14229  
14230  	*err = 0;
14231  	return fp;
14232  }
14233  
14234  static void release_filter(struct bpf_prog *fp, int which)
14235  {
14236  	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14237  
14238  	switch (test_type) {
14239  	case CLASSIC:
14240  		bpf_prog_destroy(fp);
14241  		break;
14242  	case INTERNAL:
14243  		bpf_prog_free(fp);
14244  		break;
14245  	}
14246  }
14247  
14248  static int __run_one(const struct bpf_prog *fp, const void *data,
14249  		     int runs, u64 *duration)
14250  {
14251  	u64 start, finish;
14252  	int ret = 0, i;
14253  
14254  	migrate_disable();
14255  	start = ktime_get_ns();
14256  
14257  	for (i = 0; i < runs; i++)
14258  		ret = bpf_prog_run(fp, data);
14259  
14260  	finish = ktime_get_ns();
14261  	migrate_enable();
14262  
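	/* Report the mean runtime per invocation, in nanoseconds */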
14263  	*duration = finish - start;
14264  	do_div(*duration, runs);
14265  
14266  	return ret;
14267  }
14268  
14269  static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
14270  {
14271  	int err_cnt = 0, i, runs = MAX_TESTRUNS;
14272  
14273  	if (test->nr_testruns)
14274  		runs = min(test->nr_testruns, MAX_TESTRUNS);
14275  
14276  	for (i = 0; i < MAX_SUBTESTS; i++) {
14277  		void *data;
14278  		u64 duration;
14279  		u32 ret;
14280  
14281  		/*
14282  		 * NOTE: Several sub-tests may be present, in which case
14283  		 * a zero {data_size, result} tuple indicates the end of
14284  		 * the sub-test array. The first test is always run,
14285  		 * even if both data_size and result happen to be zero.
14286  		 */
14287  		if (i > 0 &&
14288  		    test->test[i].data_size == 0 &&
14289  		    test->test[i].result == 0)
14290  			break;
14291  
14292  		data = generate_test_data(test, i);
14293  		if (!data && !(test->aux & FLAG_NO_DATA)) {
14294  			pr_cont("data generation failed ");
14295  			err_cnt++;
14296  			break;
14297  		}
14298  		ret = __run_one(fp, data, runs, &duration);
14299  		release_test_data(test, data);
14300  
14301  		if (ret == test->test[i].result) {
14302  			pr_cont("%lld ", duration);
14303  		} else {
14304  			pr_cont("ret %d != %d ", ret,
14305  				test->test[i].result);
14306  			err_cnt++;
14307  		}
14308  	}
14309  
14310  	return err_cnt;
14311  }
14312  
14313  static char test_name[64];
14314  module_param_string(test_name, test_name, sizeof(test_name), 0);
14315  
14316  static int test_id = -1;
14317  module_param(test_id, int, 0);
14318  
14319  static int test_range[2] = { 0, INT_MAX };
14320  module_param_array(test_range, int, NULL, 0);
14321  
14322  static bool exclude_test(int test_id)
14323  {
14324  	return test_id < test_range[0] || test_id > test_range[1];
14325  }
14326  
14327  static __init struct sk_buff *build_test_skb(void)
14328  {
14329  	u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
14330  	struct sk_buff *skb[2];
14331  	struct page *page[2];
14332  	int i, data_size = 8;
14333  
14334  	for (i = 0; i < 2; i++) {
14335  		page[i] = alloc_page(GFP_KERNEL);
14336  		if (!page[i]) {
14337  			if (i == 0)
14338  				goto err_page0;
14339  			else
14340  				goto err_page1;
14341  		}
14342  
14343  		/* this will set skb[i]->head_frag */
14344  		skb[i] = dev_alloc_skb(headroom + data_size);
14345  		if (!skb[i]) {
14346  			if (i == 0)
14347  				goto err_skb0;
14348  			else
14349  				goto err_skb1;
14350  		}
14351  
14352  		skb_reserve(skb[i], headroom);
14353  		skb_put(skb[i], data_size);
14354  		skb[i]->protocol = htons(ETH_P_IP);
14355  		skb_reset_network_header(skb[i]);
14356  		skb_set_mac_header(skb[i], -ETH_HLEN);
14357  
14358  		skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
14359  		/* skb_headlen(skb[i]): 8, skb[i]->head_frag = 1 */
14360  	}
14361  
14362  	/* setup shinfo */
14363  	skb_shinfo(skb[0])->gso_size = 1448;
14364  	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
14365  	skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
14366  	skb_shinfo(skb[0])->gso_segs = 0;
14367  	skb_shinfo(skb[0])->frag_list = skb[1];
14368  	skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
14369  
14370  	/* adjust skb[0]'s len */
14371  	skb[0]->len += skb[1]->len;
14372  	skb[0]->data_len += skb[1]->data_len;
14373  	skb[0]->truesize += skb[1]->truesize;
14374  
14375  	return skb[0];
14376  
14377  err_skb1:
14378  	__free_page(page[1]);
14379  err_page1:
14380  	kfree_skb(skb[0]);
14381  err_skb0:
14382  	__free_page(page[0]);
14383  err_page0:
14384  	return NULL;
14385  }
14386  
14387  static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
14388  {
14389  	unsigned int alloc_size = 2000;
14390  	unsigned int headroom = 102, doffset = 72, data_size = 1308;
14391  	struct sk_buff *skb[2];
14392  	int i;
14393  
14394  	/* skbs linked in a frag_list, both with linear data, with head_frag=0
14395  	 * (data allocated by kmalloc), both have tcp data of 1308 bytes
14396  	 * (total payload is 2616 bytes).
14397  	 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
14398  	 */
14399  	for (i = 0; i < 2; i++) {
14400  		skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
14401  		if (!skb[i]) {
14402  			if (i == 0)
14403  				goto err_skb0;
14404  			else
14405  				goto err_skb1;
14406  		}
14407  
14408  		skb[i]->protocol = htons(ETH_P_IPV6);
14409  		skb_reserve(skb[i], headroom);
14410  		skb_put(skb[i], doffset + data_size);
14411  		skb_reset_network_header(skb[i]);
14412  		if (i == 0)
14413  			skb_reset_mac_header(skb[i]);
14414  		else
14415  			skb_set_mac_header(skb[i], -ETH_HLEN);
14416  		__skb_pull(skb[i], doffset);
14417  	}
14418  
14419  	/* setup shinfo.
14420  	 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
14421  	 * reduced gso_size.
14422  	 */
14423  	skb_shinfo(skb[0])->gso_size = 1288;
14424  	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
14425  	skb_shinfo(skb[0])->gso_segs = 0;
14426  	skb_shinfo(skb[0])->frag_list = skb[1];
14427  
14428  	/* adjust skb[0]'s len */
14429  	skb[0]->len += skb[1]->len;
14430  	skb[0]->data_len += skb[1]->len;
14431  	skb[0]->truesize += skb[1]->truesize;
14432  
14433  	return skb[0];
14434  
14435  err_skb1:
14436  	kfree_skb(skb[0]);
14437  err_skb0:
14438  	return NULL;
14439  }
14440  
14441  struct skb_segment_test {
14442  	const char *descr;
14443  	struct sk_buff *(*build_skb)(void);
14444  	netdev_features_t features;
14445  };
14446  
14447  static struct skb_segment_test skb_segment_tests[] __initconst = {
14448  	{
14449  		.descr = "gso_with_rx_frags",
14450  		.build_skb = build_test_skb,
14451  		.features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
14452  			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
14453  	},
14454  	{
14455  		.descr = "gso_linear_no_head_frag",
14456  		.build_skb = build_test_skb_linear_no_head_frag,
14457  		.features = NETIF_F_SG | NETIF_F_FRAGLIST |
14458  			    NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
14459  			    NETIF_F_LLTX_BIT | NETIF_F_GRO |
14460  			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
14461  			    NETIF_F_HW_VLAN_STAG_TX_BIT
14462  	}
14463  };
14464  
14465  static __init int test_skb_segment_single(const struct skb_segment_test *test)
14466  {
14467  	struct sk_buff *skb, *segs;
14468  	int ret = -1;
14469  
14470  	skb = test->build_skb();
14471  	if (!skb) {
14472  		pr_info("%s: failed to build_test_skb", __func__);
14473  		goto done;
14474  	}
14475  
14476  	segs = skb_segment(skb, test->features);
14477  	if (!IS_ERR(segs)) {
14478  		kfree_skb_list(segs);
14479  		ret = 0;
14480  	}
14481  	kfree_skb(skb);
14482  done:
14483  	return ret;
14484  }
14485  
14486  static __init int test_skb_segment(void)
14487  {
14488  	int i, err_cnt = 0, pass_cnt = 0;
14489  
14490  	for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
14491  		const struct skb_segment_test *test = &skb_segment_tests[i];
14492  
14493  		cond_resched();
14494  		if (exclude_test(i))
14495  			continue;
14496  
14497  		pr_info("#%d %s ", i, test->descr);
14498  
14499  		if (test_skb_segment_single(test)) {
14500  			pr_cont("FAIL\n");
14501  			err_cnt++;
14502  		} else {
14503  			pr_cont("PASS\n");
14504  			pass_cnt++;
14505  		}
14506  	}
14507  
14508  	pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
14509  		pass_cnt, err_cnt);
14510  	return err_cnt ? -EINVAL : 0;
14511  }
14512  
14513  static __init int test_bpf(void)
14514  {
14515  	int i, err_cnt = 0, pass_cnt = 0;
14516  	int jit_cnt = 0, run_cnt = 0;
14517  
14518  	for (i = 0; i < ARRAY_SIZE(tests); i++) {
14519  		struct bpf_prog *fp;
14520  		int err;
14521  
14522  		cond_resched();
14523  		if (exclude_test(i))
14524  			continue;
14525  
14526  		pr_info("#%d %s ", i, tests[i].descr);
14527  
14528  		if (tests[i].fill_helper &&
14529  		    tests[i].fill_helper(&tests[i]) < 0) {
14530  			pr_cont("FAIL to prog_fill\n");
14531  			continue;
14532  		}
14533  
14534  		fp = generate_filter(i, &err);
14535  
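		/* generate_filter() copied the insns; drop the fill_helper buffer */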
14536  		if (tests[i].fill_helper) {
14537  			kfree(tests[i].u.ptr.insns);
14538  			tests[i].u.ptr.insns = NULL;
14539  		}
14540  
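		/* A NULL program with err == 0 is an expected verifier rejection */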
14541  		if (fp == NULL) {
14542  			if (err == 0) {
14543  				pass_cnt++;
14544  				continue;
14545  			}
14546  			err_cnt++;
14547  			continue;
14548  		}
14549  
14550  		pr_cont("jited:%u ", fp->jited);
14551  
14552  		run_cnt++;
14553  		if (fp->jited)
14554  			jit_cnt++;
14555  
14556  		err = run_one(fp, &tests[i]);
14557  		release_filter(fp, i);
14558  
14559  		if (err) {
14560  			pr_cont("FAIL (%d times)\n", err);
14561  			err_cnt++;
14562  		} else {
14563  			pr_cont("PASS\n");
14564  			pass_cnt++;
14565  		}
14566  	}
14567  
14568  	pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
14569  		pass_cnt, err_cnt, jit_cnt, run_cnt);
14570  
14571  	return err_cnt ? -EINVAL : 0;
14572  }
14573  
14574  struct tail_call_test {
14575  	const char *descr;
14576  	struct bpf_insn insns[MAX_INSNS];
14577  	int flags;
14578  	int result;
14579  	int stack_depth;
14580  };
14581  
14582  /* Flags that can be passed to tail call test cases */
14583  #define FLAG_NEED_STATE		BIT(0)
14584  #define FLAG_RESULT_IN_STATE	BIT(1)
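/*
 * FLAG_NEED_STATE passes a pointer to a local counter as the program
 * context, and FLAG_RESULT_IN_STATE compares that counter, rather than
 * the program's return value, against the expected result.
 */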
14585  
14586  /*
14587   * Magic marker used in test snippets for tail calls below.
14588   * BPF_LD/MOV to R2 and R3 with this immediate value is replaced
14589   * with the proper values by the test runner.
14590   */
14591  #define TAIL_CALL_MARKER 0x7a11ca11
14592  
14593  /* Special offset to indicate a NULL call target */
14594  #define TAIL_CALL_NULL 0x7fff
14595  
14596  /* Special offset to indicate an out-of-range index */
14597  #define TAIL_CALL_INVALID 0x7ffe
14598  
14599  #define TAIL_CALL(offset)			       \
14600  	BPF_LD_IMM64(R2, TAIL_CALL_MARKER),	       \
14601  	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
14602  		     offset, TAIL_CALL_MARKER),	       \
14603  	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
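
/*
 * Illustration: in a test at index N, TAIL_CALL(-1) loads TAIL_CALL_MARKER
 * into R2 and moves TAIL_CALL_MARKER into R3 with off = -1. The test runner
 * (prepare_tail_call_tests() below) rewrites R2 with the address of the
 * program array and R3 with the absolute index N - 1, so the BPF_TAIL_CALL
 * instruction branches to the preceding test program.
 */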
14604  
14605  /*
14606   * A test function to be called from a BPF program, clobbering a lot of
14607   * CPU registers in the process. A JITed BPF program calling this function
14608   * must save and restore any caller-saved registers it uses for internal
14609   * state, for example the current tail call count.
14610   */
14611  BPF_CALL_1(bpf_test_func, u64, arg)
14612  {
14613  	char buf[64];
14614  	long a = 0;
14615  	long b = 1;
14616  	long c = 2;
14617  	long d = 3;
14618  	long e = 4;
14619  	long f = 5;
14620  	long g = 6;
14621  	long h = 7;
14622  
14623  	return snprintf(buf, sizeof(buf),
14624  			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
14625  			a, b, c, d, e, f, g, h, (int)arg);
14626  }
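/* Use an id beyond the real helper ids so the test runner can spot this call */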
14627  #define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
14628  
14629  /*
14630   * Tail call tests. Each test case may call any other test in the table,
14631   * including itself, specified as a relative index offset from the calling
14632   * test. The index TAIL_CALL_NULL can be used to specify a NULL target
14633   * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
14634   * results in a target index that is out of range.
14635   */
14636  static struct tail_call_test tail_call_tests[] = {
14637  	{
14638  		"Tail call leaf",
14639  		.insns = {
14640  			BPF_ALU64_REG(BPF_MOV, R0, R1),
14641  			BPF_ALU64_IMM(BPF_ADD, R0, 1),
14642  			BPF_EXIT_INSN(),
14643  		},
14644  		.result = 1,
14645  	},
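	/*
	 * The next three tests each add their own increment to R1 (the NULL
	 * context, i.e. zero) and tail-call the preceding test, ending at the
	 * leaf which returns R1 + 1: hence the results 3, 6 and 10.
	 */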
14646  	{
14647  		"Tail call 2",
14648  		.insns = {
14649  			BPF_ALU64_IMM(BPF_ADD, R1, 2),
14650  			TAIL_CALL(-1),
14651  			BPF_ALU64_IMM(BPF_MOV, R0, -1),
14652  			BPF_EXIT_INSN(),
14653  		},
14654  		.result = 3,
14655  	},
14656  	{
14657  		"Tail call 3",
14658  		.insns = {
14659  			BPF_ALU64_IMM(BPF_ADD, R1, 3),
14660  			TAIL_CALL(-1),
14661  			BPF_ALU64_IMM(BPF_MOV, R0, -1),
14662  			BPF_EXIT_INSN(),
14663  		},
14664  		.result = 6,
14665  	},
14666  	{
14667  		"Tail call 4",
14668  		.insns = {
14669  			BPF_ALU64_IMM(BPF_ADD, R1, 4),
14670  			TAIL_CALL(-1),
14671  			BPF_ALU64_IMM(BPF_MOV, R0, -1),
14672  			BPF_EXIT_INSN(),
14673  		},
14674  		.result = 10,
14675  	},
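	/*
	 * The next two tests count program entries in the caller-provided
	 * state: each run is expected to enter the program once and then
	 * tail-call itself MAX_TAIL_CALL_CNT times before the limit stops it,
	 * i.e. MAX_TAIL_CALL_CNT + 1 increments per run over MAX_TESTRUNS runs.
	 */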
14676  	{
14677  		"Tail call error path, max count reached",
14678  		.insns = {
14679  			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14680  			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14681  			BPF_STX_MEM(BPF_W, R1, R2, 0),
14682  			TAIL_CALL(0),
14683  			BPF_EXIT_INSN(),
14684  		},
14685  		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14686  		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
14687  	},
14688  	{
14689  		"Tail call count preserved across function calls",
14690  		.insns = {
14691  			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14692  			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14693  			BPF_STX_MEM(BPF_W, R1, R2, 0),
14694  			BPF_STX_MEM(BPF_DW, R10, R1, -8),
14695  			BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
14696  			BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
14697  			BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
14698  			BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
14699  			BPF_CALL_REL(BPF_FUNC_jiffies64),
14700  			BPF_CALL_REL(BPF_FUNC_test_func),
14701  			BPF_LDX_MEM(BPF_DW, R1, R10, -8),
14702  			BPF_ALU32_REG(BPF_MOV, R0, R1),
14703  			TAIL_CALL(0),
14704  			BPF_EXIT_INSN(),
14705  		},
14706  		.stack_depth = 8,
14707  		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14708  		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
14709  	},
14710  	{
14711  		"Tail call error path, NULL target",
14712  		.insns = {
14713  			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14714  			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14715  			BPF_STX_MEM(BPF_W, R1, R2, 0),
14716  			TAIL_CALL(TAIL_CALL_NULL),
14717  			BPF_EXIT_INSN(),
14718  		},
14719  		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14720  		.result = MAX_TESTRUNS,
14721  	},
14722  	{
14723  		"Tail call error path, index out of range",
14724  		.insns = {
14725  			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14726  			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14727  			BPF_STX_MEM(BPF_W, R1, R2, 0),
14728  			TAIL_CALL(TAIL_CALL_INVALID),
14729  			BPF_EXIT_INSN(),
14730  		},
14731  		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14732  		.result = MAX_TESTRUNS,
14733  	},
14734  };
14735  
14736  static void __init destroy_tail_call_tests(struct bpf_array *progs)
14737  {
14738  	int i;
14739  
14740  	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
14741  		if (progs->ptrs[i])
14742  			bpf_prog_free(progs->ptrs[i]);
14743  	kfree(progs);
14744  }
14745  
14746  static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
14747  {
14748  	int ntests = ARRAY_SIZE(tail_call_tests);
14749  	struct bpf_array *progs;
14750  	int which, err;
14751  
14752  	/* Allocate the table of programs to be used for tail calls */
14753  	progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
14754  			GFP_KERNEL);
14755  	if (!progs)
14756  		goto out_nomem;
14757  
14758  	/* Create all eBPF programs and populate the table */
14759  	for (which = 0; which < ntests; which++) {
14760  		struct tail_call_test *test = &tail_call_tests[which];
14761  		struct bpf_prog *fp;
14762  		int len, i;
14763  
14764  		/* Compute the number of program instructions */
14765  		for (len = 0; len < MAX_INSNS; len++) {
14766  			struct bpf_insn *insn = &test->insns[len];
14767  
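			/* BPF_LD_IMM64 occupies two instruction slots */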
14768  			if (len < MAX_INSNS - 1 &&
14769  			    insn->code == (BPF_LD | BPF_DW | BPF_IMM))
14770  				len++;
14771  			if (insn->code == 0)
14772  				break;
14773  		}
14774  
14775  		/* Allocate and initialize the program */
14776  		fp = bpf_prog_alloc(bpf_prog_size(len), 0);
14777  		if (!fp)
14778  			goto out_nomem;
14779  
14780  		fp->len = len;
14781  		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14782  		fp->aux->stack_depth = test->stack_depth;
14783  		memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
14784  
14785  		/* Relocate runtime tail call offsets and addresses */
14786  		for (i = 0; i < len; i++) {
14787  			struct bpf_insn *insn = &fp->insnsi[i];
14788  			long addr = 0;
14789  
14790  			switch (insn->code) {
14791  			case BPF_LD | BPF_DW | BPF_IMM:
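				/* Patch the marker load with the address of the program array */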
14792  				if (insn->imm != TAIL_CALL_MARKER)
14793  					break;
14794  				insn[0].imm = (u32)(long)progs;
14795  				insn[1].imm = ((u64)(long)progs) >> 32;
14796  				break;
14797  
14798  			case BPF_ALU | BPF_MOV | BPF_K:
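				/* Replace the marker's relative offset with an absolute array index */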
14799  				if (insn->imm != TAIL_CALL_MARKER)
14800  					break;
14801  				if (insn->off == TAIL_CALL_NULL)
14802  					insn->imm = ntests;
14803  				else if (insn->off == TAIL_CALL_INVALID)
14804  					insn->imm = ntests + 1;
14805  				else
14806  					insn->imm = which + insn->off;
14807  				insn->off = 0;
14808  				break;
14809  
14810  			case BPF_JMP | BPF_CALL:
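				/* Resolve BPF_CALL_REL() pseudo calls to kernel addresses */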
14811  				if (insn->src_reg != BPF_PSEUDO_CALL)
14812  					break;
14813  				switch (insn->imm) {
14814  				case BPF_FUNC_get_numa_node_id:
14815  					addr = (long)&numa_node_id;
14816  					break;
14817  				case BPF_FUNC_ktime_get_ns:
14818  					addr = (long)&ktime_get_ns;
14819  					break;
14820  				case BPF_FUNC_ktime_get_boot_ns:
14821  					addr = (long)&ktime_get_boot_fast_ns;
14822  					break;
14823  				case BPF_FUNC_ktime_get_coarse_ns:
14824  					addr = (long)&ktime_get_coarse_ns;
14825  					break;
14826  				case BPF_FUNC_jiffies64:
14827  					addr = (long)&get_jiffies_64;
14828  					break;
14829  				case BPF_FUNC_test_func:
14830  					addr = (long)&bpf_test_func;
14831  					break;
14832  				default:
14833  					err = -EFAULT;
14834  					goto out_err;
14835  				}
14836  				*insn = BPF_EMIT_CALL(addr);
14837  				if ((long)__bpf_call_base + insn->imm != addr)
14838  					*insn = BPF_JMP_A(0); /* Skip: NOP */
14839  				break;
14840  			}
14841  		}
14842  
14843  		fp = bpf_prog_select_runtime(fp, &err);
14844  		if (err)
14845  			goto out_err;
14846  
14847  		progs->ptrs[which] = fp;
14848  	}
14849  
14850  	/* The last entry contains a NULL program pointer */
14851  	progs->map.max_entries = ntests + 1;
14852  	*pprogs = progs;
14853  	return 0;
14854  
14855  out_nomem:
14856  	err = -ENOMEM;
14857  
14858  out_err:
14859  	if (progs)
14860  		destroy_tail_call_tests(progs);
14861  	return err;
14862  }
14863  
14864  static __init int test_tail_calls(struct bpf_array *progs)
14865  {
14866  	int i, err_cnt = 0, pass_cnt = 0;
14867  	int jit_cnt = 0, run_cnt = 0;
14868  
14869  	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
14870  		struct tail_call_test *test = &tail_call_tests[i];
14871  		struct bpf_prog *fp = progs->ptrs[i];
14872  		int *data = NULL;
14873  		int state = 0;
14874  		u64 duration;
14875  		int ret;
14876  
14877  		cond_resched();
14878  		if (exclude_test(i))
14879  			continue;
14880  
14881  		pr_info("#%d %s ", i, test->descr);
14882  		if (!fp) {
14883  			err_cnt++;
14884  			continue;
14885  		}
14886  		pr_cont("jited:%u ", fp->jited);
14887  
14888  		run_cnt++;
14889  		if (fp->jited)
14890  			jit_cnt++;
14891  
14892  		if (test->flags & FLAG_NEED_STATE)
14893  			data = &state;
14894  		ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
14895  		if (test->flags & FLAG_RESULT_IN_STATE)
14896  			ret = state;
14897  		if (ret == test->result) {
14898  			pr_cont("%lld PASS", duration);
14899  			pass_cnt++;
14900  		} else {
14901  			pr_cont("ret %d != %d FAIL", ret, test->result);
14902  			err_cnt++;
14903  		}
14904  	}
14905  
14906  	pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
14907  		__func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
14908  
14909  	return err_cnt ? -EINVAL : 0;
14910  }
14911  
14912  static char test_suite[32];
14913  module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
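
/*
 * Example usage (illustrative; the module name/path may differ):
 *
 *   modprobe test_bpf                              # all tests in 'test_bpf'
 *   modprobe test_bpf test_id=42                   # one test by index
 *   modprobe test_bpf test_range=100,200           # an inclusive index range
 *   modprobe test_bpf test_suite=test_tail_calls   # run a different suite
 *   modprobe test_bpf test_name="Staggered jumps: JMP_JA"
 */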
14914  
14915  static __init int find_test_index(const char *test_name)
14916  {
14917  	int i;
14918  
14919  	if (!strcmp(test_suite, "test_bpf")) {
14920  		for (i = 0; i < ARRAY_SIZE(tests); i++) {
14921  			if (!strcmp(tests[i].descr, test_name))
14922  				return i;
14923  		}
14924  	}
14925  
14926  	if (!strcmp(test_suite, "test_tail_calls")) {
14927  		for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
14928  			if (!strcmp(tail_call_tests[i].descr, test_name))
14929  				return i;
14930  		}
14931  	}
14932  
14933  	if (!strcmp(test_suite, "test_skb_segment")) {
14934  		for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
14935  			if (!strcmp(skb_segment_tests[i].descr, test_name))
14936  				return i;
14937  		}
14938  	}
14939  
14940  	return -1;
14941  }
14942  
14943  static __init int prepare_test_range(void)
14944  {
14945  	int valid_range;
14946  
14947  	if (!strcmp(test_suite, "test_bpf"))
14948  		valid_range = ARRAY_SIZE(tests);
14949  	else if (!strcmp(test_suite, "test_tail_calls"))
14950  		valid_range = ARRAY_SIZE(tail_call_tests);
14951  	else if (!strcmp(test_suite, "test_skb_segment"))
14952  		valid_range = ARRAY_SIZE(skb_segment_tests);
14953  	else
14954  		return 0;
14955  
14956  	if (test_id >= 0) {
14957  		/*
14958  		 * if a test_id was specified, use test_range to
14959  		 * cover only that test.
14960  		 */
14961  		if (test_id >= valid_range) {
14962  			pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
14963  			       test_suite);
14964  			return -EINVAL;
14965  		}
14966  
14967  		test_range[0] = test_id;
14968  		test_range[1] = test_id;
14969  	} else if (*test_name) {
14970  		/*
14971  		 * if a test_name was specified, find it and set up
14972  		 * test_range to cover only that test.
14973  		 */
14974  		int idx = find_test_index(test_name);
14975  
14976  		if (idx < 0) {
14977  			pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
14978  			       test_name, test_suite);
14979  			return -EINVAL;
14980  		}
14981  		test_range[0] = idx;
14982  		test_range[1] = idx;
14983  	} else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
14984  		/*
14985  		 * check that the supplied test_range is valid.
14986  		 */
14987  		if (test_range[0] < 0 || test_range[1] >= valid_range) {
14988  			pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
14989  			       test_suite);
14990  			return -EINVAL;
14991  		}
14992  
14993  		if (test_range[1] < test_range[0]) {
14994  			pr_err("test_bpf: test_range ends before it starts.\n");
14995  			return -EINVAL;
14996  		}
14997  	}
14998  
14999  	return 0;
15000  }
15001  
15002  static int __init test_bpf_init(void)
15003  {
15004  	struct bpf_array *progs = NULL;
15005  	int ret;
15006  
15007  	if (strlen(test_suite) &&
15008  	    strcmp(test_suite, "test_bpf") &&
15009  	    strcmp(test_suite, "test_tail_calls") &&
15010  	    strcmp(test_suite, "test_skb_segment")) {
15011  		pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
15012  		return -EINVAL;
15013  	}
15014  
15015  	/*
15016  	 * if test_suite is not specified, but test_id, test_name or test_range
15017  	 * is specified, set 'test_bpf' as the default test suite.
15018  	 */
15019  	if (!strlen(test_suite) &&
15020  	    (test_id != -1 || strlen(test_name) ||
15021  	    (test_range[0] != 0 || test_range[1] != INT_MAX))) {
15022  		pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
15023  		strscpy(test_suite, "test_bpf", sizeof(test_suite));
15024  	}
15025  
15026  	ret = prepare_test_range();
15027  	if (ret < 0)
15028  		return ret;
15029  
15030  	if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
15031  		ret = test_bpf();
15032  		if (ret)
15033  			return ret;
15034  	}
15035  
15036  	if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
15037  		ret = prepare_tail_call_tests(&progs);
15038  		if (ret)
15039  			return ret;
15040  		ret = test_tail_calls(progs);
15041  		destroy_tail_call_tests(progs);
15042  		if (ret)
15043  			return ret;
15044  	}
15045  
15046  	if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
15047  		return test_skb_segment();
15048  
15049  	return 0;
15050  }
15051  
15052  static void __exit test_bpf_exit(void)
15053  {
15054  }
15055  
15056  module_init(test_bpf_init);
15057  module_exit(test_bpf_exit);
15058  
15059  MODULE_LICENSE("GPL");
15060