Lines matching refs:insns — identifier cross-reference hits for "insns"; the function names (bpf_fill_maxinsns*, __bpf_fill_pattern, test_bpf, prepare_tail_call_tests) place them in the kernel's BPF test module, apparently lib/test_bpf.c. The leading number on each hit is the line number within that source file.
68 struct sock_filter insns[MAX_INSNS]; member
71 void *insns; member
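
The two "member" hits above belong to the same union inside struct bpf_test: classic BPF cases embed their program inline, while generated cases carry only a pointer that a fill helper populates at runtime. A minimal sketch of that layout as the hits suggest it (the real struct carries further fields such as flags, input data and expected results):

struct bpf_test {
        const char *descr;
        union {
                struct sock_filter insns[MAX_INSNS];    /* line 68: inline program */
                struct {
                        void *insns;                    /* line 71: generated program */
                        unsigned int len;
                } ptr;
                /* ... possibly further inline variants ... */
        } u;
        /* ... aux flags, test data, expected results ... */
};
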
104 self->u.ptr.insns = insn; in bpf_fill_maxinsns1()
123 self->u.ptr.insns = insn; in bpf_fill_maxinsns2()
150 self->u.ptr.insns = insn; in bpf_fill_maxinsns3()
169 self->u.ptr.insns = insn; in bpf_fill_maxinsns4()
192 self->u.ptr.insns = insn; in bpf_fill_maxinsns5()
214 self->u.ptr.insns = insn; in bpf_fill_maxinsns6()
240 self->u.ptr.insns = insn; in bpf_fill_maxinsns7()
263 self->u.ptr.insns = insn; in bpf_fill_maxinsns8()
289 self->u.ptr.insns = insn; in bpf_fill_maxinsns9()
314 self->u.ptr.insns = insn; in bpf_fill_maxinsns10()
343 self->u.ptr.insns = insn; in __bpf_fill_ja()
372 self->u.ptr.insns = insn; in bpf_fill_maxinsns12()
395 self->u.ptr.insns = insn; in bpf_fill_maxinsns13()
425 self->u.ptr.insns = insn; in bpf_fill_ld_abs_get_processor_id()
450 self->u.ptr.insns = insn; in __bpf_fill_stxdw()
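
Every hit from line 104 down to 450 is the closing handshake of a fill helper: build the program into a heap buffer, then publish it through self->u.ptr.insns. A minimal sketch of the shared shape, with a hypothetical all-returns payload (the u.ptr.len assignment and the -ENOMEM convention follow the kmalloc_array hits further down):

static int bpf_fill_maxinsns_example(struct bpf_test *self)
{
        unsigned int len = BPF_MAXINSNS;
        struct sock_filter *insn;
        int i;

        insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
        if (!insn)
                return -ENOMEM;

        /* hypothetical payload: every insn returns a distinct value */
        for (i = 0; i < len; i++)
                insn[i] = (struct sock_filter)
                        { .code = BPF_RET | BPF_K, .k = i };

        self->u.ptr.insns = insn;       /* the assignment each hit above shows */
        self->u.ptr.len = len;

        return 0;
}
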
467 static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64) in __bpf_ld_imm64()
471 memcpy(insns, tmp, sizeof(tmp)); in __bpf_ld_imm64()
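
From the two hits at 467/471, the helper's likely full body: it expands one logical 64-bit load into the two-slot BPF_LD_IMM64 encoding and returns the number of slots written, which is why callers below always advance their index with i += __bpf_ld_imm64(...):

static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
{
        struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};

        memcpy(insns, tmp, sizeof(tmp));
        return 2;       /* slots consumed by the double-wide encoding */
}
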
483 struct bpf_insn *insns; in __bpf_fill_max_jmp() local
487 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL); in __bpf_fill_max_jmp()
488 if (!insns) in __bpf_fill_max_jmp()
491 i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL); in __bpf_fill_max_jmp()
492 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1); in __bpf_fill_max_jmp()
493 insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX); in __bpf_fill_max_jmp()
494 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2); in __bpf_fill_max_jmp()
495 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_max_jmp()
505 insns[i++] = BPF_ALU32_REG(op, R0, R1); in __bpf_fill_max_jmp()
507 insns[i++] = BPF_ALU64_REG(op, R0, R1); in __bpf_fill_max_jmp()
510 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_max_jmp()
511 self->u.ptr.insns = insns; in __bpf_fill_max_jmp()
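
The point of this builder is the S16_MAX branch offset at line 493, the largest distance a conditional jump's 16-bit offset field can encode. The alternating ALU32/ALU64 hits at 505/507 suggest the gap is stuffed with filler instructions so that a taken branch lands exactly on the final exit. A sketch under those assumptions (the filler op mix is hypothetical; only the branch distance matters, since neither path executes the filler):

static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
{
        struct bpf_insn *insns;
        int len = S16_MAX + 5;  /* assumed: prologue + filler + final exit */
        int i, op;

        insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
        if (!insns)
                return -ENOMEM;

        i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
        insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
        insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX); /* max offset   */
        insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);      /* fall-through */
        insns[i++] = BPF_EXIT_INSN();                    /* returns 2    */

        /* Filler stretching the branch to its maximum encodable span; a
         * taken branch jumps over all of it onto the final exit (R0 == 1).
         */
        while (i < len - 1) {
                op = (i & 1) ? BPF_LSH : BPF_RSH;       /* hypothetical mix */
                if (i & 2)
                        insns[i++] = BPF_ALU32_REG(op, R0, R1);
                else
                        insns[i++] = BPF_ALU64_REG(op, R0, R1);
        }
        insns[i++] = BPF_EXIT_INSN();

        self->u.ptr.insns = insns;
        self->u.ptr.len = len;
        return 0;
}
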
682 self->u.ptr.insns = insn; in __bpf_fill_alu_shift()
792 self->u.ptr.insns = insn; in __bpf_fill_alu_shift_same_reg()
847 struct bpf_insn *insns; in __bpf_fill_pattern() local
859 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL); in __bpf_fill_pattern()
860 if (!insns) in __bpf_fill_pattern()
864 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0); in __bpf_fill_pattern()
882 &insns[i], in __bpf_fill_pattern()
899 i += (*emit)(self, arg, &insns[i], in __bpf_fill_pattern()
904 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1); in __bpf_fill_pattern()
905 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_pattern()
908 self->u.ptr.insns = insns; in __bpf_fill_pattern()
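
__bpf_fill_pattern drives each emit callback twice, which is why every __bpf_emit_* helper below opens with an if (!insns) guard: called with a NULL buffer the callback only reports how many slots it would need, letting the framework size the kmalloc_array at line 859 before the real emission pass at line 899. A sketch of a callback honoring that two-pass contract (the parameter list follows the "argument" hits below; the body is hypothetical):

static int __bpf_emit_example(struct bpf_test *self, void *arg,
                              struct bpf_insn *insns, s64 dst, s64 imm)
{
        int i = 0;

        if (!insns)             /* sizing pass */
                return 5;       /* worst-case slot count */

        i += __bpf_ld_imm64(&insns[i], R1, dst);        /* 2 slots */
        insns[i++] = BPF_ALU64_IMM(BPF_ADD, R1, imm);   /* hypothetical op */
        insns[i++] = BPF_JMP_IMM(BPF_JEQ, R1, 0, 1);    /* hypothetical check */
        insns[i++] = BPF_EXIT_INSN();

        return i;               /* emission pass: slots actually written */
}
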
937 struct bpf_insn *insns, s64 dst, s64 imm) in __bpf_emit_alu64_imm() argument
943 if (!insns) in __bpf_emit_alu64_imm()
947 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_alu64_imm()
948 i += __bpf_ld_imm64(&insns[i], R3, res); in __bpf_emit_alu64_imm()
949 insns[i++] = BPF_ALU64_IMM(op, R1, imm); in __bpf_emit_alu64_imm()
950 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1); in __bpf_emit_alu64_imm()
951 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_alu64_imm()
958 struct bpf_insn *insns, s64 dst, s64 imm) in __bpf_emit_alu32_imm() argument
964 if (!insns) in __bpf_emit_alu32_imm()
968 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_alu32_imm()
969 i += __bpf_ld_imm64(&insns[i], R3, (u32)res); in __bpf_emit_alu32_imm()
970 insns[i++] = BPF_ALU32_IMM(op, R1, imm); in __bpf_emit_alu32_imm()
971 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1); in __bpf_emit_alu32_imm()
972 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_alu32_imm()
979 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_alu64_reg() argument
985 if (!insns) in __bpf_emit_alu64_reg()
989 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_alu64_reg()
990 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_alu64_reg()
991 i += __bpf_ld_imm64(&insns[i], R3, res); in __bpf_emit_alu64_reg()
992 insns[i++] = BPF_ALU64_REG(op, R1, R2); in __bpf_emit_alu64_reg()
993 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1); in __bpf_emit_alu64_reg()
994 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_alu64_reg()
1001 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_alu32_reg() argument
1007 if (!insns) in __bpf_emit_alu32_reg()
1011 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_alu32_reg()
1012 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_alu32_reg()
1013 i += __bpf_ld_imm64(&insns[i], R3, (u32)res); in __bpf_emit_alu32_reg()
1014 insns[i++] = BPF_ALU32_REG(op, R1, R2); in __bpf_emit_alu32_reg()
1015 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1); in __bpf_emit_alu32_reg()
1016 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_alu32_reg()
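
The four emitters above (ALU64/ALU32 x imm/reg) share one verification idiom: the reference result res is computed in plain C before emission (truncated via the (u32) casts at 969/1013 for the 32-bit forms), loaded into R3, and compared after the operation. On a match the JEQ hops over the EXIT and execution continues with the next emitted case; on a mismatch the program exits while R0 still holds the 0 from __bpf_fill_pattern's prologue (line 864) instead of the 1 its epilogue would set (lines 904-905). The core, annotated:

        i += __bpf_ld_imm64(&insns[i], R1, dst);        /* operand           */
        i += __bpf_ld_imm64(&insns[i], R3, res);        /* C-side reference  */
        insns[i++] = BPF_ALU64_IMM(op, R1, imm);        /* op under test     */
        insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);   /* match: skip exit  */
        insns[i++] = BPF_EXIT_INSN();                   /* mismatch: R0 == 0 */
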
1241 struct bpf_insn *insns; in __bpf_fill_alu_imm_regs() local
1247 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL); in __bpf_fill_alu_imm_regs()
1248 if (!insns) in __bpf_fill_alu_imm_regs()
1268 i += __bpf_ld_imm64(&insns[i], rd, dst); in __bpf_fill_alu_imm_regs()
1271 insns[i++] = BPF_ALU32_IMM(op, rd, imm); in __bpf_fill_alu_imm_regs()
1273 insns[i++] = BPF_ALU64_IMM(op, rd, imm); in __bpf_fill_alu_imm_regs()
1275 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2); in __bpf_fill_alu_imm_regs()
1276 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_fill_alu_imm_regs()
1277 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_alu_imm_regs()
1279 insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32); in __bpf_fill_alu_imm_regs()
1280 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2); in __bpf_fill_alu_imm_regs()
1281 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_fill_alu_imm_regs()
1282 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_alu_imm_regs()
1285 insns[i++] = BPF_MOV64_IMM(R0, 1); in __bpf_fill_alu_imm_regs()
1286 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_alu_imm_regs()
1288 self->u.ptr.insns = insns; in __bpf_fill_alu_imm_regs()
1425 struct bpf_insn *insns; in __bpf_fill_alu_reg_pairs() local
1429 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL); in __bpf_fill_alu_reg_pairs()
1430 if (!insns) in __bpf_fill_alu_reg_pairs()
1458 i += __bpf_ld_imm64(&insns[i], rd, dst); in __bpf_fill_alu_reg_pairs()
1459 i += __bpf_ld_imm64(&insns[i], rs, src); in __bpf_fill_alu_reg_pairs()
1462 insns[i++] = BPF_ALU32_REG(op, rd, rs); in __bpf_fill_alu_reg_pairs()
1464 insns[i++] = BPF_ALU64_REG(op, rd, rs); in __bpf_fill_alu_reg_pairs()
1466 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2); in __bpf_fill_alu_reg_pairs()
1467 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_fill_alu_reg_pairs()
1468 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_alu_reg_pairs()
1470 insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32); in __bpf_fill_alu_reg_pairs()
1471 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2); in __bpf_fill_alu_reg_pairs()
1472 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_fill_alu_reg_pairs()
1473 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_alu_reg_pairs()
1477 insns[i++] = BPF_MOV64_IMM(R0, 1); in __bpf_fill_alu_reg_pairs()
1478 insns[i++] = BPF_EXIT_INSN(); in __bpf_fill_alu_reg_pairs()
1480 self->u.ptr.insns = insns; in __bpf_fill_alu_reg_pairs()
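
Unlike the emitters above, the two __bpf_fill_alu_*_regs builders loop over register numbers for destination and source, and they verify a 64-bit result in two 32-bit halves, returning __LINE__ as the error code so a failing run points at the exact check in the C source. One iteration, annotated from the hits (rd, rs and val come from the enclosing loops):

        i += __bpf_ld_imm64(&insns[i], rd, dst);
        i += __bpf_ld_imm64(&insns[i], rs, src);
        insns[i++] = BPF_ALU64_REG(op, rd, rs);

        insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);     /* low word OK?  */
        insns[i++] = BPF_MOV64_IMM(R0, __LINE__);            /* no: mark line */
        insns[i++] = BPF_EXIT_INSN();

        insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);         /* expose high   */
        insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
        insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
        insns[i++] = BPF_EXIT_INSN();
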
1615 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_atomic64() argument
1621 if (!insns) in __bpf_emit_atomic64()
1638 i += __bpf_ld_imm64(&insns[i], R0, keep); in __bpf_emit_atomic64()
1639 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_atomic64()
1640 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_atomic64()
1641 i += __bpf_ld_imm64(&insns[i], R3, res); in __bpf_emit_atomic64()
1642 i += __bpf_ld_imm64(&insns[i], R4, fetch); in __bpf_emit_atomic64()
1643 i += __bpf_ld_imm64(&insns[i], R5, keep); in __bpf_emit_atomic64()
1645 insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8); in __bpf_emit_atomic64()
1646 insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8); in __bpf_emit_atomic64()
1647 insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8); in __bpf_emit_atomic64()
1649 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1); in __bpf_emit_atomic64()
1650 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_atomic64()
1652 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1); in __bpf_emit_atomic64()
1653 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_atomic64()
1655 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1); in __bpf_emit_atomic64()
1656 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_atomic64()
1662 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_atomic32() argument
1668 if (!insns) in __bpf_emit_atomic32()
1685 i += __bpf_ld_imm64(&insns[i], R0, keep); in __bpf_emit_atomic32()
1686 i += __bpf_ld_imm64(&insns[i], R1, (u32)dst); in __bpf_emit_atomic32()
1687 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_atomic32()
1688 i += __bpf_ld_imm64(&insns[i], R3, (u32)res); in __bpf_emit_atomic32()
1689 i += __bpf_ld_imm64(&insns[i], R4, fetch); in __bpf_emit_atomic32()
1690 i += __bpf_ld_imm64(&insns[i], R5, keep); in __bpf_emit_atomic32()
1692 insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4); in __bpf_emit_atomic32()
1693 insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4); in __bpf_emit_atomic32()
1694 insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4); in __bpf_emit_atomic32()
1696 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1); in __bpf_emit_atomic32()
1697 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_atomic32()
1699 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1); in __bpf_emit_atomic32()
1700 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_atomic32()
1702 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1); in __bpf_emit_atomic32()
1703 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_atomic32()
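
Both atomic emitters stage the operation through the stack frame (R10) and then verify three things at once: the value reloaded from memory must equal the C-computed res (R1 == R3); the source register must equal fetch (R2 == R4), which for BPF_FETCH/BPF_XCHG variants is the old memory value the op returns there and otherwise the unchanged src; and R0 must still equal the keep canary (R0 == R5), proving the op did not clobber an uninvolved register. The 64-bit staging and checks, annotated:

        insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);       /* dst -> stack  */
        insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8); /* op under test */
        insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);       /* reload result */

        insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);   /* memory == res?    */
        insns[i++] = BPF_EXIT_INSN();
        insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);   /* src == fetch?     */
        insns[i++] = BPF_EXIT_INSN();
        insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);   /* R0 canary intact? */
        insns[i++] = BPF_EXIT_INSN();
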
1709 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_cmpxchg64() argument
1713 if (!insns) in __bpf_emit_cmpxchg64()
1716 i += __bpf_ld_imm64(&insns[i], R0, ~dst); in __bpf_emit_cmpxchg64()
1717 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_cmpxchg64()
1718 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_cmpxchg64()
1721 insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8); in __bpf_emit_cmpxchg64()
1722 insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8); in __bpf_emit_cmpxchg64()
1723 insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8); in __bpf_emit_cmpxchg64()
1725 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2); in __bpf_emit_cmpxchg64()
1726 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_emit_cmpxchg64()
1727 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg64()
1729 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2); in __bpf_emit_cmpxchg64()
1730 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_emit_cmpxchg64()
1731 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg64()
1734 insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8); in __bpf_emit_cmpxchg64()
1735 insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8); in __bpf_emit_cmpxchg64()
1737 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2); in __bpf_emit_cmpxchg64()
1738 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_emit_cmpxchg64()
1739 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg64()
1741 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2); in __bpf_emit_cmpxchg64()
1742 insns[i++] = BPF_MOV64_IMM(R0, __LINE__); in __bpf_emit_cmpxchg64()
1743 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg64()
1749 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_cmpxchg32() argument
1753 if (!insns) in __bpf_emit_cmpxchg32()
1756 i += __bpf_ld_imm64(&insns[i], R0, ~dst); in __bpf_emit_cmpxchg32()
1757 i += __bpf_ld_imm64(&insns[i], R1, (u32)dst); in __bpf_emit_cmpxchg32()
1758 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_cmpxchg32()
1761 insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4); in __bpf_emit_cmpxchg32()
1762 insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4); in __bpf_emit_cmpxchg32()
1763 insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */ in __bpf_emit_cmpxchg32()
1764 insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4); in __bpf_emit_cmpxchg32()
1766 insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2); in __bpf_emit_cmpxchg32()
1767 insns[i++] = BPF_MOV32_IMM(R0, __LINE__); in __bpf_emit_cmpxchg32()
1768 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg32()
1770 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2); in __bpf_emit_cmpxchg32()
1771 insns[i++] = BPF_MOV32_IMM(R0, __LINE__); in __bpf_emit_cmpxchg32()
1772 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg32()
1775 i += __bpf_ld_imm64(&insns[i], R0, dst); in __bpf_emit_cmpxchg32()
1776 insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4); in __bpf_emit_cmpxchg32()
1777 insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */ in __bpf_emit_cmpxchg32()
1778 insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4); in __bpf_emit_cmpxchg32()
1780 insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2); in __bpf_emit_cmpxchg32()
1781 insns[i++] = BPF_MOV32_IMM(R0, __LINE__); in __bpf_emit_cmpxchg32()
1782 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg32()
1784 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2); in __bpf_emit_cmpxchg32()
1785 insns[i++] = BPF_MOV32_IMM(R0, __LINE__); in __bpf_emit_cmpxchg32()
1786 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_cmpxchg32()
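
The cmpxchg emitters run the instruction twice to cover both outcomes. Phase 1 loads the comparand R0 with ~dst so the compare fails: memory must stay unchanged (R1 == R3) and the old value must come back in R0 (R0 == R3). Phase 2 runs with R0 holding dst (the 32-bit variant reloads it explicitly at 1775; the 64-bit one relies on phase 1 having left dst in R0), so the exchange succeeds: memory must now hold src (R2 == R3) and R0 must again report the previous value (R0 == R1). The 32-bit variant adds one wrinkle: a BPF_W cmpxchg writes a 32-bit result to R0, and since test_bpf executes its programs without the verifier, the zero-extension the verifier would normally insert must be emitted by hand, hence the BPF_ZEXT_REG(R0) hits at 1763/1777. Phase 1 of the 64-bit variant, annotated (R1 = dst and R2 = src are loaded beforehand, lines 1717-1718):

        i += __bpf_ld_imm64(&insns[i], R0, ~dst);       /* comparand mismatch */
        insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
        insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
        insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);

        insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);   /* memory untouched? */
        insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
        insns[i++] = BPF_EXIT_INSN();
        insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);   /* old value in R0?  */
        insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
        insns[i++] = BPF_EXIT_INSN();
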
2040 self->u.ptr.insns = insn; in __bpf_fill_atomic_reg_pairs()
2197 self->u.ptr.insns = insn; in bpf_fill_ld_imm64_magn()
2259 self->u.ptr.insns = insn; in __bpf_fill_ld_imm64_bytes()
2323 struct bpf_insn *insns, s64 dst, s64 imm) in __bpf_emit_jmp_imm() argument
2327 if (insns) { in __bpf_emit_jmp_imm()
2331 insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match); in __bpf_emit_jmp_imm()
2333 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_jmp_imm()
2334 insns[i++] = BPF_JMP_IMM(op, R1, imm, 1); in __bpf_emit_jmp_imm()
2336 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1); in __bpf_emit_jmp_imm()
2337 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_jmp_imm()
2346 struct bpf_insn *insns, s64 dst, s64 imm) in __bpf_emit_jmp32_imm() argument
2350 if (insns) { in __bpf_emit_jmp32_imm()
2354 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_jmp32_imm()
2355 insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1); in __bpf_emit_jmp32_imm()
2357 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1); in __bpf_emit_jmp32_imm()
2358 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_jmp32_imm()
2367 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_jmp_reg() argument
2371 if (insns) { in __bpf_emit_jmp_reg()
2375 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_jmp_reg()
2376 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_jmp_reg()
2377 insns[i++] = BPF_JMP_REG(op, R1, R2, 1); in __bpf_emit_jmp_reg()
2379 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1); in __bpf_emit_jmp_reg()
2380 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_jmp_reg()
2389 struct bpf_insn *insns, s64 dst, s64 src) in __bpf_emit_jmp32_reg() argument
2393 if (insns) { in __bpf_emit_jmp32_reg()
2397 i += __bpf_ld_imm64(&insns[i], R1, dst); in __bpf_emit_jmp32_reg()
2398 i += __bpf_ld_imm64(&insns[i], R2, src); in __bpf_emit_jmp32_reg()
2399 insns[i++] = BPF_JMP32_REG(op, R1, R2, 1); in __bpf_emit_jmp32_reg()
2401 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1); in __bpf_emit_jmp32_reg()
2402 insns[i++] = BPF_EXIT_INSN(); in __bpf_emit_jmp32_reg()
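
All four jump emitters work from a C-side oracle: a predicate decides whether the condition under test should hold for this (dst, imm/src) pair, and the emitted layout differs for the two expectations, which explains the gap at source line 2335 between the JMP hit and the JA hit. A plausible reconstruction of the immediate variant (the oracle name and the sizing constant are assumptions):

static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
                              struct bpf_insn *insns, s64 dst, s64 imm)
{
        int op = *(int *)arg;
        int i = 0;

        if (insns) {
                /* hypothetical C-side oracle: should this branch be taken? */
                bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);

                insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

                i += __bpf_ld_imm64(&insns[i], R1, dst);
                insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
                if (!match)     /* expected fall-through: hop over the exit */
                        insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
                insns[i++] = BPF_EXIT_INSN();   /* a mispredicted branch
                                                 * lands here early */
                return i;
        }

        return 6;       /* assumed worst-case slot count for the sizing pass */
}
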
2724 struct bpf_insn *insns; in __bpf_fill_staggered_jumps() local
2727 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL); in __bpf_fill_staggered_jumps()
2728 if (!insns) in __bpf_fill_staggered_jumps()
2732 insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0); in __bpf_fill_staggered_jumps()
2733 insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1); in __bpf_fill_staggered_jumps()
2734 insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2); in __bpf_fill_staggered_jumps()
2735 insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2); in __bpf_fill_staggered_jumps()
2739 struct bpf_insn *ins = &insns[4 + 3 * ind]; in __bpf_fill_staggered_jumps()
2754 insns[len - 1] = BPF_EXIT_INSN(); in __bpf_fill_staggered_jumps()
2756 self->u.ptr.insns = insns; in __bpf_fill_staggered_jumps()
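
The staggered-jump builder stresses branch-offset computation rather than condition evaluation: after the preamble (lines 2732-2735), the program ping-pongs between 3-instruction blocks, alternating forward and backward jumps of shrinking magnitude, so both offset signs and many distances get exercised; R0 tracks the hop sequence so a branch landing in the wrong block is caught. A sketch of one block, per the &insns[4 + 3 * ind] stride at line 2739 (prev_loc, this_loc and out_off are hypothetical names for the loop-computed values):

        struct bpf_insn *ins = &insns[4 + 3 * ind];

        /* wrong entry point? bail out toward the final exit */
        ins[0] = BPF_JMP_IMM(BPF_JNE, R0, prev_loc, out_off);
        /* record that this block executed */
        ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, this_loc);
        /* the staggered branch under test, alternating +/- direction */
        ins[2] = *jmp;
        ins[2].off = 3 * (off - 1);
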
3086 .u.insns = {
3105 .u.insns = {
3117 .u.insns = {
3132 .u.insns = {
3156 .u.insns = {
3172 .u.insns = {
3184 .u.insns = {
3195 .u.insns = {
3205 .u.insns = {
3218 .u.insns = {
3232 .u.insns = {
3245 .u.insns = {
3259 .u.insns = {
3280 .u.insns = {
3291 .u.insns = {
3302 .u.insns = {
3313 .u.insns = {
3332 .u.insns = {
3346 .u.insns = {
3360 .u.insns = {
3371 .u.insns = {
3382 .u.insns = {
3397 .u.insns = {
3415 .u.insns = {
3453 .u.insns = {
3480 .u.insns = {
3493 .u.insns = {
3515 .u.insns = {
3528 .u.insns = {
3541 .u.insns = {
3554 .u.insns = {
3567 .u.insns = {
3586 .u.insns = {
3618 .u.insns = {
3663 .u.insns = {
3716 .u.insns = {
4556 .u.insns = {
4567 .u.insns = {
4579 .u.insns = {
4592 .u.insns = {
4604 .u.insns = {
4686 .u.insns = {
4697 .u.insns = {
4709 .u.insns = {
4799 .u.insns = {
4872 .u.insns = {
4885 .u.insns = {
10781 .u.insns = {
10797 .u.insns = {
10813 .u.insns = {
10829 .u.insns = {
10841 .u.insns = {
10853 .u.insns = {
10868 .u.insns = {
10883 .u.insns = {
10898 .u.insns = {
10909 .u.insns = {
10927 .u.insns = {
10937 .u.insns = {
10948 .u.insns = {
10959 .u.insns = {
10970 .u.insns = {
10981 .u.insns = {
10992 .u.insns = {
11006 .u.insns = {
11022 .u.insns = {
11038 .u.insns = {
11054 .u.insns = {
11065 .u.insns = {
11076 .u.insns = {
11087 .u.insns = {
11105 .u.insns = {
11123 .u.insns = {
11141 .u.insns = {
11159 .u.insns = {
11177 .u.insns = {
11188 .u.insns = {
11199 .u.insns = {
11210 .u.insns = {
11227 .u.insns = {
11237 .u.insns = {
11247 .u.insns = {
11256 .u.insns = {
11266 .u.insns = {
11276 .u.insns = {
11289 .u.insns = {
11306 .u.insns = {
11323 .u.insns = {
11333 .u.insns = {
11343 .u.insns = {
11352 .u.insns = {
11362 .u.insns = {
11372 .u.insns = {
11389 .u.insns = {
11406 .u.insns = {
11423 .u.insns = {
11440 .u.insns = {
11450 .u.insns = {
11460 .u.insns = {
11469 .u.insns = {
11479 .u.insns = {
11489 .u.insns = {
11500 .u.insns = {
11514 .u.insns = {
11526 .u.insns = {
11538 .u.insns = {
11550 .u.insns = {
11562 .u.insns = {
11578 .u.insns = {
11594 .u.insns = {
11608 .u.insns = {
11624 .u.insns = {
11638 .u.insns = {
11654 .u.insns = {
11668 .u.insns = {
11684 .u.insns = {
11698 .u.insns = {
11714 .u.insns = {
11728 .u.insns = {
11744 .u.insns = {
11851 .u.insns = {
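
The long run of ".u.insns = {" hits above marks the inline test-case table itself: each initializer opens the program of one static case. For orientation, a hypothetical entry in the shape these designated initializers imply (field order and the CLASSIC flag follow the file's test-case convention; the values are illustrative only):

{
        "Illustrative classic BPF case",
        .u.insns = {
                BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 0), /* A = word at byte 0 */
                BPF_STMT(BPF_RET | BPF_A, 0),          /* return A           */
        },
        CLASSIC,
        { 0x01, 0x02, 0x03, 0x04 },                    /* input packet        */
        { { 4, 0x01020304 } },                         /* { data len, result } */
},
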
14822 fp = tests[which].u.insns; in filter_length()
14833 return tests[which].u.ptr.insns; in filter_pointer()
14835 return tests[which].u.insns; in filter_pointer()
15209 kfree(tests[i].u.ptr.insns); in test_bpf()
15210 tests[i].u.ptr.insns = NULL; in test_bpf()
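
filter_length/filter_pointer (14822-14835) are where the two program representations converge at run time, and the kfree/NULL pair at 15209-15210 is the matching cleanup for every generated u.ptr.insns buffer. A sketch of the pointer dispatch, assuming a fill_helper callback member marks the generated cases:

static struct sock_filter *filter_pointer(int which)
{
        if (tests[which].fill_helper)           /* generated at runtime */
                return tests[which].u.ptr.insns;

        return tests[which].u.insns;            /* inline in the table */
}
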
15248 struct bpf_insn insns[MAX_INSNS]; member
15312 .insns = {
15321 .insns = {
15332 .insns = {
15343 .insns = {
15354 .insns = {
15372 .insns = {
15385 .insns = {
15398 .insns = {
15421 .insns = {
15434 .insns = {
15476 struct bpf_insn *insn = &test->insns[len]; in prepare_tail_call_tests()
15494 memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn)); in prepare_tail_call_tests()
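
The tail-call suite at the bottom keeps its own test struct with an inline insns[MAX_INSNS] array (15248) instead of the bpf_test union; prepare_tail_call_tests measures each program by scanning that array (the &test->insns[len] probe at 15476) and copies it into a freshly allocated struct bpf_prog, which is why the memcpy at 15494 targets fp->insnsi. A sketch of the allocate-and-copy step, assuming len has already been determined by the scan:

        struct bpf_prog *fp;

        fp = bpf_prog_alloc(bpf_prog_size(len), 0);
        if (!fp)
                return -ENOMEM;         /* the real code likely unwinds instead */

        fp->len = len;
        memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
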