Lines Matching full:off

653 /* Load operation: dst = *(size *)(src + off) */
655 const u8 dst[], u8 src, s16 off, u8 size) in emit_ldx() argument
660 emit(ctx, lbu, lo(dst), off, src); in emit_ldx()
665 emit(ctx, lhu, lo(dst), off, src); in emit_ldx()
670 emit(ctx, lw, lo(dst), off, src); in emit_ldx()
676 emit(ctx, lw, dst[0], off + 4, src); in emit_ldx()
677 emit(ctx, lw, dst[1], off, src); in emit_ldx()
679 emit(ctx, lw, dst[1], off, src); in emit_ldx()
680 emit(ctx, lw, dst[0], off + 4, src); in emit_ldx()
688 /* Store operation: *(size *)(dst + off) = src */
690 const u8 dst, const u8 src[], s16 off, u8 size) in emit_stx() argument
695 emit(ctx, sb, lo(src), off, dst); in emit_stx()
699 emit(ctx, sh, lo(src), off, dst); in emit_stx()
703 emit(ctx, sw, lo(src), off, dst); in emit_stx()
707 emit(ctx, sw, src[1], off, dst); in emit_stx()
708 emit(ctx, sw, src[0], off + 4, dst); in emit_stx()
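The emit_ldx()/emit_stx() lines above come from what appears to be a 32-bit MIPS eBPF JIT, where a 64-bit (BPF_DW) access is split into two 32-bit lw/sw instructions at off and off + 4. The two load orderings only differ in which destination half is written first, presumably so that when the base register (src) doubles as one destination half, that half is written last and the base address is not clobbered before the second load. Below is a minimal user-space sketch of that ordering rule, assuming a little-endian layout; regs[], mem[], lw() and load64() are made-up stand-ins, not the JIT's emit() machinery.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t regs[8];	/* stand-in for the register file */
static uint8_t mem[64];		/* stand-in for guest memory */

/* rd = 32-bit word at mem[regs[rs] + off], like a MIPS lw */
static void lw(int rd, int16_t off, int rs)
{
	uint32_t v;

	memcpy(&v, &mem[regs[rs] + off], sizeof(v));
	regs[rd] = v;
}

static void load64(int dst_lo, int dst_hi, int src, int16_t off)
{
	if (dst_lo == src) {			/* base aliases a destination half */
		lw(dst_hi, off + 4, src);	/* clobber the free half first */
		lw(dst_lo, off, src);
	} else {
		lw(dst_lo, off, src);
		lw(dst_hi, off + 4, src);
	}
}

int main(void)
{
	uint64_t value = 0x1122334455667788ULL;

	memcpy(&mem[16], &value, sizeof(value));
	regs[3] = 16;				/* r3 is both base and dst_lo */
	load64(3, 4, 3, 0);
	printf("lo=%08x hi=%08x\n", (unsigned)regs[3], (unsigned)regs[4]);
	return 0;
}

The store path in emit_stx() needs no such reordering: a store reads registers but never overwrites the base, so a single fixed order is always safe.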
715 u8 dst, u8 src, s16 off, u8 code) in emit_atomic_r32() argument
724 * Argument 1: dst+off if xchg, otherwise src, passed in register a0 in emit_atomic_r32()
725 * Argument 2: src if xchg, otherwise dst+off, passed in register a1 in emit_atomic_r32()
730 emit(ctx, addiu, MIPS_R_A0, MIPS_R_T9, off); in emit_atomic_r32()
733 emit(ctx, addiu, MIPS_R_A1, MIPS_R_T9, off); in emit_atomic_r32()
798 u8 dst, const u8 src[], s16 off, u8 code) in emit_atomic_r64() argument
810 * Argument 2: 32-bit dst+off, passed in register a2 in emit_atomic_r64()
815 emit(ctx, addiu, MIPS_R_A2, MIPS_R_T9, off); in emit_atomic_r64()
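The atomic lines above show the effective address dst + off being folded into a MIPS argument register (a0, a1 or a2) with addiu before calling out of line, so the callee only ever sees a ready-made pointer. A rough user-space analogue of that calling shape follows; jit_atomic_fetch_add32() is a made-up stand-in, not the helper the kernel actually calls.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical out-of-line helper: it receives the already computed
 * dst + off as a single pointer argument, not a base and an offset. */
static uint32_t jit_atomic_fetch_add32(uint32_t *addr, uint32_t val)
{
	return __atomic_fetch_add(addr, val, __ATOMIC_SEQ_CST);
}

int main(void)
{
	uint32_t mem[8] = { 0 };
	uint8_t *base = (uint8_t *)mem;
	int16_t off = 3 * sizeof(uint32_t);

	/* corresponds to "addiu a0, dst, off": fold the offset into the argument */
	uint32_t *arg = (uint32_t *)(base + off);
	uint32_t old = jit_atomic_fetch_add32(arg, 5);

	printf("old=%u new=%u\n", (unsigned)old, (unsigned)mem[3]);
	return 0;
}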
873 static void emit_cmpxchg_r32(struct jit_context *ctx, u8 dst, u8 src, s16 off) in emit_cmpxchg_r32() argument
881 * Argument 1: 32-bit dst+off, passed in register a0 in emit_cmpxchg_r32()
885 emit(ctx, addiu, MIPS_R_T9, dst, off); in emit_cmpxchg_r32()
910 u8 dst, const u8 src[], s16 off) in emit_cmpxchg_r64() argument
919 * Argument 1: 32-bit dst+off, passed in register a0 (a1 unused) in emit_cmpxchg_r64()
924 emit(ctx, addiu, MIPS_R_T9, dst, off); in emit_cmpxchg_r64()
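The cmpxchg lines follow the same pattern, with the extra wrinkle that BPF_CMPXCHG compares against R0 and leaves the value found in memory back in R0. A sketch of a helper with that shape, built on the GCC/Clang __atomic builtins; jit_cmpxchg32() is an invented name and the real kernel helper will differ.

#include <stdint.h>
#include <stdio.h>

/* addr is the computed dst + off, old is the expected value; the return
 * value is whatever was actually found in memory. */
static uint32_t jit_cmpxchg32(uint32_t *addr, uint32_t old, uint32_t new_val)
{
	__atomic_compare_exchange_n(addr, &old, new_val, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;	/* updated to the current value when the exchange fails */
}

int main(void)
{
	uint32_t word = 7;

	printf("%u %u\n", (unsigned)jit_cmpxchg32(&word, 7, 9), (unsigned)word); /* 7 9 */
	printf("%u %u\n", (unsigned)jit_cmpxchg32(&word, 7, 1), (unsigned)word); /* 9 9 */
	return 0;
}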
1093 const u8 dst[], s32 imm, s32 off, u8 op) in emit_jmp_i64() argument
1101 /* PC += off if dst == imm */ in emit_jmp_i64()
1102 /* PC += off if dst != imm */ in emit_jmp_i64()
1120 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1122 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1124 /* PC += off if dst & imm */ in emit_jmp_i64()
1125 /* PC += off if (dst & imm) == 0 (not in BPF, used for long jumps) */ in emit_jmp_i64()
1137 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1139 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1141 /* PC += off if dst > imm */ in emit_jmp_i64()
1144 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1146 /* PC += off if dst >= imm */ in emit_jmp_i64()
1149 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1151 /* PC += off if dst < imm */ in emit_jmp_i64()
1154 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1156 /* PC += off if dst <= imm */ in emit_jmp_i64()
1159 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1161 /* PC += off if dst > imm (signed) */ in emit_jmp_i64()
1164 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1166 /* PC += off if dst >= imm (signed) */ in emit_jmp_i64()
1169 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1171 /* PC += off if dst < imm (signed) */ in emit_jmp_i64()
1174 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1176 /* PC += off if dst <= imm (signed) */ in emit_jmp_i64()
1179 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
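In emit_jmp_i64() every condition is first reduced to a single scratch value tmp, after which the branch itself is just a beqz or bnez with off as the displacement. The listing does not show how tmp is computed, so the sketch below only illustrates one plausible fold for the equality case on a 32-bit machine (the BPF immediate is 32 bits, sign-extended to 64); jeq64() is an invented name, not the emitted sequence.

#include <stdint.h>
#include <stdio.h>

/* 64-bit "dst == imm" folded into one 32-bit scratch value, so the final
 * branch is a single beqz/bnez-style test on tmp. */
static int jeq64(uint32_t dst_lo, uint32_t dst_hi, int32_t imm)
{
	uint32_t imm_lo = (uint32_t)imm;
	uint32_t imm_hi = imm < 0 ? 0xffffffffu : 0;	/* sign extension */
	uint32_t tmp = (dst_lo ^ imm_lo) | (dst_hi ^ imm_hi);

	return tmp == 0;	/* "beqz tmp, off": taken when tmp is zero */
}

int main(void)
{
	printf("%d %d\n", jeq64(0xffffffffu, 0xffffffffu, -1),	/* 1 */
			  jeq64(5, 0, -1));			/* 0 */
	return 0;
}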
1186 const u8 dst[], const u8 src[], s32 off, u8 op) in emit_jmp_r64() argument
1195 /* PC += off if dst == src */ in emit_jmp_r64()
1196 /* PC += off if dst != src */ in emit_jmp_r64()
1203 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1205 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1207 /* PC += off if dst & src */ in emit_jmp_r64()
1208 /* PC += off if (dst & src) == 0 (not in BPF, used for long jumps) */ in emit_jmp_r64()
1215 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1217 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1219 /* PC += off if dst > src */ in emit_jmp_r64()
1222 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1224 /* PC += off if dst >= src */ in emit_jmp_r64()
1227 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1229 /* PC += off if dst < src */ in emit_jmp_r64()
1232 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1234 /* PC += off if dst <= src */ in emit_jmp_r64()
1237 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1239 /* PC += off if dst > src (signed) */ in emit_jmp_r64()
1242 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1244 /* PC += off if dst >= src (signed) */ in emit_jmp_r64()
1247 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1249 /* PC += off if dst < src (signed) */ in emit_jmp_r64()
1252 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1254 /* PC += off if dst <= src (signed) */ in emit_jmp_r64()
1257 emit(ctx, beqz, t1, off); in emit_jmp_r64()
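emit_jmp_r64() uses the same compute-then-branch shape for register operands, ending in a beqz/bnez on t1. For the ordered comparisons a 64-bit verdict has to be derived from the two 32-bit halves; the sketch below shows the usual way to do that for unsigned "dst > src" (an illustration of the idea, not the literal instruction sequence, and jgt64() is an invented name).

#include <stdint.h>
#include <stdio.h>

/* 64-bit unsigned "dst > src" from 32-bit halves: the high words decide
 * unless they are equal, in which case the low words decide. */
static int jgt64(uint32_t dlo, uint32_t dhi, uint32_t slo, uint32_t shi)
{
	if (dhi != shi)
		return dhi > shi;
	return dlo > slo;
}

int main(void)
{
	printf("%d %d %d\n",
	       jgt64(0, 1, 0xffffffffu, 0),	/* 1: 2^32 > 2^32 - 1   */
	       jgt64(2, 0, 1, 0),		/* 1: high equal, 2 > 1 */
	       jgt64(1, 0, 1, 0));		/* 0: equal             */
	return 0;
}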
1296 int off; in emit_tail_call() local
1307 off = offsetof(struct bpf_array, map.max_entries); in emit_tail_call()
1308 if (off > 0x7fff) in emit_tail_call()
1310 emit(ctx, lw, t1, off, ary); /* t1 = ary->map.max_entries */ in emit_tail_call()
1313 emit(ctx, beqz, t1, get_offset(ctx, 1)); /* PC += off(1) if t1 == 0 */ in emit_tail_call()
1318 emit(ctx, blez, t2, get_offset(ctx, 1)); /* PC += off(1) if t2 <= 0 */ in emit_tail_call()
1323 off = offsetof(struct bpf_array, ptrs); in emit_tail_call()
1324 if (off > 0x7fff) in emit_tail_call()
1328 emit(ctx, lw, t2, off, t1); /* t2 = *(t1 + off) */ in emit_tail_call()
1332 emit(ctx, beqz, t2, get_offset(ctx, 1)); /* PC += off(1) if t2 == 0 */ in emit_tail_call()
1336 off = offsetof(struct bpf_prog, bpf_func); in emit_tail_call()
1337 if (off > 0x7fff) in emit_tail_call()
1339 emit(ctx, lw, t1, off, t2); /* t1 = *(t2 + off) */ in emit_tail_call()
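The emit_tail_call() lines spell out the standard BPF tail-call sequence: a bounds check against map.max_entries, a tail-call-count check, a NULL check on the program slot, and finally a load of bpf_func for the indirect jump. The off > 0x7fff guards reject field offsets that would not fit the 16-bit signed immediate of a MIPS lw. Below is a simplified user-space model of that control flow; the structures are cut down to the fields these checks touch and are not the kernel definitions, and tail_call_target()/demo_prog() are invented names.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef void (*bpf_func_t)(void);

/* cut-down stand-ins, not the kernel structures */
struct bpf_map   { uint32_t max_entries; };
struct bpf_prog  { bpf_func_t bpf_func; };
struct bpf_array { struct bpf_map map; struct bpf_prog *ptrs[4]; };

/* Returns the address to jump to, or NULL when the tail call must fall
 * through, approximating the emitted checks in order. */
static bpf_func_t tail_call_target(struct bpf_array *ary, uint32_t index,
				   int *tail_call_count)
{
	if (index >= ary->map.max_entries)	/* bounds check on max_entries */
		return NULL;
	if (--(*tail_call_count) <= 0)		/* "blez t2": call-depth limit */
		return NULL;
	if (!ary->ptrs[index])			/* "beqz t2": empty slot */
		return NULL;
	return ary->ptrs[index]->bpf_func;	/* t1 = prog->bpf_func */
}

static void demo_prog(void) { puts("tail-called"); }

int main(void)
{
	struct bpf_prog prog = { demo_prog };
	struct bpf_array ary = { { 4 }, { NULL, &prog } };
	int tcc = 33;
	bpf_func_t target = tail_call_target(&ary, 1, &tcc);

	if (target)
		target();
	return 0;
}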
1470 s16 off = insn->off; in build_insn() local
1654 /* LDX: dst = *(size *)(src + off) */ in build_insn()
1659 emit_ldx(ctx, dst, lo(src), off, BPF_SIZE(code)); in build_insn()
1661 /* ST: *(size *)(dst + off) = imm */ in build_insn()
1677 emit_stx(ctx, lo(dst), tmp, off, BPF_SIZE(code)); in build_insn()
1679 /* STX: *(size *)(dst + off) = src */ in build_insn()
1684 emit_stx(ctx, lo(dst), src, off, BPF_SIZE(code)); in build_insn()
1702 emit_atomic_r(ctx, lo(dst), lo(src), off, imm); in build_insn()
1705 off, imm); in build_insn()
1712 lo(res), off); in build_insn()
1714 emit_cmpxchg_r32(ctx, lo(dst), lo(src), off); in build_insn()
1733 emit_atomic_r64(ctx, lo(dst), src, off, imm); in build_insn()
1736 emit_cmpxchg_r64(ctx, lo(dst), src, off); in build_insn()
1742 /* PC += off if dst == src */ in build_insn()
1743 /* PC += off if dst != src */ in build_insn()
1744 /* PC += off if dst & src */ in build_insn()
1745 /* PC += off if dst > src */ in build_insn()
1746 /* PC += off if dst >= src */ in build_insn()
1747 /* PC += off if dst < src */ in build_insn()
1748 /* PC += off if dst <= src */ in build_insn()
1749 /* PC += off if dst > src (signed) */ in build_insn()
1750 /* PC += off if dst >= src (signed) */ in build_insn()
1751 /* PC += off if dst < src (signed) */ in build_insn()
1752 /* PC += off if dst <= src (signed) */ in build_insn()
1764 if (off == 0) in build_insn()
1766 setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel); in build_insn()
1768 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1771 /* PC += off if dst == imm */ in build_insn()
1772 /* PC += off if dst != imm */ in build_insn()
1773 /* PC += off if dst & imm */ in build_insn()
1774 /* PC += off if dst > imm */ in build_insn()
1775 /* PC += off if dst >= imm */ in build_insn()
1776 /* PC += off if dst < imm */ in build_insn()
1777 /* PC += off if dst <= imm */ in build_insn()
1778 /* PC += off if dst > imm (signed) */ in build_insn()
1779 /* PC += off if dst >= imm (signed) */ in build_insn()
1780 /* PC += off if dst < imm (signed) */ in build_insn()
1781 /* PC += off if dst <= imm (signed) */ in build_insn()
1793 if (off == 0) in build_insn()
1795 setup_jmp_i(ctx, imm, 32, BPF_OP(code), off, &jmp, &rel); in build_insn()
1803 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1806 /* PC += off if dst == src */ in build_insn()
1807 /* PC += off if dst != src */ in build_insn()
1808 /* PC += off if dst & src */ in build_insn()
1809 /* PC += off if dst > src */ in build_insn()
1810 /* PC += off if dst >= src */ in build_insn()
1811 /* PC += off if dst < src */ in build_insn()
1812 /* PC += off if dst <= src */ in build_insn()
1813 /* PC += off if dst > src (signed) */ in build_insn()
1814 /* PC += off if dst >= src (signed) */ in build_insn()
1815 /* PC += off if dst < src (signed) */ in build_insn()
1816 /* PC += off if dst <= src (signed) */ in build_insn()
1828 if (off == 0) in build_insn()
1830 setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel); in build_insn()
1832 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1835 /* PC += off if dst == imm */ in build_insn()
1836 /* PC += off if dst != imm */ in build_insn()
1837 /* PC += off if dst & imm */ in build_insn()
1838 /* PC += off if dst > imm */ in build_insn()
1839 /* PC += off if dst >= imm */ in build_insn()
1840 /* PC += off if dst < imm */ in build_insn()
1841 /* PC += off if dst <= imm */ in build_insn()
1842 /* PC += off if dst > imm (signed) */ in build_insn()
1843 /* PC += off if dst >= imm (signed) */ in build_insn()
1844 /* PC += off if dst < imm (signed) */ in build_insn()
1845 /* PC += off if dst <= imm (signed) */ in build_insn()
1857 if (off == 0) in build_insn()
1859 setup_jmp_i(ctx, imm, 64, BPF_OP(code), off, &jmp, &rel); in build_insn()
1861 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1864 /* PC += off */ in build_insn()
1866 if (off == 0) in build_insn()
1868 if (emit_ja(ctx, off) < 0) in build_insn()
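Throughout build_insn(), off is the signed 16-bit offset field of the BPF instruction being translated: a memory displacement for the LDX/ST/STX and atomic cases, and a branch displacement (in instructions, "PC += off") for the jump cases. The off == 0 guards apparently skip the jump cases because a branch to the next instruction is a no-op. A small self-contained illustration of that field, with the layout mirroring struct bpf_insn from the Linux UAPI headers, reproduced here only so the example stands alone.

#include <stdint.h>
#include <stdio.h>

/* same field layout as struct bpf_insn in the UAPI headers */
struct insn {
	uint8_t	 code;		/* opcode */
	uint8_t	 dst_reg:4;	/* destination register */
	uint8_t	 src_reg:4;	/* source register */
	int16_t	 off;		/* signed offset */
	int32_t	 imm;		/* signed immediate */
};

int main(void)
{
	struct insn ja_nop  = { .code = 0x05, .off = 0 };	/* BPF_JMP | BPF_JA to the next insn */
	struct insn ja_skip = { .code = 0x05, .off = 3 };	/* skips the next 3 insns */

	printf("off=%d -> %s\n", ja_nop.off,
	       ja_nop.off == 0 ? "nothing emitted" : "branch emitted");
	printf("off=%d -> %s\n", ja_skip.off,
	       ja_skip.off == 0 ? "nothing emitted" : "branch emitted");
	return 0;
}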