Lines matching full:src
204 u8 src = MIPS_R_T6; in emit_alu_i64() local
223 emit_mov_i(ctx, src, imm); in emit_alu_i64()
228 emit(ctx, addu, lo(dst), lo(dst), src); in emit_alu_i64()
229 emit(ctx, sltu, MIPS_R_T9, lo(dst), src); in emit_alu_i64()
236 emit(ctx, sltu, MIPS_R_T9, lo(dst), src); in emit_alu_i64()
237 emit(ctx, subu, lo(dst), lo(dst), src); in emit_alu_i64()
244 emit(ctx, or, lo(dst), lo(dst), src); in emit_alu_i64()
250 emit(ctx, and, lo(dst), lo(dst), src); in emit_alu_i64()
256 emit(ctx, xor, lo(dst), lo(dst), src); in emit_alu_i64()
268 const u8 dst[], const u8 src[], u8 op) in emit_alu_r64() argument
271 /* dst = dst + src */ in emit_alu_r64()
273 if (src == dst) { in emit_alu_r64()
277 emit(ctx, addu, lo(dst), lo(dst), lo(src)); in emit_alu_r64()
278 emit(ctx, sltu, MIPS_R_T9, lo(dst), lo(src)); in emit_alu_r64()
280 emit(ctx, addu, hi(dst), hi(dst), hi(src)); in emit_alu_r64()
283 /* dst = dst - src */ in emit_alu_r64()
285 emit(ctx, sltu, MIPS_R_T9, lo(dst), lo(src)); in emit_alu_r64()
286 emit(ctx, subu, lo(dst), lo(dst), lo(src)); in emit_alu_r64()
287 emit(ctx, subu, hi(dst), hi(dst), hi(src)); in emit_alu_r64()
290 /* dst = dst | src */ in emit_alu_r64()
292 emit(ctx, or, lo(dst), lo(dst), lo(src)); in emit_alu_r64()
293 emit(ctx, or, hi(dst), hi(dst), hi(src)); in emit_alu_r64()
295 /* dst = dst & src */ in emit_alu_r64()
297 emit(ctx, and, lo(dst), lo(dst), lo(src)); in emit_alu_r64()
298 emit(ctx, and, hi(dst), hi(dst), hi(src)); in emit_alu_r64()
300 /* dst = dst ^ src */ in emit_alu_r64()
302 emit(ctx, xor, lo(dst), lo(dst), lo(src)); in emit_alu_r64()
303 emit(ctx, xor, hi(dst), hi(dst), hi(src)); in emit_alu_r64()
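The matches above from emit_alu_i64() and emit_alu_r64() show each 64-bit ALU op split into operations on the 32-bit register halves; the only subtle cases are ADD and SUB, where the carry or borrow between halves is recovered with sltu into T9 (for SUB, the sltu comes first so the borrow is sampled before the low word is overwritten). A minimal C sketch of the ADD carry trick, assuming a hypothetical helper name and {lo, hi} word pairs:

static void add64_sketch(unsigned int d[2], const unsigned int s[2])
{
        unsigned int carry;

        d[0] += s[0];           /* addu lo(dst), lo(dst), lo(src) */
        carry = d[0] < s[0];    /* sltu T9, lo(dst), lo(src): set iff the add wrapped */
        d[1] += carry;          /* addu hi(dst), hi(dst), T9 */
        d[1] += s[1];           /* addu hi(dst), hi(dst), hi(src) */
}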
367 const u8 dst[], u8 src, u8 op) in emit_shift_r64() argument
372 emit(ctx, andi, t1, src, 32); /* t1 = src & 32 */ in emit_shift_r64()
374 emit(ctx, nor, t2, src, MIPS_R_ZERO); /* t2 = ~src (delay slot) */ in emit_shift_r64()
377 /* dst = dst << src */ in emit_shift_r64()
380 emit(ctx, sllv, hi(dst), lo(dst), src); /* dh = dl << src */ in emit_shift_r64()
386 emit(ctx, sllv, lo(dst), lo(dst), src); /* dl = dl << src */ in emit_shift_r64()
387 emit(ctx, sllv, hi(dst), hi(dst), src); /* dh = dh << src */ in emit_shift_r64()
390 /* dst = dst >> src */ in emit_shift_r64()
393 emit(ctx, srlv, lo(dst), hi(dst), src); /* dl = dh >> src */ in emit_shift_r64()
399 emit(ctx, srlv, lo(dst), lo(dst), src); /* dl = dl >> src */ in emit_shift_r64()
400 emit(ctx, srlv, hi(dst), hi(dst), src); /* dh = dh >> src */ in emit_shift_r64()
403 /* dst = dst >> src (arithmetic) */ in emit_shift_r64()
406 emit(ctx, srav, lo(dst), hi(dst), src); /* dl = dh >>a src */ in emit_shift_r64()
412 emit(ctx, srlv, lo(dst), lo(dst), src); /* dl = dl >> src */ in emit_shift_r64()
413 emit(ctx, srav, hi(dst), hi(dst), src); /* dh = dh >>a src */ in emit_shift_r64()
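emit_shift_r64() splits a variable 64-bit shift into the "amount >= 32" and "amount < 32" cases selected by the andi t1, src, 32 test above, using the complemented count in t2 to move the bits that cross the word boundary in the second case. A hedged C sketch of the left-shift case (illustrative helper name, {lo, hi} word pairs; the zero-amount guard here stands in for how the machine code avoids an out-of-range 32-bit shift):

static void sll64_sketch(unsigned int d[2], unsigned int amount)
{
        amount &= 63;
        if (amount & 32) {                      /* shift by 32..63: lo moves into hi */
                d[1] = d[0] << (amount & 31);   /* dh = dl << src */
                d[0] = 0;
        } else if (amount) {                    /* shift by 1..31: carry bits across */
                d[1] = (d[1] << amount) | (d[0] >> (32 - amount));
                d[0] <<= amount;                /* dl = dl << src */
        }
}

RSH and ARSH mirror this with srlv/srav in place of sllv, moving bits from the high word down into the low word instead.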
425 u8 src = MIPS_R_T6; in emit_mul_i64() local
442 /* hi(dst) = hi(dst) * src(imm) */ in emit_mul_i64()
443 emit_mov_i(ctx, src, imm); in emit_mul_i64()
445 emit(ctx, mul, hi(dst), hi(dst), src); in emit_mul_i64()
447 emit(ctx, multu, hi(dst), src); in emit_mul_i64()
455 /* tmp = lo(dst) * src(imm) >> 32 */ in emit_mul_i64()
456 /* lo(dst) = lo(dst) * src(imm) */ in emit_mul_i64()
458 emit(ctx, muhu, tmp, lo(dst), src); in emit_mul_i64()
459 emit(ctx, mulu, lo(dst), lo(dst), src); in emit_mul_i64()
461 emit(ctx, multu, lo(dst), src); in emit_mul_i64()
475 const u8 dst[], const u8 src[]) in emit_mul_r64() argument
480 /* acc = hi(dst) * lo(src) */ in emit_mul_r64()
482 emit(ctx, mul, acc, hi(dst), lo(src)); in emit_mul_r64()
484 emit(ctx, multu, hi(dst), lo(src)); in emit_mul_r64()
488 /* tmp = lo(dst) * hi(src) */ in emit_mul_r64()
490 emit(ctx, mul, tmp, lo(dst), hi(src)); in emit_mul_r64()
492 emit(ctx, multu, lo(dst), hi(src)); in emit_mul_r64()
499 /* tmp = lo(dst) * lo(src) >> 32 */ in emit_mul_r64()
500 /* lo(dst) = lo(dst) * lo(src) */ in emit_mul_r64()
502 emit(ctx, muhu, tmp, lo(dst), lo(src)); in emit_mul_r64()
503 emit(ctx, mulu, lo(dst), lo(dst), lo(src)); in emit_mul_r64()
505 emit(ctx, multu, lo(dst), lo(src)); in emit_mul_r64()
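The multiply helpers above keep only the low 64 bits of the product, so the decomposition needs three 32-bit multiplies: the low halves of the cross terms hi(dst)*lo(src) and lo(dst)*hi(src), plus the full lo(dst)*lo(src), whose upper half carries into the high word (the muhu/mulu forms are the MIPS R6 encodings; multu with the HI/LO registers is the legacy path, apparently selected elsewhere by an ISA check). A C sketch of the same decomposition, with an assumed helper name and {lo, hi} word pairs:

static void mul64_sketch(unsigned int d[2], const unsigned int s[2])
{
        unsigned long long ll = (unsigned long long)d[0] * s[0];  /* lo * lo, full 64 bits */
        unsigned int hi;

        hi  = d[1] * s[0];              /* hi(dst) * lo(src), low 32 bits */
        hi += d[0] * s[1];              /* lo(dst) * hi(src), low 32 bits */
        hi += (unsigned int)(ll >> 32); /* muhu: high half of lo * lo */

        d[0] = (unsigned int)ll;        /* mulu: low half of lo * lo */
        d[1] = hi;
}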
526 const u8 dst[], const u8 src[], u8 op) in emit_divmod_r64() argument
540 emit(ctx, move, MIPS_R_T9, src[k]); in emit_divmod_r64()
547 /* dst = dst / src */ in emit_divmod_r64()
551 /* dst = dst % src */ in emit_divmod_r64()
577 static void emit_swap8_r(struct jit_context *ctx, u8 dst, u8 src, u8 mask) in emit_swap8_r() argument
581 emit(ctx, and, tmp, src, mask); /* tmp = src & 0x00ff00ff */ in emit_swap8_r()
583 emit(ctx, srl, dst, src, 8); /* dst = src >> 8 */ in emit_swap8_r()
589 static void emit_swap16_r(struct jit_context *ctx, u8 dst, u8 src) in emit_swap16_r() argument
593 emit(ctx, sll, tmp, src, 16); /* tmp = src << 16 */ in emit_swap16_r()
594 emit(ctx, srl, dst, src, 16); /* dst = src >> 16 */ in emit_swap16_r()
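Taken together, the two helpers above implement a 32-bit byte swap without a dedicated instruction: emit_swap8_r() exchanges the bytes inside each halfword using the 0x00ff00ff mask, and emit_swap16_r() then exchanges the two halfwords. A compact C equivalent (hypothetical function name):

static unsigned int bswap32_sketch(unsigned int x)
{
        unsigned int tmp = x & 0x00ff00ff;      /* tmp = src & mask */

        x = (x >> 8) & 0x00ff00ff;              /* dst = (src >> 8) & mask */
        x |= tmp << 8;                          /* dst |= tmp << 8 */

        return (x << 16) | (x >> 16);           /* swap the halfwords */
}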
653 /* Load operation: dst = *(size*)(src + off) */
655 const u8 dst[], u8 src, s16 off, u8 size) in emit_ldx() argument
660 emit(ctx, lbu, lo(dst), off, src); in emit_ldx()
665 emit(ctx, lhu, lo(dst), off, src); in emit_ldx()
670 emit(ctx, lw, lo(dst), off, src); in emit_ldx()
675 if (dst[1] == src) { in emit_ldx()
676 emit(ctx, lw, dst[0], off + 4, src); in emit_ldx()
677 emit(ctx, lw, dst[1], off, src); in emit_ldx()
679 emit(ctx, lw, dst[1], off, src); in emit_ldx()
680 emit(ctx, lw, dst[0], off + 4, src); in emit_ldx()
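The dst[1] == src test above handles register overlap on 64-bit loads: two lw instructions read the halves at off and off + 4, and when the register that would normally be written first (dst[1]) is also the base-address register, the order is flipped so the address is not clobbered before the second load. A toy register-file sketch of the same rule (regs, rd0, rd1 and rs are illustrative names, not the JIT's):

static void ldx_dw_sketch(unsigned int *regs, int rd0, int rd1, int rs, int off)
{
        const unsigned char *base = (const unsigned char *)(unsigned long)regs[rs];

        if (rd1 == rs) {                /* rd1 doubles as the address register */
                regs[rd0] = *(const unsigned int *)(base + off + 4);
                regs[rd1] = *(const unsigned int *)(base + off);        /* clobber last */
        } else {
                regs[rd1] = *(const unsigned int *)(base + off);
                regs[rd0] = *(const unsigned int *)(base + off + 4);
        }
}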
688 /* Store operation: *(size *)(dst + off) = src */
690 const u8 dst, const u8 src[], s16 off, u8 size) in emit_stx() argument
695 emit(ctx, sb, lo(src), off, dst); in emit_stx()
699 emit(ctx, sh, lo(src), off, dst); in emit_stx()
703 emit(ctx, sw, lo(src), off, dst); in emit_stx()
707 emit(ctx, sw, src[1], off, dst); in emit_stx()
708 emit(ctx, sw, src[0], off + 4, dst); in emit_stx()
715 u8 dst, u8 src, s16 off, u8 code) in emit_atomic_r32() argument
724 * Argument 1: dst+off if xchg, otherwise src, passed in register a0 in emit_atomic_r32()
725 * Argument 2: src if xchg, otherwise dst+off, passed in register a1 in emit_atomic_r32()
729 emit(ctx, move, MIPS_R_A1, src); in emit_atomic_r32()
732 emit(ctx, move, MIPS_R_A0, src); in emit_atomic_r32()
776 /* Update src register with old value, if specified */ in emit_atomic_r32()
778 emit(ctx, move, src, MIPS_R_V0); in emit_atomic_r32()
779 exclude = BIT(src); in emit_atomic_r32()
780 clobber_reg(ctx, src); in emit_atomic_r32()
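emit_atomic_r32() does not inline the atomic itself: it marshals the operands into a0/a1 as described above, calls out, and when BPF_FETCH is requested copies the old value returned in v0 back into src. A hedged C analogue of the fetch-add case using the standard __atomic builtin (the JIT branches to an arch-specific routine instead; this only illustrates the required semantics):

static unsigned int atomic_fetch_add32_sketch(unsigned int *addr, unsigned int val)
{
        /* add val to *addr and return the previous value, as BPF_ADD | BPF_FETCH requires */
        return __atomic_fetch_add(addr, val, __ATOMIC_SEQ_CST);
}

emit_atomic_r64() applies the same pattern with the 64-bit src passed as a register pair in a0-a1 and the old value returned as a pair.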
798 u8 dst, const u8 src[], s16 off, u8 code) in emit_atomic_r64() argument
809 * Argument 1: 64-bit src, passed in registers a0-a1 in emit_atomic_r64()
813 emit(ctx, move, r1[0], src[0]); in emit_atomic_r64()
814 emit(ctx, move, r1[1], src[1]); in emit_atomic_r64()
857 /* Update src register with old value, if specified */ in emit_atomic_r64()
859 emit(ctx, move, lo(src), lo(r0)); in emit_atomic_r64()
860 emit(ctx, move, hi(src), hi(r0)); in emit_atomic_r64()
861 exclude = BIT(src[0]) | BIT(src[1]); in emit_atomic_r64()
862 clobber_reg64(ctx, src); in emit_atomic_r64()
873 static void emit_cmpxchg_r32(struct jit_context *ctx, u8 dst, u8 src, s16 off) in emit_cmpxchg_r32() argument
883 * Argument 3: 32-bit src, passed in register a2 in emit_cmpxchg_r32()
886 emit(ctx, move, MIPS_R_T8, src); in emit_cmpxchg_r32()
910 u8 dst, const u8 src[], s16 off) in emit_cmpxchg_r64() argument
921 * Argument 3: 64-bit src, passed on stack in emit_cmpxchg_r64()
923 push_regs(ctx, BIT(src[0]) | BIT(src[1]), 0, JIT_RESERVED_STACK); in emit_cmpxchg_r64()
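The cmpxchg emitters likewise build a call: the matches above show the new value from src passed in a2 for the 32-bit form and on the stack for the 64-bit form, with the remaining arguments (the address and the expected value in BPF r0) set up in lines not matched here. The operation they request is a plain compare-and-exchange that always hands back the old value; a hedged sketch with the standard __atomic builtin:

static unsigned int cmpxchg32_sketch(unsigned int *addr, unsigned int expected,
                                     unsigned int new_val)
{
        /* On failure the builtin writes the current value into 'expected',
         * so returning it yields the old value in both cases. */
        __atomic_compare_exchange_n(addr, &expected, new_val, 0,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        return expected;
}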
1186 const u8 dst[], const u8 src[], s32 off, u8 op) in emit_jmp_r64() argument
1195 /* PC += off if dst == src */ in emit_jmp_r64()
1196 /* PC += off if dst != src */ in emit_jmp_r64()
1199 emit(ctx, subu, t1, lo(dst), lo(src)); in emit_jmp_r64()
1200 emit(ctx, subu, t2, hi(dst), hi(src)); in emit_jmp_r64()
1207 /* PC += off if dst & src */ in emit_jmp_r64()
1211 emit(ctx, and, t1, lo(dst), lo(src)); in emit_jmp_r64()
1212 emit(ctx, and, t2, hi(dst), hi(src)); in emit_jmp_r64()
1219 /* PC += off if dst > src */ in emit_jmp_r64()
1221 emit_sltu_r64(ctx, t1, src, dst); in emit_jmp_r64()
1224 /* PC += off if dst >= src */ in emit_jmp_r64()
1226 emit_sltu_r64(ctx, t1, dst, src); in emit_jmp_r64()
1229 /* PC += off if dst < src */ in emit_jmp_r64()
1231 emit_sltu_r64(ctx, t1, dst, src); in emit_jmp_r64()
1234 /* PC += off if dst <= src */ in emit_jmp_r64()
1236 emit_sltu_r64(ctx, t1, src, dst); in emit_jmp_r64()
1239 /* PC += off if dst > src (signed) */ in emit_jmp_r64()
1241 emit_slt_r64(ctx, t1, src, dst); in emit_jmp_r64()
1244 /* PC += off if dst >= src (signed) */ in emit_jmp_r64()
1246 emit_slt_r64(ctx, t1, dst, src); in emit_jmp_r64()
1249 /* PC += off if dst < src (signed) */ in emit_jmp_r64()
1251 emit_slt_r64(ctx, t1, dst, src); in emit_jmp_r64()
1254 /* PC += off if dst <= src (signed) */ in emit_jmp_r64()
1256 emit_slt_r64(ctx, t1, src, dst); in emit_jmp_r64()
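All of the magnitude comparisons above reduce to emit_sltu_r64()/emit_slt_r64(), which set t1 from a 64-bit compare of the register pairs; swapping the operands and choosing the branch sense yields >, >=, < and <= from a single "less than" primitive, while the equality tests subtract the halves pairwise into t1/t2 and branch on the combined result. A sketch of the unsigned 64-bit "less than" built from 32-bit words ({lo, hi} pairs, hypothetical name):

static int sltu64_sketch(const unsigned int a[2], const unsigned int b[2])
{
        if (a[1] != b[1])               /* high words decide when they differ */
                return a[1] < b[1];
        return a[0] < b[0];             /* otherwise compare the low words */
}

The signed variant differs only in comparing the high words as signed values.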
1466 const u8 *src = bpf2mips32[insn->src_reg]; in build_insn() local
1482 /* dst = src */ in build_insn()
1488 emit_mov_r(ctx, lo(dst), lo(src)); in build_insn()
1527 /* dst = dst & src */ in build_insn()
1528 /* dst = dst | src */ in build_insn()
1529 /* dst = dst ^ src */ in build_insn()
1530 /* dst = dst << src */ in build_insn()
1531 /* dst = dst >> src */ in build_insn()
1532 /* dst = dst >> src (arithmetic) */ in build_insn()
1533 /* dst = dst + src */ in build_insn()
1534 /* dst = dst - src */ in build_insn()
1535 /* dst = dst * src */ in build_insn()
1536 /* dst = dst / src */ in build_insn()
1537 /* dst = dst % src */ in build_insn()
1549 emit_alu_r(ctx, lo(dst), lo(src), BPF_OP(code)); in build_insn()
1556 /* dst = src (64-bit) */ in build_insn()
1558 emit_mov_r(ctx, lo(dst), lo(src)); in build_insn()
1559 emit_mov_r(ctx, hi(dst), hi(src)); in build_insn()
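The two MOV forms above differ in how much of the destination pair they touch: the 64-bit form copies both halves, while the 32-bit form copies only lo(src) and relies on the BPF rule that 32-bit results zero-extend into the upper half (the emit_zext_ver() call visible further down suggests the verifier lets the JIT skip that clearing when it is provably unnecessary). A tiny illustrative sketch of the semantics, not the JIT's own code:

static void mov32_sketch(unsigned int d[2], unsigned int s_lo)
{
        d[0] = s_lo;    /* lo(dst) = lo(src) */
        d[1] = 0;       /* 32-bit ops zero-extend into the upper half */
}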
1604 /* dst = dst & src (64-bit) */ in build_insn()
1605 /* dst = dst | src (64-bit) */ in build_insn()
1606 /* dst = dst ^ src (64-bit) */ in build_insn()
1607 /* dst = dst + src (64-bit) */ in build_insn()
1608 /* dst = dst - src (64-bit) */ in build_insn()
1614 emit_alu_r64(ctx, dst, src, BPF_OP(code)); in build_insn()
1616 /* dst = dst << src (64-bit) */ in build_insn()
1617 /* dst = dst >> src (64-bit) */ in build_insn()
1618 /* dst = dst >> src (64-bit, arithmetic) */ in build_insn()
1622 emit_shift_r64(ctx, dst, lo(src), BPF_OP(code)); in build_insn()
1624 /* dst = dst * src (64-bit) */ in build_insn()
1626 emit_mul_r64(ctx, dst, src); in build_insn()
1628 /* dst = dst / src (64-bit) */ in build_insn()
1629 /* dst = dst % src (64-bit) */ in build_insn()
1632 emit_divmod_r64(ctx, dst, src, BPF_OP(code)); in build_insn()
1654 /* LDX: dst = *(size *)(src + off) */ in build_insn()
1659 emit_ldx(ctx, dst, lo(src), off, BPF_SIZE(code)); in build_insn()
1679 /* STX: *(size *)(dst + off) = src */ in build_insn()
1684 emit_stx(ctx, lo(dst), src, off, BPF_SIZE(code)); in build_insn()
1702 emit_atomic_r(ctx, lo(dst), lo(src), off, imm); in build_insn()
1704 emit_atomic_r32(ctx, lo(dst), lo(src), in build_insn()
1707 emit_zext_ver(ctx, src); in build_insn()
1711 emit_cmpxchg_r(ctx, lo(dst), lo(src), in build_insn()
1714 emit_cmpxchg_r32(ctx, lo(dst), lo(src), off); in build_insn()
1733 emit_atomic_r64(ctx, lo(dst), src, off, imm); in build_insn()
1736 emit_cmpxchg_r64(ctx, lo(dst), src, off); in build_insn()
1742 /* PC += off if dst == src */ in build_insn()
1743 /* PC += off if dst != src */ in build_insn()
1744 /* PC += off if dst & src */ in build_insn()
1745 /* PC += off if dst > src */ in build_insn()
1746 /* PC += off if dst >= src */ in build_insn()
1747 /* PC += off if dst < src */ in build_insn()
1748 /* PC += off if dst <= src */ in build_insn()
1749 /* PC += off if dst > src (signed) */ in build_insn()
1750 /* PC += off if dst >= src (signed) */ in build_insn()
1751 /* PC += off if dst < src (signed) */ in build_insn()
1752 /* PC += off if dst <= src (signed) */ in build_insn()
1766 setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel); in build_insn()
1767 emit_jmp_r(ctx, lo(dst), lo(src), rel, jmp); in build_insn()
1806 /* PC += off if dst == src */ in build_insn()
1807 /* PC += off if dst != src */ in build_insn()
1808 /* PC += off if dst & src */ in build_insn()
1809 /* PC += off if dst > src */ in build_insn()
1810 /* PC += off if dst >= src */ in build_insn()
1811 /* PC += off if dst < src */ in build_insn()
1812 /* PC += off if dst <= src */ in build_insn()
1813 /* PC += off if dst > src (signed) */ in build_insn()
1814 /* PC += off if dst >= src (signed) */ in build_insn()
1815 /* PC += off if dst < src (signed) */ in build_insn()
1816 /* PC += off if dst <= src (signed) */ in build_insn()
1830 setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel); in build_insn()
1831 emit_jmp_r64(ctx, dst, src, rel, jmp); in build_insn()