Lines Matching +full:function +full:- +full:off
1 // SPDX-License-Identifier: GPL-2.0-only
3 * Just-In-Time compiler for eBPF bytecode on MIPS.
4 * Implementation of JIT functions for 32-bit CPUs.
19 #include <asm/cpu-features.h>
20 #include <asm/isa-rev.h>
25 /* MIPS a4-a7 are not available in the o32 ABI */
31 /* Stack is 8-byte aligned in o32 ABI */
36 * This corresponds to stack space for register arguments a0-a3.
40 /* Temporary 64-bit register used by JIT */
46 * R0-to-v0 assignment (4 bytes) if big endian.
75 /* Caller-saved CPU registers */
81 /* Callee-saved CPU registers */
96 * Mapping of 64-bit eBPF registers to 32-bit native MIPS registers.
99 * the MIPS convention for passing 64-bit arguments and return values.
100 * 2) The eBPF return value, arguments and callee-saved registers are mapped
103 * only one general-purpose register is actually needed for the mapping.
107 * for constant blinding. The gp register is callee-saved.
108 * 5) One 64-bit temporary register is mapped for use when sign-extending
109 * immediate operands. MIPS registers t6-t9 are available to the JIT
110 * for use as temporaries when implementing complex 64-bit operations.
114 * register mapping (2) simplifies the handling of function calls.
117 /* Return value from in-kernel function, and exit value from eBPF */
119 /* Arguments from eBPF program to in-kernel function */
126 /* Callee-saved registers that in-kernel function will preserve */
131 /* Read-only frame pointer to access the eBPF stack */
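The mapping described above is realized as a lookup table, bpf2mips32[], indexed by eBPF register number; later fragments (emit_divmod_r64(), build_insn()) pull register pairs out of it. A minimal sketch of the idea, with illustrative pair assignments that are assumptions rather than the file's actual table:

	/* Sketch: each 64-bit eBPF register maps to a pair of 32-bit
	 * MIPS registers.  Which element is the high or low half is
	 * resolved by the endian-aware lo()/hi() helpers below.
	 */
	static const u8 bpf2mips32[][2] = {
		[BPF_REG_0] = {MIPS_R_V0, MIPS_R_V1},	/* return value */
		[BPF_REG_1] = {MIPS_R_A0, MIPS_R_A1},	/* argument 1 */
		[BPF_REG_2] = {MIPS_R_A2, MIPS_R_A3},	/* argument 2 */
		/* remaining args, callee-saved regs and FP follow */
	};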
143 /* Get low CPU register for a 64-bit eBPF register mapping */
153 /* Get high CPU register for a 64-bit eBPF register mapping */
164 * Mark a 64-bit CPU register pair as clobbered, it needs to be
165 * saved/restored by the program if callee-saved.
173 /* dst = imm (sign-extended) */
178 emit(ctx, addiu, hi(dst), MIPS_R_ZERO, -1); in emit_mov_se_i64()
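In emit_mov_se_i64() the high word is just the sign bit of the immediate fanned out: addiu hi(dst), zero, -1 yields 0xffffffff for negative values. A C model of the operation (function name illustrative):

	/* Sign-extended 64-bit move from a 32-bit immediate (sketch). */
	static void mov_se_i64(u32 *hi, u32 *lo, s32 imm)
	{
		*lo = (u32)imm;
		*hi = imm < 0 ? 0xffffffff : 0;	/* the addiu ..., -1 case */
	}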
187 if (!ctx->program->aux->verifier_zext) { in emit_zext_ver()
200 /* ALU immediate operation (64-bit) */
214 imm = -imm; in emit_alu_i64()
218 imm = -imm; in emit_alu_i64()
232 emit(ctx, addiu, hi(dst), hi(dst), -1); in emit_alu_i64()
234 /* dst = dst - imm */ in emit_alu_i64()
246 emit(ctx, addiu, hi(dst), MIPS_R_ZERO, -1); in emit_alu_i64()
259 emit(ctx, addiu, hi(dst), hi(dst), -1); in emit_alu_i64()
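The addiu hi(dst), hi(dst), -1 corrections above are the contribution of the immediate's sign extension; the 64-bit add itself is built from 32-bit halves plus a carry test. A sketch of the decomposition (standalone C model, not the exact emitted sequence):

	/* 64-bit dst += sext64(imm) on 32-bit halves (sketch). */
	static void add_se_i64(u32 *hi, u32 *lo, s32 imm)
	{
		u32 old = *lo;

		*lo += (u32)imm;
		if (*lo < old)		/* carry out of the low word */
			*hi += 1;
		if (imm < 0)		/* high word of sext64(imm) is ~0 */
			*hi -= 1;	/* i.e. addiu hi(dst), hi(dst), -1 */
	}

Subtraction is handled by negating the immediate first, as the imm = -imm fragments show.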
266 /* ALU register operation (64-bit) */
283 /* dst = dst - src */ in emit_alu_r64()
309 /* ALU invert (64-bit) */
320 /* ALU shift immediate (64-bit) */
328 emit(ctx, srl, MIPS_R_T9, lo(dst), 32 - imm); in emit_shift_i64()
333 emit(ctx, sll, hi(dst), lo(dst), imm - 32); in emit_shift_i64()
340 emit(ctx, sll, MIPS_R_T9, hi(dst), 32 - imm); in emit_shift_i64()
345 emit(ctx, srl, lo(dst), hi(dst), imm - 32); in emit_shift_i64()
352 emit(ctx, sll, MIPS_R_T9, hi(dst), 32 - imm); in emit_shift_i64()
357 emit(ctx, sra, lo(dst), hi(dst), imm - 32); in emit_shift_i64()
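The two regimes visible above (shift amounts below 32 versus 32 and up) are the standard double-word shift; for a left shift the technique reduces to this C sketch:

	/* 64-bit dst <<= imm on 32-bit halves, for 0 < imm < 64
	 * (sketch; imm == 0 is a no-op handled separately, which also
	 * keeps the 32 - imm shift amount in range).
	 */
	static void lsh_i64(u32 *hi, u32 *lo, unsigned int imm)
	{
		if (imm < 32) {
			*hi = (*hi << imm) | (*lo >> (32 - imm));
			*lo <<= imm;
		} else {
			*hi = *lo << (imm - 32);  /* sll hi(dst), lo(dst), imm-32 */
			*lo = 0;
		}
	}

Right and arithmetic-right shifts mirror this with srl/sra, as the hi(dst)-sourced fragments show.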
365 /* ALU shift register (64-bit) */
422 /* ALU mul immediate (64x32-bit) */
429 /* dst = dst * 1 is a no-op */ in emit_mul_i64()
432 /* dst = dst * -1 */ in emit_mul_i64()
433 case -1: in emit_mul_i64()
451 /* hi(dst) = hi(dst) - lo(dst) */ in emit_mul_i64()
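For a multiply by immediate only the low 64 bits of the product are kept, so one 32x32->64 multiply plus a 32-bit multiply suffice; the hi(dst) = hi(dst) - lo(dst) fragment is the term contributed by the immediate's sign extension. As a C sketch (function name illustrative):

	/* dst64 *= sext64(imm), low 64 bits of the product kept (sketch). */
	static void mul_se_i64(u32 *hi, u32 *lo, s32 imm)
	{
		u64 p = (u64)*lo * (u32)imm;	/* 32x32 -> 64 product */

		*hi = *hi * (u32)imm + (u32)(p >> 32);
		if (imm < 0)
			*hi -= *lo;	/* hi(dst) = hi(dst) - lo(dst) */
		*lo = (u32)p;
	}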
473 /* ALU mul register (64x64-bit) */
515 /* Helper function for 64-bit modulo */
524 /* ALU div/mod register (64-bit) */
528 const u8 *r0 = bpf2mips32[BPF_REG_0]; /* Mapped to v0-v1 */ in emit_divmod_r64()
529 const u8 *r1 = bpf2mips32[BPF_REG_1]; /* Mapped to a0-a1 */ in emit_divmod_r64()
530 const u8 *r2 = bpf2mips32[BPF_REG_2]; /* Mapped to a2-a3 */ in emit_divmod_r64()
534 /* Push caller-saved registers on stack */ in emit_divmod_r64()
535 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_divmod_r64()
538 /* Put 64-bit arguments 1 and 2 in registers a0-a3 */ in emit_divmod_r64()
545 /* Emit function call */ in emit_divmod_r64()
560 /* Store the 64-bit result in dst */ in emit_divmod_r64()
564 /* Restore caller-saved registers, excluding the computed result */ in emit_divmod_r64()
566 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_divmod_r64()
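MIPS32 has no 64-bit divide instruction, so emit_divmod_r64() spills caller-saved state and calls a C helper through the plain o32 convention: the two 64-bit operands travel in a0-a1 and a2-a3 and the 64-bit result returns in v0-v1, exactly as the register comments above describe. The shape of such a helper, as a sketch (name and body are assumptions, not taken from the source):

	/* Illustrative 64-bit modulo helper targeted by the call. */
	static u64 jit_mod64(u64 a, u64 b)
	{
		return a % b;	/* zero-divisor policy omitted in this sketch */
	}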
639 /* Zero-extend a word */ in emit_trunc_r64()
644 /* Zero-extend a half word */ in emit_trunc_r64()
653 /* Load operation: dst = *(size*)(src + off) */
655 const u8 dst[], u8 src, s16 off, u8 size) in emit_ldx() argument
660 emit(ctx, lbu, lo(dst), off, src); in emit_ldx()
665 emit(ctx, lhu, lo(dst), off, src); in emit_ldx()
670 emit(ctx, lw, lo(dst), off, src); in emit_ldx()
676 emit(ctx, lw, dst[0], off + 4, src); in emit_ldx()
677 emit(ctx, lw, dst[1], off, src); in emit_ldx()
679 emit(ctx, lw, dst[1], off, src); in emit_ldx()
680 emit(ctx, lw, dst[0], off + 4, src); in emit_ldx()
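A 64-bit load becomes two lw instructions at off and off + 4; the two orderings above presumably exist so that when the base register src doubles as one half of dst, that half is written last and the address survives until both loads are done. A plain C model of the split itself (name illustrative, natural alignment assumed):

	/* BPF_DW case of emit_ldx(): two word loads at off and off + 4.
	 * Which pair half holds which word follows the endian-dependent
	 * bpf2mips32[] layout; dst[0]/dst[1] naming mirrors the fragments.
	 */
	static void ldx_dw(u32 dst[2], const u8 *src, s16 off)
	{
		dst[1] = *(const u32 *)(src + off);	/* lw dst[1], off(src) */
		dst[0] = *(const u32 *)(src + off + 4);	/* lw dst[0], off+4(src) */
	}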
688 /* Store operation: *(size *)(dst + off) = src */
690 const u8 dst, const u8 src[], s16 off, u8 size) in emit_stx() argument
695 emit(ctx, sb, lo(src), off, dst); in emit_stx()
699 emit(ctx, sh, lo(src), off, dst); in emit_stx()
703 emit(ctx, sw, lo(src), off, dst); in emit_stx()
707 emit(ctx, sw, src[1], off, dst); in emit_stx()
708 emit(ctx, sw, src[0], off + 4, dst); in emit_stx()
713 /* Atomic read-modify-write (32-bit, non-ll/sc fallback) */
715 u8 dst, u8 src, s16 off, u8 code) in emit_atomic_r32() argument
720 /* Push caller-saved registers on stack */ in emit_atomic_r32()
721 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r32()
724 * Argument 1: dst+off if xchg, otherwise src, passed in register a0 in emit_atomic_r32()
725 * Argument 2: src if xchg, otherwise dst+off, passed in register a1 in emit_atomic_r32()
730 emit(ctx, addiu, MIPS_R_A0, MIPS_R_T9, off); in emit_atomic_r32()
733 emit(ctx, addiu, MIPS_R_A1, MIPS_R_T9, off); in emit_atomic_r32()
736 /* Emit function call */ in emit_atomic_r32()
783 /* Restore caller-saved registers, except any fetched value */ in emit_atomic_r32()
784 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r32()
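The a0/a1 swap documented above lines the eBPF operands up with the kernel's atomic primitives, whose argument orders differ between the exchange and arithmetic forms (prototypes as in <linux/atomic.h>; which helper the fallback actually calls is not shown in these fragments):

	int  atomic_xchg(atomic_t *v, int new);	/* a0 = dst+off, a1 = src */
	void atomic_add(int i, atomic_t *v);	/* a0 = src, a1 = dst+off */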
790 /* Helper function for 64-bit atomic exchange */
796 /* Atomic read-modify-write (64-bit) */
798 u8 dst, const u8 src[], s16 off, u8 code) in emit_atomic_r64() argument
800 const u8 *r0 = bpf2mips32[BPF_REG_0]; /* Mapped to v0-v1 */ in emit_atomic_r64()
801 const u8 *r1 = bpf2mips32[BPF_REG_1]; /* Mapped to a0-a1 */ in emit_atomic_r64()
805 /* Push caller-saved registers on stack */ in emit_atomic_r64()
806 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r64()
809 * Argument 1: 64-bit src, passed in registers a0-a1 in emit_atomic_r64()
810 * Argument 2: 32-bit dst+off, passed in register a2 in emit_atomic_r64()
815 emit(ctx, addiu, MIPS_R_A2, MIPS_R_T9, off); in emit_atomic_r64()
817 /* Emit function call */ in emit_atomic_r64()
865 /* Restore caller-saved registers, except any fetched value */ in emit_atomic_r64()
866 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r64()
872 /* Atomic compare-and-exchange (32-bit, non-ll/sc fallback) */
873 static void emit_cmpxchg_r32(struct jit_context *ctx, u8 dst, u8 src, s16 off) in emit_cmpxchg_r32() argument
877 /* Push caller-saved registers on stack */ in emit_cmpxchg_r32()
878 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r32()
881 * Argument 1: 32-bit dst+off, passed in register a0 in emit_cmpxchg_r32()
882 * Argument 2: 32-bit r0, passed in register a1 in emit_cmpxchg_r32()
883 * Argument 3: 32-bit src, passed in register a2 in emit_cmpxchg_r32()
885 emit(ctx, addiu, MIPS_R_T9, dst, off); in emit_cmpxchg_r32()
891 /* Emit function call */ in emit_cmpxchg_r32()
899 /* Restore caller-saved registers, except the return value */ in emit_cmpxchg_r32()
900 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r32()
908 /* Atomic compare-and-exchange (64-bit) */
910 u8 dst, const u8 src[], s16 off) in emit_cmpxchg_r64() argument
915 /* Push caller-saved registers on stack */ in emit_cmpxchg_r64()
916 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r64()
919 * Argument 1: 32-bit dst+off, passed in register a0 (a1 unused) in emit_cmpxchg_r64()
920 * Argument 2: 64-bit r0, passed in registers a2-a3 in emit_cmpxchg_r64()
921 * Argument 3: 64-bit src, passed on stack in emit_cmpxchg_r64()
924 emit(ctx, addiu, MIPS_R_T9, dst, off); in emit_cmpxchg_r64()
929 /* Emit function call */ in emit_cmpxchg_r64()
934 /* Restore caller-saved registers, except the return value */ in emit_cmpxchg_r64()
935 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r64()
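The "a1 unused" note above is pure o32 ABI: a 64-bit argument must start in an even-numbered argument register, so after the 32-bit pointer in a0 the 64-bit old value skips ahead to the a2:a3 pair and the third argument overflows to the stack. A prototype with exactly this layout (helper name illustrative):

	/* o32 layout: ptr -> a0, (a1 padding), old -> a2:a3, new -> stack */
	u64 jit_cmpxchg64(u64 *ptr, u64 old, u64 new);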
989 /* Emulation of 64-bit sltiu rd, rs, imm, where imm may be S32_MAX + 1 */
998 emit(ctx, sltiu, tmp, hi(rs), -1); /* tmp = rsh < ~0U */ in emit_sltiu_r64()
1011 /* Emulation of 64-bit sltu rd, rs, rt */
1018 emit(ctx, subu, tmp, hi(rs), hi(rt)); /* tmp = rsh - rth */ in emit_sltu_r64()
1024 /* Emulation of 64-bit slti rd, rs, imm, where imm may be S32_MAX + 1 */
1058 * if (imm < 0) rd = rsh < -1 in emit_slti_r64()
1062 emit(ctx, slti, rd, hi(rs), imm < 0 ? -1 : 0); /* rd = rsh < hi(imm) */ in emit_slti_r64()
1066 /* Emulation of 64-bit slt rd, rs, rt */
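All four set-on-less-than emulations reduce to the same half-word rule: the high words decide unless they are equal, in which case the low words (always compared unsigned) decide. In C:

	/* 64-bit unsigned rd = rs < rt from 32-bit halves (sketch of the
	 * rule behind emit_sltu_r64(); the signed variants compare the
	 * high words signed instead).
	 */
	static u32 sltu64(u32 rs_hi, u32 rs_lo, u32 rt_hi, u32 rt_lo)
	{
		if (rs_hi != rt_hi)	/* high words decide */
			return rs_hi < rt_hi;
		return rs_lo < rt_lo;	/* tie broken by low words */
	}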
1091 /* Jump immediate (64-bit) */
1093 const u8 dst[], s32 imm, s32 off, u8 op) in emit_jmp_i64() argument
1098 /* No-op, used internally for branch optimization */ in emit_jmp_i64()
1101 /* PC += off if dst == imm */ in emit_jmp_i64()
1102 /* PC += off if dst != imm */ in emit_jmp_i64()
1105 if (imm >= -0x7fff && imm <= 0x8000) { in emit_jmp_i64()
1106 emit(ctx, addiu, tmp, lo(dst), -imm); in emit_jmp_i64()
1120 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1122 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1124 /* PC += off if dst & imm */ in emit_jmp_i64()
1125 /* PC += off if (dst & imm) == 0 (not in BPF, used for long jumps) */ in emit_jmp_i64()
1134 if (imm < 0) /* Sign-extension pulls in high word */ in emit_jmp_i64()
1137 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1139 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1141 /* PC += off if dst > imm */ in emit_jmp_i64()
1144 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1146 /* PC += off if dst >= imm */ in emit_jmp_i64()
1149 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1151 /* PC += off if dst < imm */ in emit_jmp_i64()
1154 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1156 /* PC += off if dst <= imm */ in emit_jmp_i64()
1159 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1161 /* PC += off if dst > imm (signed) */ in emit_jmp_i64()
1164 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1166 /* PC += off if dst >= imm (signed) */ in emit_jmp_i64()
1169 emit(ctx, beqz, tmp, off); in emit_jmp_i64()
1171 /* PC += off if dst < imm (signed) */ in emit_jmp_i64()
1174 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1176 /* PC += off if dst <= imm (signed) */ in emit_jmp_i64()
1179 emit(ctx, bnez, tmp, off); in emit_jmp_i64()
1184 /* Jump register (64-bit) */
1186 const u8 dst[], const u8 src[], s32 off, u8 op) in emit_jmp_r64() argument
1192 /* No-op, used internally for branch optimization */ in emit_jmp_r64()
1195 /* PC += off if dst == src */ in emit_jmp_r64()
1196 /* PC += off if dst != src */ in emit_jmp_r64()
1203 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1205 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1207 /* PC += off if dst & src */ in emit_jmp_r64()
1208 /* PC += off if (dst & src) == 0 (not in BPF, used for long jumps) */ in emit_jmp_r64()
1215 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1217 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1219 /* PC += off if dst > src */ in emit_jmp_r64()
1222 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1224 /* PC += off if dst >= src */ in emit_jmp_r64()
1227 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1229 /* PC += off if dst < src */ in emit_jmp_r64()
1232 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1234 /* PC += off if dst <= src */ in emit_jmp_r64()
1237 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1239 /* PC += off if dst > src (signed) */ in emit_jmp_r64()
1242 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1244 /* PC += off if dst >= src (signed) */ in emit_jmp_r64()
1247 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1249 /* PC += off if dst < src (signed) */ in emit_jmp_r64()
1252 emit(ctx, bnez, t1, off); in emit_jmp_r64()
1254 /* PC += off if dst <= src (signed) */ in emit_jmp_r64()
1257 emit(ctx, beqz, t1, off); in emit_jmp_r64()
1262 /* Function call */
1269 if (bpf_jit_get_func_addr(ctx->program, insn, false, in emit_call()
1271 return -1; in emit_call()
1273 return -1; in emit_call()
1278 /* Emit function call */ in emit_call()
1289 /* Function tail call */
1296 int off; in emit_tail_call() local
1300 * eBPF R1 - function argument (context ptr), passed in a0-a1 in emit_tail_call()
1301 * eBPF R2 - ptr to object with array of function entry points in emit_tail_call()
1302 * eBPF R3 - array index of function to be called in emit_tail_call()
1303 * stack[sz] - remaining tail call count, initialized in prologue in emit_tail_call()
1306 /* if (ind >= ary->map.max_entries) goto out */ in emit_tail_call()
1307 off = offsetof(struct bpf_array, map.max_entries); in emit_tail_call()
1308 if (off > 0x7fff) in emit_tail_call()
1309 return -1; in emit_tail_call()
1310 emit(ctx, lw, t1, off, ary); /* t1 = ary->map.max_entries */ in emit_tail_call()
1313 emit(ctx, beqz, t1, get_offset(ctx, 1)); /* PC += off(1) if t1 == 0 */ in emit_tail_call()
1315 /* if (TCC-- <= 0) goto out */ in emit_tail_call()
1316 emit(ctx, lw, t2, ctx->stack_size, MIPS_R_SP); /* t2 = *(SP + size) */ in emit_tail_call()
1318 emit(ctx, blez, t2, get_offset(ctx, 1)); /* PC += off(1) if t2 <= 0 */ in emit_tail_call()
1319 emit(ctx, addiu, t2, t2, -1); /* t2-- (delay slot) */ in emit_tail_call()
1320 emit(ctx, sw, t2, ctx->stack_size, MIPS_R_SP); /* *(SP + size) = t2 */ in emit_tail_call()
1322 /* prog = ary->ptrs[ind] */ in emit_tail_call()
1323 off = offsetof(struct bpf_array, ptrs); in emit_tail_call()
1324 if (off > 0x7fff) in emit_tail_call()
1325 return -1; in emit_tail_call()
1328 emit(ctx, lw, t2, off, t1); /* t2 = *(t1 + off) */ in emit_tail_call()
1332 emit(ctx, beqz, t2, get_offset(ctx, 1)); /* PC += off(1) if t2 == 0 */ in emit_tail_call()
1335 /* func = prog->bpf_func + 8 (prologue skip offset) */ in emit_tail_call()
1336 off = offsetof(struct bpf_prog, bpf_func); in emit_tail_call()
1337 if (off > 0x7fff) in emit_tail_call()
1338 return -1; in emit_tail_call()
1339 emit(ctx, lw, t1, off, t2); /* t1 = *(t2 + off) */ in emit_tail_call()
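Read together, the fragments implement the usual eBPF tail-call contract; as a C pseudocode sketch of the emitted checks:

	/* Tail-call guards as emitted above; the tail call count TCC
	 * lives in the stack slot at SP + stack_size.
	 */
	if (ind >= ary->map.max_entries)	/* sltu + beqz */
		goto out;
	if (tcc <= 0)				/* blez t2 */
		goto out;
	tcc--;					/* addiu in the delay slot */
	prog = ary->ptrs[ind];
	if (prog == NULL)			/* beqz t2 */
		goto out;
	target = prog->bpf_func + 8;		/* skip the callee's prologue */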
1352 *               :----------------------------:
1353 *               : 64-bit eBPF args r3-r5     :
1354 *               :----------------------------:
1356 *               +============================+ <--- MIPS sp before call
1357 *               | Callee-saved registers,    |
1358 *               | including RA and FP        |
1359 *               +----------------------------+ <--- eBPF FP (MIPS zero,fp)
1360 *               | Local eBPF variables       |
1361 *               | allocated by program       |
1362 *               +----------------------------+
1363 *               | Reserved for caller-saved  |
1364 *               | registers                  |
1365 *               +----------------------------+
1366 *               | Reserved for 64-bit eBPF   |
1367 *               | args r3-r5 & args passed   |
1368 *               | on stack in kernel calls   |
1369 * Lower address  +============================+ <--- MIPS sp
1389 * 16-byte area in the parent's stack frame. On a tail call, the in build_prologue()
1390 * calling function jumps into the prologue after these instructions. in build_prologue()
1396 * Register eBPF R1 contains the 32-bit context pointer argument. in build_prologue()
1397 * A 32-bit argument is always passed in MIPS register a0, regardless in build_prologue()
1398 * of CPU endianness. Initialize R1 accordingly and zero-extend. in build_prologue()
1404 /* === Entry-point for tail calls === */ in build_prologue()
1406 /* Zero-extend the 32-bit argument */ in build_prologue()
1410 if (ctx->accessed & BIT(BPF_REG_FP)) in build_prologue()
1413 /* Compute the stack space needed for callee-saved registers */ in build_prologue()
1414 saved = hweight32(ctx->clobbered & JIT_CALLEE_REGS) * sizeof(u32); in build_prologue()
1418 locals = ALIGN(ctx->program->aux->stack_depth, MIPS_STACK_ALIGNMENT); in build_prologue()
1421 * If we are emitting function calls, reserve extra stack space for in build_prologue()
1422 * caller-saved registers and function arguments passed on the stack. in build_prologue()
1426 reserved = ctx->stack_used; in build_prologue()
1430 emit(ctx, addiu, MIPS_R_SP, MIPS_R_SP, -stack); in build_prologue()
1432 /* Store callee-saved registers on stack */ in build_prologue()
1433 push_regs(ctx, ctx->clobbered & JIT_CALLEE_REGS, 0, stack - saved); in build_prologue()
1436 if (ctx->accessed & BIT(BPF_REG_FP)) in build_prologue()
1437 emit(ctx, addiu, lo(fp), MIPS_R_SP, stack - saved); in build_prologue()
1439 ctx->saved_size = saved; in build_prologue()
1440 ctx->stack_size = stack; in build_prologue()
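As a worked example of this sizing (all input numbers assumed for illustration, and the exact rounding of the total is not shown in these fragments): four clobbered callee-saved registers, a program stack_depth of 12 and 8 bytes of call reservation give

	saved    = 4 * sizeof(u32);			/* = 16 */
	locals   = ALIGN(12, MIPS_STACK_ALIGNMENT);	/* = 16, 8-byte aligned */
	reserved = 8;					/* ctx->stack_used */
	stack    = saved + locals + reserved;		/* = 40, already 8-aligned */

The addiu sp, sp, -stack then carves the whole frame in one step, and push_regs() stores the callee-saved registers at its top, at offset stack - saved, which is also where the eBPF frame pointer is planted.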
1446 /* Restore callee-saved registers from stack */ in build_epilogue()
1447 pop_regs(ctx, ctx->clobbered & JIT_CALLEE_REGS, 0, in build_epilogue()
1448 ctx->stack_size - ctx->saved_size); in build_epilogue()
1450 * A 32-bit return value is always passed in MIPS register v0, in build_epilogue()
1451 * but on big-endian targets the low part of R0 is mapped to v1. in build_epilogue()
1459 emit(ctx, addiu, MIPS_R_SP, MIPS_R_SP, ctx->stack_size); in build_epilogue()
1465 const u8 *dst = bpf2mips32[insn->dst_reg]; in build_insn()
1466 const u8 *src = bpf2mips32[insn->src_reg]; in build_insn()
1469 u8 code = insn->code; in build_insn()
1470 s16 off = insn->off; in build_insn() local
1471 s32 imm = insn->imm; in build_insn()
1492 /* dst = -dst */ in build_insn()
1504 /* dst = dst - imm */ in build_insn()
1534 /* dst = dst - src */ in build_insn()
1552 /* dst = imm (64-bit) */ in build_insn()
1556 /* dst = src (64-bit) */ in build_insn()
1561 /* dst = -dst (64-bit) */ in build_insn()
1565 /* dst = dst & imm (64-bit) */ in build_insn()
1569 /* dst = dst | imm (64-bit) */ in build_insn()
1570 /* dst = dst ^ imm (64-bit) */ in build_insn()
1571 /* dst = dst + imm (64-bit) */ in build_insn()
1572 /* dst = dst - imm (64-bit) */ in build_insn()
1580 /* dst = dst << imm (64-bit) */ in build_insn()
1581 /* dst = dst >> imm (64-bit) */ in build_insn()
1582 /* dst = dst >> imm (64-bit, arithmetic) */ in build_insn()
1589 /* dst = dst * imm (64-bit) */ in build_insn()
1593 /* dst = dst / imm (64-bit) */ in build_insn()
1594 /* dst = dst % imm (64-bit) */ in build_insn()
1598 * Sign-extend the immediate value into a temporary register, in build_insn()
1604 /* dst = dst & src (64-bit) */ in build_insn()
1605 /* dst = dst | src (64-bit) */ in build_insn()
1606 /* dst = dst ^ src (64-bit) */ in build_insn()
1607 /* dst = dst + src (64-bit) */ in build_insn()
1608 /* dst = dst - src (64-bit) */ in build_insn()
1616 /* dst = dst << src (64-bit) */ in build_insn()
1617 /* dst = dst >> src (64-bit) */ in build_insn()
1618 /* dst = dst >> src (64-bit, arithmetic) */ in build_insn()
1624 /* dst = dst * src (64-bit) */ in build_insn()
1628 /* dst = dst / src (64-bit) */ in build_insn()
1629 /* dst = dst % src (64-bit) */ in build_insn()
1654 /* LDX: dst = *(size *)(src + off) */ in build_insn()
1659 emit_ldx(ctx, dst, lo(src), off, BPF_SIZE(code)); in build_insn()
1661 /* ST: *(size *)(dst + off) = imm */ in build_insn()
1668 /* Sign-extend immediate value into temporary reg */ in build_insn()
1677 emit_stx(ctx, lo(dst), tmp, off, BPF_SIZE(code)); in build_insn()
1679 /* STX: *(size *)(dst + off) = src */ in build_insn()
1684 emit_stx(ctx, lo(dst), src, off, BPF_SIZE(code)); in build_insn()
1702 emit_atomic_r(ctx, lo(dst), lo(src), off, imm); in build_insn()
1703 else /* Non-ll/sc fallback */ in build_insn()
1705 off, imm); in build_insn()
1712 lo(res), off); in build_insn()
1713 else /* Non-ll/sc fallback */ in build_insn()
1714 emit_cmpxchg_r32(ctx, lo(dst), lo(src), off); in build_insn()
1715 /* Result zero-extension inserted by verifier */ in build_insn()
1721 /* Atomics (64-bit) */ in build_insn()
1733 emit_atomic_r64(ctx, lo(dst), src, off, imm); in build_insn()
1736 emit_cmpxchg_r64(ctx, lo(dst), src, off); in build_insn()
1742 /* PC += off if dst == src */ in build_insn()
1743 /* PC += off if dst != src */ in build_insn()
1744 /* PC += off if dst & src */ in build_insn()
1745 /* PC += off if dst > src */ in build_insn()
1746 /* PC += off if dst >= src */ in build_insn()
1747 /* PC += off if dst < src */ in build_insn()
1748 /* PC += off if dst <= src */ in build_insn()
1749 /* PC += off if dst > src (signed) */ in build_insn()
1750 /* PC += off if dst >= src (signed) */ in build_insn()
1751 /* PC += off if dst < src (signed) */ in build_insn()
1752 /* PC += off if dst <= src (signed) */ in build_insn()
1764 if (off == 0) in build_insn()
1766 setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel); in build_insn()
1768 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1771 /* PC += off if dst == imm */ in build_insn()
1772 /* PC += off if dst != imm */ in build_insn()
1773 /* PC += off if dst & imm */ in build_insn()
1774 /* PC += off if dst > imm */ in build_insn()
1775 /* PC += off if dst >= imm */ in build_insn()
1776 /* PC += off if dst < imm */ in build_insn()
1777 /* PC += off if dst <= imm */ in build_insn()
1778 /* PC += off if dst > imm (signed) */ in build_insn()
1779 /* PC += off if dst >= imm (signed) */ in build_insn()
1780 /* PC += off if dst < imm (signed) */ in build_insn()
1781 /* PC += off if dst <= imm (signed) */ in build_insn()
1793 if (off == 0) in build_insn()
1795 setup_jmp_i(ctx, imm, 32, BPF_OP(code), off, &jmp, &rel); in build_insn()
1803 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1806 /* PC += off if dst == src */ in build_insn()
1807 /* PC += off if dst != src */ in build_insn()
1808 /* PC += off if dst & src */ in build_insn()
1809 /* PC += off if dst > src */ in build_insn()
1810 /* PC += off if dst >= src */ in build_insn()
1811 /* PC += off if dst < src */ in build_insn()
1812 /* PC += off if dst <= src */ in build_insn()
1813 /* PC += off if dst > src (signed) */ in build_insn()
1814 /* PC += off if dst >= src (signed) */ in build_insn()
1815 /* PC += off if dst < src (signed) */ in build_insn()
1816 /* PC += off if dst <= src (signed) */ in build_insn()
1828 if (off == 0) in build_insn()
1830 setup_jmp_r(ctx, dst == src, BPF_OP(code), off, &jmp, &rel); in build_insn()
1832 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1835 /* PC += off if dst == imm */ in build_insn()
1836 /* PC += off if dst != imm */ in build_insn()
1837 /* PC += off if dst & imm */ in build_insn()
1838 /* PC += off if dst > imm */ in build_insn()
1839 /* PC += off if dst >= imm */ in build_insn()
1840 /* PC += off if dst < imm */ in build_insn()
1841 /* PC += off if dst <= imm */ in build_insn()
1842 /* PC += off if dst > imm (signed) */ in build_insn()
1843 /* PC += off if dst >= imm (signed) */ in build_insn()
1844 /* PC += off if dst < imm (signed) */ in build_insn()
1845 /* PC += off if dst <= imm (signed) */ in build_insn()
1857 if (off == 0) in build_insn()
1859 setup_jmp_i(ctx, imm, 64, BPF_OP(code), off, &jmp, &rel); in build_insn()
1861 if (finish_jmp(ctx, jmp, off) < 0) in build_insn()
1864 /* PC += off */ in build_insn()
1866 if (off == 0) in build_insn()
1868 if (emit_ja(ctx, off) < 0) in build_insn()
1876 /* Function call */ in build_insn()
1881 /* Function return */ in build_insn()
1887 if (ctx->bpf_index == ctx->program->len - 1) in build_insn()
1896 return -EINVAL; in build_insn()
1899 return -EFAULT; in build_insn()
1902 ctx->bpf_index, code); in build_insn()
1903 return -E2BIG; in build_insn()