Lines matching +full:rs +full:- — RISC-V BPF JIT (arch/riscv/net/bpf_jit_comp64.c, Linux kernel)
1 // SPDX-License-Identifier: GPL-2.0
81 __set_bit(reg, &ctx->flags); in bpf_to_rv_reg()
96 return test_bit(reg, &ctx->flags); in seen_reg()
103 __set_bit(RV_CTX_F_SEEN_S5, &ctx->flags); in mark_fp()
108 __set_bit(RV_CTX_F_SEEN_CALL, &ctx->flags); in mark_call()
113 return test_bit(RV_CTX_F_SEEN_CALL, &ctx->flags); in seen_call()
118 __set_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags); in mark_tail_call()
123 return test_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags); in seen_tail_call()
131 __set_bit(RV_CTX_F_SEEN_S6, &ctx->flags); in rv_tail_call_reg()
139 return -(1L << 31) <= val && val < (1L << 31); in is_32b_int()
145 * auipc+jalr can reach any signed PC-relative offset in the range in in_auipc_jalr_range()
146 * [-2^31 - 2^11, 2^31 - 2^11). in in_auipc_jalr_range()
148 return (-(1L << 31) - (1L << 11)) <= val && in in_auipc_jalr_range()
149 val < ((1L << 31) - (1L << 11)); in in_auipc_jalr_range()
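The asymmetric bounds come from composing the two instructions: auipc covers a signed 32-bit window around PC, and the jalr that follows adds a sign-extended 12-bit immediate, which slides the whole window down by 2^11. A minimal user-space sketch of the two predicates (illustrative and standalone, not the kernel's code):

	#include <assert.h>
	#include <stdbool.h>

	static bool is_32b_int(long long val)
	{
		return -(1LL << 31) <= val && val < (1LL << 31);
	}

	/* auipc gives PC + (signed 20-bit << 12); the jalr immediate is
	 * sign-extended, so the reachable window shifts down by 1 << 11. */
	static bool in_auipc_jalr_range(long long val)
	{
		return (-(1LL << 31) - (1LL << 11)) <= val &&
		       val < ((1LL << 31) - (1LL << 11));
	}

	int main(void)
	{
		assert(in_auipc_jalr_range(-(1LL << 31) - (1LL << 11)));
		assert(!in_auipc_jalr_range((1LL << 31) - (1LL << 11)));
		assert(is_32b_int((1LL << 31) - 1) && !is_32b_int(1LL << 31));
		return 0;
	}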
165 /* Emit fixed-length instructions for address */
172 u64 ip = (u64)(ctx->ro_insns + ctx->ninsns); in emit_addr()
173 s64 off = addr - ip; in emit_addr()
178 pr_err("bpf-jit: target offset 0x%llx is out of range\n", off); in emit_addr()
179 return -ERANGE; in emit_addr()
187 /* Emit variable-length instructions for 32-bit and 64-bit imm */
190 /* Note that the immediate from the add is sign-extended, in emit_imm()
194 * shift. The "Loading a 32-Bit constant" example from the in emit_imm()
195 * "Computer Organization and Design, RISC-V edition" book by in emit_imm()
201 /* Sign-extend lower 12 bits to 64 bits since immediates for li, addiw, in emit_imm()
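The comment's point about sign extension: addi/addiw sign-extend their 12-bit immediate, so the value is biased by 2^11 before the upper bits are taken; adding the sign-extended low 12 bits back then reconstructs the original exactly. A hedged user-space sketch of the recursive materialization, printing pseudo-instructions instead of emitting machine code (function and mnemonic spelling are illustrative):

	#include <stdio.h>
	#include <stdint.h>

	static void sketch_imm(int64_t val)
	{
		int64_t upper = (val + (1 << 11)) >> 12;	/* biased upper part    */
		int64_t lower = ((val & 0xfff) << 52) >> 52;	/* sign-extended low 12 */

		if (-(1LL << 31) <= val && val < (1LL << 31)) {
			if (!upper) {
				printf("li    rd, %lld\n", (long long)lower);
				return;
			}
			printf("lui   rd, %lld\n", (long long)upper);
			printf("addiw rd, rd, %lld\n", (long long)lower);
			return;
		}

		/* 64-bit case: build the upper part recursively, shift it
		 * into place, then add the low 12 bits. */
		int shift = __builtin_ctzll(upper);

		sketch_imm(upper >> shift);
		printf("slli  rd, rd, %d\n", shift + 12);
		if (lower)
			printf("addi  rd, rd, %lld\n", (long long)lower);
	}

	int main(void)
	{
		sketch_imm(0x12345678deadbeefLL);
		return 0;
	}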
233 int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8; in __build_epilogue()
237 store_offset -= 8; in __build_epilogue()
240 store_offset -= 8; in __build_epilogue()
243 store_offset -= 8; in __build_epilogue()
247 store_offset -= 8; in __build_epilogue()
251 store_offset -= 8; in __build_epilogue()
255 store_offset -= 8; in __build_epilogue()
259 store_offset -= 8; in __build_epilogue()
263 store_offset -= 8; in __build_epilogue()
265 if (ctx->arena_vm_start) { in __build_epilogue()
267 store_offset -= 8; in __build_epilogue()
280 static void emit_bcc(u8 cond, u8 rd, u8 rs, int rvoff, in emit_bcc() argument
285 emit(rv_beq(rd, rs, rvoff >> 1), ctx); in emit_bcc()
288 emit(rv_bltu(rs, rd, rvoff >> 1), ctx); in emit_bcc()
291 emit(rv_bltu(rd, rs, rvoff >> 1), ctx); in emit_bcc()
294 emit(rv_bgeu(rd, rs, rvoff >> 1), ctx); in emit_bcc()
297 emit(rv_bgeu(rs, rd, rvoff >> 1), ctx); in emit_bcc()
300 emit(rv_bne(rd, rs, rvoff >> 1), ctx); in emit_bcc()
303 emit(rv_blt(rs, rd, rvoff >> 1), ctx); in emit_bcc()
306 emit(rv_blt(rd, rs, rvoff >> 1), ctx); in emit_bcc()
309 emit(rv_bge(rd, rs, rvoff >> 1), ctx); in emit_bcc()
312 emit(rv_bge(rs, rd, rvoff >> 1), ctx); in emit_bcc()
316 static void emit_branch(u8 cond, u8 rd, u8 rs, int rvoff, in emit_branch() argument
322 emit_bcc(cond, rd, rs, rvoff, ctx); in emit_branch()
327 rvoff -= 4; in emit_branch()
330 * bne rd,rs,foo in emit_branch()
332 * beq rd,rs,<.L1> in emit_branch()
339 emit_bcc(cond, rd, rs, 8, ctx); in emit_branch()
350 emit_bcc(cond, rd, rs, 12, ctx); in emit_branch()
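The transformation sketched in the comment exists because a conditional branch (Bxx) only reaches +/-4 KiB. For farther targets the JIT inverts the condition and hops over an unconditional jal (+/-1 MiB), or over an auipc+jalr pair beyond that; hence the rvoff adjustment by 4 and the fixed skip distances of 8 and 12 bytes above. A standalone sketch of the three-tier selection (byte offsets; the real emitters pass halfword offsets):

	#include <stdio.h>

	enum branch_form {
		FORM_BCC,		/* bcc  cond, target          */
		FORM_BCC_JAL,		/* bcc !cond, +8;  jal target */
		FORM_BCC_AUIPC_JALR,	/* bcc !cond, +12; auipc+jalr */
	};

	/* Illustrative tiers mirroring the is_13b_int()/is_21b_int() checks. */
	static enum branch_form pick_branch_form(long rvoff)
	{
		if (-(1L << 12) <= rvoff && rvoff < (1L << 12))
			return FORM_BCC;	/* 13-bit signed Bxx reach */
		if (-(1L << 20) <= rvoff && rvoff < (1L << 20))
			return FORM_BCC_JAL;	/* 21-bit signed jal reach */
		return FORM_BCC_AUIPC_JALR;	/* full 32-bit PC-relative */
	}

	int main(void)
	{
		printf("%d %d %d\n", pick_branch_form(100),
		       pick_branch_form(1L << 15), pick_branch_form(1L << 25));
		return 0;
	}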
357 int tc_ninsn, off, start_insn = ctx->ninsns; in emit_bpf_tail_call()
364 * if (index >= array->map.max_entries) in emit_bpf_tail_call()
367 tc_ninsn = insn ? ctx->offset[insn] - ctx->offset[insn - 1] : in emit_bpf_tail_call()
368 ctx->offset[0]; in emit_bpf_tail_call()
373 return -1; in emit_bpf_tail_call()
375 off = ninsns_rvoff(tc_ninsn - (ctx->ninsns - start_insn)); in emit_bpf_tail_call()
378 /* if (--TCC < 0) in emit_bpf_tail_call()
381 emit_addi(RV_REG_TCC, tcc, -1, ctx); in emit_bpf_tail_call()
382 off = ninsns_rvoff(tc_ninsn - (ctx->ninsns - start_insn)); in emit_bpf_tail_call()
385 /* prog = array->ptrs[index]; in emit_bpf_tail_call()
392 return -1; in emit_bpf_tail_call()
394 off = ninsns_rvoff(tc_ninsn - (ctx->ninsns - start_insn)); in emit_bpf_tail_call()
397 /* goto *(prog->bpf_func + 4); */ in emit_bpf_tail_call()
400 return -1; in emit_bpf_tail_call()
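Taken together, the emitted guards implement the usual BPF tail-call contract: fall through past the epilogue if the index is out of bounds, if the per-chain counter in RV_REG_TCC goes negative, or if the program slot is empty; otherwise jump past the callee's first instruction so its counter setup is skipped. A runnable analogue of the guard logic with hypothetical stand-in types (the JIT emits branches, not C):

	#include <stdio.h>
	#include <stddef.h>

	struct fake_prog { const char *name; };
	struct fake_array { unsigned int max_entries; struct fake_prog *ptrs[4]; };

	static struct fake_prog *tail_call_target(struct fake_array *array,
						  unsigned int index, int *tcc)
	{
		if (index >= array->max_entries)
			return NULL;		/* out of bounds             */
		if (--(*tcc) < 0)
			return NULL;		/* tail-call chain limit hit */
		return array->ptrs[index];	/* may be NULL: no tail call */
	}

	int main(void)
	{
		struct fake_prog p = { "prog0" };
		struct fake_array a = { .max_entries = 1, .ptrs = { &p } };
		int tcc = 33;
		struct fake_prog *t = tail_call_target(&a, 0, &tcc);

		printf("%s, tcc=%d\n", t ? t->name : "(miss)", tcc);
		return 0;
	}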
406 static void init_regs(u8 *rd, u8 *rs, const struct bpf_insn *insn, in init_regs() argument
409 u8 code = insn->code; in init_regs()
418 *rd = bpf_to_rv_reg(insn->dst_reg, ctx); in init_regs()
424 *rs = bpf_to_rv_reg(insn->src_reg, ctx); in init_regs()
443 pr_err("bpf-jit: target offset 0x%llx is out of range\n", rvoff); in emit_jump_and_link()
444 return -ERANGE; in emit_jump_and_link()
458 if (addr && ctx->insns && ctx->ro_insns) { in emit_call()
463 ip = (u64)(long)(ctx->ro_insns + ctx->ninsns); in emit_call()
464 off = addr - ip; in emit_call()
476 static void emit_atomic(u8 rd, u8 rs, s16 off, s32 imm, bool is64, in emit_atomic() argument
495 emit(is64 ? rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0) : in emit_atomic()
496 rv_amoadd_w(RV_REG_ZERO, rs, rd, 0, 0), ctx); in emit_atomic()
499 emit(is64 ? rv_amoand_d(RV_REG_ZERO, rs, rd, 0, 0) : in emit_atomic()
500 rv_amoand_w(RV_REG_ZERO, rs, rd, 0, 0), ctx); in emit_atomic()
503 emit(is64 ? rv_amoor_d(RV_REG_ZERO, rs, rd, 0, 0) : in emit_atomic()
504 rv_amoor_w(RV_REG_ZERO, rs, rd, 0, 0), ctx); in emit_atomic()
507 emit(is64 ? rv_amoxor_d(RV_REG_ZERO, rs, rd, 0, 0) : in emit_atomic()
508 rv_amoxor_w(RV_REG_ZERO, rs, rd, 0, 0), ctx); in emit_atomic()
512 emit(is64 ? rv_amoadd_d(rs, rs, rd, 1, 1) : in emit_atomic()
513 rv_amoadd_w(rs, rs, rd, 1, 1), ctx); in emit_atomic()
515 emit_zextw(rs, rs, ctx); in emit_atomic()
518 emit(is64 ? rv_amoand_d(rs, rs, rd, 1, 1) : in emit_atomic()
519 rv_amoand_w(rs, rs, rd, 1, 1), ctx); in emit_atomic()
521 emit_zextw(rs, rs, ctx); in emit_atomic()
524 emit(is64 ? rv_amoor_d(rs, rs, rd, 1, 1) : in emit_atomic()
525 rv_amoor_w(rs, rs, rd, 1, 1), ctx); in emit_atomic()
527 emit_zextw(rs, rs, ctx); in emit_atomic()
530 emit(is64 ? rv_amoxor_d(rs, rs, rd, 1, 1) : in emit_atomic()
531 rv_amoxor_w(rs, rs, rd, 1, 1), ctx); in emit_atomic()
533 emit_zextw(rs, rs, ctx); in emit_atomic()
537 emit(is64 ? rv_amoswap_d(rs, rs, rd, 1, 1) : in emit_atomic()
538 rv_amoswap_w(rs, rs, rd, 1, 1), ctx); in emit_atomic()
540 emit_zextw(rs, rs, ctx); in emit_atomic()
553 emit(is64 ? rv_sc_d(RV_REG_T3, rs, rd, 0, 1) : in emit_atomic()
554 rv_sc_w(RV_REG_T3, rs, rd, 0, 1), ctx); in emit_atomic()
555 jmp_offset = ninsns_rvoff(-6); in emit_atomic()
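The ninsns_rvoff(-6) above is the back-edge of an lr/sc retry loop: offsets are counted in 2-byte units, so -6 is -12 bytes, i.e. back over three 4-byte instructions (the lr, the compare branch, and the sc) whenever the store-conditional reports failure. A user-space stand-in for the BPF_CMPXCHG semantics, assuming C11 atomics in place of the emitted loop:

	#include <stdatomic.h>
	#include <stdio.h>

	/* What the lr.d / bne / sc.d / bne-back sequence computes: install
	 * new_val only if *addr still equals expected; return the old value. */
	static long bpf_cmpxchg64(_Atomic long *addr, long expected, long new_val)
	{
		long old = expected;

		atomic_compare_exchange_strong(addr, &old, new_val);
		return old;	/* BPF_CMPXCHG leaves the old value in R0 */
	}

	int main(void)
	{
		_Atomic long v = 5;

		printf("%ld -> %ld\n", bpf_cmpxchg64(&v, 5, 9), (long)v);
		return 0;
	}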
569 off_t offset = FIELD_GET(BPF_FIXUP_OFFSET_MASK, ex->fixup); in ex_handler_bpf()
570 int regs_offset = FIELD_GET(BPF_FIXUP_REG_MASK, ex->fixup); in ex_handler_bpf()
574 regs->epc = (unsigned long)&ex->fixup - offset; in ex_handler_bpf()
589 if (!ctx->insns || !ctx->ro_insns || !ctx->prog->aux->extable || in add_exception_handler()
590 (BPF_MODE(insn->code) != BPF_PROBE_MEM && BPF_MODE(insn->code) != BPF_PROBE_MEMSX && in add_exception_handler()
591 BPF_MODE(insn->code) != BPF_PROBE_MEM32)) in add_exception_handler()
594 if (WARN_ON_ONCE(ctx->nexentries >= ctx->prog->aux->num_exentries)) in add_exception_handler()
595 return -EINVAL; in add_exception_handler()
597 if (WARN_ON_ONCE(insn_len > ctx->ninsns)) in add_exception_handler()
598 return -EINVAL; in add_exception_handler()
601 return -EINVAL; in add_exception_handler()
603 ex = &ctx->prog->aux->extable[ctx->nexentries]; in add_exception_handler()
604 pc = (unsigned long)&ctx->ro_insns[ctx->ninsns - insn_len]; in add_exception_handler()
612 ins_offset = pc - (long)&ex->insn; in add_exception_handler()
614 return -ERANGE; in add_exception_handler()
628 fixup_offset = (long)&ex->fixup - (pc + insn_len * sizeof(u16)); in add_exception_handler()
630 return -ERANGE; in add_exception_handler()
637 ex = (void *)ctx->insns + ((void *)ex - (void *)ctx->ro_insns); in add_exception_handler()
639 ex->insn = ins_offset; in add_exception_handler()
641 ex->fixup = FIELD_PREP(BPF_FIXUP_OFFSET_MASK, fixup_offset) | in add_exception_handler()
643 ex->type = EX_TYPE_BPF; in add_exception_handler()
645 ctx->nexentries++; in add_exception_handler()
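Both stored offsets are relative (ins_offset to &ex->insn, fixup_offset to &ex->fixup), which keeps the exception table position-independent; ex_handler_bpf() above undoes the math when a fault fires, zeroing the faulting destination register and resuming past the load. A standalone sketch of the FIELD_PREP/FIELD_GET packing; the real widths behind BPF_FIXUP_OFFSET_MASK and BPF_FIXUP_REG_MASK are kernel-internal, so the 27+5 bit split here is an assumption:

	#include <stdint.h>
	#include <stdio.h>

	#define FIXUP_OFF_BITS	27	/* assumed width of the offset field */

	static uint32_t pack_fixup(int32_t fixup_offset, uint32_t regs_offset)
	{
		return ((uint32_t)fixup_offset & ((1u << FIXUP_OFF_BITS) - 1)) |
		       (regs_offset << FIXUP_OFF_BITS);
	}

	static int32_t unpack_offset(uint32_t fixup)
	{
		/* sign-extend the low FIXUP_OFF_BITS bits */
		return (int32_t)(fixup << (32 - FIXUP_OFF_BITS)) >>
		       (32 - FIXUP_OFF_BITS);
	}

	int main(void)
	{
		uint32_t f = pack_fixup(-2000, 10);

		printf("off=%d reg=%u\n", unpack_offset(f), f >> FIXUP_OFF_BITS);
		return 0;
	}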
663 rvoff = (s64)(target - ip); in gen_jump_or_nops()
676 return -ENOTSUPP; in bpf_arch_text_poke()
683 return -EFAULT; in bpf_arch_text_poke()
705 emit_sd(RV_REG_FP, -args_off, RV_REG_A0 + i, ctx); in store_args()
708 emit_ld(RV_REG_T1, 16 + (i - RV_MAX_REG_ARGS) * 8, RV_REG_FP, ctx); in store_args()
709 emit_sd(RV_REG_FP, -args_off, RV_REG_T1, ctx); in store_args()
711 args_off -= 8; in store_args()
720 emit_ld(RV_REG_A0 + i, -args_off, RV_REG_FP, ctx); in restore_args()
721 args_off -= 8; in restore_args()
731 emit_ld(RV_REG_T1, -(args_off - RV_MAX_REG_ARGS * 8), RV_REG_FP, ctx); in restore_stack_args()
732 emit_sd(RV_REG_FP, -stk_arg_off, RV_REG_T1, ctx); in restore_stack_args()
733 args_off -= 8; in restore_stack_args()
734 stk_arg_off -= 8; in restore_stack_args()
742 struct bpf_prog *p = l->link.prog; in invoke_bpf_prog()
745 if (l->cookie) { in invoke_bpf_prog()
746 emit_imm(RV_REG_T1, l->cookie, ctx); in invoke_bpf_prog()
747 emit_sd(RV_REG_FP, -run_ctx_off + cookie_off, RV_REG_T1, ctx); in invoke_bpf_prog()
749 emit_sd(RV_REG_FP, -run_ctx_off + cookie_off, RV_REG_ZERO, ctx); in invoke_bpf_prog()
755 emit_addi(RV_REG_A1, RV_REG_FP, -run_ctx_off, ctx); in invoke_bpf_prog()
766 branch_off = ctx->ninsns; in invoke_bpf_prog()
771 emit_addi(RV_REG_A0, RV_REG_FP, -args_off, ctx); in invoke_bpf_prog()
772 if (!p->jited) in invoke_bpf_prog()
773 /* arg2: progs[i]->insnsi for interpreter */ in invoke_bpf_prog()
774 emit_imm(RV_REG_A1, (const s64)p->insnsi, ctx); in invoke_bpf_prog()
775 ret = emit_call((const u64)p->bpf_func, true, ctx); in invoke_bpf_prog()
780 emit_sd(RV_REG_FP, -retval_off, RV_REG_A0, ctx); in invoke_bpf_prog()
781 emit_sd(RV_REG_FP, -(retval_off - 8), regmap[BPF_REG_0], ctx); in invoke_bpf_prog()
785 if (ctx->insns) { in invoke_bpf_prog()
786 int offset = ninsns_rvoff(ctx->ninsns - branch_off); in invoke_bpf_prog()
788 *(u32 *)(ctx->insns + branch_off) = insn; in invoke_bpf_prog()
796 emit_addi(RV_REG_A2, RV_REG_FP, -run_ctx_off, ctx); in invoke_bpf_prog()
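The branch_off bookkeeping above is a two-pass backpatch: during the sizing pass ctx->insns is NULL and only ninsns advances; in the emit pass a placeholder is reserved at branch_off and overwritten with a beq once the skip distance over the program invocation is known. A condensed sketch using the file's own helpers (not compilable on its own; the nop reservation is an assumption modeled on gen_jump_or_nops()):

	branch_off = ctx->ninsns;
	emit(rv_nop(), ctx);		/* placeholder for the conditional jump */
	/* ... emit the bpf_func call that may be skipped ... */
	if (ctx->insns) {		/* NULL during the sizing pass */
		int offset = ninsns_rvoff(ctx->ninsns - branch_off);

		/* ctx->insns is u16-based: branch_off indexes 2-byte units */
		*(u32 *)(ctx->insns + branch_off) = rv_beq(RV_REG_A0, RV_REG_ZERO,
							   offset >> 1);
	}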
823 * -------------------------------------- in __arch_prepare_bpf_trampoline()
828 * FP - 8 [ T0 to traced func ] return address of traced in __arch_prepare_bpf_trampoline()
830 * FP - 16 [ FP of traced func ] frame pointer of traced in __arch_prepare_bpf_trampoline()
832 * -------------------------------------- in __arch_prepare_bpf_trampoline()
835 * -------------------------------------- in __arch_prepare_bpf_trampoline()
836 * FP - 8 [ RA to caller func ] return address to caller in __arch_prepare_bpf_trampoline()
838 * FP - 16 [ FP of caller func ] frame pointer of caller in __arch_prepare_bpf_trampoline()
840 * -------------------------------------- in __arch_prepare_bpf_trampoline()
842 * FP - retval_off [ return value ] BPF_TRAMP_F_CALL_ORIG or in __arch_prepare_bpf_trampoline()
846 * FP - args_off [ arg1 ] in __arch_prepare_bpf_trampoline()
848 * FP - nregs_off [ regs count ] in __arch_prepare_bpf_trampoline()
850 * FP - ip_off [ traced func ] BPF_TRAMP_F_IP_ARG in __arch_prepare_bpf_trampoline()
852 * FP - run_ctx_off [ bpf_tramp_run_ctx ] in __arch_prepare_bpf_trampoline()
854 * FP - sreg_off [ callee saved reg ] in __arch_prepare_bpf_trampoline()
860 * FP - stk_arg_off [ stack_arg1 ] BPF_TRAMP_F_CALL_ORIG in __arch_prepare_bpf_trampoline()
864 return -ENOTSUPP; in __arch_prepare_bpf_trampoline()
866 if (m->nr_args > MAX_BPF_FUNC_ARGS) in __arch_prepare_bpf_trampoline()
867 return -ENOTSUPP; in __arch_prepare_bpf_trampoline()
869 for (i = 0; i < m->nr_args; i++) in __arch_prepare_bpf_trampoline()
870 nr_arg_slots += round_up(m->arg_size[i], 8) / 8; in __arch_prepare_bpf_trampoline()
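Each argument is charged in whole 8-byte slots, so a 16-byte struct passed by value costs two slots while scalar arguments cost one each. A standalone sketch of the accounting:

	#include <stdio.h>

	static int nr_arg_slots(const int *arg_size, int nr_args)
	{
		int i, slots = 0;

		for (i = 0; i < nr_args; i++)
			slots += (arg_size[i] + 7) / 8;	/* round_up(size, 8) / 8 */
		return slots;
	}

	int main(void)
	{
		int sizes[] = { 4, 8, 16 };

		printf("%d\n", nr_arg_slots(sizes, 3));	/* prints 4 */
		return 0;
	}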
898 if ((flags & BPF_TRAMP_F_CALL_ORIG) && (nr_arg_slots - RV_MAX_REG_ARGS > 0)) in __arch_prepare_bpf_trampoline()
899 stack_size += (nr_arg_slots - RV_MAX_REG_ARGS) * 8; in __arch_prepare_bpf_trampoline()
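How the offsets in the layout comment accumulate, as a condensed sketch reconstructed around the fragments (all slots are 8 bytes and grow downward from FP; the gating flags follow the annotations in the layout comment):

	stack_size = 16;			/* saved T0/RA plus saved FP  */
	if (save_ret) {				/* CALL_ORIG / RET_FENTRY_RET */
		stack_size += 16;		/* A0 and A5 (BPF R0)         */
		retval_off = stack_size;
	}
	stack_size += nr_arg_slots * 8;
	args_off = stack_size;
	stack_size += 8;
	nregs_off = stack_size;
	if (flags & BPF_TRAMP_F_IP_ARG) {
		stack_size += 8;
		ip_off = stack_size;
	}
	stack_size += round_up(sizeof(struct bpf_tramp_run_ctx), 8);
	run_ctx_off = stack_size;
	stack_size += 8;			/* callee-saved S1            */
	sreg_off = stack_size;
	if ((flags & BPF_TRAMP_F_CALL_ORIG) && nr_arg_slots > RV_MAX_REG_ARGS)
		stack_size += (nr_arg_slots - RV_MAX_REG_ARGS) * 8;
	stack_size = round_up(stack_size, STACK_ALIGN);
	stk_arg_off = stack_size;		/* stack args sit at frame top */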
911 emit_addi(RV_REG_SP, RV_REG_SP, -16, ctx); in __arch_prepare_bpf_trampoline()
916 emit_addi(RV_REG_SP, RV_REG_SP, -stack_size, ctx); in __arch_prepare_bpf_trampoline()
917 emit_sd(RV_REG_SP, stack_size - 8, RV_REG_T0, ctx); in __arch_prepare_bpf_trampoline()
918 emit_sd(RV_REG_SP, stack_size - 16, RV_REG_FP, ctx); in __arch_prepare_bpf_trampoline()
926 emit_addi(RV_REG_SP, RV_REG_SP, -stack_size, ctx); in __arch_prepare_bpf_trampoline()
927 emit_sd(RV_REG_SP, stack_size - 8, RV_REG_RA, ctx); in __arch_prepare_bpf_trampoline()
928 emit_sd(RV_REG_SP, stack_size - 16, RV_REG_FP, ctx); in __arch_prepare_bpf_trampoline()
933 emit_sd(RV_REG_FP, -sreg_off, RV_REG_S1, ctx); in __arch_prepare_bpf_trampoline()
938 emit_sd(RV_REG_FP, -ip_off, RV_REG_T1, ctx); in __arch_prepare_bpf_trampoline()
942 emit_sd(RV_REG_FP, -nregs_off, RV_REG_T1, ctx); in __arch_prepare_bpf_trampoline()
951 emit_imm(RV_REG_A0, ctx->insns ? (const s64)im : RV_MAX_COUNT_IMM, ctx); in __arch_prepare_bpf_trampoline()
957 for (i = 0; i < fentry->nr_links; i++) { in __arch_prepare_bpf_trampoline()
958 ret = invoke_bpf_prog(fentry->links[i], args_off, retval_off, run_ctx_off, in __arch_prepare_bpf_trampoline()
964 if (fmod_ret->nr_links) { in __arch_prepare_bpf_trampoline()
965 branches_off = kcalloc(fmod_ret->nr_links, sizeof(int), GFP_KERNEL); in __arch_prepare_bpf_trampoline()
967 return -ENOMEM; in __arch_prepare_bpf_trampoline()
970 emit_sd(RV_REG_FP, -retval_off, RV_REG_ZERO, ctx); in __arch_prepare_bpf_trampoline()
971 for (i = 0; i < fmod_ret->nr_links; i++) { in __arch_prepare_bpf_trampoline()
972 ret = invoke_bpf_prog(fmod_ret->links[i], args_off, retval_off, in __arch_prepare_bpf_trampoline()
976 emit_ld(RV_REG_T1, -retval_off, RV_REG_FP, ctx); in __arch_prepare_bpf_trampoline()
977 branches_off[i] = ctx->ninsns; in __arch_prepare_bpf_trampoline()
985 restore_stack_args(nr_arg_slots - RV_MAX_REG_ARGS, args_off, stk_arg_off, ctx); in __arch_prepare_bpf_trampoline()
989 emit_sd(RV_REG_FP, -retval_off, RV_REG_A0, ctx); in __arch_prepare_bpf_trampoline()
990 emit_sd(RV_REG_FP, -(retval_off - 8), regmap[BPF_REG_0], ctx); in __arch_prepare_bpf_trampoline()
991 im->ip_after_call = ctx->ro_insns + ctx->ninsns; in __arch_prepare_bpf_trampoline()
998 for (i = 0; ctx->insns && i < fmod_ret->nr_links; i++) { in __arch_prepare_bpf_trampoline()
999 offset = ninsns_rvoff(ctx->ninsns - branches_off[i]); in __arch_prepare_bpf_trampoline()
1001 *(u32 *)(ctx->insns + branches_off[i]) = insn; in __arch_prepare_bpf_trampoline()
1004 for (i = 0; i < fexit->nr_links; i++) { in __arch_prepare_bpf_trampoline()
1005 ret = invoke_bpf_prog(fexit->links[i], args_off, retval_off, in __arch_prepare_bpf_trampoline()
1012 im->ip_epilogue = ctx->ro_insns + ctx->ninsns; in __arch_prepare_bpf_trampoline()
1013 emit_imm(RV_REG_A0, ctx->insns ? (const s64)im : RV_MAX_COUNT_IMM, ctx); in __arch_prepare_bpf_trampoline()
1023 emit_ld(RV_REG_A0, -retval_off, RV_REG_FP, ctx); in __arch_prepare_bpf_trampoline()
1024 emit_ld(regmap[BPF_REG_0], -(retval_off - 8), RV_REG_FP, ctx); in __arch_prepare_bpf_trampoline()
1027 emit_ld(RV_REG_S1, -sreg_off, RV_REG_FP, ctx); in __arch_prepare_bpf_trampoline()
1031 emit_ld(RV_REG_T0, stack_size - 8, RV_REG_SP, ctx); in __arch_prepare_bpf_trampoline()
1032 emit_ld(RV_REG_FP, stack_size - 16, RV_REG_SP, ctx); in __arch_prepare_bpf_trampoline()
1047 emit_ld(RV_REG_RA, stack_size - 8, RV_REG_SP, ctx); in __arch_prepare_bpf_trampoline()
1048 emit_ld(RV_REG_FP, stack_size - 16, RV_REG_SP, ctx); in __arch_prepare_bpf_trampoline()
1054 ret = ctx->ninsns; in __arch_prepare_bpf_trampoline()
1093 u32 size = ro_image_end - ro_image; in arch_prepare_bpf_trampoline()
1097 return -ENOMEM; in arch_prepare_bpf_trampoline()
1107 ret = -E2BIG; in arch_prepare_bpf_trampoline()
1126 bool is64 = BPF_CLASS(insn->code) == BPF_ALU64 || in bpf_jit_emit_insn()
1127 BPF_CLASS(insn->code) == BPF_JMP; in bpf_jit_emit_insn()
1128 int s, e, rvoff, ret, i = insn - ctx->prog->insnsi; in bpf_jit_emit_insn()
1129 struct bpf_prog_aux *aux = ctx->prog->aux; in bpf_jit_emit_insn()
1130 u8 rd = -1, rs = -1, code = insn->code; in bpf_jit_emit_insn() local
1131 s16 off = insn->off; in bpf_jit_emit_insn()
1132 s32 imm = insn->imm; in bpf_jit_emit_insn()
1134 init_regs(&rd, &rs, insn, ctx); in bpf_jit_emit_insn()
1141 emit_mv(RV_REG_T1, rs, ctx); in bpf_jit_emit_insn()
1143 emit_imm(rd, (ctx->user_vm_start >> 32) << 32, ctx); in bpf_jit_emit_insn()
1149 if (rd != rs) in bpf_jit_emit_insn()
1150 emit_mv(rd, rs, ctx); in bpf_jit_emit_insn()
1170 switch (insn->off) { in bpf_jit_emit_insn()
1172 emit_mv(rd, rs, ctx); in bpf_jit_emit_insn()
1175 emit_sextb(rd, rs, ctx); in bpf_jit_emit_insn()
1178 emit_sexth(rd, rs, ctx); in bpf_jit_emit_insn()
1181 emit_sextw(rd, rs, ctx); in bpf_jit_emit_insn()
1184 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1191 emit_add(rd, rd, rs, ctx); in bpf_jit_emit_insn()
1192 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1198 emit_sub(rd, rd, rs, ctx); in bpf_jit_emit_insn()
1200 emit_subw(rd, rd, rs, ctx); in bpf_jit_emit_insn()
1202 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1207 emit_and(rd, rd, rs, ctx); in bpf_jit_emit_insn()
1208 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1213 emit_or(rd, rd, rs, ctx); in bpf_jit_emit_insn()
1214 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1219 emit_xor(rd, rd, rs, ctx); in bpf_jit_emit_insn()
1220 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1225 emit(is64 ? rv_mul(rd, rd, rs) : rv_mulw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1226 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1232 emit(is64 ? rv_div(rd, rd, rs) : rv_divw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1234 emit(is64 ? rv_divu(rd, rd, rs) : rv_divuw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1235 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1241 emit(is64 ? rv_rem(rd, rd, rs) : rv_remw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1243 emit(is64 ? rv_remu(rd, rd, rs) : rv_remuw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1244 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1249 emit(is64 ? rv_sll(rd, rd, rs) : rv_sllw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1250 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1255 emit(is64 ? rv_srl(rd, rd, rs) : rv_srlw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1256 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1261 emit(is64 ? rv_sra(rd, rd, rs) : rv_sraw(rd, rd, rs), ctx); in bpf_jit_emit_insn()
1262 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1266 /* dst = -dst */ in bpf_jit_emit_insn()
1270 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1281 if (!aux->verifier_zext) in bpf_jit_emit_insn()
1298 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1311 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1316 if (is_12b_int(-imm)) { in bpf_jit_emit_insn()
1317 emit_addi(rd, rd, -imm, ctx); in bpf_jit_emit_insn()
1322 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1333 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1344 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1355 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1363 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1375 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1387 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1394 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1404 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1414 if (!is64 && !aux->verifier_zext) in bpf_jit_emit_insn()
1455 s = ctx->ninsns; in bpf_jit_emit_insn()
1457 emit_sextw_alt(&rs, RV_REG_T1, ctx); in bpf_jit_emit_insn()
1460 emit_zextw_alt(&rs, RV_REG_T1, ctx); in bpf_jit_emit_insn()
1463 e = ctx->ninsns; in bpf_jit_emit_insn()
1466 rvoff -= ninsns_rvoff(e - s); in bpf_jit_emit_insn()
1471 rvoff -= 4; in bpf_jit_emit_insn()
1472 emit_and(RV_REG_T1, rd, rs, ctx); in bpf_jit_emit_insn()
1475 emit_branch(BPF_OP(code), rd, rs, rvoff, ctx); in bpf_jit_emit_insn()
1501 s = ctx->ninsns; in bpf_jit_emit_insn()
1504 rs = imm ? RV_REG_T1 : RV_REG_ZERO; in bpf_jit_emit_insn()
1508 /* rs has been sign extended */ in bpf_jit_emit_insn()
1512 emit_zextw(rs, rs, ctx); in bpf_jit_emit_insn()
1515 e = ctx->ninsns; in bpf_jit_emit_insn()
1518 rvoff -= ninsns_rvoff(e - s); in bpf_jit_emit_insn()
1519 emit_branch(BPF_OP(code), rd, rs, rvoff, ctx); in bpf_jit_emit_insn()
1525 s = ctx->ninsns; in bpf_jit_emit_insn()
1533 * sign-extension is sufficient here and saves one instruction, in bpf_jit_emit_insn()
1538 e = ctx->ninsns; in bpf_jit_emit_insn()
1539 rvoff -= ninsns_rvoff(e - s); in bpf_jit_emit_insn()
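The recurring s = ctx->ninsns ... e = ctx->ninsns bracket exists because the BPF-level jump offset was computed relative to the first JITed instruction of this insn: any setup emitted before the branch (immediate load, sign or zero extension) shortens the remaining distance, and ninsns counts 2-byte units that ninsns_rvoff() doubles into bytes. Schematically, using the file's own helpers:

	s = ctx->ninsns;		/* before setup for this BPF insn  */
	emit_imm(RV_REG_T1, imm, ctx);	/* e.g. materialize the comparand  */
	emit_zextw(rs, rs, ctx);	/* e.g. normalize an unsigned test */
	e = ctx->ninsns;		/* after setup                     */

	rvoff -= ninsns_rvoff(e - s);	/* branch sits (e - s) units later */
	emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);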
1554 * Set R0 to ((struct thread_info *)(RV_REG_TP))->cpu in bpf_jit_emit_insn()
1558 if (insn->src_reg == 0 && insn->imm == BPF_FUNC_get_smp_processor_id) { in bpf_jit_emit_insn()
1566 ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, in bpf_jit_emit_insn()
1571 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in bpf_jit_emit_insn()
1575 fm = bpf_jit_find_kfunc_model(ctx->prog, insn); in bpf_jit_emit_insn()
1577 return -EINVAL; in bpf_jit_emit_insn()
1579 for (idx = 0; idx < fm->nr_args; idx++) { in bpf_jit_emit_insn()
1582 if (fm->arg_size[idx] == sizeof(int)) in bpf_jit_emit_insn()
1591 if (insn->src_reg != BPF_PSEUDO_CALL) in bpf_jit_emit_insn()
1598 return -1; in bpf_jit_emit_insn()
1603 if (i == ctx->prog->len - 1) in bpf_jit_emit_insn()
1620 /* fixed-length insns for extra jit pass */ in bpf_jit_emit_insn()
1656 sign_ext = BPF_MODE(insn->code) == BPF_MEMSX || in bpf_jit_emit_insn()
1657 BPF_MODE(insn->code) == BPF_PROBE_MEMSX; in bpf_jit_emit_insn()
1659 if (BPF_MODE(insn->code) == BPF_PROBE_MEM32) { in bpf_jit_emit_insn()
1660 emit_add(RV_REG_T2, rs, RV_REG_ARENA, ctx); in bpf_jit_emit_insn()
1661 rs = RV_REG_T2; in bpf_jit_emit_insn()
1667 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1669 emit(rv_lb(rd, off, rs), ctx); in bpf_jit_emit_insn()
1671 emit(rv_lbu(rd, off, rs), ctx); in bpf_jit_emit_insn()
1672 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1677 emit_add(RV_REG_T1, RV_REG_T1, rs, ctx); in bpf_jit_emit_insn()
1678 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1683 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1687 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1689 emit(rv_lh(rd, off, rs), ctx); in bpf_jit_emit_insn()
1691 emit(rv_lhu(rd, off, rs), ctx); in bpf_jit_emit_insn()
1692 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1697 emit_add(RV_REG_T1, RV_REG_T1, rs, ctx); in bpf_jit_emit_insn()
1698 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1703 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1707 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1709 emit(rv_lw(rd, off, rs), ctx); in bpf_jit_emit_insn()
1711 emit(rv_lwu(rd, off, rs), ctx); in bpf_jit_emit_insn()
1712 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1717 emit_add(RV_REG_T1, RV_REG_T1, rs, ctx); in bpf_jit_emit_insn()
1718 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1723 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1727 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1728 emit_ld(rd, off, rs, ctx); in bpf_jit_emit_insn()
1729 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1734 emit_add(RV_REG_T1, RV_REG_T1, rs, ctx); in bpf_jit_emit_insn()
1735 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1737 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1816 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1818 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1824 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1826 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1830 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1832 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1838 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1840 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1844 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1846 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1852 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1854 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1858 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1860 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1866 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1868 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1883 emit(rv_sb(rd, off, rs), ctx); in bpf_jit_emit_insn()
1889 emit(rv_sb(RV_REG_T1, 0, rs), ctx); in bpf_jit_emit_insn()
1893 emit(rv_sh(rd, off, rs), ctx); in bpf_jit_emit_insn()
1899 emit(rv_sh(RV_REG_T1, 0, rs), ctx); in bpf_jit_emit_insn()
1903 emit_sw(rd, off, rs, ctx); in bpf_jit_emit_insn()
1909 emit_sw(RV_REG_T1, 0, rs, ctx); in bpf_jit_emit_insn()
1913 emit_sd(rd, off, rs, ctx); in bpf_jit_emit_insn()
1919 emit_sd(RV_REG_T1, 0, rs, ctx); in bpf_jit_emit_insn()
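Every store above follows the same displacement discipline: a signed 12-bit off fits the S-type immediate directly, while a wider offset is first materialized into T1 and folded into the base register, after which the store uses displacement 0. Condensed from the fragments:

	if (is_12b_int(off)) {			/* fits the S-type immediate  */
		emit_sd(rd, off, rs, ctx);
	} else {				/* wide offset: T1 = off + rd */
		emit_imm(RV_REG_T1, off, ctx);
		emit_add(RV_REG_T1, RV_REG_T1, rd, ctx);
		emit_sd(RV_REG_T1, 0, rs, ctx);
	}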
1923 emit_atomic(rd, rs, off, imm, in bpf_jit_emit_insn()
1940 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1941 emit(rv_sb(rd, off, rs), ctx); in bpf_jit_emit_insn()
1942 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1948 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1949 emit(rv_sb(RV_REG_T1, 0, rs), ctx); in bpf_jit_emit_insn()
1950 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1954 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1955 emit(rv_sh(rd, off, rs), ctx); in bpf_jit_emit_insn()
1956 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1962 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1963 emit(rv_sh(RV_REG_T1, 0, rs), ctx); in bpf_jit_emit_insn()
1964 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1968 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1969 emit_sw(rd, off, rs, ctx); in bpf_jit_emit_insn()
1970 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1976 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1977 emit_sw(RV_REG_T1, 0, rs, ctx); in bpf_jit_emit_insn()
1978 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1982 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1983 emit_sd(rd, off, rs, ctx); in bpf_jit_emit_insn()
1984 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
1990 insns_start = ctx->ninsns; in bpf_jit_emit_insn()
1991 emit_sd(RV_REG_T1, 0, rs, ctx); in bpf_jit_emit_insn()
1992 insn_len = ctx->ninsns - insns_start; in bpf_jit_emit_insn()
2005 pr_err("bpf-jit: unknown opcode %02x\n", code); in bpf_jit_emit_insn()
2006 return -EINVAL; in bpf_jit_emit_insn()
2016 bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, STACK_ALIGN); in bpf_jit_build_prologue()
2035 if (ctx->arena_vm_start) in bpf_jit_build_prologue()
2041 store_offset = stack_adjust - 8; in bpf_jit_build_prologue()
2050 /* First instruction is always setting the tail-call-counter in bpf_jit_build_prologue()
2052 * Force using a 4-byte (non-compressed) instruction. in bpf_jit_build_prologue()
2056 emit_addi(RV_REG_SP, RV_REG_SP, -stack_adjust, ctx); in bpf_jit_build_prologue()
2060 store_offset -= 8; in bpf_jit_build_prologue()
2063 store_offset -= 8; in bpf_jit_build_prologue()
2066 store_offset -= 8; in bpf_jit_build_prologue()
2070 store_offset -= 8; in bpf_jit_build_prologue()
2074 store_offset -= 8; in bpf_jit_build_prologue()
2078 store_offset -= 8; in bpf_jit_build_prologue()
2082 store_offset -= 8; in bpf_jit_build_prologue()
2086 store_offset -= 8; in bpf_jit_build_prologue()
2088 if (ctx->arena_vm_start) { in bpf_jit_build_prologue()
2090 store_offset -= 8; in bpf_jit_build_prologue()
2104 ctx->stack_size = stack_adjust; in bpf_jit_build_prologue()
2106 if (ctx->arena_vm_start) in bpf_jit_build_prologue()
2107 emit_imm(RV_REG_ARENA, ctx->arena_vm_start, ctx); in bpf_jit_build_prologue()
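The prologue mirrors the __build_epilogue() fragments near the top of the listing: after the TCC-setting first instruction and the stack adjustment, RA and each S-register get an 8-byte slot only if seen_reg() says the program uses them, FP is always saved, and the arena base register gets a slot when ctx->arena_vm_start is set. A condensed sketch of the store ladder (per-register gating reconstructed from the fragments; RV_REG_ARENA is the S-register the JIT reserves for the arena base):

	store_offset = stack_adjust - 8;
	if (seen_reg(RV_REG_RA, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_RA, ctx);
		store_offset -= 8;
	}
	emit_sd(RV_REG_SP, store_offset, RV_REG_FP, ctx);	/* always saved */
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S1, ctx);
		store_offset -= 8;
	}
	/* ... S2 through S6 follow the same pattern ... */
	if (ctx->arena_vm_start) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_ARENA, ctx);
		store_offset -= 8;
	}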