Lines matching refs: src_reg

260 insn->src_reg == 0; in bpf_helper_call()
266 insn->src_reg == BPF_PSEUDO_CALL; in bpf_pseudo_call()
272 insn->src_reg == BPF_PSEUDO_KFUNC_CALL; in bpf_pseudo_kfunc_call()
566 return insn->code == (BPF_JMP | BPF_JCOND) && insn->src_reg == BPF_MAY_GOTO; in is_may_goto_insn()
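These first hits are the call-classification predicates: for BPF_CALL the opcode alone cannot distinguish a helper call, a bpf-to-bpf call, and a kfunc call, so src_reg carries a tag, and may_goto reuses src_reg as a sub-opcode of the BPF_JCOND class. A minimal userspace sketch; the macro values mirror include/uapi/linux/bpf.h (an assumption worth re-checking there), and the struct is a simplified stand-in for struct bpf_insn:

    #include <stdbool.h>
    #include <stdint.h>

    #define BPF_JMP   0x05
    #define BPF_CALL  0x80
    #define BPF_JCOND 0xe0
    #define BPF_PSEUDO_CALL       1   /* call of another BPF subprogram */
    #define BPF_PSEUDO_KFUNC_CALL 2   /* call of a kernel function      */
    #define BPF_MAY_GOTO          0

    struct insn { uint8_t code, dst_reg, src_reg; int16_t off; int32_t imm; };

    static bool is_helper_call(const struct insn *i)
    {
        return i->code == (BPF_JMP | BPF_CALL) && i->src_reg == 0;
    }

    static bool is_pseudo_call(const struct insn *i)
    {
        return i->code == (BPF_JMP | BPF_CALL) && i->src_reg == BPF_PSEUDO_CALL;
    }

    static bool is_kfunc_call(const struct insn *i)
    {
        return i->code == (BPF_JMP | BPF_CALL) && i->src_reg == BPF_PSEUDO_KFUNC_CALL;
    }

    static bool is_may_goto(const struct insn *i)
    {
        return i->code == (BPF_JMP | BPF_JCOND) && i->src_reg == BPF_MAY_GOTO;
    }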
3548 insn[i].src_reg == 0 && in check_subprogs()
3664 if (insn->src_reg == BPF_PSEUDO_CALL) in is_reg64()
3746 return insn->src_reg; in insn_def_regno()
4023 if (insn->src_reg != BPF_PSEUDO_KFUNC_CALL) in disasm_kfunc_name()
4246 u32 sreg = insn->src_reg; in backtrack_insn()
4433 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && insn->imm == 0) in backtrack_insn()
4445 if (insn->src_reg == BPF_REG_0 && insn->imm == BPF_FUNC_tail_call in backtrack_insn()
4997 struct bpf_reg_state *src_reg) in assign_scalar_id_before_mov() argument
4999 if (src_reg->type != SCALAR_VALUE) in assign_scalar_id_before_mov()
5002 if (src_reg->id & BPF_ADD_CONST) { in assign_scalar_id_before_mov()
5008 src_reg->id = 0; in assign_scalar_id_before_mov()
5009 src_reg->off = 0; in assign_scalar_id_before_mov()
5012 if (!src_reg->id && !tnum_is_const(src_reg->var_off)) in assign_scalar_id_before_mov()
5017 src_reg->id = ++env->id_gen; in assign_scalar_id_before_mov()
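assign_scalar_id_before_mov() gives the source scalar an ID right before a register-to-register move, so the two copies stay linked for later bound propagation; constants are exempt because there is nothing to propagate between copies whose value is already exact. A condensed model with simplified stand-in types (the BPF_ADD_CONST handling above additionally drops a stale constant-offset link before the ID test):

    #include <stdbool.h>
    #include <stdint.h>

    #define SCALAR_VALUE 1

    struct reg_state {
        int      type;       /* SCALAR_VALUE, ...                       */
        uint32_t id;         /* 0 = not linked to any other register    */
        bool     is_const;   /* stands in for tnum_is_const(var_off)    */
    };

    static uint32_t id_gen;  /* stands in for env->id_gen */

    static void assign_scalar_id_before_mov(struct reg_state *src)
    {
        if (src->type != SCALAR_VALUE)
            return;
        /* Constants need no ID: nothing to propagate later. */
        if (!src->id && !src->is_const)
            src->id = ++id_gen;
    }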
7807 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_load_mem()
7816 src_reg_type = regs[insn->src_reg].type; in check_load_mem()
7821 err = check_mem_access(env, env->insn_idx, insn->src_reg, insn->off, in check_load_mem()
7839 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_store_reg()
7852 BPF_SIZE(insn->code), BPF_WRITE, insn->src_reg, in check_store_reg()
7871 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_atomic_rmw()
7894 if (is_pointer_value(env, insn->src_reg)) { in check_atomic_rmw()
7895 verbose(env, "R%d leaks addr into mem\n", insn->src_reg); in check_atomic_rmw()
7910 load_reg = insn->src_reg; in check_atomic_rmw()
7957 if (!atomic_ptr_type_ok(env, insn->src_reg, insn)) { in check_atomic_load()
7959 insn->src_reg, in check_atomic_load()
7960 reg_type_str(env, reg_state(env, insn->src_reg)->type)); in check_atomic_load()
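The check_atomic_rmw() hits include the pointer-leak gate: for unprivileged programs, a pointer-typed src_reg must not reach memory where its bits become observable ("R%d leaks addr into mem"). A condensed model of is_pointer_value() under a hypothetical two-boolean signature (the real helper takes the verifier env and a register number, and allow_ptr_leaks comes from the loader's capabilities):

    #include <stdbool.h>

    static bool is_pointer_value(bool allow_ptr_leaks, bool reg_is_pointer)
    {
        return reg_is_pointer && !allow_ptr_leaks;
    }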
14430 mark_reg_unknown(env, regs, insn->src_reg); in sanitize_speculative_path()
14552 u32 dst = insn->dst_reg, src = insn->src_reg; in sanitize_err()
14898 struct bpf_reg_state *src_reg) in scalar32_min_max_add() argument
14904 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_add()
14905 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_add()
14908 if (check_add_overflow(*dst_smin, src_reg->s32_min_value, dst_smin) || in scalar32_min_max_add()
14909 check_add_overflow(*dst_smax, src_reg->s32_max_value, dst_smax)) { in scalar32_min_max_add()
14929 struct bpf_reg_state *src_reg) in scalar_min_max_add() argument
14935 u64 umin_val = src_reg->umin_value; in scalar_min_max_add()
14936 u64 umax_val = src_reg->umax_value; in scalar_min_max_add()
14939 if (check_add_overflow(*dst_smin, src_reg->smin_value, dst_smin) || in scalar_min_max_add()
14940 check_add_overflow(*dst_smax, src_reg->smax_value, dst_smax)) { in scalar_min_max_add()
14960 struct bpf_reg_state *src_reg) in scalar32_min_max_sub() argument
14966 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_sub()
14967 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_sub()
14970 if (check_sub_overflow(*dst_smin, src_reg->s32_max_value, dst_smin) || in scalar32_min_max_sub()
14971 check_sub_overflow(*dst_smax, src_reg->s32_min_value, dst_smax)) { in scalar32_min_max_sub()
14992 struct bpf_reg_state *src_reg) in scalar_min_max_sub() argument
14998 u64 umin_val = src_reg->umin_value; in scalar_min_max_sub()
14999 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
15002 if (check_sub_overflow(*dst_smin, src_reg->smax_value, dst_smin) || in scalar_min_max_sub()
15003 check_sub_overflow(*dst_smax, src_reg->smin_value, dst_smax)) { in scalar_min_max_sub()
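The four helpers above implement interval arithmetic with checked operations: on any endpoint overflow the affected bounds collapse to "unknown". Note the cross-pairing for subtraction: the new minimum is dst_min - src_max and the new maximum is dst_max - src_min, because subtracting the largest source gives the smallest result. A userspace sketch of the 64-bit cases, using the compiler builtins that the kernel's check_add_overflow()/check_sub_overflow() wrap:

    #include <stdint.h>

    struct bounds {
        int64_t  smin, smax;    /* signed range   */
        uint64_t umin, umax;    /* unsigned range */
    };

    static void bounds_add(struct bounds *dst, const struct bounds *src)
    {
        if (__builtin_add_overflow(dst->smin, src->smin, &dst->smin) ||
            __builtin_add_overflow(dst->smax, src->smax, &dst->smax)) {
            dst->smin = INT64_MIN;      /* signed range is now unknown */
            dst->smax = INT64_MAX;
        }
        if (__builtin_add_overflow(dst->umin, src->umin, &dst->umin) ||
            __builtin_add_overflow(dst->umax, src->umax, &dst->umax)) {
            dst->umin = 0;              /* unsigned range is now unknown */
            dst->umax = UINT64_MAX;
        }
    }

    static void bounds_sub(struct bounds *dst, const struct bounds *src)
    {
        /* Endpoints cross over for subtraction. */
        if (__builtin_sub_overflow(dst->smin, src->smax, &dst->smin) ||
            __builtin_sub_overflow(dst->smax, src->smin, &dst->smax)) {
            dst->smin = INT64_MIN;
            dst->smax = INT64_MAX;
        }
        if (dst->umin < src->umax) {    /* unsigned underflow possible */
            dst->umin = 0;
            dst->umax = UINT64_MAX;
        } else {
            dst->umin -= src->umax;
            dst->umax -= src->umin;
        }
    }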
15024 struct bpf_reg_state *src_reg) in scalar32_min_max_mul() argument
15032 if (check_mul_overflow(*dst_umax, src_reg->u32_max_value, dst_umax) || in scalar32_min_max_mul()
15033 check_mul_overflow(*dst_umin, src_reg->u32_min_value, dst_umin)) { in scalar32_min_max_mul()
15038 if (check_mul_overflow(*dst_smin, src_reg->s32_min_value, &tmp_prod[0]) || in scalar32_min_max_mul()
15039 check_mul_overflow(*dst_smin, src_reg->s32_max_value, &tmp_prod[1]) || in scalar32_min_max_mul()
15040 check_mul_overflow(*dst_smax, src_reg->s32_min_value, &tmp_prod[2]) || in scalar32_min_max_mul()
15041 check_mul_overflow(*dst_smax, src_reg->s32_max_value, &tmp_prod[3])) { in scalar32_min_max_mul()
15052 struct bpf_reg_state *src_reg) in scalar_min_max_mul() argument
15060 if (check_mul_overflow(*dst_umax, src_reg->umax_value, dst_umax) || in scalar_min_max_mul()
15061 check_mul_overflow(*dst_umin, src_reg->umin_value, dst_umin)) { in scalar_min_max_mul()
15066 if (check_mul_overflow(*dst_smin, src_reg->smin_value, &tmp_prod[0]) || in scalar_min_max_mul()
15067 check_mul_overflow(*dst_smin, src_reg->smax_value, &tmp_prod[1]) || in scalar_min_max_mul()
15068 check_mul_overflow(*dst_smax, src_reg->smin_value, &tmp_prod[2]) || in scalar_min_max_mul()
15069 check_mul_overflow(*dst_smax, src_reg->smax_value, &tmp_prod[3])) { in scalar_min_max_mul()
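Multiplication needs all four products of the signed endpoints, because the sign combinations make any of them a candidate extreme; a single overflow forces the signed range to unknown. A sketch of the 64-bit signed case, reusing the simplified struct bounds from the previous sketch:

    static void bounds_mul_signed(struct bounds *dst, const struct bounds *src)
    {
        int64_t p[4];

        if (__builtin_mul_overflow(dst->smin, src->smin, &p[0]) ||
            __builtin_mul_overflow(dst->smin, src->smax, &p[1]) ||
            __builtin_mul_overflow(dst->smax, src->smin, &p[2]) ||
            __builtin_mul_overflow(dst->smax, src->smax, &p[3])) {
            dst->smin = INT64_MIN;
            dst->smax = INT64_MAX;
            return;
        }
        /* The true bounds are the min and max over all four products. */
        dst->smin = dst->smax = p[0];
        for (int i = 1; i < 4; i++) {
            if (p[i] < dst->smin) dst->smin = p[i];
            if (p[i] > dst->smax) dst->smax = p[i];
        }
    }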
15080 struct bpf_reg_state *src_reg) in scalar32_min_max_and() argument
15082 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_and()
15085 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_and()
15111 struct bpf_reg_state *src_reg) in scalar_min_max_and() argument
15113 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_and()
15115 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
15143 struct bpf_reg_state *src_reg) in scalar32_min_max_or() argument
15145 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_or()
15148 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_or()
15174 struct bpf_reg_state *src_reg) in scalar_min_max_or() argument
15176 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_or()
15178 u64 umin_val = src_reg->umin_value; in scalar_min_max_or()
15206 struct bpf_reg_state *src_reg) in scalar32_min_max_xor() argument
15208 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_xor()
15234 struct bpf_reg_state *src_reg) in scalar_min_max_xor() argument
15236 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_xor()
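For the bitwise ops the heavy lifting is done by tnums, and the src_known tests above short-circuit to exact arithmetic when the source is fully known. The range side is monotone reasoning: AND can only clear bits, OR can only set them. A deliberately coarser sketch than the verifier (which tightens these bounds further via var_off), again on the simplified struct bounds:

    static void bounds_and(struct bounds *dst, const struct bounds *src)
    {
        /* x & y <= min(x, y) unsigned, so the max can only shrink;
         * without bit-level knowledge the min drops to 0. */
        if (dst->umax > src->umax)
            dst->umax = src->umax;
        dst->umin = 0;
    }

    static void bounds_or(struct bounds *dst, const struct bounds *src)
    {
        /* x | y >= max(x, y), so the min can only grow. For the max,
         * round umax_dst | umax_src up to an all-ones mask: the result
         * can set no bit above the highest bit of either operand. */
        uint64_t m = dst->umax | src->umax;

        m |= m >> 1;  m |= m >> 2;  m |= m >> 4;
        m |= m >> 8;  m |= m >> 16; m |= m >> 32;
        if (dst->umin < src->umin)
            dst->umin = src->umin;
        dst->umax = m;
    }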
15282 struct bpf_reg_state *src_reg) in scalar32_min_max_lsh() argument
15284 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_lsh()
15285 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_lsh()
15330 struct bpf_reg_state *src_reg) in scalar_min_max_lsh() argument
15332 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
15333 u64 umin_val = src_reg->umin_value; in scalar_min_max_lsh()
15345 struct bpf_reg_state *src_reg) in scalar32_min_max_rsh() argument
15348 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_rsh()
15349 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_rsh()
15377 struct bpf_reg_state *src_reg) in scalar_min_max_rsh() argument
15379 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
15380 u64 umin_val = src_reg->umin_value; in scalar_min_max_rsh()
15411 struct bpf_reg_state *src_reg) in scalar32_min_max_arsh() argument
15413 u64 umin_val = src_reg->u32_min_value; in scalar32_min_max_arsh()
15434 struct bpf_reg_state *src_reg) in scalar_min_max_arsh() argument
15436 u64 umin_val = src_reg->umin_value; in scalar_min_max_arsh()
15461 const struct bpf_reg_state *src_reg) in is_safe_to_compute_dst_reg_range() argument
15467 if (tnum_subreg_is_const(src_reg->var_off) in is_safe_to_compute_dst_reg_range()
15468 && src_reg->s32_min_value == src_reg->s32_max_value in is_safe_to_compute_dst_reg_range()
15469 && src_reg->u32_min_value == src_reg->u32_max_value) in is_safe_to_compute_dst_reg_range()
15472 if (tnum_is_const(src_reg->var_off) in is_safe_to_compute_dst_reg_range()
15473 && src_reg->smin_value == src_reg->smax_value in is_safe_to_compute_dst_reg_range()
15474 && src_reg->umin_value == src_reg->umax_value) in is_safe_to_compute_dst_reg_range()
15495 return (src_is_const && src_reg->umax_value < insn_bitness); in is_safe_to_compute_dst_reg_range()
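is_safe_to_compute_dst_reg_range() is what keeps the shift helpers above honest: add/sub/mul/and/or/xor ranges are always computable, but a shift by a count >= the operand width is undefined for BPF, so shift ranges are computed only when the count is a known constant below the width (the subreg clauses handle the 32-bit ALU class). The decisive test, restated with min==max standing in for the tnum constness check:

    #include <stdbool.h>
    #include <stdint.h>

    /* insn_bitness is 32 for BPF_ALU and 64 for BPF_ALU64. */
    static bool shift_range_computable(uint64_t src_umin, uint64_t src_umax,
                                       int insn_bitness)
    {
        bool src_is_const = (src_umin == src_umax);

        return src_is_const && src_umax < (uint64_t)insn_bitness;
    }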
15508 struct bpf_reg_state src_reg) in adjust_scalar_min_max_vals() argument
15514 if (!is_safe_to_compute_dst_reg_range(insn, &src_reg)) { in adjust_scalar_min_max_vals()
15541 scalar32_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15542 scalar_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15543 dst_reg->var_off = tnum_add(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15546 scalar32_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15547 scalar_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15548 dst_reg->var_off = tnum_sub(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15558 dst_reg->var_off = tnum_mul(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15559 scalar32_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15560 scalar_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15563 dst_reg->var_off = tnum_and(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15564 scalar32_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15565 scalar_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15568 dst_reg->var_off = tnum_or(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15569 scalar32_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15570 scalar_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15573 dst_reg->var_off = tnum_xor(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15574 scalar32_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15575 scalar_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15579 scalar32_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15581 scalar_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15585 scalar32_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15587 scalar_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15591 scalar32_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15593 scalar_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
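adjust_scalar_min_max_vals() keeps two abstractions in sync per ALU op: the min/max ranges updated by the helpers above, and the tnum (var_off), which tracks which individual bits are known. For reference, the kernel's tnum addition (kernel/bpf/tnum.c) propagates unknown bits through possible carries; a close paraphrase, worth checking against the source:

    #include <stdint.h>

    struct tnum {
        uint64_t value;   /* values of the known bits      */
        uint64_t mask;    /* 1 = this bit is unknown       */
    };

    static struct tnum tnum_add(struct tnum a, struct tnum b)
    {
        uint64_t sm    = a.mask + b.mask;        /* worst-case carry spread */
        uint64_t sv    = a.value + b.value;      /* sum of the known parts  */
        uint64_t sigma = sm + sv;
        uint64_t chi   = sigma ^ sv;             /* bits a carry may touch  */
        uint64_t mu    = chi | a.mask | b.mask;  /* all possibly-unknown    */

        return (struct tnum){ .value = sv & ~mu, .mask = mu };
    }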
15614 struct bpf_reg_state *regs = state->regs, *dst_reg, *src_reg; in adjust_reg_min_max_vals() local
15621 src_reg = NULL; in adjust_reg_min_max_vals()
15641 src_reg = &regs[insn->src_reg]; in adjust_reg_min_max_vals()
15642 if (src_reg->type != SCALAR_VALUE) { in adjust_reg_min_max_vals()
15665 src_reg, dst_reg); in adjust_reg_min_max_vals()
15669 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
15673 dst_reg, src_reg); in adjust_reg_min_max_vals()
15676 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
15686 src_reg = &off_reg; in adjust_reg_min_max_vals()
15689 ptr_reg, src_reg); in adjust_reg_min_max_vals()
15698 if (WARN_ON(!src_reg)) { in adjust_reg_min_max_vals()
15703 err = adjust_scalar_min_max_vals(env, insn, dst_reg, *src_reg); in adjust_reg_min_max_vals()
15717 dst_reg->id && is_reg_const(src_reg, false)) { in adjust_reg_min_max_vals()
15718 u64 val = reg_const_value(src_reg, false); in adjust_reg_min_max_vals()
15753 insn->src_reg != BPF_REG_0 || in check_alu_op()
15759 if (insn->src_reg != BPF_REG_0 || insn->off != 0 || in check_alu_op()
15819 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
15823 if (insn->src_reg != BPF_REG_0 || insn->off != 0) { in check_alu_op()
15835 struct bpf_reg_state *src_reg = regs + insn->src_reg; in check_alu_op() local
15851 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15852 copy_register_state(dst_reg, src_reg); in check_alu_op()
15856 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
15859 insn->src_reg); in check_alu_op()
15861 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
15864 no_sext = src_reg->umax_value < (1ULL << (insn->off - 1)); in check_alu_op()
15866 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15867 copy_register_state(dst_reg, src_reg); in check_alu_op()
15878 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
15881 insn->src_reg); in check_alu_op()
15883 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
15885 bool is_src_reg_u32 = get_reg_width(src_reg) <= 32; in check_alu_op()
15888 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15889 copy_register_state(dst_reg, src_reg); in check_alu_op()
15899 bool no_sext = src_reg->umax_value < (1ULL << (insn->off - 1)); in check_alu_op()
15902 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15903 copy_register_state(dst_reg, src_reg); in check_alu_op()
15945 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
15949 if (insn->src_reg != BPF_REG_0 || (insn->off != 0 && insn->off != 1) || in check_alu_op()
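In the BPF_MOV handling above, the recurring no_sext test decides whether a sign-extending move (BPF_MOVSX, insn->off = 8/16/32) may simply copy the full source state: if the source's unsigned max is below 2^(off-1), the sign bit of the narrow value can never be set, so the extension is an identity and the copied ranges and ID stay valid. Restated:

    #include <stdbool.h>
    #include <stdint.h>

    /* True when sign-extending the low off_bits of any value the source
     * can hold changes nothing, so copy_register_state() stays exact. */
    static bool movsx_is_identity(uint64_t src_umax, int off_bits)
    {
        return src_umax < (1ULL << (off_bits - 1));
    }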
16247 struct bpf_reg_state *src_reg, in is_pkt_ptr_branch_taken() argument
16252 if (src_reg->type == PTR_TO_PACKET_END) { in is_pkt_ptr_branch_taken()
16255 pkt = src_reg; in is_pkt_ptr_branch_taken()
16649 struct bpf_reg_state *src_reg, in try_match_pkt_pointers() argument
16663 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16665 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16671 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16673 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16675 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
16676 src_reg->type, true); in try_match_pkt_pointers()
16677 mark_pkt_end(this_branch, insn->src_reg, false); in try_match_pkt_pointers()
16684 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16686 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16692 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16694 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16696 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
16697 src_reg->type, false); in try_match_pkt_pointers()
16698 mark_pkt_end(other_branch, insn->src_reg, true); in try_match_pkt_pointers()
16705 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16707 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16713 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16715 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16717 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
16718 src_reg->type, false); in try_match_pkt_pointers()
16719 mark_pkt_end(this_branch, insn->src_reg, true); in try_match_pkt_pointers()
16726 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16728 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16734 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16736 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16738 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
16739 src_reg->type, true); in try_match_pkt_pointers()
16740 mark_pkt_end(other_branch, insn->src_reg, false); in try_match_pkt_pointers()
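try_match_pkt_pointers() is the machinery behind the classic packet bounds check: a comparison between a packet pointer and PTR_TO_PACKET_END lets find_good_pkt_pointers() record a known-safe range in whichever branch the comparison guarantees it, with the four symmetric cases covering >, <, >= and <=. A typical XDP program it validates (assuming libbpf's bpf_helpers.h):

    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>

    SEC("xdp")
    int parse(struct xdp_md *ctx)
    {
        void *data     = (void *)(long)ctx->data;
        void *data_end = (void *)(long)ctx->data_end;

        /* Compiles to a JGT between a PTR_TO_PACKET and PTR_TO_PACKET_END.
         * In the fall-through branch the verifier records that 4 bytes at
         * data are in bounds, which legalizes the load below. */
        if (data + 4 > data_end)
            return XDP_DROP;

        return ((__u8 *)data)[3] ? XDP_PASS : XDP_DROP;
    }

    char LICENSE[] SEC("license") = "GPL";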
16851 struct bpf_reg_state *dst_reg, *other_branch_regs, *src_reg = NULL; in check_cond_jmp_op() local
16871 insn->src_reg != BPF_MAY_GOTO || in check_cond_jmp_op()
16903 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_cond_jmp_op()
16907 src_reg = &regs[insn->src_reg]; in check_cond_jmp_op()
16908 if (!(reg_is_pkt_pointer_any(dst_reg) && reg_is_pkt_pointer_any(src_reg)) && in check_cond_jmp_op()
16909 is_pointer_value(env, insn->src_reg)) { in check_cond_jmp_op()
16911 insn->src_reg); in check_cond_jmp_op()
16915 if (src_reg->type == PTR_TO_STACK) in check_cond_jmp_op()
16920 if (insn->src_reg != BPF_REG_0) { in check_cond_jmp_op()
16924 src_reg = &env->fake_reg[0]; in check_cond_jmp_op()
16925 memset(src_reg, 0, sizeof(*src_reg)); in check_cond_jmp_op()
16926 src_reg->type = SCALAR_VALUE; in check_cond_jmp_op()
16927 __mark_reg_known(src_reg, insn->imm); in check_cond_jmp_op()
16940 pred = is_branch_taken(dst_reg, src_reg, opcode, is_jmp32); in check_cond_jmp_op()
16948 !__is_pointer_value(false, src_reg)) in check_cond_jmp_op()
16949 err = mark_chain_precision(env, insn->src_reg); in check_cond_jmp_op()
16989 if (BPF_SRC(insn->code) == BPF_X && src_reg->type == SCALAR_VALUE && src_reg->id) in check_cond_jmp_op()
16990 collect_linked_regs(this_branch, src_reg->id, &linked_regs); in check_cond_jmp_op()
17007 &other_branch_regs[insn->src_reg], in check_cond_jmp_op()
17008 dst_reg, src_reg, opcode, is_jmp32); in check_cond_jmp_op()
17026 src_reg->type == SCALAR_VALUE && src_reg->id && in check_cond_jmp_op()
17027 !WARN_ON_ONCE(src_reg->id != other_branch_regs[insn->src_reg].id)) { in check_cond_jmp_op()
17028 sync_linked_regs(this_branch, src_reg, &linked_regs); in check_cond_jmp_op()
17029 sync_linked_regs(other_branch, &other_branch_regs[insn->src_reg], &linked_regs); in check_cond_jmp_op()
17050 __is_pointer_value(false, src_reg) && __is_pointer_value(false, dst_reg) && in check_cond_jmp_op()
17051 type_may_be_null(src_reg->type) != type_may_be_null(dst_reg->type) && in check_cond_jmp_op()
17052 base_type(src_reg->type) != PTR_TO_BTF_ID && in check_cond_jmp_op()
17067 if (type_may_be_null(src_reg->type)) in check_cond_jmp_op()
17068 mark_ptr_not_null_reg(&eq_branch_regs[insn->src_reg]); in check_cond_jmp_op()
17088 } else if (!try_match_pkt_pointers(insn, dst_reg, &regs[insn->src_reg], in check_cond_jmp_op()
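One detail worth calling out from the check_cond_jmp_op() hits: for BPF_K (immediate) conditionals there is no real source register, so the verifier materializes a known-constant scalar in env->fake_reg and reuses the same reg-vs-reg branch analysis. A condensed model of that __mark_reg_known() step, on a simplified register type:

    #include <stdint.h>
    #include <string.h>

    enum { SCALAR_VALUE = 1 };

    struct reg {
        int      type;
        int64_t  smin, smax;
        uint64_t umin, umax;
    };

    static void mark_known(struct reg *r, int32_t imm)
    {
        memset(r, 0, sizeof(*r));
        r->type = SCALAR_VALUE;
        r->smin = r->smax = imm;
        /* Sign-extend to 64 bits first, then reinterpret as unsigned,
         * matching the u64 parameter of __mark_reg_known(). */
        r->umin = r->umax = (uint64_t)(int64_t)imm;
    }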
17123 if (insn->src_reg == 0) { in check_ld_imm()
17137 if (insn->src_reg == BPF_PSEUDO_BTF_ID) { in check_ld_imm()
17154 if (insn->src_reg == BPF_PSEUDO_FUNC) { in check_ld_imm()
17176 if (insn->src_reg == BPF_PSEUDO_MAP_VALUE || in check_ld_imm()
17177 insn->src_reg == BPF_PSEUDO_MAP_IDX_VALUE) { in check_ld_imm()
17187 } else if (insn->src_reg == BPF_PSEUDO_MAP_FD || in check_ld_imm()
17188 insn->src_reg == BPF_PSEUDO_MAP_IDX) { in check_ld_imm()
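check_ld_imm() (and later resolve_pseudo_ldimm64()) branch on src_reg because the 16-byte ld_imm64 instruction overloads it as a tag describing what the 64-bit immediate holds. The enum names below are illustrative stand-ins; the numeric values mirror the BPF_PSEUDO_* defines in include/uapi/linux/bpf.h and are worth re-checking against that header:

    /* src_reg tags for BPF_LD | BPF_IMM | BPF_DW: */
    enum ld_imm64_kind {
        LD_IMM64_CONST         = 0, /* plain 64-bit constant                   */
        LD_IMM64_MAP_FD        = 1, /* BPF_PSEUDO_MAP_FD: imm is a map fd      */
        LD_IMM64_MAP_VALUE     = 2, /* BPF_PSEUDO_MAP_VALUE: map fd + value off */
        LD_IMM64_BTF_ID        = 3, /* BPF_PSEUDO_BTF_ID: imm is a BTF id      */
        LD_IMM64_FUNC          = 4, /* BPF_PSEUDO_FUNC: points at a subprog    */
        LD_IMM64_MAP_IDX       = 5, /* BPF_PSEUDO_MAP_IDX: index into fd_array */
        LD_IMM64_MAP_IDX_VALUE = 6, /* BPF_PSEUDO_MAP_IDX_VALUE                */
    };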
17244 (mode == BPF_ABS && insn->src_reg != BPF_REG_0)) { in check_ld_abs()
17270 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_ld_abs()
17865 ldx->src_reg != BPF_REG_10) in mark_fastcall_pattern_for_call()
17868 if (stx->src_reg != ldx->dst_reg) in mark_fastcall_pattern_for_call()
17871 if ((BIT(stx->src_reg) & expected_regs_mask) == 0) in mark_fastcall_pattern_for_call()
17879 expected_regs_mask &= ~BIT(stx->src_reg); in mark_fastcall_pattern_for_call()
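mark_fastcall_pattern_for_call() pairs a spill before a call with a fill after it: the stx stores some rX to a slot off r10, and the ldx must restore the same rX from the same slot. A condensed model of just that pairing test, on hypothetical stand-in types (the real code also checks the slot offsets and sizes, and the expected_regs_mask restricts which registers qualify):

    #include <stdbool.h>
    #include <stdint.h>

    #define BPF_REG_10 10

    struct mem_insn { uint8_t dst_reg, src_reg; int16_t off; };

    static bool spill_fill_match(const struct mem_insn *stx,
                                 const struct mem_insn *ldx)
    {
        return stx->dst_reg == BPF_REG_10 &&   /* store to the stack...  */
               ldx->src_reg == BPF_REG_10 &&   /* ...load from the stack */
               stx->off == ldx->off &&         /* same slot              */
               stx->src_reg == ldx->dst_reg;   /* same register back     */
    }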
18240 } else if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in visit_insn()
18269 return visit_func_call_insn(t, insns, env, insn->src_reg == BPF_PSEUDO_CALL); in visit_insn()
20346 insn->src_reg != BPF_REG_0) { in do_check_insn()
20373 (insn->src_reg != BPF_PSEUDO_KFUNC_CALL && in do_check_insn()
20375 (insn->src_reg != BPF_REG_0 && in do_check_insn()
20376 insn->src_reg != BPF_PSEUDO_CALL && in do_check_insn()
20377 insn->src_reg != BPF_PSEUDO_KFUNC_CALL) || in do_check_insn()
20384 if ((insn->src_reg == BPF_REG_0 && in do_check_insn()
20386 (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && in do_check_insn()
20393 if (insn->src_reg == BPF_PSEUDO_CALL) { in do_check_insn()
20395 } else if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in do_check_insn()
20408 if (insn->src_reg != BPF_REG_0 || in do_check_insn()
20417 insn->src_reg != BPF_REG_0 || in do_check_insn()
20433 insn->src_reg != BPF_REG_0 || in do_check_insn()
21067 insn[1].dst_reg != 0 || insn[1].src_reg != 0 || in resolve_pseudo_ldimm64()
21073 if (insn[0].src_reg == 0) in resolve_pseudo_ldimm64()
21077 if (insn[0].src_reg == BPF_PSEUDO_BTF_ID) { in resolve_pseudo_ldimm64()
21085 if (insn[0].src_reg == BPF_PSEUDO_FUNC) { in resolve_pseudo_ldimm64()
21094 switch (insn[0].src_reg) { in resolve_pseudo_ldimm64()
21108 switch (insn[0].src_reg) { in resolve_pseudo_ldimm64()
21133 if (insn[0].src_reg == BPF_PSEUDO_MAP_FD || in resolve_pseudo_ldimm64()
21134 insn[0].src_reg == BPF_PSEUDO_MAP_IDX) { in resolve_pseudo_ldimm64()
21206 if (insn->src_reg == BPF_PSEUDO_FUNC) in convert_pseudo_ld_imm64()
21208 insn->src_reg = 0; in convert_pseudo_ld_imm64()
21747 zext_patch[1].src_reg = load_reg; in opt_subreg_zext_lo32_rnd_hi32()
22705 *patch++ = BPF_MOV64_REG(BPF_REG_AX, insn->src_reg); in do_misc_fixups()
22728 *patch++ = BPF_MOV64_REG(BPF_REG_AX, insn->src_reg); in do_misc_fixups()
22750 BPF_JNE | BPF_K, insn->src_reg, in do_misc_fixups()
22759 BPF_JEQ | BPF_K, insn->src_reg, in do_misc_fixups()
22790 *patch++ = BPF_MOV64_REG(BPF_REG_AX, insn->src_reg); in do_misc_fixups()
22849 off_reg = issrc ? insn->src_reg : insn->dst_reg; in do_misc_fixups()
22863 *patch++ = BPF_MOV64_REG(insn->dst_reg, insn->src_reg); in do_misc_fixups()
22864 insn->src_reg = BPF_REG_AX; in do_misc_fixups()
22951 if (insn->src_reg == BPF_PSEUDO_CALL) in do_misc_fixups()
22953 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in do_misc_fixups()
23608 insn->src_reg == 0 && in is_bpf_loop_call()
24713 u16 src = BIT(insn->src_reg); in compute_insn_live_regs()
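Finally, compute_insn_live_regs() packs liveness into a u16 because BPF has only registers r0..r10: each instruction contributes a use bit for src_reg (as in the last hit) and a def bit via insn_def_regno(). The per-instruction step is the textbook backward dataflow equation; a minimal sketch:

    #include <stdint.h>

    /* live-before = use | (live-after & ~def): a register is live ahead of
     * an instruction if the instruction reads it, or if it is live after
     * the instruction and the instruction does not overwrite it. */
    static uint16_t live_before(uint16_t live_after, uint16_t def, uint16_t use)
    {
        return use | (uint16_t)(live_after & ~def);
    }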