/linux/arch/powerpc/include/asm/

inst.h
     41  static inline u32 ppc_inst_val(ppc_inst_t x)  in ppc_inst_val() function
     49  static inline u32 ppc_inst_val(ppc_inst_t x)  in ppc_inst_val() function
     58  return ppc_inst_val(x) >> 26;  in ppc_inst_primary_opcode()
     94  return ppc_inst_prefix(swab32(ppc_inst_val(x)), swab32(ppc_inst_suffix(x)));  in ppc_inst_swab()
     99  if (ppc_inst_val(x) != ppc_inst_val(y))  in ppc_inst_equal()
    127  return ppc_inst_val(x);  in ppc_inst_as_ulong()
    129  return (u64)ppc_inst_suffix(x) << 32 | ppc_inst_val(x);  in ppc_inst_as_ulong()
    131  return (u64)ppc_inst_val(x) << 32 | ppc_inst_suffix(x);  in ppc_inst_as_ulong()
    137  *ptr = ppc_inst_val(x);  in ppc_inst_write()
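The inst.h hits show the accessor itself: ppc_inst_val() returns the 32-bit instruction word of a ppc_inst_t, the primary opcode sits in the top six bits, and a second suffix word (ppc_inst_suffix()) exists for prefixed instructions. A minimal user-space sketch of that accessor pattern, assuming a simplified one-word ppc_inst_t without the suffix word:

#include <stdint.h>
#include <stdio.h>

typedef struct { uint32_t val; } ppc_inst_t;        /* simplified model, no suffix word */

static inline uint32_t ppc_inst_val(ppc_inst_t x)
{
        return x.val;                               /* the raw 32-bit instruction word */
}

static inline int ppc_inst_primary_opcode(ppc_inst_t x)
{
        return ppc_inst_val(x) >> 26;               /* primary opcode is the top 6 bits */
}

int main(void)
{
        ppc_inst_t nop = { 0x60000000 };            /* "ori r0,r0,0", i.e. nop */
        printf("opcode %d\n", ppc_inst_primary_opcode(nop));    /* prints 24 */
        return 0;
}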
sstep.h
     19  #define IS_MTMSRD(instr) ((ppc_inst_val(instr) & 0xfc0007be) == 0x7c000124)
     20  #define IS_RFID(instr) ((ppc_inst_val(instr) & 0xfc0007be) == 0x4c000024)
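Both macros recognise an instruction by clearing its operand fields with a mask and comparing what is left against a canonical encoding; with these particular constants one compare covers both the 32-bit and 64-bit forms (mtmsr/mtmsrd, rfi/rfid). A sketch of the same test outside the kernel, with the constants taken verbatim from the hits and illustrative wrapper names:

#include <stdbool.h>
#include <stdint.h>

bool is_mtmsrd(uint32_t word)
{
        return (word & 0xfc0007be) == 0x7c000124;   /* matches mtmsr and mtmsrd */
}

bool is_rfid(uint32_t word)
{
        return (word & 0xfc0007be) == 0x4c000024;   /* matches rfi and rfid */
}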
/linux/arch/powerpc/lib/

code-patching.c
     49  return __patch_mem(addr, ppc_inst_val(instr), addr, false);  in raw_patch_instruction()
    383  return patch_mem(addr, ppc_inst_val(instr), false);  in patch_instruction()
    409  return patch_mem(addr, ppc_inst_val(instr), false);  in patch_instruction()
    451  u32 val = ppc_inst_val(instr);  in __patch_instructions()
    597  switch ((ppc_inst_val(instr) >> 1) & 0x3ff) {  in is_conditional_branch()
    629  if (ppc_inst_val(instr) & BRANCH_ABSOLUTE)  in instr_is_relative_branch()
    637  return instr_is_relative_branch(instr) && (ppc_inst_val(instr) & BRANCH_SET_LINK);  in instr_is_relative_link_branch()
    644  imm = ppc_inst_val(ppc_inst_read(instr)) & 0x3FFFFFC;  in branch_iform_target()
    650  if ((ppc_inst_val(ppc_inst_read(instr)) & BRANCH_ABSOLUTE) == 0)  in branch_iform_target()
    660  imm = ppc_inst_val(ppc_inst_read(instr)) & 0xFFFC;  in branch_bform_target()
    [all …]
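The branch helpers above classify a word by its opcode fields and recover the target from the LI displacement. A sketch of the I-form target computation, assuming BRANCH_ABSOLUTE is the AA bit (0x2); the real helper reads the word through ppc_inst_read() first, as the hits at lines 644 and 650 show:

#include <stdint.h>

#define BRANCH_ABSOLUTE 0x02                        /* AA bit of b/bl (assumed value) */

unsigned long branch_iform_target(unsigned long addr, uint32_t word)
{
        long imm = word & 0x3FFFFFC;                /* 26-bit LI field, already a byte offset */

        if (imm & 0x2000000)                        /* sign-extend from bit 25 */
                imm -= 0x4000000;

        if ((word & BRANCH_ABSOLUTE) == 0)          /* relative branch: add its own address */
                imm += addr;

        return (unsigned long)imm;
}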
test_emulate_step.c
   1606  if (!regs || !ppc_inst_val(instr))  in emulate_compute_instr()
   1631  if (!regs || !ppc_inst_val(instr))  in execute_compute_instr()
sstep.c
   1368  word = ppc_inst_val(instr);  in analyse_instr()
/linux/arch/powerpc/kernel/

module_32.c
    304  if ((ppc_inst_val(jmp[0]) & 0xffff0000) != PPC_RAW_LIS(_R12, 0))  in module_trampoline_target()
    306  if ((ppc_inst_val(jmp[1]) & 0xffff0000) != PPC_RAW_ADDI(_R12, _R12, 0))  in module_trampoline_target()
    308  if (ppc_inst_val(jmp[2]) != PPC_RAW_MTCTR(_R12))  in module_trampoline_target()
    310  if (ppc_inst_val(jmp[3]) != PPC_RAW_BCTR())  in module_trampoline_target()
    313  addr = (ppc_inst_val(jmp[1]) & 0xffff) | ((ppc_inst_val(jmp[0]) & 0xffff) << 16);  in module_trampoline_target()
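module_trampoline_target() first verifies the four-instruction trampoline (lis r12; addi r12,r12; mtctr r12; bctr) and only then rebuilds the target from the two 16-bit immediates, as at line 313. A standalone sketch of that reconstruction (the helper name is illustrative):

#include <stdint.h>

uint32_t trampoline_target(uint32_t lis_word, uint32_t addi_word)
{
        uint32_t hi = lis_word & 0xffff;            /* immediate of "lis r12, hi" */
        uint32_t lo = addi_word & 0xffff;           /* immediate of "addi r12, r12, lo" */

        return (hi << 16) | lo;                     /* same masking and shifting as line 313 */
}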
align.c
    120  instr = ppc_inst_val(ppc_instr);  in emulate_spe()
    323  int reg = (ppc_inst_val(instr) >> 21) & 0x1f;  in fix_alignment()
    340  if ((ppc_inst_val(instr) & 0xfc0006fe) == (PPC_INST_COPY & 0xfc0006fe))  in fix_alignment()
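The fix_alignment() hit at line 323 extracts the target register number: in the common PowerPC load/store encodings the RT/RS field is the five bits starting at bit 21 of the word. A one-line sketch (the helper name is illustrative):

#include <stdint.h>

int insn_rt(uint32_t word)
{
        return (word >> 21) & 0x1f;                 /* 5-bit register number, 0-31 */
}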
kprobes.c
    138  } else if (!can_single_step(ppc_inst_val(insn))) {  in arch_prepare_kprobe()
    168  p->opcode = ppc_inst_val(insn);  in arch_prepare_kprobe()
uprobes.c
     51  if (!can_single_step(ppc_inst_val(ppc_inst_read(auprobe->insn)))) {  in arch_uprobe_analyze_insn()
vecemu.c
    272  word = ppc_inst_val(instr);  in emulate_altivec()
traps.c
   1584  get_xop(ppc_inst_val(insn)) == OP_31_XOP_HASHCHK) {  in do_program_check()
/linux/arch/powerpc/kernel/trace/

ftrace_64_pg.c
     98  return (ppc_inst_val(op) & ~PPC_LI_MASK) == PPC_RAW_BL(0);  in is_bl_op()
    103  return (ppc_inst_val(op) & ~PPC_LI_MASK) == PPC_RAW_BRANCH(0);  in is_b_op()
    110  offset = PPC_LI(ppc_inst_val(op));  in find_bl_target()
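is_bl_op() masks off the LI displacement and compares the rest of the word against a bl with zero offset; find_bl_target() (line 110) then recovers the offset from the bits the mask keeps. A sketch assuming PPC_LI_MASK covers the 24-bit LI field (shifted left by two) and that "bl 0" encodes as 0x48000001:

#include <stdbool.h>
#include <stdint.h>

#define PPC_LI_MASK   0x03fffffc                    /* LI displacement bits (assumed value) */
#define PPC_RAW_BL_0  0x48000001                    /* opcode 18 with LK=1, zero offset (assumed) */

bool is_bl_op(uint32_t word)
{
        /* With the displacement cleared, only the opcode, AA and LK bits remain. */
        return (word & ~PPC_LI_MASK) == PPC_RAW_BL_0;
}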
ftrace.c
     93  return (ppc_inst_val(op) & ~PPC_LI_MASK) == PPC_RAW_BL(0);  in is_bl_op()
/linux/arch/powerpc/kvm/

book3s_pr.c
   1100  kvmppc_get_pc(vcpu), ppc_inst_val(last_inst));  in kvmppc_exit_pr_progint()
   1102  if ((ppc_inst_val(last_inst) & 0xff0007ff) != (INS_DCBZ & 0xfffffff7)) {  in kvmppc_exit_pr_progint()
   1119  __func__, kvmppc_get_pc(vcpu), ppc_inst_val(last_inst));  in kvmppc_exit_pr_progint()
   1296  (ppc_inst_val(last_sc) == 0x44000022) &&  in kvmppc_handle_exit_pr()
   1389  dsisr = kvmppc_alignment_dsisr(vcpu, ppc_inst_val(last_inst));  in kvmppc_handle_exit_pr()
   1390  dar = kvmppc_alignment_dar(vcpu, ppc_inst_val(last_inst));  in kvmppc_handle_exit_pr()
emulate.c
    206  inst = ppc_inst_val(pinst);  in kvmppc_emulate_instruction()
emulate_loadstore.c
    360  trace_kvm_ppc_instr(ppc_inst_val(inst), kvmppc_get_pc(vcpu), emulated);  in kvmppc_emulate_loadstore()
booke.c
   1039  last_inst = ppc_inst_val(pinst);  in kvmppc_handle_exit()
   1045  last_inst = ppc_inst_val(pinst);  in kvmppc_handle_exit()
book3s_paired_singles.c
    637  inst = ppc_inst_val(pinst);  in kvmppc_emulate_paired_single()
book3s_hv.c
   1484  if (ppc_inst_val(last_inst) == KVMPPC_INST_SW_BREAKPOINT) {  in kvmppc_emulate_debug_inst()
   1542  inst = ppc_inst_val(pinst);  in kvmppc_emulate_doorbell_instr()
   2082  vcpu->arch.emul_inst = ppc_inst_val(pinst);  in kvmppc_handle_nested_exit()
book3s_64_mmu_hv.c
    425  suffix = ppc_inst_val(instr);  in instruction_is_store()
powerpc.c
    311  ppc_inst_val(last_inst));  in kvmppc_emulate_mmio()
/linux/arch/powerpc/xmon/

xmon.c
    920  if (!can_single_step(ppc_inst_val(instr))) {  in insert_bpts()
   1468  if (!can_single_step(ppc_inst_val(instr))) {  in check_bp_loc()
   3051  dump_func(ppc_inst_val(inst), adr);  in generic_inst_dump()