/linux/arch/powerpc/kvm/

book3s_32_mmu.c
  kvmppc_mmu_book3s_32_esid_to_vsid():
    356  if (msr & (MSR_DR|MSR_IR)) {
    365  switch (msr & (MSR_DR|MSR_IR)) {
    369  case MSR_IR:
    375  case MSR_DR|MSR_IR:

book3s_64_mmu.c
  kvmppc_mmu_book3s_64_slbia():
    483  if (kvmppc_get_msr(vcpu) & MSR_IR) {
  kvmppc_mmu_book3s_64_esid_to_vsid():
    582  if (msr & (MSR_DR|MSR_IR)) {
    595  switch (msr & (MSR_DR|MSR_IR)) {
    599  case MSR_IR:
    605  case MSR_DR|MSR_IR:

book3s_hv_builtin.c
  inject_interrupt():
    548  (msr & (MSR_IR|MSR_DR)) == (MSR_IR|MSR_DR)) {
    549  new_msr |= MSR_IR | MSR_DR;

book3s_pr.c
  kvmppc_is_split_real():
    70   return (msr & (MSR_IR|MSR_DR)) == MSR_DR;
  kvmppc_fixup_split_real():
    79   if ((msr & (MSR_IR|MSR_DR)) != MSR_DR)
  kvmppc_recalc_shadow_msr():
    244  smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;
  kvmppc_set_msr_pr():
    509  if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) !=
    510      (old_msr & (MSR_PR|MSR_IR|MSR_DR))) {
  kvmppc_handle_pagefault():
    692  bool ir = (kvmppc_get_msr(vcpu) & MSR_IR) ? true : false;
    713  switch (kvmppc_get_msr(vcpu) & (MSR_DR|MSR_IR)) {
    723  case MSR_IR:
    726  if ((kvmppc_get_msr(vcpu) & (MSR_DR|MSR_IR)) == MSR_DR)

book3s_rmhandlers.S
    152  li r6, MSR_IR | MSR_DR

book3s.c
  kvmppc_xlate():
    455  int relocated = (kvmppc_get_msr(vcpu) & (data ? MSR_DR : MSR_IR));
    469  if ((kvmppc_get_msr(vcpu) & (MSR_IR | MSR_DR)) == MSR_DR &&

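The book3s_32_mmu.c, book3s_64_mmu.c and book3s_pr.c hits above all branch on the same two relocation bits. A minimal sketch of that four-way classification, assuming only the MSR_IR/MSR_DR mask values from reg.h; the helper and enum names below are illustrative, not kernel code:

    /* Mask values mirror arch/powerpc/include/asm/reg.h (IR = bit 5, DR = bit 4). */
    #define MSR_IR 0x20UL   /* Instruction Relocate */
    #define MSR_DR 0x10UL   /* Data Relocate */

    enum reloc_mode {        /* hypothetical names, for illustration only */
            RELOC_REAL,      /* neither bit set: real mode */
            RELOC_DATA_ONLY, /* MSR_DR only: "split real mode" per kvmppc_is_split_real() */
            RELOC_INSN_ONLY, /* MSR_IR only */
            RELOC_VIRTUAL,   /* both bits set: fully translated */
    };

    /* Restates the switch (msr & (MSR_DR|MSR_IR)) pattern seen in
     * kvmppc_mmu_book3s_32_esid_to_vsid() and kvmppc_handle_pagefault(). */
    static enum reloc_mode classify_relocation(unsigned long msr)
    {
            switch (msr & (MSR_DR | MSR_IR)) {
            case 0:
                    return RELOC_REAL;
            case MSR_DR:
                    return RELOC_DATA_ONLY;
            case MSR_IR:
                    return RELOC_INSN_ONLY;
            default:                        /* MSR_DR | MSR_IR */
                    return RELOC_VIRTUAL;
            }
    }
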
/linux/arch/powerpc/kernel/

rtas_entry.S
    27   li r9,MSR_KERNEL & ~(MSR_IR|MSR_DR)
    130  LOAD_REG_IMMEDIATE(r6, MSR_KERNEL & ~(MSR_IR|MSR_DR))

head_book3s_32.S
    202   ori r0,r0,MSR_DR|MSR_IR|MSR_RI
    986   li r3,MSR_KERNEL & ~(MSR_IR|MSR_DR)
    1077  li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR)
    1100  li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)
    1118  andi. r0,r3,MSR_DR|MSR_IR    /* MMU enabled? */

paca.c
  initialise_paca():
    199  new_paca->kernel_msr = MSR_KERNEL & ~(MSR_IR | MSR_DR);

head_8xx.S
    88   ori r0,r0,MSR_DR|MSR_IR
    568  li r3,MSR_KERNEL & ~(MSR_IR|MSR_DR)
    697  li r12, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)

kvm_emul.S
    300  andi. r31, r31, MSR_DR | MSR_IR

rtas.c
  do_enter_rtas():
    719  const unsigned long mask = MSR_IR | MSR_DR;
  rtas_call():
    1237 if ((mfmsr() & (MSR_IR|MSR_DR)) != (MSR_IR|MSR_DR)) {

kprobes.c
  kprobe_handler():
    283  (!(regs->msr & MSR_IR) || !(regs->msr & MSR_DR)))

misc_64.S
    363  li r10,MSR_DR|MSR_IR

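Several of the kernel/ hits, most directly rtas_call() at rtas.c:1237, test whether both relocation bits are currently set before proceeding, i.e. whether the caller is running with the MMU on. A minimal sketch of that check, assuming only mfmsr() and the two masks; the helper name is invented:

    #include <stdbool.h>

    #define MSR_IR 0x20UL   /* Instruction Relocate */
    #define MSR_DR 0x10UL   /* Data Relocate */

    /* Stand-in declaration for the kernel's mfmsr() inline, which reads the MSR. */
    extern unsigned long mfmsr(void);

    /* Hypothetical helper restating the rtas_call() guard: translation is
     * considered "on" only when both IR and DR are set. */
    static bool translation_enabled(void)
    {
            return (mfmsr() & (MSR_IR | MSR_DR)) == (MSR_IR | MSR_DR);
    }

This is consistent with rtas_entry.S loading MSR_KERNEL & ~(MSR_IR|MSR_DR) before the call, i.e. entering RTAS with both relocation bits cleared.
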
/linux/arch/powerpc/platforms/82xx/

pq2.c
  pq2_restart():
    29   mtmsr(mfmsr() & ~(MSR_ME | MSR_EE | MSR_IR | MSR_DR));

/linux/arch/powerpc/platforms/powernv/

opal-wrappers.S
    27   li r0,MSR_IR|MSR_DR|MSR_LE

subcore-asm.S
    31   li r5, MSR_IR|MSR_DR

idle.c
  power7_idle_insn():
    390  WARN_ON_ONCE(mfmsr() & (MSR_IR|MSR_DR));
  power9_idle_stop():
    702  WARN_ON_ONCE(mfmsr() & (MSR_IR|MSR_DR));
  power10_idle_stop():
    938  WARN_ON_ONCE(mfmsr() & (MSR_IR|MSR_DR));

opal-call.c
  opal_call():
    101  bool mmu = (msr & (MSR_IR|MSR_DR));

/linux/arch/powerpc/platforms/pasemi/

powersave.S
    62   LOAD_REG_IMMEDIATE(r6,MSR_DR|MSR_IR|MSR_ME|MSR_EE)

/linux/arch/powerpc/xmon/

xmon.c
  xmon_core():
    554  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT))
    711  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {
  xmon_bpt():
    764  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))
  xmon_break_match():
    797  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))
  xmon_iabr_match():
    812  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))
  xmon_fault_handler():
    837  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {
  do_step():
    1208 if ((regs->msr & (MSR_64BIT|MSR_PR|MSR_IR)) == (MSR_64BIT|MSR_IR)) {

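Every xmon hit applies one mask-and-compare to regs->msr: the debugger only claims the event when the interrupted context was 64-bit kernel code with instruction relocation on (MSR_PR clear rules out user mode). A hedged restatement in C, assuming a 64-bit build; the function name is invented, and the mask values follow reg.h, with MSR_64BIT standing for MSR_SF on 64-bit Book3S:

    #include <stdbool.h>

    #define MSR_IR    (1UL << 5)    /* Instruction Relocate */
    #define MSR_PR    (1UL << 14)   /* Problem state (user mode) */
    #define MSR_64BIT (1UL << 63)   /* 64-bit mode (MSR_SF) */

    /* Invented helper restating the xmon_core()/xmon_bpt() predicate. */
    static bool msr_is_64bit_kernel_translated(unsigned long msr)
    {
            /* IR and SF set, PR clear: translated, 64-bit, kernel context. */
            return (msr & (MSR_IR | MSR_PR | MSR_64BIT)) == (MSR_IR | MSR_64BIT);
    }
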
/linux/arch/powerpc/include/asm/

reg.h
    105  #define MSR_IR __MASK(MSR_IR_LG)    /* Instruction Relocate */
    134  #define __MSR (MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_HV)
    147  #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR)

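The reg.h hit is where the masks themselves are defined; the MSR_KERNEL & ~(MSR_IR|MSR_DR) expression that recurs in rtas_entry.S, head_book3s_32.S, head_8xx.S and paca.c simply clears both relocation bits to get an MSR value for running the kernel in real mode. A small worked sketch of that arithmetic, using the 32-bit composition from reg.h:147 (the 64-bit variant adds further bits such as MSR_SF and MSR_HV, so the constants differ there); MSR_KERNEL_REAL is an illustrative name, not a kernel macro:

    /* Mask values per reg.h (ME = bit 12, RI = bit 1, IR = bit 5, DR = bit 4). */
    #define MSR_ME 0x1000UL  /* Machine Check Enable */
    #define MSR_RI 0x0002UL  /* Recoverable Interrupt */
    #define MSR_IR 0x0020UL  /* Instruction Relocate */
    #define MSR_DR 0x0010UL  /* Data Relocate */

    /* 32-bit composition from the hit at reg.h:147. */
    #define MSR_KERNEL      (MSR_ME | MSR_RI | MSR_IR | MSR_DR)      /* 0x1032 */

    /* What rtas_entry.S, head_book3s_32.S and initialise_paca() compute:
     * the same kernel MSR with both relocation bits cleared (real mode). */
    #define MSR_KERNEL_REAL (MSR_KERNEL & ~(MSR_IR | MSR_DR))        /* 0x1002 */
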
/linux/arch/powerpc/mm/book3s64/

hash_pgtable.c
  chmem_secondary_loop():
    468  [p] "b" (p), [MSR_IR_DR] "i" (MSR_IR | MSR_DR)

/linux/arch/powerpc/platforms/52xx/

lite5200_sleep.S
    212  ori r10, r10, MSR_DR | MSR_IR

/linux/arch/powerpc/platforms/pseries/

ras.c
  pSeries_system_reset_exception():
    496  (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR|