/linux/tools/testing/selftests/powerpc/signal/
sigreturn_kernel.c
     17  #define MSR_PR (1ul << 14)  (macro)
    112  sigreturn_msr_mask = ~MSR_PR;  in test_sigreturn_kernel()
    119  sigreturn_msr_mask = ~MSR_PR;  in test_sigreturn_kernel()
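The selftest above is the one place in this list that manipulates MSR_PR from user space: its signal handler clears the bit in the saved context so that sigreturn would resume in supervisor state, which the kernel must refuse or sanitize. Below is a minimal sketch of that masking step, assuming the powerpc64 ucontext layout (gp_regs indexed by PT_MSR); it is illustrative, not a copy of the test:

    #include <signal.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/wait.h>
    #include <ucontext.h>
    #include <unistd.h>
    #include <asm/ptrace.h>              /* PT_MSR (powerpc only) */

    #define MSR_PR (1ul << 14)           /* Problem State, as defined in the test */

    static void handler(int sig, siginfo_t *info, void *ctx)
    {
        ucontext_t *ucp = ctx;

        /* Clear Problem State in the MSR that sigreturn will restore. */
        ucp->uc_mcontext.gp_regs[PT_MSR] &= ~MSR_PR;
    }

    int main(void)
    {
        struct sigaction sa;
        int status;

        memset(&sa, 0, sizeof(sa));
        sa.sa_sigaction = handler;
        sa.sa_flags = SA_SIGINFO;
        sigaction(SIGUSR1, &sa, NULL);

        if (fork() == 0) {
            raise(SIGUSR1);              /* handler doctors the signal frame */
            exit(0);                     /* reached only if the kernel sanitized it */
        }
        wait(&status);
        printf("child %s\n",
               WIFSIGNALED(status) && WTERMSIG(status) == SIGSEGV
                   ? "killed by SIGSEGV: kernel rejected the doctored MSR"
                   : "survived: kernel sanitized the restored MSR");
        return 0;
    }

Either outcome is safe; what the test guards against is the third possibility, actually resuming execution with MSR_PR clear.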
|
/linux/arch/powerpc/kvm/
book3s_hv_tm_builtin.c
     53  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))  in kvmhv_p9_tm_emulation_early()
     57  ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB)))  in kvmhv_p9_tm_emulation_early()
     92  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))  in kvmhv_p9_tm_emulation_early()
|
book3s_emulate.c
     77  if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM)  in spr_allowed()
    145  if (kvmppc_get_msr(vcpu) & MSR_PR)  in kvmppc_emulate_treclaim()
    220  if (guest_msr & MSR_PR)  in kvmppc_emulate_tabort()
    361  if ((kvmppc_get_msr(vcpu) & MSR_PR) ||  in kvmppc_core_emulate_op_pr()
    497  if (!(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_core_emulate_op_pr()
    539  WARN_ON(guest_msr & MSR_PR);  in kvmppc_core_emulate_op_pr()
    562  if (guest_msr & MSR_PR) {  in kvmppc_core_emulate_op_pr()
    596  if (guest_msr & MSR_PR) {  in kvmppc_core_emulate_op_pr()
    848  if (kvmppc_get_msr(vcpu) & MSR_PR) {  in kvmppc_core_emulate_mtspr_pr()
    853  if ((kvmppc_get_msr(vcpu) & MSR_PR) || sprn == 0) {  in kvmppc_core_emulate_mtspr_pr()
    [all …]
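One pattern accounts for most of the hits in book3s_emulate.c: before emulating a privileged operation on the guest's behalf, test whether the guest is in problem state (MSR_PR set) and refuse if so. A self-contained sketch of the spr_allowed() gate at line 77; the struct and enum names here are illustrative stand-ins, not the KVM API:

    #include <stdbool.h>

    #define MSR_PR (1ul << 14)

    /* Minimum privilege an SPR access requires. */
    enum priv_level { PRIV_PROBLEM, PRIV_SUPER, PRIV_HYPER };

    struct guest_ctx {
        unsigned long msr;               /* guest's Machine State Register */
    };

    /* Mirrors the spr_allowed() test: a guest running with MSR_PR set
     * may only touch SPRs accessible from problem state. */
    static bool spr_access_allowed(const struct guest_ctx *g, enum priv_level level)
    {
        if ((g->msr & MSR_PR) && level > PRIV_PROBLEM)
            return false;
        return true;
    }

The same test guards the mtspr emulation at lines 848 and 853, and in its inverted form (!(... & MSR_PR)) it asserts that a path is only reachable from a privileged guest, as in the WARN_ON at line 539.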
|
book3s_32_mmu.c
    141  if (kvmppc_get_msr(vcpu) & MSR_PR) {  in kvmppc_mmu_book3s_32_xlate_bat()
    224  if ((sr_kp(sre) && (kvmppc_get_msr(vcpu) & MSR_PR)) ||  in kvmppc_mmu_book3s_32_xlate_pte()
    225  (sr_ks(sre) && !(kvmppc_get_msr(vcpu) & MSR_PR)))  in kvmppc_mmu_book3s_32_xlate_pte()
    303  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_32_xlate()
    385  if (msr & MSR_PR)  in kvmppc_mmu_book3s_32_esid_to_vsid()
|
book3s_hv_tm.c
     25  if (msr & MSR_PR) {  in emulate_tx_failure()
     84  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {  in kvmhv_p9_tm_emulation()
     96  if ((msr & MSR_PR) && !(vcpu->arch.fscr & FSCR_EBB)) {  in kvmhv_p9_tm_emulation()
    135  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {  in kvmhv_p9_tm_emulation()
|
book3s_64_mmu.c
    212  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_64_xlate()
    257  if ((kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Kp)  in kvmppc_mmu_book3s_64_xlate()
    259  else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Ks)  in kvmppc_mmu_book3s_64_xlate()
    301  !(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_mmu_book3s_64_xlate()
    567  return mp_ea && !(kvmppc_get_msr(vcpu) & MSR_PR) &&  in segment_contains_magic_page()
    628  if (kvmppc_get_msr(vcpu) & MSR_PR)  in kvmppc_mmu_book3s_64_esid_to_vsid()
    638  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_64_esid_to_vsid()
|
e500.h
    222  return !!(vcpu->arch.shared->msr & MSR_PR);  in get_cur_pr()
|
e500_mmu_host.c
    311  u32 pr = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_e500_setup_stlbe()
    634  pr = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_load_last_inst()
|
booke.c
    437  crit = crit && !(vcpu->arch.shared->msr & MSR_PR);  in kvmppc_booke_irqprio_deliver()
   1151  if (vcpu->arch.shared->msr & (MSR_PR | MSR_GS)) {  in kvmppc_handle_exit()
   1255  if (!(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_handle_exit()
   1269  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
   1290  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
   1987  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_xlate()
|
booke_interrupts.S
     79  andi. r4, r4, MSR_PR
|
powerpc.c
    377  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_st()
    423  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_ld()
|
book3s_hv_rm_mmu.c
   1241  key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS;  in kvmppc_hpte_hv_fault()
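The ternary above, like the Kp/Ks tests at lines 257 and 259 of book3s_64_mmu.c, encodes how storage-key protection is selected: the SLB entry's Kp bit governs problem-state accesses and Ks governs supervisor-state accesses. A sketch of that selection, with an illustrative struct in place of the kernel's types:

    #include <stdbool.h>

    #define MSR_PR (1ul << 14)

    struct slb_entry {
        bool Ks;                         /* key bit for supervisor-state access */
        bool Kp;                         /* key bit for problem-state access */
    };

    /* Pick the storage key governing the current access: Kp when the
     * CPU is in problem state, Ks when it is in supervisor state. */
    static bool effective_key(unsigned long msr, const struct slb_entry *slbe)
    {
        return (msr & MSR_PR) ? slbe->Kp : slbe->Ks;
    }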
|
/linux/arch/powerpc/kernel/
head_32.h
     34  andi. r11, r11, MSR_PR
    125  andi. r12,r9,MSR_PR
|
exceptions-64e.S
     75  andi. r3,r3,MSR_PR
    138  andi. r3,r3,MSR_PR
    260  andi. r10,r11,MSR_PR; /* save stack pointer */ \
    589  andi. r0,r12,MSR_PR;
    606  andi. r0,r12,MSR_PR;
    727  1: andi. r14,r11,MSR_PR; /* check for userspace again */
    798  1: andi. r14,r11,MSR_PR; /* check for userspace again */
|
head_booke.h
     56  andi. r11, r11, MSR_PR; /* check whether user or kernel */ \
    108  andi. r12,r9,MSR_PR
    203  andi. r11,r11,MSR_PR; \
|
entry_32.S
    250  andi. r0,r4,MSR_PR
    383  andi. r3,r3,MSR_PR; \
|
interrupt_64.S
    394  andi. r0,r5,MSR_PR
    417  andi. r0,r4,MSR_PR
|
head_85xx.S
    440  andi. r10,r11,MSR_PR
    532  andi. r10,r11,MSR_PR
|
head_book3s_32.S
    329  andi. r11, r11, MSR_PR
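Every `andi.` hit in the assembly entries above is the same idiom: AND the saved MSR (usually a copy of SRR1) with MSR_PR, letting the dot-form instruction set CR0 so the following beq/bne can branch on where the trap came from. The C equivalent, with an illustrative function name:

    #define MSR_PR (1ul << 14)

    /* C rendering of "andi. rX,rY,MSR_PR; beq ..." in the entry code. */
    void handle_trap(unsigned long saved_msr)
    {
        if (saved_msr & MSR_PR) {
            /* Trap came from userspace: switch to the kernel stack,
             * save user register state, etc. */
        } else {
            /* Trap came from the kernel: the kernel stack is live. */
        }
    }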
|
/linux/arch/powerpc/include/asm/
reg_booke.h
     40  #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
     44  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
|
reg.h
     96  #define MSR_PR __MASK(MSR_PR_LG) /* Problem State / Privilege Level */  (macro)
    143  #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
    148  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
|
ptrace.h
    226  #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)
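reg.h builds MSR_PR from its bit position (MSR_PR_LG, bit 14) with __MASK, and user_mode() in ptrace.h is just that bit test against the saved MSR. A self-contained sketch tying the two together; the regs struct stands in for the kernel's struct pt_regs:

    #include <stdio.h>

    #define MSR_PR_LG 14                 /* bit position, per reg.h */
    #define __MASK(X) (1UL << (X))
    #define MSR_PR __MASK(MSR_PR_LG)     /* Problem State / Privilege Level */

    struct regs { unsigned long msr; };  /* stand-in for struct pt_regs */
    #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)

    int main(void)
    {
        struct regs kernel = { .msr = 0 };
        struct regs user   = { .msr = MSR_PR };

        printf("kernel context: user_mode=%d\n", user_mode(&kernel));
        printf("user context:   user_mode=%d\n", user_mode(&user));
        return 0;
    }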
|
/linux/arch/powerpc/platforms/pseries/
ras.c
    496  (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR|  in pSeries_system_reset_exception()
|
/linux/arch/powerpc/mm/book3s32/
hash_low.S
     74  andi. r0,r9,MSR_PR /* Check usermode */
|
/linux/arch/powerpc/mm/nohash/
tlb_low_64e.S
     66  andi. r10,r11,MSR_PR
|