Searched refs: MSR_FP (Results 1 – 18 of 18) sorted by relevance

/linux/arch/powerpc/kernel/
process.c
    124  if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))  in msr_check_and_set()
    142  if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))  in __msr_check_and_clear()
    157  msr &= ~(MSR_FP|MSR_FE0|MSR_FE1);  in __giveup_fpu()
    167  msr_check_and_set(MSR_FP);  in giveup_fpu()
    169  msr_check_and_clear(MSR_FP);  in giveup_fpu()
    189  if (tsk->thread.regs->msr & MSR_FP) {  in flush_fp_to_thread()
    211  cpumsr = msr_check_and_set(MSR_FP);  in enable_kernel_fp()
    213  if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) {  in enable_kernel_fp()
    308  WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    311  if (msr & MSR_FP)  in __giveup_vsx()
    [all …]
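
The process.c hits above are the usual entry points for using floating point inside the kernel: enable_kernel_fp() turns on MSR_FP via msr_check_and_set(MSR_FP), and disable_kernel_fp() (see the switch_to.h hit further down) turns it off again via msr_check_and_clear(MSR_FP). Below is a minimal sketch of that bracket pattern, not code from the tree; my_fp_helper() is a hypothetical caller, and the preempt_disable()/preempt_enable() pair reflects the usual expectation that FP state stays on this CPU while it is in use (check the enable_kernel_fp() implementation for the exact requirements).

    /*
     * Minimal sketch, assuming the enable_kernel_fp()/disable_kernel_fp()
     * helpers visible in these search results.  my_fp_helper() is hypothetical.
     */
    #include <linux/preempt.h>
    #include <asm/switch_to.h>      /* enable_kernel_fp(), disable_kernel_fp() */

    static void my_fp_helper(void)
    {
            preempt_disable();      /* keep the FP state on this CPU while in use */
            enable_kernel_fp();     /* sets MSR_FP via msr_check_and_set(MSR_FP) */

            /* ... FP instructions may be used here ... */

            disable_kernel_fp();    /* clears MSR_FP via msr_check_and_clear(MSR_FP) */
            preempt_enable();
    }
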
fpu.S
     98  ori r5,r5,MSR_FP|MSR_RI
    100  ori r5,r5,MSR_FP
    113  ori r9,r9,MSR_FP /* enable FP for current */
    119  ori r12,r12,MSR_FP
signal_64.c
    237  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
    285  if (msr & MSR_FP)  in setup_tm_sigcontexts()
    387  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));  in __unsafe_restore_sigcontext()
    506  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));  in restore_tm_sigcontexts()
    599  msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_sigcontexts()
    600  if (msr & MSR_FP) {  in restore_tm_sigcontexts()
    602  regs_set_return_msr(regs, regs->msr | (MSR_FP | tsk->thread.fpexc_mode));  in restore_tm_sigcontexts()
signal_32.c
    418  if (msr & MSR_FP)  in save_tm_user_regs_unsafe()
    536  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1));  in restore_user_regs()
    617  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1));  in restore_tm_user_regs()
    700  msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_user_regs()
    701  if (msr & MSR_FP) {  in restore_tm_user_regs()
    703  regs_set_return_msr(regs, regs->msr | (MSR_FP | current->thread.fpexc_mode));  in restore_tm_user_regs()
vector.S
    131  andi. r5,r12,MSR_FP
    204  ori r11,r10,MSR_FP
tm.S
    135  ori r15, r15, MSR_FP
    387  ori r5, r5, MSR_FP
interrupt.c
    213  unsigned long mathflags = MSR_FP;  in interrupt_exit_user_prepare_main()
cpu_setup_6xx.S
    310  ori r11,r10,MSR_FP
/linux/arch/powerpc/lib/
ldstfp.S
     23  ori r7, r6, MSR_FP
     47  ori r7, r6, MSR_FP
    213  ori r7, r6, MSR_FP
    228  ori r7, r6, MSR_FP
/linux/arch/powerpc/include/asm/
switch_to.h
     46  msr_check_and_clear(MSR_FP);  in disable_kernel_fp()
     85  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
reg.h
     97  #define MSR_FP __MASK(MSR_FP_LG) /* Floating Point enable */  (macro)
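
The reg.h hit shows that MSR_FP is built with the generic __MASK() helper from its bit position MSR_FP_LG. As a standalone illustration (not kernel code), the snippet below assumes __MASK(n) behaves like (1UL << n) and that MSR_FP_LG is 13, which would make MSR_FP equal 0x2000; the authoritative values are in arch/powerpc/include/asm/reg.h.

    /* Standalone illustration, not from the tree.  Assumptions: __MASK(n)
     * is roughly (1UL << n) and MSR_FP_LG is 13, giving MSR_FP == 0x2000. */
    #include <stdio.h>

    #define __MASK(n)  (1UL << (n))
    #define MSR_FP_LG  13                  /* assumed bit position of MSR[FP] */
    #define MSR_FP     __MASK(MSR_FP_LG)   /* Floating Point enable */

    int main(void)
    {
            unsigned long msr = 0;

            msr |= MSR_FP;                                 /* "enable FP" */
            printf("MSR_FP       = 0x%04lx\n", MSR_FP);    /* expect 0x2000 */
            printf("FP enabled   = %d\n", !!(msr & MSR_FP));

            msr &= ~MSR_FP;                                /* "give up FP" */
            printf("after giveup = %d\n", !!(msr & MSR_FP));
            return 0;
    }
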
/linux/arch/powerpc/kvm/
emulate_loadstore.c
     30  if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {  in kvmppc_check_fp_disabled()
    248  MSR_FP);  in kvmppc_emulate_loadstore()
tm.S
     44  ori r8, r8, MSR_FP
    238  ori r5, r5, MSR_FP
booke.c
    160  if (!(current->thread.regs->msr & MSR_FP)) {  in kvmppc_load_guest_fp()
    165  current->thread.regs->msr |= MSR_FP;  in kvmppc_load_guest_fp()
    177  if (current->thread.regs->msr & MSR_FP)  in kvmppc_save_guest_fp()
    188  vcpu->arch.shadow_msr &= ~MSR_FP;  in kvmppc_vcpu_sync_fpu()
    189  vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;  in kvmppc_vcpu_sync_fpu()
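
Lines 188 and 189 of booke.c show a common clear-then-copy idiom: only the MSR_FP bit of the guest's view (vcpu->arch.shared->msr) is mirrored into the shadow MSR that actually gets loaded on the CPU. The standalone snippet below reproduces just that idiom with made-up values; it is an illustration, not KVM code, and the MSR_FP value is the same assumption as in the previous sketch.

    /* Standalone illustration of the clear-then-copy idiom used in
     * kvmppc_vcpu_sync_fpu() above; all register values here are made up. */
    #include <stdio.h>

    #define MSR_FP  0x2000UL   /* assumed value, see asm/reg.h */

    int main(void)
    {
            unsigned long shadow_msr = 0xd03fUL;   /* pretend shadow MSR, FP clear */
            unsigned long guest_msr  = MSR_FP;     /* guest view has FP enabled */

            shadow_msr &= ~MSR_FP;                 /* drop the stale FP bit */
            shadow_msr |= guest_msr & MSR_FP;      /* copy the guest's FP bit */

            printf("shadow_msr = 0x%lx\n", shadow_msr);
            return 0;
    }
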
book3s_paired_singles.c
    658  if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {  in kvmppc_emulate_paired_single()
    663  kvmppc_giveup_ext(vcpu, MSR_FP);  in kvmppc_emulate_paired_single()
book3s_emulate.c
    737  kvmppc_giveup_ext(vcpu, MSR_FP);  in kvmppc_core_emulate_mtspr_pr()
book3s_hv_p9_entry.c
    502  msr_needed |= MSR_FP;  in kvmppc_msr_hard_disable_set_facilities()
powerpc.c
   1183  vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_FP);  in kvmppc_complete_mmio_load()