Searched refs:MSR_VSX (Results 1 – 16 of 16) sorted by relevance
/linux/arch/powerpc/kernel/
signal_64.c
   163  msr &= ~MSR_VSX;   in __unsafe_setup_sigcontext()
   176  msr |= MSR_VSX;   in __unsafe_setup_sigcontext()
   237  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);   in setup_tm_sigcontexts()
   302  if (msr & MSR_VSX)   in setup_tm_sigcontexts()
   310  msr |= MSR_VSX;   in setup_tm_sigcontexts()
   387  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));   in __unsafe_restore_sigcontext()
   418  if ((msr & MSR_VSX) != 0) {   in __unsafe_restore_sigcontext()
   506  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));   in restore_tm_sigcontexts()
   552  if (v_regs && ((msr & MSR_VSX) != 0)) {   in restore_tm_sigcontexts()
   678  (new_msr & MSR_VSX))   in SYSCALL_DEFINE3()
signal_32.c
   302  msr &= ~MSR_VSX;   in __unsafe_save_user_regs()
   312  msr |= MSR_VSX;   in __unsafe_save_user_regs()
   431  if (msr & MSR_VSX)   in save_tm_user_regs_unsafe()
   436  msr |= MSR_VSX;   in save_tm_user_regs_unsafe()
   520  regs_set_return_msr(regs, regs->msr & ~MSR_VSX);   in restore_user_regs()
   521  if (msr & MSR_VSX) {   in restore_user_regs()
   621  regs_set_return_msr(regs, regs->msr & ~MSR_VSX);   in restore_tm_user_regs()
   622  if (msr & MSR_VSX) {   in restore_tm_user_regs()
   653  if (msr & MSR_VSX) {   in restore_tm_user_regs()
  1029  (new_msr & MSR_VSX))   in COMPAT_SYSCALL_DEFINE3()
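Both signal_64.c and signal_32.c use MSR_VSX as a validity flag in the saved sigcontext: the bit is cleared by default and set only when VSX state was actually written to the frame, and the restore path tests it before reloading the VSR doublewords. A minimal standalone sketch of that flagging idiom (the helper names and the bit position here are illustrative, not kernel code):

#include <stdbool.h>
#include <stdint.h>

#define MSR_VSX (1UL << 23)   /* assumed bit position; see the reg.h hit below */

/* Mark whether VSX state was written into the signal frame,
 * mirroring the msr &= ~MSR_VSX / msr |= MSR_VSX pattern above. */
uint64_t mark_vsx_saved(uint64_t msr, bool vsx_saved)
{
	msr &= ~MSR_VSX;          /* default: no VSX state in the frame */
	if (vsx_saved)
		msr |= MSR_VSX;   /* VSR doublewords in the frame are valid */
	return msr;
}

/* Restore-side check, as in __unsafe_restore_sigcontext() / restore_user_regs() */
bool frame_has_vsx(uint64_t saved_msr)
{
	return (saved_msr & MSR_VSX) != 0;
}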
fpu.S
   104  oris r5,r5,MSR_VSX@h
tm.S
   142  oris r15,r15, MSR_VSX@h
   393  oris r5,r5, MSR_VSX@h
interrupt.c
   212  mathflags |= MSR_VEC | MSR_VSX;   in interrupt_exit_user_prepare_main()
vector.S
   147  oris r12,r12,MSR_VSX@h
traps.c
   975  msr_mask = MSR_VSX;   in p9_hmi_special_emu()
exceptions-64s.S
  2655  oris r10,r10,MSR_VSX@h
/linux/arch/powerpc/lib/
ldstfp.S
   162  oris r7,r6,MSR_VSX@h
   189  oris r7,r6,MSR_VSX@h
/linux/arch/powerpc/kvm/
emulate_loadstore.c
    42  if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {   in kvmppc_check_vsx_disabled()
   317  MSR_VSX);   in kvmppc_emulate_loadstore()
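The emulate_loadstore.c hit is the guard KVM applies before emulating a VSX load or store: if the guest's MSR has VSX disabled, emulation is refused and the guest should instead see a VSX-unavailable interrupt. A hedged, self-contained sketch of that check (the struct and helper names here are stand-ins, not the KVM API):

#include <stdbool.h>
#include <stdint.h>

#define MSR_VSX (1UL << 23)          /* assumed bit position, as in reg.h */

struct vcpu { uint64_t guest_msr; }; /* stand-in for struct kvm_vcpu */

/* Mirrors the shape of kvmppc_check_vsx_disabled(): do not emulate a
 * VSX instruction on behalf of a guest that has VSX turned off. */
bool vsx_disabled(const struct vcpu *vcpu)
{
	if (!(vcpu->guest_msr & MSR_VSX)) {
		/* the real code queues a "VSX unavailable" interrupt to the guest here */
		return true;
	}
	return false;
}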
tm.S
    45  oris r8, r8, (MSR_VEC | MSR_VSX)@h
   239  oris r5, r5, (MSR_VEC | MSR_VSX)@h
book3s_emulate.c
   177  kvmppc_giveup_ext(vcpu, MSR_VSX);   in kvmppc_emulate_trchkpt()
book3s_hv_p9_entry.c
   506  msr_needed |= MSR_VSX;   in kvmppc_msr_hard_disable_set_facilities()
book3s_hv_rmhandlers.S
  2454  oris r8,r8,MSR_VSX@h
  2490  oris r8,r8,MSR_VSX@h
/linux/arch/powerpc/include/asm/
switch_to.h
    85  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);   in disable_kernel_vsx()
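disable_kernel_vsx() clears MSR_FP, MSR_VEC and MSR_VSX together because VSX shares its register file with the FP and VMX units, so the three facilities are granted and revoked as a unit. A sketch of the usual in-kernel usage pattern around enable_kernel_vsx()/disable_kernel_vsx(), assuming kernel context with preemption disabled for the duration (illustrative, not an exact call site):

#include <linux/preempt.h>
#include <asm/switch_to.h>

/* Typical shape of in-kernel VSX use: enable the facilities for the
 * current CPU, run the VSX instructions, then give them back. */
static void do_vsx_work(void)
{
	preempt_disable();        /* the enabled state is per-CPU */
	enable_kernel_vsx();      /* flushes the task's FP/VEC/VSX state, sets the MSR bits */

	/* ... inline-asm / intrinsic VSX operations go here ... */

	disable_kernel_vsx();     /* msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX) */
	preempt_enable();
}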
reg.h
    89  #define MSR_VSX __MASK(MSR_VSX_LG) /* Enable VSX */   (macro definition)
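The reg.h hit is the definition itself: MSR_VSX is a single-bit mask built from its bit number (MSR_VSX_LG) via __MASK(), which is why the call sites above can freely OR it with MSR_FP and MSR_VEC or test it with a plain AND. A small worked example of composing and testing such masks (the bit numbers shown are assumptions for illustration; the authoritative values are the *_LG constants in reg.h):

#include <stdint.h>
#include <stdio.h>

#define __MASK(x)  (1UL << (x))

/* Assumed bit numbers, for illustration only. */
#define MSR_FP_LG   13
#define MSR_VSX_LG  23
#define MSR_VEC_LG  25

#define MSR_FP   __MASK(MSR_FP_LG)
#define MSR_VSX  __MASK(MSR_VSX_LG)   /* Enable VSX */
#define MSR_VEC  __MASK(MSR_VEC_LG)

int main(void)
{
	uint64_t msr = 0;

	msr |= MSR_FP | MSR_VEC | MSR_VSX;              /* enable all three facilities */
	printf("VSX on: %d\n", (msr & MSR_VSX) != 0);   /* prints 1 */

	msr &= ~MSR_VSX;                                /* drop just VSX */
	printf("VSX on: %d\n", (msr & MSR_VSX) != 0);   /* prints 0 */
	return 0;
}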