Searched refs:MSR_VEC (Results 1 – 14 of 14) sorted by relevance
/linux/arch/powerpc/kernel/
signal_64.c
  147  msr |= MSR_VEC;  in __unsafe_setup_sigcontext()
  237  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
  251  if (msr & MSR_VEC)  in setup_tm_sigcontexts()
  263  msr |= MSR_VEC;  in setup_tm_sigcontexts()
  271  if (msr & MSR_VEC)  in setup_tm_sigcontexts()
  387  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));  in __unsafe_restore_sigcontext()
  394  if (v_regs != NULL && (msr & MSR_VEC) != 0) {  in __unsafe_restore_sigcontext()
  506  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));  in restore_tm_sigcontexts()
  518  if (v_regs != NULL && tm_v_regs != NULL && (msr & MSR_VEC) != 0) {  in restore_tm_sigcontexts()
  599  msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_sigcontexts()
  [all …]
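
Note: these hits form one save/restore pattern. On signal delivery, MSR_VEC is
set in the MSR word written to the frame only if the task has live AltiVec
state; on sigreturn, the math bits are cleared from the return MSR first, and
the vector registers are copied back only when the saved bit says they were in
use. A kernel-context sketch of the restore side, assuming a simplified,
hypothetical frame layout (sigframe_sketch) in place of the real sigcontext:

    /* Hypothetical frame: just the fields this sketch needs. */
    struct sigframe_sketch {
        unsigned long msr;              /* MSR snapshot from delivery */
        elf_vrreg_t __user *v_regs;     /* saved VRs in user memory, or NULL */
    };

    static int sketch_restore_vec(struct pt_regs *regs,
                                  const struct sigframe_sketch *sc)
    {
        /* Drop the math bits; they are re-granted lazily on next use. */
        regs_set_return_msr(regs, regs->msr &
                        ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));

        /* Reload vector state only if the frame marked it live. */
        if (sc->v_regs != NULL && (sc->msr & MSR_VEC) != 0) {
            if (copy_from_user(&current->thread.vr_state, sc->v_regs,
                               sizeof(current->thread.vr_state)))
                return -EFAULT;
        } else {
            memset(&current->thread.vr_state, 0,
                   sizeof(current->thread.vr_state));
        }
        return 0;
    }
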
signal_32.c
  283  msr |= MSR_VEC;  in __unsafe_save_user_regs()
  388  if (msr & MSR_VEC)  in save_tm_user_regs_unsafe()
  400  msr |= MSR_VEC;  in save_tm_user_regs_unsafe()
  410  if (msr & MSR_VEC)  in save_tm_user_regs_unsafe()
  498  regs_set_return_msr(regs, regs->msr & ~MSR_VEC);  in restore_user_regs()
  499  if (msr & MSR_VEC) {  in restore_user_regs()
  598  regs_set_return_msr(regs, regs->msr & ~MSR_VEC);  in restore_tm_user_regs()
  599  if (msr & MSR_VEC) {  in restore_tm_user_regs()
  643  if (msr & MSR_VEC)  in restore_tm_user_regs()
  700  msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_user_regs()
  [all …]
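
Note: signal_32.c mirrors the 64-bit pattern; the save side is the half not
sketched above. The frame's MSR advertises MSR_VEC only when the task really
used AltiVec, which is the bit restore_user_regs() later keys off. A sketch,
assuming the thread's used_vr flag as the "has live vector state" test:

    /* Sketch of the save-side convention (simplified kernel context). */
    static unsigned long sketch_frame_msr(struct task_struct *tsk,
                                          unsigned long msr)
    {
        if (tsk->thread.used_vr)    /* VRs were touched at some point, */
            msr |= MSR_VEC;         /* so sigreturn must reload them   */
        else
            msr &= ~MSR_VEC;
        return msr;
    }
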
vector.S
   56  oris r5,r5,MSR_VEC@h
   75  oris r9,r9,MSR_VEC@h
   79  oris r12,r12,MSR_VEC@h
  133  andis. r5,r12,MSR_VEC@h
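
Note: every assembly hit uses the @h idiom. MSR_VEC is 1 << 25 (0x02000000),
which sits in the upper halfword of the MSR, so 16-bit-immediate instructions
can only reach it in their shifted forms: oris to set the bit, andis. to test
it. A small userspace C illustration of the same arithmetic (not kernel code):

    #include <assert.h>
    #include <stdint.h>

    #define MSR_VEC_LG 25                   /* bit position, as in reg.h */
    #define MSR_VEC    (1UL << MSR_VEC_LG)  /* 0x02000000 */

    int main(void)
    {
        uint64_t msr = 0;

        /* "oris rX,rX,MSR_VEC@h": OR an immediate into the high halfword. */
        msr |= (uint64_t)(MSR_VEC >> 16) << 16;
        assert(msr == 0x02000000UL);

        /* "andis. rX,rY,MSR_VEC@h": AND against the high halfword, test. */
        assert((msr & MSR_VEC) != 0);
        return 0;
    }
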
interrupt.c
  212  mathflags |= MSR_VEC | MSR_VSX;  in interrupt_exit_user_prepare_main()
  214  mathflags |= MSR_VEC;  in interrupt_exit_user_prepare_main()
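
Note: these two hits choose which math facilities to hand back to userspace on
interrupt exit: machines with VSX get MSR_VEC | MSR_VSX together (VSX overlays
the VMX register file), AltiVec-only machines get just MSR_VEC. A kernel-context
sketch of that selection, closely modeled on the code around the hits:

    /* Sketch: build the set of math MSR bits to restore for userspace. */
    static unsigned long sketch_mathflags(void)
    {
        unsigned long mathflags = MSR_FP;

        if (cpu_has_feature(CPU_FTR_VSX))
            mathflags |= MSR_VEC | MSR_VSX;   /* VSX implies the VRs */
        else if (cpu_has_feature(CPU_FTR_ALTIVEC))
            mathflags |= MSR_VEC;

        return mathflags;
    }
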
tm.S
  139  oris r15, r15, MSR_VEC@h
  389  oris r5, r5, MSR_VEC@h

traps.c
  977  msr_mask = MSR_VEC;  in p9_hmi_special_emu()

/linux/arch/powerpc/include/asm/ |
switch_to.h
   64  msr_check_and_clear(MSR_VEC);  in disable_kernel_altivec()
   85  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
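
Note: disable_kernel_altivec() is the closing half of the bracket that kernel
code must wrap around any in-kernel AltiVec use; msr_check_and_clear(MSR_VEC)
drops the facility again so user FP/VMX state handling stays consistent. A
usage sketch of the bracket, with the vector work left as a placeholder
(preemption must stay disabled across the region):

    #include <asm/switch_to.h>      /* enable/disable_kernel_altivec() */
    #include <linux/preempt.h>

    static void sketch_vmx_region(void)
    {
        preempt_disable();
        enable_kernel_altivec();    /* grants MSR_VEC to kernel context */

        /* ... AltiVec (VMX) loads/stores/permutes would go here ... */

        disable_kernel_altivec();   /* msr_check_and_clear(MSR_VEC) */
        preempt_enable();
    }
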
reg.h
   88  #define MSR_VEC __MASK(MSR_VEC_LG)  /* Enable AltiVec */
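
Note: this is the definition every other hit resolves to; __MASK() turns the
bit number MSR_VEC_LG into a mask. Expanded (the exact __MASK spelling varies
by tree, so take the literal below as illustrative):

    #define MSR_VEC_LG  25                      /* AltiVec bit position  */
    #define __MASK(X)   (1UL << (X))            /* spelling varies by tree */
    #define MSR_VEC     __MASK(MSR_VEC_LG)      /* == 0x02000000 */
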
/linux/arch/powerpc/lib/ |
ldstfp.S
   72  oris r7, r6, MSR_VEC@h
   96  oris r7, r6, MSR_VEC@h

/linux/arch/powerpc/kvm/ |
emulate_loadstore.c
   54  if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) {  in kvmppc_check_altivec_disabled()
  272  MSR_VEC);  in kvmppc_emulate_loadstore()
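
Note: the line-54 hit enforces the architectural rule that a vector load/store
executed while the guest's MSR_VEC is clear must raise an AltiVec-unavailable
interrupt rather than be emulated. A sketch of that gate: kvmppc_get_msr() is
the real accessor, but the injection helper below is hedged (real kernels
provide a kvmppc_core_queue_* helper for this; exact name and signature vary
by version):

    /* Sketch: refuse to emulate a VMX access while the guest has VEC off. */
    static bool sketch_altivec_disabled(struct kvm_vcpu *vcpu)
    {
        if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) {
            /* queue an AltiVec-unavailable exception to the guest
             * (helper name illustrative, see note above) */
            kvmppc_core_queue_vec_unavail(vcpu);
            return true;            /* caller must skip emulation */
        }
        return false;
    }
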
tm.S
   45  oris r8, r8, (MSR_VEC | MSR_VSX)@h
  239  oris r5, r5, (MSR_VEC | MSR_VSX)@h

booke.c
  202  if (!(current->thread.regs->msr & MSR_VEC)) {  in kvmppc_load_guest_altivec()
  207  current->thread.regs->msr |= MSR_VEC;  in kvmppc_load_guest_altivec()
  221  if (current->thread.regs->msr & MSR_VEC)  in kvmppc_save_guest_altivec()
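
Note: this pair does lazy facility management around guest entry and exit:
load the vector unit and set MSR_VEC in the host thread's MSR only if it is
not already live, and flush the state back out on exit only if the bit is
still set. A sketch of both halves, assuming the real load_up_altivec() /
giveup_altivec() entry points and CONFIG_ALTIVEC:

    static void sketch_load_guest_altivec(void)
    {
        preempt_disable();
        if (!(current->thread.regs->msr & MSR_VEC)) {
            load_up_altivec(NULL);                  /* fill the VRs   */
            current->thread.regs->msr |= MSR_VEC;   /* mark them live */
        }
        preempt_enable();
    }

    static void sketch_save_guest_altivec(void)
    {
        preempt_disable();
        if (current->thread.regs->msr & MSR_VEC)
            giveup_altivec(current);    /* flush VRs back to the thread */
        preempt_enable();
    }
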
book3s_hv_p9_entry.c
  504  msr_needed |= MSR_VEC;  in kvmppc_msr_hard_disable_set_facilities()

book3s_hv_rmhandlers.S
  2449  oris r8,r8,MSR_VEC@h
  2485  oris r8,r8,MSR_VEC@h