
Searched refs:MSR_VSX (Results 1 – 21 of 21) sorted by relevance

/linux/arch/powerpc/kernel/
process.c  130 newmsr |= MSR_VSX; in msr_check_and_set()
148 newmsr &= ~MSR_VSX; in __msr_check_and_clear()
164 msr &= ~MSR_VSX; in __giveup_fpu()
247 msr &= ~MSR_VSX; in __giveup_altivec()
326 msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX); in giveup_vsx()
328 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in giveup_vsx()
418 msr_all_available |= MSR_VSX; in init_msr_all_available()
540 new_msr |= MSR_VSX; in restore_math()
558 if (new_msr & MSR_VSX) in restore_math()
1107 msr_diff &= MSR_FP | MSR_VEC | MSR_VSX; in restore_tm_state()
[all …]
signal_64.c  158 msr &= ~MSR_VSX; in __unsafe_setup_sigcontext()
171 msr |= MSR_VSX; in __unsafe_setup_sigcontext()
232 msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); in setup_tm_sigcontexts()
297 if (msr & MSR_VSX) in setup_tm_sigcontexts()
305 msr |= MSR_VSX; in setup_tm_sigcontexts()
379 regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX)); in __unsafe_restore_sigcontext()
410 if ((msr & MSR_VSX) != 0) { in __unsafe_restore_sigcontext()
498 regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX)); in restore_tm_sigcontexts()
544 if (v_regs && ((msr & MSR_VSX) != 0)) { in restore_tm_sigcontexts()
670 (new_msr & MSR_VSX)) in SYSCALL_DEFINE3()
signal_32.c  295 msr &= ~MSR_VSX; in __unsafe_save_user_regs()
305 msr |= MSR_VSX; in __unsafe_save_user_regs()
423 if (msr & MSR_VSX) in save_tm_user_regs_unsafe()
428 msr |= MSR_VSX; in save_tm_user_regs_unsafe()
511 regs_set_return_msr(regs, regs->msr & ~MSR_VSX); in restore_user_regs()
512 if (msr & MSR_VSX) { in restore_user_regs()
608 regs_set_return_msr(regs, regs->msr & ~MSR_VSX); in restore_tm_user_regs()
609 if (msr & MSR_VSX) { in restore_tm_user_regs()
640 if (msr & MSR_VSX) { in restore_tm_user_regs()
1016 (new_msr & MSR_VSX)) in COMPAT_SYSCALL_DEFINE3()
fpu.S  87 oris r5,r5,MSR_VSX@h
tm.S  142 oris r15,r15, MSR_VSX@h
396 oris r5,r5, MSR_VSX@h
vector.S  134 oris r12,r12,MSR_VSX@h
interrupt.c  373 mathflags |= MSR_VEC | MSR_VSX; in interrupt_exit_user_prepare_main()
traps.c  959 msr_mask = MSR_VSX; in p9_hmi_special_emu()
exceptions-64s.S  2579 oris r10,r10,MSR_VSX@h
/linux/arch/powerpc/lib/
ldstfp.S  162 oris r7,r6,MSR_VSX@h
189 oris r7,r6,MSR_VSX@h
test_emulate_step.c  75 regs->msr |= MSR_VSX; in init_pt_regs()
sstep.c  3454 unsigned long msrbit = MSR_VSX; in emulate_loadstore()
3525 unsigned long msrbit = MSR_VSX; in emulate_loadstore()
/linux/arch/powerpc/kvm/
emulate_loadstore.c  42 if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) { in kvmppc_check_vsx_disabled()
315 MSR_VSX); in kvmppc_emulate_loadstore()
book3s_pr.c  173 kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_core_vcpu_put_pr()
362 (MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_handle_lost_math_exts()
385 kvmppc_giveup_ext(vcpu, MSR_VSX); in kvmppc_save_tm_pr()
807 if (msr & MSR_VSX) in kvmppc_giveup_ext()
837 vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX); in kvmppc_giveup_ext()
875 if (msr == MSR_VSX) { in kvmppc_handle_ext()
889 msr = MSR_FP | MSR_VEC | MSR_VSX; in kvmppc_handle_ext()
1369 ext_msr = MSR_VSX; in kvmppc_handle_exit_pr()
1847 kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_vcpu_run_pr()
tm.S  45 oris r8, r8, (MSR_VEC | MSR_VSX)@h
239 oris r5, r5, (MSR_VEC | MSR_VSX)@h
book3s_emulate.c  177 kvmppc_giveup_ext(vcpu, MSR_VSX); in kvmppc_emulate_trchkpt()
book3s_hv_rmhandlers.S  2469 oris r8,r8,MSR_VSX@h
2504 oris r8,r8,MSR_VSX@h
powerpc.c  1188 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VSX); in kvmppc_complete_mmio_load()
book3s_hv.c  3919 msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX); in kvmhv_p9_guest_entry()
4028 msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX); in kvmhv_p9_guest_entry()
/linux/arch/powerpc/include/asm/
switch_to.h  72 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in disable_kernel_vsx()
reg.h  89 #define MSR_VSX __MASK(MSR_VSX_LG) /* Enable VSX */ macro
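
Note: the reg.h hit above is the definition site. MSR_VSX is a single-bit mask built with __MASK() from its log-2 bit position (MSR_VSX_LG), and every other hit in this list either ORs that bit in, masks it out, or tests it. The following is a minimal, self-contained user-space C sketch of that pattern, not kernel code; the MSR_*_LG bit positions are assumptions written from memory, so treat arch/powerpc/include/asm/reg.h as authoritative.

#include <stdio.h>

#define __MASK(X)   (1UL << (X))
#define MSR_FP_LG   13   /* assumed bit position; authoritative value is in reg.h */
#define MSR_VEC_LG  25   /* assumed bit position; authoritative value is in reg.h */
#define MSR_VSX_LG  23   /* assumed bit position; authoritative value is in reg.h */

#define MSR_FP      __MASK(MSR_FP_LG)
#define MSR_VEC     __MASK(MSR_VEC_LG)
#define MSR_VSX     __MASK(MSR_VSX_LG)

int main(void)
{
	unsigned long msr = 0;

	/* msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX) style: enable all three units */
	msr |= MSR_FP | MSR_VEC | MSR_VSX;

	/* "if (msr & MSR_VSX)" style: test whether VSX is currently enabled */
	if (msr & MSR_VSX)
		printf("VSX enabled, MSR_VSX mask = 0x%lx\n", MSR_VSX);

	/* "msr &= ~MSR_VSX" style: clear the bit again, as in the giveup paths */
	msr &= ~MSR_VSX;

	return 0;
}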
