
Searched refs: MSR_VSX (Results 1 – 16 of 16) sorted by relevance

/arch/powerpc/kernel/
process.c
  126   newmsr |= MSR_VSX;  in msr_check_and_set()
  144   newmsr &= ~MSR_VSX;  in __msr_check_and_clear()
  162   msr &= ~MSR_VSX;  in __giveup_fpu()
  258   msr &= ~MSR_VSX;  in __giveup_altivec()
  340   WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
  353   msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
  355   msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
  364   cpumsr = msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in enable_kernel_vsx()
  367   (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) {  in enable_kernel_vsx()
  387   if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) {  in flush_vsx_to_thread()
  [all …]
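The process.c hits above are the core of the lazy-state machinery: msr_check_and_set()/msr_check_and_clear() flip the live MSR bits, and enable_kernel_vsx() builds on them. As a minimal sketch of how an in-kernel caller typically brackets VSX use (the function name vsx_block_copy and the preempt_disable()/preempt_enable() bracketing are illustrative assumptions, not code from this index):

    /* Sketch: bracketing in-kernel VSX use with the helpers indexed above.
     * enable_kernel_vsx() sets MSR_FP|MSR_VEC|MSR_VSX via msr_check_and_set()
     * (process.c:364); disable_kernel_vsx() clears the same bits via
     * msr_check_and_clear() (switch_to.h:62, further down this page). */
    #include <linux/preempt.h>
    #include <linux/types.h>
    #include <asm/switch_to.h>

    static void vsx_block_copy(void *dst, const void *src, size_t bytes)
    {
            preempt_disable();      /* VSX state must not be switched away mid-use */
            enable_kernel_vsx();
            /* ... VSX loads/stores on dst/src would go here ... */
            disable_kernel_vsx();
            preempt_enable();
    }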
signal_64.c
  151   msr &= ~MSR_VSX;  in setup_sigcontext()
  165   msr |= MSR_VSX;  in setup_sigcontext()
  287   if (msr & MSR_VSX)  in setup_tm_sigcontexts()
  295   msr |= MSR_VSX;  in setup_tm_sigcontexts()
  368   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_sigcontext()
  401   if ((msr & MSR_VSX) != 0) {  in restore_sigcontext()
  483   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_tm_sigcontexts()
  530   if (v_regs && ((msr & MSR_VSX) != 0)) {  in restore_tm_sigcontexts()
  654   (new_msr & MSR_VSX))  in sys_swapcontext()
signal_32.c
  453   msr &= ~MSR_VSX;  in save_user_regs()
  465   msr |= MSR_VSX;  in save_user_regs()
  608   if (msr & MSR_VSX) {  in save_tm_user_regs()
  617   msr |= MSR_VSX;  in save_tm_user_regs()
  716   regs->msr &= ~MSR_VSX;  in restore_user_regs()
  717   if (msr & MSR_VSX) {  in restore_user_regs()
  826   regs->msr &= ~MSR_VSX;  in restore_tm_user_regs()
  827   if (msr & MSR_VSX) {  in restore_tm_user_regs()
  1179  (new_msr & MSR_VSX))  in sys_swapcontext()
fpu.S
  90    oris r5,r5,MSR_VSX@h
tm.S
  125   oris r15,r15, MSR_VSX@h
  372   oris r5, r5, MSR_VSX@h
traps.c
  1523  regs->msr |= MSR_VSX;  in fp_unavailable_tm()
  1544  regs->msr |= MSR_VSX;  in altivec_unavailable_tm()
  1567  regs->msr |= MSR_VSX;  in vsx_unavailable_tm()
  1575  MSR_VSX;  in vsx_unavailable_tm()
vector.S
  130   oris r12,r12,MSR_VSX@h
exceptions-64s.S
  1301  oris r10,r10,MSR_VSX@h
/arch/powerpc/include/asm/
switch_to.h
  62    msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
reg.h
  88    #define MSR_VSX __MASK(MSR_VSX_LG) /* Enable VSX */  (macro definition)
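For reference, a minimal hand expansion of this definition (assuming __MASK(X) is (1UL << (X)) and MSR_VSX_LG is 23, both taken from the same header; verify against your tree):

    /* Sketch: the reg.h:88 macro expanded step by step. */
    #define MSR_VSX_LG 23                  /* bit position of the VSX-enable bit */
    #define __MASK(X)  (1UL << (X))
    #define MSR_VSX    __MASK(MSR_VSX_LG)  /* == 1UL << 23 == 0x800000UL */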
/arch/powerpc/lib/
ldstfp.S
  167   oris r7,r6,MSR_VSX@h
  194   oris r7,r6,MSR_VSX@h
test_emulate_step.c
  69    regs->msr |= MSR_VSX;  in init_pt_regs()
sstep.c
  2841  unsigned long msrbit = MSR_VSX;  in emulate_loadstore()
  2912  unsigned long msrbit = MSR_VSX;  in emulate_loadstore()
/arch/powerpc/kvm/
book3s_pr.c
  134   kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
  662   if (msr & MSR_VSX)  in kvmppc_giveup_ext()
  692   vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);  in kvmppc_giveup_ext()
  730   if (msr == MSR_VSX) {  in kvmppc_handle_ext()
  744   msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
  1210  ext_msr = MSR_VSX;  in kvmppc_handle_exit_pr()
  1562  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_vcpu_run_pr()
emulate_loadstore.c
  52    if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {  in kvmppc_check_vsx_disabled()
book3s_hv_rmhandlers.S
  2879  oris r8,r8,MSR_VSX@h
  2914  oris r8,r8,MSR_VSX@h
  3064  oris r5, r5, (MSR_VEC | MSR_VSX)@h
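Every assembly hit on this page sets the bit with oris plus the @h operator rather than a plain ori: MSR_VSX sits above bit 15 of the MSR, so its low halfword is zero and only the shifted-immediate form can reach it. A small standalone check of that arithmetic (a sketch; the bit position 23 is an assumption taken from reg.h):

    /* Sketch: why the .S hits use "oris rN,rN,MSR_VSX@h".
     * oris ORs (immediate << 16) into the register, and @h yields the
     * high halfword of a 32-bit constant, so one instruction can set a
     * bit above bit 15. Assumes MSR_VSX_LG == 23 per reg.h. */
    #include <assert.h>

    int main(void)
    {
            unsigned long msr_vsx = 1UL << 23;                /* MSR_VSX */
            assert((msr_vsx & 0xffffUL) == 0);                /* a plain "ori" immediate can't set it */
            assert(((msr_vsx >> 16) & 0xffffUL) == 0x0080);   /* the MSR_VSX@h immediate used by oris */
            return 0;
    }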