Searched refs:MSR_VSX (Results 1 – 16 of 16) sorted by relevance
/arch/powerpc/kernel/ |
D | process.c |
    126  newmsr |= MSR_VSX;  in msr_check_and_set()
    144  newmsr &= ~MSR_VSX;  in __msr_check_and_clear()
    162  msr &= ~MSR_VSX;  in __giveup_fpu()
    258  msr &= ~MSR_VSX;  in __giveup_altivec()
    340  WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    353  msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    355  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    364  cpumsr = msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in enable_kernel_vsx()
    367  (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) {  in enable_kernel_vsx()
    387  if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) {  in flush_vsx_to_thread()
    [all …]
|
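The process.c hits are the lazy facility management behind kernel-mode VSX use: msr_check_and_set()/msr_check_and_clear() flip the bits in the live MSR, giveup_vsx() and flush_vsx_to_thread() push live register state back into the thread struct, and enable_kernel_vsx() is the entry point in-kernel users call. A minimal sketch of how such a user is expected to bracket its VSX code, assuming a hypothetical do_vsx_copy() routine that contains the actual VSX instructions:

#include <linux/preempt.h>
#include <linux/types.h>
#include <asm/switch_to.h>

/* Hypothetical worker containing the actual VSX instructions (e.g. in asm). */
extern void do_vsx_copy(void *dst, const void *src, size_t len);

static void vsx_copy(void *dst, const void *src, size_t len)
{
	/*
	 * Once enable_kernel_vsx() has run, the VSX state belongs to this
	 * CPU, so the section must not be preempted or migrated.
	 */
	preempt_disable();
	enable_kernel_vsx();	/* msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX) under the hood */

	do_vsx_copy(dst, src, len);

	disable_kernel_vsx();	/* msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX), see switch_to.h below */
	preempt_enable();
}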
D | signal_64.c |
    151  msr &= ~MSR_VSX;  in setup_sigcontext()
    165  msr |= MSR_VSX;  in setup_sigcontext()
    287  if (msr & MSR_VSX)  in setup_tm_sigcontexts()
    295  msr |= MSR_VSX;  in setup_tm_sigcontexts()
    368  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_sigcontext()
    401  if ((msr & MSR_VSX) != 0) {  in restore_sigcontext()
    483  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_tm_sigcontexts()
    530  if (v_regs && ((msr & MSR_VSX) != 0)) {  in restore_tm_sigcontexts()
    654  (new_msr & MSR_VSX))  in sys_swapcontext()
|
D | signal_32.c |
    453  msr &= ~MSR_VSX;  in save_user_regs()
    465  msr |= MSR_VSX;  in save_user_regs()
    608  if (msr & MSR_VSX) {  in save_tm_user_regs()
    617  msr |= MSR_VSX;  in save_tm_user_regs()
    716  regs->msr &= ~MSR_VSX;  in restore_user_regs()
    717  if (msr & MSR_VSX) {  in restore_user_regs()
    826  regs->msr &= ~MSR_VSX;  in restore_tm_user_regs()
    827  if (msr & MSR_VSX) {  in restore_tm_user_regs()
    1179  (new_msr & MSR_VSX))  in sys_swapcontext()
|
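Both signal_64.c and signal_32.c use MSR_VSX in the saved MSR as the marker that the signal frame actually carries VSX state: the save path sets the bit only after copying the registers out, and the restore path clears MSR_VSX from regs->msr (the facility comes back lazily via the unavailable trap) and only copies the registers back in when the bit was set. A condensed sketch of that pattern; the copy helper signatures and the vsx_frame pointer are assumptions, and all error handling, frame layout, and TM variants are omitted:

#include <linux/errno.h>
#include <linux/sched.h>
#include <asm/reg.h>
#include <asm/ptrace.h>
#include <asm/switch_to.h>

/* Declared in the kernel's local signal.h; signatures assumed here. */
unsigned long copy_vsx_to_user(void __user *to, struct task_struct *task);
unsigned long copy_vsx_from_user(struct task_struct *task, void __user *from);

/* Save side, as in setup_sigcontext()/save_user_regs() (condensed). */
static int sketch_save_vsx(struct task_struct *tsk, void __user *vsx_frame,
			   unsigned long *msr)
{
	*msr &= ~MSR_VSX;
	if (tsk->thread.used_vsr) {
		flush_vsx_to_thread(tsk);
		if (copy_vsx_to_user(vsx_frame, tsk))
			return -EFAULT;
		*msr |= MSR_VSX;	/* tells the restore path the data is there */
	}
	return 0;
}

/* Restore side, as in restore_sigcontext()/restore_user_regs() (condensed). */
static int sketch_restore_vsx(struct task_struct *tsk, struct pt_regs *regs,
			      void __user *vsx_frame, unsigned long msr)
{
	regs->msr &= ~MSR_VSX;		/* facility is re-enabled lazily on next use */
	if (msr & MSR_VSX) {
		if (copy_vsx_from_user(tsk, vsx_frame))
			return -EFAULT;
	}
	return 0;
}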
D | fpu.S | 90 oris r5,r5,MSR_VSX@h
|
D | tm.S |
    125  oris r15,r15, MSR_VSX@h
    372  oris r5, r5, MSR_VSX@h
|
D | traps.c |
    1523  regs->msr |= MSR_VSX;  in fp_unavailable_tm()
    1544  regs->msr |= MSR_VSX;  in altivec_unavailable_tm()
    1567  regs->msr |= MSR_VSX;  in vsx_unavailable_tm()
    1575  MSR_VSX;  in vsx_unavailable_tm()
|
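The common rule behind these traps.c hits (and behind the WARN_ON in __giveup_vsx() above) is that MSR_VSX is only meaningful when MSR_FP and MSR_VEC are also set, so the TM facility-unavailable handlers turn VSX back on once both parent facilities end up enabled. A purely illustrative helper expressing that invariant, not a function that exists in traps.c:

#include <asm/reg.h>
#include <asm/ptrace.h>

/* Illustrative only: keep MSR_VSX consistent with MSR_FP and MSR_VEC. */
static inline void msr_fixup_vsx(struct pt_regs *regs)
{
	if ((regs->msr & MSR_FP) && (regs->msr & MSR_VEC))
		regs->msr |= MSR_VSX;
	else
		regs->msr &= ~MSR_VSX;
}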
D | vector.S | 130 oris r12,r12,MSR_VSX@h
|
D | exceptions-64s.S | 1301 oris r10,r10,MSR_VSX@h
|
/arch/powerpc/include/asm/ |
D | switch_to.h | 62 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in disable_kernel_vsx()
|
D | reg.h | 88 #define MSR_VSX __MASK(MSR_VSX_LG) /* Enable VSX */ macro
|
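The reg.h hit is the definition itself: MSR_VSX is built from its bit number, and that bit number is also why every assembly hit uses oris ...,MSR_VSX@h rather than ori. A short sketch of the expansion, assuming MSR_VSX_LG is 23 and __MASK(x) is (1UL << (x)) as in current reg.h:

#define MSR_VSX_LG	23			/* VSX enable, bit 23 from the LSB */
#define __MASK(X)	(1UL << (X))
#define MSR_VSX		__MASK(MSR_VSX_LG)	/* 0x0000000000800000UL */

/*
 * ori/oris take a 16-bit immediate.  Bit 23 lives in the upper halfword of
 * the low 32 bits, so the .S hits use "oris rN,rN,MSR_VSX@h": @h yields the
 * high 16 bits of the constant (0x0080), and oris shifts them back up by 16
 * when OR-ing them into the register.
 */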
/arch/powerpc/lib/ |
D | ldstfp.S |
    167  oris r7,r6,MSR_VSX@h
    194  oris r7,r6,MSR_VSX@h
|
D | test_emulate_step.c | 69 regs->msr |= MSR_VSX; in init_pt_regs()
|
D | sstep.c |
    2841  unsigned long msrbit = MSR_VSX;  in emulate_loadstore()
    2912  unsigned long msrbit = MSR_VSX;  in emulate_loadstore()
|
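Both sstep.c hits are the facility gate in emulate_loadstore(): before emulating a VSX load or store, the emulator checks that the MSR bit the instruction architecturally depends on is actually set in the saved MSR. An illustrative condensation of that check; the real code also consults op->vsx_flags, which the reg32_uses_vec parameter stands in for here:

#include <linux/types.h>
#include <asm/reg.h>
#include <asm/ptrace.h>

/*
 * Illustrative only: decide whether a VSX load/store may be emulated.
 * VSX ops targeting VSR 32-63 (the VMX overlap) are gated by MSR_VEC
 * rather than MSR_VSX.
 */
static bool vsx_facility_enabled(const struct pt_regs *regs, int reg,
				 bool reg32_uses_vec)
{
	unsigned long msrbit = MSR_VSX;

	if (reg >= 32 && reg32_uses_vec)
		msrbit = MSR_VEC;

	return (regs->msr & msrbit) != 0;
}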
/arch/powerpc/kvm/ |
D | book3s_pr.c |
    134  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
    662  if (msr & MSR_VSX)  in kvmppc_giveup_ext()
    692  vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);  in kvmppc_giveup_ext()
    730  if (msr == MSR_VSX) {  in kvmppc_handle_ext()
    744  msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    1210  ext_msr = MSR_VSX;  in kvmppc_handle_exit_pr()
    1562  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_vcpu_run_pr()
|
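The PR-KVM hits apply the same dependency on the guest side: kvmppc_handle_ext() widens a bare VSX request so the FP and VMX register sets are loaded first, and kvmppc_giveup_ext() drops VSX from guest_owned_ext whenever FP or VEC is given up. Two illustrative helpers capturing just that mask handling (not the KVM functions themselves, which also move the register state around):

#include <asm/reg.h>

/* As in kvmppc_handle_ext(): a bare MSR_VSX request implies FP and VEC. */
static unsigned long widen_ext_request(unsigned long msr)
{
	if (msr == MSR_VSX)
		msr = MSR_FP | MSR_VEC | MSR_VSX;
	return msr;
}

/* As in kvmppc_giveup_ext(): giving up FP or VEC also drops VSX ownership. */
static void drop_guest_owned_ext(unsigned long *guest_owned_ext, unsigned long msr)
{
	*guest_owned_ext &= ~(msr | MSR_VSX);
}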
D | emulate_loadstore.c | 52 if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) { in kvmppc_check_vsx_disabled()
|
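The emulate_loadstore.c hit is the guest-visibility check for VSX MMIO emulation: if the guest itself has MSR_VSX clear, the instruction must not be emulated and the guest should see a VSX-unavailable interrupt instead. A sketch of that function; the name of the injection helper, kvmppc_core_queue_vsx_unavail(), is an assumption about the surrounding API:

/*
 * Sketch of kvmppc_check_vsx_disabled(): respect the guest's own MSR when
 * emulating a VSX load/store that faulted on MMIO.
 * kvmppc_core_queue_vsx_unavail() is assumed to be the injection helper.
 */
static bool kvmppc_check_vsx_disabled(struct kvm_vcpu *vcpu)
{
	if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {
		kvmppc_core_queue_vsx_unavail(vcpu);
		return true;	/* caller aborts the emulation */
	}
	return false;
}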
D | book3s_hv_rmhandlers.S |
    2879  oris r8,r8,MSR_VSX@h
    2914  oris r8,r8,MSR_VSX@h
    3064  oris r5, r5, (MSR_VEC | MSR_VSX)@h
|