Searched refs:MSR_VSX (Results 1 – 14 of 14) sorted by relevance
/arch/powerpc/kernel/

fpu.S
      65  oris r5,r5,MSR_VSX@h
      91  oris r3,r3,MSR_VSX@h
     133  oris r5,r5,MSR_VSX@h
     202  oris r5,r5,MSR_VSX@h
     227  oris r3,r3,MSR_VSX@h
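A note on the assembly hits above: MSR_VSX sits above the low 16 bits of the MSR, so ori's 16-bit immediate cannot reach it; the code instead ORs in the constant's high halfword with oris via the @h operator. A minimal C sketch of what that idiom computes, assuming MSR_VSX is bit 23 as defined in asm/reg.h:

    /* Sketch only: what "oris rN,rN,MSR_VSX@h" computes.
     * Assumes MSR_VSX == 1UL << 23 (MSR_VSX_LG in asm/reg.h). */
    #include <assert.h>

    #define MSR_VSX (1UL << 23)

    static unsigned long oris_msr_vsx(unsigned long msr)
    {
            unsigned long hi = (MSR_VSX >> 16) & 0xffff;  /* the @h value: 0x80 */
            return msr | (hi << 16);                      /* oris shifts it back up */
    }

    int main(void)
    {
            assert(oris_msr_vsx(0) == MSR_VSX);  /* same effect as msr |= MSR_VSX */
            return 0;
    }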
signal_64.c
     142  msr &= ~MSR_VSX;  in setup_sigcontext()
     156  msr |= MSR_VSX;  in setup_sigcontext()
     279  if (msr & MSR_VSX)  in setup_tm_sigcontexts()
     287  msr |= MSR_VSX;  in setup_tm_sigcontexts()
     366  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_sigcontext()
     397  if ((msr & MSR_VSX) != 0)  in restore_sigcontext()
     487  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_tm_sigcontexts()
     533  if (v_regs && ((msr & MSR_VSX) != 0)) {  in restore_tm_sigcontexts()
     626  (new_msr & MSR_VSX))  in sys_swapcontext()
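The signal_64.c hits read as a pair: restore first clears every math-facility bit, then re-enables only what the saved context recorded. A hedged sketch of that flow; the helper name is hypothetical and the bit positions are assumed from asm/reg.h:

    /* Hypothetical helper mirroring the restore_sigcontext() hits:
     * disable FP/VMX/VSX, then re-enable what the saved MSR had.
     * (MSR_FE0/MSR_FE1 from the line-366 clear omitted for brevity.) */
    #include <stdint.h>

    #define MSR_FP   (1UL << 13)   /* assumed, MSR_FP_LG  */
    #define MSR_VSX  (1UL << 23)   /* assumed, MSR_VSX_LG */
    #define MSR_VEC  (1UL << 25)   /* assumed, MSR_VEC_LG */

    static uint64_t restore_math_bits(uint64_t cur_msr, uint64_t saved_msr)
    {
            cur_msr &= ~(MSR_FP | MSR_VEC | MSR_VSX);   /* line 366's clear */
            if (saved_msr & MSR_VSX)                    /* line 397's test  */
                    cur_msr |= MSR_VSX;
            return cur_msr;
    }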
vector.S
     196  lis r3,(MSR_VEC|MSR_VSX)@h
     242  lis r6,MSR_VSX@h
     252  oris r12,r12,MSR_VSX@h
     269  oris r5,r5,MSR_VSX@h
     280  lis r3,MSR_VSX@h
process.c
     213  (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)))  in enable_kernel_vsx()
     235  if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) {  in flush_vsx_to_thread()
     552  msr_diff &= MSR_FP | MSR_VEC | MSR_VSX | MSR_FE0 | MSR_FE1;  in tm_reclaim_thread()
     707  if (msr & MSR_VSX)  in tm_recheckpoint_new_task()
     708  new->thread.regs->msr |= MSR_VSX;  in tm_recheckpoint_new_task()
     746  msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;  in restore_tm_state()
     815  if (prev->thread.regs && (prev->thread.regs->msr & MSR_VSX))  in __switch_to()
     841  new->thread.regs->msr |= MSR_VSX;  in __switch_to()
     965  {MSR_VSX, "VSX"},
signal_32.c
     452  msr &= ~MSR_VSX;  in save_user_regs()
     464  msr |= MSR_VSX;  in save_user_regs()
     612  if (msr & MSR_VSX) {  in save_tm_user_regs()
     621  msr |= MSR_VSX;  in save_tm_user_regs()
     728  regs->msr &= ~MSR_VSX;  in restore_user_regs()
     729  if (msr & MSR_VSX) {  in restore_user_regs()
     844  regs->msr &= ~MSR_VSX;  in restore_tm_user_regs()
     845  if (msr & MSR_VSX) {  in restore_tm_user_regs()
    1175  (new_msr & MSR_VSX))  in sys_swapcontext()
tm.S
     123  oris r15,r15, MSR_VSX@h
     366  oris r5, r5, MSR_VSX@h
traps.c
    1492  regs->msr |= MSR_VSX;  in fp_unavailable_tm()
    1512  regs->msr |= MSR_VSX;  in altivec_unavailable_tm()
    1535  regs->msr |= MSR_VSX;  in vsx_unavailable_tm()
    1543  MSR_VSX;  in vsx_unavailable_tm()
entry_64.S
     547  oris r0,r0,MSR_VSX@h  /* Disable VSX */
exceptions-64s.S
     605  oris r10,r10,MSR_VSX@h
/arch/powerpc/kvm/

book3s_pr.c
     133  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
     660  if (msr & MSR_VSX)  in kvmppc_giveup_ext()
     690  vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);  in kvmppc_giveup_ext()
     728  if (msr == MSR_VSX) {  in kvmppc_handle_ext()
     742  msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    1176  ext_msr = MSR_VSX;  in kvmppc_handle_exit_pr()
    1507  if (current->thread.regs->msr & MSR_VSX)  in kvmppc_vcpu_run_pr()
    1523  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_vcpu_run_pr()
book3s_hv_rmhandlers.S
    2430  oris r8,r8,MSR_VSX@h
    2465  oris r8,r8,MSR_VSX@h
    2615  oris r5, r5, (MSR_VEC | MSR_VSX)@h
/arch/powerpc/lib/

ldstfp.S
     327  oris r7,r6,MSR_VSX@h
     355  oris r7,r6,MSR_VSX@h
sstep.c
    1537  if (!(regs->msr & MSR_VSX))  in analyse_instr()
    1545  if (!(regs->msr & MSR_VSX))  in analyse_instr()
/arch/powerpc/include/asm/

reg.h
      79  #define MSR_VSX __MASK(MSR_VSX_LG)  /* Enable VSX */  (macro)
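For reference, the macro on reg.h line 79 expands as follows; the __MASK definition and bit position are assumed from the surrounding asm/reg.h definitions:

    /* Assumed from asm/reg.h: __MASK(X) is (1UL << (X)) and
     * MSR_VSX_LG is 23, so MSR_VSX == 0x800000. Its high halfword,
     * 0x0080, is the value the @h references above resolve to. */
    #define __MASK(X)   (1UL << (X))
    #define MSR_VSX_LG  23
    #define MSR_VSX     __MASK(MSR_VSX_LG)   /* 0x0000000000800000 */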