/arch/powerpc/kernel/
  process.c
      125   if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))  in msr_check_and_set()
      143   if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))  in __msr_check_and_clear()
      159   msr &= ~(MSR_FP|MSR_FE0|MSR_FE1);  in __giveup_fpu()
      171   msr_check_and_set(MSR_FP);  in giveup_fpu()
      173   msr_check_and_clear(MSR_FP);  in giveup_fpu()
      193   if (tsk->thread.regs->msr & MSR_FP) {  in flush_fp_to_thread()
      215   cpumsr = msr_check_and_set(MSR_FP);  in enable_kernel_fp()
      217   if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) {  in enable_kernel_fp()
      340   WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
      343   if (msr & MSR_FP)  in __giveup_vsx()
      [all …]
  fpu.S
      87    ori r5,r5,MSR_FP
      100   ori r9,r9,MSR_FP        /* enable FP for current */
      106   ori r12,r12,MSR_FP
  tm.S
      118   ori r15, r15, MSR_FP
      284   andi. r0, r4, MSR_FP
      366   ori r5, r5, MSR_FP
      398   andi. r0, r4, MSR_FP
  signal_64.c
      270   if (msr & MSR_FP)  in setup_tm_sigcontexts()
      368   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_sigcontext()
      483   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_tm_sigcontexts()
      577   msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_sigcontexts()
      578   if (msr & MSR_FP) {  in restore_tm_sigcontexts()
      580   regs->msr |= (MSR_FP | tsk->thread.fpexc_mode);  in restore_tm_sigcontexts()
  traps.c
      1508   regs->msr |= (MSR_FP | current->thread.fpexc_mode);  in fp_unavailable_tm()
      1516   tm_recheckpoint(&current->thread, MSR_FP);  in fp_unavailable_tm()
      1541   if (regs->msr & MSR_FP) {  in altivec_unavailable_tm()
      1542   msr_check_and_set(MSR_FP);  in altivec_unavailable_tm()
      1566   if ((orig_msr & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC)) {  in vsx_unavailable_tm()
      1574   regs->msr |= MSR_VEC | MSR_FP | current->thread.fpexc_mode |  in vsx_unavailable_tm()
      1582   msr_check_and_set(orig_msr & (MSR_FP | MSR_VEC));  in vsx_unavailable_tm()
      1584   if (orig_msr & MSR_FP)  in vsx_unavailable_tm()
  vector.S
      120   andi. r5,r12,MSR_FP
      180   ori r11,r10,MSR_FP
  signal_32.c
      590   if (msr & MSR_FP) {  in save_tm_user_regs()
      733   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1);  in restore_user_regs()
      819   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1);  in restore_tm_user_regs()
      898   msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_user_regs()
      899   if (msr & MSR_FP) {  in restore_tm_user_regs()
      901   regs->msr |= (MSR_FP | current->thread.fpexc_mode);  in restore_tm_user_regs()
  cpu_setup_6xx.S
      284   ori r11,r10,MSR_FP
  entry_64.S
      247    andi. r0,r8,MSR_FP
      1171   ori r9,r9,MSR_IR|MSR_DR|MSR_FE0|MSR_FE1|MSR_FP|MSR_RI|MSR_LE
  entry_32.S
      662   li r0,MSR_FP            /* Disable floating-point */
  exceptions-64s.S
      1283   ori r10,r10,(MSR_FP|MSR_FE0|MSR_FE1)

/arch/powerpc/include/asm/
  switch_to.h
      35   msr_check_and_clear(MSR_FP);  in disable_kernel_fp()
      62   msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
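
  As context for the disable_kernel_fp() hit above and the enable_kernel_fp() hits in
  process.c earlier in this listing: kernel code that wants to execute floating-point
  instructions normally brackets them with these helpers while preemption is disabled,
  since MSR_FP only enables the FP unit for the currently running context. A minimal
  sketch of that pattern follows; do_fp_work() is a made-up example function, not
  something taken from the listing.

      #include <linux/preempt.h>
      #include <asm/switch_to.h>

      /* Hypothetical illustration of the enable/disable_kernel_fp() bracket. */
      static void do_fp_work(void)
      {
              preempt_disable();      /* FP register state is only valid on this CPU */
              enable_kernel_fp();     /* saves any live user FP state, sets MSR_FP */

              /* ... floating-point instructions may be used here ... */

              disable_kernel_fp();    /* clears MSR_FP via msr_check_and_clear(MSR_FP) */
              preempt_enable();
      }
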
  reg.h
      96   #define MSR_FP  __MASK(MSR_FP_LG)   /* Floating Point enable */   (macro)
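
  The reg.h hit above is the definition itself: MSR_FP is a single-bit mask built from
  the bit position MSR_FP_LG (13 in the kernel's LSB-relative numbering), so it expands
  to 1UL << 13, i.e. 0x2000. The small stand-alone sketch below mirrors how the call
  sites in this listing test the bit; the msr value and the main() wrapper are invented
  for illustration, only the three macros follow reg.h.

      #include <stdio.h>

      #define MSR_FP_LG  13                    /* bit position, as defined in reg.h */
      #define __MASK(X)  (1UL << (X))
      #define MSR_FP     __MASK(MSR_FP_LG)     /* 0x2000: Floating Point enable */

      int main(void)
      {
              unsigned long msr = 0x8000000000001032UL;   /* made-up MSR value */

              /* Same shape as the "regs->msr & MSR_FP" tests in the listing above. */
              printf("MSR_FP = %#lx, FP %s\n", MSR_FP,
                     (msr & MSR_FP) ? "enabled" : "disabled");
              return 0;
      }
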

/arch/powerpc/lib/
  ldstfp.S
      28    ori r7, r6, MSR_FP
      52    ori r7, r6, MSR_FP
      218   ori r7, r6, MSR_FP
      233   ori r7, r6, MSR_FP
  sstep.c
      494    if (regs->msr & MSR_FP)  in do_fp_load()
      501    if (regs->msr & MSR_FP)  in do_fp_load()
      528    if (regs->msr & MSR_FP)  in do_fp_store()
      540    if (regs->msr & MSR_FP)  in do_fp_store()
      834    if (regs->msr & MSR_FP) {  in do_vsx_load()
      865    if (regs->msr & MSR_FP) {  in do_vsx_store()
      2827   if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))  in emulate_loadstore()
      2898   if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))  in emulate_loadstore()
  test_emulate_step.c
      67   regs->msr |= MSR_FP;  in init_pt_regs()

/arch/powerpc/kvm/
  book3s_pr.c
      134    kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
      399    if (kvmppc_get_msr(vcpu) & MSR_FP)  in kvmppc_set_msr_pr()
      400    kvmppc_handle_ext(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL, MSR_FP);  in kvmppc_set_msr_pr()
      663    msr |= MSR_FP | MSR_VEC;  in kvmppc_giveup_ext()
      673    if (msr & MSR_FP) {  in kvmppc_giveup_ext()
      679    if (t->regs->msr & MSR_FP)  in kvmppc_giveup_ext()
      744    msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
      756    if (msr & MSR_FP) {  in kvmppc_handle_ext()
      795    if (lost_ext & MSR_FP) {  in kvmppc_handle_lost_ext()
      1202   ext_msr = MSR_FP;  in kvmppc_handle_exit_pr()
      [all …]
  booke.c
      144   if (!(current->thread.regs->msr & MSR_FP)) {  in kvmppc_load_guest_fp()
      149   current->thread.regs->msr |= MSR_FP;  in kvmppc_load_guest_fp()
      161   if (current->thread.regs->msr & MSR_FP)  in kvmppc_save_guest_fp()
      172   vcpu->arch.shadow_msr &= ~MSR_FP;  in kvmppc_vcpu_sync_fpu()
      173   vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;  in kvmppc_vcpu_sync_fpu()
  book3s_paired_singles.c
      667   if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {  in kvmppc_emulate_paired_single()
      672   kvmppc_giveup_ext(vcpu, MSR_FP);  in kvmppc_emulate_paired_single()
  book3s_emulate.c
      425   kvmppc_giveup_ext(vcpu, MSR_FP);  in kvmppc_core_emulate_mtspr_pr()
  emulate_loadstore.c
      40   if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {  in kvmppc_check_fp_disabled()
  book3s_hv_rmhandlers.S
      2871   ori r8,r5,MSR_FP
      2906   ori r8,r9,MSR_FP
      3063   ori r5, r5, MSR_FP