Searched refs:MSR_LE (Results 1 – 21 of 21) sorted by relevance
82 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation_early()
116 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation()
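The two TM-emulation hits above share the same bit-preserving idiom: compute a new MSR value, then carry the existing endian mode across unchanged. A minimal sketch of that idiom, assuming only the MSR_LE mask from the definition hits further down (the function name here is hypothetical):

	#define MSR_LE	0x0000000000000001ULL

	/* Clear LE in the freshly computed value, then copy the LE bit
	 * over from the previous MSR so the endian mode is preserved. */
	static unsigned long sketch_preserve_le(unsigned long newmsr,
						unsigned long msr)
	{
		return (newmsr & ~MSR_LE) | (msr & MSR_LE);
	}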
195 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE | in kvmppc_recalc_shadow_msr()
198 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE; in kvmppc_recalc_shadow_msr()
1532 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_get_one_reg_pr()
1615 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr_pr()
1617 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr_pr()
1768 vcpu->arch.shadow_msr = MSR_USER64 & ~MSR_LE; in kvmppc_core_vcpu_create_pr()
251 if ((kvmppc_get_msr(vcpu) & MSR_LE) && in kvmppc_core_emulate_op_pr()
184 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_kvm_pv()
1595 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr()
1597 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr()
63 #define MSR_LE 0x0000000000000001 macro
43 li r11,MSR_LE
27 li r0,MSR_IR|MSR_DR|MSR_LE
347 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_sigcontext()
460 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_tm_sigcontexts()
900 regs->msr &= ~MSR_LE; in handle_rt_signal64()
901 regs->msr |= (MSR_KERNEL & MSR_LE); in handle_rt_signal64()
125 current->thread.regs->msr ^= MSR_LE; in SYSCALL_DEFINE0()
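This hit flips the calling thread's endian mode with a single XOR rather than a read-modify-write of the bit. A minimal sketch of the toggle, again assuming the MSR_LE mask and using a hypothetical function name (emulate_step() in the last result group applies the same XOR):

	#define MSR_LE	0x0000000000000001ULL

	/* Toggle the LE bit: a big-endian context becomes little-endian
	 * and vice versa. */
	static unsigned long sketch_toggle_le(unsigned long msr)
	{
		return msr ^ MSR_LE;
	}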
308 if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) { in fix_alignment()
652 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_user_regs()
759 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_tm_user_regs()
978 regs->msr &= ~MSR_LE; in handle_rt_signal32()
979 regs->msr |= (MSR_KERNEL & MSR_LE); in handle_rt_signal32()
1431 regs->msr &= ~MSR_LE; in handle_signal32()
1329 {MSR_LE, "LE"},
1915 regs->msr &= ~MSR_LE; in set_endian()
1917 regs->msr |= MSR_LE; in set_endian()
1936 if (regs->msr & MSR_LE) { in get_endian()
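The set_endian()/get_endian() hits show both directions of the same operation: selecting an endian mode by setting or clearing MSR_LE in the thread's saved registers, and reporting the current mode by testing the bit. A hedged sketch with stand-in names (the kernel versions operate on struct pt_regs and prctl constants):

	#define MSR_LE	0x0000000000000001ULL

	/* Select the endian mode in a saved MSR image. */
	static void sketch_set_endian(unsigned long *msr, int little)
	{
		if (little)
			*msr |= MSR_LE;
		else
			*msr &= ~MSR_LE;
	}

	/* Report the current mode: nonzero means little-endian. */
	static int sketch_get_endian(unsigned long msr)
	{
		return (msr & MSR_LE) != 0;
	}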
1211 ori r9,r9,MSR_IR|MSR_DR|MSR_FE0|MSR_FE1|MSR_FP|MSR_RI|MSR_LE
1324 LOAD_REG_IMMEDIATE(r12, MSR_SF | MSR_ISF | MSR_LE)
913 swap = (msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in p9_hmi_special_emu()
1492 xori r12,r12,MSR_LE
391 return (kvmppc_get_msr(vcpu) & MSR_LE) != (MSR_KERNEL & MSR_LE); in kvmppc_need_byteswap()
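Several hits (kvmppc_need_byteswap(), p9_hmi_special_emu(), fix_alignment(), emulate_loadstore()) use the same cross-endian test: a byteswap is needed exactly when the MSR being emulated and the kernel's own MSR disagree on the LE bit. A minimal sketch, with msr_kernel standing in for the kernel's MSR value:

	#define MSR_LE	0x0000000000000001ULL

	/* True when the emulated context and the kernel run in opposite
	 * endian modes, i.e. emulated loads/stores must swap bytes. */
	static int sketch_cross_endian(unsigned long msr,
				       unsigned long msr_kernel)
	{
		return (msr & MSR_LE) != (msr_kernel & MSR_LE);
	}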
117 #define MSR_LE __MASK(MSR_LE_LG) /* Little Endian */ macro
143 #define MSR_ (__MSR | MSR_LE)
144 #define MSR_IDLE (MSR_ME | MSR_SF | MSR_HV | MSR_LE)
476 (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR| in pSeries_system_reset_exception()
1545 op->val = 0xffffffff & ~(MSR_ME | MSR_LE); in analyse_instr()
2877 cross_endian = (regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in emulate_loadstore()
3202 regs->msr ^= MSR_LE; in emulate_step()