Searched refs:MSR_LE (Results 1 – 22 of 22) sorted by relevance
82 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation_early()
116 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation()
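Both TM-emulation hits above use the same bit-preserving idiom: every field of newmsr comes from the proposed value except MSR_LE, which is carried over from the current msr, so the emulated transition cannot flip the guest's endianness. A minimal standalone sketch of the idiom (merge_msr is a hypothetical name; the MSR_LE value matches the literal definition further down the list):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MSR_LE 0x0000000000000001UL  /* Little Endian bit */

    /* Hypothetical helper: take every bit from 'proposed' except MSR_LE,
     * which is kept from 'current' -- the idiom in the two hits above. */
    static uint64_t merge_msr(uint64_t current, uint64_t proposed)
    {
        return (proposed & ~MSR_LE) | (current & MSR_LE);
    }

    int main(void)
    {
        uint64_t cur  = 0x9000000000001033UL;  /* LE bit set   */
        uint64_t prop = 0x9000000000001032UL;  /* LE bit clear */
        /* Result keeps prop's bits but cur's endianness: ...1033. */
        printf("merged MSR: 0x%016" PRIx64 "\n", merge_msr(cur, prop));
        return 0;
    }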
231 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE | in kvmppc_recalc_shadow_msr()
234 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE; in kvmppc_recalc_shadow_msr()
1567 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_get_one_reg_pr()
1650 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr_pr()
1652 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr_pr()
1795 vcpu->arch.shadow_msr = MSR_USER64 & ~MSR_LE; in kvmppc_core_vcpu_create_pr()
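The kvmppc_recalc_shadow_msr() hits show another recurring pattern: the shadow MSR the hardware actually runs with is masked down to a whitelist of bits the PR guest is allowed to control, and MSR_LE is on that whitelist. A reduced sketch with a trimmed bit set (hypothetical helper; the MSR_SE/MSR_BE values are assumed from the usual powerpc layout):

    #include <assert.h>
    #include <stdint.h>

    #define MSR_LE 0x001UL  /* assumed bit 0  */
    #define MSR_BE 0x200UL  /* assumed bit 9  */
    #define MSR_SE 0x400UL  /* assumed bit 10 */

    /* Hypothetical reduction of kvmppc_recalc_shadow_msr(): only the
     * whitelisted guest MSR bits survive into the shadow MSR. */
    static uint64_t recalc_shadow_msr(uint64_t guest_msr)
    {
        return guest_msr & (MSR_SE | MSR_BE | MSR_LE);
    }

    int main(void)
    {
        /* A privileged high bit set by the guest is silently dropped. */
        assert(recalc_shadow_msr(0x8000000000000001UL) == MSR_LE);
        return 0;
    }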
251 if ((kvmppc_get_msr(vcpu) & MSR_LE) && in kvmppc_core_emulate_op_pr()
185 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_kvm_pv()
1630 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr()
1632 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr()
48 #define MSR_LE 1UL macro
67 thread_endianness = MSR_LE & ucp->uc_mcontext.gp_regs[PT_MSR]; in trap_signal_handler()
347 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_sigcontext()
460 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_tm_sigcontexts()
898 regs->msr &= ~MSR_LE; in handle_rt_signal64()
899 regs->msr |= (MSR_KERNEL & MSR_LE); in handle_rt_signal64()
507 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_user_regs()
614 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_tm_user_regs()
833 regs->msr &= ~MSR_LE; in handle_rt_signal32()
834 regs->msr |= (MSR_KERNEL & MSR_LE); in handle_rt_signal32()
1286 regs->msr &= ~MSR_LE; in handle_signal32()
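The signal hits come in matched pairs. On delivery, handle_rt_signal64()/handle_rt_signal32() clear MSR_LE and then OR in the kernel's own LE bit, so the handler always starts in the endianness the kernel was built for; on sigreturn, restore_sigcontext() and friends use the merge idiom above to put the task's saved LE bit back. A sketch of the delivery side (msr_for_handler is a hypothetical name; MSR_KERNEL is a placeholder value standing in for a little-endian kernel's MSR):

    #include <assert.h>
    #include <stdint.h>

    #define MSR_LE     0x1UL
    #define MSR_KERNEL 0x9000000000001033UL  /* placeholder: LE kernel MSR */

    /* Hypothetical mirror of the handle_rt_signal64() hits: force the
     * signal handler to start in the kernel's endianness. */
    static uint64_t msr_for_handler(uint64_t msr)
    {
        msr &= ~MSR_LE;                /* drop the task's LE bit    */
        msr |= (MSR_KERNEL & MSR_LE);  /* adopt the kernel's LE bit */
        return msr;
    }

    int main(void)
    {
        /* Whatever the task ran as, the handler sees the kernel's setting. */
        assert((msr_for_handler(0x0UL) & MSR_LE) == (MSR_KERNEL & MSR_LE));
        assert((msr_for_handler(0x1UL) & MSR_LE) == (MSR_KERNEL & MSR_LE));
        return 0;
    }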
310 if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) { in fix_alignment()
125 current->thread.regs->msr ^= MSR_LE; in SYSCALL_DEFINE0()
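The SYSCALL_DEFINE0 hit is the switch_endian system call: one XOR flips MSR_LE, so the task returns from the syscall running in the opposite endianness. The toggle is its own inverse, which a few asserts can check (values hypothetical):

    #include <assert.h>
    #include <stdint.h>

    #define MSR_LE 0x1UL

    int main(void)
    {
        uint64_t msr  = 0x9000000000001032UL;  /* big-endian: LE clear */
        uint64_t orig = msr;

        msr ^= MSR_LE;          /* switch_endian: now little-endian   */
        assert(msr & MSR_LE);

        msr ^= MSR_LE;          /* a second toggle restores the state */
        assert(msr == orig);
        return 0;
    }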
1424 {MSR_LE, "LE"},
1993 regs->msr &= ~MSR_LE; in set_endian()
1995 regs->msr |= MSR_LE; in set_endian()
2014 if (regs->msr & MSR_LE) { in get_endian()
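The set_endian()/get_endian() hits are the kernel side of the PR_SET_ENDIAN/PR_GET_ENDIAN prctls, which write and read the thread's MSR_LE. A userspace sketch of the read side, assuming a powerpc kernel (on other architectures the call fails):

    #include <stdio.h>
    #include <sys/prctl.h>

    int main(void)
    {
        int endianness;

        /* PR_GET_ENDIAN stores PR_ENDIAN_BIG, PR_ENDIAN_LITTLE, or
         * PR_ENDIAN_PPC_LITTLE through the pointer passed as arg2. */
        if (prctl(PR_GET_ENDIAN, (unsigned long)&endianness, 0, 0, 0) == -1) {
            perror("PR_GET_ENDIAN");
            return 1;
        }
        printf("thread endianness: %s\n",
               endianness == PR_ENDIAN_LITTLE ? "little" : "big/ppc-little");
        return 0;
    }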
859 ori r9,r9,MSR_IR|MSR_DR|MSR_FE0|MSR_FE1|MSR_FP|MSR_RI|MSR_LE
970 LOAD_REG_IMMEDIATE(r12, MSR_SF | MSR_ISF | MSR_LE)
921 swap = (msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in p9_hmi_special_emu()
2028 xori r12,r12,MSR_LE
63 #define MSR_LE 0x0000000000000001 macro
43 li r11,MSR_LE
27 li r0,MSR_IR|MSR_DR|MSR_LE
389 return (kvmppc_get_msr(vcpu) & MSR_LE) != (MSR_KERNEL & MSR_LE); in kvmppc_need_byteswap()
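kvmppc_need_byteswap() reduces to the comparison shown: an instruction fetched from guest memory needs byteswapping exactly when the guest's MSR_LE disagrees with the endianness the host kernel runs in. As a standalone predicate (hypothetical names, host assumed little-endian):

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_LE     0x1UL
    #define MSR_KERNEL 0x1UL  /* placeholder: little-endian host */

    /* Hypothetical mirror of the kvmppc_need_byteswap() hit. */
    static bool need_byteswap(uint64_t guest_msr)
    {
        return (guest_msr & MSR_LE) != (MSR_KERNEL & MSR_LE);
    }

    int main(void)
    {
        assert(!need_byteswap(0x1UL));  /* LE guest on LE host: no swap */
        assert(need_byteswap(0x0UL));   /* BE guest on LE host: swap    */
        return 0;
    }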
116 #define MSR_LE __MASK(MSR_LE_LG) /* Little Endian */ macro
142 #define MSR_ (__MSR | MSR_LE)
143 #define MSR_IDLE (MSR_ME | MSR_SF | MSR_HV | MSR_LE)
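Unlike the 1UL and 0x...0001 literals elsewhere in the results, this hit derives MSR_LE from a bit position: MSR_LE_LG is the bit number (the _LG suffix reads as log2) and __MASK shifts 1 into place. A self-contained sketch of the pattern; the macro bodies here are assumptions, chosen so the value agrees with the literal definitions above:

    #include <assert.h>

    #define __MASK(X)   (1UL << (X))     /* assumed definition      */
    #define MSR_LE_LG   0                /* assumed LE bit position */
    #define MSR_LE      __MASK(MSR_LE_LG) /* Little Endian */

    int main(void)
    {
        /* Agrees with the 0x0000000000000001 literal in the other hit. */
        assert(MSR_LE == 0x0000000000000001UL);
        return 0;
    }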
493 (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR| in pSeries_system_reset_exception()
1614 op->val = 0xffffffff & ~(MSR_ME | MSR_LE); in analyse_instr()
3134 cross_endian = (regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in emulate_loadstore()
3460 regs->msr ^= MSR_LE; in emulate_step()
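The emulate_loadstore() hit computes cross_endian with the same comparison as kvmppc_need_byteswap(); when it is set, every emulated load and store must byte-reverse the data, and the emulate_step() hit applies the MSR_LE ^= toggle when emulation calls for an endianness flip. A sketch of the cross-endian load path (hypothetical helper; the kernel uses its own swab routines rather than the GCC builtin):

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Hypothetical sketch of cross_endian handling in a 32-bit emulated
     * load: byte-reverse what memory returns when the faulting context's
     * endianness differs from the kernel's. */
    static uint32_t emulated_load32(const void *mem, bool cross_endian)
    {
        uint32_t val;
        memcpy(&val, mem, sizeof(val));  /* alignment-safe read */
        return cross_endian ? __builtin_bswap32(val) : val;
    }

    int main(void)
    {
        uint32_t word = 0x11223344;
        assert(emulated_load32(&word, false) == 0x11223344);
        assert(emulated_load32(&word, true)  == 0x44332211);
        return 0;
    }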