Home
last modified time | relevance | path

Searched refs: MSR_PR (Results 1 – 25 of 36) sorted by relevance

12

/arch/powerpc/kvm/
Dbook3s_32_mmu.c158 if (kvmppc_get_msr(vcpu) & MSR_PR) { in kvmppc_mmu_book3s_32_xlate_bat()
240 if ((sr_kp(sre) && (kvmppc_get_msr(vcpu) & MSR_PR)) || in kvmppc_mmu_book3s_32_xlate_pte()
241 (sr_ks(sre) && !(kvmppc_get_msr(vcpu) & MSR_PR))) in kvmppc_mmu_book3s_32_xlate_pte()
318 !(kvmppc_get_msr(vcpu) & MSR_PR)) { in kvmppc_mmu_book3s_32_xlate()
400 if (msr & MSR_PR) in kvmppc_mmu_book3s_32_esid_to_vsid()
Dbook3s_64_mmu.c229 !(kvmppc_get_msr(vcpu) & MSR_PR)) { in kvmppc_mmu_book3s_64_xlate()
272 if ((kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Kp) in kvmppc_mmu_book3s_64_xlate()
274 else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Ks) in kvmppc_mmu_book3s_64_xlate()
317 !(kvmppc_get_msr(vcpu) & MSR_PR)) in kvmppc_mmu_book3s_64_xlate()
572 return mp_ea && !(kvmppc_get_msr(vcpu) & MSR_PR) && in segment_contains_magic_page()
633 if (kvmppc_get_msr(vcpu) & MSR_PR) in kvmppc_mmu_book3s_64_esid_to_vsid()
643 !(kvmppc_get_msr(vcpu) & MSR_PR)) { in kvmppc_mmu_book3s_64_esid_to_vsid()
Dbook3s_pr.c327 smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE; in kvmppc_recalc_shadow_msr()
366 if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) != in kvmppc_set_msr_pr()
367 (old_msr & (MSR_PR|MSR_IR|MSR_DR))) { in kvmppc_set_msr_pr()
372 if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) { in kvmppc_set_msr_pr()
391 !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) { in kvmppc_set_msr_pr()
822 if (!(kvmppc_get_msr(vcpu) & MSR_PR)) in kvmppc_emulate_fac()
1042 if (kvmppc_get_msr(vcpu) & MSR_PR) { in kvmppc_handle_exit_pr()
1100 !(kvmppc_get_msr(vcpu) & MSR_PR)) { in kvmppc_handle_exit_pr()
1132 } else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && in kvmppc_handle_exit_pr()
Dbook3s_64_mmu_host.c62 if (kvmppc_get_msr(vcpu) & MSR_PR) in find_sid_vsid()
238 if (kvmppc_get_msr(vcpu) & MSR_PR) in create_sid_map()
Dbook3s_32_mmu_host.c96 if (kvmppc_get_msr(vcpu) & MSR_PR) in find_sid_vsid()
282 if (kvmppc_get_msr(vcpu) & MSR_PR) in create_sid_map()
Dbook3s_emulate.c84 if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM) in spr_allowed()
200 if ((kvmppc_get_msr(vcpu) & MSR_PR) || in kvmppc_core_emulate_op_pr()
De500.h223 return !!(vcpu->arch.shared->msr & MSR_PR); in get_cur_pr()
Dbooke.c401 crit = crit && !(vcpu->arch.shared->msr & MSR_PR); in kvmppc_booke_irqprio_deliver()
1094 if (vcpu->arch.shared->msr & (MSR_PR | MSR_GS)) { in kvmppc_handle_exit()
1198 if (!(vcpu->arch.shared->msr & MSR_PR)) { in kvmppc_handle_exit()
1212 if (!(vcpu->arch.shared->msr & MSR_PR) && in kvmppc_handle_exit()
1233 if (!(vcpu->arch.shared->msr & MSR_PR) && in kvmppc_handle_exit()
1941 if (!(vcpu->arch.shared->msr & MSR_PR) && in kvmppc_xlate()
De500_mmu_host.c313 u32 pr = vcpu->arch.shared->msr & MSR_PR; in kvmppc_e500_setup_stlbe()
674 pr = vcpu->arch.shared->msr & MSR_PR; in kvmppc_load_last_inst()
Dpowerpc.c334 !(kvmppc_get_msr(vcpu) & MSR_PR)) { in kvmppc_st()
373 !(kvmppc_get_msr(vcpu) & MSR_PR)) { in kvmppc_ld()
Dbooke_interrupts.S90 andi. r4, r4, MSR_PR
/arch/powerpc/include/asm/
Dptrace.h107 #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)
Dreg_booke.h46 #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
50 #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
53 #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
Dreg.h86 #define MSR_PR __MASK(MSR_PR_LG) /* Problem State / Privilege Level */ macro
128 #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
133 #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
Dexception-64s.h379 andi. r10,r12,MSR_PR; /* See if coming from user */ \
/arch/powerpc/kernel/
Dexceptions-64e.S76 andi. r3,r3,MSR_PR
153 andi. r3,r3,MSR_PR
295 andi. r10,r11,MSR_PR; /* save stack pointer */ \
651 andi. r0,r12,MSR_PR;
670 andi. r0,r12,MSR_PR;
795 1: andi. r14,r11,MSR_PR; /* check for userspace again */
865 1: andi. r14,r11,MSR_PR; /* check for userspace again */
1096 andi. r6,r10,MSR_PR
Dhead_booke.h52 andi. r11, r11, MSR_PR; /* check whether user or kernel */\
142 andi. r11,r11,MSR_PR; \
Dentry_64.S69 andi. r10,r12,MSR_PR
242 andi. r6,r8,MSR_PR
750 andi. r3,r3,MSR_PR
973 andi. r0,r3,MSR_PR
Dhead_40x.S113 andi. r11,r11,MSR_PR; \
152 andi. r11,r11,MSR_PR; \
696 andi. r10,r9,MSR_IR|MSR_PR /* check supervisor + MMU off */
Dentry_32.S144 andi. r2,r9,MSR_PR
767 andi. r0,r3,MSR_PR
1018 andi. r3,r3,MSR_PR; \
Dhead_fsl_booke.S456 andi. r10,r11,MSR_PR
557 andi. r10,r11,MSR_PR
Dexceptions-64s.S1452 andi. r11,r12,MSR_PR /* See if coming from user. */
1557 andi. r10,r12,MSR_PR /* check for user mode (PR != 0) */
/arch/powerpc/xmon/
Dxmon.c454 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) in xmon_core()
586 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) { in xmon_core()
636 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT)) in xmon_bpt()
667 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT)) in xmon_break_match()
677 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT)) in xmon_iabr_match()
702 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) { in xmon_fault_handler()
1003 if ((regs->msr & (MSR_64BIT|MSR_PR|MSR_IR)) == (MSR_64BIT|MSR_IR)) { in do_step()
1468 if (regs->msr & MSR_PR) in print_bug_trap()
/arch/powerpc/lib/
Dsstep.c711 if (regs->msr & MSR_PR) in analyse_instr()
946 if (regs->msr & MSR_PR) in analyse_instr()
952 if (regs->msr & MSR_PR) in analyse_instr()
960 if (regs->msr & MSR_PR) in analyse_instr()
1916 !(regs->msr & MSR_PR) && in emulate_step()
/arch/powerpc/mm/
Dtlb_low_64e.S74 andi. r10,r11,MSR_PR

12