/arch/powerpc/kernel/ |
D | misc_64.S
      105 ori r0,r7,MSR_DR
      106 xori r0,r0,MSR_DR
      136 ori r0,r7,MSR_DR
      137 xori r0,r0,MSR_DR
      167 ori r0,r7,MSR_DR
      168 xori r0,r0,MSR_DR
      182 ori r0,r7,MSR_DR
      183 xori r0,r0,MSR_DR
      384 li r10,MSR_DR|MSR_IR
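The ori/xori pairs in misc_64.S are the immediate-only idiom for clearing an MSR bit: ori sets MSR_DR unconditionally, then xori toggles it off, so the result is the input with MSR_DR cleared whether or not it was set, without needing the inverted mask in a register. A minimal C sketch of the same arithmetic (the MSR_DR value is an assumption, taken from reg.h's __MASK(4)):

    #define MSR_DR 0x10UL  /* assumed: __MASK(MSR_DR_LG), MSR_DR_LG == 4 */

    /* (msr | MSR_DR) ^ MSR_DR == msr & ~MSR_DR for any input,
     * which is exactly what the ori/xori pair computes. */
    static unsigned long clear_msr_dr(unsigned long msr)
    {
            return (msr | MSR_DR) ^ MSR_DR;
    }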
|
D | btext.c
      57 if (!(mfmsr() & MSR_DR)) in rmci_maybe_on()
      65 if (!(mfmsr() & MSR_DR)) in rmci_maybe_off()
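Both btext.c helpers use mfmsr() & MSR_DR as the standard "am I in real mode?" test: if data translation is still on, the real-mode cache-inhibit work is skipped. A hedged sketch of that guard, assuming the kernel's mfmsr() accessor from asm/reg.h:

    #include <asm/reg.h>

    /* Sketch: perform real-mode-only work, such as toggling the
     * real-mode cache-inhibit state, only when MSR_DR is clear. */
    static void rmci_guard_example(void)
    {
            if (mfmsr() & MSR_DR)
                    return;  /* data translation on: not in real mode */
            /* ... real-mode-only work goes here ... */
    }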
|
D | head_32.h
      116 li r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
      173 LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
|
D | misc_32.S
      225 rlwinm r0,r7,0,~MSR_DR
      243 rlwinm r0,r7,0,~MSR_DR
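On 32-bit the same clear takes a single instruction: rlwinm r0,r7,0,~MSR_DR rotates by zero and ANDs with the inverted mask, i.e. r0 = r7 & ~MSR_DR (rlwinm can encode any contiguous, possibly wrapping, run of mask bits, so a one-bit clear fits). The C equivalent, with the bit value assumed as above:

    #define MSR_DR 0x10UL  /* assumed, per reg.h */

    /* rlwinm rD,rS,0,~MSR_DR is just an AND with the inverted mask. */
    static unsigned long rlwinm_clear_dr(unsigned long r7)
    {
            return r7 & ~MSR_DR;
    }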
|
D | head_book3s_32.S
      205 ori r0,r0,MSR_DR|MSR_IR|MSR_RI
      1031 li r3,MSR_KERNEL & ~(MSR_IR|MSR_DR)
      1167 li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR)
      1190 li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)
      1207 andi. r0,r3,MSR_DR|MSR_IR /* MMU enabled? */
|
D | paca.c | 224 new_paca->kernel_msr = MSR_KERNEL & ~(MSR_IR | MSR_DR); in initialise_paca()
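initialise_paca() caches a per-CPU kernel MSR with both relocation bits masked off, so early kernel entry can run in real mode while keeping the machine-check and recoverable-interrupt enables. A sketch of the masking, with all bit values assumed from reg.h:

    /* Assumed bit values, per reg.h's __MASK() definitions. */
    #define MSR_RI     0x0002UL
    #define MSR_DR     0x0010UL
    #define MSR_IR     0x0020UL
    #define MSR_ME     0x1000UL
    #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR)

    /* Keep ME and RI, drop instruction/data relocation. */
    static unsigned long kernel_msr_real_mode(void)
    {
            return MSR_KERNEL & ~(MSR_IR | MSR_DR);
    }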
|
D | kvm_emul.S | 300 andi. r31, r31, MSR_DR | MSR_IR
|
D | head_8xx.S
      106 ori r0,r0,MSR_DR|MSR_IR
      599 li r3,MSR_KERNEL & ~(MSR_IR|MSR_DR)
      727 li r12, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)
|
D | head_40x.S
      596 lis r3,(MSR_KERNEL & ~(MSR_IR|MSR_DR))@h
      597 ori r3,r3,(MSR_KERNEL & ~(MSR_IR|MSR_DR))@l
|
/arch/powerpc/kvm/ |
D | book3s_32_mmu.c
      356 if (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_32_esid_to_vsid()
      365 switch (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_32_esid_to_vsid()
      372 case MSR_DR: in kvmppc_mmu_book3s_32_esid_to_vsid()
      375 case MSR_DR|MSR_IR: in kvmppc_mmu_book3s_32_esid_to_vsid()
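The esid_to_vsid translation keys the shadow VSID off the (MSR_IR, MSR_DR) pair, so the guest gets a distinct shadow address space for each of the four translation combinations (both off, instruction-only, data-only, both on) and real-mode accesses never alias translated ones. A sketch of the four-way dispatch; the modifier values here are illustrative, not the ones book3s_32_mmu.c actually uses:

    #define MSR_DR 0x10UL  /* assumed values, per reg.h */
    #define MSR_IR 0x20UL

    /* Map each (IR, DR) combination to its own VSID modifier so the
     * four guest translation modes get disjoint shadow mappings. */
    static unsigned long vsid_modifier(unsigned long msr)
    {
            switch (msr & (MSR_DR | MSR_IR)) {
            case 0:                return 0;  /* pure real mode */
            case MSR_IR:           return 1;
            case MSR_DR:           return 2;
            case MSR_DR | MSR_IR:  return 3;
            }
            return 0;  /* not reached */
    }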
|
D | book3s_pr.c
      69 return (msr & (MSR_IR|MSR_DR)) == MSR_DR; in kvmppc_is_split_real()
      78 if ((msr & (MSR_IR|MSR_DR)) != MSR_DR) in kvmppc_fixup_split_real()
      237 smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE; in kvmppc_recalc_shadow_msr()
      531 if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) != in kvmppc_set_msr_pr()
      532 (old_msr & (MSR_PR|MSR_IR|MSR_DR))) { in kvmppc_set_msr_pr()
      540 if (msr & MSR_DR) in kvmppc_set_msr_pr()
      712 bool dr = (kvmppc_get_msr(vcpu) & MSR_DR) ? true : false; in kvmppc_handle_pagefault()
      734 switch (kvmppc_get_msr(vcpu) & (MSR_DR|MSR_IR)) { in kvmppc_handle_pagefault()
      738 case MSR_DR: in kvmppc_handle_pagefault()
      747 if ((kvmppc_get_msr(vcpu) & (MSR_DR|MSR_IR)) == MSR_DR) in kvmppc_handle_pagefault()
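kvmppc_is_split_real() names the state where only MSR_DR is set: the guest fetches instructions untranslated while its data accesses go through the MMU, so PR KVM has to test MSR_IR and MSR_DR as a pair rather than individually. A sketch of the predicate (bit values assumed):

    #define MSR_DR 0x10UL  /* assumed, per reg.h */
    #define MSR_IR 0x20UL

    /* "Split real mode": data relocation on, instruction relocation off. */
    static int is_split_real(unsigned long msr)
    {
            return (msr & (MSR_IR | MSR_DR)) == MSR_DR;
    }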
|
D | book3s_hv_builtin.c
      562 return !(mfmsr() & MSR_DR); in is_rm()
      797 (msr & (MSR_IR|MSR_DR)) == (MSR_IR|MSR_DR) ) { in inject_interrupt()
      798 new_msr |= MSR_IR | MSR_DR; in inject_interrupt()
|
D | book3s_rmhandlers.S | 151 li r6, MSR_IR | MSR_DR
|
D | book3s_64_mmu.c
      583 if (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_64_esid_to_vsid()
      596 switch (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_64_esid_to_vsid()
      603 case MSR_DR: in kvmppc_mmu_book3s_64_esid_to_vsid()
      606 case MSR_DR|MSR_IR: in kvmppc_mmu_book3s_64_esid_to_vsid()
|
D | book3s.c
      429 int relocated = (kvmppc_get_msr(vcpu) & (data ? MSR_DR : MSR_IR)); in kvmppc_xlate()
      443 if ((kvmppc_get_msr(vcpu) & (MSR_IR | MSR_DR)) == MSR_DR && in kvmppc_xlate()
|
D | book3s_segment.S | 296 ori r11, r9, MSR_DR /* Enable paging for data */
|
/arch/powerpc/platforms/powernv/ |
D | opal-wrappers.S | 27 li r0,MSR_IR|MSR_DR|MSR_LE
|
D | subcore-asm.S | 31 li r5, MSR_IR|MSR_DR
|
D | idle.c
      389 WARN_ON_ONCE(mfmsr() & (MSR_IR|MSR_DR)); in power7_idle_insn()
      702 WARN_ON_ONCE(mfmsr() & (MSR_IR|MSR_DR)); in power9_idle_stop()
      941 WARN_ON_ONCE(mfmsr() & (MSR_IR|MSR_DR)); in power10_idle_stop()
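All three idle entry points assert that both relocation bits are already clear, since wakeup from these deep idle states comes in at the system reset vector with the MMU off, so entry must match. A hedged sketch of the check, using the kernel's WARN_ON_ONCE and mfmsr():

    #include <linux/bug.h>
    #include <asm/reg.h>

    static void assert_real_mode_for_idle(void)
    {
            /* Wakeup arrives at the reset vector with translation
             * off; entering deep idle with it on would be a bug. */
            WARN_ON_ONCE(mfmsr() & (MSR_IR | MSR_DR));
    }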
|
/arch/powerpc/platforms/pasemi/ |
D | powersave.S | 62 LOAD_REG_IMMEDIATE(r6,MSR_DR|MSR_IR|MSR_ME|MSR_EE)
|
/arch/powerpc/platforms/82xx/ |
D | pq2.c | 29 mtmsr(mfmsr() & ~(MSR_ME | MSR_EE | MSR_IR | MSR_DR)); in pq2_restart()
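pq2_restart() quiesces the CPU with a single mtmsr, clearing machine check, external interrupts, and both relocation bits so no exception can be taken while the MMU is being abandoned on the way to the reset code. A sketch of that step, assuming the mfmsr()/mtmsr() accessors from asm/reg.h:

    #include <asm/reg.h>

    static void quiesce_for_restart(void)
    {
            /* Drop to real mode and mask ME/EE in one shot before
             * jumping to the board reset entry point. */
            mtmsr(mfmsr() & ~(MSR_ME | MSR_EE | MSR_IR | MSR_DR));
    }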
|
/arch/powerpc/platforms/pseries/ |
D | ras.c
      493 (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR| in pSeries_system_reset_exception()
      494 MSR_ILE|MSR_HV|MSR_SF)) == (MSR_DR|MSR_SF)) { in pSeries_system_reset_exception()
      747 mtmsr(mfmsr() | MSR_IR | MSR_DR); in mce_handle_error()
|
/arch/powerpc/platforms/powermac/ |
D | cache.S
      44 rlwinm r0,r0,0,~MSR_DR
      194 rlwinm r0,r0,0,~MSR_DR
|
/arch/powerpc/include/asm/ |
D | reg.h
      109 #define MSR_DR __MASK(MSR_DR_LG) /* Data Relocate */
      137 #define __MSR (MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_ISF |MSR_HV)
      150 #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR)
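reg.h is where the bit bottoms out: MSR_DR is a one-bit mask built by __MASK from its bit position, and MSR_KERNEL folds it in alongside MSR_IR so kernel code normally runs with both translations on. A sketch of the construction, assuming the usual bit positions (MSR_DR_LG == 4, MSR_IR_LG == 5):

    #define __MASK(X)  (1UL << (X))
    #define MSR_DR_LG  4                   /* assumed bit position */
    #define MSR_IR_LG  5                   /* assumed bit position */
    #define MSR_DR     __MASK(MSR_DR_LG)   /* 0x10, Data Relocate */
    #define MSR_IR     __MASK(MSR_IR_LG)   /* 0x20, Instruction Relocate */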
|
/arch/powerpc/mm/ |
D | mem.c | 443 msr = msr0 & ~MSR_DR; in flush_dcache_icache_phys()
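flush_dcache_icache_phys() is handed a physical address, so it clears MSR_DR around the flush loop: with data translation off, the cache maintenance ops take the physical address directly. A heavily hedged C sketch of the framing; the real function does the MSR switch and the flush inside one inline-asm block, with the interrupt and sync/isync discipline that requires:

    #include <asm/reg.h>

    /* Sketch only: briefly run with data translation off so a cache
     * maintenance callback can operate on a physical address. */
    static void with_msr_dr_clear(void (*op)(unsigned long), unsigned long pa)
    {
            unsigned long msr0 = mfmsr();

            mtmsr(msr0 & ~MSR_DR);  /* data accesses now untranslated */
            op(pa);
            mtmsr(msr0);            /* restore translation */
    }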
|