/arch/powerpc/kernel/ |
D | kvm_emul.S |
     59: lis r30, (~(MSR_EE | MSR_RI))@h
     60: ori r30, r30, (~(MSR_EE | MSR_RI))@l
     66: andi. r30, r30, (MSR_EE|MSR_RI)
    115: #define MSR_SAFE_BITS (MSR_EE | MSR_RI)
|
D | syscall_64.c |
     33: BUG_ON(!(regs->msr & MSR_RI));  (in system_call_exception())
    263: BUG_ON(!(regs->msr & MSR_RI));  (in interrupt_exit_user_prepare())
    346: if (IS_ENABLED(CONFIG_PPC_BOOK3S) && unlikely(!(regs->msr & MSR_RI)))  (in interrupt_exit_kernel_prepare())
|
D | entry_64.S |
    381: li r9, MSR_RI
    658: andi. r0,r5,MSR_RI
    854: ori r0,r0,MSR_EE|MSR_SE|MSR_BE|MSR_RI
    859: ori r9,r9,MSR_IR|MSR_DR|MSR_FE0|MSR_FE1|MSR_FP|MSR_RI|MSR_LE
    882: li r0,MSR_RI
|
D | head_64.S |
    143: ori r24,r24,MSR_RI
    753: ori r3,r3,MSR_RI
    936: ori r6,r6,MSR_RI
|
D | head_32.h |
     67: li r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
    139: li r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
|
D | tm.S |
    205: li r11, MSR_RI
    532: li r4, MSR_RI
|
D | exceptions-64s.S |
     475: xori r10,r10,MSR_RI
     478: xori r10,r10,MSR_RI
     575: li r10,MSR_RI
    1018: li r10,MSR_RI
    1205: li r10,MSR_RI
    1273: andi. r11,r12,MSR_RI
    1317: li r10,MSR_RI
    2014: li r10,MSR_RI
    3014: li r10,MSR_RI
|
D | traps.c |
    402: if (!(regs->msr & MSR_RI))  (in hv_nmi_check_nonrecoverable())
    436: regs->msr &= ~MSR_RI;  (in hv_nmi_check_nonrecoverable())
    512: if (!(regs->msr & MSR_RI)) {  (in system_reset_exception())
    566: regs->msr |= MSR_RI;  (in check_io_access())
    872: if (!(regs->msr & MSR_RI))  (in machine_check_exception())
|
D | head_book3s_32.S |
     205: ori r0,r0,MSR_DR|MSR_IR|MSR_RI
    1168: rlwinm r0, r6, 0, ~MSR_RI
    1190: li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)
    1310: li r5, MSR_ME|MSR_RI
|
D | misc_64.S | 383: 1: li r9,MSR_RI
|
D | head_8xx.S |
    727: li r12, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)
    728: rlwinm r0, r10, 0, ~MSR_RI
|
/arch/powerpc/include/asm/ |
D | hw_irq.h |
    266: #define __hard_irq_enable()  __mtmsrd(MSR_EE|MSR_RI, 1)
    267: #define __hard_irq_disable() __mtmsrd(MSR_RI, 1)
    269: #define __hard_RI_enable()   __mtmsrd(MSR_RI, 1)
|
D | reg_booke.h |
    41: #define MSR_ (MSR_ME | MSR_RI | MSR_CE)
    46: #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR|MSR_CE)
    49: #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_CE)
|
D | reg.h |
    115: #define MSR_RI __MASK(MSR_RI_LG) /* Recoverable Exception */  (macro)
    137: #define __MSR (MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_ISF | MSR_HV)
    150: #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR)
|
/arch/powerpc/platforms/pasemi/ |
D | idle.c | 61: regs->msr |= MSR_RI;  (in pasemi_system_reset_exception())
|
/arch/powerpc/platforms/powernv/ |
D | subcore-asm.S | 25: ori r4,r4,MSR_EE|MSR_SE|MSR_BE|MSR_RI
|
/arch/sh/include/asm/ |
D | smc37c93x.h | 156: #define MSR_RI 0x4000 /* Ring Indicator */  (macro)
|
/arch/powerpc/platforms/embedded6xx/ |
D | mpc7448_hpc2.c | 173: regs->msr |= MSR_RI;  (in mpc7448_machine_check_exception())
|
D | holly.c | 251: regs->msr |= MSR_RI;  (in ppc750_machine_check_exception())
|
/arch/arm/mach-pxa/include/mach/ |
D | regs-uart.h | 127: #define MSR_RI (1 << 6) /* Ring Indicator */  (macro)
|
/arch/powerpc/kvm/ |
D | tm.S |
    109: li r2, MSR_RI
    351: li r5, MSR_RI
|
D | book3s_segment.S | 128: li r0, MSR_RI
|
D | book3s_emulate.c |
    306: new_msr &= ~(MSR_RI | MSR_EE);  (in kvmppc_core_emulate_op_pr())
    307: new_msr |= rs_val & (MSR_RI | MSR_EE);  (in kvmppc_core_emulate_op_pr())
|
/arch/powerpc/platforms/pseries/ |
D | ras.c |
    493: (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR|  (in pSeries_system_reset_exception())
    777: if (!(regs->msr & MSR_RI)) {  (in recover_mce())
|
/arch/powerpc/kexec/ |
D | relocate_32.S | 410: ori r8, r8, MSR_RI|MSR_ME
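
Taken together, the powerpc hits above follow one pattern: exception entry paths clear MSR_RI while SRR0/SRR1 still hold live return state, handlers such as system_reset_exception() and machine_check_exception() test regs->msr & MSR_RI to decide whether the interrupted context can safely be resumed, and paths like check_io_access() or pasemi_system_reset_exception() set the bit again once that state has been saved. The sketch below is a minimal user-space illustration of that bit-mask pattern only, assuming the bit positions MSR_RI_LG = 1 and MSR_EE_LG = 15 from arch/powerpc/include/asm/reg.h; struct fake_regs is a made-up stand-in for struct pt_regs. (The smc37c93x.h and regs-uart.h hits are unrelated UART "Ring Indicator" bits that merely share the name.)

#include <stdio.h>

/* Illustrative only: mirrors the powerpc convention MSR_RI = __MASK(MSR_RI_LG).
 * Bit positions are assumptions taken from arch/powerpc/include/asm/reg.h. */
#define __MASK(x)  (1UL << (x))
#define MSR_RI_LG  1                    /* Recoverable Interrupt (assumed bit 1) */
#define MSR_EE_LG  15                   /* External interrupt Enable (assumed bit 15) */
#define MSR_RI     __MASK(MSR_RI_LG)
#define MSR_EE     __MASK(MSR_EE_LG)

struct fake_regs {                      /* hypothetical stand-in for struct pt_regs */
    unsigned long msr;
};

/* Pattern seen in traps.c and ras.c: if MSR_RI is clear, the interrupted
 * context cannot be safely resumed, so the handler must not return to it. */
static int can_recover(const struct fake_regs *regs)
{
    return (regs->msr & MSR_RI) != 0;
}

int main(void)
{
    struct fake_regs regs = { .msr = MSR_EE | MSR_RI };

    printf("recoverable: %d\n", can_recover(&regs));  /* prints 1 */

    regs.msr &= ~MSR_RI;   /* what entry code does while SRR0/SRR1 are still live */
    printf("recoverable: %d\n", can_recover(&regs));  /* prints 0 */

    regs.msr |= MSR_RI;    /* what check_io_access() etc. do once state is saved */
    printf("recoverable: %d\n", can_recover(&regs));  /* prints 1 */
    return 0;
}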
|