/arch/arm64/include/asm/ |
D | ptrace.h |
    114  u64 pstate;  member
    127  (((regs)->pstate & COMPAT_PSR_T_BIT))
    133  (((regs)->pstate & PSR_MODE_MASK) == PSR_MODE_EL0t)
    136  (((regs)->pstate & (PSR_MODE32_BIT | PSR_MODE_MASK)) == \
    140  ((regs)->pstate & PSR_MODE_MASK)
    143  (!((regs)->pstate & PSR_I_BIT))
    146  (!((regs)->pstate & PSR_F_BIT))
    162  if (user_mode(regs) && (regs->pstate & PSR_I_BIT) == 0) {  in valid_user_regs()
    163  regs->pstate &= ~(PSR_F_BIT | PSR_A_BIT);  in valid_user_regs()
    166  if (!(regs->pstate & PSR_MODE32_BIT))  in valid_user_regs()
    [all …]
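
The ptrace.h hits above are the arm64 helpers that classify a saved context purely from bits in regs->pstate: the mode field drives user_mode()/compat_user_mode(), the I/F bits report whether interrupts were masked, and valid_user_regs() then sanitizes what userspace may set. Below is a minimal standalone C sketch of that bit-testing pattern; the PSR_* values and the fake_pt_regs struct are assumptions made for the sketch (they mirror the AArch64 SPSR layout) rather than text copied from the kernel headers.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative values mirroring the AArch64 SPSR layout (mode in M[3:0],
     * AArch32 flag at bit 4, F/I mask bits at 6/7); assumptions for this
     * sketch, not copied from the kernel headers. */
    #define PSR_MODE_EL0t   0x00000000u
    #define PSR_MODE_MASK   0x0000000fu
    #define PSR_MODE32_BIT  0x00000010u
    #define PSR_F_BIT       0x00000040u
    #define PSR_I_BIT       0x00000080u

    struct fake_pt_regs {
        uint64_t pstate;
    };

    /* Same shape as the listed macros: classify a saved context from pstate. */
    #define user_mode(regs) \
        (((regs)->pstate & PSR_MODE_MASK) == PSR_MODE_EL0t)
    #define compat_user_mode(regs) \
        (((regs)->pstate & (PSR_MODE32_BIT | PSR_MODE_MASK)) == \
         (PSR_MODE32_BIT | PSR_MODE_EL0t))
    #define interrupts_enabled(regs) \
        (!((regs)->pstate & PSR_I_BIT))

    int main(void)
    {
        struct fake_pt_regs regs = { .pstate = PSR_MODE_EL0t };  /* EL0, IRQs unmasked */

        printf("user_mode=%d interrupts_enabled=%d\n",
               user_mode(&regs), interrupts_enabled(&regs));

        regs.pstate |= PSR_I_BIT;  /* mask IRQs in the saved state */
        printf("after masking: interrupts_enabled=%d\n", interrupts_enabled(&regs));
        return 0;
    }
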
|
D | processor.h |
    100  regs->pstate = PSR_MODE_EL0t;  in start_thread()
    109  regs->pstate = COMPAT_PSR_MODE_USR;  in compat_start_thread()
    111  regs->pstate |= COMPAT_PSR_T_BIT;  in compat_start_thread()
    114  regs->pstate |= COMPAT_PSR_E_BIT;  in compat_start_thread()
|
D | perf_event.h |
    31  (regs)->pstate = PSR_MODE_EL1h; \
|
/arch/x86/kernel/cpu/mcheck/ |
D | therm_throt.c |
    157  struct thermal_state *pstate = &per_cpu(thermal_state, this_cpu);  in therm_throt_process()  local
    162  state = &pstate->core_throttle;  in therm_throt_process()
    164  state = &pstate->core_power_limit;  in therm_throt_process()
    169  state = &pstate->package_throttle;  in therm_throt_process()
    171  state = &pstate->package_power_limit;  in therm_throt_process()
    214  struct thermal_state *pstate = &per_cpu(thermal_state, this_cpu);  in thresh_event_valid()  local
    218  state = (event == 0) ? &pstate->pkg_thresh0 :  in thresh_event_valid()
    219  &pstate->pkg_thresh1;  in thresh_event_valid()
    221  state = (event == 0) ? &pstate->core_thresh0 :  in thresh_event_valid()
    222  &pstate->core_thresh1;  in thresh_event_valid()
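
In therm_throt.c the identifier pstate is only a local pointer to the current CPU's struct thermal_state; the listed lines pick one of its event counters (core vs. package, throttle vs. power limit) before updating it. A small sketch of that select-a-member pattern follows; the struct layout, the helper name, and the plain array standing in for per_cpu() are assumptions made for the sketch, not the kernel's definitions.

    #include <stdbool.h>
    #include <stdio.h>

    /* Made-up stand-ins for the sketch: the real struct thermal_state and the
     * per_cpu() accessor live in the kernel, not here. */
    struct therm_counter {
        unsigned long count;
    };

    struct thermal_state {
        struct therm_counter core_throttle;
        struct therm_counter core_power_limit;
        struct therm_counter package_throttle;
        struct therm_counter package_power_limit;
    };

    #define NR_CPUS 4
    static struct thermal_state thermal_state[NR_CPUS];  /* stand-in for the per-CPU data */

    /* Mirror of the selection logic in the listing: pick the counter matching
     * the event's level (core/package) and type (throttle/power limit). */
    static void count_thermal_event(int this_cpu, bool package, bool power_limit)
    {
        struct thermal_state *pstate = &thermal_state[this_cpu];
        struct therm_counter *state;

        if (package)
            state = power_limit ? &pstate->package_power_limit
                                : &pstate->package_throttle;
        else
            state = power_limit ? &pstate->core_power_limit
                                : &pstate->core_throttle;

        state->count++;
    }

    int main(void)
    {
        count_thermal_event(0, false, false);  /* core throttle event on CPU 0 */
        count_thermal_event(0, true, true);    /* package power-limit event on CPU 0 */
        printf("cpu0 core throttle events: %lu\n", thermal_state[0].core_throttle.count);
        return 0;
    }
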
|
/arch/sparc/kernel/ |
D | helpers.S |
    27  rdpr %pstate, %o0
    28  wrpr %o0, PSTATE_IE, %pstate
    43  wrpr %o0, %pstate
|
D | spiterrs.S |
    157  rdpr %pstate, %g4
    158  wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate
    185  rdpr %pstate, %g4
    186  wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate
    205  rdpr %pstate, %g4
    206  wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate
    224  rdpr %pstate, %g4
    225  wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate
|
D | rtrap_64.S |
    27  wrpr %g0, RTRAP_PSTATE, %pstate
    29  wrpr %g0, RTRAP_PSTATE_IRQOFF, %pstate
    33  wrpr %g0, RTRAP_PSTATE, %pstate
    35  wrpr %g0, RTRAP_PSTATE_IRQOFF, %pstate
    50  wrpr %g0, RTRAP_PSTATE, %pstate
    51  wrpr %g0, RTRAP_PSTATE_IRQOFF, %pstate
    123  to_user: wrpr %g0, RTRAP_PSTATE_IRQOFF, %pstate
    165  661: wrpr %g0, RTRAP_PSTATE_AG_IRQOFF, %pstate
    169  wrpr %g0, RTRAP_PSTATE_IRQOFF, %pstate
|
D | dtlb_prot.S |
    20  rdpr %pstate, %g5 ! Move into alt-globals
    21  wrpr %g5, PSTATE_AG|PSTATE_MG, %pstate
|
D | tsb.S |
    157  661: rdpr %pstate, %g5
    158  wrpr %g5, PSTATE_AG | PSTATE_MG, %pstate
    264  661: rdpr %pstate, %g5
    265  wrpr %g5, PSTATE_AG | PSTATE_MG, %pstate
    326  rdpr %pstate, %o5
    327  wrpr %o5, PSTATE_IE, %pstate
    330  wrpr %o5, %pstate
    379  rdpr %pstate, %g1
    380  wrpr %g1, PSTATE_IE, %pstate
    451  wrpr %g1, %pstate
|
D | smp_64.c |
    386  static void spitfire_xcall_helper(u64 data0, u64 data1, u64 data2, u64 pstate, unsigned long cpu)  in spitfire_xcall_helper()  argument
    421  : "r" (pstate), "i" (PSTATE_IE), "i" (ASI_INTR_W),  in spitfire_xcall_helper()
    434  : : "r" (pstate));  in spitfire_xcall_helper()
    442  : : "r" (pstate));  in spitfire_xcall_helper()
    456  u64 pstate;  in spitfire_xcall_deliver()  local
    459  __asm__ __volatile__("rdpr %%pstate, %0" : "=r" (pstate));  in spitfire_xcall_deliver()
    466  spitfire_xcall_helper(data0, data1, data2, pstate, cpu_list[i]);  in spitfire_xcall_deliver()
    476  u64 *mondo, pstate, ver, busy_mask;  in cheetah_xcall_deliver()  local
    490  __asm__ __volatile__("rdpr %%pstate, %0" : "=r" (pstate));  in cheetah_xcall_deliver()
    495  : : "r" (pstate), "i" (PSTATE_IE));  in cheetah_xcall_deliver()
    [all …]
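
On sparc64, %pstate is a privileged register, so smp_64.c reads it with rdpr inline assembly and the xcall helpers drop PSTATE_IE while the mondo payload is written. The sketch below shows that save/flip/restore pattern under two assumptions: it only assembles on sparc64, and the PSTATE_IE value is written out here for illustration. Note that wrpr xors its two source operands, so flipping IE off this way relies on interrupts having been enabled on entry, as in the kernel paths listed.

    #include <stdint.h>

    #define PSTATE_IE 0x002UL  /* illustrative value for the interrupt-enable bit */

    /* Read %pstate, xor the IE bit away while doing work that must not be
     * interrupted, then restore the saved value. sparc64-only inline asm,
     * mirroring the rdpr/wrpr pairs in the listing above. */
    static inline uint64_t pstate_irq_save(void)
    {
        uint64_t pstate;

        __asm__ __volatile__("rdpr %%pstate, %0" : "=r" (pstate));
        __asm__ __volatile__("wrpr %0, %1, %%pstate"
                             : /* no outputs */
                             : "r" (pstate), "i" (PSTATE_IE));
        return pstate;
    }

    static inline void pstate_irq_restore(uint64_t pstate)
    {
        __asm__ __volatile__("wrpr %0, 0x0, %%pstate" : : "r" (pstate));
    }

    /* Usage: run a callback (e.g. code that writes the mondo registers)
     * with the IE bit cleared in %pstate. */
    void run_with_irqs_off(void (*fn)(void *), void *arg)
    {
        uint64_t old_pstate = pstate_irq_save();

        fn(arg);
        pstate_irq_restore(old_pstate);
    }
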
|
D | ktlb.S |
    81  661: rdpr %pstate, %g5
    82  wrpr %g5, PSTATE_AG | PSTATE_MG, %pstate
    235  661: rdpr %pstate, %g5
    236  wrpr %g5, PSTATE_AG | PSTATE_MG, %pstate
|
D | hvtramp.S |
    81  wrpr %g0, (PSTATE_PRIV | PSTATE_PEF), %pstate
    124  wrpr %g0, (PSTATE_PRIV | PSTATE_PEF | PSTATE_IE), %pstate
|
D | asm-offsets.c |
    36  OFFSET(SC_REG_PSTATE, saved_context, pstate);  in sparc64_foo()
|
/arch/arm64/kvm/ |
D | reset.c |
    37  .regs.pstate = (PSR_MODE_EL1h | PSR_A_BIT | PSR_I_BIT |
    42  .regs.pstate = (COMPAT_PSR_MODE_SVC | COMPAT_PSR_A_BIT |
|
/arch/sparc/lib/ |
D | clear_page.S |
    62  rdpr %pstate, %o4
    63  wrpr %o4, PSTATE_IE, %pstate
    68  wrpr %o4, 0x0, %pstate
|
/arch/sparc/mm/ |
D | ultra.S |
    59  rdpr %pstate, %g7
    61  wrpr %g2, %pstate
    76  wrpr %g7, 0x0, %pstate
    86  rdpr %pstate, %g7
    89  wrpr %g2, %pstate
    108  wrpr %g7, 0x0, %pstate
    136  rdpr %pstate, %g1
    137  wrpr %g1, PSTATE_IE, %pstate
    146  wrpr %g1, 0, %pstate
    213  rdpr %pstate, %g7
    [all …]
|
/arch/sparc/include/asm/ |
D | hibernate.h |
    16  unsigned long pstate;  member
|
/arch/arm64/include/asm/xen/ |
D | events.h |
    16  return raw_irqs_disabled_flags((unsigned long) regs->pstate);  in xen_irqs_disabled()
|
/arch/arm64/kernel/ |
D | debug-monitors.c |
    171  spsr = regs->pstate;  in set_regs_spsr_ss()
    174  regs->pstate = spsr;  in set_regs_spsr_ss()
    181  spsr = regs->pstate;  in clear_regs_spsr_ss()
    183  regs->pstate = spsr;  in clear_regs_spsr_ss()
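
set_regs_spsr_ss() and clear_regs_spsr_ss() are plain read-modify-write helpers: they copy regs->pstate into a local, flip the saved SPSR's software-step (SS) bit, and write it back, so the task either takes a single-step debug exception on return to it or does not. A toy version of the same toggle is below; the DBG_SPSR_SS position (bit 21) and the fake_pt_regs struct are assumptions for the sketch.

    #include <stdint.h>
    #include <stdio.h>

    #define DBG_SPSR_SS (1u << 21)  /* assumed position of the software-step bit */

    struct fake_pt_regs {
        uint64_t pstate;
    };

    /* Same read-modify-write shape as the listed helpers: request (or cancel)
     * a single-step exception on the next return to this context. */
    static void set_regs_spsr_ss(struct fake_pt_regs *regs)
    {
        uint64_t spsr = regs->pstate;

        spsr |= DBG_SPSR_SS;
        regs->pstate = spsr;
    }

    static void clear_regs_spsr_ss(struct fake_pt_regs *regs)
    {
        uint64_t spsr = regs->pstate;

        spsr &= ~(uint64_t)DBG_SPSR_SS;
        regs->pstate = spsr;
    }

    int main(void)
    {
        struct fake_pt_regs regs = { 0 };

        set_regs_spsr_ss(&regs);
        printf("SS set:     pstate=%#llx\n", (unsigned long long)regs.pstate);
        clear_regs_spsr_ss(&regs);
        printf("SS cleared: pstate=%#llx\n", (unsigned long long)regs.pstate);
        return 0;
    }
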
|
D | process.c |
    240  regs->pc, lr, regs->pstate);  in __show_regs()
    340  childregs->pstate = PSR_MODE_EL1h;  in copy_thread()
    343  childregs->pstate |= PSR_UAO_BIT;  in copy_thread()
|
D | signal.c |
    111  __get_user_error(regs->pstate, &sf->uc.uc_mcontext.pstate, err);  in restore_sigframe()
    181  __put_user_error(regs->pstate, &sf->uc.uc_mcontext.pstate, err);  in setup_sigframe()
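
signal.c is where pstate crosses the user/kernel boundary: setup_sigframe() publishes regs->pstate into the __u64 pstate member of the UAPI sigcontext (listed further down), and restore_sigframe() reads it back on sigreturn before the kernel re-validates it. A simplified, userspace-only sketch of that round trip follows, with plain assignments standing in for __put_user_error()/__get_user_error() and made-up struct and helper names.

    #include <stdint.h>
    #include <stdio.h>

    /* Cut-down stand-ins for the sketch only. */
    struct fake_sigcontext {
        uint64_t pstate;  /* mirrors the __u64 pstate member in the UAPI header */
    };

    struct fake_pt_regs {
        uint64_t pstate;
    };

    /* Signal delivery: publish the saved PSTATE into the user-visible frame
     * (the kernel does this with __put_user_error()). */
    static void setup_frame_pstate(struct fake_sigcontext *sf, const struct fake_pt_regs *regs)
    {
        sf->pstate = regs->pstate;
    }

    /* sigreturn: read whatever the frame now holds back into pt_regs
     * (__get_user_error() in the kernel, followed by re-validation). */
    static void restore_frame_pstate(struct fake_pt_regs *regs, const struct fake_sigcontext *sf)
    {
        regs->pstate = sf->pstate;
    }

    int main(void)
    {
        struct fake_pt_regs regs = { .pstate = 0 };
        struct fake_sigcontext frame;

        setup_frame_pstate(&frame, &regs);
        frame.pstate |= 0x80;  /* a handler could modify the frame contents */
        restore_frame_pstate(&regs, &frame);
        printf("restored pstate=%#llx (still subject to the kernel's checks)\n",
               (unsigned long long)regs.pstate);
        return 0;
    }
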
|
D | armv8_deprecated.c |
    383  switch (arm_check_condition(instr, regs->pstate)) {  in swp_handler()
    464  switch (arm_check_condition(instr, regs->pstate)) {  in cp15barrier_handler()
    567  regs->pstate |= COMPAT_PSR_E_BIT;  in compat_setend_handler()
    570  regs->pstate &= ~COMPAT_PSR_E_BIT;  in compat_setend_handler()
|
/arch/arm64/include/uapi/asm/ |
D | sigcontext.h |
    31  __u64 pstate;  member
|
D | ptrace.h |
    73  __u64 pstate;  member
|
/arch/sparc/power/ |
D | hibernate_asm.S |
    30  rdpr %pstate, %g2
    110  wrpr %g2, %pstate
|