/kernel/linux/linux-5.10/arch/arm64/kernel/ |
D | entry-common.c |
    112  static void noinstr el1_abort(struct pt_regs *regs, unsigned long esr)   in el1_abort() argument
    119  do_mem_abort(far, esr, regs);   in el1_abort()
    124  static void noinstr el1_pc(struct pt_regs *regs, unsigned long esr)   in el1_pc() argument
    130  do_sp_pc_abort(far, esr, regs);   in el1_pc()
    144  static void noinstr el1_inv(struct pt_regs *regs, unsigned long esr)   in el1_inv() argument
    148  bad_mode(regs, 0, esr);   in el1_inv()
    177  static void noinstr el1_dbg(struct pt_regs *regs, unsigned long esr)   in el1_dbg() argument
    182  do_debug_exception(far, esr, regs);   in el1_dbg()
    186  static void noinstr el1_fpac(struct pt_regs *regs, unsigned long esr)   in el1_fpac() argument
    190  do_ptrauth_fault(regs, esr);   in el1_fpac()
    [all …]
|
D | traps.c |
    155  unsigned int esr = tsk->thread.fault_code;   in arm64_show_signal() local
    165  if (esr)   in arm64_show_signal()
    166  pr_cont("%s, ESR 0x%08x, ", esr_get_class_string(esr), esr);   in arm64_show_signal()
    419  void do_ptrauth_fault(struct pt_regs *regs, unsigned int esr)   in do_ptrauth_fault() argument
    426  force_signal_inject(SIGILL, ILL_ILLOPN, regs->pc, esr);   in do_ptrauth_fault()
    450  static void user_cache_maint_handler(unsigned int esr, struct pt_regs *regs)   in user_cache_maint_handler() argument
    453  int rt = ESR_ELx_SYS64_ISS_RT(esr);   in user_cache_maint_handler()
    454  int crm = (esr & ESR_ELx_SYS64_ISS_CRM_MASK) >> ESR_ELx_SYS64_ISS_CRM_SHIFT;   in user_cache_maint_handler()
    489  static void ctr_read_handler(unsigned int esr, struct pt_regs *regs)   in ctr_read_handler() argument
    491  int rt = ESR_ELx_SYS64_ISS_RT(esr);   in ctr_read_handler()
    [all …]
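
user_cache_maint_handler() and ctr_read_handler() both pull the target register and CRm field out of the SYS64 ISS encoding of the ESR via ESR_ELx_SYS64_ISS_RT() and the CRM mask/shift. A minimal standalone sketch of that decode follows; the field positions (direction in bit 0, CRm in bits 4:1, Rt in bits 9:5) are restated here as assumptions so the example builds outside the kernel, and the sample ESR value is hypothetical.

    /*
     * Standalone sketch: decode the SYS64 ISS fields used by
     * user_cache_maint_handler()/ctr_read_handler().  The shifts and
     * masks mirror ESR_ELx_SYS64_ISS_* from esr.h and are restated here
     * as assumptions so the example compiles outside the kernel.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define SYS64_ISS_DIR_MASK   0x1u                            /* bit 0: 1 = read (MRS), 0 = write (MSR) */
    #define SYS64_ISS_CRM_SHIFT  1
    #define SYS64_ISS_CRM_MASK   (0xfu << SYS64_ISS_CRM_SHIFT)   /* bits 4:1 */
    #define SYS64_ISS_RT_SHIFT   5
    #define SYS64_ISS_RT_MASK    (0x1fu << SYS64_ISS_RT_SHIFT)   /* bits 9:5 */

    int main(void)
    {
            uint32_t esr = 0x60000145;   /* hypothetical EC=0x18 (SYS64 trap) syndrome */
            unsigned int rt  = (esr & SYS64_ISS_RT_MASK) >> SYS64_ISS_RT_SHIFT;
            unsigned int crm = (esr & SYS64_ISS_CRM_MASK) >> SYS64_ISS_CRM_SHIFT;

            printf("Rt=x%u CRm=%u %s\n", rt, crm,
                   (esr & SYS64_ISS_DIR_MASK) ? "read" : "write");
            return 0;
    }
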
|
D | debug-monitors.c |
    205  static int call_step_hook(struct pt_regs *regs, unsigned int esr)   in call_step_hook() argument
    218  retval = hook->fn(regs, esr);   in call_step_hook()
    242  static int single_step_handler(unsigned long unused, unsigned int esr,   in single_step_handler() argument
    254  if (!handler_found && call_step_hook(regs, esr) == DBG_HOOK_HANDLED)   in single_step_handler()
    303  static int call_break_hook(struct pt_regs *regs, unsigned int esr)   in call_break_hook() argument
    307  int (*fn)(struct pt_regs *regs, unsigned int esr) = NULL;   in call_break_hook()
    316  unsigned int comment = esr & ESR_ELx_BRK64_ISS_COMMENT_MASK;   in call_break_hook()
    322  return fn ? fn(regs, esr) : DBG_HOOK_ERROR;   in call_break_hook()
    326  static int brk_handler(unsigned long unused, unsigned int esr,   in brk_handler() argument
    329  if (call_break_hook(regs, esr) == DBG_HOOK_HANDLED)   in brk_handler()
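
call_break_hook() matches the 16-bit BRK comment field (esr & ESR_ELx_BRK64_ISS_COMMENT_MASK) against the immediates of registered hooks and, on a match, calls the hook's fn(regs, esr). Below is a hedged, untested sketch of built-in kernel code registering such a hook; the 0x7ff immediate, the function names and the step-over behaviour are hypothetical choices, only the struct break_hook / register_kernel_break_hook() interface from debug-monitors.h is taken from the source.

    /*
     * Hedged sketch (built-in kernel code, not a tested module): claim
     * BRK #0x7ff with a kernel break hook.  The 0x7ff immediate is an
     * arbitrary, hypothetical choice.
     */
    #include <linux/init.h>
    #include <linux/printk.h>
    #include <linux/ptrace.h>
    #include <asm/debug-monitors.h>
    #include <asm/esr.h>

    static int demo_brk_fn(struct pt_regs *regs, unsigned int esr)
    {
            pr_info("demo BRK at %pS, comment=%#x\n",
                    (void *)instruction_pointer(regs),
                    esr & ESR_ELx_BRK64_ISS_COMMENT_MASK);

            /* Step over the 4-byte BRK so execution can continue. */
            instruction_pointer_set(regs, instruction_pointer(regs) + 4);
            return DBG_HOOK_HANDLED;
    }

    static struct break_hook demo_break_hook = {
            .fn  = demo_brk_fn,
            .imm = 0x7ff,        /* hypothetical comment value for "brk #0x7ff" */
    };

    static int __init demo_break_hook_init(void)
    {
            register_kernel_break_hook(&demo_break_hook);
            return 0;
    }
    arch_initcall(demo_break_hook_init);
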
|
D | fpsimd.c |
    937  void do_sve_acc(unsigned int esr, struct pt_regs *regs)   in do_sve_acc() argument
    964  void do_fpsimd_acc(unsigned int esr, struct pt_regs *regs)   in do_fpsimd_acc() argument
    973  void do_fpsimd_exc(unsigned int esr, struct pt_regs *regs)   in do_fpsimd_exc() argument
    977  if (esr & ESR_ELx_FP_EXC_TFV) {   in do_fpsimd_exc()
    978  if (esr & FPEXC_IOF)   in do_fpsimd_exc()
    980  else if (esr & FPEXC_DZF)   in do_fpsimd_exc()
    982  else if (esr & FPEXC_OFF)   in do_fpsimd_exc()
    984  else if (esr & FPEXC_UFF)   in do_fpsimd_exc()
    986  else if (esr & FPEXC_IXF)   in do_fpsimd_exc()
|
D | kgdb.c |
    234  static int kgdb_brk_fn(struct pt_regs *regs, unsigned int esr)   in kgdb_brk_fn() argument
    241  static int kgdb_compiled_brk_fn(struct pt_regs *regs, unsigned int esr)   in NOKPROBE_SYMBOL()
    250  static int kgdb_step_brk_fn(struct pt_regs *regs, unsigned int esr)   in kgdb_step_brk_fn() argument
|
/kernel/linux/linux-5.10/arch/arm64/mm/ |
D | fault.c |
    43   int (*fn)(unsigned long addr, unsigned int esr,
    53   static inline const struct fault_info *esr_to_fault_info(unsigned int esr)   in esr_to_fault_info() argument
    55   return fault_info + (esr & ESR_ELx_FSC);   in esr_to_fault_info()
    58   static inline const struct fault_info *esr_to_debug_fault_info(unsigned int esr)   in esr_to_debug_fault_info() argument
    60   return debug_fault_info + DBG_ESR_EVT(esr);   in esr_to_debug_fault_info()
    63   static void data_abort_decode(unsigned int esr)   in data_abort_decode() argument
    67   if (esr & ESR_ELx_ISV) {   in data_abort_decode()
    69   1U << ((esr & ESR_ELx_SAS) >> ESR_ELx_SAS_SHIFT));   in data_abort_decode()
    71   (esr & ESR_ELx_SSE) >> ESR_ELx_SSE_SHIFT,   in data_abort_decode()
    72   (esr & ESR_ELx_SRT_MASK) >> ESR_ELx_SRT_SHIFT);   in data_abort_decode()
    [all …]
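
data_abort_decode() only trusts the access-size, sign-extension and register fields when the ISV bit says the ISS is valid, and esr_to_fault_info() indexes its table with the low FSC bits. A standalone sketch of the same decode follows; the bit positions (ISV bit 24, SAS bits 23:22, SSE bit 21, SRT bits 20:16, WnR bit 6, FSC bits 5:0) are restated as assumptions so the example builds on its own, and the input value is hypothetical.

    /*
     * Standalone sketch of the data-abort ISS decode done by
     * data_abort_decode() and esr_to_fault_info().  Bit positions are
     * restated from esr.h as assumptions so this compiles on its own.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define ESR_ISV        (1u << 24)                 /* register/size fields valid */
    #define ESR_SAS_SHIFT  22                         /* access size = 1 << SAS bytes */
    #define ESR_SAS_MASK   (0x3u << ESR_SAS_SHIFT)
    #define ESR_SSE        (1u << 21)                 /* sign-extended load */
    #define ESR_SRT_SHIFT  16                         /* target register */
    #define ESR_SRT_MASK   (0x1fu << ESR_SRT_SHIFT)
    #define ESR_WNR        (1u << 6)                  /* write, not read */
    #define ESR_FSC_MASK   0x3fu                      /* fault status code */

    static void decode_data_abort(uint32_t esr)
    {
            printf("FSC=%#x, %s access\n", esr & ESR_FSC_MASK,
                   (esr & ESR_WNR) ? "write" : "read");

            if (!(esr & ESR_ISV))
                    return;          /* size/register fields are not valid */

            printf("size=%u bytes, Rt=x%u, sign-extend=%u\n",
                   1u << ((esr & ESR_SAS_MASK) >> ESR_SAS_SHIFT),
                   (esr & ESR_SRT_MASK) >> ESR_SRT_SHIFT,
                   (esr & ESR_SSE) ? 1u : 0u);
    }

    int main(void)
    {
            decode_data_abort(0x95830045);   /* hypothetical data-abort syndrome */
            return 0;
    }
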
|
/kernel/linux/linux-5.10/arch/arm64/include/asm/ |
D | exception.h |
    24   unsigned int esr = ESR_ELx_EC_SERROR << ESR_ELx_EC_SHIFT;   in disr_to_esr() local
    27   esr |= (disr & DISR_EL1_ESR_MASK);   in disr_to_esr()
    29   esr |= (disr & ESR_ELx_ISS_MASK);   in disr_to_esr()
    31   return esr;   in disr_to_esr()
    40   void do_mem_abort(unsigned long addr, unsigned int esr, struct pt_regs *regs);
    43   asmlinkage void bad_mode(struct pt_regs *regs, int reason, unsigned int esr);
    44   void do_debug_exception(unsigned long addr_if_watchpoint, unsigned int esr,
    46   void do_fpsimd_acc(unsigned int esr, struct pt_regs *regs);
    47   void do_sve_acc(unsigned int esr, struct pt_regs *regs);
    48   void do_fpsimd_exc(unsigned int esr, struct pt_regs *regs);
    [all …]
|
D | traps.h |
    60   static inline bool arm64_is_ras_serror(u32 esr)   in arm64_is_ras_serror() argument
    64   if (esr & ESR_ELx_IDS)   in arm64_is_ras_serror()
    80   static inline u32 arm64_ras_serror_get_severity(u32 esr)   in arm64_ras_serror_get_severity() argument
    82   u32 aet = esr & ESR_ELx_AET;   in arm64_ras_serror_get_severity()
    84   if (!arm64_is_ras_serror(esr)) {   in arm64_ras_serror_get_severity()
    93   if ((esr & ESR_ELx_FSC) != ESR_ELx_FSC_SERROR) {   in arm64_ras_serror_get_severity()
    101  bool arm64_is_fatal_ras_serror(struct pt_regs *regs, unsigned int esr);
    102  void __noreturn arm64_serror_panic(struct pt_regs *regs, u32 esr);
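
arm64_is_ras_serror() treats an SError as RAS-described only when the implementation-defined-syndrome bit (ESR_ELx_IDS) is clear, and arm64_ras_serror_get_severity() then reads the AET field unless the FSC says the syndrome carries no architected severity. A standalone sketch of that classification follows; the IDS/AET/FSC positions and the 0x11 "SError interrupt" status code are restated from the headers as assumptions, and the sample value is hypothetical.

    /*
     * Standalone sketch of the RAS SError checks done by
     * arm64_is_ras_serror()/arm64_ras_serror_get_severity().
     * Bit positions are restated from esr.h as assumptions.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ESR_IDS          (1u << 24)       /* impdef syndrome: not RAS-described */
    #define ESR_AET_SHIFT    10
    #define ESR_AET_MASK     (0x7u << ESR_AET_SHIFT)
    #define ESR_FSC_MASK     0x3fu
    #define ESR_FSC_SERROR   0x11u            /* "SError interrupt" status code */

    static bool is_ras_serror(uint32_t esr)
    {
            return !(esr & ESR_IDS);
    }

    int main(void)
    {
            uint32_t esr = 0xbe000411;        /* hypothetical SError syndrome */

            if (!is_ras_serror(esr))
                    printf("implementation-defined SError syndrome\n");
            else if ((esr & ESR_FSC_MASK) != ESR_FSC_SERROR)
                    printf("RAS SError without architected severity\n");
            else
                    printf("RAS SError, AET=%u\n",
                           (esr & ESR_AET_MASK) >> ESR_AET_SHIFT);
            return 0;
    }
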
|
D | esr.h |
    73   #define ESR_ELx_EC(esr) (((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT)   argument
    188  #define ESR_ELx_SYS64_ISS_RT(esr) \   argument
    189  (((esr) & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT)
    326  static inline bool esr_is_data_abort(u32 esr)   in esr_is_data_abort() argument
    328  const u32 ec = ESR_ELx_EC(esr);   in esr_is_data_abort()
    333  const char *esr_get_class_string(u32 esr);
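
ESR_ELx_EC() isolates the 6-bit exception class in bits 31:26, and esr_is_data_abort() simply compares that class against the two data-abort encodings. A standalone sketch using the same layout follows; the class numbers are restated here as assumptions so the example compiles outside the kernel, and the sample ESR is hypothetical.

    /*
     * Standalone sketch of ESR_ELx_EC()/esr_is_data_abort().  The EC
     * field layout and class numbers are restated from esr.h as
     * assumptions so this compiles outside the kernel.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define EC_SHIFT        26
    #define EC_MASK         (0x3fu << EC_SHIFT)
    #define EC(esr)         (((esr) & EC_MASK) >> EC_SHIFT)

    #define EC_IABT_LOW     0x20u   /* instruction abort from a lower EL */
    #define EC_IABT_CUR     0x21u   /* instruction abort, current EL */
    #define EC_DABT_LOW     0x24u   /* data abort from a lower EL */
    #define EC_DABT_CUR     0x25u   /* data abort, current EL */

    static bool esr_is_data_abort(uint32_t esr)
    {
            uint32_t ec = EC(esr);

            return ec == EC_DABT_LOW || ec == EC_DABT_CUR;
    }

    int main(void)
    {
            uint32_t esr = 0x96000045;   /* hypothetical: EC=0x25, data abort at current EL */

            printf("EC=%#x, data abort: %s\n", EC(esr),
                   esr_is_data_abort(esr) ? "yes" : "no");
            return 0;
    }
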
|
D | kvm_emulate.h |
    248  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_vcpu_get_condition() local
    250  if (esr & ESR_ELx_CV)   in kvm_vcpu_get_condition()
    251  return (esr & ESR_ELx_COND_MASK) >> ESR_ELx_COND_SHIFT;   in kvm_vcpu_get_condition()
    379  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_vcpu_sys_get_rt() local
    380  return ESR_ELx_SYS64_ISS_RT(esr);   in kvm_vcpu_sys_get_rt()
|
D | debug-monitors.h |
    79   int (*fn)(struct pt_regs *regs, unsigned int esr);
    90   int (*fn)(struct pt_regs *regs, unsigned int esr);
|
D | kvm_ras.h |
    17   static inline int kvm_handle_guest_sea(phys_addr_t addr, unsigned int esr)   in kvm_handle_guest_sea() argument
|
/kernel/linux/linux-5.10/arch/arm64/kvm/ |
D | inject_fault.c |
    127  u32 esr = 0;   in inject_abt64() local
    138  esr |= ESR_ELx_IL;   in inject_abt64()
    145  esr |= (ESR_ELx_EC_IABT_LOW << ESR_ELx_EC_SHIFT);   in inject_abt64()
    147  esr |= (ESR_ELx_EC_IABT_CUR << ESR_ELx_EC_SHIFT);   in inject_abt64()
    150  esr |= ESR_ELx_EC_DABT_LOW << ESR_ELx_EC_SHIFT;   in inject_abt64()
    152  vcpu_write_sys_reg(vcpu, esr | ESR_ELx_FSC_EXTABT, ESR_EL1);   in inject_abt64()
    157  u32 esr = (ESR_ELx_EC_UNKNOWN << ESR_ELx_EC_SHIFT);   in inject_undef64() local
    166  esr |= ESR_ELx_IL;   in inject_undef64()
    168  vcpu_write_sys_reg(vcpu, esr, ESR_EL1);   in inject_undef64()
    218  void kvm_set_sei_esr(struct kvm_vcpu *vcpu, u64 esr)   in kvm_set_sei_esr() argument
    [all …]
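
inject_abt64() composes the synthetic ESR_EL1 the guest will observe: an instruction- or data-abort exception class, the instruction-length bit, and an external-abort FSC, which it then writes with vcpu_write_sys_reg(). A simplified standalone sketch of that composition follows (it only models aborts taken from a lower exception level); the constants are restated from esr.h as assumptions so it builds on its own.

    /*
     * Standalone sketch of how inject_abt64() composes a synthetic
     * ESR_EL1 for a guest abort.  Constants restated from esr.h as
     * assumptions; the real code also selects the current-EL classes.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define EC_SHIFT        26
    #define EC_IABT_LOW     0x20u        /* instruction abort from a lower EL */
    #define EC_DABT_LOW     0x24u        /* data abort from a lower EL */
    #define ESR_IL          (1u << 25)   /* 32-bit instruction length */
    #define FSC_EXTABT      0x10u        /* synchronous external abort */

    static uint32_t make_guest_abort_esr(bool is_iabt)
    {
            uint32_t esr = 0;

            esr |= (is_iabt ? EC_IABT_LOW : EC_DABT_LOW) << EC_SHIFT;
            esr |= ESR_IL;
            esr |= FSC_EXTABT;
            return esr;
    }

    int main(void)
    {
            printf("data abort ESR_EL1 = %#010x\n", make_guest_abort_esr(false));
            printf("insn abort ESR_EL1 = %#010x\n", make_guest_abort_esr(true));
            return 0;
    }
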
|
D | handle_exit.c |
    30   static void kvm_handle_guest_serror(struct kvm_vcpu *vcpu, u32 esr)   in kvm_handle_guest_serror() argument
    32   if (!arm64_is_ras_serror(esr) || arm64_is_fatal_ras_serror(NULL, esr))   in kvm_handle_guest_serror()
    122  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_handle_guest_debug() local
    126  run->debug.arch.hsr = esr;   in kvm_handle_guest_debug()
    128  switch (ESR_ELx_EC(esr)) {   in kvm_handle_guest_debug()
    139  __func__, (unsigned int) esr);   in kvm_handle_guest_debug()
    149  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_handle_unknown_ec() local
    152  esr, esr_get_class_string(esr);   in kvm_handle_unknown_ec()
    203  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_get_exit_handler() local
    204  u8 esr_ec = ESR_ELx_EC(esr);   in kvm_get_exit_handler()
|
D | sys_regs.c |
    2257  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_handle_cp_64() local
    2259  int Rt2 = (esr >> 10) & 0x1f;   in kvm_handle_cp_64()
    2263  params.CRm = (esr >> 1) & 0xf;   in kvm_handle_cp_64()
    2264  params.is_write = ((esr & 1) == 0);   in kvm_handle_cp_64()
    2267  params.Op1 = (esr >> 16) & 0xf;   in kvm_handle_cp_64()
    2309  u32 esr = kvm_vcpu_get_esr(vcpu);   in kvm_handle_cp_32() local
    2314  params.CRm = (esr >> 1) & 0xf;   in kvm_handle_cp_32()
    2316  params.is_write = ((esr & 1) == 0);   in kvm_handle_cp_32()
    2317  params.CRn = (esr >> 10) & 0xf;   in kvm_handle_cp_32()
    2319  params.Op1 = (esr >> 14) & 0x7;   in kvm_handle_cp_32()
    [all …]
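
kvm_handle_cp_64() unpacks the MCRR/MRRC trap syndrome with plain shifts: direction in bit 0, CRm in bits 4:1, Rt2 in bits 14:10 and Op1 in bits 19:16, while Rt sits in bits 9:5 per the ESR_ELx_SYS64_ISS_RT() definition shown under esr.h. A standalone sketch of that unpacking follows; the sample syndrome value is hypothetical.

    /*
     * Standalone sketch of the MCRR/MRRC (cp15 64-bit) syndrome decode
     * done by kvm_handle_cp_64(), using the shifts shown in the listing;
     * the Rt position comes from ESR_ELx_SYS64_ISS_RT().
     */
    #include <stdint.h>
    #include <stdio.h>

    struct cp64_params {
            unsigned int op1, crm, rt, rt2;
            int is_write;
    };

    static struct cp64_params decode_cp64(uint32_t esr)
    {
            struct cp64_params p;

            p.is_write = (esr & 1) == 0;
            p.crm      = (esr >> 1)  & 0xf;
            p.rt       = (esr >> 5)  & 0x1f;
            p.rt2      = (esr >> 10) & 0x1f;
            p.op1      = (esr >> 16) & 0xf;
            return p;
    }

    int main(void)
    {
            struct cp64_params p = decode_cp64(0x10020c5d);   /* hypothetical trap syndrome */

            printf("%s Op1=%u CRm=%u Rt=x%u Rt2=x%u\n",
                   p.is_write ? "MCRR" : "MRRC", p.op1, p.crm, p.rt, p.rt2);
            return 0;
    }
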
|
/kernel/linux/linux-5.10/arch/alpha/kernel/ |
D | core_lca.c |
    338  mem_error(unsigned long esr, unsigned long ear)   in mem_error() argument
    341  ((esr & ESR_CEE) ? "Correctable" :   in mem_error()
    342  (esr & ESR_UEE) ? "Uncorrectable" : "A"),   in mem_error()
    343  (esr & ESR_WRE) ? "write" : "read",   in mem_error()
    344  (esr & ESR_SOR) ? "memory" : "b-cache",   in mem_error()
    346  if (esr & ESR_CTE) {   in mem_error()
    349  if (esr & ESR_MSE) {   in mem_error()
    352  if (esr & ESR_MHE) {   in mem_error()
    355  if (esr & ESR_NXM) {   in mem_error()
    435  if (el.s->esr & ESR_EAV) {   in lca_machine_check()
    [all …]
|
/kernel/linux/linux-5.10/arch/sh/boards/mach-dreamcast/ |
D | irq.c |
    90   __u32 esr = ESR_BASE + (LEVEL(irq) << 2);   in mask_ack_systemasic_irq() local
    92   outl((1 << EVENT_BIT(irq)), esr);   in mask_ack_systemasic_irq()
    107  __u32 emr, esr, status, level;   in systemasic_irq_demux() local
    124  esr = ESR_BASE + (level << 2);   in systemasic_irq_demux()
    127  status = inl(esr);   in systemasic_irq_demux()
|
/kernel/linux/linux-5.10/arch/arm64/kvm/hyp/include/hyp/ |
D | switch.h |
    161  u64 esr;   in __populate_fault_info() local
    164  esr = vcpu->arch.fault.esr_el2;   in __populate_fault_info()
    165  ec = ESR_ELx_EC(esr);   in __populate_fault_info()
    183  if (!(esr & ESR_ELx_S1PTW) &&   in __populate_fault_info()
    185  (esr & ESR_ELx_FSC_TYPE) == FSC_PERM)) {   in __populate_fault_info()
    339  static inline bool esr_is_ptrauth_trap(u32 esr)   in esr_is_ptrauth_trap() argument
    341  u32 ec = ESR_ELx_EC(esr);   in esr_is_ptrauth_trap()
    349  switch (esr_sys64_to_sysreg(esr)) {   in esr_is_ptrauth_trap()
|
/kernel/linux/linux-5.10/drivers/net/ethernet/ibm/emac/ |
D | mal.c |
    220  u32 esr = get_mal_dcrn(mal, MAL_ESR);   in mal_serr() local
    223  set_mal_dcrn(mal, MAL_ESR, esr);   in mal_serr()
    225  MAL_DBG(mal, "SERR %08x" NL, esr);   in mal_serr()
    227  if (esr & MAL_ESR_EVB) {   in mal_serr()
    228  if (esr & MAL_ESR_DE) {   in mal_serr()
    235  if (esr & MAL_ESR_PEIN) {   in mal_serr()
    243  mal->index, esr);   in mal_serr()
    253  mal->index, esr);   in mal_serr()
    353  u32 esr = get_mal_dcrn(mal, MAL_ESR);   in mal_int() local
    355  if (esr & MAL_ESR_EVB) {   in mal_int()
    [all …]
|
/kernel/linux/linux-5.10/sound/soc/fsl/ |
D | fsl_esai.c |
    107  u32 esr;   in esai_isr() local
    110  regmap_read(esai_priv->regmap, REG_ESAI_ESR, &esr);   in esai_isr()
    123  if (esr & ESAI_ESR_TINIT_MASK)   in esai_isr()
    126  if (esr & ESAI_ESR_RFF_MASK)   in esai_isr()
    129  if (esr & ESAI_ESR_TFE_MASK)   in esai_isr()
    132  if (esr & ESAI_ESR_TLS_MASK)   in esai_isr()
    135  if (esr & ESAI_ESR_TDE_MASK)   in esai_isr()
    138  if (esr & ESAI_ESR_TED_MASK)   in esai_isr()
    141  if (esr & ESAI_ESR_TD_MASK)   in esai_isr()
    144  if (esr & ESAI_ESR_RLS_MASK)   in esai_isr()
    [all …]
|
/kernel/linux/linux-5.10/arch/arm64/kvm/hyp/nvhe/ |
D | hyp-main.c |
    109  u64 esr = read_sysreg_el2(SYS_ESR);   in handle_trap() local
    112  if (ESR_ELx_EC(esr) != ESR_ELx_EC_HVC64)   in handle_trap()
|
/kernel/linux/linux-5.10/arch/mips/include/asm/octeon/ |
D | cvmx-sli-defs.h |
    107  __BITFIELD_FIELD(uint64_t esr:2,
    119  __BITFIELD_FIELD(uint64_t esr:2,
|
/kernel/linux/linux-5.10/arch/arm64/kernel/probes/ |
D | uprobes.c |
    169  unsigned int esr)   in uprobe_breakpoint_handler() argument
    178  unsigned int esr)   in uprobe_single_step_handler() argument
|
/kernel/linux/linux-5.10/arch/arm64/kvm/hyp/ |
D | vgic-v3-sr.c |
    429  u32 esr = kvm_vcpu_get_esr(vcpu);   in __vgic_v3_get_group() local
    430  u8 crm = (esr & ESR_ELx_SYS64_ISS_CRM_MASK) >> ESR_ELx_SYS64_ISS_CRM_SHIFT;   in __vgic_v3_get_group()
    975  u32 esr;   in __vgic_v3_perform_cpuif_access() local
    981  esr = kvm_vcpu_get_esr(vcpu);   in __vgic_v3_perform_cpuif_access()
    988  sysreg = esr_cp15_to_sysreg(esr);   in __vgic_v3_perform_cpuif_access()
    990  sysreg = esr_sys64_to_sysreg(esr);   in __vgic_v3_perform_cpuif_access()
    993  is_read = (esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ;   in __vgic_v3_perform_cpuif_access()
|
/kernel/linux/linux-5.10/arch/powerpc/include/uapi/asm/ |
D | kvm_para.h |
    58   __u32 esr;   member
|