Home
last modified time | relevance | path

Searched refs: kvm_vcpu_get_hsr (Results 1 – 9 of 9) sorted by relevance

/arch/arm/include/asm/
kvm_emulate.h:83 (definition)  static inline u32 kvm_vcpu_get_hsr(struct kvm_vcpu *vcpu) in kvm_vcpu_get_hsr() function
105 return kvm_vcpu_get_hsr(vcpu) & HSR_ISV; in kvm_vcpu_dabt_isvalid()
110 return kvm_vcpu_get_hsr(vcpu) & HSR_WNR; in kvm_vcpu_dabt_iswrite()
115 return kvm_vcpu_get_hsr(vcpu) & HSR_SSE; in kvm_vcpu_dabt_issext()
120 return (kvm_vcpu_get_hsr(vcpu) & HSR_SRT_MASK) >> HSR_SRT_SHIFT; in kvm_vcpu_dabt_get_rd()
125 return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_EA; in kvm_vcpu_dabt_isextabt()
130 return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_S1PTW; in kvm_vcpu_dabt_iss1tw()
136 switch ((kvm_vcpu_get_hsr(vcpu) >> 22) & 0x3) { in kvm_vcpu_dabt_get_as()
152 return kvm_vcpu_get_hsr(vcpu) & HSR_IL; in kvm_vcpu_trap_il_is32bit()
157 return kvm_vcpu_get_hsr(vcpu) >> HSR_EC_SHIFT; in kvm_vcpu_trap_get_class()
[all …]
/arch/arm64/include/asm/
kvm_emulate.h:126 (definition)  static inline u32 kvm_vcpu_get_hsr(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_hsr() function
143 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_ISV); in kvm_vcpu_dabt_isvalid()
148 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_WNR); in kvm_vcpu_dabt_iswrite()
153 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_SSE); in kvm_vcpu_dabt_issext()
158 return (kvm_vcpu_get_hsr(vcpu) & ESR_EL2_SRT_MASK) >> ESR_EL2_SRT_SHIFT; in kvm_vcpu_dabt_get_rd()
163 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_EA); in kvm_vcpu_dabt_isextabt()
168 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_S1PTW); in kvm_vcpu_dabt_iss1tw()
173 return 1 << ((kvm_vcpu_get_hsr(vcpu) & ESR_EL2_SAS) >> ESR_EL2_SAS_SHIFT); in kvm_vcpu_dabt_get_as()
179 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_IL); in kvm_vcpu_trap_il_is32bit()
184 return kvm_vcpu_get_hsr(vcpu) >> ESR_EL2_EC_SHIFT; in kvm_vcpu_trap_get_class()
[all …]
/arch/arm/kvm/
handle_exit.c:65  kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu)); in handle_pabt_hyp()
73 kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu)); in handle_dabt_hyp()
91 if (kvm_vcpu_get_hsr(vcpu) & HSR_WFI_IS_WFE) in kvm_handle_wfx()
103 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_unknown_ec()
emulate.c:181  if (kvm_vcpu_get_hsr(vcpu) >> 30) in kvm_condition_valid()
187 if ((kvm_vcpu_get_hsr(vcpu) & HSR_CV) >> HSR_CV_SHIFT) in kvm_condition_valid()
188 cond = (kvm_vcpu_get_hsr(vcpu) & HSR_COND) >> HSR_COND_SHIFT; in kvm_condition_valid()
coproc.c:477  params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf; in kvm_handle_cp15_64()
478 params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf; in kvm_handle_cp15_64()
479 params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0); in kvm_handle_cp15_64()
482 params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 16) & 0xf; in kvm_handle_cp15_64()
484 params.Rt2 = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf; in kvm_handle_cp15_64()
509 params.CRm = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf; in kvm_handle_cp15_32()
510 params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf; in kvm_handle_cp15_32()
511 params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0); in kvm_handle_cp15_32()
514 params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf; in kvm_handle_cp15_32()
515 params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 14) & 0x7; in kvm_handle_cp15_32()
[all …]
mmu.c:1148  trace_kvm_guest_fault(*vcpu_pc(vcpu), kvm_vcpu_get_hsr(vcpu), in kvm_handle_guest_abort()
1157 (unsigned long)kvm_vcpu_get_hsr(vcpu)); in kvm_handle_guest_abort()
/arch/arm64/kvm/
emulate.c:56  u32 esr = kvm_vcpu_get_hsr(vcpu); in kvm_vcpu_get_condition()
74 if (kvm_vcpu_get_hsr(vcpu) >> 30) in kvm_condition_valid32()
handle_exit.c:64  if (kvm_vcpu_get_hsr(vcpu) & ESR_EL2_EC_WFI_ISS_WFE) in kvm_handle_wfx()
97 (unsigned int)kvm_vcpu_get_hsr(vcpu)); in kvm_get_exit_handler()
sys_regs.c:793  u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_cp_64()
849 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_cp_32()
961 unsigned long esr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_sys_reg()