Searched refs:kvm_vcpu_get_hsr (Results 1 – 9 of 9) sorted by relevance
83   static inline u32 kvm_vcpu_get_hsr(struct kvm_vcpu *vcpu)  in kvm_vcpu_get_hsr() function
105  return kvm_vcpu_get_hsr(vcpu) & HSR_ISV;  in kvm_vcpu_dabt_isvalid()
110  return kvm_vcpu_get_hsr(vcpu) & HSR_WNR;  in kvm_vcpu_dabt_iswrite()
115  return kvm_vcpu_get_hsr(vcpu) & HSR_SSE;  in kvm_vcpu_dabt_issext()
120  return (kvm_vcpu_get_hsr(vcpu) & HSR_SRT_MASK) >> HSR_SRT_SHIFT;  in kvm_vcpu_dabt_get_rd()
125  return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_EA;  in kvm_vcpu_dabt_isextabt()
130  return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_S1PTW;  in kvm_vcpu_dabt_iss1tw()
136  switch ((kvm_vcpu_get_hsr(vcpu) >> 22) & 0x3) {  in kvm_vcpu_dabt_get_as()
152  return kvm_vcpu_get_hsr(vcpu) & HSR_IL;  in kvm_vcpu_trap_il_is32bit()
157  return kvm_vcpu_get_hsr(vcpu) >> HSR_EC_SHIFT;  in kvm_vcpu_trap_get_class()
[all …]
126  static inline u32 kvm_vcpu_get_hsr(const struct kvm_vcpu *vcpu)  in kvm_vcpu_get_hsr() function
143  return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_ISV);  in kvm_vcpu_dabt_isvalid()
148  return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_WNR);  in kvm_vcpu_dabt_iswrite()
153  return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_SSE);  in kvm_vcpu_dabt_issext()
158  return (kvm_vcpu_get_hsr(vcpu) & ESR_EL2_SRT_MASK) >> ESR_EL2_SRT_SHIFT;  in kvm_vcpu_dabt_get_rd()
163  return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_EA);  in kvm_vcpu_dabt_isextabt()
168  return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_S1PTW);  in kvm_vcpu_dabt_iss1tw()
173  return 1 << ((kvm_vcpu_get_hsr(vcpu) & ESR_EL2_SAS) >> ESR_EL2_SAS_SHIFT);  in kvm_vcpu_dabt_get_as()
179  return !!(kvm_vcpu_get_hsr(vcpu) & ESR_EL2_IL);  in kvm_vcpu_trap_il_is32bit()
184  return kvm_vcpu_get_hsr(vcpu) >> ESR_EL2_EC_SHIFT;  in kvm_vcpu_trap_get_class()
[all …]
65   kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu));  in handle_pabt_hyp()
73   kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu));  in handle_dabt_hyp()
91   if (kvm_vcpu_get_hsr(vcpu) & HSR_WFI_IS_WFE)  in kvm_handle_wfx()
103  u32 hsr = kvm_vcpu_get_hsr(vcpu);  in kvm_handle_unknown_ec()
181  if (kvm_vcpu_get_hsr(vcpu) >> 30)  in kvm_condition_valid()
187  if ((kvm_vcpu_get_hsr(vcpu) & HSR_CV) >> HSR_CV_SHIFT)  in kvm_condition_valid()
188  cond = (kvm_vcpu_get_hsr(vcpu) & HSR_COND) >> HSR_COND_SHIFT;  in kvm_condition_valid()
477  params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf;  in kvm_handle_cp15_64()
478  params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf;  in kvm_handle_cp15_64()
479  params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0);  in kvm_handle_cp15_64()
482  params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 16) & 0xf;  in kvm_handle_cp15_64()
484  params.Rt2 = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf;  in kvm_handle_cp15_64()
509  params.CRm = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf;  in kvm_handle_cp15_32()
510  params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf;  in kvm_handle_cp15_32()
511  params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0);  in kvm_handle_cp15_32()
514  params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf;  in kvm_handle_cp15_32()
515  params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 14) & 0x7;  in kvm_handle_cp15_32()
[all …]
1148  trace_kvm_guest_fault(*vcpu_pc(vcpu), kvm_vcpu_get_hsr(vcpu),  in kvm_handle_guest_abort()
1157  (unsigned long)kvm_vcpu_get_hsr(vcpu));  in kvm_handle_guest_abort()
56   u32 esr = kvm_vcpu_get_hsr(vcpu);  in kvm_vcpu_get_condition()
74   if (kvm_vcpu_get_hsr(vcpu) >> 30)  in kvm_condition_valid32()
64   if (kvm_vcpu_get_hsr(vcpu) & ESR_EL2_EC_WFI_ISS_WFE)  in kvm_handle_wfx()
97   (unsigned int)kvm_vcpu_get_hsr(vcpu));  in kvm_get_exit_handler()
793  u32 hsr = kvm_vcpu_get_hsr(vcpu);  in kvm_handle_cp_64()
849  u32 hsr = kvm_vcpu_get_hsr(vcpu);  in kvm_handle_cp_32()
961  unsigned long esr = kvm_vcpu_get_hsr(vcpu);  in kvm_handle_sys_reg()
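Every match above follows the same idiom: the trapped exception's syndrome word (HSR on 32-bit ARM, ESR_EL2 on arm64) is read once through kvm_vcpu_get_hsr() and then masked and shifted to extract individual fields. The sketch below imitates that decoding pattern outside the kernel. The struct, names and macro values are hypothetical stand-ins (not the kernel's kvm_emulate.h); only the field positions used here are architectural: EC at bits [31:26], ISV at bit 24, SAS at bits [23:22] (the ">> 22 & 0x3" seen in kvm_vcpu_dabt_get_as() above), and WNR at bit 6.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the vcpu fault state; the real struct kvm_vcpu
     * keeps the trapped syndrome in its fault-info member. */
    struct fake_vcpu {
    	uint32_t hsr;	/* HSR (ARM) / ESR_EL2 (arm64) saved at trap time */
    };

    /* Architectural field positions used below (common to HSR and ESR_EL2). */
    #define SYN_EC_SHIFT	26		/* EC: exception class, bits [31:26] */
    #define SYN_ISV		(1u << 24)	/* ISV: instruction syndrome valid   */
    #define SYN_SAS_SHIFT	22		/* SAS: access size, bits [23:22]    */
    #define SYN_WNR		(1u << 6)	/* WNR: write, not read              */

    static inline uint32_t vcpu_get_hsr(const struct fake_vcpu *vcpu)
    {
    	return vcpu->hsr;
    }

    /* Accessors in the style of kvm_vcpu_dabt_isvalid()/_iswrite()/_get_as(). */
    static inline int dabt_isvalid(const struct fake_vcpu *vcpu)
    {
    	return !!(vcpu_get_hsr(vcpu) & SYN_ISV);
    }

    static inline int dabt_iswrite(const struct fake_vcpu *vcpu)
    {
    	return !!(vcpu_get_hsr(vcpu) & SYN_WNR);
    }

    static inline unsigned int dabt_get_as(const struct fake_vcpu *vcpu)
    {
    	/* 1 << SAS gives the access size in bytes: 1, 2, 4 or 8. */
    	return 1u << ((vcpu_get_hsr(vcpu) >> SYN_SAS_SHIFT) & 0x3);
    }

    static inline unsigned int trap_get_class(const struct fake_vcpu *vcpu)
    {
    	return vcpu_get_hsr(vcpu) >> SYN_EC_SHIFT;
    }

    int main(void)
    {
    	/* Sample syndrome built for the demo: EC=0x24 (data abort from a
    	 * lower exception level), ISV set, word-sized write (SAS=0b10, WNR). */
    	struct fake_vcpu vcpu = {
    		.hsr = (0x24u << SYN_EC_SHIFT) | SYN_ISV |
    		       (0x2u << SYN_SAS_SHIFT) | SYN_WNR,
    	};

    	printf("class=0x%x valid=%d write=%d size=%u bytes\n",
    	       trap_get_class(&vcpu), dabt_isvalid(&vcpu),
    	       dabt_iswrite(&vcpu), dabt_get_as(&vcpu));
    	return 0;
    }

The double negation (!!) mirrors the arm64 accessors in the second result, which normalise a masked bit to 0 or 1 before returning it; the 32-bit ARM variants in the first result return the raw masked value instead.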