Searched refs:hcr_el2 (Results 1 – 8 of 8) sorted by relevance
/arch/arm64/include/asm/
kvm_emulate.h
    42   return !(vcpu->arch.hcr_el2 & HCR_RW);          in vcpu_el1_is_32bit()
    47   vcpu->arch.hcr_el2 = HCR_GUEST_FLAGS;           in vcpu_reset_hcr()
    49   vcpu->arch.hcr_el2 |= HCR_E2H;                  in vcpu_reset_hcr()
    52   vcpu->arch.hcr_el2 |= HCR_TEA;                  in vcpu_reset_hcr()
    54   vcpu->arch.hcr_el2 |= HCR_TERR;                 in vcpu_reset_hcr()
    57   vcpu->arch.hcr_el2 |= HCR_FWB;                  in vcpu_reset_hcr()
    60   vcpu->arch.hcr_el2 &= ~HCR_RW;                  in vcpu_reset_hcr()
    68   vcpu->arch.hcr_el2 |= HCR_TID3;                 in vcpu_reset_hcr()
    72   vcpu->arch.hcr_el2 |= HCR_TID2;                 in vcpu_reset_hcr()
    77   return (unsigned long *)&vcpu->arch.hcr_el2;    in vcpu_hcr()
    [all …]

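The vcpu_reset_hcr() hits above show the pattern KVM uses to build a vCPU's HCR_EL2 image: start from HCR_GUEST_FLAGS, then OR in (or clear) individual trap and routing bits. The excerpt does not show the condition guarding each bit, so the sketch below is a paraphrase, not the kernel source; the function name is invented and every guard marked "assumed" is an assumption based on what the bits mean architecturally.

    /* Sketch only: paraphrased from the kvm_emulate.h hits, assuming a
     * kernel build context.  Guards marked "assumed" are not from the
     * excerpt. */
    #include <linux/kvm_host.h>     /* struct kvm_vcpu */
    #include <asm/kvm_arm.h>        /* HCR_* bit definitions */
    #include <asm/cpufeature.h>     /* cpus_have_const_cap() */
    #include <asm/virt.h>           /* has_vhe() */

    static void reset_hcr_sketch(struct kvm_vcpu *vcpu)
    {
            vcpu->arch.hcr_el2 = HCR_GUEST_FLAGS;           /* baseline guest value */

            if (has_vhe())                                  /* assumed guard */
                    vcpu->arch.hcr_el2 |= HCR_E2H;          /* keep the VHE E2H layout */

            if (cpus_have_const_cap(ARM64_HAS_RAS_EXTN)) {  /* assumed guard */
                    vcpu->arch.hcr_el2 |= HCR_TEA;          /* trap external aborts */
                    vcpu->arch.hcr_el2 |= HCR_TERR;         /* trap error-record accesses */
            }

            if (cpus_have_const_cap(ARM64_HAS_STAGE2_FWB))  /* assumed guard */
                    vcpu->arch.hcr_el2 |= HCR_FWB;          /* stage-2 forced write-back */

            if (test_bit(KVM_ARM_VCPU_EL1_32BIT, vcpu->arch.features))  /* assumed guard */
                    vcpu->arch.hcr_el2 &= ~HCR_RW;          /* guest EL1 runs AArch32 */

            /* Trap the ID-register groups so KVM can emulate them
             * (the real code has guards that are omitted here). */
            vcpu->arch.hcr_el2 |= HCR_TID3;
            vcpu->arch.hcr_el2 |= HCR_TID2;
    }
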
hardirq.h
    43   nmi_ctx->hcr = read_sysreg(hcr_el2);            \
    45   write_sysreg(nmi_ctx->hcr | HCR_TGE, hcr_el2);  \
    56   write_sysreg(nmi_ctx->hcr, hcr_el2);            \

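The hardirq.h hits come from the arm64 NMI entry/exit macros: on the way into an NMI the current HCR_EL2 is stashed in the per-context nmi_ctx, HCR_TGE is forced on so host exception routing and translation semantics apply while the NMI is handled, and the saved value is written back on the way out. A condensed sketch of that save/set/restore pattern follows; the real code is open-coded inside macros and has additional VHE checks and ordering not visible in the excerpt, and the helper names here are invented.

    /* Sketch of the NMI-time HCR_EL2 save/set/restore; not the kernel
     * macros themselves. */
    #include <linux/types.h>        /* u64 */
    #include <asm/kvm_arm.h>        /* HCR_TGE */
    #include <asm/sysreg.h>         /* read_sysreg()/write_sysreg() */
    #include <asm/barrier.h>        /* isb() */

    struct nmi_hcr_ctx {
            u64 hcr;                /* HCR_EL2 as found on NMI entry */
    };

    static void nmi_enter_hcr_sketch(struct nmi_hcr_ctx *nmi_ctx)
    {
            nmi_ctx->hcr = read_sysreg(hcr_el2);            /* save current value */
            write_sysreg(nmi_ctx->hcr | HCR_TGE, hcr_el2);  /* force host routing */
            isb();                                          /* assumed barrier */
    }

    static void nmi_exit_hcr_sketch(struct nmi_hcr_ctx *nmi_ctx)
    {
            write_sysreg(nmi_ctx->hcr, hcr_el2);            /* restore saved value */
            isb();                                          /* assumed barrier */
    }
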
kvm_host.h
    252  u64 hcr_el2;                                    member

/arch/arm64/kvm/hyp/ |
tlb.c
    57   val = read_sysreg(hcr_el2);                     in __tlb_switch_to_guest_vhe()
    59   write_sysreg(val, hcr_el2);                     in __tlb_switch_to_guest_vhe()
    87   write_sysreg(HCR_HOST_VHE_FLAGS, hcr_el2);      in __tlb_switch_to_host_vhe()

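The tlb.c hits are the VHE TLB context switch: __tlb_switch_to_guest_vhe() reads HCR_EL2 and writes back a modified value (the intervening line, not shown in the excerpt, is where HCR_TGE is dropped so that subsequent TLBI instructions act on the guest's EL1&0 translation regime), and __tlb_switch_to_host_vhe() restores the canonical host value, HCR_HOST_VHE_FLAGS. A hedged sketch of just the HCR_EL2 toggle, with the VTTBR/VMID programming omitted and the TGE clearing inferred rather than quoted:

    /* Sketch of the HCR_EL2 half of the VHE TLB switch.  Clearing
     * HCR_TGE is inferred from the function names, not shown in the
     * excerpt; function names are invented. */
    #include <linux/types.h>
    #include <asm/kvm_arm.h>        /* HCR_TGE, HCR_HOST_VHE_FLAGS */
    #include <asm/sysreg.h>
    #include <asm/barrier.h>

    static void tlb_switch_to_guest_vhe_sketch(void)
    {
            u64 val = read_sysreg(hcr_el2);

            val &= ~HCR_TGE;        /* assumed: TLBIs now target the guest regime */
            write_sysreg(val, hcr_el2);
            isb();
    }

    static void tlb_switch_to_host_vhe_sketch(void)
    {
            write_sysreg(HCR_HOST_VHE_FLAGS, hcr_el2);      /* back to host defaults */
            isb();
    }
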
switch.c
    150  u64 hcr = vcpu->arch.hcr_el2;                            in __activate_traps()
    155  write_sysreg(hcr, hcr_el2);                              in __activate_traps()
    169  write_sysreg(HCR_HOST_VHE_FLAGS, hcr_el2);               in deactivate_traps_vhe()
    196  write_sysreg(HCR_HOST_NVHE_FLAGS, hcr_el2);              in __deactivate_traps_nvhe()
    208  if (vcpu->arch.hcr_el2 & HCR_VSE) {                      in __deactivate_traps()
    209  vcpu->arch.hcr_el2 &= ~HCR_VSE;                          in __deactivate_traps()
    210  vcpu->arch.hcr_el2 |= read_sysreg(hcr_el2) & HCR_VSE;    in __deactivate_traps()
    407  if (!(read_sysreg(hcr_el2) & HCR_RW))                    in __hyp_handle_fpsimd()
    426  if (vcpu->arch.hcr_el2 & HCR_TVM)                        in handle_tx2_tvm()

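switch.c shows both halves of the world switch: __activate_traps() loads the vCPU's cached hcr_el2 into the hardware register before entering the guest, and the deactivate paths write HCR_HOST_VHE_FLAGS or HCR_HOST_NVHE_FLAGS back on exit. Lines 208-210 are the virtual-SError resample: KVM sets HCR_VSE to inject a vSError, the CPU clears the bit once the guest actually takes it, so on exit the cached copy is refreshed from the live register to learn whether the injection is still pending. The fragment maps almost directly onto a helper like the sketch below (the function name is invented; the logic is taken from the excerpt):

    /* Sketch of the HCR_VSE resample seen at switch.c lines 208-210. */
    #include <linux/kvm_host.h>
    #include <asm/kvm_arm.h>        /* HCR_VSE */
    #include <asm/sysreg.h>

    static void resample_vserror_sketch(struct kvm_vcpu *vcpu)
    {
            if (vcpu->arch.hcr_el2 & HCR_VSE) {
                    /* Drop the stale pending bit ... */
                    vcpu->arch.hcr_el2 &= ~HCR_VSE;
                    /* ... and pick up the live state: still set means the
                     * guest has not taken the vSError yet. */
                    vcpu->arch.hcr_el2 |= read_sysreg(hcr_el2) & HCR_VSE;
            }
    }
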
/arch/arm64/kernel/ |
asm-offsets.c
    98   DEFINE(VCPU_HCR_EL2, offsetof(struct kvm_vcpu, arch.hcr_el2));  in main()

head.S
    521  msr hcr_el2, x0

/arch/arm64/kvm/ |
guest.c
    716  events->exception.serror_pending = !!(vcpu->arch.hcr_el2 & HCR_VSE);  in __kvm_arm_vcpu_get_events()

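guest.c closes the loop on the same bit: when userspace reads vCPU events (the KVM_GET_VCPU_EVENTS path), a still-pending virtual SError is reported by testing the cached hcr_el2, exactly as in the hit above. A minimal sketch with only the serror_pending field filled in; the helper name is invented and the rest of the event population is omitted:

    /* Sketch of reporting a pending vSError to userspace, following the
     * guest.c hit; other event fields are left untouched. */
    #include <linux/kvm_host.h>
    #include <asm/kvm_arm.h>        /* HCR_VSE */

    static void get_serror_pending_sketch(struct kvm_vcpu *vcpu,
                                          struct kvm_vcpu_events *events)
    {
            events->exception.serror_pending =
                    !!(vcpu->arch.hcr_el2 & HCR_VSE);
    }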