Lines matching refs: sve_state (grouped by the function each reference appears in)
unpin_host_sve_state():
    594   void *sve_state;                                               (local)
    599   sve_state = kern_hyp_va(hyp_vcpu->vcpu.arch.sve_state);
    600   hyp_unpin_shared_mem(sve_state,
    601                        sve_state + vcpu_sve_state_size(&hyp_vcpu->vcpu));
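These references are the unpin path for a hyp vCPU whose SVE buffer lives in host memory: the host-provided pointer is translated to a hyp VA with kern_hyp_va(), and the whole buffer, sized by vcpu_sve_state_size(), is handed to hyp_unpin_shared_mem(). A minimal sketch of how the fragments plausibly fit together; the signature and the vcpu_has_sve() early-out are assumptions and do not appear in the listing:

static void unpin_host_sve_state(struct pkvm_hyp_vcpu *hyp_vcpu)
{
        void *sve_state;

        /* Assumed guard: nothing to unpin if the vCPU has no SVE state. */
        if (!vcpu_has_sve(&hyp_vcpu->vcpu))
                return;

        /* The host stores a kernel VA; convert it before using it at EL2. */
        sve_state = kern_hyp_va(hyp_vcpu->vcpu.arch.sve_state);
        hyp_unpin_shared_mem(sve_state,
                             sve_state + vcpu_sve_state_size(&hyp_vcpu->vcpu));
}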
teardown_sve_state():
    607   void *sve_state = hyp_vcpu->vcpu.arch.sve_state;               (local)
    609   if (sve_state)
    610           hyp_free_account(sve_state, hyp_vm->host_kvm);
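Here the SVE buffer was allocated at EL2 (see hyp_alloc_account() at line 672 below), so teardown frees it and returns the accounting to the owning host VM rather than unpinning host memory. A sketch under the assumption that the function receives both the hyp VM and the hyp vCPU; only the body lines come from the listing:

static void teardown_sve_state(struct pkvm_hyp_vm *hyp_vm,
                               struct pkvm_hyp_vcpu *hyp_vcpu)
{
        void *sve_state = hyp_vcpu->vcpu.arch.sve_state;

        /* Free the EL2-side buffer; its allocation was accounted against
         * the host VM (hyp_vm->host_kvm). */
        if (sve_state)
                hyp_free_account(sve_state, hyp_vm->host_kvm);
}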
init_pkvm_hyp_vcpu_sve():
    654   void *sve_state = kern_hyp_va(READ_ONCE(host_vcpu->arch.sve_state));   (local)
    659   if (!sve_state && !pkvm_hyp_vcpu_is_protected(hyp_vcpu)) {
    672   sve_state = hyp_alloc_account(sve_state_size,
    674   if (!sve_state) {
    679   ret = hyp_pin_shared_mem(sve_state, sve_state + sve_state_size);
    684   hyp_vcpu->vcpu.arch.sve_state = sve_state;
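The init references suggest two paths: a protected vCPU gets a private EL2 buffer via hyp_alloc_account(), while a non-protected vCPU reuses the host's buffer and pins it with hyp_pin_shared_mem(); either way the result lands in hyp_vcpu->vcpu.arch.sve_state. A sketch of that flow, assuming the signature, the error codes, and the sve_state_size computation; the accounting argument elided at line 672 is assumed to be the host VM, mirroring hyp_free_account() in teardown_sve_state():

static int init_pkvm_hyp_vcpu_sve(struct pkvm_hyp_vm *hyp_vm,
                                  struct pkvm_hyp_vcpu *hyp_vcpu,
                                  struct kvm_vcpu *host_vcpu)
{
        void *sve_state = kern_hyp_va(READ_ONCE(host_vcpu->arch.sve_state));
        size_t sve_state_size = vcpu_sve_state_size(&hyp_vcpu->vcpu);
        int ret = 0;

        /* A non-protected vCPU must supply a host buffer to pin. */
        if (!sve_state && !pkvm_hyp_vcpu_is_protected(hyp_vcpu)) {
                ret = -EINVAL;
                goto out;
        }

        if (pkvm_hyp_vcpu_is_protected(hyp_vcpu)) {
                /* Protected guests get a private EL2 buffer, accounted
                 * against the host VM (assumed second argument). */
                sve_state = hyp_alloc_account(sve_state_size,
                                              hyp_vm->host_kvm);
                if (!sve_state) {
                        ret = -ENOMEM;
                        goto out;
                }
        } else {
                /* Non-protected guests share the host buffer; pin it at EL2
                 * so the host cannot pull it out from under the guest. */
                ret = hyp_pin_shared_mem(sve_state, sve_state + sve_state_size);
                if (ret)
                        goto out;
        }

        hyp_vcpu->vcpu.arch.sve_state = sve_state;
out:
        return ret;
}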
pkvm_reset_vcpu():
    1138  memset(vcpu->arch.sve_state, 0, vcpu_sve_state_size(vcpu));
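Finally, the reset path zeroes the SVE register image so stale guest vector state does not survive a vCPU reset. A small sketch of the fragment; the vcpu_has_sve() guard is an assumption:

        /* Assumed guard: only vCPUs with SVE enabled own an sve_state buffer. */
        if (vcpu_has_sve(vcpu))
                memset(vcpu->arch.sve_state, 0, vcpu_sve_state_size(vcpu));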