
Searched refs:sve_state (Results 1 – 13 of 13) sorted by relevance

/arch/arm64/kvm/
reset.c:115  vcpu->arch.sve_state = buf;  in kvm_vcpu_finalize_sve()
reset.c:146  void *sve_state = vcpu->arch.sve_state;  in kvm_arm_vcpu_destroy()  [local]
reset.c:149  if (sve_state)  in kvm_arm_vcpu_destroy()
reset.c:150  kvm_unshare_hyp(sve_state, sve_state + vcpu_sve_state_size(vcpu));  in kvm_arm_vcpu_destroy()
reset.c:151  kfree(sve_state);  in kvm_arm_vcpu_destroy()
reset.c:157  memset(vcpu->arch.sve_state, 0, vcpu_sve_state_size(vcpu));  in kvm_vcpu_reset_sve()
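Taken together, the reset.c hits trace the lifetime of the per-vCPU SVE buffer: kvm_vcpu_finalize_sve() installs the allocation, kvm_vcpu_reset_sve() zeroes it, and kvm_arm_vcpu_destroy() unshares it from the hypervisor before freeing it. A minimal standalone sketch of that pattern follows; the struct layout, kvm_unshare_hyp() and vcpu_sve_state_size() here are simplified stand-ins, not the kernel's definitions.

#include <stdlib.h>
#include <string.h>

/* Simplified stand-ins for the kernel types and helpers named above. */
struct vcpu_arch { void *sve_state; size_t sve_state_size; };

static size_t vcpu_sve_state_size(const struct vcpu_arch *arch) { return arch->sve_state_size; }
static void kvm_unshare_hyp(void *start, void *end) { (void)start; (void)end; /* would undo EL2 sharing */ }

/* Pattern of kvm_vcpu_finalize_sve(): install a freshly allocated, zeroed buffer. */
static int finalize_sve(struct vcpu_arch *arch, size_t size)
{
	void *buf = calloc(1, size);

	if (!buf)
		return -1;
	arch->sve_state = buf;
	arch->sve_state_size = size;
	return 0;
}

/* Pattern of kvm_vcpu_reset_sve(): clear the register image in place. */
static void reset_sve(struct vcpu_arch *arch)
{
	memset(arch->sve_state, 0, vcpu_sve_state_size(arch));
}

/* Pattern of kvm_arm_vcpu_destroy(): unshare from EL2 first, then free. */
static void destroy_sve(struct vcpu_arch *arch)
{
	void *sve_state = arch->sve_state;

	if (sve_state)
		kvm_unshare_hyp(sve_state, (char *)sve_state + vcpu_sve_state_size(arch));
	free(sve_state);
	arch->sve_state = NULL;
}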
fpsimd.c:117  vcpu->arch.sve_state,  in kvm_arch_vcpu_ctxsync_fp()
guest.c:343  if (WARN_ON(vcpu->arch.sve_state))  in set_sve_vls()
guest.c:498  if (copy_to_user(uptr, vcpu->arch.sve_state + region.koffset,  in get_sve_reg()
guest.c:524  if (copy_from_user(vcpu->arch.sve_state + region.koffset, uptr,  in set_sve_reg()
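The guest.c hits are the KVM_GET_ONE_REG/KVM_SET_ONE_REG path: a decoded register region gives an offset (region.koffset) into vcpu->arch.sve_state, and that slice is copied to or from userspace. A hedged sketch of the slicing logic; struct sve_reg_region here is a simplified, hypothetical stand-in for the kernel's decoded region, and plain memcpy() stands in for copy_to_user()/copy_from_user().

#include <stddef.h>
#include <string.h>

/* Hypothetical, simplified view of a decoded SVE register region. */
struct sve_reg_region {
	size_t koffset;  /* byte offset into sve_state */
	size_t klen;     /* number of bytes backed by sve_state */
};

/* Pattern of get_sve_reg(): hand userspace a slice of the state buffer. */
static int get_sve_reg_sketch(const void *sve_state,
			      const struct sve_reg_region *region, void *uptr)
{
	memcpy(uptr, (const char *)sve_state + region->koffset, region->klen);
	return 0;
}

/* Pattern of set_sve_reg(): overwrite the same slice from userspace. */
static int set_sve_reg_sketch(void *sve_state,
			      const struct sve_reg_region *region, const void *uptr)
{
	memcpy((char *)sve_state + region->koffset, uptr, region->klen);
	return 0;
}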
/arch/arm64/kernel/
fpsimd.c:123  void *sve_state;  [member]
fpsimd.c:278  kfree(task->thread.sve_state);  in __sve_free()
fpsimd.c:279  task->thread.sve_state = NULL;  in __sve_free()
fpsimd.c:480  sve_save_state((char *)last->sve_state +  in fpsimd_save()
fpsimd.c:593  #define ZREG(sve_state, vq, n) ((char *)(sve_state) + \  [argument]
fpsimd.c:640  void *sst = task->thread.sve_state;  in fpsimd_to_sve()
fpsimd.c:664  void const *sst = task->thread.sve_state;  in sve_to_fpsimd()
fpsimd.c:710  if (task->thread.sve_state) {  in sve_alloc()
fpsimd.c:712  memset(task->thread.sve_state, 0,  in sve_alloc()
fpsimd.c:718  task->thread.sve_state =  in sve_alloc()
[all …]
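The kernel/fpsimd.c hits centre on the ZREG() helper and the fpsimd_to_sve()/sve_to_fpsimd() converters: with a vector length of vq quadwords, Z register n starts n * vq * 16 bytes into the task's sve_state buffer, and its low 128 bits alias the corresponding FPSIMD V register. A simplified sketch of that layout and conversion, assuming the signal-frame-derived layout the ZREG() macro encodes:

#include <stdint.h>
#include <string.h>

#define SVE_NUM_ZREGS	32
#define SVE_VQ_BYTES	16	/* one quadword = 128 bits */

/* Z register n starts n * vq * 16 bytes into the (header-less) sve_state
 * image, matching what ZREG() computes from the signal-frame offset macros. */
static void *zreg(void *sve_state, unsigned int vq, unsigned int n)
{
	return (char *)sve_state + (size_t)n * vq * SVE_VQ_BYTES;
}

/* Sketch of fpsimd_to_sve(): seed the low 128 bits of each Z register from
 * the corresponding FPSIMD V register; the rest of each Z slot is untouched. */
static void fpsimd_to_sve_sketch(void *sve_state, unsigned int vq,
				 const __uint128_t vregs[SVE_NUM_ZREGS])
{
	for (unsigned int n = 0; n < SVE_NUM_ZREGS; n++)
		memcpy(zreg(sve_state, vq, n), &vregs[n], sizeof(vregs[n]));
}

/* Sketch of sve_to_fpsimd(): the reverse direction, truncating each Z
 * register back to its low 128 bits. */
static void sve_to_fpsimd_sketch(__uint128_t vregs[SVE_NUM_ZREGS],
				 void *sve_state, unsigned int vq)
{
	for (unsigned int n = 0; n < SVE_NUM_ZREGS; n++)
		memcpy(&vregs[n], zreg(sve_state, vq, n), sizeof(vregs[n]));
}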
process.c:313  dst->thread.sve_state = NULL;  in arch_dup_task_struct()
process.c:325  dst->thread.sve_state = kzalloc(sve_state_size(src),  in arch_dup_task_struct()
process.c:327  if (!dst->thread.sve_state)  in arch_dup_task_struct()
process.c:333  kfree(dst->thread.sve_state);  in arch_dup_task_struct()
process.c:334  dst->thread.sve_state = NULL;  in arch_dup_task_struct()
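The process.c hits are fork: arch_dup_task_struct() first clears the copied sve_state pointer so the child never aliases the parent's buffer, then allocates the child its own zeroed buffer sized from the parent (freeing it again on a later error path). A minimal sketch of that duplicate-don't-share pattern, with a simplified thread struct and calloc() standing in for kzalloc():

#include <errno.h>
#include <stdlib.h>

/* Simplified stand-in for the SVE-related part of struct thread_struct. */
struct thread_sketch { void *sve_state; size_t sve_state_size; };

static int dup_thread_sve(struct thread_sketch *dst, const struct thread_sketch *src)
{
	*dst = *src;		/* the struct copy duplicates the raw pointer... */
	dst->sve_state = NULL;	/* ...so break the aliasing before anything else */

	if (!src->sve_state)
		return 0;

	dst->sve_state = calloc(1, src->sve_state_size);
	if (!dst->sve_state)
		return -ENOMEM;
	return 0;
}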
ptrace.c:800  membuf_write(&to, target->thread.sve_state, end - start);  in sve_get_common()
ptrace.c:917  if (!target->thread.sve_state) {  in sve_set_common()
ptrace.c:936  target->thread.sve_state,  in sve_set_common()
ptrace.c:1085  if (!target->thread.sve_state) {  in za_set()
ptrace.c:1087  if (!target->thread.sve_state) {  in za_set()
signal.c:261  current->thread.sve_state,  in preserve_sve_context()
signal.c:312  if (!current->thread.sve_state) {  in restore_sve_fpsimd_context()
signal.c:317  err = __copy_from_user(current->thread.sve_state,  in restore_sve_fpsimd_context()
/arch/arm64/kvm/hyp/nvhe/
switch.c:201  struct kvm_host_sve_state *sve_state = get_host_sve_state(vcpu);  in kvm_hyp_handle_fpsimd_host()  [local]
switch.c:203  sve_state->zcr_el1 = read_sysreg_el1(SYS_ZCR);  in kvm_hyp_handle_fpsimd_host()
switch.c:205  __sve_save_state(sve_state->sve_regs +  in kvm_hyp_handle_fpsimd_host()
switch.c:207  &sve_state->fpsr);  in kvm_hyp_handle_fpsimd_host()
pkvm.c:487  void *sve_state;  in unpin_host_sve_state()  [local]
pkvm.c:492  sve_state = kern_hyp_va(hyp_vcpu->vcpu.arch.sve_state);  in unpin_host_sve_state()
pkvm.c:493  hyp_unpin_shared_mem(sve_state,  in unpin_host_sve_state()
pkvm.c:494  sve_state + vcpu_sve_state_size(&hyp_vcpu->vcpu));  in unpin_host_sve_state()
pkvm.c:571  void *sve_state;  in init_pkvm_hyp_vcpu()  [local]
pkvm.c:573  hyp_vcpu->vcpu.arch.sve_state = READ_ONCE(host_vcpu->arch.sve_state);  in init_pkvm_hyp_vcpu()
pkvm.c:576  sve_state = kern_hyp_va(hyp_vcpu->vcpu.arch.sve_state);  in init_pkvm_hyp_vcpu()
pkvm.c:579  if (!hyp_vcpu->vcpu.arch.sve_state || !sve_state_size ||  in init_pkvm_hyp_vcpu()
pkvm.c:580  hyp_pin_shared_mem(sve_state, sve_state + sve_state_size)) {  in init_pkvm_hyp_vcpu()
pkvm.c:582  hyp_vcpu->vcpu.arch.sve_state = NULL;  in init_pkvm_hyp_vcpu()
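The pkvm.c hits bracket the protected-KVM case: init_pkvm_hyp_vcpu() snapshots the host's sve_state pointer, translates it into the hypervisor's address space with kern_hyp_va(), validates it and pins the shared range, and unpin_host_sve_state() later unpins the same [sve_state, sve_state + size) span. A hedged sketch of that pin/unpin bracketing; the *_stub() helpers are placeholders for kern_hyp_va(), hyp_pin_shared_mem() and hyp_unpin_shared_mem(), not their real implementations.

#include <stddef.h>

static void *kern_hyp_va_stub(void *kern_va) { return kern_va; /* the real helper rewrites the VA */ }
static int hyp_pin_shared_mem_stub(void *from, void *to) { (void)from; (void)to; return 0; }
static void hyp_unpin_shared_mem_stub(void *from, void *to) { (void)from; (void)to; }

struct hyp_vcpu_sketch { void *sve_state; size_t sve_state_size; };

/* Pattern of init_pkvm_hyp_vcpu(): refuse a missing buffer, a zero size, or a
 * failed pin; otherwise keep the range pinned for the vCPU's lifetime. */
static int pin_host_sve_state(struct hyp_vcpu_sketch *v, void *host_sve_state, size_t size)
{
	void *sve_state = kern_hyp_va_stub(host_sve_state);

	if (!host_sve_state || !size ||
	    hyp_pin_shared_mem_stub(sve_state, (char *)sve_state + size)) {
		v->sve_state = NULL;
		return -1;
	}
	v->sve_state = host_sve_state;
	v->sve_state_size = size;
	return 0;
}

/* Pattern of unpin_host_sve_state(): unpin exactly the range pinned above. */
static void unpin_host_sve_state_sketch(struct hyp_vcpu_sketch *v)
{
	void *sve_state = kern_hyp_va_stub(v->sve_state);

	hyp_unpin_shared_mem_stub(sve_state, (char *)sve_state + v->sve_state_size);
}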
hyp-main.c:695  struct kvm_host_sve_state *sve_state = get_host_sve_state(vcpu);  in fpsimd_host_restore()  [local]
hyp-main.c:697  write_sysreg_el1(sve_state->zcr_el1, SYS_ZCR);  in fpsimd_host_restore()
hyp-main.c:699  __sve_restore_state(sve_state->sve_regs +  in fpsimd_host_restore()
hyp-main.c:701  &sve_state->fpsr);  in fpsimd_host_restore()
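The switch.c and hyp-main.c hits are the two halves of host SVE handling at EL2: on the guest's FP/SIMD trap the host's ZCR_EL1, SVE registers and FPSR are saved into a kvm_host_sve_state, and fpsimd_host_restore() later writes the same state back. A rough sketch of that save/restore pairing, with stub accessors in place of read_sysreg_el1()/write_sysreg_el1() and __sve_save_state()/__sve_restore_state(); the offset expression added to sve_regs is truncated in the hits above, so it appears here only as a generic parameter.

#include <stddef.h>
#include <stdint.h>

/* Simplified stand-in for struct kvm_host_sve_state as used in the hits. */
struct host_sve_state_sketch {
	uint64_t zcr_el1;
	uint32_t fpsr;
	char	 sve_regs[];	/* Z/P/FFR image, same layout as sve_state */
};

static uint64_t read_zcr_el1_stub(void) { return 0; }
static void write_zcr_el1_stub(uint64_t v) { (void)v; }
static void sve_save_stub(char *regs_ptr, uint32_t *fpsr) { (void)regs_ptr; (void)fpsr; }
static void sve_restore_stub(char *regs_ptr, uint32_t *fpsr) { (void)regs_ptr; (void)fpsr; }

/* Save half: the kvm_hyp_handle_fpsimd_host() pattern from switch.c. */
static void host_sve_save(struct host_sve_state_sketch *st, size_t regs_off)
{
	st->zcr_el1 = read_zcr_el1_stub();
	sve_save_stub(st->sve_regs + regs_off, &st->fpsr);
}

/* Restore half: the fpsimd_host_restore() pattern from hyp-main.c. */
static void host_sve_restore(struct host_sve_state_sketch *st, size_t regs_off)
{
	write_zcr_el1_stub(st->zcr_el1);
	sve_restore_stub(st->sve_regs + regs_off, &st->fpsr);
}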
/arch/arm64/include/asm/
fpsimd.h:49  void *sve_state, unsigned int sve_vl,
fpsimd.h:84  return (char *)thread->sve_state + sve_ffr_offset(vl);  in sve_pffr()
processor.h:159  void *sve_state;  /* SVE registers, if any */  [member]
kvm_host.h:434  void *sve_state;  [member]
kvm_host.h:713  #define vcpu_sve_pffr(vcpu) (kern_hyp_va((vcpu)->arch.sve_state) + \
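The header hits show how the per-thread and per-vCPU consumers find the FFR pointer that the low-level save/restore code wants: sve_pffr() adds sve_ffr_offset(vl) to thread->sve_state, and vcpu_sve_pffr() does the same after translating the pointer with kern_hyp_va(). In the signal-frame-derived layout used for sve_state, that offset lands right after the 32 Z registers and 16 P registers; a small sketch of the arithmetic, assuming that layout:

#include <stdio.h>

#define SVE_NUM_ZREGS	32
#define SVE_NUM_PREGS	16

/* Bytes per register for a vector length of vq quadwords (vq * 128 bits). */
#define ZREG_BYTES(vq)	((vq) * 16)	/* Z register: vq quadwords            */
#define PREG_BYTES(vq)	((vq) * 2)	/* P register / FFR: one bit per Z byte */

/* FFR sits straight after the Z and P register blocks, which is what
 * sve_ffr_offset(vl) resolves to for this layout. */
static unsigned int ffr_offset(unsigned int vq)
{
	return SVE_NUM_ZREGS * ZREG_BYTES(vq) + SVE_NUM_PREGS * PREG_BYTES(vq);
}

int main(void)
{
	/* vq = vector length in 128-bit quadwords, e.g. vq = 2 for 256-bit vectors */
	for (unsigned int vq = 1; vq <= 16; vq *= 2)
		printf("vq=%2u  FFR offset=%u bytes\n", vq, ffr_offset(vq));
	return 0;
}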