Searched refs:vmx_pages (Results 1 – 8 of 8) sorted by relevance
/external/linux-kselftest/tools/testing/selftests/kvm/x86_64/

evmcs_test.c
    32  void l1_guest_code(struct vmx_pages *vmx_pages)  in l1_guest_code() argument
    37  enable_vp_assist(vmx_pages->vp_assist_gpa, vmx_pages->vp_assist);  in l1_guest_code()
    39  GUEST_ASSERT(vmx_pages->vmcs_gpa);  in l1_guest_code()
    40  GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));  in l1_guest_code()
    42  GUEST_ASSERT(load_vmcs(vmx_pages));  in l1_guest_code()
    43  GUEST_ASSERT(vmptrstz() == vmx_pages->enlightened_vmcs_gpa);  in l1_guest_code()
    46  GUEST_ASSERT(vmptrstz() == vmx_pages->enlightened_vmcs_gpa);  in l1_guest_code()
    48  prepare_vmcs(vmx_pages, l2_guest_code,  in l1_guest_code()
    52  GUEST_ASSERT(vmptrstz() == vmx_pages->enlightened_vmcs_gpa);  in l1_guest_code()
    54  GUEST_ASSERT(vmptrstz() == vmx_pages->enlightened_vmcs_gpa);  in l1_guest_code()
    [all …]

state_test.c
    45  void l1_guest_code(struct vmx_pages *vmx_pages)  in l1_guest_code() argument
    50  GUEST_ASSERT(vmx_pages->vmcs_gpa);  in l1_guest_code()
    51  GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));  in l1_guest_code()
    53  GUEST_ASSERT(load_vmcs(vmx_pages));  in l1_guest_code()
    54  GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa);  in l1_guest_code()
    57  GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa);  in l1_guest_code()
    59  prepare_vmcs(vmx_pages, l2_guest_code,  in l1_guest_code()
    63  GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa);  in l1_guest_code()
    65  GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa);  in l1_guest_code()
    83  vmwrite(VMCS_LINK_POINTER, vmx_pages->shadow_vmcs_gpa);  in l1_guest_code()
    [all …]

vmx_close_while_nested_test.c
    37  static void l1_guest_code(struct vmx_pages *vmx_pages)  in l1_guest_code() argument
    42  GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));  in l1_guest_code()
    43  GUEST_ASSERT(load_vmcs(vmx_pages));  in l1_guest_code()
    46  prepare_vmcs(vmx_pages, l2_guest_code,  in l1_guest_code()

vmx_tsc_adjust_test.c
    83  static void l1_guest_code(struct vmx_pages *vmx_pages)  in l1_guest_code() argument
    94  GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));  in l1_guest_code()
    95  GUEST_ASSERT(load_vmcs(vmx_pages));  in l1_guest_code()
    98  prepare_vmcs(vmx_pages, l2_guest_code,  in l1_guest_code()

smm_test.c
    61  void guest_code(struct vmx_pages *vmx_pages)  in guest_code() argument
    75  if (vmx_pages) {  in guest_code()
    76  GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));  in guest_code()

vmx_dirty_log_test.c
    50  void l1_guest_code(struct vmx_pages *vmx)  in l1_guest_code()
    72  struct vmx_pages *vmx;  in main()

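Taken together, the test hits above all share the same L1 guest pattern: check the vmx_pages fields, enter VMX operation, load the VMCS, point it at the L2 entry and stack, then VMLAUNCH and verify the exit. The sketch below is illustrative only, pieced together from those hits; vmlaunch(), vmreadz(), EXIT_REASON, EXIT_REASON_VMCALL and GUEST_DONE() are assumed to come from the same selftest headers (vmx.h / kvm_util.h) and are not part of the hits shown here.

#include "kvm_util.h"
#include "vmx.h"

#define L2_GUEST_STACK_SIZE 64

/* L2 issues a single hypercall so L1 sees an EXIT_REASON_VMCALL VM-exit. */
static void l2_guest_code(void)
{
	__asm__ __volatile__("vmcall");
}

/* Common L1 flow shared by evmcs_test, state_test, vmx_tsc_adjust_test, ... */
static void l1_guest_code(struct vmx_pages *vmx_pages)
{
	unsigned long l2_guest_stack[L2_GUEST_STACK_SIZE];

	GUEST_ASSERT(vmx_pages->vmcs_gpa);
	GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); /* enter VMX root operation */
	GUEST_ASSERT(load_vmcs(vmx_pages));                 /* make vmcs_gpa the current VMCS */

	/* Point the current VMCS at the L2 entry point and the top of its stack. */
	prepare_vmcs(vmx_pages, l2_guest_code,
		     &l2_guest_stack[L2_GUEST_STACK_SIZE]);

	GUEST_ASSERT(!vmlaunch());
	GUEST_ASSERT(vmreadz(EXIT_REASON) == EXIT_REASON_VMCALL);
	GUEST_DONE();
}
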
/external/linux-kselftest/tools/testing/selftests/kvm/include/x86_64/

vmx.h
    540  struct vmx_pages {  struct
    578  struct vmx_pages *vcpu_alloc_vmx(struct kvm_vm *vm, vm_vaddr_t *p_vmx_gva);  argument
    579  bool prepare_for_vmx_operation(struct vmx_pages *vmx);
    580  void prepare_vmcs(struct vmx_pages *vmx, void *guest_rip, void *guest_rsp);
    581  bool load_vmcs(struct vmx_pages *vmx);
    585  void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm,
    587  void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm,
    590  void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm,
    592  void prepare_eptp(struct vmx_pages *vmx, struct kvm_vm *vm,

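On the host side, the declarations above suggest how vmx_pages reaches the guest: vcpu_alloc_vmx() allocates the VMX pages in guest memory and returns their guest virtual address, which the test passes to l1_guest_code() as its argument. A minimal sketch, assuming the vm_create_default(), vcpu_args_set() and kvm_vm_free() helpers from the same selftest library (not part of the hits above) and an arbitrary VCPU_ID:

#include "kvm_util.h"
#include "vmx.h"

#define VCPU_ID 5 /* arbitrary vcpu id, for illustration only */

void l1_guest_code(struct vmx_pages *vmx_pages); /* as sketched above */

int main(void)
{
	vm_vaddr_t vmx_pages_gva = 0;
	struct kvm_vm *vm;

	/* Create a VM whose single vcpu starts in l1_guest_code(). */
	vm = vm_create_default(VCPU_ID, 0, (void *)l1_guest_code);

	/*
	 * Allocate the VMXON/VMCS/MSR-bitmap pages inside guest memory and
	 * obtain the guest virtual address of the struct vmx_pages tracker.
	 */
	vcpu_alloc_vmx(vm, &vmx_pages_gva);

	/* Hand that GVA to l1_guest_code() as its only argument. */
	vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva);

	/* Run loop elided here; see the sketch after the vmx.c entry below. */
	kvm_vm_free(vm);
	return 0;
}
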
/external/linux-kselftest/tools/testing/selftests/kvm/lib/x86_64/

vmx.c
    74   struct vmx_pages *
    78   struct vmx_pages *vmx = addr_gva2hva(vm, vmx_gva);  in vcpu_alloc_vmx()
    130  bool prepare_for_vmx_operation(struct vmx_pages *vmx)  in prepare_for_vmx_operation()
    174  bool load_vmcs(struct vmx_pages *vmx)  in load_vmcs()
    203  static inline void init_vmcs_control_fields(struct vmx_pages *vmx)  in init_vmcs_control_fields()
    372  void prepare_vmcs(struct vmx_pages *vmx, void *guest_rip, void *guest_rsp)  in prepare_vmcs()
    389  void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm,  in nested_pg_map()
    488  void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm,  in nested_map()
    508  void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm,  in nested_map_memslot()
    530  void prepare_eptp(struct vmx_pages *vmx, struct kvm_vm *vm,  in prepare_eptp()
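For completeness, a hedged sketch of the host run loop these tests typically wrap around the guest code above; vcpu_run(), get_ucall(), struct ucall, the UCALL_* commands and TEST_ASSERT() are assumed here from the selftest library headers (kvm_util.h / test_util.h) and do not appear in the hits above.

#include "test_util.h"
#include "kvm_util.h"

/* Drive the vcpu until the guest signals completion via GUEST_DONE(). */
static void run_vcpu_to_done(struct kvm_vm *vm, uint32_t vcpu_id)
{
	struct ucall uc;

	for (;;) {
		vcpu_run(vm, vcpu_id);

		switch (get_ucall(vm, vcpu_id, &uc)) {
		case UCALL_SYNC:
			/* GUEST_SYNC(stage) checkpoints land here. */
			break;
		case UCALL_ABORT:
			/* A failed GUEST_ASSERT() reports its condition string. */
			TEST_ASSERT(false, "guest assert: %s",
				    (const char *)uc.args[0]);
			break;
		case UCALL_DONE:
			return;
		}
	}
}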