Home
last modified time | relevance | path

Searched refs: kvm_nvhe_sym (Results 1 – 8 of 8) sorted by relevance

/arch/arm64/include/asm/
Dkvm_hyp.h126 extern u64 kvm_nvhe_sym(id_aa64pfr0_el1_sys_val);
127 extern u64 kvm_nvhe_sym(id_aa64pfr1_el1_sys_val);
128 extern u64 kvm_nvhe_sym(id_aa64isar0_el1_sys_val);
129 extern u64 kvm_nvhe_sym(id_aa64isar1_el1_sys_val);
130 extern u64 kvm_nvhe_sym(id_aa64isar2_el1_sys_val);
131 extern u64 kvm_nvhe_sym(id_aa64mmfr0_el1_sys_val);
132 extern u64 kvm_nvhe_sym(id_aa64mmfr1_el1_sys_val);
133 extern u64 kvm_nvhe_sym(id_aa64mmfr2_el1_sys_val);
135 extern unsigned long kvm_nvhe_sym(__icache_flags);
136 extern unsigned int kvm_nvhe_sym(kvm_arm_vmid_bits);
[all …]
Dhyp_image.h18 #define kvm_nvhe_sym(sym) __kvm_nvhe_##sym macro
20 #define kvm_nvhe_sym(sym) sym macro
61 #define KVM_NVHE_ALIAS(sym) kvm_nvhe_sym(sym) = sym;
64 #define KVM_NVHE_ALIAS_HYP(first, sec) kvm_nvhe_sym(first) = kvm_nvhe_sym(sec);
Dkvm_pkvm.h277 extern struct pkvm_moveable_reg kvm_nvhe_sym(pkvm_moveable_regs)[];
278 extern unsigned int kvm_nvhe_sym(pkvm_moveable_regs_nr);
280 extern struct memblock_region kvm_nvhe_sym(hyp_memory)[];
281 extern unsigned int kvm_nvhe_sym(hyp_memblock_nr);
283 extern phys_addr_t kvm_nvhe_sym(pvmfw_base);
284 extern phys_addr_t kvm_nvhe_sym(pvmfw_size);
304 for (i = 0; i < kvm_nvhe_sym(hyp_memblock_nr); i++) { in hyp_vmemmap_pages()
305 res += hyp_vmemmap_memblock_size(&kvm_nvhe_sym(hyp_memory)[i], in hyp_vmemmap_pages()
335 for (i = 0; i < kvm_nvhe_sym(pkvm_moveable_regs_nr); i++) { in __hyp_pgtable_moveable_regs_pages()
336 struct pkvm_moveable_reg *reg = &kvm_nvhe_sym(pkvm_moveable_regs)[i]; in __hyp_pgtable_moveable_regs_pages()
Dkvm_asm.h112 #define DECLARE_KVM_NVHE_SYM(sym) extern char kvm_nvhe_sym(sym)[]
125 DECLARE_PER_CPU(type, kvm_nvhe_sym(sym))
139 base = kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu]; \
206 #define CHOOSE_NVHE_SYM(sym) kvm_nvhe_sym(sym)
247 #define kvm_ksym_ref_nvhe(sym) kvm_ksym_ref(kvm_nvhe_sym(sym))
258 extern unsigned long kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[];
262 extern unsigned long kvm_nvhe_sym(kvm_arm_hyp_host_fp_state)[];
Dkvm_host.h386 extern struct kvm_host_psci_config kvm_nvhe_sym(kvm_host_psci_config);
389 extern s64 kvm_nvhe_sym(hyp_physvirt_offset);
392 extern u64 kvm_nvhe_sym(hyp_cpu_logical_map)[NR_CPUS];
/arch/arm64/kvm/
Darm.c2019 free_pages(kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu], nvhe_percpu_order()); in teardown_hyp_mode()
2020 free_pages(kvm_nvhe_sym(kvm_arm_hyp_host_fp_state)[cpu], in teardown_hyp_mode()
2027 void *per_cpu_base = kvm_ksym_ref(kvm_nvhe_sym(kvm_arm_hyp_percpu_base)); in do_pkvm_init()
2073 kvm_nvhe_sym(id_aa64pfr0_el1_sys_val) = get_hyp_id_aa64pfr0_el1(); in kvm_hyp_init_symbols()
2074 kvm_nvhe_sym(id_aa64pfr1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64PFR1_EL1); in kvm_hyp_init_symbols()
2075 kvm_nvhe_sym(id_aa64isar0_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR0_EL1); in kvm_hyp_init_symbols()
2076 kvm_nvhe_sym(id_aa64isar1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR1_EL1); in kvm_hyp_init_symbols()
2077 kvm_nvhe_sym(id_aa64isar2_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR2_EL1); in kvm_hyp_init_symbols()
2078 kvm_nvhe_sym(id_aa64mmfr0_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1); in kvm_hyp_init_symbols()
2079 kvm_nvhe_sym(id_aa64mmfr1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64MMFR1_EL1); in kvm_hyp_init_symbols()
[all …]
Dpkvm.c33 static phys_addr_t *pvmfw_base = &kvm_nvhe_sym(pvmfw_base);
34 static phys_addr_t *pvmfw_size = &kvm_nvhe_sym(pvmfw_size);
36 static struct pkvm_moveable_reg *moveable_regs = kvm_nvhe_sym(pkvm_moveable_regs);
37 static struct memblock_region *hyp_memory = kvm_nvhe_sym(hyp_memory);
38 static unsigned int *hyp_memblock_nr_ptr = &kvm_nvhe_sym(hyp_memblock_nr);
100 kvm_nvhe_sym(pkvm_moveable_regs_nr), in sort_moveable_regs()
144 kvm_nvhe_sym(pkvm_moveable_regs_nr) = i; in register_moveable_regions()
Dstacktrace.c195 i < ARRAY_SIZE(kvm_nvhe_sym(pkvm_stacktrace)) && stacktrace[i]; in pkvm_dump_backtrace()