
Searched refs:mmfr0 (Results 1 – 8 of 8) sorted by relevance

/arch/arm64/include/asm/
cpufeature.h 595 static inline bool id_aa64mmfr0_mixed_endian_el0(u64 mmfr0) in id_aa64mmfr0_mixed_endian_el0() argument
597 return cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_BIGENDEL_SHIFT) == 0x1 || in id_aa64mmfr0_mixed_endian_el0()
598 cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_BIGENDEL0_SHIFT) == 0x1; in id_aa64mmfr0_mixed_endian_el0()
682 u64 mmfr0; in system_supports_4kb_granule() local
685 mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1); in system_supports_4kb_granule()
686 val = cpuid_feature_extract_unsigned_field(mmfr0, in system_supports_4kb_granule()
695 u64 mmfr0; in system_supports_64kb_granule() local
698 mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1); in system_supports_64kb_granule()
699 val = cpuid_feature_extract_unsigned_field(mmfr0, in system_supports_64kb_granule()
708 u64 mmfr0; in system_supports_16kb_granule() local
[all …]
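
The cpufeature.h hits above share one pattern: read the sanitised ID_AA64MMFR0_EL1 value and pull out a single 4-bit feature field. Below is a minimal user-space sketch of that extraction, not kernel code: extract_field() is a hypothetical stand-in for cpuid_feature_extract_unsigned_field(), the sample register value is made up, and the field offsets are assumed from the architectural ID_AA64MMFR0_EL1 layout (BigEnd at [11:8], BigEndEL0 at [19:16], TGran64 at [27:24], TGran4 at [31:28]).

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed field offsets, taken from the architectural ID_AA64MMFR0_EL1 layout. */
    #define BIGENDEL_SHIFT   8
    #define BIGENDEL0_SHIFT 16
    #define TGRAN64_SHIFT   24
    #define TGRAN4_SHIFT    28

    /* Hypothetical stand-in for cpuid_feature_extract_unsigned_field():
     * every field in the arm64 ID registers is 4 bits wide. */
    static uint64_t extract_field(uint64_t reg, unsigned int shift)
    {
    	return (reg >> shift) & 0xf;
    }

    int main(void)
    {
    	uint64_t mmfr0 = 0x0000000000101122ULL;	/* made-up sample value */

    	/* Mirrors the id_aa64mmfr0_mixed_endian_el0() check shown above. */
    	int mixed = extract_field(mmfr0, BIGENDEL_SHIFT) == 0x1 ||
    		    extract_field(mmfr0, BIGENDEL0_SHIFT) == 0x1;

    	printf("mixed-endian EL0: %s\n", mixed ? "yes" : "no");
    	printf("TGran4 field:  0x%llx\n",
    	       (unsigned long long)extract_field(mmfr0, TGRAN4_SHIFT));
    	printf("TGran64 field: 0x%llx\n",
    	       (unsigned long long)extract_field(mmfr0, TGRAN64_SHIFT));
    	return 0;
    }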
kvm_pgtable.h 16 static inline u64 kvm_get_parange(u64 mmfr0) in kvm_get_parange() argument
18 u64 parange = cpuid_feature_extract_unsigned_field(mmfr0, in kvm_get_parange()
289 u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift);
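
The fragment shows kvm_get_parange() extracting the PARange field (bits [3:0]) from mmfr0; kvm_get_vtcr() then uses it to size the stage-2 translation (see pgtable.c below). The helper here is a hypothetical illustration of what that field means: the encoding-to-width mapping is the architectural one (0b0000 = 32 bits up to 0b0110 = 52 bits), not something copied from kvm_pgtable.h.

    #include <stdint.h>
    #include <stdio.h>

    #define ID_AA64MMFR0_PARANGE_SHIFT 0	/* PARange lives in bits [3:0] */

    /* Hypothetical helper: map the PARange encoding onto a physical address
     * width using the ARMv8-defined table (0b0000 = 32, 0b0001 = 36,
     * 0b0010 = 40, 0b0011 = 42, 0b0100 = 44, 0b0101 = 48, 0b0110 = 52).
     * Reserved encodings are treated conservatively here. */
    static unsigned int parange_to_phys_bits(uint64_t mmfr0)
    {
    	static const unsigned int bits[] = { 32, 36, 40, 42, 44, 48, 52 };
    	uint64_t parange = (mmfr0 >> ID_AA64MMFR0_PARANGE_SHIFT) & 0xf;

    	return parange < sizeof(bits) / sizeof(bits[0]) ? bits[parange] : 52;
    }

    int main(void)
    {
    	printf("PA bits: %u\n", parange_to_phys_bits(0x5));	/* made-up value */
    	return 0;
    }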
/arch/arm64/kvm/
reset.c 308 u64 mmfr0; in kvm_set_ipa_limit() local
310 mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1); in kvm_set_ipa_limit()
311 parange = cpuid_feature_extract_unsigned_field(mmfr0, in kvm_set_ipa_limit()
326 switch (cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_TGRAN_2_SHIFT)) { in kvm_set_ipa_limit()
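
kvm_set_ipa_limit() reads both PARange (to cap the guest IPA size) and the stage-2 granule field selected by ID_AA64MMFR0_TGRAN_2_SHIFT. A hedged sketch of the TGranX_2 decode follows; the shift value assumes the 4KB field (TGran4_2 at bits [43:40]) and the case meanings are the architectural ones, so treat it as an illustration rather than a copy of the kernel's switch.

    #include <stdint.h>
    #include <stdio.h>

    #define TGRAN_2_SHIFT 40	/* assumed: TGran4_2 occupies bits [43:40] */

    /* Architectural meaning of the TGranX_2 encodings (illustration only):
     * 0b0000 - stage-2 support follows the corresponding TGranX field
     * 0b0001 - this granule is not supported at stage 2
     * other  - this granule is supported at stage 2 */
    static const char *stage2_granule_support(uint64_t mmfr0)
    {
    	switch ((mmfr0 >> TGRAN_2_SHIFT) & 0xf) {
    	case 0:  return "follow TGranX";
    	case 1:  return "unsupported at stage 2";
    	default: return "supported at stage 2";
    	}
    }

    int main(void)
    {
    	printf("%s\n", stage2_granule_support(0x2ULL << TGRAN_2_SHIFT));
    	return 0;
    }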
mmu.c 671 u64 mmfr0, mmfr1; in kvm_init_stage2_mmu() local
690 mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1); in kvm_init_stage2_mmu()
692 kvm->arch.vtcr = kvm_get_vtcr(mmfr0, mmfr1, phys_shift); in kvm_init_stage2_mmu()
/arch/arm64/mm/
init.c 329 u64 mmfr0 = read_cpuid(ID_AA64MMFR0_EL1); in arm64_memblock_init() local
331 mmfr0, ID_AA64MMFR0_PARANGE_SHIFT); in arm64_memblock_init()
/arch/arm/kernel/
setup.c 261 unsigned int mmfr0 = read_cpuid_ext(CPUID_EXT_MMFR0); in __get_cpu_architecture() local
262 if ((mmfr0 & 0x0000000f) >= 0x00000003 || in __get_cpu_architecture()
263 (mmfr0 & 0x000000f0) >= 0x00000030) in __get_cpu_architecture()
265 else if ((mmfr0 & 0x0000000f) == 0x00000002 || in __get_cpu_architecture()
266 (mmfr0 & 0x000000f0) == 0x00000020) in __get_cpu_architecture()
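
On 32-bit ARM the corresponding register (ID_MMFR0, read via CPUID_EXT_MMFR0) is used differently: __get_cpu_architecture() looks at the VMSA field in bits [3:0] and the PMSA field in bits [7:4] to tell a v7-class MMU/MPU (value >= 3) from a v6-class one (value == 2). The standalone sketch below mirrors those comparisons; the string labels are mine, the kernel assigns CPU_ARCH_* constants instead.

    #include <stdio.h>

    /* Classify a 32-bit ARM core from ID_MMFR0's VMSA (bits [3:0]) and PMSA
     * (bits [7:4]) fields, mirroring the comparisons shown above. */
    static const char *classify_arch(unsigned int mmfr0)
    {
    	if ((mmfr0 & 0x0000000f) >= 0x00000003 ||
    	    (mmfr0 & 0x000000f0) >= 0x00000030)
    		return "ARMv7-class (VMSAv7/PMSAv7)";
    	if ((mmfr0 & 0x0000000f) == 0x00000002 ||
    	    (mmfr0 & 0x000000f0) == 0x00000020)
    		return "ARMv6-class";
    	return "unknown";
    }

    int main(void)
    {
    	printf("%s\n", classify_arch(0x00100003));	/* made-up MMFR0 value */
    	return 0;
    }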
/arch/arm64/kvm/hyp/
pgtable.c 574 u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift) in kvm_get_vtcr() argument
579 vtcr |= kvm_get_parange(mmfr0) << VTCR_EL2_PS_SHIFT; in kvm_get_vtcr()
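
kvm_get_vtcr() is where the mmfr0-derived PARange value ends up: the fragment shows it shifted into the VTCR_EL2.PS field, which tells the stage-2 walker how large physical addresses may be (the mmu.c hit above is the caller, via kvm_init_stage2_mmu()). Below is a condensed, hypothetical view of just that step, assuming PS sits at VTCR_EL2[18:16]; the real function also derives further VTCR fields from mmfr1 and phys_shift, which are omitted here.

    #include <stdint.h>
    #include <stdio.h>

    #define VTCR_EL2_PS_SHIFT 16	/* assumed: PS occupies VTCR_EL2[18:16] */

    /* Place the PARange-derived physical-size encoding into VTCR_EL2.PS,
     * as in the "vtcr |= kvm_get_parange(mmfr0) << VTCR_EL2_PS_SHIFT" line above. */
    static uint64_t vtcr_set_ps(uint64_t vtcr, uint64_t parange)
    {
    	return vtcr | (parange << VTCR_EL2_PS_SHIFT);
    }

    int main(void)
    {
    	printf("vtcr: 0x%llx\n", (unsigned long long)vtcr_set_ps(0, 0x5));
    	return 0;
    }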
/arch/arm64/kernel/
cpufeature.c 2749 u64 safe_mmfr1, mmfr0, mmfr1; in verify_hyp_capabilities() local
2757 mmfr0 = read_cpuid(ID_AA64MMFR0_EL1); in verify_hyp_capabilities()
2769 parange = cpuid_feature_extract_unsigned_field(mmfr0, in verify_hyp_capabilities()