Home
last modified time | relevance | path

Searched refs:vaddr_end (Results 1 – 7 of 7) sorted by relevance

/arch/x86/mm/
mem_encrypt_identity.c:70  unsigned long vaddr_end; member
96 pgd_end = ppd->vaddr_end & PGDIR_MASK; in sme_clear_pgd()
186 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pmd()
196 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pte()
207 unsigned long vaddr_end; in __sme_map_range() local
213 vaddr_end = ppd->vaddr_end; in __sme_map_range()
216 ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_PAGE_SIZE); in __sme_map_range()
220 ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK; in __sme_map_range()
224 ppd->vaddr_end = vaddr_end; in __sme_map_range()
386 ppd.vaddr_end = workarea_end; in sme_encrypt_kernel()
[all …]
mem_encrypt.c:256  unsigned long vaddr_end, vaddr_next; in early_set_memory_enc_dec() local
263 vaddr_end = vaddr + size; in early_set_memory_enc_dec()
265 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in early_set_memory_enc_dec()
288 ((vaddr_end - vaddr) >= psize)) { in early_set_memory_enc_dec()
310 __pa((vaddr_end & pmask) + psize), in early_set_memory_enc_dec()
382 unsigned long vaddr, vaddr_end, npages; in mem_encrypt_free_decrypted_mem() local
386 vaddr_end = (unsigned long)__end_bss_decrypted; in mem_encrypt_free_decrypted_mem()
387 npages = (vaddr_end - vaddr) >> PAGE_SHIFT; in mem_encrypt_free_decrypted_mem()
401 free_init_pages("unused decrypted", vaddr, vaddr_end); in mem_encrypt_free_decrypted_mem()
kaslr.c:42  static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE; variable
91 BUILD_BUG_ON(vaddr_start >= vaddr_end); in kernel_randomize_memory()
92 BUILD_BUG_ON(vaddr_end != CPU_ENTRY_AREA_BASE); in kernel_randomize_memory()
93 BUILD_BUG_ON(vaddr_end > __START_KERNEL_map); in kernel_randomize_memory()
122 remain_entropy = vaddr_end - vaddr_start; in kernel_randomize_memory()
init_64.c:423  unsigned long vaddr_end = __START_KERNEL_map + KERNEL_IMAGE_SIZE; in cleanup_highmap() local
433 vaddr_end = __START_KERNEL_map + (max_pfn_mapped << PAGE_SHIFT); in cleanup_highmap()
435 for (; vaddr + PMD_SIZE - 1 < vaddr_end; pmd++, vaddr += PMD_SIZE) { in cleanup_highmap()
674 unsigned long vaddr, vaddr_end, vaddr_next, paddr_next, paddr_last; in phys_p4d_init() local
678 vaddr_end = (unsigned long)__va(paddr_end); in phys_p4d_init()
684 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in phys_p4d_init()
704 paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end), in phys_p4d_init()
710 paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end), in phys_p4d_init()
728 unsigned long vaddr, vaddr_start, vaddr_end, vaddr_next, paddr_last; in __kernel_physical_mapping_init() local
732 vaddr_end = (unsigned long)__va(paddr_end); in __kernel_physical_mapping_init()
[all …]
/arch/x86/kernel/
head64.c:116  unsigned long vaddr, vaddr_end; in __startup_64() local
268 vaddr_end = (unsigned long)__end_bss_decrypted; in __startup_64()
269 for (; vaddr < vaddr_end; vaddr += PMD_SIZE) { in __startup_64()
/arch/x86/xen/
mmu_pv.c:1094  unsigned long vaddr_end) in xen_cleanhighmap() argument
1101 for (; vaddr <= vaddr_end && (pmd < (level2_kernel_pgt + PTRS_PER_PMD)); in xen_cleanhighmap()
1119 void *vaddr_end = vaddr + size; in xen_free_ro_pages() local
1121 for (; vaddr < vaddr_end; vaddr += PAGE_SIZE) in xen_free_ro_pages()
/arch/x86/kvm/
svm.c:6462  unsigned long vaddr, vaddr_end, next_vaddr, npages, pages, size, i; in sev_launch_update_data() local
6481 vaddr_end = vaddr + size; in sev_launch_update_data()
6498 for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) { in sev_launch_update_data()
6811 unsigned long vaddr, vaddr_end, next_vaddr; in sev_dbg_crypt() local
6832 vaddr_end = vaddr + size; in sev_dbg_crypt()
6835 for (; vaddr < vaddr_end; vaddr = next_vaddr) { in sev_dbg_crypt()