Lines Matching refs:vaddr_end
225 unsigned long vaddr_end; member
234 pgd_end = ppd->vaddr_end & PGDIR_MASK; in sme_clear_pgd()
378 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pmd()
388 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pte()
399 unsigned long vaddr_end; in __sme_map_range() local
405 vaddr_end = ppd->vaddr_end; in __sme_map_range()
408 ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_PAGE_SIZE); in __sme_map_range()
412 ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK; in __sme_map_range()
416 ppd->vaddr_end = vaddr_end; in __sme_map_range()
591 ppd.vaddr_end = workarea_end; in sme_encrypt_kernel()
625 ppd.vaddr_end = kernel_end; in sme_encrypt_kernel()
631 ppd.vaddr_end = kernel_end + decrypted_base; in sme_encrypt_kernel()
638 ppd.vaddr_end = initrd_end; in sme_encrypt_kernel()
645 ppd.vaddr_end = initrd_end + decrypted_base; in sme_encrypt_kernel()
652 ppd.vaddr_end = workarea_end; in sme_encrypt_kernel()
657 ppd.vaddr_end = workarea_end + decrypted_base; in sme_encrypt_kernel()
675 ppd.vaddr_end = kernel_end + decrypted_base; in sme_encrypt_kernel()
680 ppd.vaddr_end = initrd_end + decrypted_base; in sme_encrypt_kernel()
685 ppd.vaddr_end = workarea_end + decrypted_base; in sme_encrypt_kernel()
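
The __sme_map_range()/sme_encrypt_kernel() hits above are from the AMD SME (Secure Memory Encryption) early page-table setup. __sme_map_range() saves the caller's vaddr_end and then rewrites it three times, so that an unaligned head and tail of the range are covered by 4KB PTE entries while the 2MB-aligned middle is covered by large PMD entries; the two loops in __sme_map_range_pte()/__sme_map_range_pmd() simply advance vaddr until it reaches vaddr_end. Below is a minimal, standalone sketch of that splitting pattern. The struct, field, macro and function names are taken from the listing; the reduced two-field struct, the 4KB/2MB page constants, the printf stand-ins for the real population code, and the main() driver are illustrative assumptions, not the kernel's code.

#include <stdio.h>

#define PMD_PAGE_SIZE  (2UL * 1024 * 1024)   /* assumed 2MB large-page size */
#define PMD_PAGE_MASK  (~(PMD_PAGE_SIZE - 1))
#define PTE_PAGE_SIZE  (4UL * 1024)          /* assumed 4KB base page size */
#define ALIGN(x, a)    (((x) + (a) - 1) & ~((a) - 1))

/* Reduced to the two fields the listing is about. */
struct sme_populate_pgd_data {
	unsigned long vaddr;
	unsigned long vaddr_end;
};

/* Stand-ins for the real population loops: in the kernel these create the
 * actual PTE/PMD entries; here they just print and advance vaddr until it
 * reaches vaddr_end, mirroring the while loops shown in the listing. */
static void __sme_map_range_pte(struct sme_populate_pgd_data *ppd)
{
	while (ppd->vaddr < ppd->vaddr_end) {
		printf("  PTE map at %#lx\n", ppd->vaddr);
		ppd->vaddr += PTE_PAGE_SIZE;
	}
}

static void __sme_map_range_pmd(struct sme_populate_pgd_data *ppd)
{
	while (ppd->vaddr < ppd->vaddr_end) {
		printf("  PMD map at %#lx\n", ppd->vaddr);
		ppd->vaddr += PMD_PAGE_SIZE;
	}
}

/* The rewrite-vaddr_end-three-times pattern from __sme_map_range(). */
static void __sme_map_range(struct sme_populate_pgd_data *ppd)
{
	/* Save the caller's end value; it is overwritten below. */
	unsigned long vaddr_end = ppd->vaddr_end;

	/* Unaligned head: 4KB PTEs up to the next 2MB boundary. */
	ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_PAGE_SIZE);
	__sme_map_range_pte(ppd);

	/* 2MB-aligned middle: large PMD entries. */
	ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK;
	__sme_map_range_pmd(ppd);

	/* Unaligned tail: the remaining 4KB PTEs. */
	ppd->vaddr_end = vaddr_end;
	__sme_map_range_pte(ppd);
}

int main(void)
{
	/* Hypothetical, deliberately unaligned range for demonstration. */
	struct sme_populate_pgd_data ppd = {
		.vaddr     = 0x1ff000,
		.vaddr_end = 0x601000,
	};

	__sme_map_range(&ppd);
	return 0;
}

In sme_encrypt_kernel() the same ppd object is reused per region: vaddr_end is set for the workarea, the kernel image and the initrd, and then again for each of those ranges shifted by decrypted_base, so each region appears to be mapped both at its normal virtual address and at a temporary decrypted alias; the second group of "+ decrypted_base" assignments near the end of the function is presumably the teardown of those temporary mappings.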