
Searched refs:kernel_end (Results 1 – 13 of 13) sorted by relevance

/arch/alpha/mm/
init.c
144 callback_init(void * kernel_end) in callback_init() argument
183 (((unsigned long)kernel_end + ~PAGE_MASK) & PAGE_MASK); in callback_init()
184 kernel_end = two_pages + 2*PAGE_SIZE; in callback_init()
221 memset(kernel_end, 0, PAGE_SIZE); in callback_init()
223 pmd_set(pmd, (pte_t *)kernel_end); in callback_init()
224 kernel_end += PAGE_SIZE; in callback_init()
235 return kernel_end; in callback_init()
numa.c
55 setup_memory_node(int nid, void *kernel_end) in setup_memory_node() argument
141 end_kernel_pfn = PFN_UP(virt_to_phys(kernel_end)); in setup_memory_node()
161 setup_memory(void *kernel_end) in setup_memory() argument
173 setup_memory_node(nid, kernel_end); in setup_memory()
175 kernel_size = virt_to_phys(kernel_end) - KERNEL_START_PHYS; in setup_memory()
/arch/arm/boot/bootp/
kernel.S
5 .globl kernel_end symbol
6 kernel_end: label
/arch/riscv/mm/
physaddr.c
27 unsigned long kernel_end = (unsigned long)_end; in __phys_addr_symbol() local
33 VIRTUAL_BUG_ON(x < kernel_start || x > kernel_end); in __phys_addr_symbol()
/arch/mips/ar7/
memory.c
24 u32 *kernel_end = (u32 *)KSEG1ADDR(CPHYSADDR((u32)&_end)); in memsize() local
27 while (tmpaddr > kernel_end) { in memsize()
/arch/x86/mm/
mem_encrypt_identity.c
293 unsigned long kernel_start, kernel_end, kernel_len; in sme_encrypt_kernel() local
319 kernel_end = ALIGN(__pa_symbol(_end), PMD_PAGE_SIZE); in sme_encrypt_kernel()
320 kernel_len = kernel_end - kernel_start; in sme_encrypt_kernel()
429 ppd.vaddr_end = kernel_end; in sme_encrypt_kernel()
435 ppd.vaddr_end = kernel_end + decrypted_base; in sme_encrypt_kernel()
479 ppd.vaddr_end = kernel_end + decrypted_base; in sme_encrypt_kernel()
init.c
774 unsigned long kernel_end = __pa_symbol(_end); in init_mem_mapping() local
783 memory_map_bottom_up(kernel_end, end); in init_mem_mapping()
784 memory_map_bottom_up(ISA_END_ADDRESS, kernel_end); in init_mem_mapping()
/arch/powerpc/kexec/
core.c
214 static phys_addr_t kernel_end; variable
222 .value = &kernel_end,
281 kernel_end = cpu_to_be_ulong(__pa(_end)); in kexec_setup()
/arch/parisc/mm/
init.c
359 unsigned long kernel_start, kernel_end; in map_pages() local
364 kernel_end = __pa((unsigned long)&_end); in map_pages()
406 } else if (address < kernel_start || address >= kernel_end) { in map_pages()
460 unsigned long kernel_end = (unsigned long)&_end; in free_initmem() local
464 map_pages(init_end, __pa(init_end), kernel_end - init_end, in free_initmem()
481 __flush_tlb_range(0, init_begin, kernel_end); in free_initmem()
/arch/alpha/kernel/
setup.c
310 setup_memory(void *kernel_end) in setup_memory() argument
370 kernel_size = virt_to_phys(kernel_end) - KERNEL_START_PHYS; in setup_memory()
454 void *kernel_end = _end; /* end of kernel */ in setup_arch() local
493 kernel_end = callback_init(kernel_end); in setup_arch()
649 setup_memory(kernel_end); in setup_arch()
/arch/arm64/mm/
mmu.c
499 phys_addr_t kernel_end = __pa_symbol(__init_begin); in map_mem() local
514 memblock_mark_nomap(kernel_start, kernel_end - kernel_start); in map_mem()
550 __map_memblock(pgdp, kernel_start, kernel_end, in map_mem()
552 memblock_clear_nomap(kernel_start, kernel_end - kernel_start); in map_mem()
/arch/m68k/kernel/
head.S
1285 movel %pc@(L(kernel_end)),%a0
2542 lea %pc@(L(kernel_end)),%a0
3786 L(kernel_end):
/arch/x86/xen/
mmu_pv.c
1000 unsigned long kernel_end = roundup((unsigned long)_brk_end, PMD_SIZE) - 1; in xen_cleanhighmap() local
1009 if (vaddr < (unsigned long) _text || vaddr > kernel_end) in xen_cleanhighmap()