MODULES_VADDR: definitions and references, grouped by file (line number: source, with the enclosing function where known).

arch/x86/include/asm/pgtable_32_areas.h
    47: #define MODULES_VADDR VMALLOC_START
    49: #define MODULES_LEN (MODULES_VADDR - MODULES_END)

arch/x86/include/asm/pgtable_64_types.h
    144: #define MODULES_VADDR (__START_KERNEL_map + KERNEL_IMAGE_SIZE)
    147: #define MODULES_LEN (MODULES_END - MODULES_VADDR)

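Taken together, the 64-bit definitions place the module area directly above the kernel image mapping. A worked sketch of the resulting window, assuming the common non-KASLR values __START_KERNEL_map = 0xffffffff80000000, KERNEL_IMAGE_SIZE = 512 MiB and MODULES_END = 0xffffffffff000000 (all three are configuration dependent and quoted here as assumptions, not config-independent facts):

    /* Worked example (sketch, not kernel code): size of the x86-64 module
     * window under the assumptions named above. */
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            const uint64_t start_kernel_map  = 0xffffffff80000000ULL; /* __START_KERNEL_map */
            const uint64_t kernel_image_size = 512ULL << 20;          /* KERNEL_IMAGE_SIZE  */
            const uint64_t modules_end       = 0xffffffffff000000ULL; /* MODULES_END        */

            uint64_t modules_vaddr = start_kernel_map + kernel_image_size;
            uint64_t modules_len   = modules_end - modules_vaddr;

            printf("MODULES_VADDR = %#llx\n", (unsigned long long)modules_vaddr);
            printf("MODULES_LEN   = %llu MiB\n", (unsigned long long)(modules_len >> 20));
            /* Prints 0xffffffffa0000000 and 1520 MiB with these assumptions. */
            return 0;
    }
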
arch/arm/include/asm/memory.h
    44: #define MODULES_VADDR (PAGE_OFFSET - SZ_16M)
    47: #define MODULES_VADDR (PAGE_OFFSET - SZ_8M)
    50: #if TASK_SIZE > MODULES_VADDR
    68: #define XIP_VIRT_ADDR(physaddr) (MODULES_VADDR + ((physaddr) & 0x000fffff))
    111: #define MODULES_VADDR PAGE_OFFSET

arch/powerpc/kernel/module.c
    90: #ifdef MODULES_VADDR
    93: BUILD_BUG_ON(TASK_SIZE > MODULES_VADDR);    (in module_alloc())
    95: return __vmalloc_node_range(size, 1, MODULES_VADDR, MODULES_END, GFP_KERNEL,    (in module_alloc())

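The powerpc, sparc and nds32 module.c hits in this listing (and arm's, before its XIP override below) follow the same pattern: module_alloc() simply hands the [MODULES_VADDR, MODULES_END) window to the vmalloc allocator. A minimal sketch of that pattern; the exact gfp, page-protection and vm_flags arguments vary by architecture and kernel version:

    /* Sketch of the common module_alloc() implementation seen above;
     * argument details differ per architecture and kernel version. */
    #include <linux/moduleloader.h>
    #include <linux/vmalloc.h>
    #include <linux/mm.h>

    void *module_alloc(unsigned long size)
    {
            /* The module window must lie entirely above the user address space. */
            BUILD_BUG_ON(TASK_SIZE > MODULES_VADDR);

            /* Back the allocation with vmalloc pages restricted to the window. */
            return __vmalloc_node_range(size, 1, MODULES_VADDR, MODULES_END,
                                        GFP_KERNEL, PAGE_KERNEL_EXEC,
                                        VM_FLUSH_RESET_PERMS, NUMA_NO_NODE,
                                        __builtin_return_address(0));
    }
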
arch/nds32/include/asm/memory.h
    41: #define MODULES_VADDR (MODULES_END - SZ_32M)
    43: #if TASK_SIZE > MODULES_VADDR

arch/arm/mm/pgd.c
    57: new_p4d = p4d_alloc(mm, new_pgd + pgd_index(MODULES_VADDR),    (in pgd_alloc())
    58: MODULES_VADDR);    (in pgd_alloc())
    62: new_pud = pud_alloc(mm, new_p4d, MODULES_VADDR);    (in pgd_alloc())

arch/arm/mm/pageattr.c
    49: if (!in_range(start, size, MODULES_VADDR, MODULES_END) &&    (in change_memory_common())

arch/arm/mm/init.c
    421: BUILD_BUG_ON(TASK_SIZE > MODULES_VADDR);    (in mem_init())
    422: BUG_ON(TASK_SIZE > MODULES_VADDR);    (in mem_init())

arch/x86/kernel/head64.c
    469: BUILD_BUG_ON(MODULES_VADDR < __START_KERNEL_map);    (in x86_64_start_kernel())
    470: BUILD_BUG_ON(MODULES_VADDR - __START_KERNEL_map < KERNEL_IMAGE_SIZE);    (in x86_64_start_kernel())
    473: BUILD_BUG_ON((MODULES_VADDR & ~PMD_MASK) != 0);    (in x86_64_start_kernel())
    474: BUILD_BUG_ON(!(MODULES_VADDR > __START_KERNEL));    (in x86_64_start_kernel())

arch/x86/kernel/module.c
    76: MODULES_VADDR + get_module_load_offset(),    (in module_alloc())

arch/powerpc/mm/kasan/kasan_init_32.c
    119: #ifdef MODULES_VADDR    (in kasan_unmap_early_shadow_vmalloc())
    120: k_start = (unsigned long)kasan_mem_to_shadow((void *)MODULES_VADDR);

arch/arm/kernel/module.c
    32: #undef MODULES_VADDR
    33: #define MODULES_VADDR (((unsigned long)_exiprom + ~PMD_MASK) & PMD_MASK)
    46: p = __vmalloc_node_range(size, 1, MODULES_VADDR, MODULES_END,    (in module_alloc())

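The arch/arm/kernel/module.c hit above overrides the window for execute-in-place kernels. A sketch of the surrounding context; the CONFIG_XIP_KERNEL guard and the rationale comment are reconstructed assumptions, not quoted from the file:

    #ifdef CONFIG_XIP_KERNEL
    /*
     * With an XIP kernel the ROM image itself occupies the start of the
     * module area, so rebase MODULES_VADDR to the first PMD-aligned address
     * after the end of the ROM image (_exiprom) before module_alloc() hands
     * the range to __vmalloc_node_range().
     */
    #undef MODULES_VADDR
    #define MODULES_VADDR (((unsigned long)_exiprom + ~PMD_MASK) & PMD_MASK)
    #endif
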
arch/powerpc/mm/ptdump/ptdump.c
    76: #ifdef MODULES_VADDR
    362: #ifdef MODULES_VADDR    (in populate_markers())
    363: address_markers[i++].start_address = MODULES_VADDR;    (in populate_markers())

arch/arm64/include/asm/memory.h
    47: #define MODULES_END (MODULES_VADDR + MODULES_VSIZE)
    48: #define MODULES_VADDR (KASAN_SHADOW_END)

arch/arm64/kernel/kaslr.c
    156: (u64)MODULES_VADDR);    (in kaslr_early_init())

arch/um/include/asm/pgtable.h
    49: #define MODULES_VADDR VMALLOC_START
    51: #define MODULES_LEN (MODULES_VADDR - MODULES_END)

arch/powerpc/mm/book3s32/mmu.c
    185: #ifdef MODULES_VADDR    (in is_module_segment())
    186: if (addr < ALIGN_DOWN(MODULES_VADDR, SZ_256M))    (in is_module_segment())

arch/s390/kernel/setup.c
    123: unsigned long MODULES_VADDR;
    578: MODULES_VADDR = MODULES_END - MODULES_LEN;    (in setup_memory_end())
    579: VMALLOC_END = MODULES_VADDR;    (in setup_memory_end())

arch/sparc/kernel/module.c
    32: return __vmalloc_node_range(size, 1, MODULES_VADDR, MODULES_END,    (in module_map())

arch/sparc/kernel/ktlb.S
    221: sethi %hi(MODULES_VADDR), %g5

arch/s390/mm/dump_pagetables.c
    259: address_markers[MODULES_NR].start_address = MODULES_VADDR;    (in pt_dump_init())

arch/nds32/kernel/module.c
    12: return __vmalloc_node_range(size, 1, MODULES_VADDR, MODULES_END,    (in module_alloc())

arch/s390/include/asm/pgtable.h
    96: extern unsigned long MODULES_VADDR;
    98: #define MODULES_VADDR MODULES_VADDR
    105: if (addr < (void *)MODULES_VADDR)    (in is_module_addr())

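s390 is the outlier: MODULES_VADDR is an ordinary variable sized at boot (see arch/s390/kernel/setup.c above), and pgtable.h defines the macro to expand to itself so that generic "#ifdef MODULES_VADDR" checks still fire. A condensed sketch of that arrangement, based on the lines quoted above; the real is_module_addr() may carry additional compile-time checks omitted here:

    /* Sketch of the s390 arrangement: the module window is computed at run
     * time, so MODULES_VADDR is a variable plus a self-referential macro
     * (the #define keeps "#ifdef MODULES_VADDR" code paths enabled). */
    extern unsigned long MODULES_VADDR;     /* assigned in setup_memory_end() */
    extern unsigned long MODULES_END;
    #define MODULES_VADDR MODULES_VADDR

    static inline int is_module_addr(void *addr)
    {
            /* Inside [MODULES_VADDR, MODULES_END]?  Condensed from line 105 above. */
            if (addr < (void *)MODULES_VADDR)
                    return 0;
            if (addr > (void *)MODULES_END)
                    return 0;
            return 1;
    }
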
arch/arm64/mm/kasan_init.c
    224: mod_shadow_start = (u64)kasan_mem_to_shadow((void *)MODULES_VADDR);    (in kasan_init_shadow())

arch/arm64/mm/ptdump.c
    44: { MODULES_VADDR, "Modules start" },