/arch/hexagon/kernel/ |
D | vdso.c |
      65  unsigned long vdso_base;  in arch_setup_additional_pages() local
      72  vdso_base = STACK_TOP;  in arch_setup_additional_pages()
      74  vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);  in arch_setup_additional_pages()
      75  if (IS_ERR_VALUE(vdso_base)) {  in arch_setup_additional_pages()
      76  ret = vdso_base;  in arch_setup_additional_pages()
      81  ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,  in arch_setup_additional_pages()
      89  mm->context.vdso = (void *)vdso_base;  in arch_setup_additional_pages()
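Taken together, the hexagon fragments trace the simplest form of the vDSO setup path: reserve one page of user address space near the stack top, install it as a special mapping, and cache where it landed. The sketch below stitches them into one function for readability; the include list, the vdso_page variable, the flag set and the locking are assumptions, not the actual hexagon source.

    /* Assumed includes for the sketch. */
    #include <linux/binfmts.h>
    #include <linux/mm.h>
    #include <linux/sched.h>

    static struct page *vdso_page;   /* assumed: the single prebuilt vDSO page */

    int arch_setup_additional_pages(struct linux_binprm *bprm, int uses_interp)
    {
            int ret;
            unsigned long vdso_base;
            struct mm_struct *mm = current->mm;

            down_write(&mm->mmap_sem);

            /* Use the stack top as the search hint for a free page. */
            vdso_base = STACK_TOP;
            vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);
            if (IS_ERR_VALUE(vdso_base)) {
                    ret = vdso_base;
                    goto up_fail;
            }

            /* Install the page read/execute as a special mapping. */
            ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,
                                          VM_READ | VM_EXEC |
                                          VM_MAYREAD | VM_MAYEXEC,
                                          &vdso_page);
            if (ret)
                    goto up_fail;

            /* Cache the base so signal delivery and unwinding can find it later. */
            mm->context.vdso = (void *)vdso_base;

    up_fail:
            up_write(&mm->mmap_sem);
            return ret;
    }

Caching the base in mm->context is what the signal-handling and unwinding entries further down rely on to locate the mapping again.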
|
/arch/tile/kernel/ |
D | vdso.c |
     129  unsigned long vdso_base = 0;  in setup_vdso_pages() local
     135  mm->context.vdso_base = 0;  in setup_vdso_pages()
     153  vdso_base = get_unmapped_area(NULL, vdso_base,  in setup_vdso_pages()
     157  if (IS_ERR_VALUE(vdso_base)) {  in setup_vdso_pages()
     158  retval = vdso_base;  in setup_vdso_pages()
     163  vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);  in setup_vdso_pages()
     170  mm->context.vdso_base = vdso_base;  in setup_vdso_pages()
     182  retval = install_special_mapping(mm, vdso_base,  in setup_vdso_pages()
     188  mm->context.vdso_base = 0;  in setup_vdso_pages()
|
D | stack.c | 128 (kbt->it.pc == ((ulong)kbt->task->mm->context.vdso_base + in is_sigreturn()
|
/arch/arm64/kernel/ |
D | vdso.c |
     179  unsigned long vdso_base, vdso_text_len, vdso_mapping_len;  in arch_setup_additional_pages() local
     188  vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);  in arch_setup_additional_pages()
     189  if (IS_ERR_VALUE(vdso_base)) {  in arch_setup_additional_pages()
     190  ret = ERR_PTR(vdso_base);  in arch_setup_additional_pages()
     193  ret = _install_special_mapping(mm, vdso_base, PAGE_SIZE,  in arch_setup_additional_pages()
     199  vdso_base += PAGE_SIZE;  in arch_setup_additional_pages()
     200  mm->context.vdso = (void *)vdso_base;  in arch_setup_additional_pages()
     201  ret = _install_special_mapping(mm, vdso_base, vdso_text_len,  in arch_setup_additional_pages()
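The arm64 fragments show a slightly richer layout: one contiguous reservation holds a data page followed by the vDSO text, installed as two separate special mappings, and the cached base (what AT_SYSINFO_EHDR will report) points at the text. A hedged sketch of that shape, with vdso_pages, vdso_spec[] and the flag choices assumed for illustration only:

    /* Assumed includes for the sketch. */
    #include <linux/binfmts.h>
    #include <linux/err.h>
    #include <linux/mm.h>
    #include <linux/sched.h>

    static unsigned long vdso_pages;                 /* assumed: number of vDSO text pages */
    static struct vm_special_mapping vdso_spec[2];   /* assumed: [0] data page, [1] text   */

    int arch_setup_additional_pages(struct linux_binprm *bprm, int uses_interp)
    {
            struct mm_struct *mm = current->mm;
            unsigned long vdso_base, vdso_text_len, vdso_mapping_len;
            void *ret;

            vdso_text_len = vdso_pages << PAGE_SHIFT;
            /* One extra page in front of the text holds the vDSO data page. */
            vdso_mapping_len = vdso_text_len + PAGE_SIZE;

            down_write(&mm->mmap_sem);

            vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);
            if (IS_ERR_VALUE(vdso_base)) {
                    ret = ERR_PTR(vdso_base);
                    goto up_fail;
            }

            /* Data page: read-only, shared between kernel and userspace. */
            ret = _install_special_mapping(mm, vdso_base, PAGE_SIZE,
                                           VM_READ | VM_MAYREAD, &vdso_spec[0]);
            if (IS_ERR(ret))
                    goto up_fail;

            /* Text follows immediately; this is the address userspace is told about. */
            vdso_base += PAGE_SIZE;
            mm->context.vdso = (void *)vdso_base;
            ret = _install_special_mapping(mm, vdso_base, vdso_text_len,
                                           VM_READ | VM_EXEC |
                                           VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
                                           &vdso_spec[1]);
            if (IS_ERR(ret))
                    goto up_fail;

            up_write(&mm->mmap_sem);
            return 0;

    up_fail:
            mm->context.vdso = NULL;
            up_write(&mm->mmap_sem);
            return PTR_ERR(ret);
    }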
|
/arch/s390/kernel/ |
D | vdso.c |
      93  current->mm->context.vdso_base = vma->vm_start;  in vdso_mremap()
     238  unsigned long vdso_base;  in arch_setup_additional_pages() local
     268  vdso_base = get_unmapped_area(NULL, 0, vdso_pages << PAGE_SHIFT, 0, 0);  in arch_setup_additional_pages()
     269  if (IS_ERR_VALUE(vdso_base)) {  in arch_setup_additional_pages()
     270  rc = vdso_base;  in arch_setup_additional_pages()
     284  vma = _install_special_mapping(mm, vdso_base, vdso_pages << PAGE_SHIFT,  in arch_setup_additional_pages()
     293  current->mm->context.vdso_base = vdso_base;  in arch_setup_additional_pages()
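Line 93 suggests s390 wires a mremap callback into its special mapping so the cached base follows the vma if userspace moves it. A minimal sketch of such a hook, assuming the vm_special_mapping .mremap interface; the struct instance name, its other fields (.pages, .fault) and any size checks the real callback performs are omitted or assumed here:

    /* Assumed includes for the sketch. */
    #include <linux/mm_types.h>
    #include <linux/sched.h>

    static int vdso_mremap(const struct vm_special_mapping *sm,
                           struct vm_area_struct *vma)
    {
            /* Follow the mapping to its new location (listing line 93). */
            current->mm->context.vdso_base = vma->vm_start;
            return 0;
    }

    static const struct vm_special_mapping vdso_mapping = {
            .name   = "[vdso]",
            .mremap = vdso_mremap,
    };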
|
/arch/powerpc/include/asm/ |
D | mm-arch-hooks.h |
      23  if (old_start == mm->context.vdso_base)  in arch_remap()
      24  mm->context.vdso_base = new_start;  in arch_remap()
|
D | mmu_context.h |
     131  if (start <= mm->context.vdso_base && mm->context.vdso_base < end)  in arch_unmap()
     132  mm->context.vdso_base = 0;  in arch_unmap()
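These two powerpc hooks keep the cached base honest when userspace rearranges its address space: mremap() of the vDSO updates it, munmap() over it clears it, so signal delivery never jumps into unmapped memory. A sketch combining them, with the parameter lists assumed from the fragment context:

    /* Assumed include for the sketch. */
    #include <linux/mm_types.h>

    static inline void arch_remap(struct mm_struct *mm,
                                  unsigned long old_start, unsigned long old_end,
                                  unsigned long new_start, unsigned long new_end)
    {
            /* mremap() moved the vDSO wholesale: follow it (lines 23-24). */
            if (old_start == mm->context.vdso_base)
                    mm->context.vdso_base = new_start;
    }

    static inline void arch_unmap(struct mm_struct *mm,
                                  struct vm_area_struct *vma,
                                  unsigned long start, unsigned long end)
    {
            /* munmap() covered part of the vDSO: forget it (lines 131-132). */
            if (start <= mm->context.vdso_base && mm->context.vdso_base < end)
                    mm->context.vdso_base = 0;
    }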
|
D | mmu-40x.h | 60 unsigned long vdso_base; member
|
D | mmu-8xx.h | 171 unsigned long vdso_base; member
|
D | mmu-44x.h | 111 unsigned long vdso_base; member
|
D | elf.h | 178 VDSO_AUX_ENT(AT_SYSINFO_EHDR, current->mm->context.vdso_base); \
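The elf.h entry is where vdso_base leaves the kernel: it is exported to every new process as the AT_SYSINFO_EHDR aux-vector entry. A small userspace example (not from the source) that reads the entry back with getauxval(3):

    #include <elf.h>
    #include <stdio.h>
    #include <sys/auxv.h>

    int main(void)
    {
            unsigned long vdso = getauxval(AT_SYSINFO_EHDR);

            if (vdso)
                    printf("vDSO ELF header mapped at %#lx\n", vdso);
            else
                    printf("no vDSO reported in the aux vector\n");
            return 0;
    }

This is the same value the kernel cached in mm->context.vdso_base; the C library typically parses the ELF header found there to bind fast paths such as clock_gettime() without entering the kernel.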
|
D | mmu-book3e.h | 232 unsigned long vdso_base; member
|
/arch/powerpc/kernel/ |
D | vdso.c |
     156  unsigned long vdso_base;  in arch_setup_additional_pages() local
     166  vdso_base = VDSO32_MBASE;  in arch_setup_additional_pages()
     175  vdso_base = 0;  in arch_setup_additional_pages()
     180  vdso_base = VDSO32_MBASE;  in arch_setup_additional_pages()
     183  current->mm->context.vdso_base = 0;  in arch_setup_additional_pages()
     201  vdso_base = get_unmapped_area(NULL, vdso_base,  in arch_setup_additional_pages()
     205  if (IS_ERR_VALUE(vdso_base)) {  in arch_setup_additional_pages()
     206  rc = vdso_base;  in arch_setup_additional_pages()
     211  vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);  in arch_setup_additional_pages()
     218  current->mm->context.vdso_base = vdso_base;  in arch_setup_additional_pages()
     [all …]
|
D | signal_32.c |
    1025  if (vdso32_rt_sigtramp && tsk->mm->context.vdso_base) {  in handle_rt_signal32()
    1027  tramp = tsk->mm->context.vdso_base + vdso32_rt_sigtramp;  in handle_rt_signal32()
    1471  if (vdso32_sigtramp && tsk->mm->context.vdso_base) {  in handle_signal32()
    1473  tramp = tsk->mm->context.vdso_base + vdso32_sigtramp;  in handle_signal32()
|
D | signal_64.c |
     839  if (vdso64_rt_sigtramp && tsk->mm->context.vdso_base) {  in handle_rt_signal64()
     840  regs->link = tsk->mm->context.vdso_base + vdso64_rt_sigtramp;  in handle_rt_signal64()
|
/arch/powerpc/perf/ |
D | callchain.c |
     213  if (vdso64_rt_sigtramp && current->mm->context.vdso_base &&  in is_sigreturn_64_address()
     214  nip == current->mm->context.vdso_base + vdso64_rt_sigtramp)  in is_sigreturn_64_address()
     372  if (vdso32_sigtramp && current->mm->context.vdso_base &&  in is_sigreturn_32_address()
     373  nip == current->mm->context.vdso_base + vdso32_sigtramp)  in is_sigreturn_32_address()
     383  if (vdso32_rt_sigtramp && current->mm->context.vdso_base &&  in is_rt_sigreturn_32_address()
     384  nip == current->mm->context.vdso_base + vdso32_rt_sigtramp)  in is_rt_sigreturn_32_address()
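The callchain fragments are the reverse lookup of the signal_32.c/signal_64.c entries above: signal delivery points the return address at a sigreturn stub inside the vDSO, and the perf unwinder recognizes a sampled NIP as exactly that address. A hedged sketch of the 64-bit check, with an illustrative helper name; the on-stack trampoline case the real functions also accept is elided:

    /* Assumed include for the sketch. */
    #include <linux/sched.h>

    extern unsigned long vdso64_rt_sigtramp;   /* stub offset within the vDSO image */

    /* Illustrative name; the listed functions also check an on-stack trampoline. */
    static int nip_is_vdso_rt_sigreturn64(unsigned long nip)
    {
            return vdso64_rt_sigtramp && current->mm->context.vdso_base &&
                   nip == current->mm->context.vdso_base + vdso64_rt_sigtramp;
    }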
|
/arch/tile/include/asm/ |
D | mmu.h | 25 unsigned long vdso_base; member
|
D | processor.h | 173 #define VDSO_BASE ((unsigned long)current->active_mm->context.vdso_base)
|
/arch/s390/include/asm/ |
D | mmu.h | 18 unsigned long vdso_base; member
|
D | elf.h | 272 (unsigned long)current->mm->context.vdso_base); \
|
/arch/powerpc/include/asm/book3s/32/ |
D | mmu-hash.h | 83 unsigned long vdso_base; member
|
/arch/mips/kernel/ |
D | vdso.c |
     101  static unsigned long vdso_base(void)  in vdso_base() function
     156  base = get_unmapped_area(NULL, vdso_base(), size, 0, 0);  in arch_setup_additional_pages()
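The MIPS entry only reveals that a local vdso_base() helper supplies the placement hint for get_unmapped_area(). A speculative sketch of such a helper, assuming a page-aligned hint above the stack top that is jittered when address-space randomization is enabled; the real body and the size of the randomization window are not visible in the listing:

    /* Assumed includes for the sketch. */
    #include <linux/mm.h>
    #include <linux/random.h>
    #include <linux/sched.h>
    #include <linux/sizes.h>

    static unsigned long vdso_base(void)
    {
            unsigned long base = STACK_TOP;

            /* Assumed: jitter the hint under ASLR; the window size is a guess. */
            if (current->flags & PF_RANDOMIZE)
                    base += get_random_int() & (SZ_1M - 1);

            return PAGE_ALIGN(base);
    }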
|
/arch/powerpc/include/asm/book3s/64/ |
D | mmu.h | 100 unsigned long vdso_base; member
|