/arch/hexagon/kernel/ |
D | vdso.c |
    65   unsigned long vdso_base;   in arch_setup_additional_pages() local
    72   vdso_base = STACK_TOP;   in arch_setup_additional_pages()
    74   vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);   in arch_setup_additional_pages()
    75   if (IS_ERR_VALUE(vdso_base)) {   in arch_setup_additional_pages()
    76   ret = vdso_base;   in arch_setup_additional_pages()
    81   ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,   in arch_setup_additional_pages()
    89   mm->context.vdso = (void *)vdso_base;   in arch_setup_additional_pages()
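The hexagon hits above show the pattern that recurs in every arch_setup_additional_pages() in this listing: reserve address space with get_unmapped_area(), install the vDSO page(s) as a special mapping, and record the chosen base in the mm context. A minimal, hedged sketch of that pattern (the vdso_page parameter and the exact VM_* flag set are illustrative; the context member name varies per architecture):

#include <linux/err.h>   /* IS_ERR_VALUE() */
#include <linux/mm.h>    /* get_unmapped_area(), install_special_mapping() */

/* Sketch only: mirrors the hexagon sequence above. "vdso_page" stands in
 * for the architecture's preallocated vDSO page array. */
static int map_vdso_sketch(struct mm_struct *mm, struct page **vdso_page)
{
        unsigned long vdso_base;
        int ret;

        /* Find a free PAGE_SIZE slot, hinted near the top of the stack. */
        vdso_base = get_unmapped_area(NULL, STACK_TOP, PAGE_SIZE, 0, 0);
        if (IS_ERR_VALUE(vdso_base))
                return vdso_base;

        /* Faults on the new VMA are served from vdso_page[]. */
        ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,
                                      VM_READ | VM_EXEC |
                                      VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
                                      vdso_page);
        if (ret)
                return ret;

        /* Remember the base so signal code and unwinders can find the vDSO. */
        mm->context.vdso = (void *)vdso_base;
        return 0;
}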
|
/arch/tile/kernel/ |
D | vdso.c |
    129  unsigned long vdso_base = 0;   in setup_vdso_pages() local
    135  mm->context.vdso_base = 0;   in setup_vdso_pages()
    153  vdso_base = get_unmapped_area(NULL, vdso_base,   in setup_vdso_pages()
    157  if (IS_ERR_VALUE(vdso_base)) {   in setup_vdso_pages()
    158  retval = vdso_base;   in setup_vdso_pages()
    163  vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);   in setup_vdso_pages()
    170  mm->context.vdso_base = vdso_base;   in setup_vdso_pages()
    182  retval = install_special_mapping(mm, vdso_base,   in setup_vdso_pages()
    188  mm->context.vdso_base = 0;   in setup_vdso_pages()
|
D | stack.c | 126 (kbt->it.pc == ((ulong)kbt->task->mm->context.vdso_base + in is_sigreturn()
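The stack.c hit shows the consumer side on tile: the backtracer treats a saved pc that lands on the vDSO's sigreturn trampoline as a signal frame. A hedged sketch of that comparison (the offset parameter name is illustrative):

/* Sketch of the is_sigreturn() test above: a pc equal to the recorded
 * vDSO base plus the trampoline's offset marks a signal frame. */
static bool pc_is_vdso_sigreturn(struct task_struct *task, unsigned long pc,
                                 unsigned long sigreturn_offset)
{
        unsigned long base = task->mm->context.vdso_base;

        return base && pc == base + sigreturn_offset;
}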
|
/arch/s390/kernel/ |
D | vdso.c |
    186  unsigned long vdso_base;   in arch_setup_additional_pages() local
    212  current->mm->context.vdso_base = 0;   in arch_setup_additional_pages()
    221  vdso_base = get_unmapped_area(NULL, 0, vdso_pages << PAGE_SHIFT, 0, 0);   in arch_setup_additional_pages()
    222  if (IS_ERR_VALUE(vdso_base)) {   in arch_setup_additional_pages()
    223  rc = vdso_base;   in arch_setup_additional_pages()
    232  current->mm->context.vdso_base = vdso_base;   in arch_setup_additional_pages()
    244  rc = install_special_mapping(mm, vdso_base, vdso_pages << PAGE_SHIFT,   in arch_setup_additional_pages()
    249  current->mm->context.vdso_base = 0;   in arch_setup_additional_pages()
    257  if (vma->vm_mm && vma->vm_start == vma->vm_mm->context.vdso_base)   in arch_vma_name()
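Besides the multi-page mapping, the s390 entry shows where the recorded base is read back: arch_vma_name() labels the VMA that starts at context.vdso_base, which is what makes the region appear as [vdso] in /proc/<pid>/maps. A sketch reconstructed around the hit at line 257:

/* A VMA whose start matches the recorded base is reported as "[vdso]". */
const char *arch_vma_name(struct vm_area_struct *vma)
{
        if (vma->vm_mm && vma->vm_start == vma->vm_mm->context.vdso_base)
                return "[vdso]";
        return NULL;
}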
|
/arch/arm64/kernel/ |
D | vdso.c |
    164  unsigned long vdso_base, vdso_text_len, vdso_mapping_len;   in arch_setup_additional_pages() local
    173  vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);   in arch_setup_additional_pages()
    174  if (IS_ERR_VALUE(vdso_base)) {   in arch_setup_additional_pages()
    175  ret = ERR_PTR(vdso_base);   in arch_setup_additional_pages()
    178  ret = _install_special_mapping(mm, vdso_base, PAGE_SIZE,   in arch_setup_additional_pages()
    184  vdso_base += PAGE_SIZE;   in arch_setup_additional_pages()
    185  mm->context.vdso = (void *)vdso_base;   in arch_setup_additional_pages()
    186  ret = _install_special_mapping(mm, vdso_base, vdso_text_len,   in arch_setup_additional_pages()
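arm64 differs from the single-page setups above in that it reserves one region and installs two mappings into it: a read-only data page first, then the vDSO text, with context.vdso recording the start of the text (base + PAGE_SIZE). A hedged sketch of that layout (the vm_special_mapping descriptors are illustrative placeholders):

#include <linux/err.h>
#include <linux/mm.h>

/* Sketch only: data page followed by text, as in the arm64 hits above. */
static int map_vdso_arm64_sketch(struct mm_struct *mm,
                                 unsigned long vdso_text_len,
                                 const struct vm_special_mapping *data_spec,
                                 const struct vm_special_mapping *text_spec)
{
        unsigned long vdso_base, vdso_mapping_len = vdso_text_len + PAGE_SIZE;
        struct vm_area_struct *vma;

        vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);
        if (IS_ERR_VALUE(vdso_base))
                return vdso_base;

        /* Data page: readable, never writable or executable by userspace. */
        vma = _install_special_mapping(mm, vdso_base, PAGE_SIZE,
                                       VM_READ | VM_MAYREAD, data_spec);
        if (IS_ERR(vma))
                return PTR_ERR(vma);

        /* The text follows immediately; this is what userspace calls into. */
        vdso_base += PAGE_SIZE;
        mm->context.vdso = (void *)vdso_base;
        vma = _install_special_mapping(mm, vdso_base, vdso_text_len,
                                       VM_READ | VM_EXEC |
                                       VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
                                       text_spec);
        return IS_ERR(vma) ? PTR_ERR(vma) : 0;
}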
|
/arch/powerpc/include/asm/ |
D | mm-arch-hooks.h |
    23   if (old_start == mm->context.vdso_base)   in arch_remap()
    24   mm->context.vdso_base = new_start;   in arch_remap()
|
D | mmu_context.h |
    182  if (start <= mm->context.vdso_base && mm->context.vdso_base < end)   in arch_unmap()
    183  mm->context.vdso_base = 0;   in arch_unmap()
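The two powerpc hooks above (mm-arch-hooks.h and mmu_context.h) keep the recorded base coherent after setup: an mremap() of the vDSO moves vdso_base along with it, and an munmap() covering it clears the field so later signal delivery falls back to an on-stack trampoline instead of jumping to a stale address. A sketch reconstructed around the listed lines (the surrounding signatures follow the generic hook prototypes of this era and are an assumption):

static inline void arch_remap(struct mm_struct *mm,
                              unsigned long old_start, unsigned long old_end,
                              unsigned long new_start, unsigned long new_end)
{
        /* mremap() moved the vDSO: follow it. */
        if (old_start == mm->context.vdso_base)
                mm->context.vdso_base = new_start;
}

static inline void arch_unmap(struct mm_struct *mm, struct vm_area_struct *vma,
                              unsigned long start, unsigned long end)
{
        /* munmap() covered the vDSO: forget it. */
        if (start <= mm->context.vdso_base && mm->context.vdso_base < end)
                mm->context.vdso_base = 0;
}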
|
D | mmu-40x.h | 59 unsigned long vdso_base; member
|
D | mmu-8xx.h | 170 unsigned long vdso_base; member
|
D | mmu-44x.h | 110 unsigned long vdso_base; member
|
D | mmu-book3e.h | 231 unsigned long vdso_base; member
|
/arch/powerpc/kernel/ |
D | vdso.c |
    156  unsigned long vdso_base;   in arch_setup_additional_pages() local
    166  vdso_base = VDSO32_MBASE;   in arch_setup_additional_pages()
    175  vdso_base = 0;   in arch_setup_additional_pages()
    180  vdso_base = VDSO32_MBASE;   in arch_setup_additional_pages()
    183  current->mm->context.vdso_base = 0;   in arch_setup_additional_pages()
    201  vdso_base = get_unmapped_area(NULL, vdso_base,   in arch_setup_additional_pages()
    205  if (IS_ERR_VALUE(vdso_base)) {   in arch_setup_additional_pages()
    206  rc = vdso_base;   in arch_setup_additional_pages()
    211  vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);   in arch_setup_additional_pages()
    218  current->mm->context.vdso_base = vdso_base;   in arch_setup_additional_pages()
    [all …]
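Both the tile entry above and these powerpc hits need the mapping aligned more strictly than PAGE_SIZE, so the get_unmapped_area() request is padded by the alignment slack and the returned address is then rounded up with ALIGN(vdso_base, VDSO_ALIGNMENT). A hedged sketch of that idiom (the helper name and exact padding expression are illustrative; VDSO_ALIGNMENT is the arch-specific constant seen above):

/* Sketch only: over-allocate, then align inside the padded reservation. */
static unsigned long vdso_base_aligned(unsigned long hint,
                                       unsigned long vdso_pages)
{
        unsigned long vdso_base;

        vdso_base = get_unmapped_area(NULL, hint,
                                      (vdso_pages << PAGE_SHIFT) +
                                      ((VDSO_ALIGNMENT - 1) & PAGE_MASK),
                                      0, 0);
        if (IS_ERR_VALUE(vdso_base))
                return vdso_base;

        /* Rounding up stays inside the reservation thanks to the padding. */
        return ALIGN(vdso_base, VDSO_ALIGNMENT);
}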
|
D | signal_32.c |
    1009  if (vdso32_rt_sigtramp && tsk->mm->context.vdso_base) {   in handle_rt_signal32()
    1011  tramp = tsk->mm->context.vdso_base + vdso32_rt_sigtramp;   in handle_rt_signal32()
    1452  if (vdso32_sigtramp && tsk->mm->context.vdso_base) {   in handle_signal32()
    1454  tramp = tsk->mm->context.vdso_base + vdso32_sigtramp;   in handle_signal32()
|
D | signal_64.c |
    805  if (vdso64_rt_sigtramp && tsk->mm->context.vdso_base) {   in handle_rt_signal64()
    806  regs->link = tsk->mm->context.vdso_base + vdso64_rt_sigtramp;   in handle_rt_signal64()
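The signal_32.c and signal_64.c hits are where vdso_base pays off at signal delivery: when the vDSO is mapped and exports a sigtramp symbol, the return trampoline lives in the vDSO; a zero vdso_base (never mapped, or cleared by arch_unmap() above) makes the kernel fall back to a trampoline written onto the signal frame. A hedged sketch of that choice (helper and parameter names are illustrative):

/* Sketch only: returns the vDSO trampoline address, or 0 to tell the
 * caller to emit an on-stack trampoline instead. */
static unsigned long pick_sigtramp(struct task_struct *tsk,
                                   unsigned long vdso_sigtramp_offset)
{
        if (vdso_sigtramp_offset && tsk->mm->context.vdso_base)
                return tsk->mm->context.vdso_base + vdso_sigtramp_offset;

        return 0;
}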
|
/arch/powerpc/perf/ |
D | callchain.c |
    212  if (vdso64_rt_sigtramp && current->mm->context.vdso_base &&   in is_sigreturn_64_address()
    213  nip == current->mm->context.vdso_base + vdso64_rt_sigtramp)   in is_sigreturn_64_address()
    371  if (vdso32_sigtramp && current->mm->context.vdso_base &&   in is_sigreturn_32_address()
    372  nip == current->mm->context.vdso_base + vdso32_sigtramp)   in is_sigreturn_32_address()
    382  if (vdso32_rt_sigtramp && current->mm->context.vdso_base &&   in is_rt_sigreturn_32_address()
    383  nip == current->mm->context.vdso_base + vdso32_rt_sigtramp)   in is_rt_sigreturn_32_address()
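The perf callchain hits mirror the signal-delivery side during unwinding: a sampled nip equal to vdso_base plus one of the sigtramp offsets identifies a signal frame, so the unwinder decodes the saved user context rather than a normal stack frame. A hedged sketch of the common shape of those three predicates:

/* Sketch only: generalizes is_sigreturn_{32,64}_address() above. */
static int nip_is_vdso_sigtramp(unsigned long nip,
                                unsigned long sigtramp_offset)
{
        unsigned long base = current->mm->context.vdso_base;

        return sigtramp_offset && base && nip == base + sigtramp_offset;
}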
|
/arch/tile/include/asm/ |
D | mmu.h | 25 unsigned long vdso_base; member
|
D | processor.h | 173 #define VDSO_BASE ((unsigned long)current->active_mm->context.vdso_base)
|
/arch/powerpc/include/asm/book3s/32/ |
D | mmu-hash.h | 82 unsigned long vdso_base; member
|
/arch/s390/include/asm/ |
D | mmu.h | 19 unsigned long vdso_base; member
|
D | elf.h | 232 (unsigned long)current->mm->context.vdso_base); \
|
/arch/powerpc/include/asm/book3s/64/ |
D | mmu.h | 75 unsigned long vdso_base; member
|
/arch/powerpc/include/uapi/asm/ |
D | elf.h | 185 VDSO_AUX_ENT(AT_SYSINFO_EHDR, current->mm->context.vdso_base); \
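The s390 and powerpc elf.h hits are how userspace learns the address in the first place: the ELF loader emits an AT_SYSINFO_EHDR auxiliary-vector entry carrying vdso_base, which the C library (or any program) can read back. A small userspace check, runnable as an ordinary C program:

#include <elf.h>        /* AT_SYSINFO_EHDR */
#include <stdio.h>
#include <sys/auxv.h>   /* getauxval() */

int main(void)
{
        /* Non-zero when the kernel mapped a vDSO and advertised its base. */
        unsigned long vdso = getauxval(AT_SYSINFO_EHDR);

        printf("vDSO ELF header at %#lx\n", vdso);
        return 0;
}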
|