
Searched refs:vm_start (Results 1 – 25 of 78) sorted by relevance
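
For orientation: vm_start and vm_end are the user-space bounds of a struct vm_area_struct. A VMA covers the half-open range [vm_start, vm_end), so its size in bytes is vm_end - vm_start. A minimal sketch of the two checks that recur throughout the hits below (the helper names are illustrative, not kernel API):

    #include <linux/mm_types.h>

    /* Illustrative helpers only; the kernel open-codes these checks. */
    static inline unsigned long vma_size_bytes(const struct vm_area_struct *vma)
    {
            return vma->vm_end - vma->vm_start;     /* vm_end is exclusive */
    }

    static inline bool vma_contains(const struct vm_area_struct *vma,
                                    unsigned long addr)
    {
            return addr >= vma->vm_start && addr < vma->vm_end;
    }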


/arch/ia64/include/asm/
fb.h:13  if (efi_range_is_wc(vma->vm_start, vma->vm_end - vma->vm_start))  in fb_pgprotect()
/arch/csky/include/asm/
tlb.h:12  flush_cache_range(vma, (vma)->vm_start, (vma)->vm_end); \
tlb.h:18  flush_tlb_range(vma, (vma)->vm_start, (vma)->vm_end); \
/arch/x86/um/
mem_32.c:17  gate_vma.vm_start = FIXADDR_USER_START;  in gate_vma_init()
mem_32.c:49  return (addr >= vma->vm_start) && (addr < vma->vm_end);  in in_gate_area()
mem_64.c:7  if (vma->vm_mm && vma->vm_start == um_vdso_addr)  in arch_vma_name()
/arch/arc/kernel/
arc_hostlink.c:22  if (io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff,  in arc_hl_mmap()
arc_hostlink.c:23  vma->vm_end - vma->vm_start,  in arc_hl_mmap()
troubleshoot.c:91  if (vma && (vma->vm_start <= address)) {  in show_faulting_vma()
troubleshoot.c:101  vma->vm_start < TASK_UNMAPPED_BASE ?  in show_faulting_vma()
troubleshoot.c:102  address : address - vma->vm_start,  in show_faulting_vma()
troubleshoot.c:103  nm, vma->vm_start, vma->vm_end);  in show_faulting_vma()
/arch/parisc/mm/
fault.c:128  if (tree->vm_start > addr) {
fault.c:134  if (prev->vm_next->vm_start > addr)
fault.c:255  vma->vm_start, vma->vm_end);  in show_signal_msg()
fault.c:289  if (!vma || address < vma->vm_start)  in do_page_fault()
fault.c:359  address < vma->vm_start || address >= vma->vm_end) {  in do_page_fault()
fault.c:485  && address >= vma->vm_start  in handle_nadtlb_fault()
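
The fault-handler hits above (and the ia64 and arc ones further down) rely on the find_vma() convention: it returns the first VMA whose vm_end lies above the given address, so the address is actually covered only if it is also at or above vm_start. A condensed sketch of that check (hypothetical helper; the caller is assumed to hold mmap_lock):

    #include <linux/mm.h>

    /* Hypothetical helper; real fault handlers open-code this and also
     * try expand_stack() when the address falls just below a
     * VM_GROWSDOWN vma's vm_start. */
    static bool fault_addr_is_mapped(struct mm_struct *mm, unsigned long address)
    {
            struct vm_area_struct *vma = find_vma(mm, address);

            if (!vma)
                    return false;   /* no VMA ends above the address */
            return address >= vma->vm_start;        /* find_vma guarantees address < vm_end */
    }
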
/arch/powerpc/platforms/powernv/
vas-api.c:159  if ((vma->vm_end - vma->vm_start) > PAGE_SIZE) {  in coproc_mmap()
vas-api.c:161  (vma->vm_end - vma->vm_start), PAGE_SIZE);  in coproc_mmap()
vas-api.c:180  rc = remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff,  in coproc_mmap()
vas-api.c:181  vma->vm_end - vma->vm_start, prot);  in coproc_mmap()
vas-api.c:184  paste_addr, vma->vm_start, rc);  in coproc_mmap()
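
Several hits in this listing (arc_hostlink.c above, vas-api.c here, and proc_powerpc.c and mmapper_kern.c below) follow the same character-device mmap pattern: the requested length is vma->vm_end - vma->vm_start, it is validated against what the device can expose, and the pages are installed at vma->vm_start with remap_pfn_range(). A stripped-down sketch of that pattern (hypothetical device; my_dev_paddr is an assumed physical base address, not taken from the results):

    #include <linux/mm.h>
    #include <linux/fs.h>
    #include <linux/errno.h>

    static phys_addr_t my_dev_paddr;        /* assumed: set at probe time */

    static int my_dev_mmap(struct file *file, struct vm_area_struct *vma)
    {
            unsigned long size = vma->vm_end - vma->vm_start;

            /* Only allow mapping a single device page, as vas-api.c does. */
            if (size > PAGE_SIZE)
                    return -EINVAL;

            /* Install the mapping at the start of the VMA. */
            return remap_pfn_range(vma, vma->vm_start,
                                   (my_dev_paddr >> PAGE_SHIFT) + vma->vm_pgoff,
                                   size, vma->vm_page_prot);
    }
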
/arch/x86/entry/vdso/
vma.c:84  regs->ip = new_vma->vm_start + vdso_land;  in vdso_fix_landing()
vma.c:92  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vdso_mremap()
vma.c:99  current->mm->context.vdso = (void __user *)new_vma->vm_start;  in vdso_mremap()
vma.c:108  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vvar_mremap()
vma.c:150  unsigned long size = vma->vm_end - vma->vm_start;  in vdso_join_timens()
vma.c:153  zap_page_range(vma, vma->vm_start, size);  in vdso_join_timens()
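
The vDSO hits here and in the arm64, s390 and arm groups below share one pattern: the special mapping's ->mremap() hook compares the new VMA's size against the vDSO image and, if it matches, records new_vma->vm_start as the new vDSO base. A simplified sketch (the size variable and the context.vdso field type approximate the x86 code and are not copied from it):

    #include <linux/mm.h>
    #include <linux/sched.h>
    #include <linux/errno.h>

    static unsigned long my_vdso_size;      /* assumed: size of the vDSO image */

    static int my_vdso_mremap(const struct vm_special_mapping *sm,
                              struct vm_area_struct *new_vma)
    {
            unsigned long new_size = new_vma->vm_end - new_vma->vm_start;

            if (new_size != my_vdso_size)
                    return -EINVAL;

            /* Remember where the vDSO now lives so signal return keeps working. */
            current->mm->context.vdso = (void __user *)new_vma->vm_start;
            return 0;
    }
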
/arch/powerpc/kernel/
proc_powerpc.c:33  if ((vma->vm_end - vma->vm_start) > PAGE_SIZE)  in page_map_mmap()
proc_powerpc.c:36  remap_pfn_range(vma, vma->vm_start,  in page_map_mmap()
/arch/arm64/kernel/
vdso.c:85  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in __vdso_remap()
vdso.c:92  current->mm->context.vdso = (void *)new_vma->vm_start;  in __vdso_remap()
vdso.c:151  unsigned long size = vma->vm_end - vma->vm_start;  in vdso_join_timens()
vdso.c:154  zap_page_range(vma, vma->vm_start, size);  in vdso_join_timens()
vdso.c:157  zap_page_range(vma, vma->vm_start, size);  in vdso_join_timens()
vdso.c:225  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vvar_mremap()
/arch/riscv/kernel/
vdso.c:107  if (vma->vm_mm && (vma->vm_start == (long)vma->vm_mm->context.vdso))  in arch_vma_name()
vdso.c:109  if (vma->vm_mm && (vma->vm_start ==  in arch_vma_name()
/arch/um/drivers/
mmapper_kern.c:57  size = vma->vm_end - vma->vm_start;  in mmapper_mmap()
mmapper_kern.c:65  if (remap_pfn_range(vma, vma->vm_start, p_buf >> PAGE_SHIFT, size,  in mmapper_mmap()
/arch/ia64/mm/
fault.c:125  if (( !vma && prev_vma ) || (address < vma->vm_start) )  in ia64_do_page_fault()
fault.c:193  if (REGION_NUMBER(address) != REGION_NUMBER(vma->vm_start)  in ia64_do_page_fault()
fault.c:200  if (REGION_NUMBER(address) != REGION_NUMBER(vma->vm_start)  in ia64_do_page_fault()
/arch/powerpc/include/asm/
fb.h:13  vma->vm_end - vma->vm_start,  in fb_pgprotect()
/arch/arm64/kvm/
mmu.c:508  hva_t vm_start, vm_end;  in stage2_unmap_memslot() local
mmu.c:510  if (!vma || vma->vm_start >= reg_end)  in stage2_unmap_memslot()
mmu.c:516  vm_start = max(hva, vma->vm_start);  in stage2_unmap_memslot()
mmu.c:520  gpa_t gpa = addr + (vm_start - memslot->userspace_addr);  in stage2_unmap_memslot()
mmu.c:521  unmap_stage2_range(&kvm->arch.mmu, gpa, vm_end - vm_start);  in stage2_unmap_memslot()
mmu.c:1428  hva_t vm_start, vm_end;  in kvm_arch_prepare_memory_region() local
mmu.c:1430  if (!vma || vma->vm_start >= reg_end)  in kvm_arch_prepare_memory_region()
mmu.c:1436  vm_start = max(hva, vma->vm_start);  in kvm_arch_prepare_memory_region()
mmu.c:1441  (vm_start - mem->userspace_addr);  in kvm_arch_prepare_memory_region()
mmu.c:1445  pa += vm_start - vma->vm_start;  in kvm_arch_prepare_memory_region()
[all …]
/arch/parisc/kernel/
cache.c:353  addr = mpnt->vm_start + offset;  in flush_dcache_page()
cache.c:526  usize += vma->vm_end - vma->vm_start;  in mm_total_size()
cache.c:565  flush_user_dcache_range_asm(vma->vm_start, vma->vm_end);  in flush_cache_mm()
cache.c:567  flush_user_icache_range_asm(vma->vm_start, vma->vm_end);  in flush_cache_mm()
cache.c:568  flush_tlb_range(vma, vma->vm_start, vma->vm_end);  in flush_cache_mm()
cache.c:577  for (addr = vma->vm_start; addr < vma->vm_end;  in flush_cache_mm()
cache.c:619  for (addr = vma->vm_start; addr < vma->vm_end; addr += PAGE_SIZE) {  in flush_cache_range()
/arch/nios2/kernel/
sys_nios2.c:46  if (vma == NULL || addr < vma->vm_start || addr + len > vma->vm_end) {  in sys_cacheflush()
/arch/s390/kernel/
vdso.c:68  if ((vdso_pages << PAGE_SHIFT) != vma->vm_end - vma->vm_start)  in vdso_mremap()
vdso.c:74  current->mm->context.vdso_base = vma->vm_start;  in vdso_mremap()
/arch/arm/mm/
fault-armv.c:142  pgoff = vma->vm_pgoff + ((addr - vma->vm_start) >> PAGE_SHIFT);  in make_coherent()
fault-armv.c:161  aliases += adjust_pte(mpnt, mpnt->vm_start + offset, pfn);  in make_coherent()
/arch/hexagon/kernel/
vdso.c:85  if (vma->vm_mm && vma->vm_start == (long)vma->vm_mm->context.vdso)  in arch_vma_name()
/arch/sh/kernel/vsyscall/
vsyscall.c:89  if (vma->vm_mm && vma->vm_start == (long)vma->vm_mm->context.vdso)  in arch_vma_name()
/arch/arm/kernel/
process.c:321  gate_vma.vm_start = 0xffff0000;  in gate_vma_init()
process.c:335  return (addr >= gate_vma.vm_start) && (addr < gate_vma.vm_end);  in in_gate_area()
process.c:391  current->mm->context.sigpage = new_vma->vm_start;  in sigpage_mremap()
vdso.c:53  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vdso_mremap()
vdso.c:62  current->mm->context.vdso = new_vma->vm_start;  in vdso_mremap()
/arch/csky/kernel/
vdso.c:82  if (vma->vm_start == (long)vma->vm_mm->context.vdso)  in arch_vma_name()
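
Finally, the arch_vma_name() hits (um, riscv, hexagon, sh, csky) all use the same test: a VMA is labelled "[vdso]" in /proc/<pid>/maps when its vm_start equals the per-mm vDSO base recorded in the context. A minimal sketch of that convention (the exact type of mm->context.vdso differs per architecture):

    #include <linux/mm.h>

    /* Sketch of the arch_vma_name() convention seen above. */
    const char *arch_vma_name(struct vm_area_struct *vma)
    {
            if (vma->vm_mm &&
                vma->vm_start == (unsigned long)vma->vm_mm->context.vdso)
                    return "[vdso]";
            return NULL;
    }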
