
Searched refs:vm_end (Results 1 – 25 of 66) sorted by relevance
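A note on the field these hits share: in struct vm_area_struct, vm_end is exclusive, i.e. the first address past the mapping, so the size of a VMA is vm_end - vm_start and a containment test uses a strict '<', as in the dma_mmap() and in_gate_area() hits below. A minimal userspace sketch of those two idioms (the struct here is a stand-in, not the kernel definition):

#include <stdio.h>

/* Stand-in for the two struct vm_area_struct fields these hits use;
 * vm_end is exclusive (first address past the mapping). */
struct vma_stub {
    unsigned long vm_start;  /* inclusive start address */
    unsigned long vm_end;    /* exclusive end address */
};

int main(void)
{
    struct vma_stub vma = { .vm_start = 0x1000, .vm_end = 0x5000 };

    /* Size in bytes, as in dma_mmap() and hose_mmap_page_range(). */
    unsigned long size = vma.vm_end - vma.vm_start;

    /* Containment test, as in in_gate_area(); note the strict '<'. */
    unsigned long addr = 0x4fff;
    int inside = addr >= vma.vm_start && addr < vma.vm_end;

    printf("size=%#lx inside=%d\n", size, inside);
    return 0;
}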


/arch/metag/kernel/
dma.c 102 unsigned long vm_end; member
110 .vm_end = CONSISTENT_END,
117 unsigned long addr = head->vm_start, end = head->vm_end - size; in metag_vm_region_alloc()
132 addr = c->vm_end; in metag_vm_region_alloc()
143 new->vm_end = addr + size; in metag_vm_region_alloc()
287 if ((c->vm_end - c->vm_start) != size) { in dma_free_coherent()
289 __func__, c->vm_end - c->vm_start, size); in dma_free_coherent()
291 size = c->vm_end - c->vm_start; in dma_free_coherent()
317 flush_tlb_kernel_range(c->vm_start, c->vm_end); in dma_free_coherent()
343 user_size = (vma->vm_end - vma->vm_start) >> PAGE_SHIFT; in dma_mmap()
[all …]
/arch/powerpc/mm/
dma-noncoherent.c 87 unsigned long vm_end; member
93 .vm_end = CONSISTENT_END,
99 unsigned long addr = head->vm_start, end = head->vm_end - size; in ppc_vm_region_alloc()
114 addr = c->vm_end; in ppc_vm_region_alloc()
125 new->vm_end = addr + size; in ppc_vm_region_alloc()
270 if ((c->vm_end - c->vm_start) != size) { in __dma_free_coherent()
272 __func__, c->vm_end - c->vm_start, size); in __dma_free_coherent()
274 size = c->vm_end - c->vm_start; in __dma_free_coherent()
297 flush_tlb_kernel_range(c->vm_start, c->vm_end); in __dma_free_coherent()
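The metag dma.c and powerpc dma-noncoherent.c hits above are the same first-fit allocator: walk the sorted list of existing vm_region entries inside the consistent-DMA window, stop at the first gap large enough, and carve [addr, addr + size) out of it. A simplified, self-contained rendering of that search loop (the region list is reduced to a sorted array; names are illustrative, not the kernel's):

#include <stddef.h>

/* Simplified vm_region, as in the metag/powerpc hits above. */
struct region { unsigned long vm_start, vm_end; };

/* First-fit search over regions[0..n) (sorted, non-overlapping) inside
 * the window [win_start, win_end), mirroring *_vm_region_alloc().
 * Returns the start of a free gap of 'size' bytes, or 0 on failure. */
static unsigned long region_alloc(const struct region *regions, size_t n,
                                  unsigned long win_start,
                                  unsigned long win_end,
                                  unsigned long size)
{
    unsigned long addr = win_start;      /* addr = head->vm_start */
    unsigned long end = win_end - size;  /* end = head->vm_end - size */

    for (size_t i = 0; i < n; i++) {
        if (addr + size <= regions[i].vm_start)
            return addr;                 /* gap before this region fits */
        addr = regions[i].vm_end;        /* skip past it */
        if (addr > end)
            return 0;                    /* window exhausted */
    }
    return addr;                         /* gap after the last region
                                          * (assumes size <= window) */
}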
/arch/sparc/include/asm/
tlb_32.h 6 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
11 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
/arch/avr32/include/asm/
tlb.h 12 flush_cache_range(vma, vma->vm_start, vma->vm_end)
15 flush_tlb_range(vma, vma->vm_start, vma->vm_end)
/arch/arc/include/asm/
tlb.h 32 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
39 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
/arch/metag/include/asm/
tlb.h 14 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
20 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
/arch/xtensa/include/asm/
tlb.h 29 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
35 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
/arch/parisc/include/asm/
tlb.h 11 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
16 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
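The six tlb.h hits above (sparc, avr32, arc, metag, xtensa, parisc) are the same pair of mmu_gather hooks: flush the cache over [vma->vm_start, vma->vm_end) when unmapping of a VMA starts, and the TLB over the same range when it ends. A composite sketch with stub primitives (the real headers differ per architecture and also consult tlb->fullmm):

/* Stubs standing in for the per-arch cache/TLB flush primitives. */
struct vm_area_struct { unsigned long vm_start, vm_end; };
static void flush_cache_range(struct vm_area_struct *vma,
                              unsigned long start, unsigned long end) {}
static void flush_tlb_range(struct vm_area_struct *vma,
                            unsigned long start, unsigned long end) {}

/* Composite of the tlb_start_vma()/tlb_end_vma() hooks above. */
#define tlb_start_vma(tlb, vma) \
    flush_cache_range(vma, (vma)->vm_start, (vma)->vm_end)
#define tlb_end_vma(tlb, vma) \
    flush_tlb_range(vma, (vma)->vm_start, (vma)->vm_end)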
/arch/x86/um/
mem_32.c 22 gate_vma.vm_end = FIXADDR_USER_END; in gate_vma_init()
53 return (addr >= vma->vm_start) && (addr < vma->vm_end); in in_gate_area()
/arch/arm64/kernel/
sys_compat.c 45 if (end > vma->vm_end) in do_compat_cache_op()
46 end = vma->vm_end; in do_compat_cache_op()
/arch/metag/mm/
hugetlbpage.c 158 if (vma && vma->vm_end <= addr) in hugetlb_get_unmapped_area_existing()
172 if (vma->vm_end & HUGEPT_MASK) { in hugetlb_get_unmapped_area_existing()
175 addr = vma->vm_end; in hugetlb_get_unmapped_area_existing()
180 addr = ALIGN_HUGEPT(vma->vm_end); in hugetlb_get_unmapped_area_existing()
/arch/powerpc/include/asm/
fb.h 12 vma->vm_end - vma->vm_start, in fb_pgprotect()
/arch/ia64/include/asm/
fb.h 12 if (efi_range_is_wc(vma->vm_start, vma->vm_end - vma->vm_start)) in fb_pgprotect()
/arch/mips/include/asm/
tlb.h 11 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
/arch/arc/kernel/
arc_hostlink.c 26 vma->vm_end - vma->vm_start, in arc_hl_mmap()
/arch/alpha/kernel/
binfmt_loader.c 25 loader = bprm->vma->vm_end - sizeof(void *); in load_binary()
pci-sysfs.c 31 vma->vm_end - vma->vm_start, in hose_mmap_page_range()
41 nr = (vma->vm_end - vma->vm_start) >> PAGE_SHIFT; in __pci_mmap_fits()
258 nr = (vma->vm_end - vma->vm_start) >> PAGE_SHIFT; in __legacy_mmap_fits()
/arch/sh/kernel/
sys_sh.c 73 if (vma == NULL || addr < vma->vm_start || addr + len > vma->vm_end) { in sys_cacheflush()
/arch/powerpc/kernel/
proc_powerpc.c 46 if ((vma->vm_end - vma->vm_start) > PAGE_SIZE) in page_map_mmap()
/arch/parisc/mm/
fault.c 196 vma->vm_start, vma->vm_end); in show_signal_msg()
306 address < vma->vm_start || address >= vma->vm_end) { in do_page_fault()
/arch/sh/mm/
cache-sh5.c 180 if (!vma || (aligned_start <= vma->vm_end)) { in sh64_icache_inv_user_page_range()
185 vma_end = vma->vm_end; in sh64_icache_inv_user_page_range()
194 aligned_start = vma->vm_end; /* Skip to start of next region */ in sh64_icache_inv_user_page_range()
/arch/xtensa/kernel/
syscall.c 91 addr = vmm->vm_end; in arch_get_unmapped_area()
/arch/arm/kvm/
mmu.c 730 hva_t vm_start, vm_end; in stage2_unmap_memslot() local
739 vm_end = min(reg_end, vma->vm_end); in stage2_unmap_memslot()
743 unmap_stage2_range(kvm, gpa, vm_end - vm_start); in stage2_unmap_memslot()
745 hva = vm_end; in stage2_unmap_memslot()
1437 hva_t vm_start, vm_end; in kvm_arch_prepare_memory_region() local
1455 vm_end = min(reg_end, vma->vm_end); in kvm_arch_prepare_memory_region()
1466 vm_end - vm_start, in kvm_arch_prepare_memory_region()
1471 hva = vm_end; in kvm_arch_prepare_memory_region()
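Both arm/kvm/mmu.c hits above use the same walk: for a memslot range [hva, reg_end), visit one VMA at a time, clamp the chunk to the overlap of range and VMA (vm_end = min(reg_end, vma->vm_end)), act on it, then advance hva to vm_end. A self-contained sketch of that walk (find_vma_stub() is a hypothetical stand-in for the kernel's find_vma()):

#include <stdio.h>

struct vma { unsigned long vm_start, vm_end; };

/* Hypothetical stand-in for find_vma(): first VMA with vm_end > addr. */
static const struct vma *find_vma_stub(const struct vma *v, int n,
                                       unsigned long addr)
{
    for (int i = 0; i < n; i++)
        if (v[i].vm_end > addr)
            return &v[i];
    return NULL;
}

/* Clamp-and-advance walk over [hva, reg_end), as in
 * stage2_unmap_memslot() and kvm_arch_prepare_memory_region(). */
static void walk_range(const struct vma *v, int n,
                       unsigned long hva, unsigned long reg_end)
{
    while (hva < reg_end) {
        const struct vma *vma = find_vma_stub(v, n, hva);
        if (!vma || vma->vm_start >= reg_end)
            break;
        unsigned long start = hva > vma->vm_start ? hva : vma->vm_start;
        unsigned long end = reg_end < vma->vm_end ? reg_end : vma->vm_end;
        /* The kernel acts here, e.g. unmap_stage2_range(kvm, gpa, end - start). */
        printf("chunk [%#lx, %#lx)\n", start, end);
        hva = end; /* hva = vm_end: continue after this chunk */
    }
}

int main(void)
{
    const struct vma vmas[] = { { 0x1000, 0x3000 }, { 0x4000, 0x8000 } };
    walk_range(vmas, 2, 0x2000, 0x6000);
    return 0;
}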
/arch/parisc/kernel/
cache.c 466 usize += vma->vm_end - vma->vm_start; in mm_total_size()
499 flush_user_dcache_range_asm(vma->vm_start, vma->vm_end); in flush_cache_mm()
502 flush_user_icache_range_asm(vma->vm_start, vma->vm_end); in flush_cache_mm()
511 for (addr = vma->vm_start; addr < vma->vm_end; in flush_cache_mm()
/arch/ia64/mm/
fault.c 206 if (!(prev_vma && (prev_vma->vm_flags & VM_GROWSUP) && (address == prev_vma->vm_end))) { in ia64_do_page_fault()
225 if (address > vma->vm_end + PAGE_SIZE - sizeof(long)) in ia64_do_page_fault()
