Lines matching refs: end
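
These are the cross-reference hits for the identifier end in the VMA
management code. Judging by the function names and the vm_next links,
the source is the pre-maple-tree mm/mmap.c layout, where VMAs live on
a red-black tree plus a linked list. Throughout, [start, end) is a
half-open byte range: start is the first byte of the region and end is
one past the last, which is why the boundary tests below are strict
(vm_start < end) rather than inclusive.
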
82 unsigned long start, unsigned long end);
565 unsigned long end, struct vm_area_struct **pprev, in find_vma_links() argument
581 if (vma_tmp->vm_start < end) in find_vma_links()
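
The hits at 565 and 581 come from find_vma_links(), which descends the
rbtree to find where [addr, end) would be linked in and fails if an
existing VMA overlaps the range: once a candidate has vm_end > addr,
vm_start < end means the two half-open intervals intersect. A minimal
userspace sketch of that overlap test, with a hypothetical struct vma
standing in for struct vm_area_struct:

    #include <stdbool.h>

    struct vma {
        unsigned long vm_start;     /* first byte of the mapping */
        unsigned long vm_end;       /* one past the last byte    */
    };

    /* [addr, end) and [vm_start, vm_end) intersect iff each range
     * starts before the other one ends. */
    static bool vma_overlaps(const struct vma *v, unsigned long addr,
                             unsigned long end)
    {
        return v->vm_start < end && v->vm_end > addr;
    }
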
643 unsigned long addr, unsigned long end) in count_vma_pages_range() argument
649 vma = find_vma_intersection(mm, addr, end); in count_vma_pages_range()
653 nr_pages = (min(end, vma->vm_end) - in count_vma_pages_range()
660 if (vma->vm_start > end) in count_vma_pages_range()
663 overlap_len = min(end, vma->vm_end) - vma->vm_start; in count_vma_pages_range()
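
Lines 643-663 are count_vma_pages_range(): the first intersecting VMA
may straddle addr, so its overlap is clamped at both ends; every later
VMA starts inside the range, so only its tail needs clamping, and each
byte length becomes a page count via >> PAGE_SHIFT. A self-contained
rendering of the same loop, assuming 4 KiB pages and a simplified
singly linked struct vma:

    #define PAGE_SHIFT 12   /* assume 4 KiB pages for the demo */

    struct vma {
        unsigned long vm_start, vm_end;
        struct vma *vm_next;
    };

    static unsigned long min_ul(unsigned long a, unsigned long b)
    {
        return a < b ? a : b;
    }

    static unsigned long max_ul(unsigned long a, unsigned long b)
    {
        return a > b ? a : b;
    }

    /* first: first VMA intersecting [addr, end), as produced by
     * find_vma_intersection() in the real code. */
    static unsigned long count_pages_range(struct vma *first,
                                           unsigned long addr,
                                           unsigned long end)
    {
        unsigned long nr_pages;
        struct vma *vma = first;

        if (!vma)
            return 0;

        /* the first VMA may straddle addr: clamp both ends */
        nr_pages = (min_ul(end, vma->vm_end) -
                    max_ul(addr, vma->vm_start)) >> PAGE_SHIFT;

        /* later VMAs begin inside the range: clamp the tail only */
        for (vma = vma->vm_next; vma; vma = vma->vm_next) {
            unsigned long overlap_len;

            if (vma->vm_start > end)
                break;
            overlap_len = min_ul(end, vma->vm_end) - vma->vm_start;
            nr_pages += overlap_len >> PAGE_SHIFT;
        }
        return nr_pages;
    }
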
779 unsigned long end, pgoff_t pgoff, struct vm_area_struct *insert, in __vma_adjust() argument
799 if (end >= next->vm_end) { in __vma_adjust()
811 VM_WARN_ON(end != next->vm_end); in __vma_adjust()
826 remove_next = 1 + (end > next->vm_end); in __vma_adjust()
828 end != next->vm_next->vm_end); in __vma_adjust()
830 end = next->vm_end; in __vma_adjust()
843 } else if (end > next->vm_start) { in __vma_adjust()
848 adjust_next = (end - next->vm_start); in __vma_adjust()
852 } else if (end < vma->vm_end) { in __vma_adjust()
858 adjust_next = -(vma->vm_end - end); in __vma_adjust()
883 vma_adjust_trans_huge(orig_vma, start, end, adjust_next); in __vma_adjust()
928 if (end != vma->vm_end) { in __vma_adjust()
929 WRITE_ONCE(vma->vm_end, end); in __vma_adjust()
1038 end = next->vm_end; in __vma_adjust()
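
The __vma_adjust() hits are the case analysis at the heart of this
listing: end is how far the adjusted VMA will now reach, and it decides
the fate of the following VMA. With end >= next->vm_end (799), next is
swallowed whole, and remove_next = 1 + (end > next->vm_end) (826) marks
whether the one after next goes too, the warn at 828 insisting the
expansion then ends exactly at that second VMA's end. With
next->vm_start < end < next->vm_end (843), next survives but loses its
head (adjust_next = end - next->vm_start, 848); with end < vma->vm_end
(852), vma itself shrinks and adjust_next goes negative (858). The new
vm_end is then published with WRITE_ONCE (929), which, together with
the __vma_merge spelling below, suggests a tree carrying the
speculative-page-fault patches, where these fields can be read without
the mmap lock. A hypothetical classifier summarizing the cases, meant
as a reading aid rather than kernel code:

    struct vma { unsigned long vm_start, vm_end; };

    enum adjust_case {
        NEXT_REMOVED,       /* end >= next->vm_end: next is swallowed     */
        NEXT_SHRUNK_FRONT,  /* end > next->vm_start: next loses its head  */
        VMA_SHRUNK,         /* end < vma->vm_end: vma gives pages to next */
        NO_NEXT_EFFECT,
    };

    static enum adjust_case classify(const struct vma *vma,
                                     const struct vma *next,
                                     unsigned long end)
    {
        if (next && end >= next->vm_end)
            return NEXT_REMOVED;        /* remove_next = 1 or 2 */
        if (next && end > next->vm_start)
            return NEXT_SHRUNK_FRONT;   /* adjust_next > 0 */
        if (end < vma->vm_end)
            return VMA_SHRUNK;          /* adjust_next < 0 */
        return NO_NEXT_EFFECT;
    }
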
1218 unsigned long end, unsigned long vm_flags, in __vma_merge() argument
1224 pgoff_t pglen = (end - addr) >> PAGE_SHIFT; in __vma_merge()
1237 if (area && area->vm_end == end) /* cases 6, 7, 8 */ in __vma_merge()
1242 VM_WARN_ON(area && end > area->vm_end); in __vma_merge()
1243 VM_WARN_ON(addr >= end); in __vma_merge()
1257 if (next && end == next->vm_start && in __vma_merge()
1272 end, prev->vm_pgoff, NULL, prev, in __vma_merge()
1283 if (next && end == next->vm_start && in __vma_merge()
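
__vma_merge() (mainline spells it vma_merge()) asks whether the new
range [addr, end) can be folded into a neighbour: pglen at 1224 is its
length in pages, merging forwards requires end == next->vm_start (1257,
1283), and the sanity checks at 1242-1243 insist the range is non-empty
and never reaches past the VMA it would extend. Besides adjacency, the
real code also demands matching flags, file, anon_vma and contiguous
file offsets; a sketch of just the forward adjacency-plus-offset test,
with hypothetical names:

    #define PAGE_SHIFT 12   /* assume 4 KiB pages */

    struct vma {
        unsigned long vm_start, vm_end;
        unsigned long vm_pgoff;     /* offset into the file, in pages */
    };

    /* pgoff: the file offset (in pages) the new range would start at.
     * Flag/file/anon_vma compatibility checks are omitted. */
    static int can_merge_after(const struct vma *next, unsigned long addr,
                               unsigned long end, unsigned long pgoff)
    {
        unsigned long pglen = (end - addr) >> PAGE_SHIFT;

        return next &&
               end == next->vm_start &&          /* new range abuts next    */
               pgoff + pglen == next->vm_pgoff;  /* offsets stay contiguous */
    }
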
2784 unsigned long start, unsigned long end) in unmap_region() argument
2791 tlb_gather_mmu(&tlb, mm, start, end); in unmap_region()
2793 unmap_vmas(&tlb, vma, start, end); in unmap_region()
2812 tlb_finish_mmu(&tlb, start, end); in unmap_region()
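
unmap_region() brackets the real teardown: tlb_gather_mmu() at 2791
opens a batched-invalidation context for [start, end), unmap_vmas()
walks the page tables under it, and tlb_finish_mmu() at 2812 does one
TLB flush for the whole range (later mainline kernels dropped the
start/end arguments from the gather/finish calls). The shape of the
pattern, with illustrative stand-ins rather than the kernel API:

    /* hypothetical stand-ins for the mmu_gather begin/work/flush shape */
    struct gather {
        unsigned long start, end;   /* range being torn down        */
        unsigned long pending;      /* batched work not yet flushed */
    };

    static void gather_begin(struct gather *g, unsigned long start,
                             unsigned long end)
    {
        g->start = start;           /* cf. tlb_gather_mmu() */
        g->end = end;
        g->pending = 0;
    }

    static void gather_finish(struct gather *g)
    {
        g->pending = 0;             /* cf. tlb_finish_mmu(): one flush for
                                     * the whole range, not one per page --
                                     * the point of batching */
    }

    static void unmap_range(struct gather *g, unsigned long start,
                            unsigned long end, unsigned long npages)
    {
        gather_begin(g, start, end);
        while (npages--)
            g->pending++;           /* cf. unmap_vmas() accumulating work */
        gather_finish(g);
    }
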
2821 struct vm_area_struct *prev, unsigned long end) in detach_vmas_to_be_unmapped() argument
2833 } while (vma && vma->vm_start < end); in detach_vmas_to_be_unmapped()
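
detach_vmas_to_be_unmapped() unhooks every VMA whose vm_start is still
below end (2833) and collects the run on a private list for
unmap_region(); by this point __do_munmap() below has already split any
VMA straddling either boundary. A userspace sketch of that splice on a
singly linked list:

    struct vma {
        unsigned long vm_start, vm_end;
        struct vma *vm_next;
    };

    /* Detach every leading node with vm_start < end from *head and
     * return the detached chain (NULL if nothing qualified). */
    static struct vma *detach_below(struct vma **head, unsigned long end)
    {
        struct vma *detached = *head;
        struct vma **tail = head;

        /* advance while nodes still begin inside the doomed range */
        while (*tail && (*tail)->vm_start < end)
            tail = &(*tail)->vm_next;

        if (tail == head)
            return NULL;            /* first node already at/past end */

        *head = *tail;              /* surviving list skips the run */
        *tail = NULL;               /* terminate the detached chain */
        return detached;
    }
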
2942 unsigned long end; in __do_munmap() local
2949 end = start + len; in __do_munmap()
2958 arch_unmap(mm, start, end); in __do_munmap()
2968 if (vma->vm_start >= end) in __do_munmap()
2986 if (end < vma->vm_end && mm->map_count >= sysctl_max_map_count) in __do_munmap()
2996 last = find_vma(mm, end); in __do_munmap()
2997 if (last && end > last->vm_start) { in __do_munmap()
2998 int error = __split_vma(mm, last, end, 1); in __do_munmap()
3014 int error = userfaultfd_unmap_prep(vma, start, end, uf); in __do_munmap()
3024 while (tmp && tmp->vm_start < end) { in __do_munmap()
3035 if (!detach_vmas_to_be_unmapped(mm, vma, prev, end)) in __do_munmap()
3041 unmap_region(mm, vma, prev, start, end); in __do_munmap()
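
Read together, the __do_munmap() hits are the whole recipe: compute
end = start + len (2949), give the architecture its hook (2958), and
return early if the first VMA already starts at or above end (2968).
If start falls inside a VMA it is split, but an interior unmap
(end < vma->vm_end) is refused once map_count has reached
sysctl_max_map_count, since punching a hole nets one extra VMA (2986);
the VMA containing end is split likewise (2996-2998). After userfaultfd
gets a chance to prepare (3014), the doomed run is walked (3024),
detached (3035) and torn down (3041). The boundary splits are plain
half-open interval surgery; a simplified sketch with malloc() standing
in for VMA allocation:

    #include <stdlib.h>

    struct vma {
        unsigned long vm_start, vm_end;
        struct vma *vm_next;
    };

    /* Split v at addr (vm_start < addr < vm_end): v keeps the low
     * half, the new node takes [addr, old vm_end).  Returns NULL on
     * allocation failure, which the caller must treat as fatal, as
     * __do_munmap() does. */
    static struct vma *split_at(struct vma *v, unsigned long addr)
    {
        struct vma *high = malloc(sizeof(*high));

        if (!high)
            return NULL;
        high->vm_start = addr;
        high->vm_end = v->vm_end;
        high->vm_next = v->vm_next;
        v->vm_end = addr;
        v->vm_next = high;
        return high;
    }
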