Lines matching references to 'end' in mm/mmap.c (Linux kernel)
78 unsigned long start, unsigned long end);
529 unsigned long end, struct vm_area_struct **pprev, in find_vma_links() argument
545 if (vma_tmp->vm_start < end) in find_vma_links()
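The test at line 545 is half of the standard half-open interval overlap check: [start, end) and [vm_start, vm_end) intersect exactly when each range starts before the other ends. A minimal userspace sketch of that predicate follows; the struct is a hypothetical stand-in, not the kernel's vm_area_struct:

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the kernel's vm_area_struct. */
    struct vma {
        unsigned long vm_start; /* inclusive */
        unsigned long vm_end;   /* exclusive */
    };

    /* Two half-open ranges [start, end) and [vm_start, vm_end) overlap
     * iff each one starts before the other ends, the same predicate
     * find_vma_links() uses to detect an existing mapping in the range. */
    static bool ranges_overlap(const struct vma *v,
                               unsigned long start, unsigned long end)
    {
        return v->vm_start < end && start < v->vm_end;
    }

    int main(void)
    {
        struct vma v = { 0x1000, 0x3000 };
        printf("%d\n", ranges_overlap(&v, 0x2000, 0x4000)); /* 1: overlaps */
        printf("%d\n", ranges_overlap(&v, 0x3000, 0x4000)); /* 0: only touches */
        return 0;
    }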
563 unsigned long addr, unsigned long end) in count_vma_pages_range() argument
569 vma = find_vma_intersection(mm, addr, end); in count_vma_pages_range()
573 nr_pages = (min(end, vma->vm_end) - in count_vma_pages_range()
580 if (vma->vm_start > end) in count_vma_pages_range()
583 overlap_len = min(end, vma->vm_end) - vma->vm_start; in count_vma_pages_range()
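Lines 563-583 sum, for every VMA touching [addr, end), the pages in the clamped overlap min(end, vm_end) - max(addr, vm_start). A self-contained sketch of the same accumulation, assuming 4 KiB pages and a plain sorted singly linked list in place of the mm's VMA chain:

    #include <stdio.h>

    #define PAGE_SHIFT 12 /* assuming 4 KiB pages for the sketch */

    struct vma {
        unsigned long vm_start, vm_end; /* [vm_start, vm_end), page aligned */
        struct vma *vm_next;            /* sorted, non-overlapping list */
    };

    static unsigned long min_ul(unsigned long a, unsigned long b) { return a < b ? a : b; }
    static unsigned long max_ul(unsigned long a, unsigned long b) { return a > b ? a : b; }

    /* Mirrors count_vma_pages_range(): sum, over every vma intersecting
     * [addr, end), the page count of the clamped overlap. */
    static unsigned long count_pages(struct vma *head,
                                     unsigned long addr, unsigned long end)
    {
        unsigned long nr_pages = 0;
        for (struct vma *v = head; v; v = v->vm_next) {
            if (v->vm_start > end)   /* sorted: nothing further can overlap */
                break;
            if (v->vm_end <= addr)   /* ends before the range starts */
                continue;
            nr_pages += (min_ul(end, v->vm_end) - max_ul(addr, v->vm_start))
                        >> PAGE_SHIFT;
        }
        return nr_pages;
    }

    int main(void)
    {
        struct vma b = { 0x5000, 0x8000, NULL };
        struct vma a = { 0x1000, 0x3000, &b };
        /* [0x2000,0x3000) from a is 1 page; [0x5000,0x6000) from b is 1 page */
        printf("%lu\n", count_pages(&a, 0x2000, 0x6000)); /* prints 2 */
        return 0;
    }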
720 unsigned long end, pgoff_t pgoff, struct vm_area_struct *insert, in __vma_adjust() argument
736 if (end >= next->vm_end) { in __vma_adjust()
748 VM_WARN_ON(end != next->vm_end); in __vma_adjust()
763 remove_next = 1 + (end > next->vm_end); in __vma_adjust()
765 end != next->vm_next->vm_end); in __vma_adjust()
767 end != next->vm_end); in __vma_adjust()
769 end = next->vm_end; in __vma_adjust()
782 } else if (end > next->vm_start) { in __vma_adjust()
787 adjust_next = (end - next->vm_start) >> PAGE_SHIFT; in __vma_adjust()
791 } else if (end < vma->vm_end) { in __vma_adjust()
797 adjust_next = -((vma->vm_end - end) >> PAGE_SHIFT); in __vma_adjust()
818 vma_adjust_trans_huge(orig_vma, start, end, adjust_next); in __vma_adjust()
863 if (end != vma->vm_end) { in __vma_adjust()
864 vma->vm_end = end; in __vma_adjust()
972 end = next->vm_end; in __vma_adjust()
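Lines 736-797 choose among three shapes when the resized VMA meets its successor: end at or past next->vm_end swallows next (a remove_next of 2 means the VMA after next goes too, the mprotect case 6), end inside next trims next's head (positive adjust_next, counted in pages), and end below vma->vm_end shrinks vma so next can grow downward (negative adjust_next). A hedged sketch of just that case analysis, with hypothetical names and none of the locking, rmap, or file bookkeeping:

    #include <stdio.h>

    #define PAGE_SHIFT 12 /* assuming 4 KiB pages */

    struct vma { unsigned long vm_start, vm_end; };

    /* Simplified classification of the __vma_adjust() cases quoted above.
     * 'end' is the new end of the vma being adjusted; 'next' follows it.
     * Returns remove_next (0..2) and writes adjust_next in pages:
     *   > 0: next loses (end - next->vm_start) bytes from its head,
     *   < 0: next grows downward because vma is shrinking.          */
    static int classify(const struct vma *vma, const struct vma *next,
                        unsigned long end, long *adjust_next)
    {
        *adjust_next = 0;
        if (next && end >= next->vm_end)    /* vma swallows next, maybe more */
            return 1 + (end > next->vm_end);
        if (next && end > next->vm_start)   /* vma grows into next's head */
            *adjust_next = (long)((end - next->vm_start) >> PAGE_SHIFT);
        else if (next && end < vma->vm_end) /* vma shrinks; next grows down */
            *adjust_next = -(long)((vma->vm_end - end) >> PAGE_SHIFT);
        return 0;
    }

    int main(void)
    {
        struct vma v = { 0x1000, 0x4000 }, n = { 0x4000, 0x8000 };
        long adj;
        int rm = classify(&v, &n, 0x6000, &adj); /* grow into next's head */
        printf("remove_next=%d adjust_next=%ld\n", rm, adj); /* 0, +2 */
        return 0;
    }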
1144 unsigned long end, unsigned long vm_flags, in vma_merge() argument
1150 pgoff_t pglen = (end - addr) >> PAGE_SHIFT; in vma_merge()
1166 if (area && area->vm_end == end) /* cases 6, 7, 8 */ in vma_merge()
1171 VM_WARN_ON(area && end > area->vm_end); in vma_merge()
1172 VM_WARN_ON(addr >= end); in vma_merge()
1186 if (next && end == next->vm_start && in vma_merge()
1201 end, prev->vm_pgoff, NULL, prev); in vma_merge()
1211 if (next && end == next->vm_start && in vma_merge()
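Lines 1144-1211 drive the classic eight-case merge; the end == next->vm_start tests ask whether the new range also abuts the following VMA. Below is a simplified pair of adjacency predicates, hypothetical helpers that keep only the flags and file-offset checks (the kernel also compares file, anon_vma, and vm_ops):

    #include <stdbool.h>

    #define PAGE_SHIFT 12 /* assuming 4 KiB pages */
    typedef unsigned long pgoff_t;

    struct vma {
        unsigned long vm_start, vm_end;
        unsigned long vm_flags;
        pgoff_t vm_pgoff; /* file offset of vm_start, in pages */
    };

    /* Can the new range starting at 'addr' with 'flags' and file page
     * offset 'pgoff' be glued onto the end of 'prev'? */
    static bool can_merge_after(const struct vma *prev, unsigned long addr,
                                unsigned long flags, pgoff_t pgoff)
    {
        return prev && prev->vm_end == addr && prev->vm_flags == flags &&
               prev->vm_pgoff + ((addr - prev->vm_start) >> PAGE_SHIFT) == pgoff;
    }

    /* ...and onto the start of 'next'? This is the end == next->vm_start
     * test quoted at lines 1186 and 1211, 'pglen' pages after 'pgoff'. */
    static bool can_merge_before(const struct vma *next, unsigned long end,
                                 unsigned long flags, pgoff_t pgoff,
                                 unsigned long pglen)
    {
        return next && end == next->vm_start && next->vm_flags == flags &&
               next->vm_pgoff == pgoff + pglen;
    }

    int main(void)
    {
        struct vma prev = { 0x1000, 0x2000, 0, 0 };
        struct vma next = { 0x3000, 0x4000, 0, 2 };
        /* new range [0x2000, 0x3000): pgoff 1, one page long */
        return !(can_merge_after(&prev, 0x2000, 0, 1) &&
                 can_merge_before(&next, 0x3000, 0, 1, 1));
    }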
2620 unsigned long start, unsigned long end) in unmap_region() argument
2626 tlb_gather_mmu(&tlb, mm, start, end); in unmap_region()
2628 unmap_vmas(&tlb, vma, start, end); in unmap_region()
2631 tlb_finish_mmu(&tlb, start, end); in unmap_region()
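Lines 2620-2631 bracket the page-table teardown between tlb_gather_mmu() and tlb_finish_mmu() so TLB invalidation for [start, end) can be batched instead of issued per page. A loose userspace analogy of that gather/flush bracket follows; none of these names are kernel API:

    #include <stdio.h>

    /* Toy analogue of struct mmu_gather: remember the range to flush. */
    struct gather {
        unsigned long start, end;
        unsigned long freed; /* pages torn down while gathering */
    };

    static void gather_begin(struct gather *g, unsigned long start,
                             unsigned long end)
    {
        g->start = start; g->end = end; g->freed = 0;
    }

    /* Stands in for unmap_vmas(): tear down mappings, deferring the flush. */
    static void gather_unmap(struct gather *g, unsigned long from,
                             unsigned long to)
    {
        g->freed += (to - from) >> 12;
    }

    /* Stands in for tlb_finish_mmu(): one flush for the whole range. */
    static void gather_finish(struct gather *g)
    {
        printf("flush [%#lx, %#lx): %lu pages\n", g->start, g->end, g->freed);
    }

    int main(void)
    {
        struct gather tlb;
        gather_begin(&tlb, 0x1000, 0x9000);
        gather_unmap(&tlb, 0x1000, 0x4000);
        gather_unmap(&tlb, 0x6000, 0x9000);
        gather_finish(&tlb);
        return 0;
    }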
2640 struct vm_area_struct *prev, unsigned long end) in detach_vmas_to_be_unmapped() argument
2652 } while (vma && vma->vm_start < end); in detach_vmas_to_be_unmapped()
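Lines 2640-2652 unlink every VMA that starts below end from the mm's list (the rbtree erase and map_count accounting fall outside the match); the do/while keeps going while the next VMA still begins inside the range. A minimal sketch of that detach loop over a plain singly linked list, with hypothetical names:

    #include <stdio.h>
    #include <stddef.h>

    struct vma {
        unsigned long vm_start, vm_end;
        struct vma *vm_next;
    };

    /* Mirrors the loop shape at line 2652: starting at 'vma', the first
     * mapping in the range, detach nodes while they begin before 'end'.
     * '*insertion_point' is the predecessor link (prev->vm_next or the
     * list head); the detached, NULL-terminated sublist is returned.  */
    static struct vma *detach_range(struct vma **insertion_point,
                                    struct vma *vma, unsigned long end)
    {
        struct vma *detached = vma, *tail;
        do {
            tail = vma;
            vma = vma->vm_next;
        } while (vma && vma->vm_start < end);
        *insertion_point = vma;  /* survivors follow the hole */
        tail->vm_next = NULL;    /* terminate the detached sublist */
        return detached;         /* caller unmaps/frees these */
    }

    int main(void)
    {
        struct vma c = { 0x8000, 0x9000, NULL };
        struct vma b = { 0x3000, 0x5000, &c };
        struct vma a = { 0x1000, 0x2000, &b };
        struct vma *gone = detach_range(&a.vm_next, a.vm_next, 0x6000);
        printf("detached [%#lx, %#lx)\n", gone->vm_start, gone->vm_end);
        printf("head now skips to %#lx\n", a.vm_next->vm_start);
        return 0;
    }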
2750 unsigned long end; in __do_munmap() local
2757 end = start + len; in __do_munmap()
2766 arch_unmap(mm, start, end); in __do_munmap()
2776 if (vma->vm_start >= end) in __do_munmap()
2794 if (end < vma->vm_end && mm->map_count >= sysctl_max_map_count) in __do_munmap()
2804 last = find_vma(mm, end); in __do_munmap()
2805 if (last && end > last->vm_start) { in __do_munmap()
2806 int error = __split_vma(mm, last, end, 1); in __do_munmap()
2822 int error = userfaultfd_unmap_prep(vma, start, end, uf); in __do_munmap()
2832 while (tmp && tmp->vm_start < end) { in __do_munmap()
2843 detach_vmas_to_be_unmapped(mm, vma, prev, end); in __do_munmap()
2848 unmap_region(mm, vma, prev, start, end); in __do_munmap()
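Lines 2750-2848 give the munmap skeleton: validate the range, split the VMA straddling start and the one straddling end so both boundaries land on VMA edges, detach everything in between, then hand the range to unmap_region(). A sketch of just the two split decisions, with hypothetical helper names:

    #include <stdbool.h>
    #include <stdio.h>

    struct vma { unsigned long vm_start, vm_end; };

    /* __do_munmap() only ever unmaps whole vmas, so a range that starts
     * or ends strictly inside one forces a split first (the checks
     * quoted around lines 2794-2806). */
    static bool needs_front_split(const struct vma *v, unsigned long start)
    {
        return start > v->vm_start; /* v straddles 'start' */
    }

    static bool needs_back_split(const struct vma *v, unsigned long end)
    {
        return end > v->vm_start && end < v->vm_end; /* v straddles 'end' */
    }

    int main(void)
    {
        struct vma v = { 0x1000, 0x8000 };
        /* munmap [0x2000, 0x6000) out of the middle: two splits,
         * leaving [0x1000,0x2000) and [0x6000,0x8000) mapped. */
        printf("front split: %d\n", needs_front_split(&v, 0x2000)); /* 1 */
        printf("back split:  %d\n", needs_back_split(&v, 0x6000));  /* 1 */
        return 0;
    }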