Lines Matching refs:end (mm/mmap.c)

Each hit below is prefixed with its line number in the file and suffixed with the enclosing function; a trailing tag such as "argument" or "local" marks the role the symbol `end` plays at its declaration site.

78 		unsigned long start, unsigned long end);
527 unsigned long end, struct vm_area_struct **pprev, in find_vma_links() argument
543 if (vma_tmp->vm_start < end) in find_vma_links()
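The find_vma_links() hits (527, 543) come from the walk that locates the rbtree insertion point; `vma_tmp->vm_start < end` is one half of the standard half-open overlap test (the other half, against `vm_end`, sits on a neighboring line that does not match `end`). A minimal stand-alone sketch of that test, with illustrative names:

#include <stdbool.h>

struct range { unsigned long start, end; };

/* Two half-open ranges [a.start, a.end) and [b.start, b.end)
 * intersect iff each one starts before the other one ends. */
static bool ranges_overlap(struct range a, struct range b)
{
        return a.start < b.end && b.start < a.end;
}
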
561 unsigned long addr, unsigned long end) in count_vma_pages_range() argument
567 vma = find_vma_intersection(mm, addr, end); in count_vma_pages_range()
571 nr_pages = (min(end, vma->vm_end) - in count_vma_pages_range()
578 if (vma->vm_start > end) in count_vma_pages_range()
581 overlap_len = min(end, vma->vm_end) - vma->vm_start; in count_vma_pages_range()
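The count_vma_pages_range() hits (561-581) show the clamping arithmetic: each VMA's byte contribution is clipped to the query window with min()-style bounds before being shifted down to pages. A user-space sketch of the same loop, assuming simplified types and 4 KiB pages (PAGE_SHIFT = 12):

#define PAGE_SHIFT 12

struct vma { unsigned long vm_start, vm_end; struct vma *vm_next; };

/* Count the pages of [addr, end) covered by VMAs; mirrors the
 * clipping at 571 and 581, but over a plain sorted linked list. */
static unsigned long pages_in_range(const struct vma *vma,
                                    unsigned long addr, unsigned long end)
{
        unsigned long nr_pages = 0;

        for (; vma && vma->vm_start <= end; vma = vma->vm_next) {
                unsigned long lo = vma->vm_start > addr ? vma->vm_start : addr;
                unsigned long hi = vma->vm_end < end ? vma->vm_end : end;

                if (hi > lo)
                        nr_pages += (hi - lo) >> PAGE_SHIFT;
        }
        return nr_pages;
}
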
718 unsigned long end, pgoff_t pgoff, struct vm_area_struct *insert, in __vma_adjust() argument
734 if (end >= next->vm_end) { in __vma_adjust()
746 VM_WARN_ON(end != next->vm_end); in __vma_adjust()
761 remove_next = 1 + (end > next->vm_end); in __vma_adjust()
763 end != next->vm_next->vm_end); in __vma_adjust()
765 end != next->vm_end); in __vma_adjust()
767 end = next->vm_end; in __vma_adjust()
780 } else if (end > next->vm_start) { in __vma_adjust()
785 adjust_next = (end - next->vm_start) >> PAGE_SHIFT; in __vma_adjust()
789 } else if (end < vma->vm_end) { in __vma_adjust()
795 adjust_next = -((vma->vm_end - end) >> PAGE_SHIFT); in __vma_adjust()
816 vma_adjust_trans_huge(orig_vma, start, end, adjust_next); in __vma_adjust()
861 if (end != vma->vm_end) { in __vma_adjust()
862 vma->vm_end = end; in __vma_adjust()
970 end = next->vm_end; in __vma_adjust()
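Most of the __vma_adjust() hits (718-970) are the comparisons that decide how the VMA following the adjusted one is affected: `end >= next->vm_end` removes next outright (possibly two VMAs, per the `remove_next = 1 + (end > next->vm_end)` hit at 761), `end > next->vm_start` shrinks next from below, and `end < vma->vm_end` shrinks vma itself, with `adjust_next` carrying the delta in pages. The shape of that decision, extracted into a stand-alone function with invented names (decision shape only, not the kernel function):

#define PAGE_SHIFT 12

enum next_effect { NEXT_REMOVED, NEXT_SHRUNK, VMA_SHRUNK, NO_CHANGE };

/* adjust_next is a signed page count, negative when vma itself
 * gives up pages; the case labels cite the hit lines above. */
static enum next_effect classify_end(unsigned long end,
                                     unsigned long vma_end,
                                     unsigned long next_start,
                                     unsigned long next_end,
                                     long *adjust_next)
{
        *adjust_next = 0;
        if (end >= next_end)                    /* 734: next fully covered */
                return NEXT_REMOVED;
        if (end > next_start) {                 /* 780: next loses its head */
                *adjust_next = (long)((end - next_start) >> PAGE_SHIFT);
                return NEXT_SHRUNK;
        }
        if (end < vma_end) {                    /* 789: vma loses its tail */
                *adjust_next = -(long)((vma_end - end) >> PAGE_SHIFT);
                return VMA_SHRUNK;
        }
        return NO_CHANGE;
}
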
1142 unsigned long end, unsigned long vm_flags, in vma_merge() argument
1148 pgoff_t pglen = (end - addr) >> PAGE_SHIFT; in vma_merge()
1164 if (area && area->vm_end == end) /* cases 6, 7, 8 */ in vma_merge()
1169 VM_WARN_ON(area && end > area->vm_end); in vma_merge()
1170 VM_WARN_ON(addr >= end); in vma_merge()
1184 if (next && end == next->vm_start && in vma_merge()
1199 end, prev->vm_pgoff, NULL, prev); in vma_merge()
1209 if (next && end == next->vm_start && in vma_merge()
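In the vma_merge() hits (1142-1209), `end` feeds two things: the request length in pages (`pglen`, 1148) and the abutment tests that drive the merge cases, where `area->vm_end == end` selects the variants that extend up to the existing area (the "cases 6, 7, 8" comment at 1164) and `end == next->vm_start` (1184, 1209) detects a request that butts exactly against the following VMA. Both tests in isolation, with simplified types:

#define PAGE_SHIFT 12

struct vma { unsigned long vm_start, vm_end; };

static unsigned long pglen(unsigned long addr, unsigned long end)
{
        return (end - addr) >> PAGE_SHIFT;   /* request length in pages */
}

/* A merge with the following VMA is only considered when the new
 * range ends exactly where that VMA begins: no gap, no overlap. */
static int abuts_next(const struct vma *next, unsigned long end)
{
        return next && end == next->vm_start;
}
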
2613 unsigned long start, unsigned long end) in unmap_region() argument
2620 tlb_gather_mmu(&tlb, mm, start, end); in unmap_region()
2622 unmap_vmas(&tlb, vma, start, end); in unmap_region()
2641 tlb_finish_mmu(&tlb, start, end); in unmap_region()
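The unmap_region() hits (2613-2641) bracket the teardown in the mmu_gather protocol: tlb_gather_mmu() opens a flush batch for [start, end), unmap_vmas() clears page tables while queueing work into it, and tlb_finish_mmu() issues the deferred TLB flush (the range-taking signatures shown here predate the mmu_gather signature change around Linux 5.12). A toy user-space model of that gather/finish pattern; every name in it is invented for illustration and none of it is the kernel API:

#include <stdio.h>

struct gather { unsigned long start, end, queued; };

static void gather_begin(struct gather *g, unsigned long start,
                         unsigned long end)
{
        g->start = start;
        g->end = end;
        g->queued = 0;
}

/* Defer work per page instead of flushing immediately. */
static void gather_page(struct gather *g, unsigned long addr)
{
        if (addr >= g->start && addr < g->end)
                g->queued++;
}

static void gather_finish(struct gather *g)
{
        printf("flush %lu pages in [%#lx, %#lx)\n",
               g->queued, g->start, g->end);
        g->queued = 0;
}
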
2650 struct vm_area_struct *prev, unsigned long end) in detach_vmas_to_be_unmapped() argument
2662 } while (vma && vma->vm_start < end); in detach_vmas_to_be_unmapped()
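detach_vmas_to_be_unmapped() (2650, 2662) advances through the list while `vma->vm_start < end`, unhooking every VMA that begins inside the doomed range; the kernel also repairs the rbtree and mm counters, which the following list-only sketch (simplified types, illustrative name) leaves out:

struct vma { unsigned long vm_start, vm_end; struct vma *vm_next; };

/* Detach from *link the run of consecutive VMAs whose vm_start lies
 * below end, returning the detached sublist for later teardown. */
static struct vma *detach_below(struct vma **link, unsigned long end)
{
        struct vma *first = *link, *vma = first, *tail = NULL;

        while (vma && vma->vm_start < end) {
                tail = vma;
                vma = vma->vm_next;
        }
        if (!tail)
                return NULL;          /* nothing starts inside the range */

        *link = vma;                  /* main list skips the detached run */
        tail->vm_next = NULL;         /* terminate the detached sublist */
        return first;
}
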
2771 unsigned long end; in __do_munmap() local
2778 end = start + len; in __do_munmap()
2787 arch_unmap(mm, start, end); in __do_munmap()
2797 if (vma->vm_start >= end) in __do_munmap()
2815 if (end < vma->vm_end && mm->map_count >= sysctl_max_map_count) in __do_munmap()
2825 last = find_vma(mm, end); in __do_munmap()
2826 if (last && end > last->vm_start) { in __do_munmap()
2827 int error = __split_vma(mm, last, end, 1); in __do_munmap()
2843 int error = userfaultfd_unmap_prep(vma, start, end, uf); in __do_munmap()
2853 while (tmp && tmp->vm_start < end) { in __do_munmap()
2864 if (!detach_vmas_to_be_unmapped(mm, vma, prev, end)) in __do_munmap()
2870 unmap_region(mm, vma, prev, start, end); in __do_munmap()
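The __do_munmap() hits (2771-2870) read as the whole teardown sequence: compute `end = start + len` (2778), call the arch hook (2787), bail out early when the first VMA already starts at or past `end` (2797), refuse a start-side split that would push past sysctl_max_map_count (2815), split the VMAs straddling `start` and `end` so the range begins and ends on VMA boundaries (2825-2827), run userfaultfd preparation over the range (2843, 2853), then detach (2864) and unmap (2870). The boundary-split step in isolation, as a user-space sketch with simplified types and invented helper names (the kernel's __split_vma() does much more bookkeeping):

#include <stdlib.h>

struct vma { unsigned long vm_start, vm_end; struct vma *vm_next; };

/* Split @vma at @addr: the new tail [addr, old vm_end) is linked in
 * right after it. A stand-in for the kernel's __split_vma(). */
static int split_at(struct vma *vma, unsigned long addr)
{
        struct vma *tail = malloc(sizeof(*tail));

        if (!tail)
                return -1;
        tail->vm_start = addr;
        tail->vm_end = vma->vm_end;
        tail->vm_next = vma->vm_next;
        vma->vm_end = addr;
        vma->vm_next = tail;
        return 0;
}

/* Make [start, end) begin and end on VMA boundaries, mirroring the
 * two splits around 2825-2827. Splitting at end first keeps the
 * start split correct even when first == last (one straddling VMA);
 * the kernel instead re-looks up the end VMA after the first split. */
static int align_range(struct vma *first, struct vma *last,
                       unsigned long start, unsigned long end)
{
        if (last && last->vm_start < end && end < last->vm_end &&
            split_at(last, end))
                return -1;
        if (first->vm_start < start && split_at(first, start))
                return -1;
        return 0;
}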