Lines Matching refs:next (references to the local variable next in mm/mmap.c, listed by source line number and enclosing function)
173 struct vm_area_struct *next = vma->vm_next; in remove_vma() local
182 return next; in remove_vma()
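
The remove_vma() hits show the delete-and-advance idiom: the successor is saved before the VMA is freed so the caller's loop can keep walking. A minimal userspace sketch of the same pattern, using a stand-in struct and helper names of my own rather than the kernel's:

#include <stdlib.h>

/* Stand-in for vm_area_struct; only the field this pattern needs. */
struct vma { struct vma *vm_next; };

/* Free one node and return its successor, the save-then-free idiom
 * seen at lines 173 and 182 above. */
static struct vma *remove_one(struct vma *vma)
{
        struct vma *next = vma->vm_next;   /* save before freeing */

        free(vma);
        return next;
}

/* Caller side: tear down the whole chain. */
static void remove_all(struct vma *vma)
{
        while (vma)
                vma = remove_one(vma);
}
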
192 struct vm_area_struct *next; in SYSCALL_DEFINE1() local
262 next = find_vma(mm, oldbrk); in SYSCALL_DEFINE1()
263 if (next && newbrk + PAGE_SIZE > vm_start_gap(next)) in SYSCALL_DEFINE1()
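
In the brk handler (SYSCALL_DEFINE1), next is only used for the guard check at lines 262-263: the first mapping at or above the old break is looked up, and the expansion is refused if the new break plus one page would intrude into that mapping's start gap. A hedged stand-alone sketch of the check, with PAGE_SIZE and vm_start_gap() modelled locally as assumptions:

#include <stdbool.h>

#define PAGE_SIZE 4096UL   /* assumption: 4 KiB pages */

/* Stand-in: the next mapping's start plus the guard slack that the real
 * vm_start_gap() subtracts for a VM_GROWSDOWN mapping. */
struct vma { unsigned long vm_start; unsigned long guard_gap; };

static unsigned long vm_start_gap(const struct vma *v)
{
        return v->vm_start - v->guard_gap;
}

/* May the break grow to newbrk when 'next' is the first mapping found at
 * or above the old break (possibly NULL)?  Mirrors lines 262-263. */
static bool brk_can_expand(unsigned long newbrk, const struct vma *next)
{
        if (next && newbrk + PAGE_SIZE > vm_start_gap(next))
                return false;   /* new break + guard page would hit 'next' */
        return true;
}
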
685 struct vm_area_struct *next; in __vma_unlink_common() local
688 next = vma->vm_next; in __vma_unlink_common()
690 prev->vm_next = next; in __vma_unlink_common()
694 prev->vm_next = next; in __vma_unlink_common()
696 mm->mmap = next; in __vma_unlink_common()
698 if (next) in __vma_unlink_common()
699 next->vm_prev = prev; in __vma_unlink_common()
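
The __vma_unlink_common() hits are a plain doubly linked list unlink with a special case for the list head, mm->mmap. A self-contained sketch of the same pointer surgery with stand-in types:

#include <stddef.h>

struct vma { struct vma *vm_next, *vm_prev; };
struct mm  { struct vma *mmap; };            /* mm->mmap is the list head */

/* Unlink 'vma' from mm's list, mirroring lines 685-699 above: point the
 * predecessor (or the head pointer) at vma's successor, then fix up the
 * successor's back pointer if it exists. */
static void vma_unlink(struct mm *mm, struct vma *vma, struct vma *prev)
{
        struct vma *next = vma->vm_next;

        if (prev)
                prev->vm_next = next;
        else
                mm->mmap = next;             /* vma was the first mapping */
        if (next)
                next->vm_prev = prev;
}
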
724 struct vm_area_struct *next = vma->vm_next, *orig_vma = vma; in __vma_adjust() local
733 if (next && !insert) { in __vma_adjust()
736 if (end >= next->vm_end) { in __vma_adjust()
743 if (next == expand) { in __vma_adjust()
748 VM_WARN_ON(end != next->vm_end); in __vma_adjust()
755 VM_WARN_ON(file != next->vm_file); in __vma_adjust()
756 swap(vma, next); in __vma_adjust()
763 remove_next = 1 + (end > next->vm_end); in __vma_adjust()
765 end != next->vm_next->vm_end); in __vma_adjust()
767 end != next->vm_end); in __vma_adjust()
769 end = next->vm_end; in __vma_adjust()
772 exporter = next; in __vma_adjust()
779 if (remove_next == 2 && !next->anon_vma) in __vma_adjust()
780 exporter = next->vm_next; in __vma_adjust()
782 } else if (end > next->vm_start) { in __vma_adjust()
787 adjust_next = (end - next->vm_start) >> PAGE_SHIFT; in __vma_adjust()
788 exporter = next; in __vma_adjust()
799 importer = next; in __vma_adjust()
826 uprobe_munmap(next, next->vm_start, next->vm_end); in __vma_adjust()
842 anon_vma = next->anon_vma; in __vma_adjust()
844 VM_WARN_ON(adjust_next && next->anon_vma && in __vma_adjust()
845 anon_vma != next->anon_vma); in __vma_adjust()
849 anon_vma_interval_tree_pre_update_vma(next); in __vma_adjust()
856 vma_interval_tree_remove(next, root); in __vma_adjust()
869 next->vm_start += adjust_next << PAGE_SHIFT; in __vma_adjust()
870 next->vm_pgoff += adjust_next; in __vma_adjust()
875 vma_interval_tree_insert(next, root); in __vma_adjust()
886 __vma_unlink_prev(mm, next, vma); in __vma_adjust()
897 __vma_unlink_common(mm, next, NULL, false, vma); in __vma_adjust()
899 __remove_shared_vm_struct(next, file, mapping); in __vma_adjust()
911 if (!next) in __vma_adjust()
914 vma_gap_update(next); in __vma_adjust()
921 anon_vma_interval_tree_post_update_vma(next); in __vma_adjust()
931 uprobe_mmap(next); in __vma_adjust()
936 uprobe_munmap(next, next->vm_start, next->vm_end); in __vma_adjust()
939 if (next->anon_vma) in __vma_adjust()
940 anon_vma_merge(vma, next); in __vma_adjust()
942 mpol_put(vma_policy(next)); in __vma_adjust()
943 vm_area_free(next); in __vma_adjust()
956 next = vma->vm_next; in __vma_adjust()
968 next = vma; in __vma_adjust()
972 end = next->vm_end; in __vma_adjust()
975 else if (next) in __vma_adjust()
976 vma_gap_update(next); in __vma_adjust()
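
Most of the __vma_adjust() hits belong to two bookkeeping outcomes for next (lines 736-799): either next is merged away entirely (remove_next is 1, or 2 when next's successor goes with it, per the warnings at lines 765-767), or the new end lands inside next and next is shrunk from the front by adjust_next pages (lines 787 and 869-870). A heavily reduced, userspace-only sketch of that classification and of the front-shrink arithmetic, ignoring the insert/expand special cases:

#define PAGE_SHIFT 12   /* assumption: 4 KiB pages */

struct vma { unsigned long vm_start, vm_end, vm_pgoff; struct vma *vm_next; };

/* Reduced model of what happens to 'next' when vma's new end is 'end'.
 * Returns how many following VMAs would be removed (0, 1 or 2). */
static int classify_next(struct vma *next, unsigned long end)
{
        if (!next)
                return 0;
        if (end >= next->vm_end)
                /* remove_next = 1 + (end > next->vm_end): next is merged
                 * away, and when this is 2 so is next->vm_next (line 765). */
                return 1 + (end > next->vm_end);
        if (end > next->vm_start) {
                /* end lands inside next: shrink next from the front by
                 * adjust_next pages, keeping vm_pgoff in step (869-870). */
                unsigned long adjust_next =
                        (end - next->vm_start) >> PAGE_SHIFT;

                next->vm_start += adjust_next << PAGE_SHIFT;
                next->vm_pgoff += adjust_next;
        }
        return 0;
}
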
1151 struct vm_area_struct *area, *next; in vma_merge() local
1162 next = prev->vm_next; in vma_merge()
1164 next = mm->mmap; in vma_merge()
1165 area = next; in vma_merge()
1167 next = next->vm_next; in vma_merge()
1186 if (next && end == next->vm_start && in vma_merge()
1187 mpol_equal(policy, vma_policy(next)) && in vma_merge()
1188 can_vma_merge_before(next, vm_flags, in vma_merge()
1194 next->anon_vma, NULL)) { in vma_merge()
1197 next->vm_end, prev->vm_pgoff, NULL, in vma_merge()
1211 if (next && end == next->vm_start && in vma_merge()
1212 mpol_equal(policy, vma_policy(next)) && in vma_merge()
1213 can_vma_merge_before(next, vm_flags, in vma_merge()
1219 addr, prev->vm_pgoff, NULL, next); in vma_merge()
1221 err = __vma_adjust(area, addr, next->vm_end, in vma_merge()
1222 next->vm_pgoff - pglen, NULL, next); in vma_merge()
1228 area = next; in vma_merge()
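
In vma_merge() every use of next is the same question: can the new range be glued onto the following mapping? The new end must touch next->vm_start and the attributes must be compatible (lines 1186-1194 and 1211-1219) before __vma_adjust() is asked to extend prev or next over it. A simplified stand-alone sketch of that precondition, with the policy and flag comparisons collapsed into plain field equality:

#include <stdbool.h>

struct vma {
        unsigned long vm_start, vm_end, vm_pgoff;
        unsigned long vm_flags;
        int           policy;       /* stand-in for the NUMA policy pointer */
};

/* Rough equivalent of the "end == next->vm_start && mpol_equal(...) &&
 * can_vma_merge_before(...)" test in the listing; pglen is the length of
 * the new range in pages. */
static bool can_merge_with_next(const struct vma *next,
                                unsigned long end, unsigned long pgoff,
                                unsigned long pglen,
                                unsigned long vm_flags, int policy)
{
        if (!next || end != next->vm_start)
                return false;                    /* ranges must be adjacent */
        if (next->vm_flags != vm_flags || next->policy != policy)
                return false;                    /* attributes must match */
        /* File-backed mappings must also be offset-contiguous. */
        return next->vm_pgoff == pgoff + pglen;
}
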
2355 struct vm_area_struct *next; in expand_upwards() local
2375 next = vma->vm_next; in expand_upwards()
2376 if (next && next->vm_start < gap_addr && in expand_upwards()
2377 (next->vm_flags & (VM_WRITE|VM_READ|VM_EXEC))) { in expand_upwards()
2378 if (!(next->vm_flags & VM_GROWSUP)) in expand_upwards()
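
expand_upwards() consults next only to enforce the stack guard gap: if the following mapping starts inside gap_addr and is an ordinary accessible mapping, growth is refused unless that mapping is itself VM_GROWSUP (lines 2376-2379). A stand-alone sketch of the decision, with the flag bits defined locally as placeholders:

#include <stdbool.h>

#define VM_READ    0x1UL
#define VM_WRITE   0x2UL
#define VM_EXEC    0x4UL
#define VM_GROWSUP 0x8UL   /* bit values are placeholders, not the kernel's */

struct vma { unsigned long vm_start, vm_flags; struct vma *vm_next; };

/* May an upward-growing stack expand so that its guard region reaches
 * gap_addr?  Mirrors the check quoted at lines 2376-2379. */
static bool may_grow_up_to(const struct vma *vma, unsigned long gap_addr)
{
        const struct vma *next = vma->vm_next;

        if (next && next->vm_start < gap_addr &&
            (next->vm_flags & (VM_WRITE | VM_READ | VM_EXEC))) {
                if (!(next->vm_flags & VM_GROWSUP))
                        return false;  /* would collide with a normal mapping */
        }
        return true;
}
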
2622 struct vm_area_struct *next = prev ? prev->vm_next : mm->mmap; in unmap_region() local
2630 next ? next->vm_start : USER_PGTABLES_CEILING); in unmap_region()
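
unmap_region() uses next once, to pick the upper bound for page-table freeing: the start of the following mapping if one exists, otherwise USER_PGTABLES_CEILING (line 2630). A one-function sketch, with the ceiling constant as a placeholder:

/* Placeholder; the real USER_PGTABLES_CEILING is architecture-defined. */
#define USER_PGTABLES_CEILING 0UL

struct vma { unsigned long vm_start; };

/* Upper bound handed to page-table freeing, as on line 2630. */
static unsigned long pgtable_free_ceiling(const struct vma *next)
{
        return next ? next->vm_start : USER_PGTABLES_CEILING;
}
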
2941 struct vm_area_struct *next; in SYSCALL_DEFINE5() local
2943 for (next = vma->vm_next; next; next = next->vm_next) { in SYSCALL_DEFINE5()
2945 if (next->vm_start != next->vm_prev->vm_end) in SYSCALL_DEFINE5()
2948 if (next->vm_file != vma->vm_file) in SYSCALL_DEFINE5()
2951 if (next->vm_flags != vma->vm_flags) in SYSCALL_DEFINE5()
2954 if (start + size <= next->vm_end) in SYSCALL_DEFINE5()
2958 if (!next) in SYSCALL_DEFINE5()
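
The remap_file_pages() hits (SYSCALL_DEFINE5, lines 2941-2958) walk forward through vm_next to confirm the requested range is covered by back-to-back VMAs with the same file and flags, stopping once a VMA's end reaches start + size; running off the list means the range is not fully covered. A self-contained sketch of that walk, with stand-in types:

#include <stdbool.h>
#include <stddef.h>

struct vma {
        unsigned long vm_start, vm_end, vm_flags;
        const void   *vm_file;           /* stand-in for struct file * */
        struct vma   *vm_next, *vm_prev;
};

/* Is [start, start + size) covered by 'vma' plus a gapless run of
 * following VMAs with the same file and flags?  Mirrors the loop at
 * lines 2943-2958. */
static bool range_is_contiguous(const struct vma *vma,
                                unsigned long start, unsigned long size)
{
        const struct vma *next;

        if (start + size <= vma->vm_end)
                return true;                     /* one VMA already covers it */

        for (next = vma->vm_next; next; next = next->vm_next) {
                if (next->vm_start != next->vm_prev->vm_end)
                        return false;            /* hole between mappings */
                if (next->vm_file != vma->vm_file)
                        return false;            /* different backing file */
                if (next->vm_flags != vma->vm_flags)
                        return false;            /* different flags */
                if (start + size <= next->vm_end)
                        return true;             /* range fully covered */
        }
        return false;                            /* ran off the end of the list */
}
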