Lines matching refs:vm_rb (references to vm_rb, the struct rb_node embedded in struct vm_area_struct that links each VMA into mm->mm_rb)
313 if (vma->vm_rb.rb_left) { in vma_compute_subtree_gap()
314 subtree_gap = rb_entry(vma->vm_rb.rb_left, in vma_compute_subtree_gap()
315 struct vm_area_struct, vm_rb)->rb_subtree_gap; in vma_compute_subtree_gap()
319 if (vma->vm_rb.rb_right) { in vma_compute_subtree_gap()
320 subtree_gap = rb_entry(vma->vm_rb.rb_right, in vma_compute_subtree_gap()
321 struct vm_area_struct, vm_rb)->rb_subtree_gap; in vma_compute_subtree_gap()
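These first hits sit inside vma_compute_subtree_gap(), which caches in vma->rb_subtree_gap the largest free gap anywhere in that VMA's subtree by folding in the values already cached on its left and right children. Below is a minimal sketch of the function as it looks in a pre-maple-tree mm/mmap.c; the gap-to-predecessor helper, called vma_compute_gap() here, is an assumption and has varied between kernel versions.

#include <linux/mm.h>   /* struct vm_area_struct, rb_entry() */

static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
{
        unsigned long max, subtree_gap;

        /* Gap between this VMA and its predecessor (assumed helper name). */
        max = vma_compute_gap(vma);

        /* Fold in the largest gap cached in the left subtree, if any. */
        if (vma->vm_rb.rb_left) {
                subtree_gap = rb_entry(vma->vm_rb.rb_left,
                                struct vm_area_struct, vm_rb)->rb_subtree_gap;
                if (subtree_gap > max)
                        max = subtree_gap;
        }
        /* And the largest gap cached in the right subtree. */
        if (vma->vm_rb.rb_right) {
                subtree_gap = rb_entry(vma->vm_rb.rb_right,
                                struct vm_area_struct, vm_rb)->rb_subtree_gap;
                if (subtree_gap > max)
                        max = subtree_gap;
        }
        return max;
}

rb_entry() is just container_of(): it recovers the enclosing struct vm_area_struct from the embedded struct rb_node vm_rb, which is why nearly every hit in this listing goes through it.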
337 vma = rb_entry(nd, struct vm_area_struct, vm_rb); in browse_rb()
382 vma = rb_entry(nd, struct vm_area_struct, vm_rb); in validate_mm_rb()
434 struct vm_area_struct, vm_rb, in RB_DECLARE_CALLBACKS_MAX() argument
448 vma_gap_callbacks_propagate(&vma->vm_rb, NULL); in RB_DECLARE_CALLBACKS_MAX()
457 rb_insert_augmented(&vma->vm_rb, root, &vma_gap_callbacks); in vma_rb_insert()
467 rb_erase_augmented(&vma->vm_rb, root, &vma_gap_callbacks); in __vma_rb_erase()
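The hits at lines 434-467 are the augmented-rbtree glue. RB_DECLARE_CALLBACKS_MAX() generates the propagate/copy/rotate callbacks that keep rb_subtree_gap correct across rotations, and the insert/erase wrappers hand that callback suite to rb_insert_augmented()/rb_erase_augmented(). A sketch of that glue, assuming the vma_compute_gap() helper from above and the validate_mm_rb() debug check named in the listing:

#include <linux/mm.h>
#include <linux/rbtree_augmented.h>

/*
 * Generate vma_gap_callbacks: propagate/copy/rotate callbacks that keep
 * vma->rb_subtree_gap equal to the max of vma_compute_gap() over the subtree.
 */
RB_DECLARE_CALLBACKS_MAX(static, vma_gap_callbacks,
                         struct vm_area_struct, vm_rb,
                         unsigned long, rb_subtree_gap, vma_compute_gap)

static void vma_gap_update(struct vm_area_struct *vma)
{
        /* Re-propagate rb_subtree_gap from this node up towards the root. */
        vma_gap_callbacks_propagate(&vma->vm_rb, NULL);
}

static inline void vma_rb_insert(struct vm_area_struct *vma,
                                 struct rb_root *root)
{
        /* All rb_subtree_gap values must be consistent prior to insertion. */
        validate_mm_rb(root, NULL);

        rb_insert_augmented(&vma->vm_rb, root, &vma_gap_callbacks);
}

static void __vma_rb_erase(struct vm_area_struct *vma, struct rb_root *root)
{
        /* rb_erase_augmented() is a large inline; instantiate it only here. */
        rb_erase_augmented(&vma->vm_rb, root, &vma_gap_callbacks);
}

vma_gap_update(), the caller of vma_gap_callbacks_propagate() at line 448, is what the rest of mm/mmap.c invokes whenever a VMA's boundaries or neighbours change.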
541 vma_tmp = rb_entry(__rb_parent, struct vm_area_struct, vm_rb); in find_vma_links()
556 *pprev = rb_entry(rb_prev, struct vm_area_struct, vm_rb); in find_vma_links()
608 rb_link_node(&vma->vm_rb, rb_parent, rb_link); in __vma_link_rb()
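find_vma_links() (lines 541-556) is the standard rb-tree insertion walk: it descends mm->mm_rb to find the link pointer and parent under which a new VMA covering [addr, end) should hang, fails if an existing VMA overlaps that range, and reports the predecessor VMA through *pprev. A sketch reconstructed from an older mm/mmap.c; details such as the exact error path may differ between versions.

#include <linux/errno.h>
#include <linux/mm.h>

static int find_vma_links(struct mm_struct *mm, unsigned long addr,
                unsigned long end, struct vm_area_struct **pprev,
                struct rb_node ***rb_link, struct rb_node **rb_parent)
{
        struct rb_node **__rb_link, *__rb_parent, *rb_prev;

        __rb_link = &mm->mm_rb.rb_node;
        rb_prev = __rb_parent = NULL;

        while (*__rb_link) {
                struct vm_area_struct *vma_tmp;

                __rb_parent = *__rb_link;
                vma_tmp = rb_entry(__rb_parent, struct vm_area_struct, vm_rb);

                if (vma_tmp->vm_end > addr) {
                        /* Fail if an existing vma overlaps the area. */
                        if (vma_tmp->vm_start < end)
                                return -ENOMEM;
                        __rb_link = &__rb_parent->rb_left;
                } else {
                        rb_prev = __rb_parent;
                        __rb_link = &__rb_parent->rb_right;
                }
        }

        *pprev = NULL;
        if (rb_prev)
                *pprev = rb_entry(rb_prev, struct vm_area_struct, vm_rb);
        *rb_link = __rb_link;
        *rb_parent = __rb_parent;
        return 0;
}

__vma_link_rb() then threads the new node in with rb_link_node(&vma->vm_rb, rb_parent, rb_link) (line 608) and completes the insertion with the augmented rebalance shown in vma_rb_insert() above.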
1921 vma = rb_entry(mm->mm_rb.rb_node, struct vm_area_struct, vm_rb); in unmapped_area()
1928 if (gap_end >= low_limit && vma->vm_rb.rb_left) { in unmapped_area()
1930 rb_entry(vma->vm_rb.rb_left, in unmapped_area()
1931 struct vm_area_struct, vm_rb); in unmapped_area()
1948 if (vma->vm_rb.rb_right) { in unmapped_area()
1950 rb_entry(vma->vm_rb.rb_right, in unmapped_area()
1951 struct vm_area_struct, vm_rb); in unmapped_area()
1960 struct rb_node *prev = &vma->vm_rb; in unmapped_area()
1964 struct vm_area_struct, vm_rb); in unmapped_area()
1965 if (prev == vma->vm_rb.rb_left) { in unmapped_area()
2025 vma = rb_entry(mm->mm_rb.rb_node, struct vm_area_struct, vm_rb); in unmapped_area_topdown()
2032 if (gap_start <= high_limit && vma->vm_rb.rb_right) { in unmapped_area_topdown()
2034 rb_entry(vma->vm_rb.rb_right, in unmapped_area_topdown()
2035 struct vm_area_struct, vm_rb); in unmapped_area_topdown()
2052 if (vma->vm_rb.rb_left) { in unmapped_area_topdown()
2054 rb_entry(vma->vm_rb.rb_left, in unmapped_area_topdown()
2055 struct vm_area_struct, vm_rb); in unmapped_area_topdown()
2064 struct rb_node *prev = &vma->vm_rb; in unmapped_area_topdown()
2068 struct vm_area_struct, vm_rb); in unmapped_area_topdown()
2069 if (prev == vma->vm_rb.rb_right) { in unmapped_area_topdown()
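unmapped_area() and unmapped_area_topdown() (lines 1921-2069) are where rb_subtree_gap pays off: a subtree is only descended into if it advertises a gap at least as large as the request, and the loops at lines 1960-1965 / 2064-2069 climb back up via rb_parent() once a subtree is exhausted. Below is a deliberately simplified bottom-up sketch under a hypothetical name, unmapped_area_sketch(); it omits the alignment mask, the VM_GROWSDOWN/VM_GROWSUP guard-gap helpers vm_start_gap()/vm_end_gap(), and the real function's final fall-back to the gap above the highest VMA.

#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/mm.h>

/* Find the lowest free range of @length bytes inside [low, high). */
static unsigned long unmapped_area_sketch(struct mm_struct *mm,
                unsigned long length, unsigned long low, unsigned long high)
{
        struct vm_area_struct *vma;
        unsigned long low_limit, high_limit, gap_start, gap_end;

        /* Pre-adjust the limits by the request, as the real code does. */
        if (high < length || low > high - length)
                return -ENOMEM;
        high_limit = high - length;     /* latest acceptable gap start */
        low_limit = low + length;       /* earliest acceptable gap end */

        if (RB_EMPTY_ROOT(&mm->mm_rb))
                return -ENOMEM;
        vma = rb_entry(mm->mm_rb.rb_node, struct vm_area_struct, vm_rb);
        if (vma->rb_subtree_gap < length)
                return -ENOMEM;

        while (true) {
                /* Descend left while that subtree may hold a suitable gap. */
                gap_end = vma->vm_start;
                if (gap_end >= low_limit && vma->vm_rb.rb_left) {
                        struct vm_area_struct *left =
                                rb_entry(vma->vm_rb.rb_left,
                                         struct vm_area_struct, vm_rb);
                        if (left->rb_subtree_gap >= length) {
                                vma = left;
                                continue;
                        }
                }

                gap_start = vma->vm_prev ? vma->vm_prev->vm_end : 0;
check_current:
                /* Gaps are visited in address order; past the limit, give up. */
                if (gap_start > high_limit)
                        return -ENOMEM;
                if (gap_end >= low_limit &&
                    gap_end > gap_start && gap_end - gap_start >= length)
                        return max(gap_start, low); /* clip to caller's limit */

                /* Try the right subtree if it advertises a big-enough gap. */
                if (vma->vm_rb.rb_right) {
                        struct vm_area_struct *right =
                                rb_entry(vma->vm_rb.rb_right,
                                         struct vm_area_struct, vm_rb);
                        if (right->rb_subtree_gap >= length) {
                                vma = right;
                                continue;
                        }
                }

                /* Go back up the rbtree to find the next candidate node. */
                while (true) {
                        struct rb_node *prev = &vma->vm_rb;

                        if (!rb_parent(prev))
                                return -ENOMEM;
                        vma = rb_entry(rb_parent(prev),
                                       struct vm_area_struct, vm_rb);
                        if (prev == vma->vm_rb.rb_left) {
                                gap_start = vma->vm_prev->vm_end;
                                gap_end = vma->vm_start;
                                goto check_current;
                        }
                }
        }
}

unmapped_area_topdown() mirrors this walk with left and right swapped, preferring the highest suitable gap instead of the lowest.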
2263 tmp = rb_entry(rb_node, struct vm_area_struct, vm_rb); in find_vma()
2296 *pprev = rb_node ? rb_entry(rb_node, struct vm_area_struct, vm_rb) : NULL; in find_vma_prev()
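Finally, find_vma() (line 2263) is the classic descent over mm->mm_rb: rb_entry() recovers the VMA from each visited node, and the walk keeps the lowest VMA whose vm_end lies above addr. A sketch under a hypothetical name, find_vma_sketch(), leaving out the per-task VMA cache lookup that pre-maple-tree kernels perform before touching the tree:

#include <linux/mm.h>

/* Look up the first VMA which satisfies addr < vm_end; NULL if none. */
static struct vm_area_struct *find_vma_sketch(struct mm_struct *mm,
                                              unsigned long addr)
{
        struct rb_node *rb_node = mm->mm_rb.rb_node;
        struct vm_area_struct *vma = NULL;

        while (rb_node) {
                struct vm_area_struct *tmp;

                tmp = rb_entry(rb_node, struct vm_area_struct, vm_rb);

                if (tmp->vm_end > addr) {
                        /* Candidate: remember it, then look for a lower one. */
                        vma = tmp;
                        if (tmp->vm_start <= addr)
                                break;  /* addr falls inside this VMA */
                        rb_node = rb_node->rb_left;
                } else {
                        rb_node = rb_node->rb_right;
                }
        }
        return vma;
}

find_vma_prev() reuses this walk; when the lookup misses, it falls back to rb_last(&mm->mm_rb), and the rb_entry() at line 2296 turns that last node back into the predecessor VMA reported through *pprev.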