Lines Matching refs:region
481 struct vm_region *region, *last; in validate_nommu_regions() local
495 region = rb_entry(p, struct vm_region, vm_rb); in validate_nommu_regions()
498 if (unlikely(region->vm_end <= region->vm_start)) in validate_nommu_regions()
500 if (unlikely(region->vm_top < region->vm_end)) in validate_nommu_regions()
502 if (unlikely(region->vm_start < last->vm_top)) in validate_nommu_regions()
515 static void add_nommu_region(struct vm_region *region) in add_nommu_region() argument
522 BUG_ON(region->vm_start & ~PAGE_MASK); in add_nommu_region()
529 if (region->vm_start < pregion->vm_start) in add_nommu_region()
531 else if (region->vm_start > pregion->vm_start) in add_nommu_region()
533 else if (pregion == region) in add_nommu_region()
539 rb_link_node(&region->vm_rb, parent, p); in add_nommu_region()
540 rb_insert_color(&region->vm_rb, &nommu_region_tree); in add_nommu_region()
548 static void delete_nommu_region(struct vm_region *region) in delete_nommu_region() argument
553 rb_erase(&region->vm_rb, &nommu_region_tree); in delete_nommu_region()
579 static void __put_nommu_region(struct vm_region *region) in __put_nommu_region() argument
582 kenter("%p{%d}", region, atomic_read(&region->vm_usage)); in __put_nommu_region()
586 if (atomic_dec_and_test(&region->vm_usage)) { in __put_nommu_region()
587 if (region->vm_top > region->vm_start) in __put_nommu_region()
588 delete_nommu_region(region); in __put_nommu_region()
591 if (region->vm_file) in __put_nommu_region()
592 fput(region->vm_file); in __put_nommu_region()
596 if (region->vm_flags & VM_MAPPED_COPY) { in __put_nommu_region()
598 free_page_series(region->vm_start, region->vm_top); in __put_nommu_region()
600 kmem_cache_free(vm_region_jar, region); in __put_nommu_region()
609 static void put_nommu_region(struct vm_region *region) in put_nommu_region() argument
612 __put_nommu_region(region); in put_nommu_region()
1057 struct vm_region *region, in do_mmap_private() argument
1121 region->vm_flags = vma->vm_flags |= VM_MAPPED_COPY; in do_mmap_private()
1122 region->vm_start = (unsigned long) base; in do_mmap_private()
1123 region->vm_end = region->vm_start + rlen; in do_mmap_private()
1124 region->vm_top = region->vm_start + (total << PAGE_SHIFT); in do_mmap_private()
1126 vma->vm_start = region->vm_start; in do_mmap_private()
1127 vma->vm_end = region->vm_start + len; in do_mmap_private()
1157 free_page_series(region->vm_start, region->vm_end); in do_mmap_private()
1158 region->vm_start = vma->vm_start = 0; in do_mmap_private()
1159 region->vm_end = vma->vm_end = 0; in do_mmap_private()
1160 region->vm_top = 0; in do_mmap_private()
1181 struct vm_region *region; in do_mmap_pgoff() local
1205 region = kmem_cache_zalloc(vm_region_jar, GFP_KERNEL); in do_mmap_pgoff()
1206 if (!region) in do_mmap_pgoff()
1213 atomic_set(&region->vm_usage, 1); in do_mmap_pgoff()
1214 region->vm_flags = vm_flags; in do_mmap_pgoff()
1215 region->vm_pgoff = pgoff; in do_mmap_pgoff()
1222 region->vm_file = file; in do_mmap_pgoff()
1302 fput(region->vm_file); in do_mmap_pgoff()
1303 kmem_cache_free(vm_region_jar, region); in do_mmap_pgoff()
1304 region = pregion; in do_mmap_pgoff()
1330 vma->vm_start = region->vm_start = addr; in do_mmap_pgoff()
1331 vma->vm_end = region->vm_end = addr + len; in do_mmap_pgoff()
1336 vma->vm_region = region; in do_mmap_pgoff()
1342 ret = do_mmap_private(vma, region, len); in do_mmap_pgoff()
1346 add_nommu_region(region); in do_mmap_pgoff()
1365 __put_nommu_region(region); in do_mmap_pgoff()
1380 fput(region->vm_file); in do_mmap_pgoff()
1381 kmem_cache_free(vm_region_jar, region); in do_mmap_pgoff()
1396 kmem_cache_free(vm_region_jar, region); in do_mmap_pgoff()
1420 struct vm_region *region; in split_vma() local
1434 region = kmem_cache_alloc(vm_region_jar, GFP_KERNEL); in split_vma()
1435 if (!region) in split_vma()
1440 kmem_cache_free(vm_region_jar, region); in split_vma()
1446 *region = *vma->vm_region; in split_vma()
1447 new->vm_region = region; in split_vma()
1452 region->vm_top = region->vm_end = new->vm_end = addr; in split_vma()
1454 region->vm_start = new->vm_start = addr; in split_vma()
1455 region->vm_pgoff = new->vm_pgoff += npages; in split_vma()
1487 struct vm_region *region; in shrink_vma() local
1501 region = vma->vm_region; in shrink_vma()
1502 BUG_ON(atomic_read(&region->vm_usage) != 1); in shrink_vma()
1505 delete_nommu_region(region); in shrink_vma()
1506 if (from > region->vm_start) { in shrink_vma()
1507 to = region->vm_top; in shrink_vma()
1508 region->vm_top = region->vm_end = from; in shrink_vma()
1510 region->vm_start = to; in shrink_vma()
1512 add_nommu_region(region); in shrink_vma()