Lines matching the whole word "va" in mm/vmalloc.c
436 * All vmap_area objects in this tree are sorted by va->va_start
454 va_size(struct vmap_area *va) in va_size() argument
456 return (va->va_end - va->va_start); in va_size()
462 struct vmap_area *va; in get_subtree_max_size() local
464 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
465 return va ? va->subtree_max_size : 0; in get_subtree_max_size()
472 compute_subtree_max_size(struct vmap_area *va) in compute_subtree_max_size() argument
474 return max3(va_size(va), in compute_subtree_max_size()
475 get_subtree_max_size(va->rb_node.rb_left), in compute_subtree_max_size()
476 get_subtree_max_size(va->rb_node.rb_right)); in compute_subtree_max_size()
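The three helpers above (va_size(), get_subtree_max_size(), compute_subtree_max_size()) maintain the invariant behind the augmented free tree: every node caches the size of the largest free area found anywhere in its subtree. A minimal sketch of that invariant, using a hypothetical plain binary-tree type (struct fnode and its helpers are illustrative names, not the kernel's rb_node-based vmap_area):

struct fnode {
        unsigned long va_start, va_end;         /* free range [va_start, va_end) */
        unsigned long subtree_max_size;         /* largest range in this subtree */
        struct fnode *left, *right;
};

static unsigned long fnode_size(const struct fnode *n)
{
        return n->va_end - n->va_start;
}

static unsigned long subtree_max(const struct fnode *n)
{
        return n ? n->subtree_max_size : 0;
}

/* Same rule as compute_subtree_max_size(): own size vs. both children. */
static unsigned long recompute_subtree_max(const struct fnode *n)
{
        unsigned long m = fnode_size(n);

        if (subtree_max(n->left) > m)
                m = subtree_max(n->left);
        if (subtree_max(n->right) > m)
                m = subtree_max(n->right);
        return m;
}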
498 struct vmap_area *va; in __find_vmap_area() local
500 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
501 if (addr < va->va_start) in __find_vmap_area()
503 else if (addr >= va->va_end) in __find_vmap_area()
506 return va; in __find_vmap_area()
521 find_va_links(struct vmap_area *va, in find_va_links() argument
541 * it link, where the new va->rb_node will be attached to. in find_va_links()
551 if (va->va_start < tmp_va->va_end && in find_va_links()
552 va->va_end <= tmp_va->va_start) in find_va_links()
554 else if (va->va_end > tmp_va->va_start && in find_va_links()
555 va->va_start >= tmp_va->va_end) in find_va_links()
559 va->va_start, va->va_end, tmp_va->va_start, tmp_va->va_end); in find_va_links()
576 * The red-black tree where we try to find VA neighbors in get_va_next_sibling()
588 link_va(struct vmap_area *va, struct rb_root *root, in link_va() argument
592 * VA is still not in the list, but we can in link_va()
602 rb_link_node(&va->rb_node, parent, link); in link_va()
606 * to the tree. We do not set va->subtree_max_size to in link_va()
615 rb_insert_augmented(&va->rb_node, in link_va()
617 va->subtree_max_size = 0; in link_va()
619 rb_insert_color(&va->rb_node, root); in link_va()
623 list_add(&va->list, head); in link_va()
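find_va_links() and link_va() follow the standard two-step insertion pattern for kernel red-black trees: first walk down from the root to locate the parent and the empty child-link slot, then attach the node and let the rbtree core rebalance. The sketch below is the generic pattern from the kernel's rbtree documentation, keyed on a single field rather than the [va_start, va_end) comparison find_va_links() actually performs; struct demo_node and demo_insert() are hypothetical names.

#include <linux/rbtree.h>

struct demo_node {
        struct rb_node rb;
        unsigned long key;
};

static void demo_insert(struct demo_node *new, struct rb_root *root)
{
        struct rb_node **link = &root->rb_node;
        struct rb_node *parent = NULL;

        /* Step 1: walk down to find the parent and the empty link slot. */
        while (*link) {
                struct demo_node *cur = rb_entry(*link, struct demo_node, rb);

                parent = *link;
                if (new->key < cur->key)
                        link = &(*link)->rb_left;
                else
                        link = &(*link)->rb_right;
        }

        /* Step 2: attach the node, then let the rbtree core rebalance. */
        rb_link_node(&new->rb, parent, link);
        rb_insert_color(&new->rb, root);
}

link_va() additionally adds the node to the address-sorted list and, when linking into the free tree, uses rb_insert_augmented() so the callbacks keep subtree_max_size consistent across rotations; the new node's own value starts at 0 and is filled in by a later augment_tree_propagate_from() call.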
627 unlink_va(struct vmap_area *va, struct rb_root *root) in unlink_va() argument
629 if (WARN_ON(RB_EMPTY_NODE(&va->rb_node))) in unlink_va()
633 rb_erase_augmented(&va->rb_node, in unlink_va()
636 rb_erase(&va->rb_node, root); in unlink_va()
638 list_del(&va->list); in unlink_va()
639 RB_CLEAR_NODE(&va->rb_node); in unlink_va()
646 struct vmap_area *va; in augment_tree_propagate_check() local
649 list_for_each_entry(va, &free_vmap_area_list, list) { in augment_tree_propagate_check()
650 computed_size = compute_subtree_max_size(va); in augment_tree_propagate_check()
651 if (computed_size != va->subtree_max_size) in augment_tree_propagate_check()
653 va_size(va), va->subtree_max_size); in augment_tree_propagate_check()
660 * levels starting from the VA point. The propagation must be done
661 * when the VA size is modified by changing its va_start/va_end, or
662 * when a new VA is inserted into the tree.
665 * - After VA has been inserted to the tree(free path);
666 * - After VA has been shrunk(allocation path);
667 * - After VA has been increased(merging path).
686 augment_tree_propagate_from(struct vmap_area *va) in augment_tree_propagate_from() argument
693 free_vmap_area_rb_augment_cb_propagate(&va->rb_node, NULL); in augment_tree_propagate_from()
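augment_tree_propagate_from() pushes an updated size bottom-up: after a node's range shrinks, grows, or is newly linked, each ancestor recomputes its cached maximum, and the walk can stop as soon as a recomputation changes nothing. A hedged sketch using a hypothetical node type that also carries a parent pointer (the kernel does this through the rbtree augment callbacks instead):

struct pnode {
        unsigned long va_start, va_end;
        unsigned long subtree_max_size;
        struct pnode *left, *right, *parent;
};

static unsigned long max3u(unsigned long a, unsigned long b, unsigned long c)
{
        unsigned long m = a > b ? a : b;

        return m > c ? m : c;
}

/* Walk from the modified node towards the root, refreshing cached maxima. */
static void propagate_up(struct pnode *n)
{
        while (n) {
                unsigned long new_max = max3u(n->va_end - n->va_start,
                        n->left  ? n->left->subtree_max_size  : 0,
                        n->right ? n->right->subtree_max_size : 0);

                if (new_max == n->subtree_max_size)
                        break;          /* ancestors are already correct */

                n->subtree_max_size = new_max;
                n = n->parent;
        }
}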
701 insert_vmap_area(struct vmap_area *va, in insert_vmap_area() argument
707 link = find_va_links(va, root, NULL, &parent); in insert_vmap_area()
709 link_va(va, root, parent, link, head); in insert_vmap_area()
713 insert_vmap_area_augment(struct vmap_area *va, in insert_vmap_area_augment() argument
721 link = find_va_links(va, NULL, from, &parent); in insert_vmap_area_augment()
723 link = find_va_links(va, root, NULL, &parent); in insert_vmap_area_augment()
726 link_va(va, root, parent, link, head); in insert_vmap_area_augment()
727 augment_tree_propagate_from(va); in insert_vmap_area_augment()
732 * Merge de-allocated chunk of VA memory with previous
734 * free area is inserted. If VA has been merged, it is
743 merge_or_add_vmap_area(struct vmap_area *va, in merge_or_add_vmap_area() argument
753 * Find a place in the tree where VA potentially will be in merge_or_add_vmap_area()
756 link = find_va_links(va, root, NULL, &parent); in merge_or_add_vmap_area()
761 * Get next node of VA to check if merging can be done. in merge_or_add_vmap_area()
770 * |<------VA------>|<-----Next----->| in merge_or_add_vmap_area()
776 if (sibling->va_start == va->va_end) { in merge_or_add_vmap_area()
777 sibling->va_start = va->va_start; in merge_or_add_vmap_area()
780 kmem_cache_free(vmap_area_cachep, va); in merge_or_add_vmap_area()
783 va = sibling; in merge_or_add_vmap_area()
791 * |<-----Prev----->|<------VA------>| in merge_or_add_vmap_area()
797 if (sibling->va_end == va->va_start) { in merge_or_add_vmap_area()
806 unlink_va(va, root); in merge_or_add_vmap_area()
808 sibling->va_end = va->va_end; in merge_or_add_vmap_area()
811 kmem_cache_free(vmap_area_cachep, va); in merge_or_add_vmap_area()
814 va = sibling; in merge_or_add_vmap_area()
821 link_va(va, root, parent, link, head); in merge_or_add_vmap_area()
826 augment_tree_propagate_from(va); in merge_or_add_vmap_area()
827 return va; in merge_or_add_vmap_area()
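merge_or_add_vmap_area() is what keeps the free tree from fragmenting: a returned range is glued onto the free area that ends where it starts and/or the one that starts where it ends, and only becomes a new node if neither neighbor is adjacent. A minimal sketch of just that coalescing decision over plain ranges (hypothetical struct range and merge_freed_range(); tree and list bookkeeping omitted):

#include <stdbool.h>

struct range {
        unsigned long start, end;       /* [start, end) */
};

/*
 * Try to absorb the freed range into an adjacent free neighbor.
 * Returns true if it was merged, false if it must become a new node.
 * "prev"/"next" stand for the list neighbors found via find_va_links().
 */
static bool merge_freed_range(struct range *freed,
                              struct range *prev, struct range *next)
{
        bool merged = false;

        /* |<--- freed --->|<--- next --->|  -> grow next downwards */
        if (next && next->start == freed->end) {
                next->start = freed->start;
                freed = next;
                merged = true;
        }

        /* |<--- prev --->|<--- freed --->|  -> grow prev upwards */
        if (prev && prev->end == freed->start) {
                prev->end = freed->end;
                /*
                 * If we already merged into "next" above, that node is
                 * now redundant; the kernel unlinks and frees it here
                 * (omitted in this sketch).
                 */
                merged = true;
        }

        return merged;
}

Both free_vmap_area() and __purge_vmap_area_lazy() funnel freed ranges through this path, which is why the free tree stays coalesced.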
831 is_within_this_va(struct vmap_area *va, unsigned long size, in is_within_this_va() argument
836 if (va->va_start > vstart) in is_within_this_va()
837 nva_start_addr = ALIGN(va->va_start, align); in is_within_this_va()
846 return (nva_start_addr + size <= va->va_end); in is_within_this_va()
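is_within_this_va() answers a single question: after rounding the candidate start address up to the requested alignment, does a block of size bytes still fit inside this free area? A hedged sketch of that arithmetic, with a local ALIGN_UP() macro and fits_in_range() standing in for the kernel helpers (align must be a power of two):

#include <stdbool.h>

/* Round addr up to the next multiple of align (align is a power of two). */
#define ALIGN_UP(addr, align)   (((addr) + (align) - 1) & ~((align) - 1))

static bool fits_in_range(unsigned long range_start, unsigned long range_end,
                          unsigned long size, unsigned long align,
                          unsigned long vstart)
{
        unsigned long nva_start_addr;

        /* Start either at the range itself or at the allowed window start. */
        if (range_start > vstart)
                nva_start_addr = ALIGN_UP(range_start, align);
        else
                nva_start_addr = ALIGN_UP(vstart, align);

        /* Guard against wraparound when size is huge. */
        if (nva_start_addr + size < nva_start_addr)
                return false;

        return nva_start_addr + size <= range_end;
}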
858 struct vmap_area *va; in find_vmap_lowest_match() local
869 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
872 vstart < va->va_start) { in find_vmap_lowest_match()
875 if (is_within_this_va(va, size, align, vstart)) in find_vmap_lowest_match()
876 return va; in find_vmap_lowest_match()
894 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
895 if (is_within_this_va(va, size, align, vstart)) in find_vmap_lowest_match()
896 return va; in find_vmap_lowest_match()
899 vstart <= va->va_start) { in find_vmap_lowest_match()
917 struct vmap_area *va; in find_vmap_lowest_linear_match() local
919 list_for_each_entry(va, &free_vmap_area_list, list) { in find_vmap_lowest_linear_match()
920 if (!is_within_this_va(va, size, align, vstart)) in find_vmap_lowest_linear_match()
923 return va; in find_vmap_lowest_linear_match()
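find_vmap_lowest_match() is where subtree_max_size pays off: while descending it can skip any subtree whose cached maximum is smaller than the request, and it tries the left (lower-address) side first so the lowest suitable address wins; find_vmap_lowest_linear_match() is the O(n) list walk used only to cross-check it. A simplified recursive sketch over the hypothetical fnode type (the kernel version is iterative and also re-ascends the tree):

#include <stddef.h>

struct fnode {                          /* same shape as the earlier sketch */
        unsigned long va_start, va_end;
        unsigned long subtree_max_size;
        struct fnode *left, *right;
};

static int fnode_fits(const struct fnode *n, unsigned long size,
                      unsigned long align, unsigned long vstart)
{
        unsigned long start = n->va_start > vstart ? n->va_start : vstart;

        start = (start + align - 1) & ~(align - 1);     /* round up to align */
        return start + size <= n->va_end;
}

static struct fnode *lowest_match(struct fnode *n, unsigned long size,
                                  unsigned long align, unsigned long vstart)
{
        struct fnode *found;

        if (!n || n->subtree_max_size < size)
                return NULL;            /* nothing in this subtree can hold it */

        /* Prefer the left (lower-address) subtree so the lowest match wins. */
        found = lowest_match(n->left, size, align, vstart);
        if (found)
                return found;

        if (fnode_fits(n, size, align, vstart))
                return n;

        return lowest_match(n->right, size, align, vstart);
}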
957 classify_va_fit_type(struct vmap_area *va, in classify_va_fit_type() argument
962 /* Check if it is within VA. */ in classify_va_fit_type()
963 if (nva_start_addr < va->va_start || in classify_va_fit_type()
964 nva_start_addr + size > va->va_end) in classify_va_fit_type()
968 if (va->va_start == nva_start_addr) { in classify_va_fit_type()
969 if (va->va_end == nva_start_addr + size) in classify_va_fit_type()
973 } else if (va->va_end == nva_start_addr + size) { in classify_va_fit_type()
983 adjust_va_to_fit_type(struct vmap_area *va, in adjust_va_to_fit_type() argument
991 * No need to split VA, it fully fits. in adjust_va_to_fit_type()
997 unlink_va(va, &free_vmap_area_root); in adjust_va_to_fit_type()
998 kmem_cache_free(vmap_area_cachep, va); in adjust_va_to_fit_type()
1001 * Split left edge of fit VA. in adjust_va_to_fit_type()
1007 va->va_start += size; in adjust_va_to_fit_type()
1010 * Split right edge of fit VA. in adjust_va_to_fit_type()
1016 va->va_end = nva_start_addr; in adjust_va_to_fit_type()
1019 * Split no edge of fit VA. in adjust_va_to_fit_type()
1060 lva->va_start = va->va_start; in adjust_va_to_fit_type()
1064 * Shrink this VA to remaining size. in adjust_va_to_fit_type()
1066 va->va_start = nva_start_addr + size; in adjust_va_to_fit_type()
1072 augment_tree_propagate_from(va); in adjust_va_to_fit_type()
1075 insert_vmap_area_augment(lva, &va->rb_node, in adjust_va_to_fit_type()
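classify_va_fit_type() and adjust_va_to_fit_type() work as a pair: the first decides how the request [nva_start_addr, nva_start_addr + size) sits inside the chosen free area, the second then consumes the whole area, trims one edge, or splits it in two (the "no edge" case, which needs a second vmap_area). A hedged sketch of both steps over a plain range type; the enum names echo the kernel's FL/LE/RE/NE naming, but the types, the helper names, and the choice of which half keeps the original node are illustrative only:

struct frange {
        unsigned long start, end;       /* free range [start, end) */
};

enum fit_kind {
        NO_FIT,         /* request does not lie inside this range      */
        FL_FIT,         /* full fit: request covers the whole range    */
        LE_FIT,         /* flush with the left (low-address) edge      */
        RE_FIT,         /* flush with the right (high-address) edge    */
        NE_FIT,         /* strictly inside: touches neither edge       */
};

static enum fit_kind classify_fit(const struct frange *r,
                                  unsigned long start, unsigned long size)
{
        unsigned long end = start + size;

        if (start < r->start || end > r->end)
                return NO_FIT;

        if (start == r->start)
                return end == r->end ? FL_FIT : LE_FIT;
        if (end == r->end)
                return RE_FIT;
        return NE_FIT;
}

/*
 * Carve [start, start + size) out of *r. For NE_FIT the caller supplies
 * a spare range (*hi) that receives the upper remainder; in the kernel
 * that spare is a second vmap_area taken from a per-CPU preload or the
 * slab cache. Returns 0 on success, -1 if the request does not fit.
 */
static int carve_from_range(struct frange *r, struct frange *hi,
                            unsigned long start, unsigned long size)
{
        switch (classify_fit(r, start, size)) {
        case FL_FIT:            /* whole range consumed; kernel drops the node */
                r->start = r->end = 0;
                break;
        case LE_FIT:            /* keep only the tail above the request */
                r->start = start + size;
                break;
        case RE_FIT:            /* keep only the head below the request */
                r->end = start;
                break;
        case NE_FIT:            /* split: head stays in *r, tail goes to *hi */
                hi->start = start + size;
                hi->end = r->end;
                r->end = start;
                break;
        default:
                return -1;
        }
        return 0;
}

__alloc_vmap_area() and pcpu_get_vm_areas() are the two callers in this listing that chain classify_va_fit_type() into adjust_va_to_fit_type().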
1091 struct vmap_area *va; in __alloc_vmap_area() local
1095 va = find_vmap_lowest_match(size, align, vstart); in __alloc_vmap_area()
1096 if (unlikely(!va)) in __alloc_vmap_area()
1099 if (va->va_start > vstart) in __alloc_vmap_area()
1100 nva_start_addr = ALIGN(va->va_start, align); in __alloc_vmap_area()
1109 type = classify_va_fit_type(va, nva_start_addr, size); in __alloc_vmap_area()
1114 ret = adjust_va_to_fit_type(va, nva_start_addr, size, type); in __alloc_vmap_area()
1128 static void free_vmap_area(struct vmap_area *va) in free_vmap_area() argument
1134 unlink_va(va, &vmap_area_root); in free_vmap_area()
1141 merge_or_add_vmap_area(va, &free_vmap_area_root, &free_vmap_area_list); in free_vmap_area()
1154 struct vmap_area *va, *pva; in alloc_vmap_area() local
1169 va = kmem_cache_alloc_node(vmap_area_cachep, gfp_mask, node); in alloc_vmap_area()
1170 if (unlikely(!va)) in alloc_vmap_area()
1177 kmemleak_scan_area(&va->rb_node, SIZE_MAX, gfp_mask); in alloc_vmap_area()
1220 va->va_start = addr; in alloc_vmap_area()
1221 va->va_end = addr + size; in alloc_vmap_area()
1222 va->vm = NULL; in alloc_vmap_area()
1226 insert_vmap_area(va, &vmap_area_root, &vmap_area_list); in alloc_vmap_area()
1229 BUG_ON(!IS_ALIGNED(va->va_start, align)); in alloc_vmap_area()
1230 BUG_ON(va->va_start < vstart); in alloc_vmap_area()
1231 BUG_ON(va->va_end > vend); in alloc_vmap_area()
1235 free_vmap_area(va); in alloc_vmap_area()
1239 return va; in alloc_vmap_area()
1261 kmem_cache_free(vmap_area_cachep, va); in alloc_vmap_area()
1330 struct vmap_area *va; in __purge_vmap_area_lazy() local
1343 llist_for_each_entry(va, valist, purge_list) { in __purge_vmap_area_lazy()
1344 if (va->va_start < start) in __purge_vmap_area_lazy()
1345 start = va->va_start; in __purge_vmap_area_lazy()
1346 if (va->va_end > end) in __purge_vmap_area_lazy()
1347 end = va->va_end; in __purge_vmap_area_lazy()
1354 llist_for_each_entry_safe(va, n_va, valist, purge_list) { in __purge_vmap_area_lazy()
1355 unsigned long nr = (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
1356 unsigned long orig_start = va->va_start; in __purge_vmap_area_lazy()
1357 unsigned long orig_end = va->va_end; in __purge_vmap_area_lazy()
1364 va = merge_or_add_vmap_area(va, &free_vmap_area_root, in __purge_vmap_area_lazy()
1367 if (!va) in __purge_vmap_area_lazy()
1372 va->va_start, va->va_end); in __purge_vmap_area_lazy()
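__purge_vmap_area_lazy() makes two passes over the lazily freed areas: one to compute the single [start, end) span that needs a TLB flush, and one to return each area to the free tree via merge_or_add_vmap_area(). The range-reduction step is just a min/max fold, sketched below over a plain array of hypothetical ranges:

#include <limits.h>

struct prange {
        unsigned long start, end;
};

/*
 * Compute the smallest [*flush_start, *flush_end) that covers every
 * pending range, so the TLB can be flushed once instead of per area.
 */
static void covering_span(const struct prange *pending, int n,
                          unsigned long *flush_start, unsigned long *flush_end)
{
        unsigned long lo = ULONG_MAX, hi = 0;
        int i;

        for (i = 0; i < n; i++) {
                if (pending[i].start < lo)
                        lo = pending[i].start;
                if (pending[i].end > hi)
                        hi = pending[i].end;
        }

        *flush_start = lo;
        *flush_end = hi;
}

flush_tlb_kernel_range() is then issued once over that span rather than per area, which is the point of deferring the frees.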
1411 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush() argument
1416 unlink_va(va, &vmap_area_root); in free_vmap_area_noflush()
1419 nr_lazy = atomic_long_add_return((va->va_end - va->va_start) >> in free_vmap_area_noflush()
1422 /* After this point, we may free va at any time */ in free_vmap_area_noflush()
1423 llist_add(&va->purge_list, &vmap_purge_list); in free_vmap_area_noflush()
1432 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area() argument
1434 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
1435 unmap_kernel_range_noflush(va->va_start, va->va_end - va->va_start); in free_unmap_vmap_area()
1437 flush_tlb_kernel_range(va->va_start, va->va_end); in free_unmap_vmap_area()
1439 free_vmap_area_noflush(va); in free_unmap_vmap_area()
1444 struct vmap_area *va; in find_vmap_area() local
1447 va = __find_vmap_area(addr); in find_vmap_area()
1450 return va; in find_vmap_area()
1490 struct vmap_area *va; member
1543 struct vmap_area *va; in new_vmap_block() local
1555 va = alloc_vmap_area(VMAP_BLOCK_SIZE, VMAP_BLOCK_SIZE, in new_vmap_block()
1558 if (IS_ERR(va)) { in new_vmap_block()
1560 return ERR_CAST(va); in new_vmap_block()
1563 vaddr = vmap_block_vaddr(va->va_start, 0); in new_vmap_block()
1565 vb->va = va; in new_vmap_block()
1574 vb_idx = addr_to_vb_idx(va->va_start); in new_vmap_block()
1578 free_vmap_area(va); in new_vmap_block()
1595 tmp = xa_erase(&vmap_blocks, addr_to_vb_idx(vb->va->va_start)); in free_vmap_block()
1598 free_vmap_area_noflush(vb->va); in free_vmap_block()
1676 vaddr = vmap_block_vaddr(vb->va->va_start, pages_off); in vb_alloc()
1750 unsigned long va_start = vb->va->va_start; in _vm_unmap_aliases()
1804 struct vmap_area *va; in vm_unmap_ram() local
1820 va = find_vmap_area(addr); in vm_unmap_ram()
1821 BUG_ON(!va); in vm_unmap_ram()
1822 debug_check_no_locks_freed((void *)va->va_start, in vm_unmap_ram()
1823 (va->va_end - va->va_start)); in vm_unmap_ram()
1824 free_unmap_vmap_area(va); in vm_unmap_ram()
1854 struct vmap_area *va; in vm_map_ram() local
1855 va = alloc_vmap_area(size, PAGE_SIZE, in vm_map_ram()
1857 if (IS_ERR(va)) in vm_map_ram()
1860 addr = va->va_start; in vm_map_ram()
1970 struct vmap_area *va; in vmalloc_init() local
1993 va = kmem_cache_zalloc(vmap_area_cachep, GFP_NOWAIT); in vmalloc_init()
1994 if (WARN_ON_ONCE(!va)) in vmalloc_init()
1997 va->va_start = (unsigned long)tmp->addr; in vmalloc_init()
1998 va->va_end = va->va_start + tmp->size; in vmalloc_init()
1999 va->vm = tmp; in vmalloc_init()
2000 insert_vmap_area(va, &vmap_area_root, &vmap_area_list); in vmalloc_init()
2028 struct vmap_area *va, unsigned long flags, const void *caller) in setup_vmalloc_vm_locked() argument
2031 vm->addr = (void *)va->va_start; in setup_vmalloc_vm_locked()
2032 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm_locked()
2034 va->vm = vm; in setup_vmalloc_vm_locked()
2037 static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in setup_vmalloc_vm() argument
2041 setup_vmalloc_vm_locked(vm, va, flags, caller); in setup_vmalloc_vm()
2060 struct vmap_area *va; in __get_vm_area_node() local
2080 va = alloc_vmap_area(size, align, start, end, node, gfp_mask); in __get_vm_area_node()
2081 if (IS_ERR(va)) { in __get_vm_area_node()
2086 kasan_unpoison_vmalloc((void *)va->va_start, requested_size); in __get_vm_area_node()
2088 setup_vmalloc_vm(area, va, flags, caller); in __get_vm_area_node()
2138 struct vmap_area *va; in find_vm_area() local
2140 va = find_vmap_area((unsigned long)addr); in find_vm_area()
2141 if (!va) in find_vm_area()
2144 return va->vm; in find_vm_area()
2159 struct vmap_area *va; in remove_vm_area() local
2164 va = __find_vmap_area((unsigned long)addr); in remove_vm_area()
2165 if (va && va->vm) { in remove_vm_area()
2166 struct vm_struct *vm = va->vm; in remove_vm_area()
2168 va->vm = NULL; in remove_vm_area()
2172 free_unmap_vmap_area(va); in remove_vm_area()
2876 struct vmap_area *va; in vread() local
2887 list_for_each_entry(va, &vmap_area_list, list) { in vread()
2891 if (!va->vm) in vread()
2894 vm = va->vm; in vread()
2955 struct vmap_area *va; in vwrite() local
2967 list_for_each_entry(va, &vmap_area_list, list) { in vwrite()
2971 if (!va->vm) in vwrite()
2974 vm = va->vm; in vwrite()
3111 * i.e. va->va_start < addr && va->va_end < addr or NULL
3117 struct vmap_area *va, *tmp; in pvm_find_va_enclose_addr() local
3121 va = NULL; in pvm_find_va_enclose_addr()
3126 va = tmp; in pvm_find_va_enclose_addr()
3136 return va; in pvm_find_va_enclose_addr()
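pvm_find_va_enclose_addr() walks the free tree for the area that contains the given address, or failing that, the highest area that starts below it; pcpu_get_vm_areas() uses it to scan candidate placement addresses downwards. A hedged sketch of that descent over a pared-down hypothetical node type (no augmented field needed for this walk):

#include <stddef.h>

struct fnode {                  /* only the fields this walk needs */
        unsigned long va_start, va_end;
        struct fnode *left, *right;
};

/*
 * Return the node that encloses addr, or else the highest node whose
 * start is still <= addr, or NULL if every node starts above addr.
 */
static struct fnode *enclose_or_below(struct fnode *n, unsigned long addr)
{
        struct fnode *best = NULL;

        while (n) {
                if (n->va_start <= addr) {
                        best = n;
                        if (n->va_end > addr)
                                break;          /* addr lies inside this range */
                        n = n->right;           /* look for something higher   */
                } else {
                        n = n->left;
                }
        }

        return best;
}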
3142 * @va:
3143 * in - the VA we start the search from (in reverse order); in pvm_determine_end_from_reverse()
3144 * out - the VA with the highest aligned end address.
3149 pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align) in pvm_determine_end_from_reverse() argument
3154 if (likely(*va)) { in pvm_determine_end_from_reverse()
3155 list_for_each_entry_from_reverse((*va), in pvm_determine_end_from_reverse()
3157 addr = min((*va)->va_end & ~(align - 1), vmalloc_end); in pvm_determine_end_from_reverse()
3158 if ((*va)->va_start < addr) in pvm_determine_end_from_reverse()
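pvm_determine_end_from_reverse() scans the free list backwards for an area whose end, rounded down to the requested alignment and clamped to vmalloc_end, still lies above its start, and returns that rounded end. The rounding and clamping are worth spelling out; a tiny sketch with hypothetical helper names (align must be a power of two):

/* Round addr down to a multiple of align (align is a power of two). */
static unsigned long align_down(unsigned long addr, unsigned long align)
{
        return addr & ~(align - 1);
}

/*
 * Highest aligned end usable inside a free range [start, end), clamped
 * to an upper limit, or 0 if alignment pushes it at or below start.
 */
static unsigned long highest_aligned_end(unsigned long start, unsigned long end,
                                         unsigned long limit, unsigned long align)
{
        unsigned long e = align_down(end < limit ? end : limit, align);

        return e > start ? e : 0;
}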
3196 struct vmap_area **vas, *va; in pcpu_get_vm_areas() local
3250 va = pvm_find_va_enclose_addr(vmalloc_end); in pcpu_get_vm_areas()
3251 base = pvm_determine_end_from_reverse(&va, align) - end; in pcpu_get_vm_areas()
3264 if (va == NULL) in pcpu_get_vm_areas()
3268 * If required width exceeds current VA block, move in pcpu_get_vm_areas()
3271 if (base + end > va->va_end) { in pcpu_get_vm_areas()
3272 base = pvm_determine_end_from_reverse(&va, align) - end; in pcpu_get_vm_areas()
3278 * If this VA does not fit, move base downwards and recheck. in pcpu_get_vm_areas()
3280 if (base + start < va->va_start) { in pcpu_get_vm_areas()
3281 va = node_to_va(rb_prev(&va->rb_node)); in pcpu_get_vm_areas()
3282 base = pvm_determine_end_from_reverse(&va, align) - end; in pcpu_get_vm_areas()
3297 va = pvm_find_va_enclose_addr(base + end); in pcpu_get_vm_areas()
3300 /* we've found a fitting base, insert all va's */ in pcpu_get_vm_areas()
3307 va = pvm_find_va_enclose_addr(start); in pcpu_get_vm_areas()
3308 if (WARN_ON_ONCE(va == NULL)) in pcpu_get_vm_areas()
3312 type = classify_va_fit_type(va, start, size); in pcpu_get_vm_areas()
3317 ret = adjust_va_to_fit_type(va, start, size, type); in pcpu_get_vm_areas()
3322 va = vas[area]; in pcpu_get_vm_areas()
3323 va->va_start = start; in pcpu_get_vm_areas()
3324 va->va_end = start + size; in pcpu_get_vm_areas()
3361 va = merge_or_add_vmap_area(vas[area], &free_vmap_area_root, in pcpu_get_vm_areas()
3363 if (va) in pcpu_get_vm_areas()
3365 va->va_start, va->va_end); in pcpu_get_vm_areas()
3411 va = merge_or_add_vmap_area(vas[area], &free_vmap_area_root, in pcpu_get_vm_areas()
3413 if (va) in pcpu_get_vm_areas()
3415 va->va_start, va->va_end); in pcpu_get_vm_areas()
3493 struct vmap_area *va; in show_purge_info() local
3499 llist_for_each_entry(va, head, purge_list) { in show_purge_info()
3501 (void *)va->va_start, (void *)va->va_end, in show_purge_info()
3502 va->va_end - va->va_start); in show_purge_info()
3508 struct vmap_area *va; in s_show() local
3511 va = list_entry(p, struct vmap_area, list); in s_show()
3517 if (!va->vm) { in s_show()
3519 (void *)va->va_start, (void *)va->va_end, in s_show()
3520 va->va_end - va->va_start); in s_show()
3525 v = va->vm; in s_show()
3566 if (list_is_last(&va->list, &vmap_area_list)) in s_show()