Lines Matching refs:va
292 struct vmap_area *va; in __find_vmap_area() local
294 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
295 if (addr < va->va_start) in __find_vmap_area()
297 else if (addr >= va->va_end) in __find_vmap_area()
300 return va; in __find_vmap_area()
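
The __find_vmap_area() hits above (lines 292-300) show the address lookup: the tree is keyed by each area's [va_start, va_end) range, and the walk goes left when the address falls below a node's start and right when it falls at or past its end. A minimal standalone sketch of the same walk, using a hypothetical plain binary tree in place of the kernel's rb-tree:

    #include <stddef.h>

    /* Hypothetical stand-in for struct vmap_area: one node per mapped range
     * [va_start, va_end), kept in a search tree ordered by address. */
    struct area {
            unsigned long va_start;
            unsigned long va_end;
            struct area *left, *right;      /* plain BST; the kernel uses an rb-tree */
    };

    /* Same shape as the __find_vmap_area() walk: descend until the address
     * lands inside a node's range, or fall off the tree and return NULL. */
    static struct area *find_area(struct area *root, unsigned long addr)
    {
            struct area *n = root;

            while (n) {
                    if (addr < n->va_start)
                            n = n->left;
                    else if (addr >= n->va_end)
                            n = n->right;
                    else
                            return n;       /* va_start <= addr < va_end */
            }
            return NULL;
    }
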
306 static void __insert_vmap_area(struct vmap_area *va) in __insert_vmap_area() argument
317 if (va->va_start < tmp_va->va_end) in __insert_vmap_area()
319 else if (va->va_end > tmp_va->va_start) in __insert_vmap_area()
325 rb_link_node(&va->rb_node, parent, p); in __insert_vmap_area()
326 rb_insert_color(&va->rb_node, &vmap_area_root); in __insert_vmap_area()
329 tmp = rb_prev(&va->rb_node); in __insert_vmap_area()
333 list_add_rcu(&va->list, &prev->list); in __insert_vmap_area()
335 list_add_rcu(&va->list, &vmap_area_list); in __insert_vmap_area()
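
The __insert_vmap_area() hits (lines 306-335) show both halves of an insertion: link the new node into the range-ordered tree, then splice it into the RCU-protected vmap_area_list right after its in-tree predecessor (found via rb_prev()), or at the list head when it has none. A simplified standalone sketch under the same assumptions as the lookup above, with a plain BST, a sentinel-headed doubly linked list, and no RCU:

    #include <stddef.h>

    /* Same hypothetical struct area as above, extended with list links. */
    struct area {
            unsigned long va_start, va_end;
            struct area *left, *right;          /* tree, ordered by address */
            struct area *list_prev, *list_next; /* address-ordered list     */
    };

    /* Insert a non-overlapping range: descend the tree remembering the last
     * node passed on the right (the in-order predecessor), link the new
     * node, then splice it into the list after that predecessor -- the same
     * placement __insert_vmap_area() derives from rb_prev(). */
    static void insert_area(struct area **root, struct area *head, struct area *va)
    {
            struct area **p = root;
            struct area *prev = NULL;

            while (*p) {
                    struct area *tmp = *p;

                    if (va->va_end <= tmp->va_start) {
                            p = &tmp->left;
                    } else if (va->va_start >= tmp->va_end) {
                            prev = tmp;         /* tmp lies wholly below va */
                            p = &tmp->right;
                    } else {
                            return;             /* overlap: caller bug (the kernel BUG()s) */
                    }
            }
            va->left = va->right = NULL;
            *p = va;

            if (!prev)
                    prev = head;                /* sentinel head: new lowest area */
            va->list_next = prev->list_next;
            va->list_prev = prev;
            if (prev->list_next)
                    prev->list_next->list_prev = va;
            prev->list_next = va;
    }
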
349 struct vmap_area *va; in alloc_vmap_area() local
359 va = kmalloc_node(sizeof(struct vmap_area), in alloc_vmap_area()
361 if (unlikely(!va)) in alloc_vmap_area()
368 kmemleak_scan_area(&va->rb_node, SIZE_MAX, gfp_mask & GFP_RECLAIM_MASK); in alloc_vmap_area()
445 va->va_start = addr; in alloc_vmap_area()
446 va->va_end = addr + size; in alloc_vmap_area()
447 va->flags = 0; in alloc_vmap_area()
448 __insert_vmap_area(va); in alloc_vmap_area()
449 free_vmap_cache = &va->rb_node; in alloc_vmap_area()
452 BUG_ON(va->va_start & (align-1)); in alloc_vmap_area()
453 BUG_ON(va->va_start < vstart); in alloc_vmap_area()
454 BUG_ON(va->va_end > vend); in alloc_vmap_area()
456 return va; in alloc_vmap_area()
469 kfree(va); in alloc_vmap_area()
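
The alloc_vmap_area() fragments (lines 349-469) show the shape of allocation: kmalloc a struct vmap_area, search [vstart, vend) for an aligned hole, record it in va_start/va_end, insert it, cache the new node as a hint for the next search (free_vmap_cache), and kfree() the struct on failure. The BUG_ON()s at lines 452-454 spell out the invariants every successful allocation must satisfy; a small standalone restatement, assuming a power-of-two alignment and hypothetical helper names:

    #include <assert.h>

    /* Round an address up to a power-of-two alignment, as the hole search in
     * alloc_vmap_area() does before testing a candidate start. */
    static unsigned long align_up(unsigned long addr, unsigned long align)
    {
            return (addr + align - 1) & ~(align - 1);
    }

    /* Post-conditions matching the BUG_ON()s at lines 452-454: the chosen
     * range is aligned and lies entirely inside [vstart, vend]. */
    static void check_new_area(unsigned long va_start, unsigned long va_end,
                               unsigned long vstart, unsigned long vend,
                               unsigned long align)
    {
            assert((va_start & (align - 1)) == 0);
            assert(va_start >= vstart);
            assert(va_end <= vend);
    }
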
473 static void __free_vmap_area(struct vmap_area *va) in __free_vmap_area() argument
475 BUG_ON(RB_EMPTY_NODE(&va->rb_node)); in __free_vmap_area()
478 if (va->va_end < cached_vstart) { in __free_vmap_area()
483 if (va->va_start <= cache->va_start) { in __free_vmap_area()
484 free_vmap_cache = rb_prev(&va->rb_node); in __free_vmap_area()
492 rb_erase(&va->rb_node, &vmap_area_root); in __free_vmap_area()
493 RB_CLEAR_NODE(&va->rb_node); in __free_vmap_area()
494 list_del_rcu(&va->list); in __free_vmap_area()
502 if (va->va_end > VMALLOC_START && va->va_end <= VMALLOC_END) in __free_vmap_area()
503 vmap_area_pcpu_hole = max(vmap_area_pcpu_hole, va->va_end); in __free_vmap_area()
505 kfree_rcu(va, rcu_head); in __free_vmap_area()
511 static void free_vmap_area(struct vmap_area *va) in free_vmap_area() argument
514 __free_vmap_area(va); in free_vmap_area()
521 static void unmap_vmap_area(struct vmap_area *va) in unmap_vmap_area() argument
523 vunmap_page_range(va->va_start, va->va_end); in unmap_vmap_area()
601 struct vmap_area *va; in __purge_vmap_area_lazy() local
620 list_for_each_entry_rcu(va, &vmap_area_list, list) { in __purge_vmap_area_lazy()
621 if (va->flags & VM_LAZY_FREE) { in __purge_vmap_area_lazy()
622 if (va->va_start < *start) in __purge_vmap_area_lazy()
623 *start = va->va_start; in __purge_vmap_area_lazy()
624 if (va->va_end > *end) in __purge_vmap_area_lazy()
625 *end = va->va_end; in __purge_vmap_area_lazy()
626 nr += (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
627 list_add_tail(&va->purge_list, &valist); in __purge_vmap_area_lazy()
628 va->flags |= VM_LAZY_FREEING; in __purge_vmap_area_lazy()
629 va->flags &= ~VM_LAZY_FREE; in __purge_vmap_area_lazy()
642 list_for_each_entry_safe(va, n_va, &valist, purge_list) in __purge_vmap_area_lazy()
643 __free_vmap_area(va); in __purge_vmap_area_lazy()
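
The __purge_vmap_area_lazy() fragments (lines 601-643) show the batched reclaim: walk vmap_area_list, collect every area marked VM_LAZY_FREE onto a private purge list, widen a single [*start, *end) flush window around all of them, count the pages involved, and free each collected area once the TLB flush has been issued. A standalone sketch of the gathering pass, with no RCU or locking and a hypothetical 'lazy' flag standing in for VM_LAZY_FREE:

    #define PAGE_SHIFT 12UL

    /* Simplified area with only what the scan needs. */
    struct lazy_area {
            unsigned long va_start, va_end;
            int lazy;                       /* stand-in for VM_LAZY_FREE      */
            struct lazy_area *list_next;    /* address-ordered list, as above */
    };

    struct lazy_scan {
            unsigned long start, end;       /* union of all purged ranges */
            unsigned long nr_pages;         /* pages handed back this pass */
    };

    /* One pass over the list, in the shape of the loop at lines 620-629. */
    static void scan_lazy(struct lazy_area *first, struct lazy_scan *s)
    {
            struct lazy_area *va;

            s->start = ~0UL;
            s->end = 0;
            s->nr_pages = 0;

            for (va = first; va; va = va->list_next) {
                    if (!va->lazy)
                            continue;
                    if (va->va_start < s->start)
                            s->start = va->va_start;
                    if (va->va_end > s->end)
                            s->end = va->va_end;
                    s->nr_pages += (va->va_end - va->va_start) >> PAGE_SHIFT;
                    /* the kernel also moves 'va' onto a private purge list here */
            }
    }
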
675 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush() argument
677 va->flags |= VM_LAZY_FREE; in free_vmap_area_noflush()
678 atomic_add((va->va_end - va->va_start) >> PAGE_SHIFT, &vmap_lazy_nr); in free_vmap_area_noflush()
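
free_vmap_area_noflush() (lines 675-678) only marks the area VM_LAZY_FREE and adds its page count to the global vmap_lazy_nr counter; the TLB flush and the actual freeing are deferred until the counter crosses a threshold and a purge pass runs. A tiny model of that accounting with C11 atomics; the threshold value is made up for illustration (the kernel derives it from memory size in lazy_max_pages()):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define PAGE_SHIFT      12UL
    #define LAZY_MAX_PAGES  (32UL * 1024)   /* hypothetical threshold */

    static atomic_ulong lazy_pages;         /* counterpart of vmap_lazy_nr */

    /* Account a freed range and report whether a purge pass is now due. */
    static bool lazy_free_account(unsigned long va_start, unsigned long va_end)
    {
            unsigned long nr = (va_end - va_start) >> PAGE_SHIFT;

            return atomic_fetch_add(&lazy_pages, nr) + nr > LAZY_MAX_PAGES;
    }
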
687 static void free_unmap_vmap_area_noflush(struct vmap_area *va) in free_unmap_vmap_area_noflush() argument
689 unmap_vmap_area(va); in free_unmap_vmap_area_noflush()
690 free_vmap_area_noflush(va); in free_unmap_vmap_area_noflush()
696 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area() argument
698 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
699 free_unmap_vmap_area_noflush(va); in free_unmap_vmap_area()
704 struct vmap_area *va; in find_vmap_area() local
707 va = __find_vmap_area(addr); in find_vmap_area()
710 return va; in find_vmap_area()
715 struct vmap_area *va; in free_unmap_vmap_area_addr() local
717 va = find_vmap_area(addr); in free_unmap_vmap_area_addr()
718 BUG_ON(!va); in free_unmap_vmap_area_addr()
719 free_unmap_vmap_area(va); in free_unmap_vmap_area_addr()
762 struct vmap_area *va; member
799 struct vmap_area *va; in new_vmap_block() local
810 va = alloc_vmap_area(VMAP_BLOCK_SIZE, VMAP_BLOCK_SIZE, in new_vmap_block()
813 if (IS_ERR(va)) { in new_vmap_block()
815 return ERR_CAST(va); in new_vmap_block()
821 free_vmap_area(va); in new_vmap_block()
826 vb->va = va; in new_vmap_block()
832 vb_idx = addr_to_vb_idx(va->va_start); in new_vmap_block()
853 vb_idx = addr_to_vb_idx(vb->va->va_start); in free_vmap_block()
859 free_vmap_area_noflush(vb->va); in free_vmap_block()
935 addr = vb->va->va_start + (i << PAGE_SHIFT); in vb_alloc()
937 addr_to_vb_idx(vb->va->va_start)); in vb_alloc()
1038 s = vb->va->va_start + (i << PAGE_SHIFT); in vm_unmap_aliases()
1039 e = vb->va->va_start + (j << PAGE_SHIFT); in vm_unmap_aliases()
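
The vmap_block references (lines 762-1039) revolve around one piece of address arithmetic: each block owns a single VMAP_BLOCK_SIZE-sized, VMAP_BLOCK_SIZE-aligned vmap_area, page i inside it lives at vb->va->va_start + (i << PAGE_SHIFT), and an address is mapped back to its block via addr_to_vb_idx() for the radix-tree lookup. A sketch of that arithmetic with made-up block geometry (the real constants come from VMAP_BBMAP_BITS in mm/vmalloc.c):

    #define PAGE_SHIFT        12UL
    #define PAGE_SIZE         (1UL << PAGE_SHIFT)
    #define VMAP_BLOCK_PAGES  64UL                           /* hypothetical */
    #define VMAP_BLOCK_SIZE   (VMAP_BLOCK_PAGES * PAGE_SIZE)

    /* Address handed out for the i-th page of a block, as at line 935. */
    static unsigned long block_page_addr(unsigned long va_start, unsigned long i)
    {
            return va_start + (i << PAGE_SHIFT);
    }

    /* Key used for the radix tree that maps addresses back to their block,
     * in the spirit of addr_to_vb_idx(); the kernel additionally biases the
     * address by the alignment of VMALLOC_START. */
    static unsigned long block_idx(unsigned long addr)
    {
            return addr / VMAP_BLOCK_SIZE;
    }
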
1108 struct vmap_area *va; in vm_map_ram() local
1109 va = alloc_vmap_area(size, PAGE_SIZE, in vm_map_ram()
1111 if (IS_ERR(va)) in vm_map_ram()
1114 addr = va->va_start; in vm_map_ram()
1179 struct vmap_area *va; in vmalloc_init() local
1197 va = kzalloc(sizeof(struct vmap_area), GFP_NOWAIT); in vmalloc_init()
1198 va->flags = VM_VM_AREA; in vmalloc_init()
1199 va->va_start = (unsigned long)tmp->addr; in vmalloc_init()
1200 va->va_end = va->va_start + tmp->size; in vmalloc_init()
1201 va->vm = tmp; in vmalloc_init()
1202 __insert_vmap_area(va); in vmalloc_init()
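
vmalloc_init() (lines 1179-1202) seeds the structures at boot: for every vm_struct already on the early vmlist it allocates a vmap_area, copies the address range, marks it VM_VM_AREA, points va->vm back at the vm_struct, and inserts it. Reusing the hypothetical struct area and insert_area() from the sketch after the __insert_vmap_area hits, the per-entry step looks roughly like:

    #include <stdlib.h>

    /* Register an already-reserved range [addr, addr + size) with the tree
     * and list -- the per-entry body of the loop in vmalloc_init(). Returns
     * NULL if the bookkeeping node cannot be allocated. */
    static struct area *seed_area(struct area **root, struct area *head,
                                  unsigned long addr, unsigned long size)
    {
            struct area *va = calloc(1, sizeof(*va));

            if (!va)
                    return NULL;
            va->va_start = addr;
            va->va_end = addr + size;
            insert_area(root, head, va);
            return va;
    }
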
1285 static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in setup_vmalloc_vm() argument
1290 vm->addr = (void *)va->va_start; in setup_vmalloc_vm()
1291 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm()
1293 va->vm = vm; in setup_vmalloc_vm()
1294 va->flags |= VM_VM_AREA; in setup_vmalloc_vm()
1313 struct vmap_area *va; in __get_vm_area_node() local
1333 va = alloc_vmap_area(size, align, start, end, node, gfp_mask); in __get_vm_area_node()
1334 if (IS_ERR(va)) { in __get_vm_area_node()
1339 setup_vmalloc_vm(area, va, flags, caller); in __get_vm_area_node()
1393 struct vmap_area *va; in find_vm_area() local
1395 va = find_vmap_area((unsigned long)addr); in find_vm_area()
1396 if (va && va->flags & VM_VM_AREA) in find_vm_area()
1397 return va->vm; in find_vm_area()
1412 struct vmap_area *va; in remove_vm_area() local
1414 va = find_vmap_area((unsigned long)addr); in remove_vm_area()
1415 if (va && va->flags & VM_VM_AREA) { in remove_vm_area()
1416 struct vm_struct *vm = va->vm; in remove_vm_area()
1419 va->vm = NULL; in remove_vm_area()
1420 va->flags &= ~VM_VM_AREA; in remove_vm_area()
1423 vmap_debug_free_range(va->va_start, va->va_end); in remove_vm_area()
1424 free_unmap_vmap_area(va); in remove_vm_area()
1980 struct vmap_area *va; in vread() local
1991 list_for_each_entry(va, &vmap_area_list, list) { in vread()
1995 if (!(va->flags & VM_VM_AREA)) in vread()
1998 vm = va->vm; in vread()
2061 struct vmap_area *va; in vwrite() local
2073 list_for_each_entry(va, &vmap_area_list, list) { in vwrite()
2077 if (!(va->flags & VM_VM_AREA)) in vwrite()
2080 vm = va->vm; in vwrite()
2276 struct vmap_area *va = NULL; in pvm_find_next_prev() local
2279 va = rb_entry(n, struct vmap_area, rb_node); in pvm_find_next_prev()
2280 if (end < va->va_end) in pvm_find_next_prev()
2282 else if (end > va->va_end) in pvm_find_next_prev()
2288 if (!va) in pvm_find_next_prev()
2291 if (va->va_end > end) { in pvm_find_next_prev()
2292 *pnext = va; in pvm_find_next_prev()
2295 *pprev = va; in pvm_find_next_prev()
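
pvm_find_next_prev() (lines 2276-2295) locates the area whose range ends just above a given address, together with its predecessor, which pcpu_get_vm_areas() uses while walking candidate holes downward for the per-cpu chunks. Because the areas are disjoint, the va_start-ordered tree is also ordered by va_end, so the search can compare against va_end. One way to express the "first area ending above addr" half of that over the plain tree from the earlier sketches:

    /* Lowest area whose va_end lies above 'addr' (the candidate for *pnext),
     * or NULL if none does; *pprev would then be its list predecessor. */
    static struct area *first_area_ending_above(struct area *root, unsigned long addr)
    {
            struct area *n = root, *best = NULL;

            while (n) {
                    if (addr < n->va_end) {
                            best = n;       /* candidate; look for a lower one */
                            n = n->left;
                    } else {
                            n = n->right;
                    }
            }
            return best;
    }
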
2485 struct vmap_area *va = vas[area]; in pcpu_get_vm_areas() local
2487 va->va_start = base + offsets[area]; in pcpu_get_vm_areas()
2488 va->va_end = va->va_start + sizes[area]; in pcpu_get_vm_areas()
2489 __insert_vmap_area(va); in pcpu_get_vm_areas()
2537 struct vmap_area *va; in s_start() local
2540 va = list_entry((&vmap_area_list)->next, typeof(*va), list); in s_start()
2541 while (n > 0 && &va->list != &vmap_area_list) { in s_start()
2543 va = list_entry(va->list.next, typeof(*va), list); in s_start()
2545 if (!n && &va->list != &vmap_area_list) in s_start()
2546 return va; in s_start()
2554 struct vmap_area *va = p, *next; in s_next() local
2557 next = list_entry(va->list.next, typeof(*va), list); in s_next()
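
The s_start()/s_next() fragments (lines 2537-2557) implement the /proc/vmallocinfo seq_file iterator: s_start() skips *pos entries down vmap_area_list, s_next() advances one entry per call, and both return NULL once the walk falls off the end of the list. The skip-ahead loop, modelled on the simplified NULL-terminated list from the earlier sketches:

    /* Return the n-th area (0-based) on an address-ordered list, or NULL
     * when fewer than n+1 entries exist -- the shape of lines 2540-2546. */
    static struct area *nth_area(struct area *first, unsigned long n)
    {
            struct area *va = first;

            while (n > 0 && va) {
                    va = va->list_next;
                    n--;
            }
            return va;
    }
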
2596 struct vmap_area *va = p; in s_show() local
2603 if (!(va->flags & VM_VM_AREA)) in s_show()
2606 v = va->vm; in s_show()
2672 struct vmap_area *va; in get_vmalloc_info() local
2688 list_for_each_entry_rcu(va, &vmap_area_list, list) { in get_vmalloc_info()
2689 unsigned long addr = va->va_start; in get_vmalloc_info()
2699 if (va->flags & (VM_LAZY_FREE | VM_LAZY_FREEING)) in get_vmalloc_info()
2702 vmi->used += (va->va_end - va->va_start); in get_vmalloc_info()
2708 prev_end = va->va_end; in get_vmalloc_info()
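
get_vmalloc_info() (lines 2672-2708) produces the VmallocUsed/VmallocChunk figures reported in /proc/meminfo with a single pass over vmap_area_list: areas still in a lazy-free state are skipped, every other area's size is added to the used total, and the largest gap between consecutive live areas (including the tail gap after the last one) becomes the largest chunk. A standalone model of that accumulation, reusing struct lazy_area from the purge sketch and treating [vstart, vend) as the tracked window:

    struct vinfo {
            unsigned long used;             /* bytes covered by live areas */
            unsigned long largest_chunk;    /* biggest contiguous free gap */
    };

    static void vmalloc_info(struct lazy_area *first,
                             unsigned long vstart, unsigned long vend,
                             struct vinfo *vmi)
    {
            unsigned long prev_end = vstart;
            struct lazy_area *va;

            vmi->used = 0;
            vmi->largest_chunk = 0;

            for (va = first; va; va = va->list_next) {
                    if (va->lazy)           /* skipped, like the VM_LAZY_* check */
                            continue;
                    if (va->va_start - prev_end > vmi->largest_chunk)
                            vmi->largest_chunk = va->va_start - prev_end;
                    vmi->used += va->va_end - va->va_start;
                    prev_end = va->va_end;
            }
            if (vend - prev_end > vmi->largest_chunk)
                    vmi->largest_chunk = vend - prev_end;
    }
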