Lines matching full:va — every occurrence of the identifier "va" in mm/vmalloc.c, listed as <source line number> <matched line> in <enclosing function>, with a note on whether va is a local, an argument, or a struct member.

280 		struct vmap_area *va;  in __find_vmap_area()  local
282 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
283 if (addr < va->va_start) in __find_vmap_area()
285 else if (addr > va->va_start) in __find_vmap_area()
288 return va; in __find_vmap_area()
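
The matches at lines 280-288 are the rbtree lookup helper. A minimal sketch of how they fit together, assuming the older vmalloc.c layout this listing reflects (the comparison is against va_start, so only exact start addresses hit); this is a reconstruction, not the verbatim source:

static struct vmap_area *__find_vmap_area(unsigned long addr)
{
	struct rb_node *n = vmap_area_root.rb_node;

	while (n) {
		struct vmap_area *va;

		va = rb_entry(n, struct vmap_area, rb_node);
		if (addr < va->va_start)
			n = n->rb_left;		/* addr lies before this area */
		else if (addr > va->va_start)
			n = n->rb_right;	/* addr lies past this area's start */
		else
			return va;		/* exact start-address match */
	}

	return NULL;
}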
294 static void __insert_vmap_area(struct vmap_area *va) in __insert_vmap_area() argument
305 if (va->va_start < tmp_va->va_end) in __insert_vmap_area()
307 else if (va->va_end > tmp_va->va_start) in __insert_vmap_area()
313 rb_link_node(&va->rb_node, parent, p); in __insert_vmap_area()
314 rb_insert_color(&va->rb_node, &vmap_area_root); in __insert_vmap_area()
317 tmp = rb_prev(&va->rb_node); in __insert_vmap_area()
321 list_add_rcu(&va->list, &prev->list); in __insert_vmap_area()
323 list_add_rcu(&va->list, &vmap_area_list); in __insert_vmap_area()
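
Lines 294-323 are the rbtree/list insert. A sketch, assuming the classic layout where the tree is keyed by address range and an address-sorted RCU list (vmap_area_list) is maintained alongside it; overlap with an existing area is treated as a bug:

static void __insert_vmap_area(struct vmap_area *va)
{
	struct rb_node **p = &vmap_area_root.rb_node;
	struct rb_node *parent = NULL;
	struct rb_node *tmp;

	/* descend the tree; overlapping ranges are not allowed */
	while (*p) {
		struct vmap_area *tmp_va;

		parent = *p;
		tmp_va = rb_entry(parent, struct vmap_area, rb_node);
		if (va->va_start < tmp_va->va_end)
			p = &(*p)->rb_left;
		else if (va->va_end > tmp_va->va_start)
			p = &(*p)->rb_right;
		else
			BUG();
	}

	rb_link_node(&va->rb_node, parent, p);
	rb_insert_color(&va->rb_node, &vmap_area_root);

	/* keep vmap_area_list address-sorted as well */
	tmp = rb_prev(&va->rb_node);
	if (tmp) {
		struct vmap_area *prev;

		prev = rb_entry(tmp, struct vmap_area, rb_node);
		list_add_rcu(&va->list, &prev->list);
	} else
		list_add_rcu(&va->list, &vmap_area_list);
}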
337 struct vmap_area *va; in alloc_vmap_area() local
347 va = kmalloc_node(sizeof(struct vmap_area), in alloc_vmap_area()
349 if (unlikely(!va)) in alloc_vmap_area()
427 va->va_start = addr; in alloc_vmap_area()
428 va->va_end = addr + size; in alloc_vmap_area()
429 va->flags = 0; in alloc_vmap_area()
430 __insert_vmap_area(va); in alloc_vmap_area()
431 free_vmap_cache = &va->rb_node; in alloc_vmap_area()
434 BUG_ON(va->va_start & (align-1)); in alloc_vmap_area()
435 BUG_ON(va->va_start < vstart); in alloc_vmap_area()
436 BUG_ON(va->va_end > vend); in alloc_vmap_area()
438 return va; in alloc_vmap_area()
451 kfree(va); in alloc_vmap_area()
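
Lines 337-451 span alloc_vmap_area(). The skeleton below shows how the matched lines relate; the address search itself is long, so it is stood in for by a hypothetical find_free_hole() helper (not a real kernel function), and locking is reduced to the essentials:

static struct vmap_area *alloc_vmap_area(unsigned long size,
				unsigned long align,
				unsigned long vstart, unsigned long vend,
				int node, gfp_t gfp_mask)
{
	struct vmap_area *va;
	unsigned long addr;

	va = kmalloc_node(sizeof(struct vmap_area),
			gfp_mask & GFP_RECLAIM_MASK, node);
	if (unlikely(!va))
		return ERR_PTR(-ENOMEM);

	spin_lock(&vmap_area_lock);

	/*
	 * find_free_hole() is a hypothetical stand-in for the real search:
	 * walk from free_vmap_cache (or the tree root) looking for an
	 * aligned hole of 'size' bytes inside [vstart, vend), purging
	 * lazily freed areas and retrying if nothing fits.
	 */
	addr = find_free_hole(size, align, vstart, vend);
	if (!addr) {
		spin_unlock(&vmap_area_lock);
		kfree(va);			/* error path, matched line 451 */
		return ERR_PTR(-EBUSY);
	}

	va->va_start = addr;
	va->va_end = addr + size;
	va->flags = 0;
	__insert_vmap_area(va);
	free_vmap_cache = &va->rb_node;		/* remember where we allocated */

	spin_unlock(&vmap_area_lock);

	/* sanity checks on the returned range */
	BUG_ON(va->va_start & (align-1));
	BUG_ON(va->va_start < vstart);
	BUG_ON(va->va_end > vend);

	return va;
}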
455 static void __free_vmap_area(struct vmap_area *va) in __free_vmap_area() argument
457 BUG_ON(RB_EMPTY_NODE(&va->rb_node)); in __free_vmap_area()
460 if (va->va_end < cached_vstart) { in __free_vmap_area()
465 if (va->va_start <= cache->va_start) { in __free_vmap_area()
466 free_vmap_cache = rb_prev(&va->rb_node); in __free_vmap_area()
474 rb_erase(&va->rb_node, &vmap_area_root); in __free_vmap_area()
475 RB_CLEAR_NODE(&va->rb_node); in __free_vmap_area()
476 list_del_rcu(&va->list); in __free_vmap_area()
484 if (va->va_end > VMALLOC_START && va->va_end <= VMALLOC_END) in __free_vmap_area()
485 vmap_area_pcpu_hole = max(vmap_area_pcpu_hole, va->va_end); in __free_vmap_area()
487 kfree_rcu(va, rcu_head); in __free_vmap_area()
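
Lines 455-487 tear an area back out of the tree and list. A sketch of the whole function, assuming the free_vmap_cache/cached_vstart search cache and the vmap_area_pcpu_hole tracking that the matched lines refer to:

static void __free_vmap_area(struct vmap_area *va)
{
	BUG_ON(RB_EMPTY_NODE(&va->rb_node));

	/* invalidate or rewind the allocation-search cache if needed */
	if (free_vmap_cache) {
		if (va->va_end < cached_vstart) {
			free_vmap_cache = NULL;
		} else {
			struct vmap_area *cache;

			cache = rb_entry(free_vmap_cache, struct vmap_area, rb_node);
			if (va->va_start <= cache->va_start)
				free_vmap_cache = rb_prev(&va->rb_node);
		}
	}
	rb_erase(&va->rb_node, &vmap_area_root);
	RB_CLEAR_NODE(&va->rb_node);
	list_del_rcu(&va->list);

	/* track the highest hole end inside the vmalloc range for pcpu use */
	if (va->va_end > VMALLOC_START && va->va_end <= VMALLOC_END)
		vmap_area_pcpu_hole = max(vmap_area_pcpu_hole, va->va_end);

	kfree_rcu(va, rcu_head);
}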
493 static void free_vmap_area(struct vmap_area *va) in free_vmap_area() argument
496 __free_vmap_area(va); in free_vmap_area()
503 static void unmap_vmap_area(struct vmap_area *va) in unmap_vmap_area() argument
505 vunmap_page_range(va->va_start, va->va_end); in unmap_vmap_area()
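
Lines 493-505 are two thin wrappers, sketched here under the assumption that vmap_area_lock protects the tree and vunmap_page_range() clears the kernel page tables for the range:

static void free_vmap_area(struct vmap_area *va)
{
	spin_lock(&vmap_area_lock);
	__free_vmap_area(va);
	spin_unlock(&vmap_area_lock);
}

/* clear the kernel page-table entries backing this area */
static void unmap_vmap_area(struct vmap_area *va)
{
	vunmap_page_range(va->va_start, va->va_end);
}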
583 struct vmap_area *va; in __purge_vmap_area_lazy() local
602 list_for_each_entry_rcu(va, &vmap_area_list, list) { in __purge_vmap_area_lazy()
603 if (va->flags & VM_LAZY_FREE) { in __purge_vmap_area_lazy()
604 if (va->va_start < *start) in __purge_vmap_area_lazy()
605 *start = va->va_start; in __purge_vmap_area_lazy()
606 if (va->va_end > *end) in __purge_vmap_area_lazy()
607 *end = va->va_end; in __purge_vmap_area_lazy()
608 nr += (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
609 list_add_tail(&va->purge_list, &valist); in __purge_vmap_area_lazy()
610 va->flags |= VM_LAZY_FREEING; in __purge_vmap_area_lazy()
611 va->flags &= ~VM_LAZY_FREE; in __purge_vmap_area_lazy()
624 list_for_each_entry_safe(va, n_va, &valist, purge_list) in __purge_vmap_area_lazy()
625 __free_vmap_area(va); in __purge_vmap_area_lazy()
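
Lines 583-625 come from the lazy-purge pass. A condensed sketch (the purge lock and the sync/force_flush handling are simplified away): every area marked VM_LAZY_FREE is collected onto a local list, the covered range and page count are accumulated, one TLB flush is issued for the whole range, and the areas are finally freed:

static void __purge_vmap_area_lazy(unsigned long *start, unsigned long *end,
					int sync, int force_flush)
{
	LIST_HEAD(valist);
	struct vmap_area *va;
	struct vmap_area *n_va;
	int nr = 0;

	/* collect all lazily freed areas and widen [*start, *end) over them */
	rcu_read_lock();
	list_for_each_entry_rcu(va, &vmap_area_list, list) {
		if (va->flags & VM_LAZY_FREE) {
			if (va->va_start < *start)
				*start = va->va_start;
			if (va->va_end > *end)
				*end = va->va_end;
			nr += (va->va_end - va->va_start) >> PAGE_SHIFT;
			list_add_tail(&va->purge_list, &valist);
			va->flags |= VM_LAZY_FREEING;
			va->flags &= ~VM_LAZY_FREE;
		}
	}
	rcu_read_unlock();

	if (nr)
		atomic_sub(nr, &vmap_lazy_nr);

	/* one TLB flush covers every collected area */
	if (nr || force_flush)
		flush_tlb_kernel_range(*start, *end);

	if (nr) {
		spin_lock(&vmap_area_lock);
		list_for_each_entry_safe(va, n_va, &valist, purge_list)
			__free_vmap_area(va);
		spin_unlock(&vmap_area_lock);
	}
}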
657 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush() argument
659 va->flags |= VM_LAZY_FREE; in free_vmap_area_noflush()
660 atomic_add((va->va_end - va->va_start) >> PAGE_SHIFT, &vmap_lazy_nr); in free_vmap_area_noflush()
669 static void free_unmap_vmap_area_noflush(struct vmap_area *va) in free_unmap_vmap_area_noflush() argument
671 unmap_vmap_area(va); in free_unmap_vmap_area_noflush()
672 free_vmap_area_noflush(va); in free_unmap_vmap_area_noflush()
678 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area() argument
680 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
681 free_unmap_vmap_area_noflush(va); in free_unmap_vmap_area()
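
Lines 657-681 are the layered free helpers. A sketch, assuming lazy_max_pages() and try_purge_vmap_area_lazy() trigger a purge once too many pages are pending:

/* queue the area for lazy freeing; no TLB flush yet */
static void free_vmap_area_noflush(struct vmap_area *va)
{
	va->flags |= VM_LAZY_FREE;
	atomic_add((va->va_end - va->va_start) >> PAGE_SHIFT, &vmap_lazy_nr);
	if (unlikely(atomic_read(&vmap_lazy_nr) > lazy_max_pages()))
		try_purge_vmap_area_lazy();
}

/* unmap the page tables, then queue the area for lazy freeing */
static void free_unmap_vmap_area_noflush(struct vmap_area *va)
{
	unmap_vmap_area(va);
	free_vmap_area_noflush(va);
}

/* as above, but flush the CPU cache for the virtual range first */
static void free_unmap_vmap_area(struct vmap_area *va)
{
	flush_cache_vunmap(va->va_start, va->va_end);
	free_unmap_vmap_area_noflush(va);
}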
686 struct vmap_area *va; in find_vmap_area() local
689 va = __find_vmap_area(addr); in find_vmap_area()
692 return va; in find_vmap_area()
697 struct vmap_area *va; in free_unmap_vmap_area_addr() local
699 va = find_vmap_area(addr); in free_unmap_vmap_area_addr()
700 BUG_ON(!va); in free_unmap_vmap_area_addr()
701 free_unmap_vmap_area(va); in free_unmap_vmap_area_addr()
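
Lines 686-701 look an address back up under vmap_area_lock and free the area it belongs to. A sketch:

static struct vmap_area *find_vmap_area(unsigned long addr)
{
	struct vmap_area *va;

	spin_lock(&vmap_area_lock);
	va = __find_vmap_area(addr);
	spin_unlock(&vmap_area_lock);

	return va;
}

static void free_unmap_vmap_area_addr(unsigned long addr)
{
	struct vmap_area *va;

	va = find_vmap_area(addr);
	BUG_ON(!va);		/* the caller must pass a mapped address */
	free_unmap_vmap_area(va);
}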
744 struct vmap_area *va; member
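
The match at line 744 is the va member of struct vmap_block. Roughly, in kernels of this vintage, the block descriptor looks like the sketch below; field order and the exact bitmap layout varied between releases, so treat it as illustrative only:

struct vmap_block {
	spinlock_t lock;
	struct vmap_area *va;		/* the backing VMAP_BLOCK_SIZE area */
	unsigned long free, dirty;	/* pages still free / pending flush */
	DECLARE_BITMAP(dirty_map, VMAP_BBMAP_BITS);
	struct list_head free_list;	/* on the per-CPU vmap_block_queue */
	struct rcu_head rcu_head;
	struct list_head purge;
};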
783 struct vmap_area *va; in new_vmap_block() local
794 va = alloc_vmap_area(VMAP_BLOCK_SIZE, VMAP_BLOCK_SIZE, in new_vmap_block()
797 if (IS_ERR(va)) { in new_vmap_block()
799 return ERR_CAST(va); in new_vmap_block()
805 free_vmap_area(va); in new_vmap_block()
810 vb->va = va; in new_vmap_block()
817 vb_idx = addr_to_vb_idx(va->va_start); in new_vmap_block()
839 vb_idx = addr_to_vb_idx(vb->va->va_start); in free_vmap_block()
845 free_vmap_area_noflush(vb->va); in free_vmap_block()
930 addr = vb->va->va_start + (i << PAGE_SHIFT); in vb_alloc()
932 addr_to_vb_idx(vb->va->va_start)); in vb_alloc()
1035 s = vb->va->va_start + (i << PAGE_SHIFT); in vm_unmap_aliases()
1036 e = vb->va->va_start + (j << PAGE_SHIFT); in vm_unmap_aliases()
1103 struct vmap_area *va; in vm_map_ram() local
1104 va = alloc_vmap_area(size, PAGE_SIZE, in vm_map_ram()
1106 if (IS_ERR(va)) in vm_map_ram()
1109 addr = va->va_start; in vm_map_ram()
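
Lines 1103-1109 are the fallback path of vm_map_ram() for requests too large for the per-CPU block allocator. A sketch of that branch (mem, addr and size are locals of vm_map_ram()), assuming the small-allocation path goes through vb_alloc() as the surrounding matches suggest:

	if (likely(count <= VMAP_MAX_ALLOC)) {
		mem = vb_alloc(size, GFP_KERNEL);	/* per-CPU block path */
		if (IS_ERR(mem))
			return NULL;
		addr = (unsigned long)mem;
	} else {
		struct vmap_area *va;

		/* large request: grab a dedicated vmap_area instead */
		va = alloc_vmap_area(size, PAGE_SIZE,
				VMALLOC_START, VMALLOC_END, node, GFP_KERNEL);
		if (IS_ERR(va))
			return NULL;

		addr = va->va_start;
		mem = (void *)addr;
	}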
1173 struct vmap_area *va; in vmalloc_init() local
1187 va = kzalloc(sizeof(struct vmap_area), GFP_NOWAIT); in vmalloc_init()
1188 va->flags = tmp->flags | VM_VM_AREA; in vmalloc_init()
1189 va->va_start = (unsigned long)tmp->addr; in vmalloc_init()
1190 va->va_end = va->va_start + tmp->size; in vmalloc_init()
1191 __insert_vmap_area(va); in vmalloc_init()
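
Lines 1173-1191 are from boot-time initialization, where areas set up on the early vmlist are imported into the rbtree. A sketch of that loop, assuming tmp is the cursor over the legacy singly linked list of vm_struct used before the vmap_area tree exists:

	/* import the early vmlist entries into the vmap_area tree */
	for (tmp = vmlist; tmp; tmp = tmp->next) {
		va = kzalloc(sizeof(struct vmap_area), GFP_NOWAIT);
		va->flags = tmp->flags | VM_VM_AREA;
		va->va_start = (unsigned long)tmp->addr;
		va->va_end = va->va_start + tmp->size;
		__insert_vmap_area(va);
	}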
1281 static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in setup_vmalloc_vm() argument
1285 vm->addr = (void *)va->va_start; in setup_vmalloc_vm()
1286 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm()
1288 va->vm = vm; in setup_vmalloc_vm()
1289 va->flags |= VM_VM_AREA; in setup_vmalloc_vm()
1307 static void insert_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in insert_vmalloc_vm() argument
1310 setup_vmalloc_vm(vm, va, flags, caller); in insert_vmalloc_vm()
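
Lines 1281-1310 link a vm_struct to its vmap_area. A sketch, assuming the split between setup_vmalloc_vm(), which fills the vm_struct and marks the area, and insert_vmalloc_vm(), which additionally publishes it on the global vmlist (the list handling is elided):

static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va,
			      unsigned long flags, const void *caller)
{
	vm->flags = flags;
	vm->addr = (void *)va->va_start;
	vm->size = va->va_end - va->va_start;
	vm->caller = caller;
	va->vm = vm;
	va->flags |= VM_VM_AREA;	/* this area now backs a vm_struct */
}

static void insert_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va,
			      unsigned long flags, const void *caller)
{
	setup_vmalloc_vm(vm, va, flags, caller);
	/* ... also add vm to the address-sorted global vmlist ... */
}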
1318 struct vmap_area *va; in __get_vm_area_node() local
1346 va = alloc_vmap_area(size, align, start, end, node, gfp_mask); in __get_vm_area_node()
1347 if (IS_ERR(va)) { in __get_vm_area_node()
1360 setup_vmalloc_vm(area, va, flags, caller); in __get_vm_area_node()
1362 insert_vmalloc_vm(area, va, flags, caller); in __get_vm_area_node()
1407 struct vmap_area *va; in find_vm_area() local
1409 va = find_vmap_area((unsigned long)addr); in find_vm_area()
1410 if (va && va->flags & VM_VM_AREA) in find_vm_area()
1411 return va->vm; in find_vm_area()
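
Lines 1407-1411 translate an address back into its vm_struct. A sketch:

struct vm_struct *find_vm_area(const void *addr)
{
	struct vmap_area *va;

	va = find_vmap_area((unsigned long)addr);
	if (va && va->flags & VM_VM_AREA)
		return va->vm;		/* only areas that back a vm_struct */

	return NULL;
}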
1426 struct vmap_area *va; in remove_vm_area() local
1428 va = find_vmap_area((unsigned long)addr); in remove_vm_area()
1429 if (va && va->flags & VM_VM_AREA) { in remove_vm_area()
1430 struct vm_struct *vm = va->vm; in remove_vm_area()
1446 vmap_debug_free_range(va->va_start, va->va_end); in remove_vm_area()
1447 free_unmap_vmap_area(va); in remove_vm_area()
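
Lines 1426-1447 detach a vm_struct and unmap its pages. A condensed sketch; removal from the global vmlist and guard-page bookkeeping are elided:

struct vm_struct *remove_vm_area(const void *addr)
{
	struct vmap_area *va;

	va = find_vmap_area((unsigned long)addr);
	if (va && va->flags & VM_VM_AREA) {
		struct vm_struct *vm = va->vm;

		/* ... unlink vm from the global vmlist ... */

		vmap_debug_free_range(va->va_start, va->va_end);
		free_unmap_vmap_area(va);

		return vm;
	}
	return NULL;
}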
2249 struct vmap_area *va = NULL; in pvm_find_next_prev() local
2252 va = rb_entry(n, struct vmap_area, rb_node); in pvm_find_next_prev()
2253 if (end < va->va_end) in pvm_find_next_prev()
2255 else if (end > va->va_end) in pvm_find_next_prev()
2261 if (!va) in pvm_find_next_prev()
2264 if (va->va_end > end) { in pvm_find_next_prev()
2265 *pnext = va; in pvm_find_next_prev()
2268 *pprev = va; in pvm_find_next_prev()
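
Lines 2249-2268 are the helper the per-CPU allocator uses to find the vmap_areas straddling an end address. A sketch, assuming node_to_va() converts an rb_node (or NULL) into a vmap_area pointer:

static bool pvm_find_next_prev(unsigned long end,
			       struct vmap_area **pnext,
			       struct vmap_area **pprev)
{
	struct rb_node *n = vmap_area_root.rb_node;
	struct vmap_area *va = NULL;

	while (n) {
		va = rb_entry(n, struct vmap_area, rb_node);
		if (end < va->va_end)
			n = n->rb_left;
		else if (end > va->va_end)
			n = n->rb_right;
		else
			break;
	}

	if (!va)
		return false;		/* tree is empty */

	if (va->va_end > end) {
		*pnext = va;
		*pprev = node_to_va(rb_prev(&(*pnext)->rb_node));
	} else {
		*pprev = va;
		*pnext = node_to_va(rb_next(&(*pprev)->rb_node));
	}
	return true;
}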
2456 /* we've found a fitting base, insert all va's */ in pcpu_get_vm_areas()
2458 struct vmap_area *va = vas[area]; in pcpu_get_vm_areas() local
2460 va->va_start = base + offsets[area]; in pcpu_get_vm_areas()
2461 va->va_end = va->va_start + sizes[area]; in pcpu_get_vm_areas()
2462 __insert_vmap_area(va); in pcpu_get_vm_areas()
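
Lines 2456-2462 are the final step of pcpu_get_vm_areas(): once a base address that fits every chunk is found, each preallocated vmap_area gets its range filled in and is inserted. A sketch of that loop; base, offsets[], sizes[] and vas[] are local state of the surrounding function:

	/* we've found a fitting base, insert all va's */
	for (area = 0; area < nr_vms; area++) {
		struct vmap_area *va = vas[area];

		va->va_start = base + offsets[area];
		va->va_end = va->va_start + sizes[area];
		__insert_vmap_area(va);
	}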