Lines Matching refs:vmap_area (all hits are in mm/vmalloc.c)

761 static DEFINE_PER_CPU(struct vmap_area *, ne_fit_preload_node);
764 va_size(struct vmap_area *va) in va_size()
772 struct vmap_area *va; in get_subtree_max_size()
774 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
779 struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
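The helpers above (lines 764-779) back vmalloc's augmented rbtree of free areas: each node caches, in subtree_max_size, the largest va_size() anywhere beneath it, and the RB_DECLARE_CALLBACKS_MAX() line generates the callbacks that keep that cache correct across rotations. A minimal userspace sketch of the invariant, with illustrative (non-kernel) node and helper names:

#include <stdio.h>

struct toy_va {
        unsigned long va_start, va_end;   /* free area: [va_start, va_end) */
        unsigned long subtree_max_size;   /* cached max va_size() below */
        struct toy_va *left, *right;
};

static unsigned long va_size(const struct toy_va *va)
{
        return va->va_end - va->va_start;
}

/* Recompute the cached maximum bottom-up, as the generated
 * RB_DECLARE_CALLBACKS_MAX callbacks do after tree rotations. */
static unsigned long pull_max(struct toy_va *va)
{
        unsigned long m, child;

        if (!va)
                return 0;
        m = va_size(va);
        child = pull_max(va->left);
        if (child > m)
                m = child;
        child = pull_max(va->right);
        if (child > m)
                m = child;
        va->subtree_max_size = m;
        return m;
}

int main(void)
{
        struct toy_va l = { 0x1000, 0x3000, 0, NULL, NULL };  /* 8K free */
        struct toy_va r = { 0x8000, 0x9000, 0, NULL, NULL };  /* 4K free */
        struct toy_va root = { 0x4000, 0x5000, 0, &l, &r };   /* 4K free */

        printf("root subtree_max_size = 0x%lx\n", pull_max(&root));
        return 0;
}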
793 static struct vmap_area *find_vmap_area_exceed_addr(unsigned long addr) in find_vmap_area_exceed_addr()
795 struct vmap_area *va = NULL; in find_vmap_area_exceed_addr()
801 struct vmap_area *tmp; in find_vmap_area_exceed_addr()
803 tmp = rb_entry(n, struct vmap_area, rb_node); in find_vmap_area_exceed_addr()
817 static struct vmap_area *__find_vmap_area(unsigned long addr) in __find_vmap_area()
824 struct vmap_area *va; in __find_vmap_area()
826 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
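find_vmap_area_exceed_addr() and __find_vmap_area() (lines 793-826) both walk the address-sorted rbtree; the latter descends left or right until addr falls inside a node's [va_start, va_end). A self-contained sketch of that descent over a plain BST (toy types, not the kernel's):

#include <stdio.h>

struct toy_va {
        unsigned long va_start, va_end;
        struct toy_va *left, *right;
};

static struct toy_va *find_va(struct toy_va *n, unsigned long addr)
{
        while (n) {
                if (addr < n->va_start)
                        n = n->left;    /* any match lies to the left */
                else if (addr >= n->va_end)
                        n = n->right;   /* any match lies to the right */
                else
                        return n;       /* addr inside [va_start, va_end) */
        }
        return NULL;
}

int main(void)
{
        struct toy_va l = { 0x1000, 0x2000, NULL, NULL };
        struct toy_va root = { 0x4000, 0x6000, &l, NULL };

        printf("%p\n", (void *)find_va(&root, 0x1800));  /* finds l */
        return 0;
}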
847 find_va_links(struct vmap_area *va, in find_va_links()
851 struct vmap_area *tmp_va; in find_va_links()
870 tmp_va = rb_entry(*link, struct vmap_area, rb_node); in find_va_links()
909 list = &rb_entry(parent, struct vmap_area, rb_node)->list; in get_va_next_sibling()
914 link_va(struct vmap_area *va, struct rb_root *root, in link_va()
922 head = &rb_entry(parent, struct vmap_area, rb_node)->list; in link_va()
953 unlink_va(struct vmap_area *va, struct rb_root *root) in unlink_va()
973 compute_subtree_max_size(struct vmap_area *va) in compute_subtree_max_size()
983 struct vmap_area *va; in augment_tree_propagate_check()
1023 augment_tree_propagate_from(struct vmap_area *va) in augment_tree_propagate_from()
1038 insert_vmap_area(struct vmap_area *va, in insert_vmap_area()
1050 insert_vmap_area_augment(struct vmap_area *va, in insert_vmap_area_augment()
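Lines 847-1050 are the insertion path: find_va_links() locates the rbtree slot, and link_va() threads the new area into both the rbtree and an address-sorted list, so neighbors stay reachable in O(1). Every rb_entry()/list_entry() call in this listing is container_of() in disguise; a simplified, runnable illustration of that pointer arithmetic:

#include <stddef.h>
#include <stdio.h>

struct rb_node {
        struct rb_node *rb_left, *rb_right;
};

/* Simplified rb_entry(): step back from the embedded member to the
 * start of the containing structure. */
#define rb_entry(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct vmap_area_like {
        unsigned long va_start, va_end;
        struct rb_node rb_node;         /* embedded rbtree linkage */
};

int main(void)
{
        struct vmap_area_like va = { 0x1000, 0x2000, { NULL, NULL } };
        struct rb_node *n = &va.rb_node;  /* what tree code hands back */

        /* Recover the enclosing structure from its embedded node. */
        printf("va_start = 0x%lx\n",
               rb_entry(n, struct vmap_area_like, rb_node)->va_start);
        return 0;
}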
1079 static __always_inline struct vmap_area *
1080 merge_or_add_vmap_area(struct vmap_area *va, in merge_or_add_vmap_area()
1083 struct vmap_area *sibling; in merge_or_add_vmap_area()
1112 sibling = list_entry(next, struct vmap_area, list); in merge_or_add_vmap_area()
1133 sibling = list_entry(next->prev, struct vmap_area, list); in merge_or_add_vmap_area()
1163 static __always_inline struct vmap_area *
1164 merge_or_add_vmap_area_augment(struct vmap_area *va, in merge_or_add_vmap_area_augment()
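merge_or_add_vmap_area() and its _augment variant (lines 1079-1164) coalesce a freed area with a physically adjacent next or previous neighbor, found in O(1) via the sorted list, before falling back to a plain insert. The adjacency checks reduce to the following, sketched here over bare ranges rather than list nodes:

#include <stdbool.h>
#include <stdio.h>

struct range { unsigned long start, end; };

/* Try to absorb b into a; true when the two ranges touch. */
static bool try_merge(struct range *a, const struct range *b)
{
        if (a->end == b->start) {       /* b sits right after a */
                a->end = b->end;
                return true;
        }
        if (b->end == a->start) {       /* b sits right before a */
                a->start = b->start;
                return true;
        }
        return false;
}

int main(void)
{
        struct range freed = { 0x2000, 0x3000 };
        struct range next  = { 0x3000, 0x5000 };

        if (try_merge(&freed, &next))
                printf("merged: [0x%lx, 0x%lx)\n", freed.start, freed.end);
        return 0;
}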
1175 is_within_this_va(struct vmap_area *va, unsigned long size, in is_within_this_va()
1200 static __always_inline struct vmap_area *
1204 struct vmap_area *va; in find_vmap_lowest_match()
1215 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1241 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1267 static struct vmap_area *
1271 struct vmap_area *va; in find_vmap_lowest_linear_match()
1286 struct vmap_area *va_1, *va_2; in find_vmap_lowest_match_check()
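find_vmap_lowest_match() (lines 1200-1241) is the allocator's best-fit search: it prunes any subtree whose subtree_max_size is too small and prefers the leftmost, i.e. lowest-address, fit; find_vmap_lowest_linear_match() and find_vmap_lowest_match_check() (lines 1267-1286) exist only to cross-check it under a debug option. A recursive sketch that ignores the kernel's extra alignment and vstart constraints (the real function is iterative):

#include <stdio.h>

struct toy_va {
        unsigned long va_start, va_end;
        unsigned long subtree_max_size;
        struct toy_va *left, *right;
};

static struct toy_va *lowest_match(struct toy_va *n, unsigned long size)
{
        struct toy_va *found;

        /* Prune: nothing in this subtree is big enough. */
        if (!n || n->subtree_max_size < size)
                return NULL;

        /* Prefer the leftmost (lowest address) fit. */
        found = lowest_match(n->left, size);
        if (found)
                return found;
        if (n->va_end - n->va_start >= size)
                return n;
        return lowest_match(n->right, size);
}

int main(void)
{
        struct toy_va l = { 0x1000, 0x2000, 0x1000, NULL, NULL };
        struct toy_va r = { 0x8000, 0xa000, 0x2000, NULL, NULL };
        struct toy_va root = { 0x4000, 0x5000, 0x2000, &l, &r };
        struct toy_va *va = lowest_match(&root, 0x2000);

        printf("match at 0x%lx\n", va ? va->va_start : 0);  /* 0x8000 */
        return 0;
}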
1311 classify_va_fit_type(struct vmap_area *va, in classify_va_fit_type()
1337 adjust_va_to_fit_type(struct vmap_area *va, in adjust_va_to_fit_type()
1341 struct vmap_area *lva = NULL; in adjust_va_to_fit_type()
1446 struct vmap_area *va; in __alloc_vmap_area()
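classify_va_fit_type() (line 1311) decides how a request carves a free area: it may consume it whole (FL), clip the left or right edge (LE/RE), or split it in two (NE). Only the NE case forces adjust_va_to_fit_type() to produce a second vmap_area, which is what the per-CPU ne_fit_preload_node at line 761 preallocates so the split rarely allocates under the spinlock. The case analysis, restated as standalone C with the kernel's enum names:

#include <stdio.h>

/* Enum names follow the kernel's; the body is a simplified restatement. */
enum fit_type {
        NOTHING_FIT,
        FL_FIT_TYPE,    /* full fit: request consumes the whole area */
        LE_FIT_TYPE,    /* left edge fit */
        RE_FIT_TYPE,    /* right edge fit */
        NE_FIT_TYPE     /* no edge fit: splits the area in two */
};

static enum fit_type
classify_fit(unsigned long va_start, unsigned long va_end,
             unsigned long nva_start, unsigned long size)
{
        if (nva_start < va_start || nva_start + size > va_end)
                return NOTHING_FIT;     /* request not inside the area */
        if (va_start == nva_start && va_end == nva_start + size)
                return FL_FIT_TYPE;
        if (va_start == nva_start)
                return LE_FIT_TYPE;
        if (va_end == nva_start + size)
                return RE_FIT_TYPE;
        return NE_FIT_TYPE;
}

int main(void)
{
        /* 4K carved from the start of [0x1000, 0x4000): left-edge fit. */
        printf("%d\n", classify_fit(0x1000, 0x4000, 0x1000, 0x1000)
                       == LE_FIT_TYPE);
        return 0;
}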
1495 static void free_vmap_area(struct vmap_area *va) in free_vmap_area()
1515 struct vmap_area *va = NULL; in preload_this_cpu_lock()
1539 static struct vmap_area *alloc_vmap_area(unsigned long size, in alloc_vmap_area()
1544 struct vmap_area *va; in alloc_vmap_area()
1681 struct vmap_area *va, *n_va; in __purge_vmap_area_lazy()
1695 struct vmap_area, list)->va_start); in __purge_vmap_area_lazy()
1699 struct vmap_area, list)->va_end); in __purge_vmap_area_lazy()
1764 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush()
1791 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area()
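Lines 1681-1791 are the lazy-free path: free_vmap_area_noflush() queues areas instead of unmapping them one by one, and __purge_vmap_area_lazy() later flushes the TLB once over the span covering every queued area; the fragments at lines 1695 and 1699 pull that span's bounds from the first and last entries of the address-sorted purge list. A sketch computing the same spanning range without assuming any ordering:

#include <stdio.h>

struct range { unsigned long start, end; };

int main(void)
{
        struct range lazy[] = { { 0x9000, 0xa000 }, { 0x2000, 0x4000 } };
        unsigned long start = ~0UL, end = 0;
        unsigned int i;

        for (i = 0; i < sizeof(lazy) / sizeof(lazy[0]); i++) {
                if (lazy[i].start < start)
                        start = lazy[i].start;
                if (lazy[i].end > end)
                        end = lazy[i].end;
        }

        /* Stand-in for a single flush_tlb_kernel_range(start, end)
         * instead of one flush per freed area. */
        printf("flush [0x%lx, 0x%lx)\n", start, end);
        return 0;
}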
1801 struct vmap_area *find_vmap_area(unsigned long addr) in find_vmap_area()
1803 struct vmap_area *va; in find_vmap_area()
1849 struct vmap_area *va;
1902 struct vmap_area *va; in new_vmap_block()
2161 struct vmap_area *va; in vm_unmap_ram()
2211 struct vmap_area *va; in vm_map_ram()
2320 struct vmap_area *busy, *free; in vmap_init_free_space()
2359 struct vmap_area *va; in vmalloc_init()
2366 vmap_area_cachep = KMEM_CACHE(vmap_area, SLAB_PANIC); in vmalloc_init()
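The KMEM_CACHE() call at line 2366 creates the slab cache every vmap_area in this listing is allocated from; SLAB_PANIC makes boot fail loudly if that cannot be done. Per the macro's definition in <linux/slab.h>, it expands approximately to (kernel context, not standalone code):

/* Approximate expansion of KMEM_CACHE(vmap_area, SLAB_PANIC): */
vmap_area_cachep = kmem_cache_create("vmap_area",
                                     sizeof(struct vmap_area),
                                     __alignof__(struct vmap_area),
                                     SLAB_PANIC, NULL);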
2400 struct vmap_area *va, unsigned long flags, const void *caller) in setup_vmalloc_vm_locked()
2409 static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in setup_vmalloc_vm()
2433 struct vmap_area *va; in __get_vm_area_node()
2523 struct vmap_area *va; in find_vm_area()
2544 struct vmap_area *va; in remove_vm_area()
3477 struct vmap_area *va; in vread()
3636 static struct vmap_area *node_to_va(struct rb_node *n) in node_to_va()
3638 return rb_entry_safe(n, struct vmap_area, rb_node); in node_to_va()
3650 static struct vmap_area *
3653 struct vmap_area *va, *tmp; in pvm_find_va_enclose_addr()
3660 tmp = rb_entry(n, struct vmap_area, rb_node); in pvm_find_va_enclose_addr()
3686 pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align) in pvm_determine_end_from_reverse()
3733 struct vmap_area **vas, *va; in pcpu_get_vm_areas()
4051 struct vmap_area *va; in show_purge_info()
4064 struct vmap_area *va; in s_show()
4067 va = list_entry(p, struct vmap_area, list); in s_show()