Lines matching refs: vmap_area (all hits in mm/vmalloc.c)
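
For orientation: the fragments below reference the fields va_start, va_end, rb_node, list and subtree_max_size. A sketch of the structure they belong to, reconstructed from those names plus the mainline definition of this era (treat the union packing and its comment as an assumption, not something shown in the listing):

    struct vmap_area {
        unsigned long va_start;
        unsigned long va_end;

        struct rb_node rb_node;         /* address-sorted rbtree node */
        struct list_head list;          /* address-sorted list */

        /*
         * Packed, because a vmap_area object is only ever in one
         * tree at a time: the "free" tree needs subtree_max_size,
         * the "busy" tree needs the backing vm_struct.
         */
        union {
            unsigned long subtree_max_size; /* in "free" tree */
            struct vm_struct *vm;           /* in "busy" tree */
        };
    };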

760 static DEFINE_PER_CPU(struct vmap_area *, ne_fit_preload_node);
763 va_size(struct vmap_area *va) in va_size()
771 struct vmap_area *va; in get_subtree_max_size()
773 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
781 compute_subtree_max_size(struct vmap_area *va) in compute_subtree_max_size()
789 struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
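
Lines 763-789 are the augmented-rbtree plumbing for the free tree: every node caches the largest va_size() in its subtree, so a search can skip any subtree that cannot hold the request. A rough reconstruction from the fragments (the callback name passed to RB_DECLARE_CALLBACKS_MAX is not visible in the listing and is assumed; max3() comes from <linux/kernel.h>):

    static __always_inline unsigned long
    va_size(struct vmap_area *va)
    {
        return (va->va_end - va->va_start);
    }

    static __always_inline unsigned long
    get_subtree_max_size(struct rb_node *node)
    {
        struct vmap_area *va;

        va = rb_entry_safe(node, struct vmap_area, rb_node);
        return va ? va->subtree_max_size : 0;
    }

    static __always_inline unsigned long
    compute_subtree_max_size(struct vmap_area *va)
    {
        return max3(va_size(va),
            get_subtree_max_size(va->rb_node.rb_left),
            get_subtree_max_size(va->rb_node.rb_right));
    }

    RB_DECLARE_CALLBACKS_MAX(static, free_vmap_area_rb_augment_cb,
        struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)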
802 static struct vmap_area *find_vmap_area_exceed_addr(unsigned long addr) in find_vmap_area_exceed_addr()
804 struct vmap_area *va = NULL; in find_vmap_area_exceed_addr()
808 struct vmap_area *tmp; in find_vmap_area_exceed_addr()
810 tmp = rb_entry(n, struct vmap_area, rb_node); in find_vmap_area_exceed_addr()
824 static struct vmap_area *__find_vmap_area(unsigned long addr) in __find_vmap_area()
829 struct vmap_area *va; in __find_vmap_area()
831 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
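
The two lookups at lines 802-831 are ordinary binary searches over the address-sorted busy tree. __find_vmap_area() roughly reads as follows (vmap_area_root as the busy-tree root is assumed from context):

    static struct vmap_area *__find_vmap_area(unsigned long addr)
    {
        struct rb_node *n = vmap_area_root.rb_node;

        while (n) {
            struct vmap_area *va;

            va = rb_entry(n, struct vmap_area, rb_node);
            if (addr < va->va_start)
                n = n->rb_left;
            else if (addr >= va->va_end)
                n = n->rb_right;
            else
                return va; /* addr falls inside [va_start, va_end) */
        }

        return NULL;
    }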
852 find_va_links(struct vmap_area *va, in find_va_links()
856 struct vmap_area *tmp_va; in find_va_links()
875 tmp_va = rb_entry(*link, struct vmap_area, rb_node); in find_va_links()
914 list = &rb_entry(parent, struct vmap_area, rb_node)->list; in get_va_next_sibling()
919 link_va(struct vmap_area *va, struct rb_root *root, in link_va()
927 head = &rb_entry(parent, struct vmap_area, rb_node)->list; in link_va()
958 unlink_va(struct vmap_area *va, struct rb_root *root) in unlink_va()
977 struct vmap_area *va; in augment_tree_propagate_check()
1017 augment_tree_propagate_from(struct vmap_area *va) in augment_tree_propagate_from()
1032 insert_vmap_area(struct vmap_area *va, in insert_vmap_area()
1044 insert_vmap_area_augment(struct vmap_area *va, in insert_vmap_area_augment()
1073 static __always_inline struct vmap_area *
1074 merge_or_add_vmap_area(struct vmap_area *va, in merge_or_add_vmap_area()
1077 struct vmap_area *sibling; in merge_or_add_vmap_area()
1106 sibling = list_entry(next, struct vmap_area, list); in merge_or_add_vmap_area()
1127 sibling = list_entry(next->prev, struct vmap_area, list); in merge_or_add_vmap_area()
1157 static __always_inline struct vmap_area *
1158 merge_or_add_vmap_area_augment(struct vmap_area *va, in merge_or_add_vmap_area_augment()
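
Lines 1073-1158 coalesce a freed area with its address-adjacent neighbours in the sorted list before reinsertion, so the free tree never keeps two touching blocks. A heavily condensed sketch of only the merge-with-next half (unlinking, augment propagation and the merge-with-prev half omitted; next/head are the list position found via find_va_links()):

    if (next != head) {
        sibling = list_entry(next, struct vmap_area, list);
        if (sibling->va_start == va->va_end) {
            /* Grow the next neighbour downwards... */
            sibling->va_start = va->va_start;

            /* ...and drop the now redundant node. */
            kmem_cache_free(vmap_area_cachep, va);

            /* Continue with the merged area. */
            va = sibling;
            merged = true;
        }
    }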
1169 is_within_this_va(struct vmap_area *va, unsigned long size, in is_within_this_va()
1192 static __always_inline struct vmap_area *
1196 struct vmap_area *va; in find_vmap_lowest_match()
1207 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1232 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
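
find_vmap_lowest_match() (lines 1192-1232) walks the free tree, preferring the left child whenever its cached subtree_max_size can still satisfy the request, which yields the lowest suitable address; is_within_this_va() (line 1169) is the per-node fit test. The latter is small enough to sketch in full:

    static __always_inline bool
    is_within_this_va(struct vmap_area *va, unsigned long size,
        unsigned long align, unsigned long vstart)
    {
        unsigned long nva_start_addr;

        if (va->va_start > vstart)
            nva_start_addr = ALIGN(va->va_start, align);
        else
            nva_start_addr = ALIGN(vstart, align);

        /* Can be overflowed due to big size or alignment. */
        if (nva_start_addr + size < nva_start_addr ||
                nva_start_addr < vstart)
            return false;

        return (nva_start_addr + size <= va->va_end);
    }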
1251 static struct vmap_area *
1255 struct vmap_area *va; in find_vmap_lowest_linear_match()
1270 struct vmap_area *va_1, *va_2; in find_vmap_lowest_match_check()
1295 classify_va_fit_type(struct vmap_area *va, in classify_va_fit_type()
1321 adjust_va_to_fit_type(struct vmap_area *va, in adjust_va_to_fit_type()
1325 struct vmap_area *lva = NULL; in adjust_va_to_fit_type()
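
Lines 1295-1325 carve an allocation out of a free block: classify_va_fit_type() decides whether the request consumes the whole block, one of its edges, or punches a hole in the middle (the NE case, which is what the extra lva node on line 1325 and the per-CPU ne_fit_preload_node on line 760 exist for). A sketch, with the enum values assumed from mainline:

    enum fit_type {
        NOTHING_FIT = 0,
        FL_FIT_TYPE = 1,    /* full fit */
        LE_FIT_TYPE = 2,    /* left edge fit */
        RE_FIT_TYPE = 3,    /* right edge fit */
        NE_FIT_TYPE = 4     /* no edge fit */
    };

    static __always_inline enum fit_type
    classify_va_fit_type(struct vmap_area *va,
        unsigned long nva_start_addr, unsigned long size)
    {
        /* Requested range must lie inside this free block. */
        if (nva_start_addr < va->va_start ||
                nva_start_addr + size > va->va_end)
            return NOTHING_FIT;

        if (va->va_start == nva_start_addr)
            return va->va_end == nva_start_addr + size ?
                FL_FIT_TYPE : LE_FIT_TYPE;

        return va->va_end == nva_start_addr + size ?
            RE_FIT_TYPE : NE_FIT_TYPE;
    }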
1429 struct vmap_area *va; in __alloc_vmap_area()
1466 static void free_vmap_area(struct vmap_area *va) in free_vmap_area()
1486 struct vmap_area *va = NULL; in preload_this_cpu_lock()
1510 static struct vmap_area *alloc_vmap_area(unsigned long size, in alloc_vmap_area()
1515 struct vmap_area *va; in alloc_vmap_area()
1663 struct vmap_area *va, *n_va; in __purge_vmap_area_lazy()
1677 struct vmap_area, list)->va_start); in __purge_vmap_area_lazy()
1681 struct vmap_area, list)->va_end); in __purge_vmap_area_lazy()
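
The fragments at lines 1663-1681 sit in __purge_vmap_area_lazy(), which widens the TLB-flush range so it covers everything on the local list of lazily freed areas; because the list is address-sorted, the first and last entries bound the range. Roughly (the list variable name here is an assumption):

    start = min(start, list_first_entry(&local_purge_list,
            struct vmap_area, list)->va_start);

    end = max(end, list_last_entry(&local_purge_list,
            struct vmap_area, list)->va_end);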
1744 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush()
1771 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area()
1781 static struct vmap_area *find_vmap_area(unsigned long addr) in find_vmap_area()
1783 struct vmap_area *va; in find_vmap_area()
1829 struct vmap_area *va;
1882 struct vmap_area *va; in new_vmap_block()
2143 struct vmap_area *va; in vm_unmap_ram()
2193 struct vmap_area *va; in vm_map_ram()
2290 struct vmap_area *busy, *free; in vmap_init_free_space()
2329 struct vmap_area *va; in vmalloc_init()
2336 vmap_area_cachep = KMEM_CACHE(vmap_area, SLAB_PANIC); in vmalloc_init()
2370 struct vmap_area *va, unsigned long flags, const void *caller) in setup_vmalloc_vm_locked()
2379 static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in setup_vmalloc_vm()
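
setup_vmalloc_vm_locked()/setup_vmalloc_vm() (lines 2370-2379) are where a raw vmap_area gets dressed up as a vm_struct, and where the busy-tree back-pointer va->vm is set. The locked variant is short, roughly:

    static inline void setup_vmalloc_vm_locked(struct vm_struct *vm,
        struct vmap_area *va, unsigned long flags, const void *caller)
    {
        vm->flags = flags;
        vm->addr = (void *)va->va_start;
        vm->size = va->va_end - va->va_start;
        vm->caller = caller;
        va->vm = vm;
    }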
2403 struct vmap_area *va; in __get_vm_area_node()
2483 struct vmap_area *va; in find_vm_area()
2504 struct vmap_area *va; in remove_vm_area()
3316 struct vmap_area *va; in vread()
3473 static struct vmap_area *node_to_va(struct rb_node *n) in node_to_va()
3475 return rb_entry_safe(n, struct vmap_area, rb_node); in node_to_va()
3487 static struct vmap_area *
3490 struct vmap_area *va, *tmp; in pvm_find_va_enclose_addr()
3497 tmp = rb_entry(n, struct vmap_area, rb_node); in pvm_find_va_enclose_addr()
3523 pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align) in pvm_determine_end_from_reverse()
3570 struct vmap_area **vas, *va; in pcpu_get_vm_areas()
3881 struct vmap_area *va; in show_purge_info()
3894 struct vmap_area *va; in s_show()
3897 va = list_entry(p, struct vmap_area, list); in s_show()