Lines matching references to va_end
778 return (va->va_end - va->va_start); in va_size()
817 if (tmp->va_end > addr) { in find_vmap_area_exceed_addr()
842 else if (addr >= va->va_end) in __find_vmap_area()
890 if (va->va_end <= tmp_va->va_start) in find_va_links()
892 else if (va->va_start >= tmp_va->va_end) in find_va_links()
896 va->va_start, va->va_end, tmp_va->va_start, tmp_va->va_end); in find_va_links()
1153 if (sibling->va_start == va->va_end) { in __merge_or_add_vmap_area()
1174 if (sibling->va_end == va->va_start) { in __merge_or_add_vmap_area()
1185 sibling->va_end = va->va_end; in __merge_or_add_vmap_area()
1237 return (nva_start_addr + size <= va->va_end); in is_within_this_va()
1366 nva_start_addr + size > va->va_end) in classify_va_fit_type()
1371 if (va->va_end == nva_start_addr + size) in classify_va_fit_type()
1375 } else if (va->va_end == nva_start_addr + size) { in classify_va_fit_type()
1419 va->va_end = nva_start_addr; in adjust_va_to_fit_type()
1464 lva->va_end = nva_start_addr; in adjust_va_to_fit_type()
1629 va->va_end = addr + size; in alloc_vmap_area()
1639 BUG_ON(va->va_end > vend); in alloc_vmap_area()
1747 struct vmap_area, list)->va_end); in __purge_vmap_area_lazy()
1754 unsigned long nr = (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
1756 unsigned long orig_end = va->va_end; in __purge_vmap_area_lazy()
1771 va->va_start, va->va_end); in __purge_vmap_area_lazy()
1826 nr_lazy = atomic_long_add_return((va->va_end - va->va_start) >> in free_vmap_area_noflush()
1849 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
1850 vunmap_range_noflush(va->va_start, va->va_end); in free_unmap_vmap_area()
1852 flush_tlb_kernel_range(va->va_start, va->va_end); in free_unmap_vmap_area()
2363 (va->va_end - va->va_start)); in vm_unmap_ram()
2517 free->va_end = busy->va_start; in vmap_init_free_space()
2525 vmap_start = busy->va_end; in vmap_init_free_space()
2532 free->va_end = vmap_end; in vmap_init_free_space()
2546 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm_locked()
3958 if (tmp->va_end >= addr) in pvm_find_va_enclose_addr()
3989 addr = min((*va)->va_end & ~(align - 1), vmalloc_end); in pvm_determine_end_from_reverse()
4102 if (base + end > va->va_end) { in pcpu_get_vm_areas()
4153 va->va_end = start + size; in pcpu_get_vm_areas()
4196 orig_end = vas[area]->va_end; in pcpu_get_vm_areas()
4201 va->va_start, va->va_end); in pcpu_get_vm_areas()
4246 orig_end = vas[area]->va_end; in pcpu_get_vm_areas()
4251 va->va_start, va->va_end); in pcpu_get_vm_areas()
4366 (void *)va->va_start, (void *)va->va_end, in show_purge_info()
4367 va->va_end - va->va_start); in show_purge_info()
4382 (void *)va->va_start, (void *)va->va_end, in s_show()
4383 va->va_end - va->va_start); in s_show()
4485 va->va_end = va->va_start + tmp->size; in vmalloc_init()
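For context, a minimal standalone sketch (not the kernel code itself) of the half-open [va_start, va_end) convention these references rely on: va_end is exclusive, so an area's size is va_end - va_start (as in va_size()), a candidate range fits when nva_start_addr + size <= va->va_end (as in is_within_this_va()), and two areas are merge candidates when one's va_end equals the other's va_start (as in __merge_or_add_vmap_area()). The struct and helper names below are simplified stand-ins, not the real definitions.

	/* Standalone sketch, compiles with any C99 compiler. */
	#include <stdio.h>
	#include <stdbool.h>

	struct vmap_area_sketch {          /* simplified stand-in for struct vmap_area */
		unsigned long va_start;    /* first byte of the range */
		unsigned long va_end;      /* one past the last byte (exclusive) */
	};

	/* Size of a half-open [va_start, va_end) range, as va_size() computes it. */
	static unsigned long sketch_va_size(const struct vmap_area_sketch *va)
	{
		return va->va_end - va->va_start;
	}

	/* Does [nva_start_addr, nva_start_addr + size) fit inside va? */
	static bool sketch_is_within(const struct vmap_area_sketch *va,
				     unsigned long nva_start_addr, unsigned long size)
	{
		return nva_start_addr + size <= va->va_end;
	}

	/* Adjacency test used when deciding whether two areas can be merged. */
	static bool sketch_adjacent(const struct vmap_area_sketch *a,
				    const struct vmap_area_sketch *b)
	{
		return a->va_end == b->va_start;
	}

	int main(void)
	{
		struct vmap_area_sketch a = { .va_start = 0x1000, .va_end = 0x3000 };
		struct vmap_area_sketch b = { .va_start = 0x3000, .va_end = 0x5000 };

		printf("size(a)   = %lu bytes\n", sketch_va_size(&a));            /* 8192 */
		printf("fits?     = %d\n", sketch_is_within(&a, 0x2000, 0x1000)); /* 1 */
		printf("mergeable = %d\n", sketch_adjacent(&a, &b));              /* 1 */
		return 0;
	}

The exclusive upper bound is what lets the splitting references (va->va_end = nva_start_addr) and the merging reference (sibling->va_end = va->va_end) manipulate boundaries without off-by-one adjustments.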