Lines matching refs:vmi

Each entry below is a cross-reference hit for the identifier vmi, a struct vma_iterator: the source line number, the matching code, the enclosing function, and (on definition lines) whether vmi is an argument or a local there. The functions are the VMA maple-tree paths of the kernel's mmap code (apparently mm/mmap.c, around Linux 6.6). Short hedged sketches of the recurring iterator idioms follow the groups they illustrate.

151 static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi, in vma_prev_limit() argument
154 return mas_prev(&vmi->mas, min); in vma_prev_limit()
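
The hits at 151 and 154 bracket this entire helper. A reconstruction, assuming the elided lines 152-153 carry only the second parameter and the opening brace (the name min is taken from the mas_prev() call on line 154):

    static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi,
                                                        unsigned long min)
    {
            /* Step the maple-tree cursor backwards, never below min. */
            return mas_prev(&vmi->mas, min);
    }
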
176 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *brkvma,
186 struct vma_iterator vmi; in SYSCALL_DEFINE1() local
229 vma_iter_init(&vmi, mm, newbrk); in SYSCALL_DEFINE1()
230 brkvma = vma_find(&vmi, oldbrk); in SYSCALL_DEFINE1()
239 if (do_vma_munmap(&vmi, brkvma, newbrk, oldbrk, &uf, true)) in SYSCALL_DEFINE1()
252 vma_iter_init(&vmi, mm, oldbrk); in SYSCALL_DEFINE1()
253 next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap); in SYSCALL_DEFINE1()
257 brkvma = vma_prev_limit(&vmi, mm->start_brk); in SYSCALL_DEFINE1()
259 if (do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0) < 0) in SYSCALL_DEFINE1()
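
The brk() syscall body (SYSCALL_DEFINE1) re-arms a single on-stack iterator for two separate walks: at newbrk to find the mapping being shrunk (229-230), and at oldbrk to probe for a blocking mapping within the guard gap before growing in place (252-253), falling back to vma_prev_limit() to locate the brk VMA itself (257). A minimal sketch of that reuse pattern for the grow side, assuming mmap_write_lock() is held; find_growable_brk_vma() is a hypothetical name:

    /* Hypothetical helper: the VMA brk may extend in place, or NULL. */
    static struct vm_area_struct *find_growable_brk_vma(struct mm_struct *mm,
                                                        unsigned long oldbrk,
                                                        unsigned long newbrk)
    {
            struct vma_iterator vmi;

            /* Aim at oldbrk and probe up to and past the guard gap. */
            vma_iter_init(&vmi, mm, oldbrk);
            if (vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap))
                    return NULL;            /* another mapping is in the way */

            /* vma_find() moved the cursor; walk back to the brk VMA. */
            return vma_prev_limit(&vmi, mm->start_brk);
    }
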
286 VMA_ITERATOR(vmi, mm, 0); in validate_mm()
289 for_each_vma(vmi, vma) { in validate_mm()
297 vmi_start = vma_iter_addr(&vmi); in validate_mm()
298 vmi_end = vma_iter_end(&vmi); in validate_mm()
311 vma_iter_dump_tree(&vmi); in validate_mm()
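
validate_mm() is the canonical whole-address-space walk: an on-stack VMA_ITERATOR starting at 0, for_each_vma() to visit every mapping, and vma_iter_addr()/vma_iter_end() (297-298) to cross-check the range the iterator believes it is standing on. A minimal sketch of the same shape, assuming mmap_lock is held at least for reading:

    static void check_tree_agreement(struct mm_struct *mm)
    {
            struct vm_area_struct *vma;
            VMA_ITERATOR(vmi, mm, 0);

            for_each_vma(vmi, vma) {
                    /* The tree's view of each range must match the VMA's. */
                    WARN_ON_ONCE(vma_iter_addr(&vmi) != vma->vm_start);
                    WARN_ON_ONCE(vma_iter_end(&vmi) != vma->vm_end);
            }
    }
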
370 VMA_ITERATOR(vmi, mm, addr); in count_vma_pages_range()
374 for_each_vma_range(vmi, vma, end) { in count_vma_pages_range()
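
count_vma_pages_range() is the bounded variant: the iterator starts at addr and for_each_vma_range() stops once returned VMAs would begin at or beyond end. A hedged reconstruction of the loop these two hits come from (the clamping arithmetic is inferred, not quoted):

    static unsigned long pages_in_range(struct mm_struct *mm,
                                        unsigned long addr, unsigned long end)
    {
            VMA_ITERATOR(vmi, mm, addr);
            struct vm_area_struct *vma;
            unsigned long nr_pages = 0;

            for_each_vma_range(vmi, vma, end) {
                    /* Clamp each VMA to [addr, end) before counting. */
                    unsigned long vm_start = max(addr, vma->vm_start);
                    unsigned long vm_end = min(end, vma->vm_end);

                    nr_pages += PHYS_PFN(vm_end - vm_start);
            }

            return nr_pages;
    }
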
397 VMA_ITERATOR(vmi, mm, 0); in vma_link()
400 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in vma_link()
401 if (vma_iter_prealloc(&vmi, vma)) in vma_link()
406 vma_iter_store(&vmi, vma); in vma_link()
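
vma_link() introduces the write-side discipline that repeats through the rest of the file: vma_iter_config() declares exactly which range will change, vma_iter_prealloc() reserves maple-tree nodes up front so the write cannot fail at an unwindable point, and only then does vma_iter_store() publish the VMA. A condensed sketch of the tree-insertion half alone (the real function also takes file-mapping locks; these helpers live in mm/internal.h, so this assumes mm/-internal code):

    static int tree_insert_vma(struct mm_struct *mm, struct vm_area_struct *vma)
    {
            VMA_ITERATOR(vmi, mm, 0);

            vma_iter_config(&vmi, vma->vm_start, vma->vm_end);
            if (vma_iter_prealloc(&vmi, vma))
                    return -ENOMEM;         /* nothing modified yet */

            vma_iter_store(&vmi, vma);      /* cannot fail after prealloc */
            return 0;
    }
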
511 struct vma_iterator *vmi, struct mm_struct *mm) in vma_complete() argument
532 vma_iter_store(vmi, vp->insert); in vma_complete()
630 int vma_expand(struct vma_iterator *vmi, struct vm_area_struct *vma, in vma_expand() argument
657 vma_iter_config(vmi, start, end); in vma_expand()
658 if (vma_iter_prealloc(vmi, vma)) in vma_expand()
666 vma_iter_store(vmi, vma); in vma_expand()
668 vma_complete(&vp, vmi, vma->vm_mm); in vma_expand()
686 int vma_shrink(struct vma_iterator *vmi, struct vm_area_struct *vma, in vma_shrink() argument
694 vma_iter_config(vmi, vma->vm_start, start); in vma_shrink()
696 vma_iter_config(vmi, end, vma->vm_end); in vma_shrink()
698 if (vma_iter_prealloc(vmi, NULL)) in vma_shrink()
707 vma_iter_clear(vmi); in vma_shrink()
711 vma_complete(&vp, vmi, vma->vm_mm); in vma_shrink()
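
vma_expand() and vma_shrink() are mirror images of that pattern: expansion configures the iterator over the final [start, end) span and stores the grown VMA (657-666), while shrinking configures it over only the span being removed and clears it (694-707), preallocating against a NULL entry (698) because a wipe rather than a VMA will be written. A sketch of the shrink side, with the vma_prepare()/vma_complete() bracketing and page-table teardown of the real function elided:

    /* Sketch: trim the tail of @vma back to @new_end. */
    static int shrink_vma_tail(struct vma_iterator *vmi,
                               struct vm_area_struct *vma, unsigned long new_end)
    {
            /* Cover only the span being removed (the tail case, line 696). */
            vma_iter_config(vmi, new_end, vma->vm_end);
            if (vma_iter_prealloc(vmi, NULL))
                    return -ENOMEM;

            vma_iter_clear(vmi);            /* wipe the configured span */
            vma->vm_end = new_end;          /* real code locks around this */
            return 0;
    }
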
863 struct vm_area_struct *vma_merge(struct vma_iterator *vmi, struct mm_struct *mm, in vma_merge() argument
910 vma_prev(vmi); in vma_merge()
995 vma_iter_config(vmi, vma_start, vma_end); in vma_merge()
997 vma_iter_config(vmi, adjust->vm_start + adj_start, in vma_merge()
1001 if (vma_iter_prealloc(vmi, vma)) in vma_merge()
1016 vma_iter_store(vmi, vma); in vma_merge()
1023 vma_iter_store(vmi, next); in vma_merge()
1027 vma_complete(&vp, vmi, mm); in vma_merge()
1036 vma_iter_set(vmi, addr); in vma_merge()
1037 vma_iter_load(vmi); in vma_merge()
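
vma_merge() ends with the re-aim idiom at 1036-1037: whether the tree was rewritten or the merge abandoned, vma_iter_set() points the caller's iterator back at addr and vma_iter_load() re-walks so the cursor reflects the current tree. As a standalone sketch:

    /* Re-point an iterator at addr and re-walk; returns the entry
     * covering addr, or NULL if addr now falls in a gap. */
    static struct vm_area_struct *reaim_iterator(struct vma_iterator *vmi,
                                                 unsigned long addr)
    {
            vma_iter_set(vmi, addr);
            return vma_iter_load(vmi);
    }
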
2346 int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, in __split_vma() argument
2374 vma_iter_config(vmi, new->vm_start, new->vm_end); in __split_vma()
2375 if (vma_iter_prealloc(vmi, new)) in __split_vma()
2408 vma_complete(&vp, vmi, vma->vm_mm); in __split_vma()
2412 vma_next(vmi); in __split_vma()
2418 vma_iter_free(vmi); in __split_vma()
2428 int split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, in split_vma() argument
2434 return __split_vma(vmi, vma, addr, new_below); in split_vma()
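
split_vma() is a guard wrapper: it only adds a map-count ceiling check ahead of the tail call visible on line 2434. A reconstruction, with the exact check assumed rather than quoted:

    int split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
                  unsigned long addr, int new_below)
    {
            /* Splitting creates a VMA; refuse past the per-mm ceiling. */
            if (vma->vm_mm->map_count >= sysctl_max_map_count)
                    return -ENOMEM;

            return __split_vma(vmi, vma, addr, new_below);
    }
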
2452 do_vmi_align_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma, in do_vmi_align_munmap() argument
2462 mt_init_flags(&mt_detach, vmi->mas.tree->ma_flags & MT_FLAGS_LOCK_MASK); in do_vmi_align_munmap()
2484 error = __split_vma(vmi, vma, start, 1); in do_vmi_align_munmap()
2497 error = __split_vma(vmi, next, end, 0); in do_vmi_align_munmap()
2530 } for_each_vma_range(*vmi, next, end); in do_vmi_align_munmap()
2539 vma_iter_set(vmi, start); in do_vmi_align_munmap()
2542 for_each_vma_range(*vmi, vma_mas, end) { in do_vmi_align_munmap()
2552 while (vma_iter_addr(vmi) > start) in do_vmi_align_munmap()
2553 vma_iter_prev_range(vmi); in do_vmi_align_munmap()
2555 error = vma_iter_clear_gfp(vmi, start, end, GFP_KERNEL); in do_vmi_align_munmap()
2565 prev = vma_iter_prev_range(vmi); in do_vmi_align_munmap()
2566 next = vma_next(vmi); in do_vmi_align_munmap()
2568 vma_iter_prev_range(vmi); in do_vmi_align_munmap()
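
do_vmi_align_munmap() is the heaviest user in the file: it splits boundary VMAs (2484, 2497), gathers the detached ones into a side maple tree sharing the source tree's lock class (2462), then rewinds with vma_iter_set() (2539), re-walks the doomed span, and wipes [start, end) in a single ranged store via vma_iter_clear_gfp() (2555), which may itself allocate and so may fail. A condensed sketch of the rewind-and-wipe step alone, detach bookkeeping elided:

    static int wipe_unmapped_range(struct vma_iterator *vmi,
                                   unsigned long start, unsigned long end)
    {
            /* Rewind: earlier walks left the cursor at or beyond end. */
            vma_iter_set(vmi, start);

            /* Replace every entry in [start, end) with NULL in one store;
             * node allocations may be needed, hence gfp and error return. */
            return vma_iter_clear_gfp(vmi, start, end, GFP_KERNEL);
    }
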
2618 int do_vmi_munmap(struct vma_iterator *vmi, struct mm_struct *mm, in do_vmi_munmap() argument
2636 vma = vma_find(vmi, end); in do_vmi_munmap()
2643 return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, unlock); in do_vmi_munmap()
2657 VMA_ITERATOR(vmi, mm, start); in do_munmap()
2659 return do_vmi_munmap(&vmi, mm, start, len, uf, false); in do_munmap()
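
Lines 2657-2659 are essentially the whole of do_munmap(): it exists so callers that have no iterator can still reach do_vmi_munmap(). Reassembled (only the quoted lines are certain; the signature is the kernel's usual one):

    int do_munmap(struct mm_struct *mm, unsigned long start, size_t len,
                  struct list_head *uf)
    {
            VMA_ITERATOR(vmi, mm, start);

            return do_vmi_munmap(&vmi, mm, start, len, uf, false);
    }
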
2675 VMA_ITERATOR(vmi, mm, addr); in mmap_region()
2693 if (do_vmi_munmap(&vmi, mm, addr, len, uf, false)) in mmap_region()
2706 next = vma_next(&vmi); in mmap_region()
2707 prev = vma_prev(&vmi); in mmap_region()
2710 vma_iter_next_range(&vmi); in mmap_region()
2734 vma_iter_next_range(&vmi); in mmap_region()
2739 !vma_expand(&vmi, vma, merge_start, merge_end, vm_pgoff, next)) { in mmap_region()
2745 vma_iter_set(&vmi, addr); in mmap_region()
2759 vma_iter_config(&vmi, addr, end); in mmap_region()
2786 vma_iter_config(&vmi, addr, end); in mmap_region()
2792 merge = vma_merge(&vmi, mm, prev, vma->vm_start, in mmap_region()
2833 if (vma_iter_prealloc(&vmi, vma)) in mmap_region()
2838 vma_iter_store(&vmi, vma); in mmap_region()
2902 vma_iter_set(&vmi, vma->vm_end); in mmap_region()
2904 unmap_region(mm, &vmi.mas, vma, prev, next, vma->vm_start, in mmap_region()
2923 VMA_ITERATOR(vmi, mm, start); in __vm_munmap()
2928 ret = do_vmi_munmap(&vmi, mm, start, len, &uf, unlock); in __vm_munmap()
2986 VMA_ITERATOR(vmi, mm, vma->vm_end); in SYSCALL_DEFINE5()
2989 for_each_vma_range(vmi, next, start + size) { in SYSCALL_DEFINE5()
3047 int do_vma_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma, in do_vma_munmap() argument
3054 return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, unlock); in do_vma_munmap()
3069 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *vma, in do_brk_flags() argument
3096 vma_iter_config(vmi, vma->vm_start, addr + len); in do_brk_flags()
3097 if (vma_iter_prealloc(vmi, vma)) in do_brk_flags()
3107 vma_iter_store(vmi, vma); in do_brk_flags()
3109 vma_complete(&vp, vmi, mm); in do_brk_flags()
3115 vma_iter_next_range(vmi); in do_brk_flags()
3128 if (vma_iter_store_gfp(vmi, vma, GFP_KERNEL)) in do_brk_flags()
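
do_brk_flags() shows both write styles side by side: the expand-in-place path uses the config/prealloc/store sequence (3096-3107) exactly as vma_link() does, while the new-VMA path stores directly with vma_iter_store_gfp() (3128), which may allocate and must have its return value checked. A sketch of the fallible direct store, assuming @vma is fully initialized:

    static int publish_vma(struct vma_iterator *vmi, struct vm_area_struct *vma)
    {
            /* No preallocation: the store itself may allocate and fail;
             * the target range is taken from the VMA's own bounds. */
            if (vma_iter_store_gfp(vmi, vma, GFP_KERNEL))
                    return -ENOMEM;

            return 0;
    }
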
3158 VMA_ITERATOR(vmi, mm, addr); in vm_brk_flags()
3177 ret = do_vmi_munmap(&vmi, mm, addr, len, &uf, 0); in vm_brk_flags()
3181 vma = vma_prev(&vmi); in vm_brk_flags()
3182 ret = do_brk_flags(&vmi, vma, addr, len, flags); in vm_brk_flags()
3321 VMA_ITERATOR(vmi, mm, addr); in copy_vma()
3336 new_vma = vma_merge(&vmi, mm, prev, addr, addr + len, vma->vm_flags, in copy_vma()