/Linux-v5.15/mm/

nommu.c
    104   return vma->vm_end - vma->vm_start;   in kobjsize()
    433   BUG_ON(last->vm_end <= last->vm_start);   in validate_nommu_regions()
    434   BUG_ON(last->vm_top < last->vm_end);   in validate_nommu_regions()
    440   BUG_ON(region->vm_end <= region->vm_start);   in validate_nommu_regions()
    441   BUG_ON(region->vm_top < region->vm_end);   in validate_nommu_regions()
    589   } else if (vma->vm_end < pvma->vm_end)   in add_vma_to_mm()
    591   else if (vma->vm_end > pvma->vm_end) {   in add_vma_to_mm()
    681   if (vma->vm_end > addr) {   in find_vma()
    732   if (vma->vm_end == end) {   in find_vma_exact()
    947   vma->vm_region->vm_top = vma->vm_region->vm_end;   in do_mmap_shared_file()
    [all …]

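The expression that recurs in these nommu.c hits, vma->vm_end - vma->vm_start, is the byte length of a mapping; vm_end is exclusive, so kobjsize() at line 104 simply returns that difference. A minimal user-space sketch of the arithmetic (the vma_stub struct is an invented stand-in for the two fields, not the kernel's struct vm_area_struct):

    #include <stdio.h>

    /* Invented stand-in for the two fields used here; the real
     * struct vm_area_struct lives in <linux/mm_types.h>. */
    struct vma_stub {
            unsigned long vm_start;   /* first byte of the mapping */
            unsigned long vm_end;     /* one past the last byte (exclusive) */
    };

    /* Same arithmetic kobjsize() uses for a VMA-backed object. */
    static unsigned long vma_size_bytes(const struct vma_stub *vma)
    {
            return vma->vm_end - vma->vm_start;
    }

    int main(void)
    {
            struct vma_stub vma = { .vm_start = 0x1000, .vm_end = 0x5000 };

            printf("%lu bytes\n", vma_size_bytes(&vma));   /* 16384 */
            return 0;
    }
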
mmap.c
    351   if (vma->vm_start > vma->vm_end) {   in browse_rb()
    353   vma->vm_start, vma->vm_end);   in browse_rb()
    367   pend = vma->vm_end;   in browse_rb()
    545   if (vma_tmp->vm_end > addr) {   in find_vma_links()
    619   nr_pages = (min(end, vma->vm_end) -   in count_vma_pages_range()
    629   overlap_len = min(end, vma->vm_end) - vma->vm_start;   in count_vma_pages_range()
    716   if (find_vma_links(mm, vma->vm_start, vma->vm_end,   in __insert_vm_struct()
    757   if (end >= next->vm_end) {   in __vma_adjust()
    769   VM_WARN_ON(end != next->vm_end);   in __vma_adjust()
    784   remove_next = 1 + (end > next->vm_end);   in __vma_adjust()
    [all …]

mlock.c
    526   if (end != vma->vm_end) {   in mlock_fixup()
    587   tmp = vma->vm_end;   in apply_vma_lock_flags()
    594   if (nstart < prev->vm_end)   in apply_vma_lock_flags()
    595   nstart = prev->vm_end;   in apply_vma_lock_flags()
    629   if (start >= vma->vm_end)   in count_mm_mlocked_page_nr()
    636   if (start + len < vma->vm_end) {   in count_mm_mlocked_page_nr()
    640   count += vma->vm_end - vma->vm_start;   in count_mm_mlocked_page_nr()
    768   mlock_fixup(vma, &prev, vma->vm_start, vma->vm_end, newflags);   in apply_mlockall_flags()

madvise.c
    159   if (end != vma->vm_end) {   in madvise_behavior()
    720   if (range.start >= vma->vm_end)   in madvise_free_single_vma()
    722   range.end = min(vma->vm_end, end_addr);   in madvise_free_single_vma()
    801   if (end > vma->vm_end) {   in madvise_dontneed_free()
    814   end = vma->vm_end;   in madvise_dontneed_free()
    845   if (!vma || start >= vma->vm_end) {   in madvise_populate()
    851   tmp_end = min_t(unsigned long, end, vma->vm_end);   in madvise_populate()
    1197  tmp = vma->vm_end;   in do_madvise()
    1206  if (prev && start < prev->vm_end)   in do_madvise()
    1207  start = prev->vm_end;   in do_madvise()

vmacache.c
    79    if (vma->vm_start <= addr && vma->vm_end > addr) {   in vmacache_find()
    107   if (vma && vma->vm_start == start && vma->vm_end == end) {   in vmacache_find_exact()

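vmacache_find() at line 79 uses the standard containment test for the half-open range [vm_start, vm_end): the start is inclusive, the end exclusive. Sketched with the same invented vma_stub stand-in as above:

    #include <stdbool.h>

    struct vma_stub {
            unsigned long vm_start;
            unsigned long vm_end;     /* exclusive */
    };

    /* Mirrors the check in vmacache_find(): the last valid address of
     * a mapping is vm_end - 1, never vm_end itself. */
    static bool vma_contains(const struct vma_stub *vma, unsigned long addr)
    {
            return vma->vm_start <= addr && addr < vma->vm_end;
    }
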
msync.c
    90    fend = fstart + (min(end, vma->vm_end) - start) - 1;   in SYSCALL_DEFINE3()
    91    start = vma->vm_end;   in SYSCALL_DEFINE3()

mremap.c
    589   if (!err && vma->vm_end != old_addr + old_len)   in move_vma()
    648   excess = vma->vm_end - vma->vm_start - old_len;   in move_vma()
    650   old_addr + old_len < vma->vm_end)   in move_vma()
    679   vma->vm_end == (old_addr + old_len))   in move_vma()
    743   if (old_len > vma->vm_end - addr)   in vma_to_resize()
    877   unsigned long end = vma->vm_end + delta;   in vma_expandable()
    878   if (end < vma->vm_end) /* overflow */   in vma_expandable()
    992   if (old_len == vma->vm_end - addr) {   in SYSCALL_DEFINE5()

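vma_expandable(), in the hits at lines 877 and 878, computes the would-be new end and treats a result smaller than vm_end as unsigned wrap-around past the top of the address space. A standalone sketch of that overflow idiom (the function name is illustrative, not the kernel's):

    #include <stdbool.h>

    /* True when vm_end + delta does not wrap; the "end < vm_end" test
     * is exactly the overflow check used in vma_expandable(). */
    static bool grow_end_fits(unsigned long vm_end, unsigned long delta)
    {
            unsigned long end = vm_end + delta;

            return end >= vm_end;
    }
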
internal.h
    358   munlock_vma_pages_range(vma, vma->vm_start, vma->vm_end);   in munlock_vma_pages_all()
    400   if (address < vma->vm_start || address >= vma->vm_end)   in vma_address()
    427   if (address < vma->vm_start || address > vma->vm_end)   in vma_address_end()
    428   address = vma->vm_end;   in vma_address_end()

/Linux-v5.15/fs/proc/

task_nommu.c
    37    size += region->vm_end - region->vm_start;   in task_mem()
    39    size = vma->vm_end - vma->vm_start;   in task_mem()
    48    slack = region->vm_end - vma->vm_end;   in task_mem()
    92    vsize += vma->vm_end - vma->vm_start;   in task_vsize()
    114   size += region->vm_end - region->vm_start;   in task_statm()
    139   vma->vm_end >= mm->start_stack;   in is_stack()
    168   vma->vm_end,   in nommu_vma_show()

/Linux-v5.15/mm/damon/

vaddr-test.h
    35    gap = vmas[j].vm_start - vmas[j - 1].vm_end;   in __link_vmas()
    78    (struct vm_area_struct) {.vm_start = 10, .vm_end = 20},   in damon_test_three_regions_in_vmas()
    79    (struct vm_area_struct) {.vm_start = 20, .vm_end = 25},   in damon_test_three_regions_in_vmas()
    80    (struct vm_area_struct) {.vm_start = 200, .vm_end = 210},   in damon_test_three_regions_in_vmas()
    81    (struct vm_area_struct) {.vm_start = 210, .vm_end = 220},   in damon_test_three_regions_in_vmas()
    82    (struct vm_area_struct) {.vm_start = 300, .vm_end = 305},   in damon_test_three_regions_in_vmas()
    83    (struct vm_area_struct) {.vm_start = 307, .vm_end = 330},   in damon_test_three_regions_in_vmas()

/Linux-v5.15/drivers/gpu/drm/

drm_vm.c
    78    if (efi_range_is_wc(vma->vm_start, vma->vm_end -   in drm_io_prot()
    240   vma->vm_start, vma->vm_end - vma->vm_start);   in drm_vm_shm_close()
    395   vma->vm_start, vma->vm_end - vma->vm_start);   in drm_vm_open_locked()
    421   vma->vm_start, vma->vm_end - vma->vm_start);   in drm_vm_close_locked()
    465   unsigned long length = vma->vm_end - vma->vm_start;   in drm_mmap_dma()
    470   vma->vm_start, vma->vm_end, vma->vm_pgoff);   in drm_mmap_dma()
    532   vma->vm_start, vma->vm_end, vma->vm_pgoff);   in drm_mmap_locked()
    559   if (map->size < vma->vm_end - vma->vm_start)   in drm_mmap_locked()
    600   vma->vm_end - vma->vm_start,   in drm_mmap_locked()
    606   vma->vm_start, vma->vm_end, (unsigned long long)(map->offset + offset));   in drm_mmap_locked()
    [all …]

/Linux-v5.15/scripts/coccinelle/api/

vma_pages.cocci
    22    * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
    32    - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
    44    (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT

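This semantic patch flags open-coded page counts and rewrites them to vma_pages(), a helper in include/linux/mm.h that performs exactly this shift. A user-space sketch of the equivalence, assuming 4 KiB pages (PAGE_SHIFT = 12); the vma_stub struct is again an invented stand-in:

    #include <stdio.h>

    #define PAGE_SHIFT 12   /* assumption: 4 KiB pages */

    struct vma_stub {
            unsigned long vm_start;
            unsigned long vm_end;
    };

    /* User-space copy of the kernel's vma_pages() helper: the number of
     * pages spanned by [vm_start, vm_end). */
    static unsigned long vma_pages(const struct vma_stub *vma)
    {
            return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;
    }

    int main(void)
    {
            struct vma_stub vma = { .vm_start = 0x1000, .vm_end = 0x6000 };

            /* The open-coded form the rule warns about ... */
            unsigned long open_coded = (vma.vm_end - vma.vm_start) >> PAGE_SHIFT;

            /* ... and the helper call it suggests instead. */
            printf("%lu == %lu\n", open_coded, vma_pages(&vma));   /* 5 == 5 */
            return 0;
    }
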
/Linux-v5.15/arch/csky/include/asm/

tlb.h
    11    flush_cache_range(vma, (vma)->vm_start, (vma)->vm_end); \
    17    flush_tlb_range(vma, (vma)->vm_start, (vma)->vm_end); \

/Linux-v5.15/drivers/char/

mspec.c
    76    unsigned long vm_end; /* Original (unsplit) end. */   member
    114   last_index = (vdata->vm_end - vdata->vm_start) >> PAGE_SHIFT;   in mspec_close()
    203   vdata->vm_end = vma->vm_end;   in mspec_mmap()

/Linux-v5.15/arch/x86/um/

mem_32.c
    18    gate_vma.vm_end = FIXADDR_USER_END;   in gate_vma_init()
    49    return (addr >= vma->vm_start) && (addr < vma->vm_end);   in in_gate_area()

/Linux-v5.15/drivers/media/v4l2-core/

videobuf-vmalloc.c
    56    map->count, vma->vm_start, vma->vm_end);   in videobuf_vm_open()
    68    map->count, vma->vm_start, vma->vm_end);   in videobuf_vm_close()
    233   pages = PAGE_ALIGN(vma->vm_end - vma->vm_start);   in __videobuf_mmap_mapper()
    254   map, q, vma->vm_start, vma->vm_end,   in __videobuf_mmap_mapper()

videobuf-dma-contig.c
    68    map, map->count, vma->vm_start, vma->vm_end);   in videobuf_vm_open()
    80    map, map->count, vma->vm_start, vma->vm_end);   in videobuf_vm_close()
    178   if ((untagged_baddr + mem->size) > vma->vm_end)   in videobuf_dma_contig_user_get()
    327   map, q, vma->vm_start, vma->vm_end,   in __videobuf_mmap_mapper()

/Linux-v5.15/drivers/media/common/videobuf2/

videobuf2-memops.c
    94    vma->vm_end);   in vb2_common_vm_open()
    112   vma->vm_end);   in vb2_common_vm_close()

frame_vector.c
    71    while (ret < nr_frames && start + PAGE_SIZE <= vma->vm_end) {   in get_vaddr_frames()
    82    if (start < vma->vm_end)   in get_vaddr_frames()

/Linux-v5.15/drivers/soc/qcom/

rmtfs_mem.c
    136   if (vma->vm_end - vma->vm_start > rmtfs_mem->size) {   in qcom_rmtfs_mem_mmap()
    139   vma->vm_end, vma->vm_start,   in qcom_rmtfs_mem_mmap()
    140   (vma->vm_end - vma->vm_start), &rmtfs_mem->size);   in qcom_rmtfs_mem_mmap()
    148   vma->vm_end - vma->vm_start,   in qcom_rmtfs_mem_mmap()

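qcom_rmtfs_mem_mmap() follows the usual driver mmap pattern: reject the request if the span vma->vm_end - vma->vm_start exceeds the backing region, otherwise remap the whole span. A hedged kernel-style sketch of that shape (it assumes kernel headers; the my_region struct and its fields are invented, and only the vm_start/vm_end arithmetic and the remap_pfn_range() call reflect the pattern in the hits above):

    #include <linux/errno.h>
    #include <linux/mm.h>

    /* Invented device state, not taken from rmtfs_mem.c. */
    struct my_region {
            phys_addr_t base;   /* start of the reserved memory */
            size_t size;        /* length of that memory */
    };

    static int my_region_mmap(struct my_region *r, struct vm_area_struct *vma)
    {
            unsigned long len = vma->vm_end - vma->vm_start;

            /* Refuse mappings larger than the backing region. */
            if (len > r->size)
                    return -EINVAL;

            /* Map the region's pages into [vm_start, vm_end). */
            return remap_pfn_range(vma, vma->vm_start, r->base >> PAGE_SHIFT,
                                   len, vma->vm_page_prot);
    }
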
/Linux-v5.15/arch/parisc/kernel/

cache.c
    524   usize += vma->vm_end - vma->vm_start;   in mm_total_size()
    563   flush_user_dcache_range_asm(vma->vm_start, vma->vm_end);   in flush_cache_mm()
    565   flush_user_icache_range_asm(vma->vm_start, vma->vm_end);   in flush_cache_mm()
    566   flush_tlb_range(vma, vma->vm_start, vma->vm_end);   in flush_cache_mm()
    575   for (addr = vma->vm_start; addr < vma->vm_end;   in flush_cache_mm()
    617   for (addr = vma->vm_start; addr < vma->vm_end; addr += PAGE_SIZE) {   in flush_cache_range()

/Linux-v5.15/drivers/dax/

device.c
    35    if (vma->vm_start & mask || vma->vm_end & mask) {   in check_vma()
    38    current->comm, func, vma->vm_start, vma->vm_end,   in check_vma()
    131   (pmd_addr + PMD_SIZE) > vmf->vma->vm_end)   in __dev_dax_pmd_fault()
    173   (pud_addr + PUD_SIZE) > vmf->vma->vm_end)   in __dev_dax_pud_fault()
    207   vmf->vma->vm_start, vmf->vma->vm_end, pe_size);   in dev_dax_huge_fault()

/Linux-v5.15/include/trace/events/

fs_dax.h
    17    __field(unsigned long, vm_end)
    30    __entry->vm_end = vmf->vma->vm_end;
    47    __entry->vm_end,

/Linux-v5.15/fs/

exec.c
    271   vma->vm_end = STACK_TOP_MAX;   in __bprm_mm_init()
    272   vma->vm_start = vma->vm_end - PAGE_SIZE;   in __bprm_mm_init()
    282   bprm->p = vma->vm_end - sizeof(void *);   in __bprm_mm_init()
    680   unsigned long old_end = vma->vm_end;   in shift_arg_pages()
    766   if (vma->vm_end - vma->vm_start > stack_base)   in setup_arg_pages()
    773   bprm->p = vma->vm_end - stack_shift;   in setup_arg_pages()
    779   unlikely(vma->vm_end - vma->vm_start >= stack_top - mmap_min_addr))   in setup_arg_pages()
    782   stack_shift = vma->vm_end - stack_top;   in setup_arg_pages()
    809   ret = mprotect_fixup(vma, &prev, vma->vm_start, vma->vm_end,   in setup_arg_pages()
    831   stack_size = vma->vm_end - vma->vm_start;   in setup_arg_pages()
    [all …]

/Linux-v5.15/arch/powerpc/kvm/

book3s_hv_uvmem.c
    409   ret = ksm_madvise(vma, vma->vm_start, vma->vm_end,   in kvmppc_memslot_page_merge()
    415   start = vma->vm_end;   in kvmppc_memslot_page_merge()
    416   } while (end > vma->vm_end);   in kvmppc_memslot_page_merge()
    617   if (!vma || addr >= vma->vm_end) {   in kvmppc_uvmem_drop_pages()
    802   if (!vma || vma->vm_start > start || vma->vm_end < end)   in kvmppc_uv_migrate_mem_slot()
    963   if (!vma || vma->vm_start > start || vma->vm_end < end)   in kvmppc_h_svm_page_in()
    1063  if (!vma || vma->vm_start > start || vma->vm_end < end)   in kvmppc_h_svm_page_out()

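The last three hits (lines 802, 963, 1063) all bail out unless a single VMA covers the whole requested [start, end) range, that is, unless vm_start <= start and vm_end >= end. The same test, stated positively, in a small standalone sketch using the invented vma_stub stand-in:

    #include <stdbool.h>
    #include <stddef.h>

    struct vma_stub {
            unsigned long vm_start;
            unsigned long vm_end;     /* exclusive */
    };

    /* True when one VMA spans all of [start, end); the kernel checks the
     * negation: !vma || vma->vm_start > start || vma->vm_end < end. */
    static bool vma_covers_range(const struct vma_stub *vma,
                                 unsigned long start, unsigned long end)
    {
            return vma != NULL && vma->vm_start <= start && vma->vm_end >= end;
    }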