Lines matching "last" (full-word match)

65 prange, prange->start, prange->last); in svm_range_unlink()
74 if (prange->it_node.start != 0 && prange->it_node.last != 0) in svm_range_unlink()
82 prange, prange->start, prange->last); in svm_range_add_notifier_locked()
101 prange, prange->start, prange->last); in svm_range_add_to_svms()
105 prange->it_node.last = prange->last; in svm_range_add_to_svms()
114 prange->notifier.interval_tree.last >> PAGE_SHIFT); in svm_range_remove_notifier()
117 prange->notifier.interval_tree.last != 0) in svm_range_remove_notifier()
256 prange->start, prange->last); in svm_range_free()
278 uint64_t last) in svm_range_new() argument
280 uint64_t size = last - start + 1; in svm_range_new()
290 prange->last = last; in svm_range_new()
312 pr_debug("svms 0x%p [0x%llx 0x%llx]\n", svms, start, last); in svm_range_new()
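The svm_range_new() fragments above compute size = last - start + 1, so start and last describe an inclusive page range. A minimal user-space sketch of that convention; the helper name is illustrative, not part of the driver:

#include <stdint.h>

/* Inclusive page range [start, last]: the count includes both endpoints. */
static inline uint64_t range_npages(uint64_t start, uint64_t last)
{
        return last - start + 1;        /* e.g. [0x100, 0x1ff] holds 0x100 pages */
}

Keeping the range inclusive is why the +1/-1 adjustments recur throughout the rest of this listing.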
342 prange->start, prange->last); in svm_range_bo_release()
424 prange->svms, prange->start, prange->last); in svm_range_validate_svm_bo()
474 prange->start, prange->last); in svm_range_vram_node_new()
786 uint64_t start, uint64_t last) in svm_range_split_pages() argument
788 uint64_t npages = last - start + 1; in svm_range_split_pages()
804 uint64_t start, uint64_t last) in svm_range_split_nodes() argument
806 uint64_t npages = last - start + 1; in svm_range_split_nodes()
809 new->svms, new, new->start, start, last); in svm_range_split_nodes()
834 * @last: the old range adjust to last address in pages
838 * start to last
845 uint64_t start, uint64_t last) in svm_range_split_adjust() argument
850 new->svms, new->start, old->start, old->last, start, last); in svm_range_split_adjust()
853 new->last > old->last) { in svm_range_split_adjust()
854 WARN_ONCE(1, "invalid new range start or last\n"); in svm_range_split_adjust()
858 r = svm_range_split_pages(new, old, start, last); in svm_range_split_adjust()
863 r = svm_range_split_nodes(new, old, start, last); in svm_range_split_adjust()
868 old->npages = last - start + 1; in svm_range_split_adjust()
870 old->last = last; in svm_range_split_adjust()
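Read together, the svm_range_split_adjust() fragments describe two steps: reject a new piece that falls outside the old range, then shrink the old range to the inclusive [start, last] remainder. A hedged sketch of just that bookkeeping, with an illustrative struct and without the page/VRAM work delegated to svm_range_split_pages()/svm_range_split_nodes(); the containment check is reconstructed from the visible "new->last > old->last" fragment:

#include <errno.h>
#include <stdint.h>

struct range { uint64_t start, last, npages; };         /* illustrative, not the driver type */

static int range_split_adjust(struct range *new, struct range *old,
                              uint64_t start, uint64_t last)
{
        /* the new piece must stay inside the old range */
        if (new->start < old->start || new->last > old->last)
                return -EINVAL;

        /* shrink the old range to the inclusive remainder [start, last] */
        old->npages = last - start + 1;
        old->start = start;
        old->last = last;
        return 0;
}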
887 * @last: the remaining range last address in pages
892 * prange ==> prange[start, last]
893 * new range [last + 1, prange->last]
895 * case 2: if last == prange->last
896 * prange ==> prange[start, last]
900 * 0 - OK, -ENOMEM - out of memory, -EINVAL - invalid start, last
903 svm_range_split(struct svm_range *prange, uint64_t start, uint64_t last, in svm_range_split() argument
907 uint64_t old_last = prange->last; in svm_range_split()
912 old_start, old_last, start, last); in svm_range_split()
914 if (old_start != start && old_last != last) in svm_range_split()
916 if (start < old_start || last > old_last) in svm_range_split()
921 *new = svm_range_new(svms, last + 1, old_last); in svm_range_split()
927 r = svm_range_split_adjust(*new, prange, start, last); in svm_range_split()
930 r, old_start, old_last, start, last); in svm_range_split()
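The kernel-doc fragments above spell out the split contract: exactly one end of prange may move, the request must stay inside the old bounds, and the piece that is cut off becomes the new range. A compact sketch of those two cases; the function and variable names are illustrative:

#include <errno.h>
#include <stdint.h>

static int range_split(uint64_t old_start, uint64_t old_last,
                       uint64_t start, uint64_t last,
                       uint64_t *new_start, uint64_t *new_last)
{
        if (old_start != start && old_last != last)
                return -EINVAL;         /* only one end may change */
        if (start < old_start || last > old_last)
                return -EINVAL;         /* must stay inside the old range */

        if (old_start == start) {       /* case 1: the tail is split off */
                *new_start = last + 1;
                *new_last = old_last;
        } else {                        /* case 2: the head is split off */
                *new_start = old_start;
                *new_last = start - 1;
        }
        return 0;
}

svm_range_split_head() at line 955 below is one such caller: it keeps [new_start, prange->last] and splits the head off into a separate range.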
955 int r = svm_range_split(prange, new_start, prange->last, &head); in svm_range_split_head()
967 pchild, pchild->start, pchild->last, prange, op); in svm_range_add_child()
997 unsigned long start, last, size; in svm_range_split_by_granularity() local
1006 last = ALIGN(addr + 1, size) - 1; in svm_range_split_by_granularity()
1009 prange->svms, prange->start, prange->last, start, last, size); in svm_range_split_by_granularity()
1012 r = svm_range_split(prange, start, prange->last, &head); in svm_range_split_by_granularity()
1018 if (last < prange->last) { in svm_range_split_by_granularity()
1019 r = svm_range_split(prange, prange->start, last, &tail); in svm_range_split_by_granularity()
1029 prange, prange->start, prange->last, in svm_range_split_by_granularity()
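svm_range_split_by_granularity() rounds the faulting address out to a granularity-sized block: the visible fragment computes the inclusive upper bound as ALIGN(addr + 1, size) - 1. A sketch of that arithmetic for a power-of-two size in pages; the lower bound is assumed to be the matching ALIGN_DOWN, since that line contains no "last" and is not in this listing:

#include <stdint.h>

#define ALIGN_DOWN(x, a)        ((x) & ~((uint64_t)(a) - 1))
#define ALIGN_UP(x, a)          ALIGN_DOWN((x) + (a) - 1, a)

static void granularity_block(uint64_t addr, uint64_t size,
                              uint64_t *start, uint64_t *last)
{
        *start = ALIGN_DOWN(addr, size);        /* assumed lower bound */
        *last = ALIGN_UP(addr + 1, size) - 1;   /* as in the fragment above */
}

For example, with size = 512 pages and addr = 0x305, the block is [0x200, 0x3ff].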
1106 uint64_t start, uint64_t last, in svm_range_unmap_from_gpu() argument
1111 pr_debug("[0x%llx 0x%llx]\n", start, last); in svm_range_unmap_from_gpu()
1114 start, last, init_pte_value, 0, in svm_range_unmap_from_gpu()
1120 unsigned long last) in svm_range_unmap_from_gpus() argument
1144 start, last, &fence); in svm_range_unmap_from_gpus()
1479 end = (prange->last + 1) << PAGE_SHIFT; in svm_range_validate_and_map()
1610 prange->svms, prange, prange->start, prange->last, in svm_range_restore_work()
1674 unsigned long start, unsigned long last) in svm_range_evict() argument
1684 svms, prange->start, prange->last, start, last); in svm_range_evict()
1691 if (pchild->start <= last && pchild->last >= start) { in svm_range_evict()
1693 pchild->start, pchild->last); in svm_range_evict()
1699 if (prange->start <= last && prange->last >= start) in svm_range_evict()
1707 prange->svms, prange->start, prange->last); in svm_range_evict()
1721 prange->svms, start, last); in svm_range_evict()
1725 l = min(last, pchild->last); in svm_range_evict()
1731 l = min(last, prange->last); in svm_range_evict()
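The eviction fragments use the standard inclusive-interval overlap test (a->start <= b->last && a->last >= b->start) and then clamp the invalidated window to each range before acting on it. A small sketch of both; the max() side of the clamp is assumed, since only the min(last, ...) lines match this listing:

#include <stdbool.h>
#include <stdint.h>

static bool ranges_overlap(uint64_t a_start, uint64_t a_last,
                           uint64_t b_start, uint64_t b_last)
{
        return a_start <= b_last && a_last >= b_start;
}

static void clamp_to_range(uint64_t start, uint64_t last,
                           uint64_t r_start, uint64_t r_last,
                           uint64_t *s, uint64_t *l)
{
        *s = start > r_start ? start : r_start; /* assumed: max(start, range start) */
        *l = last < r_last ? last : r_last;     /* as in l = min(last, ...) above */
}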
1743 new = svm_range_new(old->svms, old->start, old->last); in svm_range_clone()
1771 * @last: range last address, in pages
1796 unsigned long start, unsigned long last, in svm_range_handle_overlap() argument
1811 node = interval_tree_iter_first(&svms->objects, start, last); in svm_range_handle_overlap()
1818 node->last); in svm_range_handle_overlap()
1821 next = interval_tree_iter_next(node, start, last); in svm_range_handle_overlap()
1822 next_start = min(node->last, last) + 1; in svm_range_handle_overlap()
1824 if (node->start < start || node->last > last) { in svm_range_handle_overlap()
1842 if (node->last > last) { in svm_range_handle_overlap()
1843 pr_debug("change old range last\n"); in svm_range_handle_overlap()
1844 r = svm_range_split_tail(prange, new, last, in svm_range_handle_overlap()
1850 /* The node is contained within start..last, in svm_range_handle_overlap()
1876 if (left && start <= last) in svm_range_handle_overlap()
1877 *left = last - start + 1; in svm_range_handle_overlap()
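In svm_range_handle_overlap() the request [start, last] is walked against the interval tree; after each overlapping node the cursor advances to min(node->last, last) + 1, and whatever is still uncovered at the end is reported back through *left. A user-space sketch of that tail bookkeeping over a sorted array standing in for the tree; gaps between nodes are handled elsewhere and ignored here:

#include <stdint.h>

struct node { uint64_t start, last; };          /* stand-in for the tree nodes */

static uint64_t uncovered_tail(const struct node *nodes, int n,
                               uint64_t start, uint64_t last)
{
        for (int i = 0; i < n && start <= last; i++) {
                uint64_t node_last = nodes[i].last < last ? nodes[i].last : last;

                start = node_last + 1;          /* next_start = min(node->last, last) + 1 */
        }
        return start <= last ? last - start + 1 : 0;    /* pages left uncovered */
}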
1892 unsigned long last; in svm_range_update_notifier_and_interval_tree() local
1895 last = prange->notifier.interval_tree.last >> PAGE_SHIFT; in svm_range_update_notifier_and_interval_tree()
1897 if (prange->start == start && prange->last == last) in svm_range_update_notifier_and_interval_tree()
1901 prange->svms, prange, start, last, prange->start, in svm_range_update_notifier_and_interval_tree()
1902 prange->last); in svm_range_update_notifier_and_interval_tree()
1904 if (start != 0 && last != 0) { in svm_range_update_notifier_and_interval_tree()
1909 prange->it_node.last = prange->last; in svm_range_update_notifier_and_interval_tree()
1923 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
1927 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
1934 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
1939 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
1945 prange->start, prange->last); in svm_range_handle_list_op()
1951 prange, prange->start, prange->last); in svm_range_handle_list_op()
1999 prange->start, prange->last, prange->work_item.op); in svm_range_deferred_list_work()
2060 prange, prange->start, prange->last, op); in svm_range_add_list_work()
2076 unsigned long last) in svm_range_unmap_split() argument
2083 prange->start, prange->last); in svm_range_unmap_split()
2086 if (start > prange->last || last < prange->start) in svm_range_unmap_split()
2092 if (last < tail->last) in svm_range_unmap_split()
2093 svm_range_split(tail, last + 1, tail->last, &head); in svm_range_unmap_split()
2109 unsigned long start, unsigned long last) in svm_range_unmap_from_cpu() argument
2123 prange, prange->start, prange->last, start, last); in svm_range_unmap_from_cpu()
2125 unmap_parent = start <= prange->start && last >= prange->last; in svm_range_unmap_from_cpu()
2130 l = min(last, pchild->last); in svm_range_unmap_from_cpu()
2133 svm_range_unmap_split(mm, prange, pchild, start, last); in svm_range_unmap_from_cpu()
2137 l = min(last, prange->last); in svm_range_unmap_from_cpu()
2140 svm_range_unmap_split(mm, prange, prange, start, last); in svm_range_unmap_from_cpu()
2176 unsigned long last; in svm_range_cpu_invalidate_pagetables() local
2182 last = mni->interval_tree.last; in svm_range_cpu_invalidate_pagetables()
2184 last = (last < (range->end - 1) ? last : range->end - 1) >> PAGE_SHIFT; in svm_range_cpu_invalidate_pagetables()
2186 start, last, range->start >> PAGE_SHIFT, in svm_range_cpu_invalidate_pagetables()
2189 mni->interval_tree.last >> PAGE_SHIFT, range->event); in svm_range_cpu_invalidate_pagetables()
2198 svm_range_unmap_from_cpu(mni->mm, prange, start, last); in svm_range_cpu_invalidate_pagetables()
2201 svm_range_evict(prange, mni->mm, start, last); in svm_range_cpu_invalidate_pagetables()
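The MMU-notifier callback clamps the registered interval (inclusive byte addresses) to the invalidated [range->start, range->end) window and converts the result to inclusive page numbers. A sketch of that clamping; the symmetric handling of start is assumed because only the last computation matches this listing, and PAGE_SHIFT is hard-coded for the example:

#include <stdint.h>

#define PAGE_SHIFT      12      /* assumed 4 KiB pages */

static void clamp_invalidate(uint64_t it_start, uint64_t it_last,
                             uint64_t range_start, uint64_t range_end,
                             uint64_t *start_pfn, uint64_t *last_pfn)
{
        uint64_t start = it_start > range_start ? it_start : range_start;      /* assumed */
        uint64_t last = it_last < range_end - 1 ? it_last : range_end - 1;     /* as above */

        *start_pfn = start >> PAGE_SHIFT;
        *last_pfn = last >> PAGE_SHIFT;
}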
2234 addr, prange->start, prange->last, node->start, node->last); in svm_range_from_addr()
2236 if (addr >= prange->start && addr <= prange->last) { in svm_range_from_addr()
2242 if (addr >= pchild->start && addr <= pchild->last) { in svm_range_from_addr()
2244 addr, pchild->start, pchild->last); in svm_range_from_addr()
2313 unsigned long *start, unsigned long *last) in svm_range_get_range_boundaries() argument
2332 /* Last range that ends before the fault address */ in svm_range_get_range_boundaries()
2336 /* Last range must end before addr because in svm_range_get_range_boundaries()
2343 if (node->last >= addr) { in svm_range_get_range_boundaries()
2347 start_limit = max(start_limit, node->last + 1); in svm_range_get_range_boundaries()
2351 *last = end_limit - 1; in svm_range_get_range_boundaries()
2353 pr_debug("vma start: 0x%lx start: 0x%lx vma end: 0x%lx last: 0x%lx\n", in svm_range_get_range_boundaries()
2355 vma->vm_end >> PAGE_SHIFT, *last); in svm_range_get_range_boundaries()
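svm_range_get_range_boundaries() derives the boundaries of an unregistered block around the fault address: the last registered range that ends before the address pushes the start limit past its last page, and the exclusive end limit becomes the inclusive last as end_limit - 1. A sketch of just that arithmetic, with illustrative parameter names:

#include <stdint.h>

static void range_boundaries(uint64_t start_limit, uint64_t end_limit,
                             uint64_t prev_last, uint64_t *start, uint64_t *last)
{
        if (prev_last + 1 > start_limit)
                start_limit = prev_last + 1;    /* max(start_limit, node->last + 1) */

        *start = start_limit;
        *last = end_limit - 1;                  /* end_limit is exclusive */
}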
2367 unsigned long start, last; in svm_range_create_unregistered_range() local
2370 if (svm_range_get_range_boundaries(p, addr, &start, &last)) in svm_range_create_unregistered_range()
2373 prange = svm_range_new(&p->svms, start, last); in svm_range_create_unregistered_range()
2416 svms, prange, prange->start, prange->last); in svm_range_skip_recover()
2422 svms, prange, prange->start, prange->last); in svm_range_skip_recover()
2556 svms, prange->start, prange->last); in svm_range_restore_pages()
2570 svms, prange->start, prange->last); in svm_range_restore_pages()
2576 svms, prange->start, prange->last, best_loc, in svm_range_restore_pages()
2598 r, svms, prange->start, prange->last); in svm_range_restore_pages()
2606 r, svms, prange->start, prange->last); in svm_range_restore_pages()
2731 uint64_t last = start + size - 1UL; in svm_range_add() local
2738 pr_debug("svms 0x%p [0x%llx 0x%llx]\n", &p->svms, start, last); in svm_range_add()
2744 r = svm_range_handle_overlap(svms, &new, start, last, update_list, in svm_range_add()
2750 prange = svm_range_new(svms, last - left + 1, last); in svm_range_add()
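svm_range_add() converts a request of size pages starting at start into the inclusive range [start, start + size - 1], and if the overlap handling reports left uncovered pages they form the tail [last - left + 1, last]. Two one-line helpers restating that arithmetic; the names are illustrative:

#include <stdint.h>

static inline uint64_t request_last(uint64_t start, uint64_t size)
{
        return start + size - 1;        /* inclusive last page of the request */
}

static inline uint64_t tail_start(uint64_t last, uint64_t left)
{
        return last - left + 1;         /* first page of the uncovered tail */
}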
2832 p->xnack_enabled, &p->svms, prange->start, prange->last, in svm_range_best_prefetch_location()
2954 prange->start, prange->last); in svm_range_evict_svm_bo_worker()
2971 /* This is the last reference to svm_bo, after svm_range_vram_node_free in svm_range_evict_svm_bo_worker()
2974 WARN_ONCE(kref_read(&svm_bo->kref) != 1, "This was not the last reference\n"); in svm_range_evict_svm_bo_worker()
3035 prange->last); in svm_range_set_attr()
3097 uint64_t last = start + size - 1UL; in svm_range_get_attr() local
3160 node = interval_tree_iter_first(&svms->objects, start, last); in svm_range_get_attr()
3181 next = interval_tree_iter_next(node, start, last); in svm_range_get_attr()