Lines matching refs:bo_va in drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c. Each entry lists the source line number, the matching line, the enclosing function, and, where bo_va is declared on that line, whether it is a function argument or a local variable.

1772 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va,  in amdgpu_vm_bo_update()  argument
1775 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1776 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1796 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1827 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1829 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1830 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1831 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1833 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1834 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1837 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1854 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1856 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1858 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1861 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1862 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1865 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
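The entries above (lines 1772-1865) show how amdgpu_vm_bo_update() shuffles mappings between the two per-bo_va lists: anything that still needs a page-table write sits on invalids, and once written it is spliced back onto valids. The user-space sketch below mirrors that bookkeeping with sys/queue.h lists; the bo_va/mapping structs, the splice_mappings() helper and the update_mapping() stub are illustrative stand-ins, not amdgpu code.

#include <stdbool.h>
#include <stdio.h>
#include <sys/queue.h>

struct mapping {
    unsigned long start, last;          /* VA range covered by the mapping */
    TAILQ_ENTRY(mapping) list;
};

TAILQ_HEAD(mapping_list, mapping);

struct bo_va {                          /* simplified stand-in for struct amdgpu_bo_va */
    struct mapping_list valids, invalids;
    bool moved;                         /* backing BO moved since the last update */
    bool cleared;                       /* last update only cleared the PTEs */
};

/* Move every entry from "from" to the tail of "to", emptying "from";
 * roughly what list_splice_init() does in the kernel. */
static void splice_mappings(struct mapping_list *to, struct mapping_list *from)
{
    while (!TAILQ_EMPTY(from)) {
        struct mapping *m = TAILQ_FIRST(from);

        TAILQ_REMOVE(from, m, list);
        TAILQ_INSERT_TAIL(to, m, list);
    }
}

/* Stand-in for writing one mapping into the page tables. */
static void update_mapping(struct mapping *m, bool clear)
{
    printf("update 0x%lx-0x%lx clear=%d\n", m->start, m->last, clear);
}

static void bo_va_update(struct bo_va *bo_va, bool clear)
{
    struct mapping *m;

    if (!clear && bo_va->moved) {
        /* The BO moved: every currently valid mapping must be rewritten. */
        bo_va->moved = false;
        splice_mappings(&bo_va->invalids, &bo_va->valids);
    } else if (bo_va->cleared != clear) {
        /* Switching between clearing and filling PTEs also invalidates all. */
        splice_mappings(&bo_va->invalids, &bo_va->valids);
    }

    TAILQ_FOREACH(m, &bo_va->invalids, list)
        update_mapping(m, clear);

    /* Everything on invalids is now up to date; move it back to valids. */
    splice_mappings(&bo_va->valids, &bo_va->invalids);
    bo_va->cleared = clear;
}

int main(void)
{
    struct bo_va v = { .moved = true };
    struct mapping m = { .start = 0x1000, .last = 0x1fff };

    TAILQ_INIT(&v.valids);
    TAILQ_INIT(&v.invalids);
    TAILQ_INSERT_TAIL(&v.valids, &m, list);

    bo_va_update(&v, false);            /* rewrites the mapping because moved is set */
    return 0;
}

The cleared field plays the same role as bo_va->cleared in the driver: flipping between clearing and filling page-table entries forces every mapping back through the invalids list.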
2085 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_handle_moved() local
2090 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_vm_handle_moved()
2092 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_vm_handle_moved()
2099 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
2101 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
2111 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
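amdgpu_vm_handle_moved() (lines 2085-2111 above) walks two lists of bo_vas: entries on vm->moved share the VM's reservation and can be updated directly, while entries on vm->invalidated are updated with the clear flag when their own reservation cannot be taken right now. Below is a rough user-space sketch of that two-pass loop, with the reservation test reduced to a plain boolean and all types invented for illustration; it is not the amdgpu locking scheme.

#include <stdbool.h>
#include <stdio.h>
#include <sys/queue.h>

struct bo_va {                          /* simplified stand-in for struct amdgpu_bo_va */
    const char *name;
    bool resv_lockable;                 /* can the BO's reservation be taken now? */
    TAILQ_ENTRY(bo_va) vm_status;
};

TAILQ_HEAD(bo_va_list, bo_va);

/* Stand-in for amdgpu_vm_bo_update(): "clear" means the stale page-table
 * entries are only zapped, not filled with new addresses. */
static void bo_va_update(struct bo_va *bo_va, bool clear)
{
    printf("%s: update, clear=%d\n", bo_va->name, clear);
}

static void handle_moved(struct bo_va_list *moved, struct bo_va_list *invalidated)
{
    struct bo_va *bo_va;

    /* Per-VM BOs share the VM reservation, so update them in place. */
    while (!TAILQ_EMPTY(moved)) {
        bo_va = TAILQ_FIRST(moved);
        TAILQ_REMOVE(moved, bo_va, vm_status);
        bo_va_update(bo_va, false);
    }

    /* Independently reserved BOs: if their reservation is unavailable,
     * only clear the stale entries for now. */
    while (!TAILQ_EMPTY(invalidated)) {
        bo_va = TAILQ_FIRST(invalidated);
        TAILQ_REMOVE(invalidated, bo_va, vm_status);
        bo_va_update(bo_va, !bo_va->resv_lockable);
    }
}

int main(void)
{
    struct bo_va a = { .name = "per-vm bo" };
    struct bo_va b = { .name = "shared bo", .resv_lockable = false };
    struct bo_va_list moved, invalidated;

    TAILQ_INIT(&moved);
    TAILQ_INIT(&invalidated);
    TAILQ_INSERT_TAIL(&moved, &a, vm_status);
    TAILQ_INSERT_TAIL(&invalidated, &b, vm_status);

    handle_moved(&moved, &invalidated);
    return 0;
}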
2143 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
2145 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
2146 if (bo_va == NULL) { in amdgpu_vm_bo_add()
2149 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
2151 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
2152 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
2153 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
2156 return bo_va; in amdgpu_vm_bo_add()
2159 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
2164 return bo_va; in amdgpu_vm_bo_add()
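amdgpu_vm_bo_add() (lines 2143-2164 above) is the constructor side of the structure this listing references: zero-allocate, take one reference, and start both mapping lists empty so new mappings land on invalids first. A minimal stand-alone sketch of that pattern, with calloc() standing in for kzalloc(..., GFP_KERNEL) and every type simplified for illustration:

#include <stdlib.h>
#include <sys/queue.h>

struct mapping {
    TAILQ_ENTRY(mapping) list;
};

TAILQ_HEAD(mapping_list, mapping);

struct bo_va {                          /* simplified amdgpu_bo_va */
    void *vm;                           /* owning VM      */
    void *bo;                           /* backing buffer */
    unsigned int ref_count;
    struct mapping_list valids, invalids;
};

static struct bo_va *bo_va_add(void *vm, void *bo)
{
    struct bo_va *bo_va = calloc(1, sizeof(*bo_va));

    if (!bo_va)
        return NULL;                    /* the driver reports -ENOMEM here */

    bo_va->vm = vm;
    bo_va->bo = bo;
    bo_va->ref_count = 1;               /* caller holds the initial reference */
    TAILQ_INIT(&bo_va->valids);
    TAILQ_INIT(&bo_va->invalids);
    return bo_va;
}

int main(void)
{
    struct bo_va *v = bo_va_add(NULL, NULL);

    free(v);
    return 0;
}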
2178 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
2181 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
2182 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
2184 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
2185 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
2192 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
2193 list_move(&bo_va->base.vm_status, &vm->moved); in amdgpu_vm_bo_insert_map()
2195 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
2216 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
2221 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
2222 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
2258 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
2282 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
2287 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
2308 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
2322 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
2342 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
2346 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
2351 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
2356 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
2359 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
2364 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
2370 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
2371 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
2377 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
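amdgpu_vm_bo_unmap() (lines 2342-2377 above) looks the requested start address up in valids first and falls back to invalids before unlinking the mapping and clearing its bo_va back-pointer. The sketch below shows that two-list lookup with simplified types; the immediate free() stands in for the driver's deferred freeing against bo_va->last_pt_update.

#include <errno.h>
#include <stdlib.h>
#include <sys/queue.h>

struct bo_va;

struct mapping {
    unsigned long start;
    struct bo_va *bo_va;                /* back-pointer, cleared on unmap */
    TAILQ_ENTRY(mapping) list;
};

TAILQ_HEAD(mapping_list, mapping);

struct bo_va {                          /* simplified amdgpu_bo_va */
    struct mapping_list valids, invalids;
};

static int bo_va_unmap(struct bo_va *bo_va, unsigned long saddr)
{
    struct mapping_list *head = &bo_va->valids;
    struct mapping *m;

    /* Search the mappings already written to the page tables first,
     * then the ones still pending on invalids. */
    TAILQ_FOREACH(m, &bo_va->valids, list)
        if (m->start == saddr)
            goto found;

    head = &bo_va->invalids;
    TAILQ_FOREACH(m, &bo_va->invalids, list)
        if (m->start == saddr)
            goto found;

    return -ENOENT;

found:
    TAILQ_REMOVE(head, m, list);
    m->bo_va = NULL;                    /* detach the back-pointer */
    free(m);                            /* driver defers this until the last
                                         * page-table update has signalled */
    return 0;
}

int main(void)
{
    struct bo_va v;
    struct mapping *m = calloc(1, sizeof(*m));

    if (!m)
        return 1;

    TAILQ_INIT(&v.valids);
    TAILQ_INIT(&v.invalids);
    m->start = 0x1000;
    m->bo_va = &v;
    TAILQ_INSERT_TAIL(&v.valids, m, list);

    return bo_va_unmap(&v, 0x1000) ? 1 : 0;
}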
2429 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2430 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2440 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2441 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2460 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
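The before/after entries created in amdgpu_vm_bo_clear_mappings() (lines 2429-2460 above) are the remnants of mappings that only partially overlap the range being cleared; they inherit the original bo_va and are queued on its invalids list. The range arithmetic behind that split, with hypothetical types and no list handling:

#include <stdio.h>

struct range { unsigned long start, last; };    /* inclusive VA range */

/* Split "map" against a cleared range [saddr, eaddr].  Whatever sticks out
 * in front survives as *before, whatever sticks out behind as *after; a
 * result with start > last means "no remnant on that side". */
static void split_mapping(struct range map, unsigned long saddr,
                          unsigned long eaddr, struct range *before,
                          struct range *after)
{
    before->start = 1;
    before->last = 0;
    after->start = 1;
    after->last = 0;

    if (map.start < saddr) {
        before->start = map.start;
        before->last = saddr - 1;
    }
    if (map.last > eaddr) {
        after->start = eaddr + 1;
        after->last = map.last;
    }
}

int main(void)
{
    struct range before, after;
    struct range map = { 0x1000, 0x4fff };

    /* Clearing 0x2000-0x2fff should leave 0x1000-0x1fff and 0x3000-0x4fff. */
    split_mapping(map, 0x2000, 0x2fff, &before, &after);

    if (before.start <= before.last)
        printf("before: 0x%lx-0x%lx\n", before.start, before.last);
    if (after.start <= after.last)
        printf("after:  0x%lx-0x%lx\n", after.start, after.last);
    return 0;
}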
2521 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2524 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
2545 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_rmv() argument
2548 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_rmv()
2549 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_rmv()
2556 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_rmv()
2558 if (*base != &bo_va->base) in amdgpu_vm_bo_rmv()
2561 *base = bo_va->base.next; in amdgpu_vm_bo_rmv()
2567 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_rmv()
2570 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_rmv()
2573 mapping->bo_va = NULL; in amdgpu_vm_bo_rmv()
2574 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_rmv()
2577 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_rmv()
2581 bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2584 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2586 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_rmv()
2589 kfree(bo_va); in amdgpu_vm_bo_rmv()
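amdgpu_vm_bo_rmv() (lines 2545-2589 above) unlinks the bo_va from the BO's singly linked vm_bo chain with a pointer-to-pointer walk, so the head of the chain needs no special casing, and then tears down both mapping lists. A self-contained sketch of just that unlink idiom, with the structs reduced to what the loop needs:

#include <stdio.h>

struct vm_bo_base {                     /* simplified amdgpu_vm_bo_base */
    struct vm_bo_base *next;            /* singly linked per-BO chain */
    const char *owner;                  /* which VM this entry belongs to */
};

struct bo {                             /* simplified amdgpu_bo */
    struct vm_bo_base *vm_bo;           /* head of the chain */
};

/* Unlink "victim" from bo->vm_bo without a prev pointer: walk with a
 * pointer-to-pointer so head and middle entries are handled identically. */
static void bo_base_unlink(struct bo *bo, struct vm_bo_base *victim)
{
    struct vm_bo_base **base;

    for (base = &bo->vm_bo; *base; base = &(*base)->next) {
        if (*base != victim)
            continue;
        *base = victim->next;
        break;
    }
}

int main(void)
{
    struct vm_bo_base a = { .owner = "vm0" }, b = { .owner = "vm1" };
    struct bo bo = { .vm_bo = &a };
    struct vm_bo_base *cur;

    a.next = &b;
    bo_base_unlink(&bo, &a);            /* remove the head entry */

    for (cur = bo.vm_bo; cur; cur = cur->next)
        printf("%s\n", cur->owner);     /* prints only "vm1" */
    return 0;
}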