Lines matching refs:bo_va (cross-references to the bo_va identifier in drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c; the leading number on each line is the source line, the trailing annotation names the enclosing function)
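The references below all fall into ten functions of that file. For orientation, here is a sketch of the struct amdgpu_bo_va fields they touch, reconstructed from the listing itself and from the amdgpu_vm.h of the same kernel era; treat field order and comments as approximate rather than an authoritative copy:

    struct amdgpu_bo_va {
            struct amdgpu_vm_bo_base base;     /* ->bo, ->vm, ->moved, ->vm_status, ->next */
            unsigned ref_count;                /* map/unmap reference count (line 2044) */
            struct dma_fence *last_pt_update;  /* fence of the last page-table update */
            struct list_head valids;           /* mappings already written to the page tables */
            struct list_head invalids;         /* mappings still waiting for a PT update */
            bool cleared;                      /* last update cleared rather than filled PTEs */
            bool is_xgmi;                      /* BO is mapped across XGMI (line 2050) */
    };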
1676 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
1679 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1680 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1716 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1718 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1719 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1720 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1722 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1723 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1726 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1748 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1750 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1752 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1755 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1756 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1759 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
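The splices at lines 1720, 1723 and 1755 form a small two-list state machine: if the BO has moved (and this is a real update rather than a clear), or if the requested clear state differs from the one recorded in bo_va->cleared, every currently valid mapping is pushed back onto invalids so its PTEs get rewritten; after a successful update everything returns to valids. A condensed sketch reassembled from the lines above, with the per-mapping PTE update and error handling elided:

    if (!clear && bo_va->base.moved) {
            bo_va->base.moved = false;
            list_splice_init(&bo_va->valids, &bo_va->invalids);
    } else if (bo_va->cleared != clear) {
            list_splice_init(&bo_va->valids, &bo_va->invalids);
    }

    list_for_each_entry(mapping, &bo_va->invalids, list) {
            /* page-table update for this mapping happens here */
    }

    list_splice_init(&bo_va->invalids, &bo_va->valids);
    bo_va->cleared = clear;

The calls at 1748-1752 then park the bo_va on the matching per-VM status list: evicted when the BO sits outside its preferred domain, idle when it shares the VM's reservation and is placed correctly, done otherwise.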
1978 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_handle_moved() local
1983 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_vm_handle_moved()
1985 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_vm_handle_moved()
1992 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1994 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
2004 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
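amdgpu_vm_handle_moved() runs in two phases: first the per-VM moved list, whose BOs share the VM root reservation and are updated with clear = false outright (line 1985); then the invalidated list, where each BO carries its own reservation object (line 1994) that must be tried first. Roughly, per entry of the second phase (a sketch, assuming the dma_resv_trylock() based logic of this kernel era; the real loop also holds a spinlock while picking entries off vm->invalidated):

    resv = bo_va->base.bo->tbo.base.resv;
    if (dma_resv_trylock(resv))
            clear = false;    /* got the lock, PTEs can be filled in */
    else
            clear = true;     /* BO busy elsewhere, only clear its PTEs */
    r = amdgpu_vm_bo_update(adev, bo_va, clear);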
2036 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
2038 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
2039 if (bo_va == NULL) { in amdgpu_vm_bo_add()
2042 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
2044 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
2045 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
2046 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
2050 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
2058 return bo_va; in amdgpu_vm_bo_add()
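amdgpu_vm_bo_add() is the constructor for this bookkeeping: a zeroed allocation, base initialization against the (vm, bo) pair, a reference count of one, and empty valids/invalids lists. Reassembled from the lines above, error path condensed and the XGMI condition paraphrased:

    bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL);
    if (bo_va == NULL)
            return NULL;

    amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

    bo_va->ref_count = 1;
    INIT_LIST_HEAD(&bo_va->valids);
    INIT_LIST_HEAD(&bo_va->invalids);

    /* guarded in the real code by an XGMI-accessibility check on bo */
    bo_va->is_xgmi = true;

    return bo_va;

Because the allocation uses kzalloc(), every field not listed here starts out zero or NULL.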
2072 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
2075 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
2076 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
2078 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
2079 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
2086 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
2087 list_move(&bo_va->base.vm_status, &vm->moved); in amdgpu_vm_bo_insert_map()
2089 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
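A freshly inserted mapping always starts life on the invalids list (line 2079) and only migrates to valids once amdgpu_vm_bo_update() has written its PTEs. The condition around line 2086 additionally moves the bo_va onto vm->moved when the BO shares the VM's reservation object and is not already marked moved, so the next amdgpu_vm_handle_moved() pass picks the new mapping up. Roughly (the reservation-sharing test is paraphrased from memory of this code):

    mapping->bo_va = bo_va;
    list_add(&mapping->list, &bo_va->invalids);

    if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv &&
        !bo_va->base.moved)
            list_move(&bo_va->base.vm_status, &vm->moved);

    trace_amdgpu_vm_bo_map(bo_va, mapping);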
2110 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
2115 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
2116 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
2151 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
2175 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
2180 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
2200 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
2214 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
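amdgpu_vm_bo_map() and amdgpu_vm_bo_replace_map() share the tail call into amdgpu_vm_bo_insert_map() (lines 2151 and 2214) and differ in their handling of overlaps: _map rejects a range that collides with an existing mapping, while _replace_map first clears whatever is in the way through amdgpu_vm_bo_clear_mappings() (line 2200) and then inserts. A hypothetical caller-side illustration (real callers pick one flavour up front, since -EINVAL also covers plain invalid arguments):

    /* strict: refuse to touch an already-mapped range */
    r = amdgpu_vm_bo_map(adev, bo_va, saddr, offset, size, flags);
    if (r == -EINVAL)
            /* permissive: evict whatever overlaps, then map */
            r = amdgpu_vm_bo_replace_map(adev, bo_va, saddr, offset,
                                         size, flags);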
2234 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
2238 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
2243 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
2248 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
2251 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
2256 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
2262 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
2263 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
2269 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
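The unmap path searches valids first (line 2243) and falls back to invalids (line 2251); the tests at 2248 and 2256 rely on the list_for_each_entry() idiom that a loop which runs to completion leaves its cursor pointing back at the list head. A minimal sketch of that idiom, match condition paraphrased:

    list_for_each_entry(mapping, &bo_va->valids, list) {
            if (mapping->start == saddr)
                    break;
    }

    if (&mapping->list == &bo_va->valids) {
            /* not found among valids: retry on invalids, else -ENOENT */
    }

A mapping removed from valids still has live PTEs, which is why line 2269 passes bo_va->last_pt_update along when freeing it: the eventual clear has to be ordered after the update that wrote those PTEs.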
2321 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2322 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2332 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2333 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2352 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
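amdgpu_vm_bo_clear_mappings() clears an address range that may cut through existing mappings, so up to two remainders, one before and one after the cleared range, are rebuilt and re-attached to the original mapping's bo_va as invalid mappings (lines 2321-2322 and 2332-2333); mappings that fall entirely inside the range are detached by nulling their bo_va instead (line 2352). Roughly, for the leading remainder (the field copies besides bo_va are reconstructed from memory and may differ in detail):

    if (tmp->start < saddr) {
            before->start = tmp->start;
            before->last = saddr - 1;
            before->offset = tmp->offset;
            before->flags = tmp->flags;
            before->bo_va = tmp->bo_va;
            list_add(&before->list, &tmp->bo_va->invalids);
    }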
2413 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2416 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
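The double check at line 2413 exists because a mapping can outlive its bo_va attachment: the bo_va pointer is nulled on unmap (2262), on range clears (2352) and during teardown (2465), and a bo_va may also exist without a BO behind it. Both pointers therefore get tested before the BO is traced:

    if (mapping->bo_va && mapping->bo_va->base.bo) {
            bo = mapping->bo_va->base.bo;
            /* ... trace this BO for the command submission ... */
    }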
2437 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_rmv() argument
2440 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_rmv()
2441 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_rmv()
2448 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_rmv()
2450 if (*base != &bo_va->base) in amdgpu_vm_bo_rmv()
2453 *base = bo_va->base.next; in amdgpu_vm_bo_rmv()
2459 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_rmv()
2462 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_rmv()
2465 mapping->bo_va = NULL; in amdgpu_vm_bo_rmv()
2466 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_rmv()
2469 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_rmv()
2473 bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2476 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2478 if (bo && bo_va->is_xgmi) { in amdgpu_vm_bo_rmv()
2485 kfree(bo_va); in amdgpu_vm_bo_rmv()
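Teardown in amdgpu_vm_bo_rmv() undoes all of the above: the bo_va is unlinked from the BO's singly linked vm_bo chain (2448-2453), taken off the VM status lists (2459), and its mappings are freed, valid ones queued so their now-stale PTEs get cleared (2462-2466), invalid ones freed against bo_va->last_pt_update (2469-2473). The unlink is the classic indirect-pointer walk and is worth spelling out, since it removes a node without keeping a prev pointer:

    struct amdgpu_vm_bo_base **base;

    for (base = &bo_va->base.bo->vm_bo; *base;
         base = &(*base)->next) {
            if (*base != &bo_va->base)
                    continue;

            *base = bo_va->base.next;    /* splice ourselves out */
            break;
    }

Line 2478 pairs with the is_xgmi setup in amdgpu_vm_bo_add(), dropping the XGMI state taken there, and 2485 frees the structure itself.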