Lines Matching refs:bo_va

802 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_find() local
804 list_for_each_entry(bo_va, &bo->va, base.bo_list) { in amdgpu_vm_bo_find()
805 if (bo_va->base.vm == vm) { in amdgpu_vm_bo_find()
806 return bo_va; in amdgpu_vm_bo_find()
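
The refs at 802-806 cover amdgpu_vm_bo_find() (all of these functions live in amdgpu_vm.c) almost completely: it walks the BO's list of per-VM bo_va structs and returns the one belonging to the requested VM. A minimal sketch, with only the signature and the trailing return NULL assumed beyond the fragments shown:

struct amdgpu_bo_va *amdgpu_vm_bo_find(struct amdgpu_vm *vm,
                                       struct amdgpu_bo *bo)
{
        struct amdgpu_bo_va *bo_va;

        /* A BO carries one bo_va per VM that maps it; pick ours. */
        list_for_each_entry(bo_va, &bo->va, base.bo_list) {
                if (bo_va->base.vm == vm) {
                        return bo_va;
                }
        }
        return NULL;    /* assumed: BO is not mapped into this VM */
}
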
1638 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
1641 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1642 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1675 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1677 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1678 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1679 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1681 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1682 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1685 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1700 list_del_init(&bo_va->base.vm_status); in amdgpu_vm_bo_update()
1711 list_add_tail(&bo_va->base.vm_status, &vm->evicted); in amdgpu_vm_bo_update()
1713 list_add(&bo_va->base.vm_status, &vm->idle); in amdgpu_vm_bo_update()
1716 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1717 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1720 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
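
The amdgpu_vm_bo_update() refs (1638-1720) show the valids/invalids bookkeeping: any mapping that may have stale PTEs is spliced onto invalids, rewritten, then spliced back to valids. A hedged sketch of that control flow; the PTE-writing step and the locking are elided, and bo_in_preferred_domain() is a hypothetical stand-in for the domain check that decides between the evicted and idle lists:

int amdgpu_vm_bo_update(struct amdgpu_device *adev,
                        struct amdgpu_bo_va *bo_va, bool clear)
{
        struct amdgpu_bo *bo = bo_va->base.bo;
        struct amdgpu_vm *vm = bo_va->base.vm;
        struct amdgpu_bo_va_mapping *mapping;
        struct dma_fence **last_update;

        /* elided: gather the BO's memory flags and page address array */

        last_update = &bo_va->last_pt_update;   /* a BO sharing the root
                                                 * PD's resv would use
                                                 * &vm->last_update */

        if (!clear && bo_va->base.moved) {
                /* The BO moved: every previously valid mapping is stale. */
                bo_va->base.moved = false;
                list_splice_init(&bo_va->valids, &bo_va->invalids);
        } else if (bo_va->cleared != clear) {
                /* Switching between real and cleared PTEs: same deal. */
                list_splice_init(&bo_va->valids, &bo_va->invalids);
        }

        list_for_each_entry(mapping, &bo_va->invalids, list) {
                /* elided: rewrite the PTEs of @mapping, fencing the
                 * update through *last_update */
        }

        list_del_init(&bo_va->base.vm_status);  /* under the status lock */
        if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv) {
                if (!bo_in_preferred_domain(bo))        /* hypothetical */
                        list_add_tail(&bo_va->base.vm_status, &vm->evicted);
                else
                        list_add(&bo_va->base.vm_status, &vm->idle);
        }

        list_splice_init(&bo_va->invalids, &bo_va->valids);
        bo_va->cleared = clear;

        list_for_each_entry(mapping, &bo_va->valids, list)
                trace_amdgpu_vm_bo_mapping(mapping);

        return 0;
}
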
1938 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_handle_moved() local
1948 list_for_each_entry_safe(bo_va, tmp, &moved, base.vm_status) { in amdgpu_vm_handle_moved()
1949 struct reservation_object *resv = bo_va->base.bo->tbo.resv; in amdgpu_vm_handle_moved()
1961 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
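
amdgpu_vm_handle_moved() (1938-1961) drains the VM's moved list and revalidates each bo_va; whether the old PTEs must be cleared first depends on whose reservation object the BO uses. A sketch, assuming the usual splice-under-lock pattern and the vm->moved_lock name:

int amdgpu_vm_handle_moved(struct amdgpu_device *adev,
                           struct amdgpu_vm *vm)
{
        struct amdgpu_bo_va *bo_va, *tmp;
        struct list_head moved;
        bool clear;
        int r;

        INIT_LIST_HEAD(&moved);
        spin_lock(&vm->moved_lock);             /* assumed lock name */
        list_splice_init(&vm->moved, &moved);
        spin_unlock(&vm->moved_lock);

        list_for_each_entry_safe(bo_va, tmp, &moved, base.vm_status) {
                struct reservation_object *resv = bo_va->base.bo->tbo.resv;

                /* Per-VM BOs share the root PD's resv and never need a
                 * clear; other BOs avoid it only if we can trylock them. */
                if (resv == vm->root.base.bo->tbo.resv)
                        clear = false;
                else if (reservation_object_trylock(resv))
                        clear = false;
                else
                        clear = true;

                r = amdgpu_vm_bo_update(adev, bo_va, clear);

                if (!clear && resv != vm->root.base.bo->tbo.resv)
                        reservation_object_unlock(resv);
                if (r)
                        return r;       /* assumed: unwinding elided */
        }
        return 0;
}
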
1996 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
1998 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
1999 if (bo_va == NULL) { in amdgpu_vm_bo_add()
2002 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
2004 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
2005 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
2006 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
2008 return bo_va; in amdgpu_vm_bo_add()
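
The refs at 1996-2008 cover amdgpu_vm_bo_add() nearly line for line: allocate the bo_va, hook it to the VM and BO via the common base, and start with a reference count of one and empty mapping lists. Filled out with only the signature and error path assumed:

struct amdgpu_bo_va *amdgpu_vm_bo_add(struct amdgpu_device *adev,
                                      struct amdgpu_vm *vm,
                                      struct amdgpu_bo *bo)
{
        struct amdgpu_bo_va *bo_va;

        bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL);
        if (bo_va == NULL) {
                return NULL;
        }
        amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

        bo_va->ref_count = 1;
        INIT_LIST_HEAD(&bo_va->valids);
        INIT_LIST_HEAD(&bo_va->invalids);

        return bo_va;
}
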
2022 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
2025 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
2026 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
2028 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
2029 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
2036 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
2038 list_move(&bo_va->base.vm_status, &vm->moved); in amdgpu_vm_bo_insert_map()
2041 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
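
amdgpu_vm_bo_insert_map() (2022-2041) links a new mapping into the bo_va's invalids list and, for a per-VM BO that is not already flagged as moved, moves the bo_va onto the VM's moved list so the next update picks it up. A sketch; the interval-tree insert is assumed from context:

static void amdgpu_vm_bo_insert_map(struct amdgpu_device *adev,
                                    struct amdgpu_bo_va *bo_va,
                                    struct amdgpu_bo_va_mapping *mapping)
{
        struct amdgpu_vm *vm = bo_va->base.vm;
        struct amdgpu_bo *bo = bo_va->base.bo;

        mapping->bo_va = bo_va;
        list_add(&mapping->list, &bo_va->invalids);
        amdgpu_vm_it_insert(mapping, &vm->va);  /* assumed: VA interval tree */

        /* Per-VM BO whose PTEs were up to date: queue it for an update. */
        if (bo && bo->tbo.resv == vm->root.base.bo->tbo.resv &&
            !bo_va->base.moved) {
                list_move(&bo_va->base.vm_status, &vm->moved);
        }
        trace_amdgpu_vm_bo_map(bo_va, mapping);
}
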
2062 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
2067 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
2068 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
2103 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
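
amdgpu_vm_bo_map() (2062-2103) validates the requested range, rejects overlaps with existing mappings, then builds the mapping and hands it to amdgpu_vm_bo_insert_map(). A sketch, with the page-alignment checks assumed from the usual pattern:

int amdgpu_vm_bo_map(struct amdgpu_device *adev,
                     struct amdgpu_bo_va *bo_va,
                     uint64_t saddr, uint64_t offset,
                     uint64_t size, uint64_t flags)
{
        struct amdgpu_bo_va_mapping *mapping, *tmp;
        struct amdgpu_bo *bo = bo_va->base.bo;
        struct amdgpu_vm *vm = bo_va->base.vm;
        uint64_t eaddr;

        /* assumed: GPU-page alignment checks on saddr/offset/size elided */
        eaddr = saddr + size - 1;
        if (saddr >= eaddr || (bo && offset + size > amdgpu_bo_size(bo)))
                return -EINVAL;

        saddr /= AMDGPU_GPU_PAGE_SIZE;
        eaddr /= AMDGPU_GPU_PAGE_SIZE;

        tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr);
        if (tmp)
                return -EINVAL; /* range overlaps an existing mapping */

        mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);
        if (!mapping)
                return -ENOMEM;

        mapping->start = saddr;
        mapping->last = eaddr;
        mapping->offset = offset;
        mapping->flags = flags;

        amdgpu_vm_bo_insert_map(adev, bo_va, mapping);

        return 0;
}
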
2127 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
2132 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
2152 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
2166 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
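
amdgpu_vm_bo_replace_map() (2127-2166) differs from plain map in one step: it allocates the mapping first and then clears whatever already occupies the range, so an allocation failure leaves the old mappings intact. The core ordering, as a fragment with the validation and field setup elided:

        mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);
        if (!mapping)
                return -ENOMEM;

        /* Evict anything already mapped in [saddr, saddr + size). */
        r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size);
        if (r) {
                kfree(mapping);
                return r;
        }

        /* elided: fill mapping->start/last/offset/flags as in bo_map */
        amdgpu_vm_bo_insert_map(adev, bo_va, mapping);
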
2186 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
2190 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
2195 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
2200 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
2203 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
2208 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
2214 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
2215 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
2221 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
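
amdgpu_vm_bo_unmap() (2186-2221) searches the valids first, then the invalids. A still-valid mapping has live PTEs, so it is parked on vm->freed for a later clear; an invalid one can be freed once the last page-table update fence signals. Reconstructed around the fragments, with the saddr page conversion assumed:

int amdgpu_vm_bo_unmap(struct amdgpu_device *adev,
                       struct amdgpu_bo_va *bo_va,
                       uint64_t saddr)
{
        struct amdgpu_bo_va_mapping *mapping;
        struct amdgpu_vm *vm = bo_va->base.vm;
        bool valid = true;

        saddr /= AMDGPU_GPU_PAGE_SIZE;  /* assumed */

        list_for_each_entry(mapping, &bo_va->valids, list) {
                if (mapping->start == saddr)
                        break;
        }

        if (&mapping->list == &bo_va->valids) {
                valid = false;

                list_for_each_entry(mapping, &bo_va->invalids, list) {
                        if (mapping->start == saddr)
                                break;
                }

                if (&mapping->list == &bo_va->invalids)
                        return -ENOENT;
        }

        list_del(&mapping->list);
        amdgpu_vm_it_remove(mapping, &vm->va);
        mapping->bo_va = NULL;
        trace_amdgpu_vm_bo_unmap(bo_va, mapping);

        if (valid)
                list_add(&mapping->list, &vm->freed);
        else
                amdgpu_vm_free_mapping(adev, vm, mapping,
                                       bo_va->last_pt_update);

        return 0;
}
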
2273 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2274 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2284 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2285 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2304 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
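
In amdgpu_vm_bo_clear_mappings() (2273-2304) the refs show the split case: when a mapping straddles the cleared range, the surviving before/after remainders inherit tmp->bo_va and are re-linked into that bo_va's invalids, while a fully covered mapping is detached and queued for PTE clearing. The relevant fragment, with the setup of the before/after remainders assumed:

        if (tmp->start < saddr) {
                /* assumed: before->start/last/offset/flags copied
                 * from tmp beforehand */
                before->bo_va = tmp->bo_va;
                list_add(&before->list, &tmp->bo_va->invalids);
        }

        if (tmp->last > eaddr) {
                after->bo_va = tmp->bo_va;
                list_add(&after->list, &tmp->bo_va->invalids);
        }

        /* The covered piece itself no longer belongs to any bo_va. */
        tmp->bo_va = NULL;
        /* assumed: tmp then moves to vm->freed for PTE clearing */
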
2365 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2368 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
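
amdgpu_vm_bo_trace_cs() (2365-2368) walks every mapping in the VM's interval tree at command-submission time; the guard shown skips mappings whose backing BO is not locked by the current submission. A sketch, with the tree iteration and the ticket check assumed:

void amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm,
                           struct ww_acquire_ctx *ticket)
{
        struct amdgpu_bo_va_mapping *mapping;

        for (mapping = amdgpu_vm_it_iter_first(&vm->va, 0, U64_MAX); mapping;
             mapping = amdgpu_vm_it_iter_next(mapping, 0, U64_MAX)) {
                if (mapping->bo_va && mapping->bo_va->base.bo) {
                        struct amdgpu_bo *bo;

                        bo = mapping->bo_va->base.bo;
                        /* assumed: only BOs held by this CS are traced */
                        if (READ_ONCE(bo->tbo.resv->lock.ctx) != ticket)
                                continue;
                }
                trace_amdgpu_vm_bo_cs(mapping);
        }
}
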
2388 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_rmv() argument
2391 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_rmv()
2393 list_del(&bo_va->base.bo_list); in amdgpu_vm_bo_rmv()
2396 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_rmv()
2399 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_rmv()
2402 mapping->bo_va = NULL; in amdgpu_vm_bo_rmv()
2403 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_rmv()
2406 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_rmv()
2410 bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2413 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2414 kfree(bo_va); in amdgpu_vm_bo_rmv()
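
amdgpu_vm_bo_rmv() (2388-2414) is the teardown counterpart: unlink the bo_va from its BO and from the VM status list, push still-valid mappings onto vm->freed so their PTEs get cleared, free the invalid ones outright against the last page-table fence, then drop the fence and the struct itself. Reconstructed around the fragments; the status-list lock name is assumed:

void amdgpu_vm_bo_rmv(struct amdgpu_device *adev,
                      struct amdgpu_bo_va *bo_va)
{
        struct amdgpu_bo_va_mapping *mapping, *next;
        struct amdgpu_vm *vm = bo_va->base.vm;

        list_del(&bo_va->base.bo_list);

        spin_lock(&vm->moved_lock);             /* assumed lock name */
        list_del(&bo_va->base.vm_status);
        spin_unlock(&vm->moved_lock);

        list_for_each_entry_safe(mapping, next, &bo_va->valids, list) {
                list_del(&mapping->list);
                amdgpu_vm_it_remove(mapping, &vm->va);
                mapping->bo_va = NULL;
                trace_amdgpu_vm_bo_unmap(bo_va, mapping);
                list_add(&mapping->list, &vm->freed);
        }
        list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) {
                list_del(&mapping->list);
                amdgpu_vm_it_remove(mapping, &vm->va);
                amdgpu_vm_free_mapping(adev, vm, mapping,
                                       bo_va->last_pt_update);
        }

        dma_fence_put(bo_va->last_pt_update);
        kfree(bo_va);
}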