Lines Matching refs:tbo (all hits below are in the amdgpu GPU VM code, drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c)

229 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_evicted()
331 if (bo->tbo.base.resv != vm->root.base.bo->tbo.base.resv) in amdgpu_vm_bo_base_init()
335 if (bo->tbo.type == ttm_bo_type_kernel && bo->parent) in amdgpu_vm_bo_base_init()
341 amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type)) in amdgpu_vm_bo_base_init()
589 entry->tv.bo = &vm->root.base.bo->tbo; in amdgpu_vm_get_pd_bo()
621 if (abo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) in amdgpu_vm_del_from_lru_notify()
656 ttm_bo_move_to_lru_tail(&bo->tbo, &vm->lru_bulk_move); in amdgpu_vm_move_to_lru_tail()
658 ttm_bo_move_to_lru_tail(&bo->shadow->tbo, in amdgpu_vm_move_to_lru_tail()
695 if (bo->tbo.type != ttm_bo_type_kernel) { in amdgpu_vm_validate_pt_bos()
782 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_vm_clear_bo()
787 r = ttm_bo_validate(&bo->shadow->tbo, &bo->shadow->placement, in amdgpu_vm_clear_bo()
877 bp->resv = vm->root.base.bo->tbo.base.resv; in amdgpu_vm_bo_param()
1790 resv = vm->root.base.bo->tbo.base.resv; in amdgpu_vm_bo_update()
1792 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_vm_bo_update()
1795 resv = bo->tbo.base.resv; in amdgpu_vm_bo_update()
1801 if (abo->tbo.mem.mem_type == TTM_PL_VRAM) in amdgpu_vm_bo_update()
1804 mem = &bo->tbo.mem; in amdgpu_vm_bo_update()
1807 ttm = container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm); in amdgpu_vm_bo_update()
1813 flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem); in amdgpu_vm_bo_update()
1818 bo_adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_vm_bo_update()
1823 if (clear || (bo && bo->tbo.base.resv == in amdgpu_vm_bo_update()
1824 vm->root.base.bo->tbo.base.resv)) in amdgpu_vm_bo_update()
1849 if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) { in amdgpu_vm_bo_update()
1850 uint32_t mem_type = bo->tbo.mem.mem_type; in amdgpu_vm_bo_update()
1986 struct dma_resv *resv = vm->root.base.bo->tbo.base.resv; in amdgpu_vm_prt_fini()
2033 struct dma_resv *resv = vm->root.base.bo->tbo.base.resv; in amdgpu_vm_clear_freed()
2101 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
2191 if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv && in amdgpu_vm_bo_insert_map()
2525 if (dma_resv_locking_ctx(bo->tbo.base.resv) != in amdgpu_vm_bo_trace_cs()
2553 if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) in amdgpu_vm_bo_rmv()
2608 if (!dma_resv_test_signaled_rcu(bo->tbo.base.resv, true)) in amdgpu_vm_evictable()
2647 if (evicted && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) { in amdgpu_vm_bo_invalidate()
2656 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_invalidate()
2658 else if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) in amdgpu_vm_bo_invalidate()
2788 timeout = dma_resv_wait_timeout_rcu(vm->root.base.bo->tbo.base.resv, in amdgpu_vm_wait_idle()
2881 r = dma_resv_reserve_shared(root->tbo.base.resv, 1); in amdgpu_vm_init()
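
Most of the hits above repeat one idiom: comparing bo->tbo.base.resv against vm->root.base.bo->tbo.base.resv to decide whether a BO is a per-VM BO, i.e. one that shares its reservation object with the VM's root page-directory BO (lines 331, 621, 1823-1824, 1849, 2191, 2553, 2647 and 2658). Below is a minimal sketch of that test, assuming only the struct layout implied by these lines (struct amdgpu_bo embeds a struct ttm_buffer_object as tbo, whose embedded drm_gem_object base carries the dma_resv pointer, and vm->root.base.bo is the root page-directory BO); the helper name is made up for illustration and is not part of the driver.

	/*
	 * Hypothetical helper (not in amdgpu) showing the recurring
	 * "per-VM BO" check from the hits above: a BO belongs to this
	 * VM's shared reservation domain when its dma_resv is the same
	 * object as that of the VM's root page-directory BO.
	 */
	static bool amdgpu_vm_bo_shares_root_resv(struct amdgpu_vm *vm,
						  struct amdgpu_bo *bo)
	{
		/* tbo is the embedded ttm_buffer_object, tbo.base the GEM object */
		return bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv;
	}

The bo->tbo.type == ttm_bo_type_kernel checks (lines 229, 335, 695 and 2656) are the complementary idiom: they appear to single out kernel-allocated BOs, such as page tables and the page directory, from regular user BOs.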