Lines matching refs:tbo (one entry per use site: source line number, the matching line, and the enclosing function)

58 	ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,  in amdgpu_gem_prime_vmap()
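
The kmap call at source line 58 is cut off at the matcher's line boundary; ttm_bo_kmap() takes a fourth argument, the kmap object that receives the mapping. A minimal sketch of the usual pattern (the local map variable and the error handling are illustrative assumptions, not this file's code):

	struct ttm_bo_kmap_obj map;
	bool is_iomem;
	int r;

	/* Map every page of the BO into the kernel address space. */
	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &map);
	if (r)
		return ERR_PTR(r);

	/* Fetch the CPU pointer; is_iomem reports whether it is I/O memory. */
	return ttm_kmap_obj_virtual(&map, &is_iomem);
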
95 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_prime_mmap()
109 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_mmap()
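
The condition at source line 109 continues past the matched line. A sketch of the full check as recalled from mainline amdgpu (the flag test is an assumption drawn from that recollection, not from this listing): userptr BOs and BOs created without CPU access cannot be mmapped through the dma-buf.

	/* Reject userptr BOs (those with a backing mm) and BOs whose
	 * creation flags forbid CPU access. */
	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
	    (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS))
		return -EPERM;
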
181 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_attach()
202 r = __dma_resv_make_exclusive(bo->tbo.base.resv); in amdgpu_dma_buf_attach()
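
Source line 202 shows the attach path collapsing the shared fences on the BO's reservation object into a single exclusive fence, so an importer that tracks only the exclusive fence cannot miss a pending write. __dma_resv_make_exclusive() is a file-local helper; the reserve/unreserve framing below is a sketch recalled from mainline, not guaranteed by this listing:

	r = amdgpu_bo_reserve(bo, false);
	if (unlikely(r != 0))
		return r;

	/* Convert shared fences to an exclusive fence for the importer. */
	r = __dma_resv_make_exclusive(bo->tbo.base.resv);
	amdgpu_bo_unreserve(bo);
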
224 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_detach()
280 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_map()
295 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_map()
299 } else if (!(amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type) & in amdgpu_dma_buf_map()
304 switch (bo->tbo.mem.mem_type) { in amdgpu_dma_buf_map()
307 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
308 bo->tbo.num_pages); in amdgpu_dma_buf_map()
318 r = amdgpu_vram_mgr_alloc_sgt(adev, &bo->tbo.mem, attach->dev, in amdgpu_dma_buf_map()
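
Taken together, source lines 295-318 outline the map path: validate the BO into a domain the importer can reach, then build an sg_table matching its placement. A condensed sketch (error handling and the validate/domain checks at lines 295-299 are elided):

	switch (bo->tbo.mem.mem_type) {
	case TTM_PL_TT:
		/* GTT pages are plain system pages; wrap them directly. */
		sgt = drm_prime_pages_to_sg(bo->tbo.ttm->pages,
					    bo->tbo.num_pages);
		break;
	case TTM_PL_VRAM:
		/* VRAM needs an sg_table of bus addresses for the peer. */
		r = amdgpu_vram_mgr_alloc_sgt(adev, &bo->tbo.mem,
					      attach->dev, dir, &sgt);
		break;
	default:
		return ERR_PTR(-EINVAL);
	}
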
351 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_unmap()
378 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_begin_cpu_access()
395 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_begin_cpu_access()
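
Source line 395 revalidates the BO before CPU access begins. A minimal sketch of that pattern, assuming the mainline behavior of first requesting a CPU-visible placement (the GTT domain choice here is an assumption):

	struct ttm_operation_ctx ctx = { .interruptible = true };
	int ret;

	/* Request a CPU-accessible placement and let TTM migrate the BO. */
	amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
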
432 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
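
The export check at source line 432 likewise continues past the matched line; as recalled from mainline (an assumption, not shown in this listing), BOs that are permanently validated into a single VM are also refused:

	/* Userptr BOs and per-VM always-valid BOs cannot be exported. */
	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
	    bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID)
		return ERR_PTR(-EPERM);
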
507 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_move_notify()
513 if (bo->tbo.mem.mem_type == TTM_PL_SYSTEM) in amdgpu_dma_buf_move_notify()
516 r = ttm_bo_validate(&bo->tbo, &placement, &ctx); in amdgpu_dma_buf_move_notify()
524 struct dma_resv *resv = vm->root.base.bo->tbo.base.resv; in amdgpu_dma_buf_move_notify()
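
The move_notify path at source lines 507-524 deserves a note: validating against a zero-initialized placement (line 516) gives TTM no acceptable placement at all, which forces an eviction. A sketch of that trick, with the ctx flags assumed from mainline:

	struct ttm_operation_ctx ctx = { false, false };
	struct ttm_placement placement = {};
	int r;

	/* Nothing to do if the BO already lives in system memory. */
	if (bo->tbo.mem.mem_type == TTM_PL_SYSTEM)
		return;

	/* An empty placement list means "nowhere is valid": TTM evicts. */
	r = ttm_bo_validate(&bo->tbo, &placement, &ctx);
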
617 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_dmabuf_is_xgmi_accessible()
631 if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) && in amdgpu_dmabuf_is_xgmi_accessible()
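
Finally, the XGMI accessibility test at source line 631: two devices on the same XGMI hive can reach each other's VRAM, so the BO qualifies when its exporter shares the importer's hive and the BO may be placed in VRAM. The preferred_domains test below is recalled from mainline rather than shown in the listing:

	if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) &&
	    (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM))
		return true;

	return false;
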