Lines matching refs: bo (each hit lists the source line number, the matched line, the enclosing function, and the kind of the bo symbol on that line)
55 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_gem_prime_vmap() local
58 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in amdgpu_gem_prime_vmap()
59 &bo->dma_buf_vmap); in amdgpu_gem_prime_vmap()
63 return bo->dma_buf_vmap.virtual; in amdgpu_gem_prime_vmap()
75 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_gem_prime_vunmap() local
77 ttm_bo_kunmap(&bo->dma_buf_vmap); in amdgpu_gem_prime_vunmap()
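The vmap/vunmap hits above cover the whole kernel-CPU-mapping path for an exported BO. A minimal sketch of the two helpers as they plausibly read in this (pre-5.11, ttm_bo_kmap-based) kernel; the error path and the exact signatures are assumptions, everything else comes from the matched lines:

void *amdgpu_gem_prime_vmap(struct drm_gem_object *obj)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
        int ret;

        /* Map the whole BO through TTM's kmap helper; the mapping is
         * cached in bo->dma_buf_vmap so vunmap can undo it later. */
        ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,
                          &bo->dma_buf_vmap);
        if (ret)
                return ERR_PTR(ret);    /* assumed error path */

        return bo->dma_buf_vmap.virtual;
}

void amdgpu_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);

        ttm_bo_kunmap(&bo->dma_buf_vmap);
}

Caching the kmap object in bo->dma_buf_vmap is what lets vunmap work without the caller handing the mapping back.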
94 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_gem_prime_mmap() local
95 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_prime_mmap()
96 unsigned asize = amdgpu_bo_size(bo); in amdgpu_gem_prime_mmap()
109 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_mmap()
110 (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) { in amdgpu_gem_prime_mmap()
113 vma->vm_pgoff += amdgpu_bo_mmap_offset(bo) >> PAGE_SHIFT; in amdgpu_gem_prime_mmap()
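The mmap hits show the two gates on CPU-mapping an exported BO (userptr BOs and BOs created with NO_CPU_ACCESS are refused) plus the offset fixup into TTM's address space. A trimmed sketch; the size check and the ttm_bo_mmap handoff are reconstructed from how pre-5.12 TTM drivers look, and the real function's drm_vma_node allow/revoke bookkeeping is omitted:

int amdgpu_gem_prime_mmap(struct drm_gem_object *obj,
                          struct vm_area_struct *vma)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
        struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
        unsigned asize = amdgpu_bo_size(bo);

        /* assumed sanity check: the VMA must fit inside the BO */
        if (asize < vma->vm_end - vma->vm_start)
                return -EINVAL;

        /* userptr BOs and no-CPU-access BOs must not be mmap'ed */
        if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
            (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS))
                return -EPERM;

        /* rebase the fake GEM mmap offset onto TTM's address space */
        vma->vm_pgoff += amdgpu_bo_mmap_offset(bo) >> PAGE_SHIFT;

        /* pre-5.12 kernels hand the VMA straight to TTM (assumed) */
        return ttm_bo_mmap(vma->vm_file, vma, &adev->mman.bdev);
}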
180 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_attach() local
181 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_attach()
190 r = amdgpu_bo_reserve(bo, false); in amdgpu_dma_buf_attach()
202 r = __dma_resv_make_exclusive(bo->tbo.base.resv); in amdgpu_dma_buf_attach()
206 bo->prime_shared_count++; in amdgpu_dma_buf_attach()
207 amdgpu_bo_unreserve(bo); in amdgpu_dma_buf_attach()
223 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_detach() local
224 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_detach()
226 if (attach->dev->driver != adev->dev->driver && bo->prime_shared_count) in amdgpu_dma_buf_detach()
227 bo->prime_shared_count--; in amdgpu_dma_buf_detach()
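The attach/detach pair above tracks foreign sharing. On attach from a different driver, the BO is reserved and all shared fences are folded into one exclusive fence, because importers use the exclusive fence to track write hazards; detach simply drops the share count. A sketch with assumed control flow around the matched lines (the real attach also downgrades attach->peer2peer after a PCI P2P distance check, not shown here):

static int amdgpu_dma_buf_attach(struct dma_buf *dmabuf,
                                 struct dma_buf_attachment *attach)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(dmabuf->priv);
        struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
        int r;

        /* same-driver attachments need no fence rework (assumed) */
        if (attach->dev->driver == adev->dev->driver)
                return 0;

        r = amdgpu_bo_reserve(bo, false);
        if (unlikely(r != 0))
                return r;

        /* Fold any shared fences into a single exclusive fence so the
         * importer sees all pending work as potential writes. */
        r = __dma_resv_make_exclusive(bo->tbo.base.resv);
        if (r)
                goto out;

        bo->prime_shared_count++;
out:
        amdgpu_bo_unreserve(bo);
        return r;
}

static void amdgpu_dma_buf_detach(struct dma_buf *dmabuf,
                                  struct dma_buf_attachment *attach)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(dmabuf->priv);
        struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);

        if (attach->dev->driver != adev->dev->driver && bo->prime_shared_count)
                bo->prime_shared_count--;
}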
240 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_pin() local
243 return amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT); in amdgpu_dma_buf_pin()
256 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_unpin() local
258 amdgpu_bo_unpin(bo); in amdgpu_dma_buf_unpin()
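The pin/unpin hooks are the fallback for importers that cannot follow move notifications. A sketch; the retrieval of the GEM object from the attachment is assumed:

static int amdgpu_dma_buf_pin(struct dma_buf_attachment *attach)
{
        struct drm_gem_object *obj = attach->dmabuf->priv;  /* assumed */
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);

        /* Pin into GTT so the importer gets a stable, DMA-able placement. */
        return amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
}

static void amdgpu_dma_buf_unpin(struct dma_buf_attachment *attach)
{
        struct drm_gem_object *obj = attach->dmabuf->priv;  /* assumed */
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);

        amdgpu_bo_unpin(bo);
}

Pinning into GTT rather than VRAM avoids tying down scarce VRAM for the lifetime of the attachment while still guaranteeing a placement the peer can DMA from.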
279 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_map() local
280 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_map()
284 if (!bo->pin_count) { in amdgpu_dma_buf_map()
289 if (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM && in amdgpu_dma_buf_map()
291 bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED; in amdgpu_dma_buf_map()
294 amdgpu_bo_placement_from_domain(bo, domains); in amdgpu_dma_buf_map()
295 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_map()
299 } else if (!(amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type) & in amdgpu_dma_buf_map()
304 switch (bo->tbo.mem.mem_type) { in amdgpu_dma_buf_map()
307 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
308 bo->tbo.num_pages); in amdgpu_dma_buf_map()
318 r = amdgpu_vram_mgr_alloc_sgt(adev, &bo->tbo.mem, attach->dev, in amdgpu_dma_buf_map()
350 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_unmap() local
351 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_unmap()
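amdgpu_dma_buf_map is the heart of the exporter: an unpinned BO is first migrated into GTT (or left in VRAM for P2P-capable importers), then an sg_table is built from either the TTM page array or the VRAM manager. A sketch matching the ~5.8-5.10 signatures; the peer2peer test on the elided line between the hits at 289 and 291, and the DMA mapping and error cleanup of the GTT path, are assumptions:

static struct sg_table *amdgpu_dma_buf_map(struct dma_buf_attachment *attach,
                                           enum dma_data_direction dir)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(attach->dmabuf->priv);
        struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
        struct sg_table *sgt;
        long r;

        if (!bo->pin_count) {
                /* Unpinned (dynamic) case: move the BO somewhere mappable. */
                struct ttm_operation_ctx ctx = { false, false };
                unsigned domains = AMDGPU_GEM_DOMAIN_GTT;

                if (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM &&
                    attach->peer2peer) {        /* assumed P2P condition */
                        bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;
                        domains |= AMDGPU_GEM_DOMAIN_VRAM;
                }
                amdgpu_bo_placement_from_domain(bo, domains);
                r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
                if (r)
                        return ERR_PTR(r);
        } else if (!(amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type) &
                     AMDGPU_GEM_DOMAIN_GTT)) {
                /* pinned somewhere the importer cannot reach */
                return ERR_PTR(-EBUSY);
        }

        switch (bo->tbo.mem.mem_type) {
        case TTM_PL_TT:
                /* GTT: build an sg_table from the backing pages */
                sgt = drm_prime_pages_to_sg(bo->tbo.ttm->pages,
                                            bo->tbo.num_pages);
                break;
        case TTM_PL_VRAM:
                /* VRAM: the VRAM manager builds a bus-address sg_table */
                r = amdgpu_vram_mgr_alloc_sgt(adev, &bo->tbo.mem, attach->dev,
                                              dir, &sgt);
                if (r)
                        return ERR_PTR(r);
                break;
        default:
                return ERR_PTR(-EINVAL);
        }
        return sgt;     /* DMA mapping of the GTT sgt elided here */
}

The unmap hits at 350-351 belong to the symmetric amdgpu_dma_buf_unmap, which tears the sg_table down via sg_free_table/kfree for the GTT path or amdgpu_vram_mgr_free_sgt for the VRAM path.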
377 struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv); in amdgpu_dma_buf_begin_cpu_access() local
378 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_begin_cpu_access()
380 u32 domain = amdgpu_display_supported_domains(adev, bo->flags); in amdgpu_dma_buf_begin_cpu_access()
389 ret = amdgpu_bo_reserve(bo, false); in amdgpu_dma_buf_begin_cpu_access()
393 if (!bo->pin_count && (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) { in amdgpu_dma_buf_begin_cpu_access()
394 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT); in amdgpu_dma_buf_begin_cpu_access()
395 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_begin_cpu_access()
398 amdgpu_bo_unreserve(bo); in amdgpu_dma_buf_begin_cpu_access()
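begin_cpu_access migrates a displayable BO into GTT before the CPU reads it, so the read sees coherent data instead of stale VRAM. A sketch; the read-direction test and the early-out are assumptions reconstructed around the matched lines:

static int amdgpu_dma_buf_begin_cpu_access(struct dma_buf *dma_buf,
                                           enum dma_data_direction direction)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv);
        struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
        struct ttm_operation_ctx ctx = { true, false };
        u32 domain = amdgpu_display_supported_domains(adev, bo->flags);
        bool reads = (direction == DMA_BIDIRECTIONAL ||
                      direction == DMA_FROM_DEVICE);   /* assumed */
        int ret;

        if (!reads || !(domain & AMDGPU_GEM_DOMAIN_GTT))
                return 0;

        ret = amdgpu_bo_reserve(bo, false);
        if (unlikely(ret != 0))
                return ret;

        /* Move to GTT, but only if the BO is movable and allowed there. */
        if (!bo->pin_count && (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) {
                amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
                ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
        }

        amdgpu_bo_unreserve(bo);
        return ret;
}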
429 struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj); in amdgpu_gem_prime_export() local
432 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
433 bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID) in amdgpu_gem_prime_export()
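The export hits show the two classes of BO that must never leave the process: userptr BOs and per-VM BOs (VM_ALWAYS_VALID). A sketch; the drm_gem_prime_export call and the ops override are assumptions consistent with the post-5.4 PRIME API:

struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,
                                        int flags)
{
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj);
        struct dma_buf *buf;

        /* Refuse to export userptr BOs and per-VM BOs. */
        if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
            bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID)
                return ERR_PTR(-EPERM);

        buf = drm_gem_prime_export(gobj, flags);
        if (!IS_ERR(buf))
                buf->ops = &amdgpu_dmabuf_ops;  /* assumed override */

        return buf;
}

Overriding buf->ops with amdgpu_dmabuf_ops is what lets the import side recognize amdgpu-exported buffers; the dma_buf->ops comparisons in the import and XGMI helpers below depend on it.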
460 struct amdgpu_bo *bo; in amdgpu_dma_buf_create_obj() local
479 bo = gem_to_amdgpu_bo(gobj); in amdgpu_dma_buf_create_obj()
480 bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT; in amdgpu_dma_buf_create_obj()
481 bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT; in amdgpu_dma_buf_create_obj()
483 bo->prime_shared_count = 1; in amdgpu_dma_buf_create_obj()
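The create_obj hits show the import side building a GTT-only sg BO that shares the exporter's reservation object, and counting the import as shared when the exporter is a foreign driver. A sketch; drm_to_adev, the amdgpu_gem_object_create call, and the locking are assumptions (the real function also inherits USWC flags from amdgpu exporters, elided here):

static struct drm_gem_object *
amdgpu_dma_buf_create_obj(struct drm_device *dev, struct dma_buf *dma_buf)
{
        struct dma_resv *resv = dma_buf->resv;
        struct amdgpu_device *adev = drm_to_adev(dev);  /* assumed helper */
        struct drm_gem_object *gobj;
        struct amdgpu_bo *bo;
        int ret;

        dma_resv_lock(resv, NULL);
        /* assumed creation call: an sg-type BO sharing the exporter's resv */
        ret = amdgpu_gem_object_create(adev, dma_buf->size, PAGE_SIZE,
                                       AMDGPU_GEM_DOMAIN_CPU, 0,
                                       ttm_bo_type_sg, resv, &gobj);
        if (ret)
                goto error;

        bo = gem_to_amdgpu_bo(gobj);
        bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT;
        bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT;
        if (dma_buf->ops != &amdgpu_dmabuf_ops) /* assumed foreign check */
                bo->prime_shared_count = 1;

        dma_resv_unlock(resv);
        return gobj;

error:
        dma_resv_unlock(resv);
        return ERR_PTR(ret);
}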
506 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_move_notify() local
507 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_move_notify()
513 if (bo->tbo.mem.mem_type == TTM_PL_SYSTEM) in amdgpu_dma_buf_move_notify()
516 r = ttm_bo_validate(&bo->tbo, &placement, &ctx); in amdgpu_dma_buf_move_notify()
522 for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) { in amdgpu_dma_buf_move_notify()
524 struct dma_resv *resv = vm->root.base.bo->tbo.base.resv; in amdgpu_dma_buf_move_notify()
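move_notify is the importer-side invalidation callback: when the exporter moves the buffer, the import is evicted by validating against an empty placement, and every VM mapping it is invalidated. A sketch; the importer_priv retrieval is an assumption, and the per-VM page-table update and fencing inside the loop are elided:

static void amdgpu_dma_buf_move_notify(struct dma_buf_attachment *attach)
{
        struct drm_gem_object *obj = attach->importer_priv;     /* assumed */
        struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
        struct ttm_operation_ctx ctx = { false, false };
        struct ttm_placement placement = {};    /* empty: evict everywhere */
        struct amdgpu_vm_bo_base *bo_base;
        int r;

        /* Nothing to tear down if the import already sits in system RAM. */
        if (bo->tbo.mem.mem_type == TTM_PL_SYSTEM)
                return;

        /* Validating against an empty placement evicts the BO. */
        r = ttm_bo_validate(&bo->tbo, &placement, &ctx);
        if (r) {
                DRM_ERROR("Failed to invalidate DMA-buf import (%d)\n", r);
                return;
        }

        /* Invalidate the page tables of every VM mapping this BO. */
        for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) {
                struct amdgpu_vm *vm = bo_base->vm;
                struct dma_resv *resv = vm->root.base.bo->tbo.base.resv;

                /* real code locks resv and updates the VM's PTs; elided */
                (void)resv;
        }
}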
615 struct amdgpu_bo *bo) in amdgpu_dmabuf_is_xgmi_accessible() argument
617 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_dmabuf_is_xgmi_accessible()
628 bo = gem_to_amdgpu_bo(gobj); in amdgpu_dmabuf_is_xgmi_accessible()
631 if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) && in amdgpu_dmabuf_is_xgmi_accessible()
632 (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM)) in amdgpu_dmabuf_is_xgmi_accessible()
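The final helper decides whether a BO is reachable over XGMI: if the BO is itself an import, it is followed back to the exporter's BO first, and access is granted only when both devices share an XGMI hive and the BO can live in VRAM. A sketch; the import-attachment unwrap between the hits at 617 and 628 is reconstructed:

bool amdgpu_dmabuf_is_xgmi_accessible(struct amdgpu_device *adev,
                                      struct amdgpu_bo *bo)
{
        struct drm_gem_object *obj = &bo->tbo.base;
        struct drm_gem_object *gobj;

        if (obj->import_attach) {
                struct dma_buf *dma_buf = obj->import_attach->dmabuf;

                if (dma_buf->ops != &amdgpu_dmabuf_ops)
                        return false;   /* no XGMI with foreign exporters */

                /* follow the import back to the exporter's BO (assumed) */
                gobj = dma_buf->priv;
                bo = gem_to_amdgpu_bo(gobj);
        }

        if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) &&
            (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM))
                return true;

        return false;
}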