Lines matching refs: bo — cross-reference listing for the identifier bo in the amdgpu buffer-object code (drivers/gpu/drm/amd/amdgpu/amdgpu_object.c). Each entry gives the source line number, the matching line, and the enclosing function; "argument" and "local" mark lines where bo is declared as a function parameter or a local variable. Gaps in the numbering are source lines that do not reference bo.
63 static void amdgpu_bo_subtract_pin_size(struct amdgpu_bo *bo) in amdgpu_bo_subtract_pin_size() argument
65 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_subtract_pin_size()
67 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) { in amdgpu_bo_subtract_pin_size()
68 atomic64_sub(amdgpu_bo_size(bo), &adev->vram_pin_size); in amdgpu_bo_subtract_pin_size()
69 atomic64_sub(amdgpu_vram_mgr_bo_visible_size(bo), in amdgpu_bo_subtract_pin_size()
71 } else if (bo->tbo.mem.mem_type == TTM_PL_TT) { in amdgpu_bo_subtract_pin_size()
72 atomic64_sub(amdgpu_bo_size(bo), &adev->gart_pin_size); in amdgpu_bo_subtract_pin_size()
79 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_destroy() local
81 if (bo->pin_count > 0) in amdgpu_bo_destroy()
82 amdgpu_bo_subtract_pin_size(bo); in amdgpu_bo_destroy()
84 amdgpu_bo_kunmap(bo); in amdgpu_bo_destroy()
86 if (bo->tbo.base.import_attach) in amdgpu_bo_destroy()
87 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in amdgpu_bo_destroy()
88 drm_gem_object_release(&bo->tbo.base); in amdgpu_bo_destroy()
90 if (!list_empty(&bo->shadow_list)) { in amdgpu_bo_destroy()
92 list_del_init(&bo->shadow_list); in amdgpu_bo_destroy()
95 amdgpu_bo_unref(&bo->parent); in amdgpu_bo_destroy()
97 kfree(bo->metadata); in amdgpu_bo_destroy()
98 kfree(bo); in amdgpu_bo_destroy()
111 bool amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo) in amdgpu_bo_is_amdgpu_bo() argument
113 if (bo->destroy == &amdgpu_bo_destroy) in amdgpu_bo_is_amdgpu_bo()
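A note on the idiom above: TTM hands driver callbacks a plain struct ttm_buffer_object, and amdgpu recognizes its own BOs by comparing the destroy callback. A minimal sketch of the guard-then-cast pattern that the notify callbacks later in this listing rely on (ttm_to_amdgpu_bo() is the real container_of wrapper; example_handle_tbo is illustrative):

	static void example_handle_tbo(struct ttm_buffer_object *tbo)
	{
		struct amdgpu_bo *abo;

		/* Only downcast once we know the BO was created by amdgpu. */
		if (!amdgpu_bo_is_amdgpu_bo(tbo))
			return;

		abo = ttm_to_amdgpu_bo(tbo);
		/* ... operate on abo ... */
	}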
428 void amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr, in amdgpu_bo_free_kernel() argument
431 if (*bo == NULL) in amdgpu_bo_free_kernel()
434 if (likely(amdgpu_bo_reserve(*bo, true) == 0)) { in amdgpu_bo_free_kernel()
436 amdgpu_bo_kunmap(*bo); in amdgpu_bo_free_kernel()
438 amdgpu_bo_unpin(*bo); in amdgpu_bo_free_kernel()
439 amdgpu_bo_unreserve(*bo); in amdgpu_bo_free_kernel()
441 amdgpu_bo_unref(bo); in amdgpu_bo_free_kernel()
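amdgpu_bo_free_kernel() is the teardown half of the kernel-BO helper pair. A minimal usage sketch together with amdgpu_bo_create_kernel(), which allocates, pins, and CPU-maps a BO in one call (example_alloc_scratch and the GTT placement are illustrative choices, not from the listing):

	static int example_alloc_scratch(struct amdgpu_device *adev)
	{
		struct amdgpu_bo *bo = NULL;
		u64 gpu_addr;
		void *cpu_addr;
		int r;

		r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
					    AMDGPU_GEM_DOMAIN_GTT,
					    &bo, &gpu_addr, &cpu_addr);
		if (r)
			return r;

		memset(cpu_addr, 0, PAGE_SIZE);

		/* Unmaps, unpins and unrefs; also clears bo, gpu_addr, cpu_addr. */
		amdgpu_bo_free_kernel(&bo, &gpu_addr, &cpu_addr);
		return 0;
	}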
533 struct amdgpu_bo *bo; in amdgpu_bo_do_create() local
561 bo = kzalloc(sizeof(struct amdgpu_bo), GFP_KERNEL); in amdgpu_bo_do_create()
562 if (bo == NULL) in amdgpu_bo_do_create()
564 drm_gem_private_object_init(adev_to_drm(adev), &bo->tbo.base, size); in amdgpu_bo_do_create()
565 INIT_LIST_HEAD(&bo->shadow_list); in amdgpu_bo_do_create()
566 bo->vm_bo = NULL; in amdgpu_bo_do_create()
567 bo->preferred_domains = bp->preferred_domain ? bp->preferred_domain : in amdgpu_bo_do_create()
569 bo->allowed_domains = bo->preferred_domains; in amdgpu_bo_do_create()
571 bo->allowed_domains == AMDGPU_GEM_DOMAIN_VRAM) in amdgpu_bo_do_create()
572 bo->allowed_domains |= AMDGPU_GEM_DOMAIN_GTT; in amdgpu_bo_do_create()
574 bo->flags = bp->flags; in amdgpu_bo_do_create()
576 if (!amdgpu_bo_support_uswc(bo->flags)) in amdgpu_bo_do_create()
577 bo->flags &= ~AMDGPU_GEM_CREATE_CPU_GTT_USWC; in amdgpu_bo_do_create()
579 bo->tbo.bdev = &adev->mman.bdev; in amdgpu_bo_do_create()
582 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU); in amdgpu_bo_do_create()
584 amdgpu_bo_placement_from_domain(bo, bp->domain); in amdgpu_bo_do_create()
586 bo->tbo.priority = 1; in amdgpu_bo_do_create()
588 r = ttm_bo_init_reserved(&adev->mman.bdev, &bo->tbo, size, bp->type, in amdgpu_bo_do_create()
589 &bo->placement, page_align, &ctx, acc_size, in amdgpu_bo_do_create()
595 bo->tbo.mem.mem_type == TTM_PL_VRAM && in amdgpu_bo_do_create()
596 bo->tbo.mem.start < adev->gmc.visible_vram_size >> PAGE_SHIFT) in amdgpu_bo_do_create()
603 bo->tbo.mem.mem_type == TTM_PL_VRAM) { in amdgpu_bo_do_create()
606 r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence); in amdgpu_bo_do_create()
610 amdgpu_bo_fence(bo, fence, false); in amdgpu_bo_do_create()
611 dma_fence_put(bo->tbo.moving); in amdgpu_bo_do_create()
612 bo->tbo.moving = dma_fence_get(fence); in amdgpu_bo_do_create()
616 amdgpu_bo_unreserve(bo); in amdgpu_bo_do_create()
617 *bo_ptr = bo; in amdgpu_bo_do_create()
619 trace_amdgpu_bo_create(bo); in amdgpu_bo_do_create()
623 bo->flags &= ~AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED; in amdgpu_bo_do_create()
629 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_bo_do_create()
630 amdgpu_bo_unref(&bo); in amdgpu_bo_do_create()
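amdgpu_bo_do_create() is driven by a struct amdgpu_bo_param describing size, alignment, placement and flags. A hedged sketch of how a caller fills that structure for the public amdgpu_bo_create() wrapper, assuming adev is in scope and the amdgpu_bo_param field layout of this kernel generation:

	struct amdgpu_bo_param bp;
	struct amdgpu_bo *bo;
	int r;

	memset(&bp, 0, sizeof(bp));
	bp.size = PAGE_SIZE;			/* illustrative size */
	bp.byte_align = PAGE_SIZE;
	bp.domain = AMDGPU_GEM_DOMAIN_VRAM;	/* initial placement */
	bp.flags = AMDGPU_GEM_CREATE_VRAM_CLEARED;
	bp.type = ttm_bo_type_kernel;
	bp.resv = NULL;				/* allocate a private resv */

	r = amdgpu_bo_create(adev, &bp, &bo);

Note how the lines at 603-612 above implement the VRAM-cleared flag: a fill job zeroes the freshly placed VRAM BO, and its fence is installed as the BO's moving fence.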
636 struct amdgpu_bo *bo) in amdgpu_bo_create_shadow() argument
641 if (bo->shadow) in amdgpu_bo_create_shadow()
650 bp.resv = bo->tbo.base.resv; in amdgpu_bo_create_shadow()
652 r = amdgpu_bo_do_create(adev, &bp, &bo->shadow); in amdgpu_bo_create_shadow()
654 bo->shadow->parent = amdgpu_bo_ref(bo); in amdgpu_bo_create_shadow()
656 list_add_tail(&bo->shadow->shadow_list, &adev->shadow_list); in amdgpu_bo_create_shadow()
718 int amdgpu_bo_validate(struct amdgpu_bo *bo) in amdgpu_bo_validate() argument
724 if (bo->pin_count) in amdgpu_bo_validate()
727 domain = bo->preferred_domains; in amdgpu_bo_validate()
730 amdgpu_bo_placement_from_domain(bo, domain); in amdgpu_bo_validate()
731 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_validate()
732 if (unlikely(r == -ENOMEM) && domain != bo->allowed_domains) { in amdgpu_bo_validate()
733 domain = bo->allowed_domains; in amdgpu_bo_validate()
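amdgpu_bo_validate() shows the driver's standard placement-fallback idiom: try preferred_domains first and, on -ENOMEM, retry with the wider allowed_domains. A minimal caller sketch, assuming the BO is not pinned:

	int r;

	r = amdgpu_bo_reserve(bo, false);	/* interruptible reservation */
	if (r)
		return r;

	/* Re-validates into preferred_domains, falling back on -ENOMEM. */
	r = amdgpu_bo_validate(bo);

	amdgpu_bo_unreserve(bo);
	return r;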
779 int amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr) in amdgpu_bo_kmap() argument
784 if (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS) in amdgpu_bo_kmap()
787 kptr = amdgpu_bo_kptr(bo); in amdgpu_bo_kmap()
794 r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, false, false, in amdgpu_bo_kmap()
799 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in amdgpu_bo_kmap()
804 *ptr = amdgpu_bo_kptr(bo); in amdgpu_bo_kmap()
818 void *amdgpu_bo_kptr(struct amdgpu_bo *bo) in amdgpu_bo_kptr() argument
822 return ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in amdgpu_bo_kptr()
831 void amdgpu_bo_kunmap(struct amdgpu_bo *bo) in amdgpu_bo_kunmap() argument
833 if (bo->kmap.bo) in amdgpu_bo_kunmap()
834 ttm_bo_kunmap(&bo->kmap); in amdgpu_bo_kunmap()
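The kmap/kunmap pair gives short-lived CPU access. Note that amdgpu_bo_kmap() waits on all fences in the BO's reservation object (line 794 above) before mapping, so the mapping is safe against in-flight GPU writes. A hedged usage sketch, with the BO assumed already reserved by the caller:

	void *ptr;
	int r;

	r = amdgpu_bo_kmap(bo, &ptr);	/* returns a cached kptr if one exists */
	if (r)
		return r;

	memset(ptr, 0, amdgpu_bo_size(bo));

	amdgpu_bo_kunmap(bo);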
846 struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo) in amdgpu_bo_ref() argument
848 if (bo == NULL) in amdgpu_bo_ref()
851 ttm_bo_get(&bo->tbo); in amdgpu_bo_ref()
852 return bo; in amdgpu_bo_ref()
861 void amdgpu_bo_unref(struct amdgpu_bo **bo) in amdgpu_bo_unref() argument
865 if ((*bo) == NULL) in amdgpu_bo_unref()
868 tbo = &((*bo)->tbo); in amdgpu_bo_unref()
870 *bo = NULL; in amdgpu_bo_unref()
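amdgpu_bo_ref()/amdgpu_bo_unref() are thin wrappers over TTM's refcounting; unref deliberately takes a double pointer so the caller's reference is cleared, turning a use-after-free into an immediate NULL dereference. Sketch:

	struct amdgpu_bo *ref;

	ref = amdgpu_bo_ref(bo);	/* NULL-safe; takes a TTM reference */
	/* ... keep the BO alive across some asynchronous work ... */
	amdgpu_bo_unref(&ref);		/* drops the reference and sets ref = NULL */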
895 int amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain, in amdgpu_bo_pin_restricted() argument
898 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_pin_restricted()
902 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) in amdgpu_bo_pin_restricted()
909 if (bo->prime_shared_count) { in amdgpu_bo_pin_restricted()
921 if (bo->pin_count) { in amdgpu_bo_pin_restricted()
922 uint32_t mem_type = bo->tbo.mem.mem_type; in amdgpu_bo_pin_restricted()
927 bo->pin_count++; in amdgpu_bo_pin_restricted()
933 (amdgpu_bo_gpu_offset(bo) - domain_start)); in amdgpu_bo_pin_restricted()
939 if (bo->tbo.base.import_attach) in amdgpu_bo_pin_restricted()
940 dma_buf_pin(bo->tbo.base.import_attach); in amdgpu_bo_pin_restricted()
942 bo->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS; in amdgpu_bo_pin_restricted()
944 if (!(bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) in amdgpu_bo_pin_restricted()
945 bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED; in amdgpu_bo_pin_restricted()
946 amdgpu_bo_placement_from_domain(bo, domain); in amdgpu_bo_pin_restricted()
947 for (i = 0; i < bo->placement.num_placement; i++) { in amdgpu_bo_pin_restricted()
953 if (fpfn > bo->placements[i].fpfn) in amdgpu_bo_pin_restricted()
954 bo->placements[i].fpfn = fpfn; in amdgpu_bo_pin_restricted()
955 if (!bo->placements[i].lpfn || in amdgpu_bo_pin_restricted()
956 (lpfn && lpfn < bo->placements[i].lpfn)) in amdgpu_bo_pin_restricted()
957 bo->placements[i].lpfn = lpfn; in amdgpu_bo_pin_restricted()
958 bo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT; in amdgpu_bo_pin_restricted()
961 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_pin_restricted()
963 dev_err(adev->dev, "%p pin failed\n", bo); in amdgpu_bo_pin_restricted()
967 bo->pin_count = 1; in amdgpu_bo_pin_restricted()
969 domain = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type); in amdgpu_bo_pin_restricted()
971 atomic64_add(amdgpu_bo_size(bo), &adev->vram_pin_size); in amdgpu_bo_pin_restricted()
972 atomic64_add(amdgpu_vram_mgr_bo_visible_size(bo), in amdgpu_bo_pin_restricted()
975 atomic64_add(amdgpu_bo_size(bo), &adev->gart_pin_size); in amdgpu_bo_pin_restricted()
994 int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain) in amdgpu_bo_pin() argument
996 return amdgpu_bo_pin_restricted(bo, domain, 0, 0); in amdgpu_bo_pin()
1009 int amdgpu_bo_unpin(struct amdgpu_bo *bo) in amdgpu_bo_unpin() argument
1011 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_unpin()
1015 if (WARN_ON_ONCE(!bo->pin_count)) { in amdgpu_bo_unpin()
1016 dev_warn(adev->dev, "%p unpin not necessary\n", bo); in amdgpu_bo_unpin()
1019 bo->pin_count--; in amdgpu_bo_unpin()
1020 if (bo->pin_count) in amdgpu_bo_unpin()
1023 amdgpu_bo_subtract_pin_size(bo); in amdgpu_bo_unpin()
1025 if (bo->tbo.base.import_attach) in amdgpu_bo_unpin()
1026 dma_buf_unpin(bo->tbo.base.import_attach); in amdgpu_bo_unpin()
1028 for (i = 0; i < bo->placement.num_placement; i++) { in amdgpu_bo_unpin()
1029 bo->placements[i].lpfn = 0; in amdgpu_bo_unpin()
1030 bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT; in amdgpu_bo_unpin()
1032 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_unpin()
1034 dev_err(adev->dev, "%p validate failed for unpin\n", bo); in amdgpu_bo_unpin()
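Pinning is counted via pin_count, so a second pin to the same domain only increments the count, and amdgpu_bo_gpu_offset() is warning-free only once the BO is pinned (or kernel-owned). A hedged sketch of the reserve/pin/use/unpin sequence:

	u64 gpu_addr;
	int r;

	r = amdgpu_bo_reserve(bo, false);
	if (r)
		return r;

	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_VRAM);
	if (!r) {
		gpu_addr = amdgpu_bo_gpu_offset(bo);
		/* ... hardware may rely on gpu_addr until the matching unpin ... */
		amdgpu_bo_unpin(bo);
	}

	amdgpu_bo_unreserve(bo);
	return r;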
1140 int amdgpu_bo_fbdev_mmap(struct amdgpu_bo *bo, in amdgpu_bo_fbdev_mmap() argument
1146 return ttm_bo_mmap_obj(vma, &bo->tbo); in amdgpu_bo_fbdev_mmap()
1160 int amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags) in amdgpu_bo_set_tiling_flags() argument
1162 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_set_tiling_flags()
1168 bo->tiling_flags = tiling_flags; in amdgpu_bo_set_tiling_flags()
1180 void amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags) in amdgpu_bo_get_tiling_flags() argument
1182 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_bo_get_tiling_flags()
1185 *tiling_flags = bo->tiling_flags; in amdgpu_bo_get_tiling_flags()
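Both tiling helpers expect the BO's reservation to be held (the getter asserts it at line 1182). A sketch, where the AMDGPU_TILING_SET() field choice and value are purely illustrative and the ASIC-specific field checks done by the setter are glossed over:

	u64 tiling;
	int r;

	r = amdgpu_bo_reserve(bo, false);
	if (r)
		return r;

	r = amdgpu_bo_set_tiling_flags(bo, AMDGPU_TILING_SET(SWIZZLE_MODE, 0));
	if (!r)
		amdgpu_bo_get_tiling_flags(bo, &tiling);

	amdgpu_bo_unreserve(bo);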
1201 int amdgpu_bo_set_metadata (struct amdgpu_bo *bo, void *metadata, in amdgpu_bo_set_metadata() argument
1207 if (bo->metadata_size) { in amdgpu_bo_set_metadata()
1208 kfree(bo->metadata); in amdgpu_bo_set_metadata()
1209 bo->metadata = NULL; in amdgpu_bo_set_metadata()
1210 bo->metadata_size = 0; in amdgpu_bo_set_metadata()
1222 kfree(bo->metadata); in amdgpu_bo_set_metadata()
1223 bo->metadata_flags = flags; in amdgpu_bo_set_metadata()
1224 bo->metadata = buffer; in amdgpu_bo_set_metadata()
1225 bo->metadata_size = metadata_size; in amdgpu_bo_set_metadata()
1245 int amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer, in amdgpu_bo_get_metadata() argument
1253 if (buffer_size < bo->metadata_size) in amdgpu_bo_get_metadata()
1256 if (bo->metadata_size) in amdgpu_bo_get_metadata()
1257 memcpy(buffer, bo->metadata, bo->metadata_size); in amdgpu_bo_get_metadata()
1261 *metadata_size = bo->metadata_size; in amdgpu_bo_get_metadata()
1263 *flags = bo->metadata_flags; in amdgpu_bo_get_metadata()
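amdgpu_bo_get_metadata() supports a two-step query: call once with a NULL buffer to learn the size, then again with storage of at least that size. A hedged sketch:

	size_t size = 0;
	u64 flags = 0;
	void *buf;
	int r;

	r = amdgpu_bo_get_metadata(bo, NULL, 0, &size, &flags);
	if (r || !size)
		return r;

	buf = kmalloc(size, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	r = amdgpu_bo_get_metadata(bo, buf, size, NULL, NULL);
	/* ... consume buf and flags ... */
	kfree(buf);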
1278 void amdgpu_bo_move_notify(struct ttm_buffer_object *bo, in amdgpu_bo_move_notify() argument
1282 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev); in amdgpu_bo_move_notify()
1284 struct ttm_resource *old_mem = &bo->mem; in amdgpu_bo_move_notify()
1286 if (!amdgpu_bo_is_amdgpu_bo(bo)) in amdgpu_bo_move_notify()
1289 abo = ttm_to_amdgpu_bo(bo); in amdgpu_bo_move_notify()
1295 bo->mem.mem_type != TTM_PL_SYSTEM) in amdgpu_bo_move_notify()
1317 void amdgpu_bo_release_notify(struct ttm_buffer_object *bo) in amdgpu_bo_release_notify() argument
1323 if (!amdgpu_bo_is_amdgpu_bo(bo)) in amdgpu_bo_release_notify()
1326 abo = ttm_to_amdgpu_bo(bo); in amdgpu_bo_release_notify()
1332 WARN_ON_ONCE(bo->type == ttm_bo_type_kernel in amdgpu_bo_release_notify()
1333 && bo->base.resv != &bo->base._resv); in amdgpu_bo_release_notify()
1334 if (bo->base.resv == &bo->base._resv) in amdgpu_bo_release_notify()
1337 if (bo->mem.mem_type != TTM_PL_VRAM || !bo->mem.mm_node || in amdgpu_bo_release_notify()
1341 dma_resv_lock(bo->base.resv, NULL); in amdgpu_bo_release_notify()
1343 r = amdgpu_fill_buffer(abo, AMDGPU_POISON, bo->base.resv, &fence); in amdgpu_bo_release_notify()
1349 dma_resv_unlock(bo->base.resv); in amdgpu_bo_release_notify()
1363 int amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo) in amdgpu_bo_fault_reserve_notify() argument
1365 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev); in amdgpu_bo_fault_reserve_notify()
1371 if (!amdgpu_bo_is_amdgpu_bo(bo)) in amdgpu_bo_fault_reserve_notify()
1374 abo = ttm_to_amdgpu_bo(bo); in amdgpu_bo_fault_reserve_notify()
1379 if (bo->mem.mem_type != TTM_PL_VRAM) in amdgpu_bo_fault_reserve_notify()
1382 size = bo->mem.num_pages << PAGE_SHIFT; in amdgpu_bo_fault_reserve_notify()
1383 offset = bo->mem.start << PAGE_SHIFT; in amdgpu_bo_fault_reserve_notify()
1400 r = ttm_bo_validate(bo, &abo->placement, &ctx); in amdgpu_bo_fault_reserve_notify()
1404 offset = bo->mem.start << PAGE_SHIFT; in amdgpu_bo_fault_reserve_notify()
1406 if (bo->mem.mem_type == TTM_PL_VRAM && in amdgpu_bo_fault_reserve_notify()
1421 void amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, in amdgpu_bo_fence() argument
1424 struct dma_resv *resv = bo->tbo.base.resv; in amdgpu_bo_fence()
1470 int amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr) in amdgpu_bo_sync_wait() argument
1472 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_sync_wait()
1474 return amdgpu_bo_sync_wait_resv(adev, bo->tbo.base.resv, in amdgpu_bo_sync_wait()
1488 u64 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo) in amdgpu_bo_gpu_offset() argument
1490 WARN_ON_ONCE(bo->tbo.mem.mem_type == TTM_PL_SYSTEM); in amdgpu_bo_gpu_offset()
1491 WARN_ON_ONCE(!dma_resv_is_locked(bo->tbo.base.resv) && in amdgpu_bo_gpu_offset()
1492 !bo->pin_count && bo->tbo.type != ttm_bo_type_kernel); in amdgpu_bo_gpu_offset()
1493 WARN_ON_ONCE(bo->tbo.mem.start == AMDGPU_BO_INVALID_OFFSET); in amdgpu_bo_gpu_offset()
1494 WARN_ON_ONCE(bo->tbo.mem.mem_type == TTM_PL_VRAM && in amdgpu_bo_gpu_offset()
1495 !(bo->flags & AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS)); in amdgpu_bo_gpu_offset()
1497 return amdgpu_bo_gpu_offset_no_check(bo); in amdgpu_bo_gpu_offset()
1507 u64 amdgpu_bo_gpu_offset_no_check(struct amdgpu_bo *bo) in amdgpu_bo_gpu_offset_no_check() argument
1509 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_gpu_offset_no_check()
1512 offset = (bo->tbo.mem.start << PAGE_SHIFT) + in amdgpu_bo_gpu_offset_no_check()
1513 amdgpu_ttm_domain_start(adev, bo->tbo.mem.mem_type); in amdgpu_bo_gpu_offset_no_check()
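The computation at the end makes the address model explicit: a BO's GPU address is its page offset within the TTM memory manager plus the base GPU address of the domain it resides in. As a rough sketch of what this evaluates to, assuming amdgpu_ttm_domain_start() returns adev->gmc.vram_start for TTM_PL_VRAM and adev->gmc.gart_start for TTM_PL_TT on this kernel generation:

	/* VRAM-resident BO: */
	gpu_addr = adev->gmc.vram_start + (bo->tbo.mem.start << PAGE_SHIFT);

	/* GTT-resident BO: */
	gpu_addr = adev->gmc.gart_start + (bo->tbo.mem.start << PAGE_SHIFT);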