
Searched refs:tbo (Results 1 – 25 of 64) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/virtio/
virtgpu_object.c
28 static void virtio_gpu_ttm_bo_destroy(struct ttm_buffer_object *tbo) in virtio_gpu_ttm_bo_destroy() argument
33 bo = container_of(tbo, struct virtio_gpu_object, tbo); in virtio_gpu_ttm_bo_destroy()
91 ret = ttm_bo_init(&vgdev->mman.bdev, &bo->tbo, size, type, in virtio_gpu_object_create()
112 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in virtio_gpu_object_kmap()
125 struct page **pages = bo->tbo.ttm->pages; in virtio_gpu_object_get_sg_table()
126 int nr_pages = bo->tbo.num_pages; in virtio_gpu_object_get_sg_table()
136 if (bo->tbo.ttm->state == tt_unpopulated) in virtio_gpu_object_get_sg_table()
137 bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm, &ctx); in virtio_gpu_object_get_sg_table()
164 r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL); in virtio_gpu_object_wait()
167 r = ttm_bo_wait(&bo->tbo, true, no_wait); in virtio_gpu_object_wait()
[all …]
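
Every driver in these results uses the same TTM idiom: embed a struct ttm_buffer_object (conventionally named tbo) in the driver's own BO type, and recover the outer object inside TTM callbacks with container_of(). A minimal sketch of the pattern, using a hypothetical mydrv_bo type rather than any driver above:

    #include <linux/slab.h>
    #include <drm/ttm/ttm_bo_api.h>
    #include <drm/ttm/ttm_placement.h>

    /* Hypothetical driver object: the TTM BO is embedded, not pointed to. */
    struct mydrv_bo {
        struct ttm_buffer_object tbo;   /* base object, owned by TTM */
        struct ttm_placement placement; /* where this BO may live */
        void *priv;
    };

    /* TTM calls this when the last reference drops; container_of() upcasts
     * from the embedded tbo back to the driver object, exactly as
     * virtio_gpu_ttm_bo_destroy() does above. */
    static void mydrv_bo_destroy(struct ttm_buffer_object *tbo)
    {
        struct mydrv_bo *bo = container_of(tbo, struct mydrv_bo, tbo);

        kfree(bo);
    }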
virtgpu_drv.h
64 struct ttm_buffer_object tbo; member
375 ttm_bo_reference(&bo->tbo); in virtio_gpu_object_ref()
381 struct ttm_buffer_object *tbo; in virtio_gpu_object_unref() local
385 tbo = &((*bo)->tbo); in virtio_gpu_object_unref()
386 ttm_bo_unref(&tbo); in virtio_gpu_object_unref()
387 if (tbo == NULL) in virtio_gpu_object_unref()
393 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in virtio_gpu_object_mmap_offset()
401 r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL); in virtio_gpu_object_reserve()
415 ttm_bo_unreserve(&bo->tbo); in virtio_gpu_object_unreserve()
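
virtgpu_drv.h wraps TTM refcounting in thin inlines; ttm_bo_unref() NULLs the pointer handed to it, which is why the unref hit above can test tbo afterwards. A sketch of the same pair, assuming the v4.19 ttm_bo_reference()/ttm_bo_unref() names and the hypothetical mydrv_bo from the previous sketch:

    static inline struct mydrv_bo *mydrv_bo_ref(struct mydrv_bo *bo)
    {
        ttm_bo_reference(&bo->tbo);     /* take a kref on the embedded BO */
        return bo;
    }

    static inline void mydrv_bo_unref(struct mydrv_bo **bo)
    {
        struct ttm_buffer_object *tbo;

        if ((*bo) == NULL)
            return;
        tbo = &((*bo)->tbo);
        ttm_bo_unref(&tbo);             /* sets tbo to NULL on return */
        *bo = NULL;                     /* caller's pointer is now stale */
    }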
virtgpu_ttm.c
368 static void virtio_gpu_bo_move_notify(struct ttm_buffer_object *tbo, in virtio_gpu_bo_move_notify() argument
375 bo = container_of(tbo, struct virtio_gpu_object, tbo); in virtio_gpu_bo_move_notify()
391 static void virtio_gpu_bo_swap_notify(struct ttm_buffer_object *tbo) in virtio_gpu_bo_swap_notify() argument
396 bo = container_of(tbo, struct virtio_gpu_object, tbo); in virtio_gpu_bo_swap_notify()
virtgpu_ioctl.c
71 qobj = container_of(bo, struct virtio_gpu_object, tbo); in virtio_gpu_object_list_validate()
89 qobj = container_of(bo, struct virtio_gpu_object, tbo); in virtio_gpu_unref_list()
154 buflist[i].bo = &qobj->tbo; in virtio_gpu_execbuffer_ioctl()
270 mainbuf.bo = &qobj->tbo; in virtio_gpu_resource_create_ioctl()
381 ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx); in virtio_gpu_transfer_from_host_ioctl()
390 reservation_object_add_excl_fence(qobj->tbo.resv, in virtio_gpu_transfer_from_host_ioctl()
425 ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx); in virtio_gpu_transfer_to_host_ioctl()
439 reservation_object_add_excl_fence(qobj->tbo.resv, in virtio_gpu_transfer_to_host_ioctl()
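
The two transfer ioctls above share one flow: validate the BO against its placement, queue the host command, then install the command's fence as the exclusive fence on the BO's reservation object so later users wait for the transfer. A condensed sketch of that flow under the v4.19 API; the fence is assumed to come from the driver's own submission path:

    #include <linux/dma-fence.h>
    #include <linux/reservation.h>
    #include <drm/ttm/ttm_bo_api.h>

    static int mydrv_transfer_to_host(struct mydrv_bo *bo, struct dma_fence *fence)
    {
        struct ttm_operation_ctx ctx = { .interruptible = true };
        int ret;

        /* make sure the BO sits in a placement the host can reach */
        ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
        if (ret)
            return ret;

        /* ... queue the actual transfer command here (driver specific) ... */

        /* readers and writers must now wait for the transfer to finish */
        reservation_object_add_excl_fence(bo->tbo.resv, fence);
        return 0;
    }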
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
76 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_subtract_pin_size()
78 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) { in amdgpu_bo_subtract_pin_size()
82 } else if (bo->tbo.mem.mem_type == TTM_PL_TT) { in amdgpu_bo_subtract_pin_size()
87 static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo) in amdgpu_bo_destroy() argument
89 struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev); in amdgpu_bo_destroy()
90 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_destroy()
101 drm_prime_gem_destroy(&bo->gem_base, bo->tbo.sg); in amdgpu_bo_destroy()
140 struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev); in amdgpu_bo_placement_from_domain()
287 r = amdgpu_ttm_alloc_gart(&(*bo_ptr)->tbo); in amdgpu_bo_create_reserved()
492 bo->tbo.bdev = &adev->mman.bdev; in amdgpu_bo_do_create()
[all …]
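
amdgpu_bo_subtract_pin_size() dispatches on tbo.mem.mem_type, the TTM placement the BO currently occupies, to pick the right accounting pool. A loose sketch of that dispatch; the two atomic counters stand in for amdgpu's per-device pin totals:

    #include <linux/atomic.h>
    #include <drm/ttm/ttm_placement.h>

    static void mydrv_subtract_pin_size(struct mydrv_bo *bo,
                                        atomic64_t *vram_pin, atomic64_t *gtt_pin)
    {
        u64 size = bo->tbo.num_pages << PAGE_SHIFT;

        switch (bo->tbo.mem.mem_type) {
        case TTM_PL_VRAM:               /* BO currently lives in VRAM */
            atomic64_sub(size, vram_pin);
            break;
        case TTM_PL_TT:                 /* BO currently lives in GTT */
            atomic64_sub(size, gtt_pin);
            break;
        }
    }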
amdgpu_object.h
83 struct ttm_buffer_object tbo; member
110 static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo) in ttm_to_amdgpu_bo() argument
112 return container_of(tbo, struct amdgpu_bo, tbo); in ttm_to_amdgpu_bo()
153 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_reserve()
156 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in amdgpu_bo_reserve()
167 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
172 return bo->tbo.num_pages << PAGE_SHIFT; in amdgpu_bo_size()
177 return (bo->tbo.num_pages << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE; in amdgpu_bo_ngpu_pages()
182 return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE; in amdgpu_bo_gpu_page_alignment()
193 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in amdgpu_bo_mmap_offset()
[all …]
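
The amdgpu_object.h inlines above derive everything from the embedded tbo: num_pages << PAGE_SHIFT gives the byte size, dividing by AMDGPU_GPU_PAGE_SIZE gives GPU pages, and drm_vma_node_offset_addr() yields the fake offset userspace passes to mmap(). A sketch of the two generic ones, continuing the hypothetical mydrv_bo:

    #include <drm/drm_vma_manager.h>
    #include <drm/ttm/ttm_bo_api.h>

    static inline unsigned long mydrv_bo_size(struct mydrv_bo *bo)
    {
        return bo->tbo.num_pages << PAGE_SHIFT;     /* CPU pages -> bytes */
    }

    static inline u64 mydrv_bo_mmap_offset(struct mydrv_bo *bo)
    {
        /* fake offset identifying this BO in the DRM device's mmap space */
        return drm_vma_node_offset_addr(&bo->tbo.vma_node);
    }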
amdgpu_prime.c
54 int npages = bo->tbo.num_pages; in amdgpu_gem_prime_get_sg_table()
56 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in amdgpu_gem_prime_get_sg_table()
73 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in amdgpu_gem_prime_vmap()
109 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_prime_mmap()
123 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_mmap()
176 bo->tbo.sg = sg; in amdgpu_gem_prime_import_sg_table()
177 bo->tbo.ttm->sg = sg; in amdgpu_gem_prime_import_sg_table()
208 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_map_attach()
225 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in amdgpu_gem_map_attach()
264 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_map_detach()
[all …]
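
For PRIME export the drivers build the scatter/gather table straight from the TTM backing pages (tbo.ttm->pages); virtio additionally populates the ttm_tt first when its state is tt_unpopulated, as the virtgpu_object.c hits show. A sketch around the v4.19 drm_prime_pages_to_sg() helper:

    #include <drm/drm_prime.h>

    static struct sg_table *mydrv_prime_get_sg_table(struct mydrv_bo *bo)
    {
        int npages = bo->tbo.num_pages;

        /* returns ERR_PTR() on failure; pages must already be populated */
        return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
    }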
amdgpu_gtt_mgr.c
36 struct ttm_buffer_object *tbo; member
111 struct ttm_buffer_object *tbo, in amdgpu_gtt_mgr_alloc() argument
162 struct ttm_buffer_object *tbo, in amdgpu_gtt_mgr_new() argument
171 if ((&tbo->mem == mem || tbo->mem.mem_type != TTM_PL_TT) && in amdgpu_gtt_mgr_new()
187 node->tbo = tbo; in amdgpu_gtt_mgr_new()
191 r = amdgpu_gtt_mgr_alloc(man, tbo, place, mem); in amdgpu_gtt_mgr_new()
263 r = amdgpu_ttm_recover_gart(node->tbo); in amdgpu_gtt_mgr_recover()
amdgpu_gem.c
124 struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev); in amdgpu_gem_object_open()
131 mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm); in amdgpu_gem_object_open()
136 abo->tbo.resv != vm->root.base.bo->tbo.resv) in amdgpu_gem_object_open()
157 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_gem_object_close()
171 tv.bo = &bo->tbo; in amdgpu_gem_object_close()
263 resv = vm->root.base.bo->tbo.resv; in amdgpu_gem_create_ioctl()
327 r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in amdgpu_gem_userptr_ioctl()
338 r = amdgpu_ttm_tt_get_user_pages(bo->tbo.ttm, in amdgpu_gem_userptr_ioctl()
339 bo->tbo.ttm->pages); in amdgpu_gem_userptr_ioctl()
348 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_gem_userptr_ioctl()
[all …]
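
amdgpu_gem_userptr_ioctl wires a user-space address range to the BO's ttm_tt and then validates the BO so TTM binds the pinned pages. The amdgpu_ttm_tt_* calls named in the hits are amdgpu-internal; a condensed sketch of the ioctl's happy path:

    static int mydrv_bind_userptr(struct amdgpu_bo *bo, u64 addr, u32 flags)
    {
        struct ttm_operation_ctx ctx = { .interruptible = true };
        int r;

        /* record the user VA range and flags on the ttm_tt */
        r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, addr, flags);
        if (r)
            return r;

        /* pin the user pages into tbo.ttm->pages */
        r = amdgpu_ttm_tt_get_user_pages(bo->tbo.ttm, bo->tbo.ttm->pages);
        if (r)
            return r;

        r = amdgpu_bo_reserve(bo, true);
        if (r)
            return r;
        r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  /* bind into GTT */
        amdgpu_bo_unreserve(bo);
        return r;
    }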
amdgpu_amdkfd_gpuvm.c
174 kfd_mem_limit.system_mem_used -= bo->tbo.acc_size; in amdgpu_amdkfd_unreserve_system_memory_limit()
178 (bo->tbo.acc_size + amdgpu_bo_size(bo)); in amdgpu_amdkfd_unreserve_system_memory_limit()
209 struct reservation_object *resv = bo->tbo.resv; in amdgpu_amdkfd_remove_eviction_fence()
322 if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm), in amdgpu_amdkfd_bo_validate()
328 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_amdkfd_bo_validate()
340 ttm_bo_wait(&bo->tbo, false, false); in amdgpu_amdkfd_bo_validate()
365 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in vm_validate_pt_pd_bos()
421 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in vm_update_pds()
453 unsigned long bo_size = bo->tbo.mem.size; in add_bo_to_vm()
543 entry->bo = &bo->tbo; in add_kgd_mem_to_kfd_bo_list()
[all …]
amdgpu_vm.c
159 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_base_init()
162 if (bo->tbo.resv != vm->root.base.bo->tbo.resv) in amdgpu_vm_bo_base_init()
166 amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type)) in amdgpu_vm_bo_base_init()
265 entry->tv.bo = &entry->robj->tbo; in amdgpu_vm_get_pd_bo()
301 ttm_bo_move_to_lru_tail(&bo->tbo); in amdgpu_vm_validate_pt_bos()
303 ttm_bo_move_to_lru_tail(&bo->shadow->tbo); in amdgpu_vm_validate_pt_bos()
307 if (bo->tbo.type != ttm_bo_type_kernel) { in amdgpu_vm_validate_pt_bos()
323 ttm_bo_move_to_lru_tail(&bo->tbo); in amdgpu_vm_validate_pt_bos()
325 ttm_bo_move_to_lru_tail(&bo->shadow->tbo); in amdgpu_vm_validate_pt_bos()
392 r = reservation_object_reserve_shared(bo->tbo.resv); in amdgpu_vm_clear_bo()
[all …]
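
amdgpu_vm_validate_pt_bos() keeps page-table BOs at the tail of TTM's LRU so they are evicted last; in v4.19 ttm_bo_move_to_lru_tail() takes only the BO and must run under the global LRU spinlock. A sketch, assuming the BO (and its optional shadow) is already reserved:

    #include <drm/ttm/ttm_bo_driver.h>

    static void mydrv_bo_keep_hot(struct ttm_bo_global *glob,
                                  struct mydrv_bo *bo, struct mydrv_bo *shadow)
    {
        spin_lock(&glob->lru_lock);
        ttm_bo_move_to_lru_tail(&bo->tbo);      /* evict this BO last */
        if (shadow)
            ttm_bo_move_to_lru_tail(&shadow->tbo);
        spin_unlock(&glob->lru_lock);
    }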
amdgpu_cs.c
50 p->uf_entry.tv.bo = &p->uf_entry.robj->tbo; in amdgpu_cs_user_fence_chunk()
62 if (amdgpu_ttm_tt_get_usermm(p->uf_entry.robj->tbo.ttm)) { in amdgpu_cs_user_fence_chunk()
394 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_cs_bo_validate()
398 .resv = bo->tbo.resv, in amdgpu_cs_bo_validate()
430 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_bo_validate()
461 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_cs_try_evict()
473 other = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type); in amdgpu_cs_try_evict()
489 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_cs_try_evict()
535 usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm); in amdgpu_cs_list_validate()
540 if (amdgpu_ttm_tt_userptr_needs_pages(bo->tbo.ttm) && in amdgpu_cs_list_validate()
[all …]
amdgpu_mn.c
225 if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, end)) in amdgpu_mn_invalidate_node()
228 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in amdgpu_mn_invalidate_node()
233 amdgpu_ttm_tt_mark_user_pages(bo->tbo.ttm); in amdgpu_mn_invalidate_node()
327 if (amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, in amdgpu_mn_invalidate_range_start_hsa()
448 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_mn_register()
506 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_mn_unregister()
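
The MMU-notifier paths block invalidation until the GPU is done with the affected userptr range, by waiting for every fence on the BO's reservation object. A sketch of that wait with the v4.19 reservation API; amdgpu does the same with MAX_SCHEDULE_TIMEOUT and checks r <= 0:

    #include <linux/sched.h>
    #include <linux/reservation.h>

    static int mydrv_wait_bo_idle(struct mydrv_bo *bo)
    {
        long r;

        /* wait_all = true: shared and exclusive fences; not interruptible,
         * since this runs from an MMU-notifier context */
        r = reservation_object_wait_timeout_rcu(bo->tbo.resv, true, false,
                                                MAX_SCHEDULE_TIMEOUT);
        return r <= 0 ? -EBUSY : 0;     /* 0 = timeout, <0 = error */
    }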
amdgpu_ttm.c
986 struct ttm_buffer_object *tbo, in amdgpu_ttm_gart_bind() argument
989 struct amdgpu_bo *abo = ttm_to_amdgpu_bo(tbo); in amdgpu_ttm_gart_bind()
990 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind()
1133 int amdgpu_ttm_recover_gart(struct ttm_buffer_object *tbo) in amdgpu_ttm_recover_gart() argument
1135 struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev); in amdgpu_ttm_recover_gart()
1139 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1142 flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, &tbo->mem); in amdgpu_ttm_recover_gart()
1143 r = amdgpu_ttm_gart_bind(adev, tbo, flags); in amdgpu_ttm_recover_gart()
1531 struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev); in amdgpu_ttm_access_memory()
1541 nodes = amdgpu_find_mm_node(&abo->tbo.mem, &offset); in amdgpu_ttm_access_memory()
[all …]
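
amdgpu_ttm_recover_gart() re-creates GART mappings after a GPU reset: if the BO has backing pages, recompute the PTE flags from its ttm_tt and current placement, then rebind. Restated as a sketch (note amdgpu_ttm_gart_bind() is file-local to amdgpu_ttm.c):

    static int mydrv_recover_gart(struct ttm_buffer_object *tbo)
    {
        struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev);
        uint64_t flags;

        if (!tbo->ttm)
            return 0;           /* no system pages, nothing to rebind */

        /* PTE flags depend on caching and on where the BO currently sits */
        flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, &tbo->mem);
        return amdgpu_ttm_gart_bind(adev, tbo, flags);
    }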
amdgpu_vram_mgr.c
109 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_vram_mgr_bo_visible_size()
110 struct ttm_mem_reg *mem = &bo->tbo.mem; in amdgpu_vram_mgr_bo_visible_size()
138 struct ttm_buffer_object *tbo, in amdgpu_vram_mgr_new() argument
/Linux-v4.19/drivers/gpu/drm/qxl/
qxl_object.h
34 r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL); in qxl_bo_reserve()
47 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
52 return bo->tbo.offset; in qxl_bo_gpu_offset()
57 return bo->tbo.num_pages << PAGE_SHIFT; in qxl_bo_size()
62 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in qxl_bo_mmap_offset()
70 r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL); in qxl_bo_wait()
80 *mem_type = bo->tbo.mem.mem_type; in qxl_bo_wait()
82 r = ttm_bo_wait(&bo->tbo, true, no_wait); in qxl_bo_wait()
83 ttm_bo_unreserve(&bo->tbo); in qxl_bo_wait()
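
qxl_bo_wait() shows the canonical reserve/wait/unreserve sequence: ttm_bo_wait() requires the BO to be reserved, and tbo.mem is only stable while the reservation is held, so the current placement is sampled inside the critical section. A sketch mirroring the hits above:

    static int mydrv_bo_wait(struct mydrv_bo *bo, u32 *mem_type, bool no_wait)
    {
        int r;

        r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
        if (r)
            return r;

        if (mem_type)
            *mem_type = bo->tbo.mem.mem_type;   /* stable while reserved */

        r = ttm_bo_wait(&bo->tbo, true, no_wait);
        ttm_bo_unreserve(&bo->tbo);
        return r;
    }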
qxl_object.c
30 static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo) in qxl_ttm_bo_destroy() argument
35 bo = to_qxl_bo(tbo); in qxl_ttm_bo_destroy()
111 r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type, in qxl_bo_create()
135 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in qxl_bo_kmap()
147 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kmap_atomic_page()
152 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) in qxl_bo_kmap_atomic_page()
154 else if (bo->tbo.mem.mem_type == TTM_PL_PRIV) in qxl_bo_kmap_atomic_page()
160 ret = ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem); in qxl_bo_kmap_atomic_page()
163 return io_mapping_map_atomic_wc(map, bo->tbo.mem.bus.offset + page_offset); in qxl_bo_kmap_atomic_page()
189 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kunmap_atomic_page()
[all …]
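
qxl_bo_kmap_atomic_page() avoids a full kmap for single-page accesses: it reserves the I/O region for the BO's memory type, then maps one page write-combined through the pool's io_mapping at the BO's bus offset. A sketch of the same shape; the io_mapping is assumed to be set up at driver init:

    #include <linux/io-mapping.h>
    #include <drm/ttm/ttm_bo_driver.h>

    static void *mydrv_bo_kmap_atomic_page(struct io_mapping *vram_map,
                                           struct mydrv_bo *bo,
                                           unsigned long offset)
    {
        /* make sure bus.offset is valid for this memory region */
        if (ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem))
            return NULL;

        /* atomic, write-combined; undo with io_mapping_unmap_atomic() */
        return io_mapping_map_atomic_wc(vram_map,
                                        bo->tbo.mem.bus.offset + offset);
    }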
qxl_gem.c
36 struct ttm_buffer_object *tbo; in qxl_gem_object_free() local
42 tbo = &qobj->tbo; in qxl_gem_object_free()
43 ttm_bo_unref(&tbo); in qxl_gem_object_free()
/Linux-v4.19/drivers/gpu/drm/radeon/
radeon_object.c
54 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT; in radeon_update_memory_usage()
72 static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo) in radeon_ttm_bo_destroy() argument
76 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
78 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1); in radeon_ttm_bo_destroy()
86 drm_prime_gem_destroy(&bo->gem_base, bo->tbo.sg); in radeon_ttm_bo_destroy()
259 r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type, in radeon_bo_create()
284 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in radeon_bo_kmap()
310 ttm_bo_get(&bo->tbo); in radeon_bo_ref()
316 struct ttm_buffer_object *tbo; in radeon_bo_unref() local
322 tbo = &((*bo)->tbo); in radeon_bo_unref()
[all …]
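
radeon_bo_kmap() (like the virtio and qxl kmap hits above) maps the whole BO through ttm_bo_kmap() and extracts the CPU pointer with ttm_kmap_obj_virtual(); note radeon's ref path already uses the newer ttm_bo_get() name while v4.19 TTM still carries ttm_bo_reference()/ttm_bo_unref(). A kmap sketch:

    static int mydrv_bo_kmap(struct mydrv_bo *bo, struct ttm_bo_kmap_obj *map,
                             void **ptr)
    {
        bool is_iomem;
        int r;

        r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, map);
        if (r)
            return r;

        /* is_iomem reports whether the mapping points at I/O memory */
        *ptr = ttm_kmap_obj_virtual(map, &is_iomem);
        return 0;   /* release later with ttm_bo_kunmap(map) */
    }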
radeon_object.h
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
93 return bo->tbo.offset; in radeon_bo_gpu_offset()
98 return bo->tbo.num_pages << PAGE_SHIFT; in radeon_bo_size()
103 return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE; in radeon_bo_ngpu_pages()
108 return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE; in radeon_bo_gpu_page_alignment()
119 return drm_vma_node_offset_addr(&bo->tbo.vma_node); in radeon_bo_mmap_offset()
radeon_prime.c
35 int npages = bo->tbo.num_pages; in radeon_gem_prime_get_sg_table()
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
45 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
122 return bo->tbo.resv; in radeon_gem_prime_res_obj()
130 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_gem.c
112 r = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, 30 * HZ); in radeon_gem_set_domain()
326 r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in radeon_gem_userptr_ioctl()
345 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_gem_userptr_ioctl()
415 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
447 r = reservation_object_test_signaled_rcu(robj->tbo.resv, true); in radeon_gem_busy_ioctl()
453 cur_placement = READ_ONCE(robj->tbo.mem.mem_type); in radeon_gem_busy_ioctl()
476 ret = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, 30 * HZ); in radeon_gem_wait_idle_ioctl()
483 cur_placement = READ_ONCE(robj->tbo.mem.mem_type); in radeon_gem_wait_idle_ioctl()
554 tv.bo = &bo_va->bo->tbo; in radeon_gem_va_update_vm()
580 r = radeon_vm_bo_update(rdev, bo_va, &bo_va->bo->tbo.mem); in radeon_gem_va_update_vm()
[all …]
radeon_mn.c
159 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
168 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in radeon_mn_invalidate_range_start()
174 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_mn_invalidate_range_start()
/Linux-v4.19/drivers/gpu/drm/hisilicon/hibmc/
hibmc_ttm.c
81 static void hibmc_bo_ttm_destroy(struct ttm_buffer_object *tbo) in hibmc_bo_ttm_destroy() argument
83 struct hibmc_bo *bo = container_of(tbo, struct hibmc_bo, bo); in hibmc_bo_ttm_destroy()
280 struct ttm_buffer_object *tbo; in hibmc_bo_unref() local
285 tbo = &((*bo)->bo); in hibmc_bo_unref()
286 ttm_bo_unref(&tbo); in hibmc_bo_unref()
/Linux-v4.19/drivers/gpu/drm/bochs/
bochs_mm.c
74 static void bochs_bo_ttm_destroy(struct ttm_buffer_object *tbo) in bochs_bo_ttm_destroy() argument
78 bo = container_of(tbo, struct bochs_bo, bo); in bochs_bo_ttm_destroy()
427 struct ttm_buffer_object *tbo; in bochs_bo_unref() local
432 tbo = &((*bo)->bo); in bochs_bo_unref()
433 ttm_bo_unref(&tbo); in bochs_bo_unref()
