/Linux-v5.15/drivers/gpu/drm/radeon/ |
D | radeon_vm.c |
      297  struct radeon_bo_va *bo_va;  in radeon_vm_bo_find() local
      299  list_for_each_entry(bo_va, &bo->va, bo_list) {  in radeon_vm_bo_find()
      300  if (bo_va->vm == vm)  in radeon_vm_bo_find()
      301  return bo_va;  in radeon_vm_bo_find()
      324  struct radeon_bo_va *bo_va;  in radeon_vm_bo_add() local
      326  bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL);  in radeon_vm_bo_add()
      327  if (bo_va == NULL)  in radeon_vm_bo_add()
      330  bo_va->vm = vm;  in radeon_vm_bo_add()
      331  bo_va->bo = bo;  in radeon_vm_bo_add()
      332  bo_va->it.start = 0;  in radeon_vm_bo_add()
      [all …]
|
D | radeon_gem.c |
      201  struct radeon_bo_va *bo_va;  in radeon_gem_object_open() local
      214  bo_va = radeon_vm_bo_find(vm, rbo);  in radeon_gem_object_open()
      215  if (!bo_va) {  in radeon_gem_object_open()
      216  bo_va = radeon_vm_bo_add(rdev, vm, rbo);  in radeon_gem_object_open()
      218  ++bo_va->ref_count;  in radeon_gem_object_open()
      232  struct radeon_bo_va *bo_va;  in radeon_gem_object_close() local
      246  bo_va = radeon_vm_bo_find(vm, rbo);  in radeon_gem_object_close()
      247  if (bo_va) {  in radeon_gem_object_close()
      248  if (--bo_va->ref_count == 0) {  in radeon_gem_object_close()
      249  radeon_vm_bo_rmv(rdev, bo_va);  in radeon_gem_object_close()
      [all …]
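The radeon_vm.c and radeon_gem.c hits above show the usual lifecycle of a per-VM mapping record: radeon_gem_object_open() looks the BO up in the VM with radeon_vm_bo_find(), allocates a radeon_bo_va via radeon_vm_bo_add() only if none exists yet, and bumps ref_count; radeon_gem_object_close() drops the reference and removes the mapping once the count reaches zero. The following is a minimal user-space sketch of that find-or-add-with-refcount pattern; the struct and function names are simplified stand-ins, not the kernel API, and the intrusive list_head plus locking of the real code are replaced by a plain singly linked list.

/*
 * Toy model of the find-or-add + ref_count pattern from
 * radeon_gem_object_open()/_close(); all names here are hypothetical
 * stand-ins, not the radeon API.
 */
#include <stdio.h>
#include <stdlib.h>

struct vm { int id; };                  /* stand-in for struct radeon_vm    */

struct bo_va {                          /* stand-in for struct radeon_bo_va */
	struct vm *vm;
	int ref_count;
	struct bo_va *next;
};

struct bo {                             /* stand-in for struct radeon_bo    */
	struct bo_va *va;               /* one mapping record per VM        */
};

/* Like radeon_vm_bo_find(): return this VM's mapping record, if any. */
static struct bo_va *bo_va_find(struct bo *bo, struct vm *vm)
{
	struct bo_va *bo_va;

	for (bo_va = bo->va; bo_va; bo_va = bo_va->next)
		if (bo_va->vm == vm)
			return bo_va;
	return NULL;
}

/* Open path: reuse the existing record or allocate a fresh one. */
static struct bo_va *bo_open(struct bo *bo, struct vm *vm)
{
	struct bo_va *bo_va = bo_va_find(bo, vm);

	if (!bo_va) {
		bo_va = calloc(1, sizeof(*bo_va));
		if (!bo_va)
			return NULL;
		bo_va->vm = vm;
		bo_va->next = bo->va;
		bo->va = bo_va;
	}
	++bo_va->ref_count;
	return bo_va;
}

/* Close path: free the record once the last opener has gone away. */
static void bo_close(struct bo *bo, struct vm *vm)
{
	struct bo_va **link;

	for (link = &bo->va; *link; link = &(*link)->next) {
		if ((*link)->vm != vm)
			continue;
		if (--(*link)->ref_count == 0) {
			struct bo_va *dead = *link;

			*link = dead->next;
			free(dead);
		}
		return;
	}
}

int main(void)
{
	struct vm vm0 = { 0 };
	struct bo bo = { NULL };

	bo_open(&bo, &vm0);             /* first open allocates the record  */
	bo_open(&bo, &vm0);             /* second open only bumps ref_count */
	bo_close(&bo, &vm0);
	bo_close(&bo, &vm0);            /* last close frees the record      */
	printf("mappings left: %s\n", bo.va ? "yes" : "none");
	return 0;
}

The lima and amdgpu entries further down follow the same open/close scheme, so this one sketch stands in for all three drivers.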
|
D | radeon_trace.h |
      66  TP_PROTO(struct radeon_bo_va *bo_va),
      67  TP_ARGS(bo_va),
      75  __entry->soffset = bo_va->it.start;
      76  __entry->eoffset = bo_va->it.last + 1;
      77  __entry->flags = bo_va->flags;
|
D | radeon_cs.c |
      502  struct radeon_bo_va *bo_va;  in radeon_bo_vm_update_pte() local
      527  bo_va = radeon_vm_bo_find(vm, bo);  in radeon_bo_vm_update_pte()
      528  if (bo_va == NULL) {  in radeon_bo_vm_update_pte()
      533  r = radeon_vm_bo_update(rdev, bo_va, bo->tbo.resource);  in radeon_bo_vm_update_pte()
      537  radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update);  in radeon_bo_vm_update_pte()
|
D | radeon.h |
      2895  struct radeon_bo_va *bo_va,
      2905  struct radeon_bo_va *bo_va,
      2909  struct radeon_bo_va *bo_va);
|
/Linux-v5.15/drivers/gpu/drm/lima/ |
D | lima_vm.c |
      79  struct lima_bo_va *bo_va, *ret = NULL;  in lima_vm_bo_find() local
      81  list_for_each_entry(bo_va, &bo->va, list) {  in lima_vm_bo_find()
      82  if (bo_va->vm == vm) {  in lima_vm_bo_find()
      83  ret = bo_va;  in lima_vm_bo_find()
      93  struct lima_bo_va *bo_va;  in lima_vm_bo_add() local
      99  bo_va = lima_vm_bo_find(vm, bo);  in lima_vm_bo_add()
      100  if (bo_va) {  in lima_vm_bo_add()
      101  bo_va->ref_count++;  in lima_vm_bo_add()
      112  bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL);  in lima_vm_bo_add()
      113  if (!bo_va) {  in lima_vm_bo_add()
      [all …]
|
/Linux-v5.15/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_vm.c |
      1812  struct amdgpu_bo_va *bo_va, *tmp;  in amdgpu_vm_get_memory() local
      1814  list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) {  in amdgpu_vm_get_memory()
      1815  if (!bo_va->base.bo)  in amdgpu_vm_get_memory()
      1817  amdgpu_bo_get_memory(bo_va->base.bo, vram_mem,  in amdgpu_vm_get_memory()
      1820  list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) {  in amdgpu_vm_get_memory()
      1821  if (!bo_va->base.bo)  in amdgpu_vm_get_memory()
      1823  amdgpu_bo_get_memory(bo_va->base.bo, vram_mem,  in amdgpu_vm_get_memory()
      1826  list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) {  in amdgpu_vm_get_memory()
      1827  if (!bo_va->base.bo)  in amdgpu_vm_get_memory()
      1829  amdgpu_bo_get_memory(bo_va->base.bo, vram_mem,  in amdgpu_vm_get_memory()
      [all …]
|
D | amdgpu_csa.c |
      65  struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,  in amdgpu_map_static_csa() argument
      88  *bo_va = amdgpu_vm_bo_add(adev, vm, bo);  in amdgpu_map_static_csa()
      89  if (!*bo_va) {  in amdgpu_map_static_csa()
      95  r = amdgpu_vm_bo_map(adev, *bo_va, csa_addr, 0, size,  in amdgpu_map_static_csa()
      101  amdgpu_vm_bo_rmv(adev, *bo_va);  in amdgpu_map_static_csa()
|
D | amdgpu_gem.c |
      164  struct amdgpu_bo_va *bo_va;  in amdgpu_gem_object_open() local
      180  bo_va = amdgpu_vm_bo_find(vm, abo);  in amdgpu_gem_object_open()
      181  if (!bo_va) {  in amdgpu_gem_object_open()
      182  bo_va = amdgpu_vm_bo_add(adev, vm, abo);  in amdgpu_gem_object_open()
      184  ++bo_va->ref_count;  in amdgpu_gem_object_open()
      203  struct amdgpu_bo_va *bo_va;  in amdgpu_gem_object_close() local
      221  bo_va = amdgpu_vm_bo_find(vm, bo);  in amdgpu_gem_object_close()
      222  if (!bo_va || --bo_va->ref_count)  in amdgpu_gem_object_close()
      225  amdgpu_vm_bo_rmv(adev, bo_va);  in amdgpu_gem_object_close()
      607  struct amdgpu_bo_va *bo_va,  in amdgpu_gem_va_update_vm() argument
      [all …]
|
D | amdgpu_amdkfd_gpuvm.c |
      75  if (entry->bo_va->base.vm == avm)  in kfd_mem_is_attached()
      473  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmamap_userptr()
      523  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmamap_dmabuf()
      554  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmaunmap_userptr()
      574  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmaunmap_dmabuf()
      737  attachment[i]->bo_va = amdgpu_vm_bo_add(adev, vm, bo[i]);  in kfd_mem_attach()
      738  if (unlikely(!attachment[i]->bo_va)) {  in kfd_mem_attach()
      759  if (attachment[i]->bo_va) {  in kfd_mem_attach()
      760  amdgpu_vm_bo_rmv(adev, attachment[i]->bo_va);  in kfd_mem_attach()
      772  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_detach()
      [all …]
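In the kfd_mem_attach() hits above (lines 737-760), each per-GPU attachment gets its own bo_va from amdgpu_vm_bo_add(), and the error path walks back over the attachments created so far and removes their bo_va again with amdgpu_vm_bo_rmv(). Below is a short, self-contained sketch of that allocate-then-unwind pattern; every type and helper name is a hypothetical stand-in for illustration, not the amdgpu/amdkfd API.

/*
 * Sketch of the unwind-on-partial-failure pattern seen in kfd_mem_attach();
 * every identifier here is a hypothetical stand-in, not the kernel API.
 */
#include <stdio.h>
#include <stdlib.h>

struct attachment {
	void *bo_va;                    /* stand-in for struct amdgpu_bo_va * */
};

/* Stand-ins for amdgpu_vm_bo_add() / amdgpu_vm_bo_rmv(). */
static void *vm_bo_add_stub(void)        { return malloc(1); }
static void  vm_bo_rmv_stub(void *bo_va) { free(bo_va); }

static int attach_all(struct attachment *att, int n)
{
	int i;

	for (i = 0; i < n; i++) {
		att[i].bo_va = vm_bo_add_stub();
		if (!att[i].bo_va)
			goto unwind;
	}
	return 0;

unwind:
	/* Error path: remove every bo_va that was already created. */
	while (--i >= 0) {
		vm_bo_rmv_stub(att[i].bo_va);
		att[i].bo_va = NULL;
	}
	return -1;
}

int main(void)
{
	struct attachment att[4] = { { NULL } };

	printf("attach_all: %d\n", attach_all(att, 4));
	return 0;
}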
|
D | amdgpu_vm.h |
      410  struct amdgpu_bo_va *bo_va,
      422  struct amdgpu_bo_va *bo_va,
      426  struct amdgpu_bo_va *bo_va,
      430  struct amdgpu_bo_va *bo_va,
      439  struct amdgpu_bo_va *bo_va);
|
D | amdgpu_trace.h |
      244  TP_PROTO(struct amdgpu_bo_va *bo_va,
      246  TP_ARGS(bo_va, mapping),
      256  __entry->bo = bo_va ? bo_va->base.bo : NULL;
      268  TP_PROTO(struct amdgpu_bo_va *bo_va,
      270  TP_ARGS(bo_va, mapping),
      280  __entry->bo = bo_va ? bo_va->base.bo : NULL;
|
D | amdgpu_cs.c |
      578  e->bo_va = amdgpu_vm_bo_find(vm, bo);  in amdgpu_cs_parser_bos()
      729  struct amdgpu_bo_va *bo_va;  in amdgpu_cs_vm_handling() local
      811  bo_va = fpriv->csa_va;  in amdgpu_cs_vm_handling()
      812  BUG_ON(!bo_va);  in amdgpu_cs_vm_handling()
      813  r = amdgpu_vm_bo_update(adev, bo_va, false, NULL);  in amdgpu_cs_vm_handling()
      817  r = amdgpu_sync_vm_fence(&p->job->sync, bo_va->last_pt_update);  in amdgpu_cs_vm_handling()
      828  bo_va = e->bo_va;  in amdgpu_cs_vm_handling()
      829  if (bo_va == NULL)  in amdgpu_cs_vm_handling()
      832  r = amdgpu_vm_bo_update(adev, bo_va, false, NULL);  in amdgpu_cs_vm_handling()
      836  r = amdgpu_sync_vm_fence(&p->job->sync, bo_va->last_pt_update);  in amdgpu_cs_vm_handling()
      [all …]
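The amdgpu_cs_vm_handling() hits show the command-submission side of the mapping: each buffer's bo_va is brought up to date with amdgpu_vm_bo_update(), and the fence of that page-table update (bo_va->last_pt_update) is merged into the job's sync object with amdgpu_sync_vm_fence() so the submission cannot run before its mappings are valid. The sketch below models that update-then-sync loop with plain sequence numbers instead of DMA fences; it is an illustration under those simplifying assumptions, not the kernel API.

/*
 * Toy model of the update-then-sync loop in amdgpu_cs_vm_handling();
 * fences are modelled as sequence numbers, all names are hypothetical.
 */
#include <stdio.h>

struct fence    { unsigned long seqno; };      /* completion token for one PT update */
struct job_sync { unsigned long wait_seqno; }; /* "job must wait at least this far"  */

struct bo_entry {                              /* stand-in for a buffer-list entry   */
	struct fence last_pt_update;           /* like bo_va->last_pt_update         */
	int dirty;                             /* mapping needs a page-table rewrite */
};

/* Stand-in for amdgpu_vm_bo_update(): pretend to rewrite the page tables. */
static void bo_update(struct bo_entry *e, unsigned long *next_seqno)
{
	if (e->dirty) {
		e->last_pt_update.seqno = ++(*next_seqno);
		e->dirty = 0;
	}
}

/* Stand-in for amdgpu_sync_vm_fence(): remember the latest fence to wait on. */
static void sync_fence(struct job_sync *sync, const struct fence *f)
{
	if (f->seqno > sync->wait_seqno)
		sync->wait_seqno = f->seqno;
}

int main(void)
{
	struct bo_entry bos[3] = { { {0}, 1 }, { {0}, 0 }, { {0}, 1 } };
	struct job_sync sync = { 0 };
	unsigned long next_seqno = 0;
	int i;

	for (i = 0; i < 3; i++) {
		bo_update(&bos[i], &next_seqno);
		sync_fence(&sync, &bos[i].last_pt_update);
	}
	printf("job waits for page-table update seqno %lu\n", sync.wait_seqno);
	return 0;
}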
|
D | amdgpu_csa.h | 35 struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,
|
D | amdgpu_bo_list.h | 36 struct amdgpu_bo_va *bo_va; member
|
D | amdgpu_object.h | 64 struct amdgpu_bo_va *bo_va; member
|
D | amdgpu_amdkfd.h | 57 struct amdgpu_bo_va *bo_va; member
|
D | gmc_v9_0.c | 1094 mapping->bo_va->is_xgmi) in gmc_v9_0_get_vm_pte()
|
/Linux-v5.15/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_svm.c |
      1168  struct amdgpu_bo_va bo_va;  in svm_range_map_to_gpu() local
      1182  bo_va.is_xgmi = amdgpu_xgmi_same_hive(adev, bo_adev);  in svm_range_map_to_gpu()
|