Lines Matching refs:gtt
663 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages() local
664 unsigned long start = gtt->userptr; in amdgpu_ttm_tt_get_user_pages()
688 if (unlikely((gtt->userflags & AMDGPU_GEM_USERPTR_ANONONLY) && in amdgpu_ttm_tt_get_user_pages()
712 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_discard_user_pages() local
714 if (gtt && gtt->userptr && range) in amdgpu_ttm_tt_discard_user_pages()
727 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages_done() local
729 if (!gtt || !gtt->userptr || !range) in amdgpu_ttm_tt_get_user_pages_done()
733 gtt->userptr, ttm->num_pages); in amdgpu_ttm_tt_get_user_pages_done()
765 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_pin_userptr() local
766 int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_pin_userptr()
784 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
802 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpin_userptr() local
803 int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_unpin_userptr()
826 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_gart_bind_gfx9_mqd() local
840 gtt->offset + (page_idx << PAGE_SHIFT), in amdgpu_ttm_gart_bind_gfx9_mqd()
841 1, &gtt->ttm.dma_address[page_idx], flags); in amdgpu_ttm_gart_bind_gfx9_mqd()
847 gtt->offset + ((page_idx + 1) << PAGE_SHIFT), in amdgpu_ttm_gart_bind_gfx9_mqd()
849 &gtt->ttm.dma_address[page_idx + 1], in amdgpu_ttm_gart_bind_gfx9_mqd()
860 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_gart_bind() local
868 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_gart_bind()
869 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
884 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_bind() local
891 if (gtt->bound) in amdgpu_ttm_backend_bind()
894 if (gtt->userptr) { in amdgpu_ttm_backend_bind()
905 attach = gtt->gobj->import_attach; in amdgpu_ttm_backend_bind()
913 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_backend_bind()
924 gtt->offset = AMDGPU_BO_INVALID_OFFSET; in amdgpu_ttm_backend_bind()
932 gtt->offset = (u64)bo_mem->start << PAGE_SHIFT; in amdgpu_ttm_backend_bind()
933 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
934 gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
935 gtt->bound = true; in amdgpu_ttm_backend_bind()
951 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_alloc_gart() local
985 gtt->offset = (u64)tmp->start << PAGE_SHIFT; in amdgpu_ttm_alloc_gart()
1022 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_unbind() local
1025 if (gtt->userptr) { in amdgpu_ttm_backend_unbind()
1027 } else if (ttm->sg && gtt->gobj->import_attach) { in amdgpu_ttm_backend_unbind()
1030 attach = gtt->gobj->import_attach; in amdgpu_ttm_backend_unbind()
1035 if (!gtt->bound) in amdgpu_ttm_backend_unbind()
1038 if (gtt->offset == AMDGPU_BO_INVALID_OFFSET) in amdgpu_ttm_backend_unbind()
1042 amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
1043 gtt->bound = false; in amdgpu_ttm_backend_unbind()
1049 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_destroy() local
1051 if (gtt->usertask) in amdgpu_ttm_backend_destroy()
1052 put_task_struct(gtt->usertask); in amdgpu_ttm_backend_destroy()
1054 ttm_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
1055 kfree(gtt); in amdgpu_ttm_backend_destroy()
1071 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_create() local
1074 gtt = kzalloc(sizeof(struct amdgpu_ttm_tt), GFP_KERNEL); in amdgpu_ttm_tt_create()
1075 if (!gtt) in amdgpu_ttm_tt_create()
1078 gtt->gobj = &bo->base; in amdgpu_ttm_tt_create()
1080 gtt->pool_id = KFD_XCP_MEM_ID(adev, abo->xcp_id); in amdgpu_ttm_tt_create()
1082 gtt->pool_id = abo->xcp_id; in amdgpu_ttm_tt_create()
1090 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) { in amdgpu_ttm_tt_create()
1091 kfree(gtt); in amdgpu_ttm_tt_create()
1094 return &gtt->ttm; in amdgpu_ttm_tt_create()
1108 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_populate() local
1114 if (gtt->userptr) { in amdgpu_ttm_tt_populate()
1124 if (adev->mman.ttm_pools && gtt->pool_id >= 0) in amdgpu_ttm_tt_populate()
1125 pool = &adev->mman.ttm_pools[gtt->pool_id]; in amdgpu_ttm_tt_populate()
1147 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpopulate() local
1154 if (gtt->userptr) { in amdgpu_ttm_tt_unpopulate()
1169 if (adev->mman.ttm_pools && gtt->pool_id >= 0) in amdgpu_ttm_tt_unpopulate()
1170 pool = &adev->mman.ttm_pools[gtt->pool_id]; in amdgpu_ttm_tt_unpopulate()
1187 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_get_userptr() local
1192 gtt = (void *)tbo->ttm; in amdgpu_ttm_tt_get_userptr()
1193 *user_addr = gtt->userptr; in amdgpu_ttm_tt_get_userptr()
1212 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_set_userptr() local
1224 gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_tt_set_userptr()
1225 gtt->userptr = addr; in amdgpu_ttm_tt_set_userptr()
1226 gtt->userflags = flags; in amdgpu_ttm_tt_set_userptr()
1228 if (gtt->usertask) in amdgpu_ttm_tt_set_userptr()
1229 put_task_struct(gtt->usertask); in amdgpu_ttm_tt_set_userptr()
1230 gtt->usertask = current->group_leader; in amdgpu_ttm_tt_set_userptr()
1231 get_task_struct(gtt->usertask); in amdgpu_ttm_tt_set_userptr()
1241 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_usermm() local
1243 if (gtt == NULL) in amdgpu_ttm_tt_get_usermm()
1246 if (gtt->usertask == NULL) in amdgpu_ttm_tt_get_usermm()
1249 return gtt->usertask->mm; in amdgpu_ttm_tt_get_usermm()
1260 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_affect_userptr() local
1263 if (gtt == NULL || !gtt->userptr) in amdgpu_ttm_tt_affect_userptr()
1269 size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
1270 if (gtt->userptr > end || gtt->userptr + size <= start) in amdgpu_ttm_tt_affect_userptr()
1274 *userptr = gtt->userptr; in amdgpu_ttm_tt_affect_userptr()
1283 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_userptr() local
1285 if (gtt == NULL || !gtt->userptr) in amdgpu_ttm_tt_is_userptr()
1296 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_readonly() local
1298 if (gtt == NULL) in amdgpu_ttm_tt_is_readonly()
1301 return !!(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_is_readonly()
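For reference, every match above dereferences a field of struct amdgpu_ttm_tt, the amdgpu wrapper around TTM's struct ttm_tt. The sketch below is reconstructed from the accesses seen in this listing; field order and exact types are approximations, not the authoritative definition in amdgpu_ttm.c:

	/* Sketch of the container type referenced by "gtt" above.
	 * Fields inferred from the listed accesses (gtt->userptr,
	 * gtt->userflags, gtt->usertask, gtt->offset, gtt->bound,
	 * gtt->gobj, gtt->pool_id, gtt->ttm.dma_address, ...). */
	struct amdgpu_ttm_tt {
		struct ttm_tt		ttm;		/* embedded TTM object, bound/unbound to GART */
		struct drm_gem_object	*gobj;		/* backing GEM object (import_attach for dma-buf) */
		u64			offset;		/* GART offset, or AMDGPU_BO_INVALID_OFFSET */
		uint64_t		userptr;	/* start of the userptr mapping, if any */
		struct task_struct	*usertask;	/* owner task for userptr BOs (group leader) */
		uint32_t		userflags;	/* AMDGPU_GEM_USERPTR_READONLY / _ANONONLY / ... */
		bool			bound;		/* true while pages are bound in the GART */
		int32_t			pool_id;	/* TTM pool index for partitioned (XCP) devices */
	};

ttm_to_amdgpu_ttm_tt() resolves a struct ttm_tt pointer back to this wrapper via the embedded ttm member (container_of-style), which is why the helpers above can recover gtt from the ttm argument they are given.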