Lines Matching refs:gtt
662 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages() local
663 unsigned long start = gtt->userptr; in amdgpu_ttm_tt_get_user_pages()
687 if (unlikely((gtt->userflags & AMDGPU_GEM_USERPTR_ANONONLY) && in amdgpu_ttm_tt_get_user_pages()
716 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages_done() local
718 if (!gtt || !gtt->userptr || !range) in amdgpu_ttm_tt_get_user_pages_done()
722 gtt->userptr, ttm->num_pages); in amdgpu_ttm_tt_get_user_pages_done()
758 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_pin_userptr() local
759 int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_pin_userptr()
777 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
795 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpin_userptr() local
796 int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_unpin_userptr()
815 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_gart_bind() local
823 amdgpu_gart_bind(adev, gtt->offset, page_idx, in amdgpu_ttm_gart_bind()
824 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
833 amdgpu_gart_bind(adev, gtt->offset + (page_idx << PAGE_SHIFT), in amdgpu_ttm_gart_bind()
835 &(gtt->ttm.dma_address[page_idx]), flags); in amdgpu_ttm_gart_bind()
837 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_gart_bind()
838 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
853 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_bind() local
860 if (gtt->bound) in amdgpu_ttm_backend_bind()
863 if (gtt->userptr) { in amdgpu_ttm_backend_bind()
874 attach = gtt->gobj->import_attach; in amdgpu_ttm_backend_bind()
882 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_backend_bind()
893 gtt->offset = AMDGPU_BO_INVALID_OFFSET; in amdgpu_ttm_backend_bind()
901 gtt->offset = (u64)bo_mem->start << PAGE_SHIFT; in amdgpu_ttm_backend_bind()
902 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
903 gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
904 gtt->bound = true; in amdgpu_ttm_backend_bind()
920 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_alloc_gart() local
954 gtt->offset = (u64)tmp->start << PAGE_SHIFT; in amdgpu_ttm_alloc_gart()
991 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_unbind() local
994 if (gtt->userptr) { in amdgpu_ttm_backend_unbind()
996 } else if (ttm->sg && gtt->gobj->import_attach) { in amdgpu_ttm_backend_unbind()
999 attach = gtt->gobj->import_attach; in amdgpu_ttm_backend_unbind()
1004 if (!gtt->bound) in amdgpu_ttm_backend_unbind()
1007 if (gtt->offset == AMDGPU_BO_INVALID_OFFSET) in amdgpu_ttm_backend_unbind()
1011 amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
1012 gtt->bound = false; in amdgpu_ttm_backend_unbind()
1018 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_destroy() local
1020 if (gtt->usertask) in amdgpu_ttm_backend_destroy()
1021 put_task_struct(gtt->usertask); in amdgpu_ttm_backend_destroy()
1023 ttm_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
1024 kfree(gtt); in amdgpu_ttm_backend_destroy()
1039 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_create() local
1042 gtt = kzalloc(sizeof(struct amdgpu_ttm_tt), GFP_KERNEL); in amdgpu_ttm_tt_create()
1043 if (gtt == NULL) { in amdgpu_ttm_tt_create()
1046 gtt->gobj = &bo->base; in amdgpu_ttm_tt_create()
1054 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) { in amdgpu_ttm_tt_create()
1055 kfree(gtt); in amdgpu_ttm_tt_create()
1058 return &gtt->ttm; in amdgpu_ttm_tt_create()
1072 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_populate() local
1077 if (gtt->userptr) { in amdgpu_ttm_tt_populate()
1106 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpopulate() local
1112 if (gtt->userptr) { in amdgpu_ttm_tt_unpopulate()
1139 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_get_userptr() local
1144 gtt = (void *)tbo->ttm; in amdgpu_ttm_tt_get_userptr()
1145 *user_addr = gtt->userptr; in amdgpu_ttm_tt_get_userptr()
1163 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_set_userptr() local
1175 gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_tt_set_userptr()
1176 gtt->userptr = addr; in amdgpu_ttm_tt_set_userptr()
1177 gtt->userflags = flags; in amdgpu_ttm_tt_set_userptr()
1179 if (gtt->usertask) in amdgpu_ttm_tt_set_userptr()
1180 put_task_struct(gtt->usertask); in amdgpu_ttm_tt_set_userptr()
1181 gtt->usertask = current->group_leader; in amdgpu_ttm_tt_set_userptr()
1182 get_task_struct(gtt->usertask); in amdgpu_ttm_tt_set_userptr()
1192 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_usermm() local
1194 if (gtt == NULL) in amdgpu_ttm_tt_get_usermm()
1197 if (gtt->usertask == NULL) in amdgpu_ttm_tt_get_usermm()
1200 return gtt->usertask->mm; in amdgpu_ttm_tt_get_usermm()
1211 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_affect_userptr() local
1214 if (gtt == NULL || !gtt->userptr) in amdgpu_ttm_tt_affect_userptr()
1220 size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
1221 if (gtt->userptr > end || gtt->userptr + size <= start) in amdgpu_ttm_tt_affect_userptr()
1225 *userptr = gtt->userptr; in amdgpu_ttm_tt_affect_userptr()
1234 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_userptr() local
1236 if (gtt == NULL || !gtt->userptr) in amdgpu_ttm_tt_is_userptr()
1247 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_readonly() local
1249 if (gtt == NULL) in amdgpu_ttm_tt_is_readonly()
1252 return !!(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_is_readonly()