Lines matching refs: ttm

346 	if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),  in amdgpu_amdkfd_bo_validate()
507 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
508 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr() local
511 if (WARN_ON(ttm->num_pages != src_ttm->num_pages)) in kfd_mem_dmamap_userptr()
514 ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL); in kfd_mem_dmamap_userptr()
515 if (unlikely(!ttm->sg)) in kfd_mem_dmamap_userptr()
519 ret = sg_alloc_table_from_pages(ttm->sg, src_ttm->pages, in kfd_mem_dmamap_userptr()
520 ttm->num_pages, 0, in kfd_mem_dmamap_userptr()
521 (u64)ttm->num_pages << PAGE_SHIFT, in kfd_mem_dmamap_userptr()
526 ret = dma_map_sgtable(adev->dev, ttm->sg, direction, 0); in kfd_mem_dmamap_userptr()
530 drm_prime_sg_to_dma_addr_array(ttm->sg, ttm->dma_address, in kfd_mem_dmamap_userptr()
531 ttm->num_pages); in kfd_mem_dmamap_userptr()
541 dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0); in kfd_mem_dmamap_userptr()
544 sg_free_table(ttm->sg); in kfd_mem_dmamap_userptr()
546 kfree(ttm->sg); in kfd_mem_dmamap_userptr()
547 ttm->sg = NULL; in kfd_mem_dmamap_userptr()
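Read in order, the kfd_mem_dmamap_userptr() matches above outline the userptr DMA-mapping sequence: allocate ttm->sg, build it from the source ttm's pages, DMA-map it for the attaching device, fan the mapped addresses out into ttm->dma_address, and unwind in reverse on failure. A minimal sketch of that flow, using the same scatterlist/DMA calls that appear in the matches; the function name is hypothetical and the surrounding ttm_bo_validate() step is omitted:

/* Sketch only: reconstructs the flow visible in the matches above.
 * 'ttm' and 'src_ttm' are the ttm_tt pointers from lines 507-508;
 * 'adev' and 'direction' come from the surrounding function. */
static int dmamap_userptr_sketch(struct amdgpu_device *adev,
                                 struct ttm_tt *ttm, struct ttm_tt *src_ttm,
                                 enum dma_data_direction direction)
{
        int ret;

        if (WARN_ON(ttm->num_pages != src_ttm->num_pages))
                return -EINVAL;

        ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL);
        if (unlikely(!ttm->sg))
                return -ENOMEM;

        /* Describe the userptr pages of the source ttm as a scatterlist */
        ret = sg_alloc_table_from_pages(ttm->sg, src_ttm->pages,
                                        ttm->num_pages, 0,
                                        (u64)ttm->num_pages << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (unlikely(ret))
                goto free_sg;

        /* DMA-map the table for the attaching GPU's device */
        ret = dma_map_sgtable(adev->dev, ttm->sg, direction, 0);
        if (unlikely(ret))
                goto release_sg;

        /* Fan the mapped addresses out into ttm->dma_address[] */
        drm_prime_sg_to_dma_addr_array(ttm->sg, ttm->dma_address,
                                       ttm->num_pages);
        return 0;

release_sg:
        sg_free_table(ttm->sg);
free_sg:
        kfree(ttm->sg);
        ttm->sg = NULL;
        return ret;
}

Keeping a separate ttm->sg per attachment (built from the pages of the original BO's src_ttm) appears to be what lets the same userptr pages be mapped independently for each GPU that attaches to the memory.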
594 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_sg_bo() local
602 if (unlikely(ttm->sg)) { in kfd_mem_dmamap_sg_bo()
619 ttm->sg = create_sg_table(dma_addr, mem->bo->tbo.sg->sgl->length); in kfd_mem_dmamap_sg_bo()
620 if (unlikely(!ttm->sg)) { in kfd_mem_dmamap_sg_bo()
633 sg_free_table(ttm->sg); in kfd_mem_dmamap_sg_bo()
634 kfree(ttm->sg); in kfd_mem_dmamap_sg_bo()
635 ttm->sg = NULL; in kfd_mem_dmamap_sg_bo()
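The kfd_mem_dmamap_sg_bo() matches show ttm->sg being built from a single, already-known DMA address via the file's create_sg_table() helper (line 619) and released with sg_free_table()/kfree() on error. The helper's body is not part of the matches; a rough sketch of what a one-entry builder of that shape could look like, with a hypothetical name:

/* Sketch: build a one-entry sg table carrying a DMA address and length,
 * the shape create_sg_table() is used in above. Hypothetical helper;
 * the unmap path below reads back sgl->dma_address and sgl->length. */
static struct sg_table *one_entry_sg_sketch(dma_addr_t addr, unsigned int size)
{
        struct sg_table *sg = kmalloc(sizeof(*sg), GFP_KERNEL);

        if (!sg)
                return NULL;
        if (sg_alloc_table(sg, 1, GFP_KERNEL)) {
                kfree(sg);
                return NULL;
        }
        sg_dma_address(sg->sgl) = addr;
        sg->sgl->length = size;
        return sg;
}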
671 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_userptr() local
673 if (unlikely(!ttm->sg)) in kfd_mem_dmaunmap_userptr()
679 dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0); in kfd_mem_dmaunmap_userptr()
680 sg_free_table(ttm->sg); in kfd_mem_dmaunmap_userptr()
681 kfree(ttm->sg); in kfd_mem_dmaunmap_userptr()
682 ttm->sg = NULL; in kfd_mem_dmaunmap_userptr()
718 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_sg_bo() local
721 if (unlikely(!ttm->sg)) { in kfd_mem_dmaunmap_sg_bo()
731 dma_unmap_resource(adev->dev, ttm->sg->sgl->dma_address, in kfd_mem_dmaunmap_sg_bo()
732 ttm->sg->sgl->length, dir, DMA_ATTR_SKIP_CPU_SYNC); in kfd_mem_dmaunmap_sg_bo()
733 sg_free_table(ttm->sg); in kfd_mem_dmaunmap_sg_bo()
734 kfree(ttm->sg); in kfd_mem_dmaunmap_sg_bo()
735 ttm->sg = NULL; in kfd_mem_dmaunmap_sg_bo()
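The two dmaunmap groups above are the mirrors of the map paths: kfd_mem_dmaunmap_userptr() undoes a dma_map_sgtable() mapping, while kfd_mem_dmaunmap_sg_bo() undoes a single mapped resource, and both finish by freeing ttm->sg and clearing the pointer so the attachment reads as unmapped. A compact sketch of the two teardown shapes, with hypothetical function names:

/* Sketch: teardown for the sgtable case (lines 679-682). */
static void dmaunmap_userptr_sketch(struct amdgpu_device *adev,
                                    struct ttm_tt *ttm,
                                    enum dma_data_direction direction)
{
        if (!ttm->sg)                   /* nothing mapped */
                return;

        dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0);
        sg_free_table(ttm->sg);
        kfree(ttm->sg);
        ttm->sg = NULL;                 /* attachment is now unmapped */
}

/* Sketch: teardown for the single-entry case (lines 731-735). */
static void dmaunmap_sg_bo_sketch(struct amdgpu_device *adev,
                                  struct ttm_tt *ttm,
                                  enum dma_data_direction dir)
{
        if (!ttm->sg)
                return;

        dma_unmap_resource(adev->dev, ttm->sg->sgl->dma_address,
                           ttm->sg->sgl->length, dir, DMA_ATTR_SKIP_CPU_SYNC);
        sg_free_table(ttm->sg);
        kfree(ttm->sg);
        ttm->sg = NULL;
}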
846 (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && adev->ram_is_direct_mapped) || in kfd_mem_attach()
860 } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in kfd_mem_attach()
1019 ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages, &range); in init_user_pages()
1037 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm, range); in init_user_pages()
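The init_user_pages() matches pair amdgpu_ttm_tt_get_user_pages() with amdgpu_ttm_tt_get_user_pages_done(): the first populates bo->tbo.ttm->pages for the userptr range and hands back a range handle, the second releases that handle once the caller has finished with the pages. The same pairing reappears in update_invalid_user_pages() at lines 2386 and 2406. A minimal sketch of the pairing; the function name is hypothetical and the work done in between is elided:

/* Sketch of the get/..._done pairing seen at lines 1019 and 1037. */
static int user_pages_pairing_sketch(struct amdgpu_bo *bo)
{
        struct hmm_range *range;
        int ret;

        ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages, &range);
        if (ret)
                return ret;

        /* ... reserve and validate the BO here ... */

        amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm, range);
        return 0;
}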
1719 bo->tbo.ttm->sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1913 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1944 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1953 !amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1991 if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
2056 !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
2152 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
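Across the map/unmap matches above (lines 1913-2152), amdgpu_ttm_tt_get_usermm() serves as the test for whether a BO is a userptr BO: it returns the owning mm for a userptr ttm and NULL otherwise. A tiny sketch of that idiom, with a hypothetical wrapper name:

/* Sketch: the "is this a userptr BO?" test used throughout the matches. */
static bool bo_is_userptr_sketch(struct amdgpu_bo *bo)
{
        return amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) != NULL;
}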
2386 ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages, in update_invalid_user_pages()
2406 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm, range); in update_invalid_user_pages()
2485 if (bo->tbo.ttm->pages[0]) { in validate_invalid_user_pages()