Lines matching "full:mem" in drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
70 struct kgd_mem *mem) in kfd_mem_is_attached() argument
74 list_for_each_entry(entry, &mem->attachments, list) in kfd_mem_is_attached()
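
kfd_mem_is_attached() is a straight lookup over the attachment list matched above; reconstructed around those two lines, the body is essentially:

    static bool kfd_mem_is_attached(struct amdgpu_vm *avm,
                                    struct kgd_mem *mem)
    {
            struct kfd_mem_attachment *entry;

            /* one kfd_mem_attachment per GPU VM the BO is attached to */
            list_for_each_entry(entry, &mem->attachments, list)
                    if (entry->bo_va->base.vm == avm)
                            return true;
            return false;
    }
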
88 uint64_t mem; in amdgpu_amdkfd_gpuvm_init_mem_limits() local
91 mem = si.freeram - si.freehigh; in amdgpu_amdkfd_gpuvm_init_mem_limits()
92 mem *= si.mem_unit; in amdgpu_amdkfd_gpuvm_init_mem_limits()
95 kfd_mem_limit.max_system_mem_limit = mem - (mem >> 4); in amdgpu_amdkfd_gpuvm_init_mem_limits()
96 kfd_mem_limit.max_ttm_mem_limit = (mem >> 1) - (mem >> 3); in amdgpu_amdkfd_gpuvm_init_mem_limits()
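
The two limits computed above are plain shift arithmetic on the free low memory reported by si_meminfo(): max_system_mem_limit is mem - mem/16 = 15/16 of it, and max_ttm_mem_limit is mem/2 - mem/8 = 3/8 of it. A minimal user-space model of that arithmetic (the 16 GiB input is an arbitrary example, not taken from the source):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t mem = 16ULL << 30;             /* pretend free low memory: 16 GiB */
            uint64_t sys = mem - (mem >> 4);        /* 15/16 -> max_system_mem_limit */
            uint64_t ttm = (mem >> 1) - (mem >> 3); /* 1/2 - 1/8 = 3/8 -> max_ttm_mem_limit */

            printf("system limit %llu MiB, TTM limit %llu MiB\n",
                   (unsigned long long)(sys >> 20),
                   (unsigned long long)(ttm >> 20));
            return 0;
    }
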
401 static uint64_t get_pte_flags(struct amdgpu_device *adev, struct kgd_mem *mem) in get_pte_flags() argument
403 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in get_pte_flags()
404 bool coherent = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_COHERENT; in get_pte_flags()
405 bool uncached = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_UNCACHED; in get_pte_flags()
411 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE) in get_pte_flags()
413 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE) in get_pte_flags()
418 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in get_pte_flags()
433 !(mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM)) in get_pte_flags()
436 } else if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in get_pte_flags()
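
get_pte_flags() translates the KFD allocation flags into amdgpu VM mapping flags and from there into hardware PTE bits; lines 418-436 pick the ASIC-specific MTYPE for the COHERENT/UNCACHED and VRAM cases. The common path, sketched (kernel context assumed, MTYPE switch elided):

    uint32_t mapping_flags = AMDGPU_VM_PAGE_READABLE;

    if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE)
            mapping_flags |= AMDGPU_VM_PAGE_WRITEABLE;
    if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE)
            mapping_flags |= AMDGPU_VM_PAGE_EXECUTABLE;

    /* per-ASIC MTYPE selection (NC/RW/UC, snoop) elided here */
    pte_flags = amdgpu_gem_va_map_flags(adev, mapping_flags);
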
466 kfd_mem_dmamap_userptr(struct kgd_mem *mem, in kfd_mem_dmamap_userptr() argument
470 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmamap_userptr()
475 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
530 kfd_mem_dmamap_attachment(struct kgd_mem *mem, in kfd_mem_dmamap_attachment() argument
537 return kfd_mem_dmamap_userptr(mem, attachment); in kfd_mem_dmamap_attachment()
547 kfd_mem_dmaunmap_userptr(struct kgd_mem *mem, in kfd_mem_dmaunmap_userptr() argument
551 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmaunmap_userptr()
581 kfd_mem_dmaunmap_attachment(struct kgd_mem *mem, in kfd_mem_dmaunmap_attachment() argument
588 kfd_mem_dmaunmap_userptr(mem, attachment); in kfd_mem_dmaunmap_attachment()
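
Lines 470 and 551 show that the map and unmap helpers derive the DMA direction identically from the WRITABLE allocation flag, so CPU cache maintenance stays symmetric; the *_attachment() wrappers then dispatch on the attachment type. A sketch of both idioms:

    /* both sides of the mapping agree on the direction: */
    enum dma_data_direction direction =
            mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ?
            DMA_BIDIRECTIONAL : DMA_TO_DEVICE;

    /* kfd_mem_dmamap_attachment() dispatch (the unmap side is symmetric): */
    switch (attachment->type) {
    case KFD_MEM_ATT_SHARED:        /* same BO on both GPUs, nothing to map */
            return 0;
    case KFD_MEM_ATT_USERPTR:       /* per-GPU SG table over the user pages */
            return kfd_mem_dmamap_userptr(mem, attachment);
    default:
            WARN_ON_ONCE(1);
            return -EINVAL;
    }
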
599 kfd_mem_attach_userptr(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach_userptr() argument
602 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach_userptr()
606 ret = amdgpu_bo_reserve(mem->bo, false); in kfd_mem_attach_userptr()
613 ttm_bo_type_sg, mem->bo->tbo.base.resv, in kfd_mem_attach_userptr()
615 amdgpu_bo_unreserve(mem->bo); in kfd_mem_attach_userptr()
620 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_userptr()
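
For a userptr BO used by a second GPU, kfd_mem_attach_userptr() creates an SG-type BO on the importing device that shares the original BO's reservation object and user pages; the parent reference on line 620 keeps the original alive. Roughly (a sketch; the sg_table handling is elided):

    ret = amdgpu_bo_reserve(mem->bo, false);
    if (ret)
            return ret;

    ret = amdgpu_gem_object_create(adev, bo_size, 1, AMDGPU_GEM_DOMAIN_CPU,
                                   AMDGPU_GEM_CREATE_PREEMPTIBLE,
                                   ttm_bo_type_sg, mem->bo->tbo.base.resv,
                                   &gobj);
    amdgpu_bo_unreserve(mem->bo);
    if (ret)
            return ret;

    *bo = gem_to_amdgpu_bo(gobj);
    (*bo)->parent = amdgpu_bo_ref(mem->bo);  /* importing BO pins the original */
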
626 kfd_mem_attach_dmabuf(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach_dmabuf() argument
632 if (!mem->dmabuf) { in kfd_mem_attach_dmabuf()
633 mem->dmabuf = amdgpu_gem_prime_export(&mem->bo->tbo.base, in kfd_mem_attach_dmabuf()
634 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_attach_dmabuf()
636 if (IS_ERR(mem->dmabuf)) { in kfd_mem_attach_dmabuf()
637 ret = PTR_ERR(mem->dmabuf); in kfd_mem_attach_dmabuf()
638 mem->dmabuf = NULL; in kfd_mem_attach_dmabuf()
643 gobj = amdgpu_gem_prime_import(adev_to_drm(adev), mem->dmabuf); in kfd_mem_attach_dmabuf()
651 dma_buf_put(mem->dmabuf); in kfd_mem_attach_dmabuf()
655 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_dmabuf()
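
kfd_mem_attach_dmabuf() exports the BO at most once, caching the handle in mem->dmabuf (lines 632-639), and every importing GPU then builds its own amdgpu_bo from that export. Consolidated from the lines above, with the export flags as I believe they are in this kernel generation:

    if (!mem->dmabuf) {     /* lazy, cached export */
            mem->dmabuf = amdgpu_gem_prime_export(&mem->bo->tbo.base,
                            mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ?
                            DRM_RDWR : 0);
            if (IS_ERR(mem->dmabuf)) {
                    ret = PTR_ERR(mem->dmabuf);
                    mem->dmabuf = NULL;     /* stay NULL so a retry re-exports */
                    return ret;
            }
    }

    gobj = amdgpu_gem_prime_import(adev_to_drm(adev), mem->dmabuf);
    if (IS_ERR(gobj))
            return PTR_ERR(gobj);

    *bo = gem_to_amdgpu_bo(gobj);
    (*bo)->parent = amdgpu_bo_ref(mem->bo);
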
673 static int kfd_mem_attach(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach() argument
676 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in kfd_mem_attach()
677 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach()
678 uint64_t va = mem->va; in kfd_mem_attach()
698 if (adev == bo_adev || (mem->domain == AMDGPU_GEM_DOMAIN_VRAM && in kfd_mem_attach()
704 bo[i] = mem->bo; in kfd_mem_attach()
711 } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in kfd_mem_attach()
714 ret = kfd_mem_attach_userptr(adev, mem, &bo[i]); in kfd_mem_attach()
717 } else if (mem->domain == AMDGPU_GEM_DOMAIN_GTT && in kfd_mem_attach()
718 mem->bo->tbo.type != ttm_bo_type_sg) { in kfd_mem_attach()
724 ret = kfd_mem_attach_dmabuf(adev, mem, &bo[i]); in kfd_mem_attach()
732 bo[i] = mem->bo; in kfd_mem_attach()
746 attachment[i]->pte_flags = get_pte_flags(adev, mem); in kfd_mem_attach()
748 list_add(&attachment[i]->list, &mem->attachments); in kfd_mem_attach()
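
kfd_mem_attach() picks one of three attachment mechanisms per GPU. Reconstructed from the matched lines above (treat as a sketch; KFD_MEM_ATT_* are the attachment types used by this file):

    if (adev == bo_adev ||
        (mem->domain == AMDGPU_GEM_DOMAIN_VRAM &&
         amdgpu_xgmi_same_hive(adev, bo_adev))) {
            /* local BO, or VRAM reachable over XGMI: share mem->bo directly */
            attachment[i]->type = KFD_MEM_ATT_SHARED;
            bo[i] = mem->bo;
    } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) {
            /* userptr: per-GPU SG BO over the same user pages */
            attachment[i]->type = KFD_MEM_ATT_USERPTR;
            ret = kfd_mem_attach_userptr(adev, mem, &bo[i]);
    } else if (mem->domain == AMDGPU_GEM_DOMAIN_GTT &&
               mem->bo->tbo.type != ttm_bo_type_sg) {
            /* GTT BO from another GPU: attach through DMA-buf */
            attachment[i]->type = KFD_MEM_ATT_DMABUF;
            ret = kfd_mem_attach_dmabuf(adev, mem, &bo[i]);
    } else {
            /* e.g. pre-built SG BOs (doorbells, MMIO): share directly */
            bo[i] = mem->bo;
    }

Whichever branch runs, lines 746-748 then attach a bo_va, compute the PTE flags with get_pte_flags(), and link the attachment onto mem->attachments.
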
782 static void add_kgd_mem_to_kfd_bo_list(struct kgd_mem *mem, in add_kgd_mem_to_kfd_bo_list() argument
786 struct ttm_validate_buffer *entry = &mem->validate_list; in add_kgd_mem_to_kfd_bo_list()
787 struct amdgpu_bo *bo = mem->bo; in add_kgd_mem_to_kfd_bo_list()
800 static void remove_kgd_mem_from_kfd_bo_list(struct kgd_mem *mem, in remove_kgd_mem_from_kfd_bo_list() argument
805 bo_list_entry = &mem->validate_list; in remove_kgd_mem_from_kfd_bo_list()
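
Both helpers just maintain the per-process validate lists under process_info->lock. The add side, sketched from the fields visible above (list names as used by this file):

    struct ttm_validate_buffer *entry = &mem->validate_list;

    INIT_LIST_HEAD(&entry->head);
    entry->num_shared = 1;
    entry->bo = &mem->bo->tbo;

    mutex_lock(&process_info->lock);
    if (userptr)
            list_add_tail(&entry->head, &process_info->userptr_valid_list);
    else
            list_add_tail(&entry->head, &process_info->kfd_bo_list);
    mutex_unlock(&process_info->lock);
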
823 static int init_user_pages(struct kgd_mem *mem, uint64_t user_addr) in init_user_pages() argument
825 struct amdkfd_process_info *process_info = mem->process_info; in init_user_pages()
826 struct amdgpu_bo *bo = mem->bo; in init_user_pages()
856 amdgpu_bo_placement_from_domain(bo, mem->domain); in init_user_pages()
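
init_user_pages() wires the BO into the MMU notifier, faults in the user pages, and validates the BO in its domain (line 856). A condensed sketch, assuming the helper names of this kernel generation:

    ret = amdgpu_mn_register(bo, user_addr);        /* MMU interval notifier */
    if (ret)
            return ret;

    ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
    if (ret)
            goto unregister_out;

    ret = amdgpu_bo_reserve(bo, true);
    if (ret)
            goto release_out;

    amdgpu_bo_placement_from_domain(bo, mem->domain);
    ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
    amdgpu_bo_unreserve(bo);
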
895 * @mem: KFD BO structure.
899 static int reserve_bo_and_vm(struct kgd_mem *mem, in reserve_bo_and_vm() argument
903 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_vm()
910 ctx->sync = &mem->sync; in reserve_bo_and_vm()
941 * @mem: KFD BO structure.
949 static int reserve_bo_and_cond_vms(struct kgd_mem *mem, in reserve_bo_and_cond_vms() argument
953 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_cond_vms()
961 ctx->sync = &mem->sync; in reserve_bo_and_cond_vms()
966 list_for_each_entry(entry, &mem->attachments, list) { in reserve_bo_and_cond_vms()
988 list_for_each_entry(entry, &mem->attachments, list) { in reserve_bo_and_cond_vms()
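
Both reservation helpers collect the BO itself plus the page-directory BOs of the affected VMs (one VM for reserve_bo_and_vm(), all or only currently-mapped VMs for reserve_bo_and_cond_vms(), hence the two attachment walks on lines 966 and 988) into a single ttm_validate_buffer list and take every reservation in one ww-mutex transaction. The core idiom, sketched:

    INIT_LIST_HEAD(&ctx->list);
    INIT_LIST_HEAD(&ctx->duplicates);

    ctx->kfd_bo.bo = &bo->tbo;                  /* the kgd_mem's own BO... */
    list_add(&ctx->kfd_bo.head, &ctx->list);

    list_for_each_entry(entry, &mem->attachments, list)
            amdgpu_vm_get_pd_bo(entry->bo_va->base.vm, &ctx->list,
                                &ctx->vm_pd[i++]);  /* ...plus each VM's PD */

    ret = ttm_eu_reserve_buffers(&ctx->ticket, &ctx->list,
                                 false, &ctx->duplicates);
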
1042 static void unmap_bo_from_gpuvm(struct kgd_mem *mem, in unmap_bo_from_gpuvm() argument
1056 kfd_mem_dmaunmap_attachment(mem, entry); in unmap_bo_from_gpuvm()
1059 static int update_gpuvm_pte(struct kgd_mem *mem, in update_gpuvm_pte() argument
1068 ret = kfd_mem_dmamap_attachment(mem, entry); in update_gpuvm_pte()
1082 static int map_bo_to_gpuvm(struct kgd_mem *mem, in map_bo_to_gpuvm() argument
1103 ret = update_gpuvm_pte(mem, entry, sync, table_freed); in map_bo_to_gpuvm()
1112 unmap_bo_from_gpuvm(mem, entry, sync); in map_bo_to_gpuvm()
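
map_bo_to_gpuvm() creates the VA mapping, then update_gpuvm_pte() (line 1103) DMA-maps the attachment for that GPU before writing the page tables; on failure it unwinds through unmap_bo_from_gpuvm() (line 1112), which also undoes the DMA mapping (line 1056). Condensed, assuming the amdgpu_vm API of this kernel generation:

    /* map */
    ret = amdgpu_vm_bo_map(adev, bo_va, entry->va, 0,
                           amdgpu_bo_size(entry->bo_va->base.bo),
                           entry->pte_flags);
    ret = kfd_mem_dmamap_attachment(mem, entry);  /* inside update_gpuvm_pte() */
    ret = amdgpu_vm_bo_update(adev, bo_va, false, table_freed);

    /* unmap is the mirror image */
    amdgpu_vm_bo_unmap(adev, bo_va, entry->va);
    amdgpu_vm_clear_freed(adev, vm, &bo_va->last_pt_update);
    kfd_mem_dmaunmap_attachment(mem, entry);
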
1387 void *drm_priv, struct kgd_mem **mem, in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() argument
1434 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1435 if (!*mem) { in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1439 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1440 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1441 (*mem)->aql_queue = !!(flags & KFD_IOC_ALLOC_MEM_FLAGS_AQL_QUEUE_MEM); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1447 if ((*mem)->aql_queue) in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1450 (*mem)->alloc_flags = flags; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1452 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1480 bo->kfd_bo = *mem; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1481 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1485 (*mem)->va = va; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1486 (*mem)->domain = domain; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1487 (*mem)->mapped_to_gpu_memory = 0; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1488 (*mem)->process_info = avm->process_info; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1489 add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, user_addr); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1492 ret = init_user_pages(*mem, user_addr); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1503 remove_kgd_mem_from_kfd_bo_list(*mem, avm->process_info); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1507 /* Don't unreserve system mem limit twice */ in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1512 mutex_destroy(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1513 kfree(*mem); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
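
The construction order above (kzalloc, list/mutex/sync init, BO creation, publication on the process BO list, then init_user_pages() for userptrs) is mirrored exactly by the error unwinding on lines 1503-1513. A hypothetical caller, modeled on the KFD alloc ioctl (variable names are illustrative):

    struct kgd_mem *mem;
    uint64_t offset;
    int ret;

    ret = amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu(kgd, va_addr, size,
                    drm_priv, &mem, &offset,
                    KFD_IOC_ALLOC_MEM_FLAGS_VRAM |
                    KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE);
    if (ret)
            return ret;
    /* mem now owns the BO; map it via amdgpu_amdkfd_gpuvm_map_memory_to_gpu() */
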
1523 struct kgd_dev *kgd, struct kgd_mem *mem, void *drm_priv, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu() argument
1526 struct amdkfd_process_info *process_info = mem->process_info; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1527 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1535 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1536 mapped_to_gpu_memory = mem->mapped_to_gpu_memory; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1537 is_imported = mem->is_imported; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1538 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1539 /* lock is not needed after this, since mem is unused and will be freed anyway */ in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1545 mem->va, bo_size); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1550 bo_list_entry = &mem->validate_list; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1556 amdgpu_mn_unregister(mem->bo); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1558 ret = reserve_bo_and_cond_vms(mem, NULL, BO_VM_ALL, &ctx); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1566 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1568 pr_debug("Release VA 0x%llx - 0x%llx\n", mem->va, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1569 mem->va + bo_size * (1 + mem->aql_queue)); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1574 list_for_each_entry_safe(entry, tmp, &mem->attachments, list) in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1578 amdgpu_sync_free(&mem->sync); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1583 if (mem->bo->tbo.sg) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1584 sg_free_table(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1585 kfree(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1592 if ((mem->bo->preferred_domains == AMDGPU_GEM_DOMAIN_VRAM) && in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1600 drm_vma_node_revoke(&mem->bo->tbo.base.vma_node, drm_priv); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1601 if (mem->dmabuf) in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1602 dma_buf_put(mem->dmabuf); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1603 drm_gem_object_put(&mem->bo->tbo.base); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1604 mutex_destroy(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1605 kfree(mem); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
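
Teardown runs in reverse of construction: unregister the MMU notifier (line 1556), reserve the BO together with every VM it was attached to, strip the eviction fence (line 1566), detach all attachments (line 1574), free the sync object and any SG table, and only then drop the references. The final reference drops, consolidated from lines 1600-1605:

    drm_vma_node_revoke(&mem->bo->tbo.base.vma_node, drm_priv);
    if (mem->dmabuf)
            dma_buf_put(mem->dmabuf);          /* cached export, if any */
    drm_gem_object_put(&mem->bo->tbo.base);    /* may free the BO itself */
    mutex_destroy(&mem->lock);
    kfree(mem);
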
1611 struct kgd_dev *kgd, struct kgd_mem *mem, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu() argument
1624 bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1634 mutex_lock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1642 is_invalid_userptr = atomic_read(&mem->invalid); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1646 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1648 domain = mem->domain; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1652 mem->va, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1653 mem->va + bo_size * (1 + mem->aql_queue), in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1656 if (!kfd_mem_is_attached(avm, mem)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1657 ret = kfd_mem_attach(adev, mem, avm, mem->aql_queue); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1662 ret = reserve_bo_and_vm(mem, avm, &ctx); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1679 if (mem->mapped_to_gpu_memory == 0 && in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1692 list_for_each_entry(entry, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1699 ret = map_bo_to_gpuvm(mem, entry, ctx.sync, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1713 mem->mapped_to_gpu_memory++; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1715 mem->mapped_to_gpu_memory); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1735 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1736 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
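
Mapping attaches the BO to this VM on first use (line 1656), reserves BO and VMs, revalidates (userptr pages may have been invalidated in the meantime, line 1642), and then maps every attachment of this VM that is not mapped yet. The per-attachment loop, sketched; the trailing map_bo_to_gpuvm() arguments are an assumption, since the listing truncates the call on line 1699:

    list_for_each_entry(entry, &mem->attachments, list) {
            if (entry->bo_va->base.vm != avm || entry->is_mapped)
                    continue;  /* other VM's attachment, or already mapped */

            ret = map_bo_to_gpuvm(mem, entry, ctx.sync,
                                  is_invalid_userptr, &table_freed);
            if (ret)
                    goto out_unreserve;

            entry->is_mapped = true;
            mem->mapped_to_gpu_memory++;    /* line 1713 */
    }
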
1741 struct kgd_dev *kgd, struct kgd_mem *mem, void *drm_priv) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu() argument
1745 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1750 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1752 ret = reserve_bo_and_cond_vms(mem, avm, BO_VM_MAPPED, &ctx); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1766 mem->va, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1767 mem->va + bo_size * (1 + mem->aql_queue), in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1770 list_for_each_entry(entry, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1777 unmap_bo_from_gpuvm(mem, entry, ctx.sync); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1780 mem->mapped_to_gpu_memory--; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1782 mem->mapped_to_gpu_memory); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1788 if (mem->mapped_to_gpu_memory == 0 && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1789 !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1790 !mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1791 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1797 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
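
The unmap loop mirrors the map loop; the interesting tail is on lines 1788-1791: the process eviction fence is removed only when the last GPU mapping disappears, and only for BOs that are neither userptrs (those are protected by the MMU notifier instead) nor pinned:

    if (mem->mapped_to_gpu_memory == 0 &&
        !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) &&  /* not a userptr */
        !mem->bo->tbo.pin_count)                        /* not pinned */
            amdgpu_amdkfd_remove_eviction_fence(mem->bo,
                            process_info->eviction_fence);
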
1802 struct kgd_dev *kgd, struct kgd_mem *mem, bool intr) in amdgpu_amdkfd_gpuvm_sync_memory() argument
1809 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_sync_memory()
1810 amdgpu_sync_clone(&mem->sync, &sync); in amdgpu_amdkfd_gpuvm_sync_memory()
1811 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_sync_memory()
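
amdgpu_amdkfd_gpuvm_sync_memory() snapshots the BO's pending fences under mem->lock but waits outside it, so a long GPU wait never stalls other users of the BO. Essentially the whole function:

    struct amdgpu_sync sync;

    amdgpu_sync_create(&sync);

    mutex_lock(&mem->lock);
    amdgpu_sync_clone(&mem->sync, &sync);   /* snapshot the pending fences */
    mutex_unlock(&mem->lock);

    ret = amdgpu_sync_wait(&sync, intr);    /* wait without holding mem->lock */
    amdgpu_sync_free(&sync);
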
1819 struct kgd_mem *mem, void **kptr, uint64_t *size) in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel() argument
1822 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1832 mutex_lock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1853 bo, mem->process_info->eviction_fence); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1854 list_del_init(&mem->validate_list.head); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1861 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1869 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
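
Mapping a GTT BO for kernel CPU access pins it resident and drops it out of the eviction machinery (lines 1853-1854), since a kmapped BO must never move. Roughly, assuming the usual amdgpu BO helpers:

    ret = amdgpu_bo_reserve(bo, true);
    ret = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);  /* keep it resident */
    ret = amdgpu_bo_kmap(bo, kptr);                  /* CPU-visible mapping */

    amdgpu_amdkfd_remove_eviction_fence(bo,
                    mem->process_info->eviction_fence);
    list_del_init(&mem->validate_list.head);  /* no longer evicted/restored */
    amdgpu_bo_unreserve(bo);
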
1875 struct kfd_vm_fault_info *mem) in amdgpu_amdkfd_gpuvm_get_vm_fault_info() argument
1881 *mem = *adev->gmc.vm_fault_info; in amdgpu_amdkfd_gpuvm_get_vm_fault_info()
1891 struct kgd_mem **mem, uint64_t *size, in amdgpu_amdkfd_gpuvm_import_dmabuf() argument
1915 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1916 if (!*mem) in amdgpu_amdkfd_gpuvm_import_dmabuf()
1921 kfree(*mem); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1931 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1932 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1934 (*mem)->alloc_flags = in amdgpu_amdkfd_gpuvm_import_dmabuf()
1941 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1942 (*mem)->va = va; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1943 (*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ? in amdgpu_amdkfd_gpuvm_import_dmabuf()
1945 (*mem)->mapped_to_gpu_memory = 0; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1946 (*mem)->process_info = avm->process_info; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1947 add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, false); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1948 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1949 (*mem)->is_imported = true; in amdgpu_amdkfd_gpuvm_import_dmabuf()
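
An imported DMA-buf gets wrapped in a fresh kgd_mem whose alloc_flags are inferred from the BO's preferred domain (line 1934 starts that expression) and which is marked is_imported so the free path can account for it differently (line 1537). Consolidated, assuming the flag derivation of this kernel generation:

    (*mem)->alloc_flags =
            ((bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ?
             KFD_IOC_ALLOC_MEM_FLAGS_VRAM : KFD_IOC_ALLOC_MEM_FLAGS_GTT)
            | KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE
            | KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE;

    (*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ?
            AMDGPU_GEM_DOMAIN_VRAM : AMDGPU_GEM_DOMAIN_GTT;
    (*mem)->is_imported = true;
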
1965 int amdgpu_amdkfd_evict_userptr(struct kgd_mem *mem, in amdgpu_amdkfd_evict_userptr() argument
1968 struct amdkfd_process_info *process_info = mem->process_info; in amdgpu_amdkfd_evict_userptr()
1972 atomic_inc(&mem->invalid); in amdgpu_amdkfd_evict_userptr()
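
The MMU-notifier eviction path is deliberately cheap: it only marks the BO invalid and, for the first evicted BO of the process, quiesces the user queues and schedules the delayed restore worker. A sketch, assuming the helpers of this kernel generation:

    atomic_inc(&mem->invalid);  /* restore worker will revalidate this BO */

    if (atomic_inc_return(&process_info->evicted_bos) == 1) {
            /* first eviction: stop the queues and schedule the restore */
            r = kgd2kfd_quiesce_mm(mm);
            schedule_delayed_work(&process_info->restore_userptr_work,
                    msecs_to_jiffies(AMDGPU_USERPTR_RESTORE_DELAY_MS));
    }
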
1995 struct kgd_mem *mem, *tmp_mem; in update_invalid_user_pages() local
2003 list_for_each_entry_safe(mem, tmp_mem, in update_invalid_user_pages()
2006 if (!atomic_read(&mem->invalid)) in update_invalid_user_pages()
2009 bo = mem->bo; in update_invalid_user_pages()
2022 list_move_tail(&mem->validate_list.head, in update_invalid_user_pages()
2030 list_for_each_entry(mem, &process_info->userptr_inval_list, in update_invalid_user_pages()
2032 invalid = atomic_read(&mem->invalid); in update_invalid_user_pages()
2039 bo = mem->bo; in update_invalid_user_pages()
2060 if (atomic_cmpxchg(&mem->invalid, invalid, 0) != invalid) in update_invalid_user_pages()
2081 struct kgd_mem *mem, *tmp_mem; in validate_invalid_user_pages() local
2105 list_for_each_entry(mem, &process_info->userptr_inval_list, in validate_invalid_user_pages()
2107 list_add_tail(&mem->resv_list.head, &resv_list); in validate_invalid_user_pages()
2108 mem->resv_list.bo = mem->validate_list.bo; in validate_invalid_user_pages()
2109 mem->resv_list.num_shared = mem->validate_list.num_shared; in validate_invalid_user_pages()
2125 list_for_each_entry_safe(mem, tmp_mem, in validate_invalid_user_pages()
2130 bo = mem->bo; in validate_invalid_user_pages()
2134 amdgpu_bo_placement_from_domain(bo, mem->domain); in validate_invalid_user_pages()
2142 list_move_tail(&mem->validate_list.head, in validate_invalid_user_pages()
2151 list_for_each_entry(attachment, &mem->attachments, list) { in validate_invalid_user_pages()
2155 kfd_mem_dmaunmap_attachment(mem, attachment); in validate_invalid_user_pages()
2156 ret = update_gpuvm_pte(mem, attachment, &sync, NULL); in validate_invalid_user_pages()
2160 atomic_inc(&mem->invalid); in validate_invalid_user_pages()
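
Restoration is two-phase: update_invalid_user_pages() moves invalidated BOs from userptr_valid_list to userptr_inval_list and re-faults their pages, then validate_invalid_user_pages() reserves everything, validates, and rewrites the PTEs. The atomic_cmpxchg on line 2060 is the race detector:

    /* Mark the BO handled only if no new invalidation raced in;
     * otherwise mem->invalid stays nonzero and the worker retries. */
    if (atomic_cmpxchg(&mem->invalid, invalid, 0) != invalid)
            return -EAGAIN;
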
2275 struct kgd_mem *mem; in amdgpu_amdkfd_gpuvm_restore_process_bos() local
2303 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2306 list_add_tail(&mem->resv_list.head, &ctx.list); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2307 mem->resv_list.bo = mem->validate_list.bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2308 mem->resv_list.num_shared = mem->validate_list.num_shared; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2332 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2335 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2336 uint32_t domain = mem->domain; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2357 list_for_each_entry(attachment, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_restore_process_bos()
2361 kfd_mem_dmaunmap_attachment(mem, attachment); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2362 ret = update_gpuvm_pte(mem, attachment, &sync_obj, NULL); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2401 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2403 amdgpu_bo_fence(mem->bo, in amdgpu_amdkfd_gpuvm_restore_process_bos()
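
After an eviction, the restore path reserves every KFD BO together with all VM page directories, validates each BO back into its remembered domain (lines 2335-2336), updates all PTEs, and finally re-arms eviction by fencing every BO with the new eviction fence, as on lines 2401-2403:

    list_for_each_entry(mem, &process_info->kfd_bo_list,
                        validate_list.head)
            amdgpu_bo_fence(mem->bo,
                            &process_info->eviction_fence->base, true);
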
2423 int amdgpu_amdkfd_add_gws_to_process(void *info, void *gws, struct kgd_mem **mem) in amdgpu_amdkfd_add_gws_to_process() argument
2432 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_add_gws_to_process()
2433 if (!*mem) in amdgpu_amdkfd_add_gws_to_process()
2436 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_add_gws_to_process()
2437 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_add_gws_to_process()
2438 (*mem)->bo = amdgpu_bo_ref(gws_bo); in amdgpu_amdkfd_add_gws_to_process()
2439 (*mem)->domain = AMDGPU_GEM_DOMAIN_GWS; in amdgpu_amdkfd_add_gws_to_process()
2440 (*mem)->process_info = process_info; in amdgpu_amdkfd_add_gws_to_process()
2441 add_kgd_mem_to_kfd_bo_list(*mem, process_info, false); in amdgpu_amdkfd_add_gws_to_process()
2442 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_add_gws_to_process()
2446 mutex_lock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2467 mutex_unlock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2475 mutex_unlock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2476 amdgpu_sync_free(&(*mem)->sync); in amdgpu_amdkfd_add_gws_to_process()
2477 remove_kgd_mem_from_kfd_bo_list(*mem, process_info); in amdgpu_amdkfd_add_gws_to_process()
2479 mutex_destroy(&(*mem)->lock); in amdgpu_amdkfd_add_gws_to_process()
2480 kfree(*mem); in amdgpu_amdkfd_add_gws_to_process()
2481 *mem = NULL; in amdgpu_amdkfd_add_gws_to_process()
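
The GWS path wraps the device's pre-allocated GWS BO in a kgd_mem (taking its own reference on line 2438) and then, under the process lock, reserves and validates it and attaches the process eviction fence so amdgpu and KFD can evict each other; the error labels unwinding through line 2481 undo that setup. A condensed sketch (helper names per this kernel generation):

    mutex_lock(&(*mem)->process_info->lock);
    ret = amdgpu_bo_reserve(gws_bo, false);
    if (ret)
            goto bo_reservation_failure;

    ret = amdgpu_amdkfd_bo_validate(gws_bo, AMDGPU_GEM_DOMAIN_GWS, true);
    if (ret)
            goto bo_validation_failure;

    /* GWS is shared between amdgpu and amdkfd */
    amdgpu_bo_fence(gws_bo, &process_info->eviction_fence->base, true);
    amdgpu_bo_unreserve(gws_bo);
    mutex_unlock(&(*mem)->process_info->lock);
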
2485 int amdgpu_amdkfd_remove_gws_from_process(void *info, void *mem) in amdgpu_amdkfd_remove_gws_from_process() argument
2489 struct kgd_mem *kgd_mem = (struct kgd_mem *)mem; in amdgpu_amdkfd_remove_gws_from_process()
2509 kfree(mem); in amdgpu_amdkfd_remove_gws_from_process()