Lines matching references to id_mgr (drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c)

186 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab_idle() local
194 fences = kmalloc_array(id_mgr->num_ids, sizeof(void *), GFP_KERNEL); in amdgpu_vmid_grab_idle()
200 list_for_each_entry((*idle), &id_mgr->ids_lru, list) { in amdgpu_vmid_grab_idle()
212 if (&(*idle)->list == &id_mgr->ids_lru) { in amdgpu_vmid_grab_idle()
323 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab_used() local
331 list_for_each_entry_reverse((*id), &id_mgr->ids_lru, list) { in amdgpu_vmid_grab_used()
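
The two searches above walk the same per-hub LRU list in opposite directions: amdgpu_vmid_grab_idle() scans front to back for the least recently used ID that is no longer busy, detecting an exhausted list by comparing the cursor against the list head (line 212), while amdgpu_vmid_grab_used() scans back to front for an ID the VM already owns. The following user-space sketch models that pattern with a hand-rolled intrusive list; struct vmid, the busy flag, and the owner field are illustrative stand-ins (busy replaces the fence checks, owner replaces the VM-ownership test), not the driver's real types.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *prev, *next; };

    static void init_list_head(struct list_head *h) { h->prev = h->next = h; }
    static void list_add_tail(struct list_head *n, struct list_head *h)
    {
        n->prev = h->prev; n->next = h;
        h->prev->next = n; h->prev = n;
    }

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct vmid {
        struct list_head list;
        int hw_id;
        bool busy;            /* stands in for the fence checks */
        unsigned long owner;  /* stands in for the VM that last used the ID */
    };

    /* Forward walk, oldest entry first: the first non-busy ID wins.
     * Reaching the head again means every ID was busy, the same condition
     * the listing tests with &(*idle)->list == &id_mgr->ids_lru. */
    static struct vmid *grab_idle(struct list_head *lru)
    {
        for (struct list_head *pos = lru->next; pos != lru; pos = pos->next) {
            struct vmid *id = container_of(pos, struct vmid, list);
            if (!id->busy)
                return id;
        }
        return NULL;
    }

    /* Reverse walk, most recently used first, looking for an ID this VM
     * already holds; mirrors list_for_each_entry_reverse() in the listing. */
    static struct vmid *grab_used(struct list_head *lru, unsigned long vm)
    {
        for (struct list_head *pos = lru->prev; pos != lru; pos = pos->prev) {
            struct vmid *id = container_of(pos, struct vmid, list);
            if (id->owner == vm)
                return id;
        }
        return NULL;
    }

    int main(void)
    {
        struct list_head lru;
        struct vmid ids[4];

        init_list_head(&lru);
        for (int i = 0; i < 4; i++) {
            ids[i].hw_id = i + 1;
            ids[i].busy  = (i != 2);          /* only ID 3 is idle */
            ids[i].owner = (i == 3) ? 42 : 0; /* ID 4 belongs to VM 42 */
            list_add_tail(&ids[i].list, &lru);
        }
        struct vmid *id = grab_used(&lru, 42);
        if (!id)
            id = grab_idle(&lru);
        printf("grabbed hw_id %d\n", id ? id->hw_id : -1);
        return 0;
    }

Built with cc -std=c11, this prints "grabbed hw_id 4": the reverse scan finds the ID already owned by VM 42 before the idle scan is needed, which is also the order the driver tries them in.
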
385 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab() local
390 mutex_lock(&id_mgr->lock); in amdgpu_vmid_grab()
417 list_move_tail(&id->list, &id_mgr->ids_lru); in amdgpu_vmid_grab()
427 job->vmid = id - id_mgr->ids; in amdgpu_vmid_grab()
432 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_grab()
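
Once a candidate is chosen, amdgpu_vmid_grab() commits it under the manager lock: the ID moves to the LRU tail (line 417) and the hardware VMID handed to the job is recovered by pointer arithmetic against the ids array (line 427). A minimal sketch of that index computation, with a pthread mutex standing in for id_mgr->lock and the LRU move reduced to a comment:

    #include <pthread.h>
    #include <stddef.h>
    #include <stdio.h>

    struct vmid { long owner; };  /* placeholder member, not the real layout */

    static struct vmid ids[8];    /* models the id_mgr->ids array */
    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

    int main(void)
    {
        pthread_mutex_lock(&lock);
        struct vmid *id = &ids[5];     /* stand-in for the ID the LRU search picked */
        /* the real code does list_move_tail(&id->list, &id_mgr->ids_lru) here,
         * marking the ID most recently used */
        ptrdiff_t vmid = id - ids;     /* same arithmetic as job->vmid = id - id_mgr->ids */
        pthread_mutex_unlock(&lock);
        printf("vmid = %td\n", vmid);  /* prints 5 */
        return 0;
    }

The element index falls out of subtracting the array base from the element pointer, so no separate index field has to be kept in sync with the ID's position in the array.
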
440 struct amdgpu_vmid_mgr *id_mgr; in amdgpu_vmid_alloc_reserved() local
444 id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_alloc_reserved()
445 mutex_lock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
448 if (atomic_inc_return(&id_mgr->reserved_vmid_num) > in amdgpu_vmid_alloc_reserved()
451 atomic_dec(&id_mgr->reserved_vmid_num); in amdgpu_vmid_alloc_reserved()
456 idle = list_first_entry(&id_mgr->ids_lru, struct amdgpu_vmid, list); in amdgpu_vmid_alloc_reserved()
459 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
463 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
471 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_free_reserved() local
473 mutex_lock(&id_mgr->lock); in amdgpu_vmid_free_reserved()
476 &id_mgr->ids_lru); in amdgpu_vmid_free_reserved()
478 atomic_dec(&id_mgr->reserved_vmid_num); in amdgpu_vmid_free_reserved()
480 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_free_reserved()
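
The reserved-VMID paths pair an optimistic atomic_inc_return() against a cap with an atomic_dec() rollback when the cap is exceeded (lines 448-451); the free path returns the ID to the LRU and drops the count (lines 476-478). A self-contained C11 sketch of that accounting; MAX_RESERVED and the function names here are assumptions, since the real cap is a driver policy value:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_RESERVED 1  /* assumed cap for illustration */

    static atomic_int reserved_num;

    /* Optimistically take a slot; undo the increment when over the cap,
     * mirroring the atomic_inc_return()/atomic_dec() pairing in the listing.
     * atomic_fetch_add() returns the old value, so old + 1 is the new count,
     * matching the kernel's atomic_inc_return() semantics. */
    static bool reserve_vmid(void)
    {
        if (atomic_fetch_add(&reserved_num, 1) + 1 > MAX_RESERVED) {
            atomic_fetch_sub(&reserved_num, 1);
            return false;
        }
        return true;
    }

    static void unreserve_vmid(void)
    {
        atomic_fetch_sub(&reserved_num, 1);
    }

    int main(void)
    {
        printf("first:  %d\n", reserve_vmid());  /* 1: fits under the cap */
        printf("second: %d\n", reserve_vmid());  /* 0: over the cap, rolled back */
        unreserve_vmid();
        return 0;
    }

Incrementing first and rolling back on failure keeps the check-and-reserve step a single atomic operation, so two concurrent callers can never both slip under the cap.
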
495 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_reset() local
496 struct amdgpu_vmid *id = &id_mgr->ids[vmid]; in amdgpu_vmid_reset()
498 mutex_lock(&id_mgr->lock); in amdgpu_vmid_reset()
506 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_reset()
521 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_reset_all() local
522 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_reset_all()
524 for (j = 1; j < id_mgr->num_ids; ++j) in amdgpu_vmid_reset_all()
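
amdgpu_vmid_reset() clears one ID's state under its manager's lock, and amdgpu_vmid_reset_all() applies that to every hub, starting each inner loop at j = 1 so VMID 0 is left untouched. A compact model of that nesting, with pthread mutexes and made-up NUM_HUBS/NUM_IDS sizes standing in for the driver's values:

    #include <pthread.h>
    #include <stdio.h>

    #define NUM_HUBS 2
    #define NUM_IDS  8

    struct vmid { unsigned long owner; };

    struct vmid_mgr {
        pthread_mutex_t lock;
        unsigned num_ids;
        struct vmid ids[NUM_IDS];
    };

    static struct vmid_mgr mgrs[NUM_HUBS];

    /* Clear one ID's bookkeeping under its manager's lock. */
    static void vmid_reset(struct vmid_mgr *mgr, unsigned vmid)
    {
        pthread_mutex_lock(&mgr->lock);
        mgr->ids[vmid].owner = 0;
        pthread_mutex_unlock(&mgr->lock);
    }

    int main(void)
    {
        for (int i = 0; i < NUM_HUBS; i++) {
            pthread_mutex_init(&mgrs[i].lock, NULL);
            mgrs[i].num_ids = NUM_IDS;
        }
        /* ID 0 is skipped, matching the j = 1 start in the listing. */
        for (int i = 0; i < NUM_HUBS; i++)
            for (unsigned j = 1; j < mgrs[i].num_ids; ++j)
                vmid_reset(&mgrs[i], j);
        printf("all VMIDs reset\n");
        return 0;
    }
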
541 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_mgr_init() local
542 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_mgr_init()
544 mutex_init(&id_mgr->lock); in amdgpu_vmid_mgr_init()
545 INIT_LIST_HEAD(&id_mgr->ids_lru); in amdgpu_vmid_mgr_init()
546 atomic_set(&id_mgr->reserved_vmid_num, 0); in amdgpu_vmid_mgr_init()
549 id_mgr->num_ids = adev->vm_manager.first_kfd_vmid; in amdgpu_vmid_mgr_init()
552 for (j = 1; j < id_mgr->num_ids; ++j) { in amdgpu_vmid_mgr_init()
554 amdgpu_sync_create(&id_mgr->ids[j].active); in amdgpu_vmid_mgr_init()
555 list_add_tail(&id_mgr->ids[j].list, &id_mgr->ids_lru); in amdgpu_vmid_mgr_init()
572 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_mgr_fini() local
573 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_mgr_fini()
575 mutex_destroy(&id_mgr->lock); in amdgpu_vmid_mgr_fini()
577 struct amdgpu_vmid *id = &id_mgr->ids[j]; in amdgpu_vmid_mgr_fini()
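
Init and fini bracket the same state every function above relies on: the lock, the LRU head, the reserved counter, and the per-ID entries from index 1 upward. The listing also shows num_ids being capped at first_kfd_vmid (line 549), keeping the upper IDs out of this allocator. A sketch of that lifecycle under the same simplifications as the earlier examples; amdgpu_sync_create() and its per-ID teardown are elided:

    #include <pthread.h>
    #include <stdio.h>

    #define NUM_IDS 8  /* stand-in for first_kfd_vmid */

    struct list_head { struct list_head *prev, *next; };

    static void init_list_head(struct list_head *h) { h->prev = h->next = h; }
    static void list_add_tail(struct list_head *n, struct list_head *h)
    {
        n->prev = h->prev; n->next = h;
        h->prev->next = n; h->prev = n;
    }

    struct vmid { struct list_head list; };

    struct vmid_mgr {
        pthread_mutex_t lock;
        unsigned num_ids;
        int reserved_num;          /* stands in for the atomic counter */
        struct list_head ids_lru;
        struct vmid ids[NUM_IDS];
    };

    static void vmid_mgr_init(struct vmid_mgr *mgr)
    {
        pthread_mutex_init(&mgr->lock, NULL);
        init_list_head(&mgr->ids_lru);
        mgr->reserved_num = 0;
        mgr->num_ids = NUM_IDS;
        /* ID 0 stays off the LRU, matching the j = 1 loop in the listing. */
        for (unsigned j = 1; j < mgr->num_ids; ++j)
            list_add_tail(&mgr->ids[j].list, &mgr->ids_lru);
    }

    static void vmid_mgr_fini(struct vmid_mgr *mgr)
    {
        /* per-ID state would be torn down here too, as lines 575-577 do */
        pthread_mutex_destroy(&mgr->lock);
    }

    int main(void)
    {
        struct vmid_mgr mgr;
        vmid_mgr_init(&mgr);
        vmid_mgr_fini(&mgr);
        printf("manager lifecycle ok\n");
        return 0;
    }
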