Lines matching refs: id_mgr
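
Taken together, the references below are consistent with a per-VMHUB manager of roughly this shape (a sketch inferred from the usage; field order and the AMDGPU_NUM_VMID bound are assumptions rather than a verbatim copy of the driver header):

        struct amdgpu_vmid_mgr {
                struct mutex            lock;                   /* serializes grab/reserve/reset */
                unsigned                num_ids;                /* IDs managed for this hub */
                struct list_head        ids_lru;                /* least recently used at the head */
                struct amdgpu_vmid      ids[AMDGPU_NUM_VMID];   /* index == hardware VMID */
                atomic_t                reserved_vmid_num;      /* IDs pulled out of the LRU */
        };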
203 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab_idle() local
211 fences = kmalloc_array(sizeof(void *), id_mgr->num_ids, GFP_KERNEL); in amdgpu_vmid_grab_idle()
217 list_for_each_entry((*idle), &id_mgr->ids_lru, list) { in amdgpu_vmid_grab_idle()
225 if (&(*idle)->list == &id_mgr->ids_lru) { in amdgpu_vmid_grab_idle()
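
Lines 203-225 are the idle-ID search in amdgpu_vmid_grab_idle(): a fence array sized to num_ids is allocated, the LRU is walked front to back (least recently used first), and landing back on the list head means nothing idle was found. A minimal sketch of that sentinel pattern, with the fence collection and error handling elided:

        struct amdgpu_vmid *idle;

        list_for_each_entry(idle, &id_mgr->ids_lru, list) {
                /* an ID whose 'active' sync has no fence pending is idle */
                if (!amdgpu_sync_peek_fence(&idle->active, ring))
                        break;
        }

        /*
         * When the walk runs off the end, the cursor aliases the list head
         * itself; that is exactly what line 225 tests, so the code never
         * dereferences a bogus entry.
         */
        if (&idle->list == &id_mgr->ids_lru)
                idle = NULL;    /* no idle VMID; caller waits on the fences */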
339 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab_used() local
347 list_for_each_entry_reverse((*id), &id_mgr->ids_lru, list) { in amdgpu_vmid_grab_used()
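
Lines 339-347 search in the opposite direction: list_for_each_entry_reverse() visits the LRU tail first, i.e. the IDs most recently parked by list_move_tail() in amdgpu_vmid_grab(), so a still-valid previous assignment is found quickly. A sketch, where vmid_can_be_reused() is a hypothetical stand-in for the driver's owner and page-directory checks:

        static struct amdgpu_vmid *
        vmid_find_used(struct amdgpu_vmid_mgr *id_mgr, struct amdgpu_vm *vm)
        {
                struct amdgpu_vmid *id;

                list_for_each_entry_reverse(id, &id_mgr->ids_lru, list) {
                        if (vmid_can_be_reused(id, vm)) /* hypothetical helper */
                                return id;
                }
                return NULL;    /* nothing reusable; fall back to an idle ID */
        }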
412 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab() local
417 mutex_lock(&id_mgr->lock); in amdgpu_vmid_grab()
447 list_move_tail(&id->list, &id_mgr->ids_lru); in amdgpu_vmid_grab()
457 job->vmid = id - id_mgr->ids; in amdgpu_vmid_grab()
462 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_grab()
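
Lines 412-462 are the grab path proper: everything runs under id_mgr->lock, the chosen ID is moved to the LRU tail to mark it most recently used, and line 457 derives the hardware VMID number by pointer arithmetic, which works only because id points into the id_mgr->ids[] array. Condensed:

        mutex_lock(&id_mgr->lock);
        /* ... pick 'id' via the used/idle searches above ... */
        list_move_tail(&id->list, &id_mgr->ids_lru);    /* now most recently used */
        job->vmid = id - id_mgr->ids;                   /* array index == VMID */
        mutex_unlock(&id_mgr->lock);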
470 struct amdgpu_vmid_mgr *id_mgr; in amdgpu_vmid_alloc_reserved() local
474 id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_alloc_reserved()
475 mutex_lock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
478 if (atomic_inc_return(&id_mgr->reserved_vmid_num) > in amdgpu_vmid_alloc_reserved()
481 atomic_dec(&id_mgr->reserved_vmid_num); in amdgpu_vmid_alloc_reserved()
486 idle = list_first_entry(&id_mgr->ids_lru, struct amdgpu_vmid, list); in amdgpu_vmid_alloc_reserved()
489 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
493 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
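
Lines 470-493 reserve an ID outside the normal rotation: the counter is bumped optimistically with atomic_inc_return() and rolled back on failure; on success the least recently used entry is unlinked from the LRU so the grab paths can no longer hand it out. A sketch, assuming AMDGPU_VM_MAX_RESERVED_VMID is the limit behind the truncated comparison at line 478, with the VM-side bookkeeping omitted:

        static int vmid_reserve(struct amdgpu_vmid_mgr *id_mgr,
                                struct amdgpu_vmid **out)       /* hypothetical wrapper */
        {
                struct amdgpu_vmid *idle;

                mutex_lock(&id_mgr->lock);
                if (atomic_inc_return(&id_mgr->reserved_vmid_num) >
                    AMDGPU_VM_MAX_RESERVED_VMID) {
                        /* over the limit: roll back the optimistic increment */
                        atomic_dec(&id_mgr->reserved_vmid_num);
                        mutex_unlock(&id_mgr->lock);
                        return -EINVAL;
                }

                /* unlink the least recently used ID from normal rotation */
                idle = list_first_entry(&id_mgr->ids_lru, struct amdgpu_vmid, list);
                list_del_init(&idle->list);
                mutex_unlock(&id_mgr->lock);

                *out = idle;
                return 0;
        }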
501 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_free_reserved() local
503 mutex_lock(&id_mgr->lock); in amdgpu_vmid_free_reserved()
506 &id_mgr->ids_lru); in amdgpu_vmid_free_reserved()
508 atomic_dec(&id_mgr->reserved_vmid_num); in amdgpu_vmid_free_reserved()
510 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_free_reserved()
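
Lines 501-510 are the inverse: still under the manager lock, the reserved ID is linked back onto ids_lru and the reservation counter is decremented. Sketch, with the vm->reserved_vmid[] bookkeeping assumed from context:

        mutex_lock(&id_mgr->lock);
        if (vm->reserved_vmid[vmhub]) {
                /* put the ID back into the normal LRU rotation */
                list_add(&vm->reserved_vmid[vmhub]->list, &id_mgr->ids_lru);
                vm->reserved_vmid[vmhub] = NULL;
                atomic_dec(&id_mgr->reserved_vmid_num);
        }
        mutex_unlock(&id_mgr->lock);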
524 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_reset() local
525 struct amdgpu_vmid *id = &id_mgr->ids[vmid]; in amdgpu_vmid_reset()
527 mutex_lock(&id_mgr->lock); in amdgpu_vmid_reset()
535 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_reset()
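
Lines 524-535 address a single ID by number, confirming that ids[] is indexed by hardware VMID; clearing the ID's state under the lock is what forces a full flush the next time it is grabbed. A sketch, where the owner field is an assumption about what gets cleared:

        struct amdgpu_vmid *id = &id_mgr->ids[vmid];

        mutex_lock(&id_mgr->lock);
        id->owner = 0;  /* assumed field: no client owns this VMID any more */
        mutex_unlock(&id_mgr->lock);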
550 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_reset_all() local
551 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_reset_all()
553 for (j = 1; j < id_mgr->num_ids; ++j) in amdgpu_vmid_reset_all()
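
Lines 550-553 reset every ID on every hub. The inner loop starts at 1 because VMID 0 is the system VM and is never managed through the LRU. Sketch, assuming AMDGPU_MAX_VMHUBS as the bound of the id_mgr[] array:

        unsigned i, j;

        for (i = 0; i < AMDGPU_MAX_VMHUBS; ++i) {
                struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[i];

                for (j = 1; j < id_mgr->num_ids; ++j)
                        amdgpu_vmid_reset(adev, i, j);
        }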
570 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_mgr_init() local
571 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_mgr_init()
573 mutex_init(&id_mgr->lock); in amdgpu_vmid_mgr_init()
574 INIT_LIST_HEAD(&id_mgr->ids_lru); in amdgpu_vmid_mgr_init()
575 atomic_set(&id_mgr->reserved_vmid_num, 0); in amdgpu_vmid_mgr_init()
578 id_mgr->num_ids = adev->vm_manager.first_kfd_vmid; in amdgpu_vmid_mgr_init()
581 for (j = 1; j < id_mgr->num_ids; ++j) { in amdgpu_vmid_mgr_init()
583 amdgpu_sync_create(&id_mgr->ids[j].active); in amdgpu_vmid_mgr_init()
584 list_add_tail(&id_mgr->ids[j].list, &id_mgr->ids_lru); in amdgpu_vmid_mgr_init()
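
Lines 570-584 initialize each per-hub manager: the lock, an empty LRU, and a zeroed reservation counter. Line 578 caps num_ids at first_kfd_vmid, leaving the IDs above it dedicated to KFD, and the population loop again skips VMID 0. Sketch of the per-ID setup (the per-ID reset between lines 581 and 583 is assumed; it would not reference id_mgr and so does not appear in this listing):

        for (j = 1; j < id_mgr->num_ids; ++j) {
                amdgpu_vmid_reset(adev, i, j);          /* assumed: start clean */
                amdgpu_sync_create(&id_mgr->ids[j].active);
                list_add_tail(&id_mgr->ids[j].list, &id_mgr->ids_lru);
        }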
601 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_mgr_fini() local
602 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_mgr_fini()
604 mutex_destroy(&id_mgr->lock); in amdgpu_vmid_mgr_fini()
606 struct amdgpu_vmid *id = &id_mgr->ids[j]; in amdgpu_vmid_mgr_fini()
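
Lines 601-606 tear the managers down: mutex_destroy() on each lock, then a per-ID cleanup loop. A sketch of the body implied by line 606, where amdgpu_sync_free() mirrors the amdgpu_sync_create() from init and last_flush is an assumed fence field:

        unsigned j;

        for (j = 0; j < ARRAY_SIZE(id_mgr->ids); ++j) {
                struct amdgpu_vmid *id = &id_mgr->ids[j];

                amdgpu_sync_free(&id->active);  /* undo amdgpu_sync_create() */
                dma_fence_put(id->last_flush);  /* assumed field; NULL-safe */
        }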