Lines matching refs: amn

114 struct amdgpu_mn *amn = container_of(work, struct amdgpu_mn, work); in amdgpu_mn_destroy() local
115 struct amdgpu_device *adev = amn->adev; in amdgpu_mn_destroy()
120 down_write(&amn->lock); in amdgpu_mn_destroy()
121 hash_del(&amn->node); in amdgpu_mn_destroy()
123 &amn->objects.rb_root, it.rb) { in amdgpu_mn_destroy()
130 up_write(&amn->lock); in amdgpu_mn_destroy()
132 mmu_notifier_unregister_no_release(&amn->mn, amn->mm); in amdgpu_mn_destroy()
133 kfree(amn); in amdgpu_mn_destroy()
147 struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn); in amdgpu_mn_release() local
149 INIT_WORK(&amn->work, amdgpu_mn_destroy); in amdgpu_mn_release()
150 schedule_work(&amn->work); in amdgpu_mn_release()
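
The first two groups above (lines 114-133 and 147-150) come from amdgpu_mn_destroy() and amdgpu_mn_release(): the MMU-notifier ->release() callback does not tear the notifier down in place, it schedules a work item that takes the write side of amn->lock, unhashes the notifier, drops whatever interval-tree nodes remain and only then unregisters and frees it. A hedged kernel-style sketch of that deferral pattern; the struct amdgpu_mn layout is reconstructed from the fields referenced in this listing (the enum name for the type field is assumed), and the device-level locking around the hash is omitted:

/* Layout reconstructed from the references above; sketch only. */
struct amdgpu_mn {
        struct amdgpu_device    *adev;          /* owning device */
        struct mm_struct        *mm;            /* process address space */
        struct mmu_notifier     mn;             /* the registered notifier */
        enum amdgpu_mn_type     type;           /* gfx or hsa flavour (enum name assumed) */

        struct hlist_node       node;           /* entry in adev->mn_hash */
        struct work_struct      work;           /* deferred destruction */

        struct rw_semaphore     lock;           /* protects @objects */
        struct rb_root_cached   objects;        /* interval tree of registered BOs */
        struct mutex            read_lock;      /* serializes read-lock takers */
        atomic_t                recursion;      /* nested invalidation depth */
};

/* Deferred teardown, run from the system workqueue rather than from the
 * ->release() callback itself. */
static void amdgpu_mn_destroy(struct work_struct *work)
{
        struct amdgpu_mn *amn = container_of(work, struct amdgpu_mn, work);

        down_write(&amn->lock);
        hash_del(&amn->node);
        /* ... free every amdgpu_mn_node still left in amn->objects ... */
        up_write(&amn->lock);

        mmu_notifier_unregister_no_release(&amn->mn, amn->mm);
        kfree(amn);
}

/* Unregistering from inside a notifier callback is not safe, so
 * ->release() only queues the work item above. */
static void amdgpu_mn_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
        struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn);

        INIT_WORK(&amn->work, amdgpu_mn_destroy);
        schedule_work(&amn->work);
}
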
181 static int amdgpu_mn_read_lock(struct amdgpu_mn *amn, bool blockable) in amdgpu_mn_read_lock() argument
184 mutex_lock(&amn->read_lock); in amdgpu_mn_read_lock()
185 else if (!mutex_trylock(&amn->read_lock)) in amdgpu_mn_read_lock()
188 if (atomic_inc_return(&amn->recursion) == 1) in amdgpu_mn_read_lock()
189 down_read_non_owner(&amn->lock); in amdgpu_mn_read_lock()
190 mutex_unlock(&amn->read_lock); in amdgpu_mn_read_lock()
200 static void amdgpu_mn_read_unlock(struct amdgpu_mn *amn) in amdgpu_mn_read_unlock() argument
202 if (atomic_dec_return(&amn->recursion) == 0) in amdgpu_mn_read_unlock()
203 up_read_non_owner(&amn->lock); in amdgpu_mn_read_unlock()
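
The read-lock/read-unlock pair above (lines 181-203) is a recursion-aware read lock for the invalidation callbacks: amn->read_lock serializes the lock takers (falling back to mutex_trylock() when the callback is not allowed to sleep), amn->recursion counts how deeply invalidations have nested, and only the outermost level actually takes amn->lock for reading. The _non_owner rwsem variants are used because the matching unlock may run from a different callback invocation than the one that acquired it. Reassembled from the lines above, the pair reads roughly as follows:

/* Take amn->lock for reading; callable recursively from nested
 * invalidations, must not sleep when @blockable is false. */
static int amdgpu_mn_read_lock(struct amdgpu_mn *amn, bool blockable)
{
        if (blockable)
                mutex_lock(&amn->read_lock);
        else if (!mutex_trylock(&amn->read_lock))
                return -EAGAIN;

        /* only the first, outermost caller takes the rwsem */
        if (atomic_inc_return(&amn->recursion) == 1)
                down_read_non_owner(&amn->lock);
        mutex_unlock(&amn->read_lock);

        return 0;
}

/* Drop the read side once the last nested invalidation is done. */
static void amdgpu_mn_read_unlock(struct amdgpu_mn *amn)
{
        if (atomic_dec_return(&amn->recursion) == 0)
                up_read_non_owner(&amn->lock);
}
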
254 struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn); in amdgpu_mn_invalidate_range_start_gfx() local
263 if (amdgpu_mn_read_lock(amn, blockable)) in amdgpu_mn_invalidate_range_start_gfx()
266 it = interval_tree_iter_first(&amn->objects, start, end); in amdgpu_mn_invalidate_range_start_gfx()
271 amdgpu_mn_read_unlock(amn); in amdgpu_mn_invalidate_range_start_gfx()
302 struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn); in amdgpu_mn_invalidate_range_start_hsa() local
308 if (amdgpu_mn_read_lock(amn, blockable)) in amdgpu_mn_invalidate_range_start_hsa()
311 it = interval_tree_iter_first(&amn->objects, start, end); in amdgpu_mn_invalidate_range_start_hsa()
317 amdgpu_mn_read_unlock(amn); in amdgpu_mn_invalidate_range_start_hsa()
351 struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn); in amdgpu_mn_invalidate_range_end() local
353 amdgpu_mn_read_unlock(amn); in amdgpu_mn_invalidate_range_end()
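
Lines 254-353 are the ->invalidate_range_start() handlers for the gfx and hsa flavours plus the shared ->invalidate_range_end(): range-start takes the read lock (returning -EAGAIN if it may not block), walks the interval tree of registered BOs over [start, end] and invalidates every overlapping node, and normally leaves the lock held until range-end releases it; the read_unlock() calls inside the two start handlers are the non-blockable bail-out path. A simplified sketch of the gfx variant, with the per-node invalidation helper treated as a given:

static int amdgpu_mn_invalidate_range_start_gfx(struct mmu_notifier *mn,
                                                struct mm_struct *mm,
                                                unsigned long start,
                                                unsigned long end,
                                                bool blockable)
{
        struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn);
        struct interval_tree_node *it;

        /* the notification range is exclusive, the interval tree is inclusive */
        end -= 1;

        if (amdgpu_mn_read_lock(amn, blockable))
                return -EAGAIN;

        it = interval_tree_iter_first(&amn->objects, start, end);
        while (it) {
                struct amdgpu_mn_node *node;

                if (!blockable) {
                        /* cannot wait on the BOs: give up, caller will retry */
                        amdgpu_mn_read_unlock(amn);
                        return -EAGAIN;
                }

                node = container_of(it, struct amdgpu_mn_node, it);
                it = interval_tree_iter_next(it, start, end);

                amdgpu_mn_invalidate_node(node, start, end);
        }

        /* amn->lock stays held for reading until ->invalidate_range_end() */
        return 0;
}
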
387 struct amdgpu_mn *amn; in amdgpu_mn_get() local
397 hash_for_each_possible(adev->mn_hash, amn, node, key) in amdgpu_mn_get()
398 if (AMDGPU_MN_KEY(amn->mm, amn->type) == key) in amdgpu_mn_get()
401 amn = kzalloc(sizeof(*amn), GFP_KERNEL); in amdgpu_mn_get()
402 if (!amn) { in amdgpu_mn_get()
403 amn = ERR_PTR(-ENOMEM); in amdgpu_mn_get()
407 amn->adev = adev; in amdgpu_mn_get()
408 amn->mm = mm; in amdgpu_mn_get()
409 init_rwsem(&amn->lock); in amdgpu_mn_get()
410 amn->type = type; in amdgpu_mn_get()
411 amn->mn.ops = &amdgpu_mn_ops[type]; in amdgpu_mn_get()
412 amn->objects = RB_ROOT_CACHED; in amdgpu_mn_get()
413 mutex_init(&amn->read_lock); in amdgpu_mn_get()
414 atomic_set(&amn->recursion, 0); in amdgpu_mn_get()
416 r = __mmu_notifier_register(&amn->mn, mm); in amdgpu_mn_get()
420 hash_add(adev->mn_hash, &amn->node, AMDGPU_MN_KEY(mm, type)); in amdgpu_mn_get()
426 return amn; in amdgpu_mn_get()
431 kfree(amn); in amdgpu_mn_get()
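
amdgpu_mn_get() (lines 387-431) is a look-up-or-create keyed on the (mm, type) pair: the per-device hash adev->mn_hash is probed first, and only if no notifier for this process and type exists yet is a new struct amdgpu_mn allocated, initialized and registered with the core MM. A hedged sketch of that flow, with the surrounding locking and the goto-style error handling of the real function left out:

static struct amdgpu_mn *amdgpu_mn_get(struct amdgpu_device *adev,
                                       enum amdgpu_mn_type type)
{
        struct mm_struct *mm = current->mm;
        unsigned long key = AMDGPU_MN_KEY(mm, type);
        struct amdgpu_mn *amn;
        int r;

        /* reuse an existing notifier for this process and type */
        hash_for_each_possible(adev->mn_hash, amn, node, key)
                if (AMDGPU_MN_KEY(amn->mm, amn->type) == key)
                        return amn;

        amn = kzalloc(sizeof(*amn), GFP_KERNEL);
        if (!amn)
                return ERR_PTR(-ENOMEM);

        amn->adev = adev;
        amn->mm = mm;
        init_rwsem(&amn->lock);
        amn->type = type;
        amn->mn.ops = &amdgpu_mn_ops[type];
        amn->objects = RB_ROOT_CACHED;
        mutex_init(&amn->read_lock);
        atomic_set(&amn->recursion, 0);

        r = __mmu_notifier_register(&amn->mn, mm);
        if (r) {
                kfree(amn);
                return ERR_PTR(r);
        }

        hash_add(adev->mn_hash, &amn->node, key);
        return amn;
}
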
451 struct amdgpu_mn *amn; in amdgpu_mn_register() local
456 amn = amdgpu_mn_get(adev, type); in amdgpu_mn_register()
457 if (IS_ERR(amn)) in amdgpu_mn_register()
458 return PTR_ERR(amn); in amdgpu_mn_register()
466 down_write(&amn->lock); in amdgpu_mn_register()
468 while ((it = interval_tree_iter_first(&amn->objects, addr, end))) { in amdgpu_mn_register()
471 interval_tree_remove(&node->it, &amn->objects); in amdgpu_mn_register()
482 bo->mn = amn; in amdgpu_mn_register()
490 interval_tree_insert(&node->it, &amn->objects); in amdgpu_mn_register()
492 up_write(&amn->lock); in amdgpu_mn_register()
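
In amdgpu_mn_register() (lines 451-492) the new BO's range is not inserted blindly: under the write side of amn->lock, every existing node whose interval overlaps [addr, end] is removed first, the covered range is widened accordingly and the BO lists are merged, and only then is a single node re-inserted; the BO also remembers its notifier in bo->mn. A rough illustration of just that merge loop, where the helper name amdgpu_mn_insert_merged() and the struct amdgpu_mn_node layout are invented for the sketch:

/* Hypothetical node layout: one interval plus the BOs sharing it. */
struct amdgpu_mn_node {
        struct interval_tree_node       it;
        struct list_head                bos;
};

/* Merge every node overlapping [addr, end] into @new_node and insert the
 * result. @new_node->bos is assumed to be initialized by the caller;
 * called with amn->lock held for writing. */
static void amdgpu_mn_insert_merged(struct amdgpu_mn *amn,
                                    struct amdgpu_mn_node *new_node,
                                    unsigned long addr, unsigned long end)
{
        struct interval_tree_node *it;

        while ((it = interval_tree_iter_first(&amn->objects, addr, end))) {
                struct amdgpu_mn_node *node =
                        container_of(it, struct amdgpu_mn_node, it);

                /* widen the range to cover the old node ... */
                addr = min(it->start, addr);
                end = max(it->last, end);

                /* ... and steal its BO list before dropping it */
                interval_tree_remove(&node->it, &amn->objects);
                list_splice(&node->bos, &new_node->bos);
                kfree(node);
        }

        new_node->it.start = addr;
        new_node->it.last = end;
        interval_tree_insert(&new_node->it, &amn->objects);
}
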
507 struct amdgpu_mn *amn; in amdgpu_mn_unregister() local
512 amn = bo->mn; in amdgpu_mn_unregister()
513 if (amn == NULL) { in amdgpu_mn_unregister()
518 down_write(&amn->lock); in amdgpu_mn_unregister()
530 interval_tree_remove(&node->it, &amn->objects); in amdgpu_mn_unregister()
534 up_write(&amn->lock); in amdgpu_mn_unregister()
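
amdgpu_mn_unregister() (lines 507-534) is the inverse of registration: if bo->mn is set, the BO is unlinked from its node's BO list under the write lock, and once that list is empty the interval-tree node itself is removed and freed. A minimal sketch; the bo->mn_list member linking the BO into node->bos is inferred, and the device-level locking is again omitted:

static void amdgpu_mn_unregister_sketch(struct amdgpu_bo *bo)
{
        struct amdgpu_mn *amn = bo->mn;
        struct list_head *head;

        if (amn == NULL)
                return;                 /* BO was never registered */

        down_write(&amn->lock);

        /* remember where the BO was linked before unlinking it */
        head = bo->mn_list.next;
        bo->mn = NULL;
        list_del_init(&bo->mn_list);

        if (list_empty(head)) {
                /* last BO in this range: drop the interval-tree node too */
                struct amdgpu_mn_node *node =
                        container_of(head, struct amdgpu_mn_node, bos);

                interval_tree_remove(&node->it, &amn->objects);
                kfree(node);
        }

        up_write(&amn->lock);
}
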