Lines in mm/mmu_notifier.c matching references to mm (usage sketches of the notifier APIs follow the listing):

189 		interval_sub->mm->notifier_subscriptions;  in mmu_interval_read_begin()
262 struct mm_struct *mm) in mn_itree_release() argument
267 .mm = mm, in mn_itree_release()
300 struct mm_struct *mm) in mn_hlist_release() argument
319 subscription->ops->release(subscription, mm); in mn_hlist_release()
348 void __mmu_notifier_release(struct mm_struct *mm) in __mmu_notifier_release() argument
351 mm->notifier_subscriptions; in __mmu_notifier_release()
354 mn_itree_release(subscriptions, mm); in __mmu_notifier_release()
357 mn_hlist_release(subscriptions, mm); in __mmu_notifier_release()
365 int __mmu_notifier_clear_flush_young(struct mm_struct *mm, in __mmu_notifier_clear_flush_young() argument
374 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_clear_flush_young()
378 subscription, mm, start, end); in __mmu_notifier_clear_flush_young()
385 int __mmu_notifier_clear_young(struct mm_struct *mm, in __mmu_notifier_clear_young() argument
394 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_clear_young()
398 mm, start, end); in __mmu_notifier_clear_young()
405 int __mmu_notifier_test_young(struct mm_struct *mm, in __mmu_notifier_test_young() argument
413 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_test_young()
416 young = subscription->ops->test_young(subscription, mm, in __mmu_notifier_test_young()
427 void __mmu_notifier_change_pte(struct mm_struct *mm, unsigned long address, in __mmu_notifier_change_pte() argument
435 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_change_pte()
438 subscription->ops->change_pte(subscription, mm, address, in __mmu_notifier_change_pte()
539 range->mm->notifier_subscriptions; in __mmu_notifier_invalidate_range_start()
577 range->mm, in mn_hlist_invalidate_end()
596 range->mm->notifier_subscriptions; in __mmu_notifier_invalidate_range_end()
607 void __mmu_notifier_invalidate_range(struct mm_struct *mm, in __mmu_notifier_invalidate_range() argument
615 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_invalidate_range()
618 subscription->ops->invalidate_range(subscription, mm, in __mmu_notifier_invalidate_range()
630 struct mm_struct *mm) in __mmu_notifier_register() argument
635 mmap_assert_write_locked(mm); in __mmu_notifier_register()
636 BUG_ON(atomic_read(&mm->mm_users) <= 0); in __mmu_notifier_register()
638 if (!mm->notifier_subscriptions) { in __mmu_notifier_register()
657 ret = mm_take_all_locks(mm); in __mmu_notifier_register()
678 smp_store_release(&mm->notifier_subscriptions, subscriptions); in __mmu_notifier_register()
682 mmgrab(mm); in __mmu_notifier_register()
683 subscription->mm = mm; in __mmu_notifier_register()
686 spin_lock(&mm->notifier_subscriptions->lock); in __mmu_notifier_register()
688 &mm->notifier_subscriptions->list); in __mmu_notifier_register()
689 spin_unlock(&mm->notifier_subscriptions->lock); in __mmu_notifier_register()
691 mm->notifier_subscriptions->has_itree = true; in __mmu_notifier_register()
693 mm_drop_all_locks(mm); in __mmu_notifier_register()
694 BUG_ON(atomic_read(&mm->mm_users) <= 0); in __mmu_notifier_register()
723 struct mm_struct *mm) in mmu_notifier_register() argument
727 mmap_write_lock(mm); in mmu_notifier_register()
728 ret = __mmu_notifier_register(subscription, mm); in mmu_notifier_register()
729 mmap_write_unlock(mm); in mmu_notifier_register()
735 find_get_mmu_notifier(struct mm_struct *mm, const struct mmu_notifier_ops *ops) in find_get_mmu_notifier() argument
739 spin_lock(&mm->notifier_subscriptions->lock); in find_get_mmu_notifier()
741 &mm->notifier_subscriptions->list, hlist, in find_get_mmu_notifier()
742 lockdep_is_held(&mm->notifier_subscriptions->lock)) { in find_get_mmu_notifier()
750 spin_unlock(&mm->notifier_subscriptions->lock); in find_get_mmu_notifier()
753 spin_unlock(&mm->notifier_subscriptions->lock); in find_get_mmu_notifier()
775 struct mm_struct *mm) in mmu_notifier_get_locked() argument
780 mmap_assert_write_locked(mm); in mmu_notifier_get_locked()
782 if (mm->notifier_subscriptions) { in mmu_notifier_get_locked()
783 subscription = find_get_mmu_notifier(mm, ops); in mmu_notifier_get_locked()
788 subscription = ops->alloc_notifier(mm); in mmu_notifier_get_locked()
792 ret = __mmu_notifier_register(subscription, mm); in mmu_notifier_get_locked()
803 void __mmu_notifier_subscriptions_destroy(struct mm_struct *mm) in __mmu_notifier_subscriptions_destroy() argument
805 BUG_ON(!hlist_empty(&mm->notifier_subscriptions->list)); in __mmu_notifier_subscriptions_destroy()
806 kfree(mm->notifier_subscriptions); in __mmu_notifier_subscriptions_destroy()
807 mm->notifier_subscriptions = LIST_POISON1; /* debug */ in __mmu_notifier_subscriptions_destroy()
821 struct mm_struct *mm) in mmu_notifier_unregister() argument
823 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister()
838 subscription->ops->release(subscription, mm); in mmu_notifier_unregister()
841 spin_lock(&mm->notifier_subscriptions->lock); in mmu_notifier_unregister()
847 spin_unlock(&mm->notifier_subscriptions->lock); in mmu_notifier_unregister()
856 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister()
858 mmdrop(mm); in mmu_notifier_unregister()
866 struct mm_struct *mm = subscription->mm; in mmu_notifier_free_rcu() local
870 mmdrop(mm); in mmu_notifier_free_rcu()
897 struct mm_struct *mm = subscription->mm; in mmu_notifier_put() local
899 spin_lock(&mm->notifier_subscriptions->lock); in mmu_notifier_put()
903 spin_unlock(&mm->notifier_subscriptions->lock); in mmu_notifier_put()
909 spin_unlock(&mm->notifier_subscriptions->lock); in mmu_notifier_put()
914 struct mmu_interval_notifier *interval_sub, struct mm_struct *mm, in __mmu_interval_notifier_insert() argument
918 interval_sub->mm = mm; in __mmu_interval_notifier_insert()
932 if (WARN_ON(atomic_read(&mm->mm_users) <= 0)) in __mmu_interval_notifier_insert()
936 mmgrab(mm); in __mmu_interval_notifier_insert()
996 struct mm_struct *mm, unsigned long start, in mmu_interval_notifier_insert() argument
1003 might_lock(&mm->mmap_lock); in mmu_interval_notifier_insert()
1005 subscriptions = smp_load_acquire(&mm->notifier_subscriptions); in mmu_interval_notifier_insert()
1007 ret = mmu_notifier_register(NULL, mm); in mmu_interval_notifier_insert()
1010 subscriptions = mm->notifier_subscriptions; in mmu_interval_notifier_insert()
1012 return __mmu_interval_notifier_insert(interval_sub, mm, subscriptions, in mmu_interval_notifier_insert()
1018 struct mmu_interval_notifier *interval_sub, struct mm_struct *mm, in mmu_interval_notifier_insert_locked() argument
1023 mm->notifier_subscriptions; in mmu_interval_notifier_insert_locked()
1026 mmap_assert_write_locked(mm); in mmu_interval_notifier_insert_locked()
1029 ret = __mmu_notifier_register(NULL, mm); in mmu_interval_notifier_insert_locked()
1032 subscriptions = mm->notifier_subscriptions; in mmu_interval_notifier_insert_locked()
1034 return __mmu_interval_notifier_insert(interval_sub, mm, subscriptions, in mmu_interval_notifier_insert_locked()
1063 struct mm_struct *mm = interval_sub->mm; in mmu_interval_notifier_remove() local
1065 mm->notifier_subscriptions; in mmu_interval_notifier_remove()
1101 mmdrop(mm); in mmu_interval_notifier_remove()
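
The entries above for __mmu_notifier_register(), mmu_notifier_register() and the __mmu_notifier_*() dispatch loops are easier to read next to a caller. Below is a minimal, hypothetical driver-side sketch, assuming the mmu_notifier API as declared in include/linux/mmu_notifier.h (v5.5 or later); the my_* names are invented for illustration and do not appear in the listing.

/*
 * Minimal sketch of a driver-side subscription exercising the call paths
 * listed above (mmu_notifier_register(), ops->release,
 * ops->invalidate_range_start, mmu_notifier_unregister()).  All my_*
 * names are hypothetical; only the mmu_notifier_* calls are kernel API.
 */
#include <linux/err.h>
#include <linux/mmu_notifier.h>
#include <linux/printk.h>
#include <linux/slab.h>

struct my_subscription {
	struct mmu_notifier mn;		/* embedded; passed back to every callback */
};

/* Dispatched from __mmu_notifier_release() when the address space dies. */
static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
	pr_debug("mm %p is going away\n", mm);
}

/* Dispatched from __mmu_notifier_invalidate_range_start() before an unmap. */
static int my_invalidate_start(struct mmu_notifier *mn,
			       const struct mmu_notifier_range *range)
{
	pr_debug("invalidating [%lx, %lx)\n", range->start, range->end);
	return 0;	/* 0 = proceed; only non-blockable ranges may return -EAGAIN */
}

static const struct mmu_notifier_ops my_ops = {
	.release		= my_release,
	.invalidate_range_start	= my_invalidate_start,
};

/* Subscribe to mm; mmu_notifier_register() takes mmap_lock and pins the mm. */
static struct my_subscription *my_subscribe(struct mm_struct *mm)
{
	struct my_subscription *sub;
	int ret;

	sub = kzalloc(sizeof(*sub), GFP_KERNEL);
	if (!sub)
		return ERR_PTR(-ENOMEM);

	sub->mn.ops = &my_ops;
	ret = mmu_notifier_register(&sub->mn, mm);
	if (ret) {
		kfree(sub);
		return ERR_PTR(ret);
	}
	return sub;
}

static void my_unsubscribe(struct my_subscription *sub, struct mm_struct *mm)
{
	mmu_notifier_unregister(&sub->mn, mm);	/* drops the mm reference */
	kfree(sub);
}

Drivers that obtain their subscription through mmu_notifier_get_locked() drop it with mmu_notifier_put() instead, which defers the free through mmu_notifier_free_rcu() as shown in the listing.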
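
The interval-notifier entries (mmu_interval_notifier_insert(), mmu_interval_read_begin(), mmu_interval_notifier_remove()) implement a collision-retry scheme. A sketch under the same assumptions; my_mirror, my_fault and the elided device-programming steps are hypothetical placeholders, while the mmu_interval_* helpers are the kernel API as I understand it.

/*
 * Hypothetical mirror of one VA range.  The mmu_interval_* helpers are the
 * kernel API; my_mirror, my_fault and the elided device steps are not.
 */
#include <linux/kernel.h>
#include <linux/mmu_notifier.h>
#include <linux/spinlock.h>

struct my_mirror {
	struct mmu_interval_notifier notifier;	/* covers one VA range */
	spinlock_t lock;			/* serializes device updates */
};

/* Called for any invalidation overlapping the registered range. */
static bool my_interval_invalidate(struct mmu_interval_notifier *sub,
				   const struct mmu_notifier_range *range,
				   unsigned long cur_seq)
{
	struct my_mirror *mirror = container_of(sub, struct my_mirror, notifier);

	if (mmu_notifier_range_blockable(range))
		spin_lock(&mirror->lock);
	else if (!spin_trylock(&mirror->lock))
		return false;		/* only legal for non-blockable ranges */

	mmu_interval_set_seq(sub, cur_seq);	/* publish the new sequence */
	/* ... tear down device mappings covering [range->start, range->end) ... */
	spin_unlock(&mirror->lock);
	return true;
}

static const struct mmu_interval_notifier_ops my_interval_ops = {
	.invalidate = my_interval_invalidate,
};

static int my_mirror_setup(struct my_mirror *mirror, struct mm_struct *mm,
			   unsigned long start, unsigned long length)
{
	spin_lock_init(&mirror->lock);
	return mmu_interval_notifier_insert(&mirror->notifier, mm, start,
					    length, &my_interval_ops);
}

/* Fault path: retry until no invalidation raced with the page lookup. */
static int my_fault(struct my_mirror *mirror)
{
	unsigned long seq;

	do {
		seq = mmu_interval_read_begin(&mirror->notifier);
		/* ... look up/fault pages (e.g. hmm_range_fault()) unlocked ... */
		spin_lock(&mirror->lock);
		if (mmu_interval_read_retry(&mirror->notifier, seq)) {
			spin_unlock(&mirror->lock);
			continue;	/* an invalidation raced; redo the lookup */
		}
		/* ... program the device from the looked-up pages ... */
		spin_unlock(&mirror->lock);
		return 0;
	} while (true);
}

Teardown would pair this with mmu_interval_notifier_remove(&mirror->notifier), which ends in the mmdrop(mm) at line 1101 of the listing.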