Lines Matching refs:mm -- uses of the mm argument/local in mm/mmu_notifier.c. Illustrative caller sketches follow the classic-notifier references and the interval-notifier references below.
189 interval_sub->mm->notifier_subscriptions; in mmu_interval_read_begin()
262 struct mm_struct *mm) in mn_itree_release() argument
267 .mm = mm, in mn_itree_release()
300 struct mm_struct *mm) in mn_hlist_release() argument
319 subscription->ops->release(subscription, mm); in mn_hlist_release()
348 void __mmu_notifier_release(struct mm_struct *mm) in __mmu_notifier_release() argument
351 mm->notifier_subscriptions; in __mmu_notifier_release()
354 mn_itree_release(subscriptions, mm); in __mmu_notifier_release()
357 mn_hlist_release(subscriptions, mm); in __mmu_notifier_release()
365 int __mmu_notifier_clear_flush_young(struct mm_struct *mm, in __mmu_notifier_clear_flush_young() argument
374 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_clear_flush_young()
378 subscription, mm, start, end); in __mmu_notifier_clear_flush_young()
385 int __mmu_notifier_clear_young(struct mm_struct *mm, in __mmu_notifier_clear_young() argument
394 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_clear_young()
398 mm, start, end); in __mmu_notifier_clear_young()
405 int __mmu_notifier_test_young(struct mm_struct *mm, in __mmu_notifier_test_young() argument
413 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_test_young()
416 young = subscription->ops->test_young(subscription, mm, in __mmu_notifier_test_young()
427 void __mmu_notifier_change_pte(struct mm_struct *mm, unsigned long address, in __mmu_notifier_change_pte() argument
435 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_change_pte()
438 subscription->ops->change_pte(subscription, mm, address, in __mmu_notifier_change_pte()
539 range->mm->notifier_subscriptions; in __mmu_notifier_invalidate_range_start()
577 range->mm, in mn_hlist_invalidate_end()
596 range->mm->notifier_subscriptions; in __mmu_notifier_invalidate_range_end()
607 void __mmu_notifier_invalidate_range(struct mm_struct *mm, in __mmu_notifier_invalidate_range() argument
615 &mm->notifier_subscriptions->list, hlist, in __mmu_notifier_invalidate_range()
618 subscription->ops->invalidate_range(subscription, mm, in __mmu_notifier_invalidate_range()
630 struct mm_struct *mm) in __mmu_notifier_register() argument
635 mmap_assert_write_locked(mm); in __mmu_notifier_register()
636 BUG_ON(atomic_read(&mm->mm_users) <= 0); in __mmu_notifier_register()
645 if (!mm->notifier_subscriptions) { in __mmu_notifier_register()
664 ret = mm_take_all_locks(mm); in __mmu_notifier_register()
685 smp_store_release(&mm->notifier_subscriptions, subscriptions); in __mmu_notifier_register()
689 mmgrab(mm); in __mmu_notifier_register()
690 subscription->mm = mm; in __mmu_notifier_register()
693 spin_lock(&mm->notifier_subscriptions->lock); in __mmu_notifier_register()
695 &mm->notifier_subscriptions->list); in __mmu_notifier_register()
696 spin_unlock(&mm->notifier_subscriptions->lock); in __mmu_notifier_register()
698 mm->notifier_subscriptions->has_itree = true; in __mmu_notifier_register()
700 mm_drop_all_locks(mm); in __mmu_notifier_register()
701 BUG_ON(atomic_read(&mm->mm_users) <= 0); in __mmu_notifier_register()
730 struct mm_struct *mm) in mmu_notifier_register() argument
734 mmap_write_lock(mm); in mmu_notifier_register()
735 ret = __mmu_notifier_register(subscription, mm); in mmu_notifier_register()
736 mmap_write_unlock(mm); in mmu_notifier_register()
742 find_get_mmu_notifier(struct mm_struct *mm, const struct mmu_notifier_ops *ops) in find_get_mmu_notifier() argument
746 spin_lock(&mm->notifier_subscriptions->lock); in find_get_mmu_notifier()
748 &mm->notifier_subscriptions->list, hlist, in find_get_mmu_notifier()
749 lockdep_is_held(&mm->notifier_subscriptions->lock)) { in find_get_mmu_notifier()
757 spin_unlock(&mm->notifier_subscriptions->lock); in find_get_mmu_notifier()
760 spin_unlock(&mm->notifier_subscriptions->lock); in find_get_mmu_notifier()
782 struct mm_struct *mm) in mmu_notifier_get_locked() argument
787 mmap_assert_write_locked(mm); in mmu_notifier_get_locked()
789 if (mm->notifier_subscriptions) { in mmu_notifier_get_locked()
790 subscription = find_get_mmu_notifier(mm, ops); in mmu_notifier_get_locked()
795 subscription = ops->alloc_notifier(mm); in mmu_notifier_get_locked()
799 ret = __mmu_notifier_register(subscription, mm); in mmu_notifier_get_locked()
810 void __mmu_notifier_subscriptions_destroy(struct mm_struct *mm) in __mmu_notifier_subscriptions_destroy() argument
812 BUG_ON(!hlist_empty(&mm->notifier_subscriptions->list)); in __mmu_notifier_subscriptions_destroy()
813 kfree(mm->notifier_subscriptions); in __mmu_notifier_subscriptions_destroy()
814 mm->notifier_subscriptions = LIST_POISON1; /* debug */ in __mmu_notifier_subscriptions_destroy()
828 struct mm_struct *mm) in mmu_notifier_unregister() argument
830 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister()
845 subscription->ops->release(subscription, mm); in mmu_notifier_unregister()
848 spin_lock(&mm->notifier_subscriptions->lock); in mmu_notifier_unregister()
854 spin_unlock(&mm->notifier_subscriptions->lock); in mmu_notifier_unregister()
863 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister()
865 mmdrop(mm); in mmu_notifier_unregister()
873 struct mm_struct *mm = subscription->mm; in mmu_notifier_free_rcu() local
877 mmdrop(mm); in mmu_notifier_free_rcu()
904 struct mm_struct *mm = subscription->mm; in mmu_notifier_put() local
906 spin_lock(&mm->notifier_subscriptions->lock); in mmu_notifier_put()
910 spin_unlock(&mm->notifier_subscriptions->lock); in mmu_notifier_put()
916 spin_unlock(&mm->notifier_subscriptions->lock); in mmu_notifier_put()
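
The entry points referenced so far -- mmu_notifier_register()/mmu_notifier_unregister() and the __mmu_notifier_*() hooks that walk mm->notifier_subscriptions->list and call into subscription->ops -- are driven by a caller-supplied struct mmu_notifier_ops. Below is a minimal sketch of such a caller against this API; my_ctx, my_attach, my_detach and the callback bodies are hypothetical, only the mmu_notifier_* calls and the callback signatures are taken from the listing's API.

#include <linux/kernel.h>
#include <linux/mmu_notifier.h>
#include <linux/printk.h>

/* Hypothetical driver context embedding a subscription. */
struct my_ctx {
	struct mmu_notifier mn;
	/* ... secondary-MMU / mirror state ... */
};

/*
 * Invoked via __mmu_notifier_invalidate_range_start() before the primary
 * page tables change.  If this callback could sleep, it must check
 * mmu_notifier_range_blockable(range) and return -EAGAIN when blocking
 * is not allowed; this sketch never sleeps, so it always returns 0.
 */
static int my_invalidate_range_start(struct mmu_notifier *mn,
				     const struct mmu_notifier_range *range)
{
	struct my_ctx *ctx = container_of(mn, struct my_ctx, mn);

	pr_debug("ctx %p: invalidating %lx-%lx\n", ctx, range->start, range->end);
	/* ... tear down ctx's mappings covering [range->start, range->end) ... */
	return 0;
}

/* Invoked via __mmu_notifier_release() when the address space is torn down. */
static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
	/* Stop using this mm; mmu_notifier_unregister() may still be called later. */
}

static const struct mmu_notifier_ops my_ops = {
	.invalidate_range_start	= my_invalidate_range_start,
	.release		= my_release,
};

/*
 * mmu_notifier_register() takes mmap_lock for write internally and requires
 * mm_users > 0 (cf. the BUG_ON() in __mmu_notifier_register() above), so the
 * caller must hold a live user reference on mm, e.g. current->mm.
 */
static int my_attach(struct my_ctx *ctx, struct mm_struct *mm)
{
	ctx->mn.ops = &my_ops;
	return mmu_notifier_register(&ctx->mn, mm);
}

/* Pairs with the mmgrab()/mmdrop() seen in the register/unregister paths. */
static void my_detach(struct my_ctx *ctx, struct mm_struct *mm)
{
	mmu_notifier_unregister(&ctx->mn, mm);
}

A subscription obtained through mmu_notifier_get_locked() instead (ops providing alloc_notifier/free_notifier) is dropped with mmu_notifier_put() rather than mmu_notifier_unregister().
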
921 struct mmu_interval_notifier *interval_sub, struct mm_struct *mm, in __mmu_interval_notifier_insert() argument
925 interval_sub->mm = mm; in __mmu_interval_notifier_insert()
939 if (WARN_ON(atomic_read(&mm->mm_users) <= 0)) in __mmu_interval_notifier_insert()
943 mmgrab(mm); in __mmu_interval_notifier_insert()
1003 struct mm_struct *mm, unsigned long start, in mmu_interval_notifier_insert() argument
1010 might_lock(&mm->mmap_lock); in mmu_interval_notifier_insert()
1012 subscriptions = smp_load_acquire(&mm->notifier_subscriptions); in mmu_interval_notifier_insert()
1014 ret = mmu_notifier_register(NULL, mm); in mmu_interval_notifier_insert()
1017 subscriptions = mm->notifier_subscriptions; in mmu_interval_notifier_insert()
1019 return __mmu_interval_notifier_insert(interval_sub, mm, subscriptions, in mmu_interval_notifier_insert()
1025 struct mmu_interval_notifier *interval_sub, struct mm_struct *mm, in mmu_interval_notifier_insert_locked() argument
1030 mm->notifier_subscriptions; in mmu_interval_notifier_insert_locked()
1033 mmap_assert_write_locked(mm); in mmu_interval_notifier_insert_locked()
1036 ret = __mmu_notifier_register(NULL, mm); in mmu_interval_notifier_insert_locked()
1039 subscriptions = mm->notifier_subscriptions; in mmu_interval_notifier_insert_locked()
1041 return __mmu_interval_notifier_insert(interval_sub, mm, subscriptions, in mmu_interval_notifier_insert_locked()
1070 struct mm_struct *mm = interval_sub->mm; in mmu_interval_notifier_remove() local
1072 mm->notifier_subscriptions; in mmu_interval_notifier_remove()
1108 mmdrop(mm); in mmu_interval_notifier_remove()
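
The remaining references belong to the interval-notifier half of the same file. Below is a minimal sketch of the usual insert / read_begin / read_retry / remove sequence; my_mirror, my_populate and the locking scheme are hypothetical, only the mmu_interval_* calls and the invalidate() callback signature are taken from the listing's API.

#include <linux/kernel.h>
#include <linux/mmu_notifier.h>
#include <linux/mutex.h>

/* Hypothetical mirror of one VA range in a secondary MMU. */
struct my_mirror {
	struct mmu_interval_notifier notifier;
	struct mutex lock;		/* serialises invalidate vs. populate */
};

/* Called from __mmu_notifier_invalidate_range_start() for overlapping ranges. */
static bool my_interval_invalidate(struct mmu_interval_notifier *interval_sub,
				   const struct mmu_notifier_range *range,
				   unsigned long cur_seq)
{
	struct my_mirror *m = container_of(interval_sub, struct my_mirror,
					   notifier);

	if (mmu_notifier_range_blockable(range))
		mutex_lock(&m->lock);
	else if (!mutex_trylock(&m->lock))
		return false;		/* non-blocking caller: ask it to retry */

	mmu_interval_set_seq(interval_sub, cur_seq);	/* under the same lock as the read side */
	/* ... unmap mirrored PTEs covering [range->start, range->end) ... */
	mutex_unlock(&m->lock);
	return true;
}

static const struct mmu_interval_notifier_ops my_interval_ops = {
	.invalidate = my_interval_invalidate,
};

/* Collision-retry read side built on mmu_interval_read_begin()/read_retry(). */
static int my_populate(struct my_mirror *m)
{
	unsigned long seq;

	for (;;) {
		seq = mmu_interval_read_begin(&m->notifier);
		/* ... fault/snapshot the range without holding m->lock ... */
		mutex_lock(&m->lock);
		if (!mmu_interval_read_retry(&m->notifier, seq))
			break;		/* no invalidation raced with us */
		mutex_unlock(&m->lock);
	}
	/* ... commit the snapshot to the secondary MMU, still under m->lock ... */
	mutex_unlock(&m->lock);
	return 0;
}

static int my_mirror_start(struct my_mirror *m, struct mm_struct *mm,
			   unsigned long start, unsigned long length)
{
	mutex_init(&m->lock);
	return mmu_interval_notifier_insert(&m->notifier, mm, start, length,
					    &my_interval_ops);
}

static void my_mirror_stop(struct my_mirror *m)
{
	mmu_interval_notifier_remove(&m->notifier);	/* drops the mmgrab() reference */
}

As the listing shows, mmu_interval_notifier_insert() registers an empty notifier via mmu_notifier_register(NULL, mm) on first use, which is why it may take mmap_lock (the might_lock(&mm->mmap_lock) reference above); mmu_interval_notifier_insert_locked() is the variant for callers already holding mmap_lock for write.
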