Lines matching refs:rnp in kernel/rcu/tree_stall.h (RCU CPU stall-warning code)
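
Each entry below is the source line number, the matching line, and the enclosing function; "local" marks a local variable declaration and "argument" a function parameter. Continuation lines that do not themselves contain rnp (for example the second half of a multi-line list_entry() call) are not shown.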
187 struct rcu_node *rnp; in rcu_iw_handler() local
190 rnp = rdp->mynode; in rcu_iw_handler()
191 raw_spin_lock_rcu_node(rnp); in rcu_iw_handler()
193 rdp->rcu_iw_gp_seq = rnp->gp_seq; in rcu_iw_handler()
196 raw_spin_unlock_rcu_node(rnp); in rcu_iw_handler()
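
Note: rcu_iw_handler() (lines 187-196) is the irq_work handler the stall-warning code uses to check whether a suspect CPU can still take interrupts. The lines above show its core: take the CPU's leaf rcu_node lock, record the node's current grace-period number in rdp->rcu_iw_gp_seq, and unlock. It uses plain raw_spin_lock_rcu_node() rather than the _irqsave form, consistent with irq_work handlers running in hardirq context with interrupts already disabled.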
209 static void rcu_print_detail_task_stall_rnp(struct rcu_node *rnp) in rcu_print_detail_task_stall_rnp() argument
214 raw_spin_lock_irqsave_rcu_node(rnp, flags); in rcu_print_detail_task_stall_rnp()
215 if (!rcu_preempt_blocked_readers_cgp(rnp)) { in rcu_print_detail_task_stall_rnp()
216 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_print_detail_task_stall_rnp()
219 t = list_entry(rnp->gp_tasks->prev, in rcu_print_detail_task_stall_rnp()
221 list_for_each_entry_continue(t, &rnp->blkd_tasks, rcu_node_entry) { in rcu_print_detail_task_stall_rnp()
229 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_print_detail_task_stall_rnp()
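
The walk in rcu_print_detail_task_stall_rnp() relies on a subtle idiom: rnp->gp_tasks points at the first task on rnp->blkd_tasks that is blocking the current grace period, and list_for_each_entry_continue() advances before it visits, so the walk must start at gp_tasks->prev in order to include gp_tasks itself. Below is a minimal userspace sketch of just that idiom; struct task, the pid values, and the simplified list macros are stand-ins, not kernel code:

	#include <stdio.h>
	#include <stddef.h>

	struct list_head { struct list_head *next, *prev; };

	#define list_entry(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))
	/* Visit every element strictly after the one pos points at,  */
	/* stopping when the walk comes back around to the list head. */
	#define list_for_each_entry_continue(pos, head, member)                  \
		for (pos = list_entry((pos)->member.next, typeof(*pos), member); \
		     &pos->member != (head);                                     \
		     pos = list_entry(pos->member.next, typeof(*pos), member))

	struct task { int pid; struct list_head node; };

	int main(void)
	{
		struct task a = { .pid = 10 }, b = { .pid = 20 }, c = { .pid = 30 };
		struct list_head blkd = { .next = &a.node, .prev = &c.node };
		struct task *t;

		/* Circular list: blkd <-> a <-> b <-> c <-> blkd. */
		a.node = (struct list_head){ .next = &b.node, .prev = &blkd };
		b.node = (struct list_head){ .next = &c.node, .prev = &a.node };
		c.node = (struct list_head){ .next = &blkd,   .prev = &b.node };

		/* Pretend gp_tasks points at b: starting from its prev makes */
		/* the "continue" walk visit b first, then c.                 */
		struct list_head *gp_tasks = &b.node;
		t = list_entry(gp_tasks->prev, struct task, node);
		list_for_each_entry_continue(t, &blkd, node)
			printf("blocked pid %d\n", t->pid);   /* 20, then 30 */
		return 0;
	}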
259 static int rcu_print_task_stall(struct rcu_node *rnp, unsigned long flags) in rcu_print_task_stall() argument
260 __releases(rnp->lock) in rcu_print_task_stall()
269 if (!rcu_preempt_blocked_readers_cgp(rnp)) { in rcu_print_task_stall()
270 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_print_task_stall()
274 rnp->level, rnp->grplo, rnp->grphi); in rcu_print_task_stall()
275 t = list_entry(rnp->gp_tasks->prev, in rcu_print_task_stall()
277 list_for_each_entry_continue(t, &rnp->blkd_tasks, rcu_node_entry) { in rcu_print_task_stall()
283 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_print_task_stall()
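
rcu_print_task_stall() (lines 259-283) uses the same gp_tasks->prev starting point sketched above, but prints a header naming the rcu_node's level and CPU range and returns the number of blocked tasks it found. The __releases(rnp->lock) annotation on line 260 is the sparse hint for its unusual contract: the caller passes the lock in held (with flags from the _irqsave acquisition) and the function drops it on every path, including the early exit on line 270.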
309 static void rcu_print_detail_task_stall_rnp(struct rcu_node *rnp) in rcu_print_detail_task_stall_rnp() argument
317 static int rcu_print_task_stall(struct rcu_node *rnp, unsigned long flags) in rcu_print_task_stall() argument
318 __releases(rnp->lock) in rcu_print_task_stall()
320 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_print_task_stall()
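
Lines 309-320 are the !CONFIG_PREEMPT_RCU stubs of the same two functions: without preemptible readers no task can block a grace period, so rcu_print_detail_task_stall_rnp() becomes a no-op and rcu_print_task_stall() just honors the contract, releasing the lock it was handed and returning 0.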
335 struct rcu_node *rnp; in rcu_dump_cpu_stacks() local
337 rcu_for_each_leaf_node(rnp) { in rcu_dump_cpu_stacks()
338 raw_spin_lock_irqsave_rcu_node(rnp, flags); in rcu_dump_cpu_stacks()
339 for_each_leaf_node_possible_cpu(rnp, cpu) in rcu_dump_cpu_stacks()
340 if (rnp->qsmask & leaf_node_cpu_bit(rnp, cpu)) { in rcu_dump_cpu_stacks()
346 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_dump_cpu_stacks()
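
rcu_dump_cpu_stacks() walks every leaf rcu_node and, under the node's lock with interrupts off, dumps the stack of each CPU whose bit is still set in rnp->qsmask, that is, each CPU yet to report a quiescent state for the current grace period. leaf_node_cpu_bit(rnp, cpu) is the CPU's bit within the leaf's mask; rcu_check_boost_fail() open-codes the same test on line 789 below, and a sketch of the mapping follows that function.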
527 struct rcu_node *rnp; in print_other_cpu_stall() local
544 rcu_for_each_leaf_node(rnp) { in print_other_cpu_stall()
545 raw_spin_lock_irqsave_rcu_node(rnp, flags); in print_other_cpu_stall()
546 if (rnp->qsmask != 0) { in print_other_cpu_stall()
547 for_each_leaf_node_possible_cpu(rnp, cpu) in print_other_cpu_stall()
548 if (rnp->qsmask & leaf_node_cpu_bit(rnp, cpu)) { in print_other_cpu_stall()
553 ndetected += rcu_print_task_stall(rnp, flags); // Releases rnp->lock. in print_other_cpu_stall()
566 rcu_for_each_leaf_node(rnp) in print_other_cpu_stall()
567 rcu_print_detail_task_stall_rnp(rnp); in print_other_cpu_stall()
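
print_other_cpu_stall() produces the familiar "detected stalls on CPUs/tasks" report. The first pass (lines 544-553) counts stalled CPUs from each leaf's qsmask and then calls rcu_print_task_stall(), which, as the comment on line 553 notes, releases rnp->lock, so no matching unlock appears in this loop. The second pass (lines 566-567) emits the detailed per-task stack dumps.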
598 struct rcu_node *rnp = rcu_get_root(); in print_cpu_stall() local
629 raw_spin_lock_irqsave_rcu_node(rnp, flags); in print_cpu_stall()
634 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in print_cpu_stall()
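
print_cpu_stall() is the self-detected-stall path, and rnp here is the root rcu_node (line 598). In the kernels this listing matches, the lock/unlock pair around lines 629-634 protects pushing rcu_state.jiffies_stall into the future so that the stall just reported is not immediately reported again.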
658 struct rcu_node *rnp; in check_cpu_stall() local
696 rnp = rdp->mynode; in check_cpu_stall()
699 (READ_ONCE(rnp->qsmask) & rdp->grpmask) && in check_cpu_stall()
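
check_cpu_stall() is the periodic entry point that decides which report to print. The lockless test on line 699, READ_ONCE(rnp->qsmask) & rdp->grpmask, asks whether this CPU's own bit is still set in its leaf node's mask; if so, the current CPU is itself holding up the grace period and the self-stall path is taken, otherwise print_other_cpu_stall() reports on the rest of the system.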
766 struct rcu_node *rnp; in rcu_check_boost_fail() local
768 rcu_for_each_leaf_node(rnp) { in rcu_check_boost_fail()
770 if (data_race(READ_ONCE(rnp->qsmask))) { in rcu_check_boost_fail()
773 if (READ_ONCE(rnp->gp_tasks)) in rcu_check_boost_fail()
779 raw_spin_lock_irqsave_rcu_node(rnp, flags); in rcu_check_boost_fail()
780 if (rnp->gp_tasks) in rcu_check_boost_fail()
782 if (!rnp->qsmask) { in rcu_check_boost_fail()
784 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_check_boost_fail()
788 for_each_leaf_node_possible_cpu(rnp, cpu) { in rcu_check_boost_fail()
789 if (rnp->qsmask & (1UL << (cpu - rnp->grplo))) { in rcu_check_boost_fail()
790 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_check_boost_fail()
795 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_check_boost_fail()
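
rcu_check_boost_fail() shows a two-stage pattern: a lockless peek at rnp->qsmask and rnp->gp_tasks (READ_ONCE() wrapped in data_race() to document the intentional race for KCSAN), followed by a locked recheck before anything is acted on. The test on line 789, rnp->qsmask & (1UL << (cpu - rnp->grplo)), is the open-coded form of the leaf_node_cpu_bit() test used in rcu_dump_cpu_stacks() above. A toy sketch of that CPU-to-bit mapping, with made-up grplo and qsmask values:

	#include <stdio.h>

	/* A leaf covers CPUs grplo..grphi; bit 0 of qsmask is CPU grplo. */
	/* Toy stand-in for the kernel's rcu_node, not the real layout.   */
	struct toy_node { int grplo, grphi; unsigned long qsmask; };

	static unsigned long leaf_node_cpu_bit(const struct toy_node *rnp, int cpu)
	{
		return 1UL << (cpu - rnp->grplo);
	}

	int main(void)
	{
		struct toy_node rnp = { .grplo = 16, .grphi = 31, .qsmask = 0x05 };

		for (int cpu = rnp.grplo; cpu <= rnp.grphi; cpu++)
			if (rnp.qsmask & leaf_node_cpu_bit(&rnp, cpu))
				printf("CPU %d has not yet reported\n", cpu);
		/* qsmask 0x05 sets bits 0 and 2, so CPUs 16 and 18 print. */
		return 0;
	}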
815 struct rcu_node *rnp; in show_rcu_gp_kthreads() local
832 rcu_for_each_node_breadth_first(rnp) { in show_rcu_gp_kthreads()
833 if (ULONG_CMP_GE(READ_ONCE(rcu_state.gp_seq), READ_ONCE(rnp->gp_seq_needed)) && in show_rcu_gp_kthreads()
834 !data_race(READ_ONCE(rnp->qsmask)) && !data_race(READ_ONCE(rnp->boost_tasks)) && in show_rcu_gp_kthreads()
835 !data_race(READ_ONCE(rnp->exp_tasks)) && !data_race(READ_ONCE(rnp->gp_tasks))) in show_rcu_gp_kthreads()
838 rnp->grplo, rnp->grphi, in show_rcu_gp_kthreads()
839 (long)data_race(READ_ONCE(rnp->gp_seq)), in show_rcu_gp_kthreads()
840 (long)data_race(READ_ONCE(rnp->gp_seq_needed)), in show_rcu_gp_kthreads()
841 data_race(READ_ONCE(rnp->qsmask)), in show_rcu_gp_kthreads()
842 ".b"[!!data_race(READ_ONCE(rnp->boost_kthread_task))], in show_rcu_gp_kthreads()
843 ".B"[!!data_race(READ_ONCE(rnp->boost_tasks))], in show_rcu_gp_kthreads()
844 ".E"[!!data_race(READ_ONCE(rnp->exp_tasks))], in show_rcu_gp_kthreads()
845 ".G"[!!data_race(READ_ONCE(rnp->gp_tasks))], in show_rcu_gp_kthreads()
846 data_race(READ_ONCE(rnp->n_boosts))); in show_rcu_gp_kthreads()
847 if (!rcu_is_leaf_node(rnp)) in show_rcu_gp_kthreads()
849 for_each_leaf_node_possible_cpu(rnp, cpu) { in show_rcu_gp_kthreads()
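
show_rcu_gp_kthreads() prints one diagnostic line per rcu_node that still has something pending, skipping nodes that are fully caught up (lines 833-835). Two idioms above are worth unpacking: ULONG_CMP_GE() compares unsigned long sequence numbers in a wraparound-safe way, and expressions like ".B"[!!...] index a two-character string with a 0-or-1 flag, printing '.' when clear and a letter when set. A standalone sketch of both; the macro body matches kernel/rcu/rcu.h, the values are toys:

	#include <stdio.h>
	#include <limits.h>

	/* Wraparound-safe "a >= b" for unsigned long sequence counters. */
	#define ULONG_CMP_GE(a, b)  (ULONG_MAX / 2 >= (a) - (b))

	int main(void)
	{
		unsigned long gp_seq = 8, gp_seq_needed = ULONG_MAX - 3;
		unsigned long boost_tasks = 0, exp_tasks = 1;

		/* 8 is "at or after" ULONG_MAX - 3 once the counter wraps,   */
		/* so this prints 1 even though 8 < ULONG_MAX - 3 numerically. */
		printf("caught up: %d\n", ULONG_CMP_GE(gp_seq, gp_seq_needed));

		/* ".B"[0] is '.', ".B"[1] is 'B': one character per flag. */
		printf("flags: %c%c\n", ".B"[!!boost_tasks], ".E"[!!exp_tasks]);
		return 0;
	}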
874 static void rcu_check_gp_start_stall(struct rcu_node *rnp, struct rcu_data *rdp, in rcu_check_gp_start_stall() argument
893 raw_spin_lock_irqsave_rcu_node(rnp, flags); in rcu_check_gp_start_stall()
901 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_check_gp_start_stall()
906 if (rnp_root != rnp) in rcu_check_gp_start_stall()
915 if (rnp_root != rnp) in rcu_check_gp_start_stall()
918 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_check_gp_start_stall()
922 if (rnp_root != rnp) in rcu_check_gp_start_stall()
924 raw_spin_unlock_irqrestore_rcu_node(rnp, flags); in rcu_check_gp_start_stall()
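
rcu_check_gp_start_stall() warns when a grace period is needed but the grace-period kthread has not started one. The repeated rnp_root != rnp tests reflect its lock ordering: it takes the leaf rcu_node lock first (line 893), then additionally acquires the root's lock only when the leaf is not itself the root, releasing in the reverse order on each exit path; the early unlock on line 901 is the fast path taken when a recheck under the lock shows nothing is actually stalled.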