Home
last modified time | relevance | path

Searched refs:rcu_data (Results 1 – 6 of 6) sorted by relevance

/Linux-v5.4/kernel/rcu/
tree_plugin.h:130 static void rcu_preempt_ctxt_queue(struct rcu_node *rnp, struct rcu_data *rdp) in rcu_preempt_ctxt_queue()
262 if (__this_cpu_read(rcu_data.cpu_no_qs.s)) { in rcu_qs()
264 __this_cpu_read(rcu_data.gp_seq), in rcu_qs()
266 __this_cpu_write(rcu_data.cpu_no_qs.b.norm, false); in rcu_qs()
288 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_note_context_switch()
432 struct rcu_data *rdp; in rcu_preempt_deferred_qs_irqrestore()
442 rdp = this_cpu_ptr(&rcu_data); in rcu_preempt_deferred_qs_irqrestore()
552 return (__this_cpu_read(rcu_data.exp_deferred_qs) || in rcu_preempt_need_deferred_qs()
584 struct rcu_data *rdp; in rcu_preempt_deferred_qs_handler()
586 rdp = container_of(iwp, struct rcu_data, defer_qs_iw); in rcu_preempt_deferred_qs_handler()
[all …]
tree.h:149 struct rcu_data { struct
223 struct rcu_data *nocb_next_cb_rdp; argument
227 struct rcu_data *nocb_gp_rdp ____cacheline_internodealigned_in_smp; argument
405 int rcu_dynticks_snap(struct rcu_data *rdp);
430 static void zero_cpu_stall_ticks(struct rcu_data *rdp);
434 static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
436 static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
438 static void __call_rcu_nocb_wake(struct rcu_data *rdp, bool was_empty,
440 static int rcu_nocb_need_deferred_wakeup(struct rcu_data *rdp);
441 static void do_nocb_deferred_wakeup(struct rcu_data *rdp);
[all …]
tree.c:82 static DEFINE_PER_CPU_SHARED_ALIGNED(struct rcu_data, rcu_data) = {
151 static void rcu_report_exp_rdp(struct rcu_data *rdp);
212 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_get_n_cbs_cpu()
231 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_dynticks_eqs_enter()
254 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_dynticks_eqs_exit()
285 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_dynticks_eqs_online()
299 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_dynticks_curr_cpu_in_eqs()
308 int rcu_dynticks_snap(struct rcu_data *rdp) in rcu_dynticks_snap()
329 static bool rcu_dynticks_in_eqs_since(struct rcu_data *rdp, int snap) in rcu_dynticks_in_eqs_since()
345 struct rcu_data *rdp = &per_cpu(rcu_data, cpu); in rcu_eqs_special_set()
[all …]
tree_exp.h:251 static void rcu_report_exp_rdp(struct rcu_data *rdp) in rcu_report_exp_rdp()
277 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, raw_smp_processor_id()); in exp_funnel_lock()
349 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in sync_rcu_exp_select_node_cpus()
377 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in sync_rcu_exp_select_node_cpus()
491 struct rcu_data *rdp; in synchronize_sched_expedited_wait()
497 rdp = per_cpu_ptr(&rcu_data, cpu); in synchronize_sched_expedited_wait()
604 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_exp_handler()
704 __this_cpu_write(rcu_data.cpu_no_qs.b.exp, true); in rcu_exp_need_qs()
706 smp_store_release(this_cpu_ptr(&rcu_data.rcu_urgent_qs), true); in rcu_exp_need_qs()
714 struct rcu_data *rdp; in rcu_exp_handler()
[all …]
tree_stall.h:114 static void zero_cpu_stall_ticks(struct rcu_data *rdp) in zero_cpu_stall_ticks()
149 struct rcu_data *rdp; in rcu_iw_handler()
152 rdp = container_of(iwp, struct rcu_data, rcu_iw); in rcu_iw_handler()
264 struct rcu_data *rdp = &per_cpu(rcu_data, cpu); in print_cpu_stall_fast_no_hz()
297 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in print_cpu_stall_info()
429 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in print_cpu_stall()
477 static void check_cpu_stall(struct rcu_data *rdp) in check_cpu_stall()
559 struct rcu_data *rdp; in show_rcu_gp_kthreads()
583 rdp = per_cpu_ptr(&rcu_data, cpu); in show_rcu_gp_kthreads()
593 rdp = per_cpu_ptr(&rcu_data, cpu); in show_rcu_gp_kthreads()
[all …]
/Linux-v5.4/Documentation/RCU/
rcubarrier.txt:234 4 struct rcu_data *rdp = &per_cpu(rcu_data, cpu);
242 Lines 3 and 4 locate RCU's internal per-CPU rcu_data structure,