
Searched refs:rcu_data (Results 1 – 6 of 6) sorted by relevance

/kernel/rcu/
tree_nocb.h
19 static inline int rcu_lockdep_is_held_nocb(struct rcu_data *rdp) in rcu_lockdep_is_held_nocb()
24 static inline bool rcu_current_is_nocb_kthread(struct rcu_data *rdp) in rcu_current_is_nocb_kthread()
97 static void rcu_nocb_bypass_lock(struct rcu_data *rdp) in rcu_nocb_bypass_lock()
121 static void rcu_nocb_wait_contended(struct rcu_data *rdp) in rcu_nocb_wait_contended()
132 static bool rcu_nocb_bypass_trylock(struct rcu_data *rdp) in rcu_nocb_bypass_trylock()
141 static void rcu_nocb_bypass_unlock(struct rcu_data *rdp) in rcu_nocb_bypass_unlock()
152 static void rcu_nocb_lock(struct rcu_data *rdp) in rcu_nocb_lock()
164 static void rcu_nocb_unlock(struct rcu_data *rdp) in rcu_nocb_unlock()
176 static void rcu_nocb_unlock_irqrestore(struct rcu_data *rdp, in rcu_nocb_unlock_irqrestore()
188 static void rcu_lockdep_assert_cblist_protected(struct rcu_data *rdp) in rcu_lockdep_assert_cblist_protected()
[all …]
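The tree_nocb.h hits above are thin wrappers that take and release a lock embedded in a CPU's rcu_data before touching its callback bypass list. Below is a minimal userspace sketch of that wrapper pattern, with a pthread mutex standing in for the kernel's raw spinlock; the field name nocb_bypass_lock and all _demo names are illustrative assumptions, not the kernel's exact layout.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct rcu_data_demo {
    pthread_mutex_t nocb_bypass_lock;   /* assumed field name */
    int bypass_count;
};

/* Thin per-rdp lock wrappers, mirroring the shape of
 * rcu_nocb_bypass_lock()/trylock()/unlock() in the listing. */
static void rcu_nocb_bypass_lock_demo(struct rcu_data_demo *rdp)
{
    pthread_mutex_lock(&rdp->nocb_bypass_lock);
}

static bool rcu_nocb_bypass_trylock_demo(struct rcu_data_demo *rdp)
{
    return pthread_mutex_trylock(&rdp->nocb_bypass_lock) == 0;
}

static void rcu_nocb_bypass_unlock_demo(struct rcu_data_demo *rdp)
{
    pthread_mutex_unlock(&rdp->nocb_bypass_lock);
}

int main(void)
{
    struct rcu_data_demo rd = { PTHREAD_MUTEX_INITIALIZER, 0 };

    if (rcu_nocb_bypass_trylock_demo(&rd)) {   /* uncontended: succeeds */
        rd.bypass_count++;
        rcu_nocb_bypass_unlock_demo(&rd);
    }
    rcu_nocb_bypass_lock_demo(&rd);            /* unconditional acquire */
    printf("bypass_count = %d\n", rd.bypass_count);
    rcu_nocb_bypass_unlock_demo(&rd);
    return 0;
}

Build with -lpthread. The point of the wrapper style is that every bypass-list access goes through one named helper, which is where the kernel also hangs its lockdep assertions.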
tree.h
162 struct rcu_data {
239 struct rcu_data *nocb_toggling_rdp; /* rdp queued for (de-)offloading */
242 struct rcu_data *nocb_gp_rdp ____cacheline_internodealigned_in_smp;
439 static bool rcu_is_callbacks_kthread(struct rcu_data *rdp);
444 static void zero_cpu_stall_ticks(struct rcu_data *rdp);
448 static bool wake_nocb_gp(struct rcu_data *rdp, bool force);
449 static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
451 static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
454 static void __call_rcu_nocb_wake(struct rcu_data *rdp, bool was_empty,
456 static int rcu_nocb_need_deferred_wakeup(struct rcu_data *rdp, int level);
[all …]
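The tree.h hit at line 242 marks a member with ____cacheline_internodealigned_in_smp, which places a field touched by other CPUs on its own cache line so it does not false-share with hot CPU-local fields. A hedged C11 sketch of the same idea, assuming a 64-byte line (the kernel derives the real size from the architecture) and using illustrative field names:

#include <stdalign.h>
#include <stddef.h>
#include <stdio.h>

#define CACHELINE 64   /* assumed line size */

struct rcu_data_demo {
    long local_counter;                        /* touched only by owner CPU */
    alignas(CACHELINE) long cross_cpu_field;   /* touched by other CPUs */
};

int main(void)
{
    /* The aligned member lands at a cache-line boundary, so remote
     * writers never invalidate the line holding local_counter. */
    printf("sizeof = %zu, offset of aligned field = %zu\n",
           sizeof(struct rcu_data_demo),
           offsetof(struct rcu_data_demo, cross_cpu_field));
    return 0;
}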
tree_plugin.h
16 static bool rcu_rdp_is_offloaded(struct rcu_data *rdp) in rcu_rdp_is_offloaded()
31 (rdp == this_cpu_ptr(&rcu_data) && in rcu_rdp_is_offloaded()
151 static void rcu_preempt_ctxt_queue(struct rcu_node *rnp, struct rcu_data *rdp) in rcu_preempt_ctxt_queue()
289 if (__this_cpu_read(rcu_data.cpu_no_qs.b.norm)) { in rcu_qs()
291 __this_cpu_read(rcu_data.gp_seq), in rcu_qs()
293 __this_cpu_write(rcu_data.cpu_no_qs.b.norm, false); in rcu_qs()
315 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_note_context_switch()
473 struct rcu_data *rdp; in rcu_preempt_deferred_qs_irqrestore()
483 rdp = this_cpu_ptr(&rcu_data); in rcu_preempt_deferred_qs_irqrestore()
588 return (__this_cpu_read(rcu_data.cpu_no_qs.b.exp) || in rcu_preempt_need_deferred_qs()
[all …]
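The tree_plugin.h hits read and write fields of the local CPU's rcu_data through __this_cpu_read()/__this_cpu_write(), as in the rcu_qs() lines above. A rough userspace analogue of that flag handshake, with C11 thread-local storage standing in for per-CPU variables; the _demo names and the simplified field layout are invented for illustration:

#include <stdbool.h>
#include <stdio.h>

struct rcu_data_demo {
    struct { struct { bool norm; } b; } cpu_no_qs;  /* pending-QS flag */
    unsigned long gp_seq;                           /* grace-period number */
};

/* One copy per thread, standing in for one copy per CPU. */
static _Thread_local struct rcu_data_demo rcu_data_demo;

static void rcu_qs_demo(void)
{
    /* Report a quiescent state only if one is still pending,
     * then clear the flag, mirroring the shape of rcu_qs(). */
    if (rcu_data_demo.cpu_no_qs.b.norm) {
        printf("qs reported for gp_seq %lu\n", rcu_data_demo.gp_seq);
        rcu_data_demo.cpu_no_qs.b.norm = false;
    }
}

int main(void)
{
    rcu_data_demo.cpu_no_qs.b.norm = true;
    rcu_data_demo.gp_seq = 42;
    rcu_qs_demo();   /* reports once */
    rcu_qs_demo();   /* flag already clear: no-op */
    return 0;
}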
tree.c
79 static DEFINE_PER_CPU_SHARED_ALIGNED(struct rcu_data, rcu_data) = {
152 static void rcu_report_exp_rdp(struct rcu_data *rdp);
154 static void check_cb_ovld_locked(struct rcu_data *rdp, struct rcu_node *rnp);
155 static bool rcu_rdp_is_offloaded(struct rcu_data *rdp);
234 static bool rcu_rdp_cpu_online(struct rcu_data *rdp) in rcu_rdp_cpu_online()
255 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_get_n_cbs_cpu()
316 static bool rcu_dynticks_in_eqs_since(struct rcu_data *rdp, int snap) in rcu_dynticks_in_eqs_since()
355 raw_cpu_write(rcu_data.rcu_need_heavy_qs, false); in rcu_momentary_dyntick_idle()
501 static void force_qs_rnp(int (*f)(struct rcu_data *rdp));
574 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_irq_work_resched()
[all …]
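tree.c line 79 is where the per-CPU rcu_data instances are defined: DEFINE_PER_CPU_SHARED_ALIGNED creates one copy per CPU, and the remaining hits reach a given CPU's copy with per_cpu_ptr(&rcu_data, cpu) or the local copy with this_cpu_ptr(&rcu_data). A small sketch that fakes the per-CPU machinery with a plain array; NR_CPUS and all _demo names are assumptions for illustration, not the kernel API:

#include <stdio.h>

#define NR_CPUS 4

struct rcu_data_demo { long n_cbs; };

/* One slot per CPU, standing in for DEFINE_PER_CPU_SHARED_ALIGNED. */
static struct rcu_data_demo rcu_data_demo[NR_CPUS];

/* Stand-in for per_cpu_ptr(&rcu_data, cpu). */
#define per_cpu_ptr_demo(base, cpu) (&(base)[cpu])

/* Shaped like rcu_get_n_cbs_cpu() in the listing: fetch one CPU's copy,
 * then read a field from it. */
static long rcu_get_n_cbs_cpu_demo(int cpu)
{
    struct rcu_data_demo *rdp = per_cpu_ptr_demo(rcu_data_demo, cpu);
    return rdp->n_cbs;
}

int main(void)
{
    for (int cpu = 0; cpu < NR_CPUS; cpu++)
        per_cpu_ptr_demo(rcu_data_demo, cpu)->n_cbs = cpu * 10;
    for (int cpu = 0; cpu < NR_CPUS; cpu++)
        printf("cpu %d: %ld callbacks\n", cpu, rcu_get_n_cbs_cpu_demo(cpu));
    return 0;
}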
tree_exp.h
236 struct rcu_data *rdp; in rcu_report_exp_cpu_mult()
245 rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_report_exp_cpu_mult()
257 static void rcu_report_exp_rdp(struct rcu_data *rdp) in rcu_report_exp_rdp()
283 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, raw_smp_processor_id()); in exp_funnel_lock()
352 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in __sync_rcu_exp_select_node_cpus()
380 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in __sync_rcu_exp_select_node_cpus()
592 struct rcu_data *rdp; in synchronize_rcu_expedited_wait()
605 rdp = per_cpu_ptr(&rcu_data, cpu); in synchronize_rcu_expedited_wait()
632 struct rcu_data *rdp; in synchronize_rcu_expedited_wait()
638 rdp = per_cpu_ptr(&rcu_data, cpu); in synchronize_rcu_expedited_wait()
[all …]
tree_stall.h
185 static void zero_cpu_stall_ticks(struct rcu_data *rdp) in zero_cpu_stall_ticks()
220 struct rcu_data *rdp; in rcu_iw_handler()
223 rdp = container_of(iwp, struct rcu_data, rcu_iw); in rcu_iw_handler()
416 static bool rcu_is_rcuc_kthread_starving(struct rcu_data *rdp, unsigned long *jp) in rcu_is_rcuc_kthread_starving()
452 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in print_cpu_stall_info()
635 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in print_cpu_stall()
687 static void check_cpu_stall(struct rcu_data *rdp) in check_cpu_stall()
862 struct rcu_data *rdp; in show_rcu_gp_kthreads()
898 rdp = per_cpu_ptr(&rcu_data, cpu); in show_rcu_gp_kthreads()
908 rdp = per_cpu_ptr(&rcu_data, cpu); in show_rcu_gp_kthreads()
[all …]
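In the rcu_iw_handler() hit above, container_of() recovers the enclosing rcu_data from a pointer to its embedded rcu_iw member: the irq_work layer hands the handler only the member pointer, and subtracting the member's offset yields the parent structure. A self-contained userspace re-creation of that idiom; the demo struct and handler are illustrative, not the kernel's:

#include <stddef.h>
#include <stdio.h>

/* Userspace copy of the kernel's container_of(): step back from a
 * member pointer to the structure that embeds it. */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct irq_work { int pending; };

struct rcu_data_demo {          /* stand-in for struct rcu_data */
    int cpu;
    struct irq_work rcu_iw;     /* embedded member, as in the kernel */
};

/* The callback receives only the embedded member, as rcu_iw_handler()
 * does, and recovers the full per-CPU structure from it. */
static void handler(struct irq_work *iwp)
{
    struct rcu_data_demo *rdp =
        container_of(iwp, struct rcu_data_demo, rcu_iw);
    printf("handler ran for cpu %d\n", rdp->cpu);
}

int main(void)
{
    struct rcu_data_demo rd = { .cpu = 3 };
    handler(&rd.rcu_iw);        /* pass only the member pointer */
    return 0;
}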