
Searched refs: READ_ONCE (results 1 – 25 of 93), sorted by relevance


/include/linux/
srcutiny.h
63 idx = ((READ_ONCE(ssp->srcu_idx) + 1) & 0x2) >> 1; in __srcu_read_lock()
64 WRITE_ONCE(ssp->srcu_lock_nesting[idx], READ_ONCE(ssp->srcu_lock_nesting[idx]) + 1); in __srcu_read_lock()
84 idx = ((data_race(READ_ONCE(ssp->srcu_idx)) + 1) & 0x2) >> 1; in srcu_torture_stats_print()
87 data_race(READ_ONCE(ssp->srcu_lock_nesting[!idx])), in srcu_torture_stats_print()
88 data_race(READ_ONCE(ssp->srcu_lock_nesting[idx])), in srcu_torture_stats_print()
89 data_race(READ_ONCE(ssp->srcu_idx)), in srcu_torture_stats_print()
90 data_race(READ_ONCE(ssp->srcu_idx_max))); in srcu_torture_stats_print()
rcupdate_trace.h
52 WRITE_ONCE(t->trc_reader_nesting, READ_ONCE(t->trc_reader_nesting) + 1); in rcu_read_lock_trace()
75 nesting = READ_ONCE(t->trc_reader_nesting) - 1; in rcu_read_unlock_trace()
79 if (likely(!READ_ONCE(t->trc_reader_special.s)) || nesting) { in rcu_read_unlock_trace()
dynamic_queue_limits.h
94 return READ_ONCE(dql->adj_limit) - READ_ONCE(dql->num_queued); in dql_avail()
rcupdate.h
83 #define rcu_preempt_depth() READ_ONCE(current->rcu_read_lock_nesting)
194 if (!(preempt) && READ_ONCE((t)->rcu_tasks_holdout)) \
215 int ___rttq_nesting = READ_ONCE((t)->trc_reader_nesting); \
217 if (likely(!READ_ONCE((t)->trc_reader_special.b.need_qs)) && \
221 !READ_ONCE((t)->trc_reader_special.b.blocked)) { \
451 typeof(*p) *local = (typeof(*p) *__force)READ_ONCE(p); \
458 typeof(*p) *local = (typeof(*p) *__force)READ_ONCE(p); \
472 typeof(p) local = READ_ONCE(p); \
1078 rcu_callback_t func = READ_ONCE(rhp->func); in rcu_head_after_call_rcu()
freelist.h
46 struct freelist_node *head = READ_ONCE(list->head); in __freelist_add()
98 next = READ_ONCE(head->next); in freelist_try_get()
rcu_sync.h
36 return !READ_ONCE(rsp->gp_state); /* GP_IDLE */ in rcu_sync_is_idle()
list_nulls.h
83 return !READ_ONCE(h->pprev); in hlist_nulls_unhashed_lockless()
88 return is_a_nulls(READ_ONCE(h->first)); in hlist_nulls_empty()
task_work.h
25 return READ_ONCE(task->task_works); in task_work_pending()
average.h
56 unsigned long internal = READ_ONCE(e->internal); \
seqlock.h
213 unsigned seq = READ_ONCE(s->seqcount.sequence); \
226 seq = READ_ONCE(s->seqcount.sequence); \
259 return READ_ONCE(s->sequence); in __seqprop_sequence()
429 return unlikely(READ_ONCE(s->sequence) != start); in do___read_seqcount_retry()
682 return READ_ONCE(s->seqcount.sequence); in raw_read_seqcount_latch()
static_call.h
159 #define static_call_query(name) (READ_ONCE(STATIC_CALL_KEY(name).func))
320 void *func = READ_ONCE(STATIC_CALL_KEY(name).func); \
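The seqlock.h hits above all drive the same lockless read-retry pattern: the sequence counter must be loaded with READ_ONCE() so the compiler cannot cache it in a register or tear the access across retries. A minimal sketch of that pattern, assuming a hypothetical "struct snap" rather than the kernel's real seqcount_t API:

	/* Sketch only: the kernel uses seqcount_t with
	 * read_seqcount_begin()/read_seqcount_retry() instead. */
	struct snap {
		unsigned int seq;	/* odd while a writer is active */
		int a, b;
	};

	static void read_snap(struct snap *s, int *a, int *b)
	{
		unsigned int seq;

		do {
			/* READ_ONCE() forces a fresh load per iteration. */
			while ((seq = READ_ONCE(s->seq)) & 1)
				cpu_relax();	/* writer in progress */
			smp_rmb();
			*a = s->a;
			*b = s->b;
			smp_rmb();
		} while (READ_ONCE(s->seq) != seq);
	}

The same loop shape recurs in the hyperv_timer.h and vdso helpers.h results further down.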
/include/net/
busy_poll.h
36 return READ_ONCE(sysctl_net_busy_poll); in net_busy_loop_on()
41 return READ_ONCE(sk->sk_ll_usec) && !signal_pending(current); in sk_can_busy_loop()
76 unsigned long bp_usec = READ_ONCE(sysctl_net_busy_poll); in busy_loop_timeout()
92 unsigned long bp_usec = READ_ONCE(sk->sk_ll_usec); in sk_busy_loop_timeout()
107 unsigned int napi_id = READ_ONCE(sk->sk_napi_id); in sk_busy_loop()
111 READ_ONCE(sk->sk_prefer_busy_poll), in sk_busy_loop()
112 READ_ONCE(sk->sk_busy_poll_budget) ?: BUSY_POLL_BUDGET); in sk_busy_loop()
133 if (unlikely(READ_ONCE(sk->sk_napi_id) != skb->napi_id)) in sk_mark_napi_id()
155 if (!READ_ONCE(sk->sk_napi_id)) in __sk_mark_napi_id_once()
inet_sock.h
111 u32 mark = READ_ONCE(sk->sk_mark); in inet_request_mark()
113 if (!mark && READ_ONCE(sock_net(sk)->ipv4.sysctl_tcp_fwmark_accept)) in inet_request_mark()
122 int bound_dev_if = READ_ONCE(sk->sk_bound_dev_if); in inet_request_bound_dev_if()
126 if (!bound_dev_if && READ_ONCE(net->ipv4.sysctl_tcp_l3mdev_accept)) in inet_request_bound_dev_if()
138 if (!READ_ONCE(net->ipv4.sysctl_tcp_l3mdev_accept)) in inet_sk_bound_l3mdev()
158 return inet_bound_dev_eq(!!READ_ONCE(net->ipv4.sysctl_tcp_l3mdev_accept), in inet_sk_bound_dev_eq()
393 return READ_ONCE(net->ipv4.sysctl_ip_nonlocal_bind) || in inet_can_nonlocal_bind()
sock.h
684 return READ_ONCE(sk->sk_peek_off); in sk_peek_offset()
692 s32 off = READ_ONCE(sk->sk_peek_off); in sk_peek_offset_bwd()
1043 return READ_ONCE(sk->sk_ack_backlog) > READ_ONCE(sk->sk_max_ack_backlog); in sk_acceptq_is_full()
1051 return READ_ONCE(sk->sk_wmem_queued) >> 1; in sk_stream_min_wspace()
1056 return READ_ONCE(sk->sk_sndbuf) - READ_ONCE(sk->sk_wmem_queued); in sk_stream_wspace()
1139 if (unlikely(READ_ONCE(sk->sk_incoming_cpu) != cpu)) in sk_incoming_cpu_update()
1173 sock_rps_record_flow_hash(READ_ONCE(sk->sk_rxhash)); in sock_rps_record_flow()
1186 if (unlikely(READ_ONCE(sk->sk_rxhash) != skb->hash)) in sock_rps_save_rxhash()
1228 if (unlikely(READ_ONCE(sk->sk_backlog.tail))) { in sk_flush_backlog()
1422 return READ_ONCE(sk->sk_forward_alloc); in sk_forward_alloc_get()
[all …]
neighbour.h
338 if (READ_ONCE(n->confirmed) != now) in neigh_confirm()
473 if (READ_ONCE(neigh->used) != now) in neigh_event_send_probe()
475 if (!(READ_ONCE(neigh->nud_state) & (NUD_CONNECTED | NUD_DELAY | NUD_PROBE))) in neigh_event_send_probe()
507 hh_len = READ_ONCE(hh->hh_len); in neigh_hh_output()
548 (READ_ONCE(n->nud_state) & NUD_CONNECTED) && in neigh_output()
549 READ_ONCE(hh->hh_len)) in neigh_output()
552 return READ_ONCE(n->output)(n, skb); in neigh_output()
ip.h
99 ipcm->sockc.mark = READ_ONCE(inet->sk.sk_mark); in ipcm_init_sk()
100 ipcm->sockc.tsflags = READ_ONCE(inet->sk.sk_tsflags); in ipcm_init_sk()
101 ipcm->oif = READ_ONCE(inet->sk.sk_bound_dev_if); in ipcm_init_sk()
366 return port < READ_ONCE(net->ipv4.sysctl_ip_prot_sock); in inet_port_requires_bind_service()
393 (READ_ONCE((net)->ipv4.sysctl_fwmark_reflect) ? (mark) : 0)
424 u8 pmtudisc = READ_ONCE(inet_sk(sk)->pmtudisc); in ip_dont_fragment()
455 if (READ_ONCE(net->ipv4.sysctl_ip_fwd_use_pmtu) || in ip_dst_mtu_maybe_forward()
468 mtu = READ_ONCE(dst->dev->mtu); in ip_dst_mtu_maybe_forward()
492 mtu = min(READ_ONCE(skb_dst(skb)->dev->mtu), IP_MAX_MTU); in ip_skb_dst_mtu()
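Most of the networking hits follow a simpler idiom: a sysctl or socket field that writers update locklessly (with WRITE_ONCE() on the store side) is read exactly once into a local, so the function computes with one consistent snapshot even if the value changes concurrently. A hedged sketch of that idiom; the helper name is made up, but sk->sk_sndbuf is one of the fields shown above:

	/* Hypothetical helper: snapshot the limit once and reuse it,
	 * instead of re-reading a field another CPU may be updating. */
	static bool sndbuf_over_limit(const struct sock *sk, int queued)
	{
		int limit = READ_ONCE(sk->sk_sndbuf);

		return queued > limit;
	}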
/include/clocksource/
hyperv_timer.h
60 sequence = READ_ONCE(tsc_pg->tsc_sequence); in hv_read_tsc_page_tsc()
69 scale = READ_ONCE(tsc_pg->tsc_scale); in hv_read_tsc_page_tsc()
70 offset = READ_ONCE(tsc_pg->tsc_offset); in hv_read_tsc_page_tsc()
79 } while (READ_ONCE(tsc_pg->tsc_sequence) != sequence); in hv_read_tsc_page_tsc()
/include/drm/
spsc_queue.h
96 node = READ_ONCE(queue->head); in spsc_queue_pop()
101 next = READ_ONCE(node->next); in spsc_queue_pop()
112 } while (unlikely(!(queue->head = READ_ONCE(node->next)))); in spsc_queue_pop()
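The spsc_queue.h hits show READ_ONCE() guarding pointer chasing in a lockless single-producer/single-consumer pop: both the head pointer and node->next are published by the other side, so each must be loaded exactly once. A simplified sketch of the consumer side with hypothetical types; the real spsc_queue_pop() additionally resolves the empty-queue race against the producer with a cmpxchg, omitted here:

	/* Hypothetical SPSC node and queue, consumer side only. */
	struct node {
		struct node *next;
	};

	struct queue {
		struct node *head;	/* popped only by the consumer */
	};

	static struct node *pop(struct queue *q)
	{
		struct node *node = READ_ONCE(q->head);

		if (!node)
			return NULL;

		/* node->next is written by the producer, so load it
		 * once; a repeated or torn load could observe a
		 * half-linked node. */
		q->head = READ_ONCE(node->next);
		return node;
	}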
/include/vdso/
helpers.h
13 while (unlikely((seq = READ_ONCE(vd->seq)) & 1)) in vdso_read_begin()
26 seq = READ_ONCE(vd->seq); in vdso_read_retry()
/include/asm-generic/bitops/
lock.h
25 if (READ_ONCE(*p) & mask) in arch_test_and_set_bit_lock()
64 old = READ_ONCE(*p); in arch___clear_bit_unlock()
/include/asm-generic/
barrier.h
150 __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p); \
205 __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p); \
252 VAL = READ_ONCE(*__PTR); \
rwonce.h
47 #define READ_ONCE(x) \ (macro definition)
preempt.h
11 return READ_ONCE(current_thread_info()->preempt_count); in preempt_count()
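rwonce.h above is where READ_ONCE() itself is defined: a volatile-qualified load that tells the compiler it may not tear, fuse, or re-issue the access. The canonical use it enables, sketched with a hypothetical flag (not taken from the kernel source):

	static int done;	/* hypothetical shared flag */

	static void producer(void)
	{
		/* WRITE_ONCE() publishes the flag in one whole store. */
		WRITE_ONCE(done, 1);
	}

	static void consumer(void)
	{
		/* Without READ_ONCE() the compiler may hoist the load
		 * out of the loop and spin forever on a stale value. */
		while (!READ_ONCE(done))
			cpu_relax();
	}

READ_ONCE()/WRITE_ONCE() only constrain the compiler; ordering against other memory accesses still needs smp_load_acquire()/smp_store_release() or explicit barriers, as the barrier.h hits above illustrate.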
/include/trace/events/
sock.h
86 __entry->sk_rcvbuf = READ_ONCE(sk->sk_rcvbuf);
113 __entry->sysctl_mem[0] = READ_ONCE(prot->sysctl_mem[0]);
114 __entry->sysctl_mem[1] = READ_ONCE(prot->sysctl_mem[1]);
115 __entry->sysctl_mem[2] = READ_ONCE(prot->sysctl_mem[2]);
121 __entry->wmem_queued = READ_ONCE(sk->sk_wmem_queued);
/include/net/tc_act/
tc_gact.h
59 return READ_ONCE(a->tcfa_action) & TC_ACT_EXT_VAL_MASK; in tcf_gact_goto_chain_index()
