
Searched refs: READ_ONCE (results 1 – 25 of 87), sorted by relevance


/include/net/
busy_poll.h
34 return READ_ONCE(sysctl_net_busy_poll); in net_busy_loop_on()
39 return READ_ONCE(sk->sk_ll_usec) && !signal_pending(current); in sk_can_busy_loop()
74 unsigned long bp_usec = READ_ONCE(sysctl_net_busy_poll); in busy_loop_timeout()
90 unsigned long bp_usec = READ_ONCE(sk->sk_ll_usec); in sk_busy_loop_timeout()
105 unsigned int napi_id = READ_ONCE(sk->sk_napi_id); in sk_busy_loop()
139 if (!READ_ONCE(sk->sk_napi_id)) in sk_mark_napi_id_once()
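The busy_poll.h hits above all follow the same pattern: a field that another CPU may change at any time (a sysctl or a socket field) is snapshotted once into a local before being tested. A minimal user-space sketch of that pattern, using a hypothetical sysctl variable and a volatile-cast stand-in for the kernel's READ_ONCE():

    #include <stdio.h>

    /* Stand-in for the kernel macro: the volatile access forces exactly
     * one load and stops the compiler from re-reading the variable. */
    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    /* Hypothetical sysctl, updated concurrently by other threads. */
    static unsigned long sysctl_net_busy_poll;

    static int net_busy_loop_on(void)
    {
        /* One snapshot; a plain read could legally be torn or
         * re-loaded between uses, yielding two different answers. */
        return READ_ONCE(sysctl_net_busy_poll) != 0;
    }

    int main(void)
    {
        printf("busy loop on: %d\n", net_busy_loop_on());
        return 0;
    }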
inet_sock.h
111 READ_ONCE(sock_net(sk)->ipv4.sysctl_tcp_fwmark_accept)) in inet_request_mark()
120 int bound_dev_if = READ_ONCE(sk->sk_bound_dev_if); in inet_request_bound_dev_if()
124 if (!bound_dev_if && READ_ONCE(net->ipv4.sysctl_tcp_l3mdev_accept)) in inet_request_bound_dev_if()
136 if (!READ_ONCE(net->ipv4.sysctl_tcp_l3mdev_accept)) in inet_sk_bound_l3mdev()
156 return inet_bound_dev_eq(!!READ_ONCE(net->ipv4.sysctl_tcp_l3mdev_accept), in inet_sk_bound_dev_eq()
385 return READ_ONCE(net->ipv4.sysctl_ip_nonlocal_bind) || in inet_can_nonlocal_bind()
sock.h
634 return READ_ONCE(sk->sk_peek_off); in sk_peek_offset()
642 s32 off = READ_ONCE(sk->sk_peek_off); in sk_peek_offset_bwd()
981 return READ_ONCE(sk->sk_ack_backlog) > READ_ONCE(sk->sk_max_ack_backlog); in sk_acceptq_is_full()
989 return READ_ONCE(sk->sk_wmem_queued) >> 1; in sk_stream_min_wspace()
994 return READ_ONCE(sk->sk_sndbuf) - READ_ONCE(sk->sk_wmem_queued); in sk_stream_wspace()
1065 if (unlikely(READ_ONCE(sk->sk_incoming_cpu) != cpu)) in sk_incoming_cpu_update()
1099 sock_rps_record_flow_hash(READ_ONCE(sk->sk_rxhash)); in sock_rps_record_flow()
1112 if (unlikely(READ_ONCE(sk->sk_rxhash) != skb->hash)) in sock_rps_save_rxhash()
1152 if (unlikely(READ_ONCE(sk->sk_backlog.tail))) { in sk_flush_backlog()
1326 if (READ_ONCE(sk->sk_wmem_queued) >= READ_ONCE(sk->sk_sndbuf)) in __sk_stream_memory_free()
[all …]
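Several of the sock.h helpers above (sk_acceptq_is_full(), sk_stream_wspace(), __sk_stream_memory_free()) compare two fields that are each snapshotted independently, so the result is a deliberately approximate answer rather than an atomic view of both. A sketch of that idiom, with a hypothetical stand-in for the sk fields:

    #include <stdio.h>

    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    /* Hypothetical stand-ins for sk->sk_sndbuf and sk->sk_wmem_queued. */
    struct sock_like {
        int sndbuf;
        int wmem_queued;
    };

    /* Like sk_stream_wspace(): each field is loaded exactly once, so
     * the computation works on a stable pair of values, but concurrent
     * updates mean the result is an estimate, which is acceptable for
     * a flow-control heuristic. */
    static int stream_wspace(struct sock_like *sk)
    {
        return READ_ONCE(sk->sndbuf) - READ_ONCE(sk->wmem_queued);
    }

    int main(void)
    {
        struct sock_like sk = { .sndbuf = 65536, .wmem_queued = 4096 };

        printf("wspace: %d\n", stream_wspace(&sk));
        return 0;
    }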
tcp.h
267 return READ_ONCE(tcp_memory_pressure); in tcp_under_memory_pressure()
508 last_overflow = READ_ONCE(reuse->synq_overflow_ts); in tcp_synq_overflow()
516 last_overflow = READ_ONCE(tcp_sk(sk)->rx_opt.ts_recent_stamp); in tcp_synq_overflow()
532 last_overflow = READ_ONCE(reuse->synq_overflow_ts); in tcp_synq_no_recent_overflow()
539 last_overflow = READ_ONCE(tcp_sk(sk)->rx_opt.ts_recent_stamp); in tcp_synq_no_recent_overflow()
1396 if (!READ_ONCE(sock_net(sk)->ipv4.sysctl_tcp_slow_start_after_idle) || in tcp_slow_start_after_idle_check()
1412 int tcp_adv_win_scale = READ_ONCE(sock_net(sk)->ipv4.sysctl_tcp_adv_win_scale); in tcp_win_from_space()
1422 return tcp_win_from_space(sk, READ_ONCE(sk->sk_rcvbuf) - in tcp_space()
1423 READ_ONCE(sk->sk_backlog.len) - in tcp_space()
1429 return tcp_win_from_space(sk, READ_ONCE(sk->sk_rcvbuf)); in tcp_full_space()
[all …]
raw.h
78 return inet_bound_dev_eq(READ_ONCE(net->ipv4.sysctl_raw_l3mdev_accept), in raw_sk_bound_dev_eq()
/include/clocksource/
hyperv_timer.h
61 sequence = READ_ONCE(tsc_pg->tsc_sequence); in hv_read_tsc_page_tsc()
70 scale = READ_ONCE(tsc_pg->tsc_scale); in hv_read_tsc_page_tsc()
71 offset = READ_ONCE(tsc_pg->tsc_offset); in hv_read_tsc_page_tsc()
80 } while (READ_ONCE(tsc_pg->tsc_sequence) != sequence); in hv_read_tsc_page_tsc()
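hv_read_tsc_page_tsc() is a lock-free, seqcount-style reader: snapshot a sequence number, read the payload, and retry if the sequence changed underneath. A runnable sketch of the retry loop with hypothetical field names; the real function additionally treats a sequence of 0 as invalid and uses memory barriers between the loads, which this single-threaded illustration omits:

    #include <stdio.h>

    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    /* Hypothetical page of values that a writer updates together,
     * bumping seq around every update. */
    struct tsc_page_like {
        unsigned int seq;
        unsigned long scale;
        unsigned long offset;
    };

    static void read_consistent(struct tsc_page_like *pg,
                                unsigned long *scale, unsigned long *offset)
    {
        unsigned int seq;

        do {
            seq     = READ_ONCE(pg->seq);     /* snapshot the version */
            *scale  = READ_ONCE(pg->scale);   /* read the payload */
            *offset = READ_ONCE(pg->offset);
        } while (READ_ONCE(pg->seq) != seq);  /* writer raced: retry */
    }

    int main(void)
    {
        struct tsc_page_like pg = { .seq = 2, .scale = 3, .offset = 7 };
        unsigned long scale, offset;

        read_consistent(&pg, &scale, &offset);
        printf("scale=%lu offset=%lu\n", scale, offset);
        return 0;
    }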
/include/linux/
srcutiny.h
63 idx = ((READ_ONCE(ssp->srcu_idx) + 1) & 0x2) >> 1; in __srcu_read_lock()
84 idx = ((READ_ONCE(ssp->srcu_idx) + 1) & 0x2) >> 1; in srcu_torture_stats_print()
87 READ_ONCE(ssp->srcu_lock_nesting[!idx]), in srcu_torture_stats_print()
88 READ_ONCE(ssp->srcu_lock_nesting[idx])); in srcu_torture_stats_print()
rcupdate_trace.h
52 WRITE_ONCE(t->trc_reader_nesting, READ_ONCE(t->trc_reader_nesting) + 1); in rcu_read_lock_trace()
75 nesting = READ_ONCE(t->trc_reader_nesting) - 1; in rcu_read_unlock_trace()
79 if (likely(!READ_ONCE(t->trc_reader_special.s)) || nesting) { in rcu_read_unlock_trace()
dynamic_queue_limits.h
94 return READ_ONCE(dql->adj_limit) - READ_ONCE(dql->num_queued); in dql_avail()
hugetlb_inline.h
11 return !!(READ_ONCE(vma->vm_flags) & VM_HUGETLB); in is_vm_hugetlb_page()
rcupdate.h
155 if (!(preempt) && READ_ONCE((t)->rcu_tasks_holdout)) \
169 if (!likely(READ_ONCE((t)->trc_reader_checked)) && \
170 !unlikely(READ_ONCE((t)->trc_reader_nesting))) { \
385 typeof(*p) *_________p1 = (typeof(*p) *__force)READ_ONCE(p); \
392 typeof(*p) *________p1 = (typeof(*p) *__force)READ_ONCE(p); \
406 typeof(p) ________p1 = READ_ONCE(p); \
1003 rcu_callback_t func = READ_ONCE(rhp->func); in rcu_head_after_call_rcu()
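Lines 385–406 of rcupdate.h are the guts of rcu_dereference(): the shared pointer is loaded exactly once with READ_ONCE() before it is used, so a reader can never observe two different pointers across its own uses. A user-space sketch of why the single load matters:

    #include <stdio.h>
    #include <stddef.h>

    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    struct item {
        int val;
    };

    /* Hypothetical pointer published by a writer thread. */
    static struct item *shared_ptr;

    static int read_item(void)
    {
        /* One load into a local, as rcu_dereference() does; reading
         * shared_ptr twice could see a non-NULL pointer in the check
         * and a NULL (or different) pointer in the dereference. */
        struct item *p = READ_ONCE(shared_ptr);

        return p ? p->val : -1;
    }

    int main(void)
    {
        static struct item it = { .val = 42 };

        shared_ptr = &it;   /* the kernel would use rcu_assign_pointer() */
        printf("%d\n", read_item());
        return 0;
    }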
entry-kvm.h
59 unsigned long ti_work = READ_ONCE(current_thread_info()->flags); in __xfer_to_guest_mode_work_pending()
rcu_sync.h
36 return !READ_ONCE(rsp->gp_state); /* GP_IDLE */ in rcu_sync_is_idle()
list_nulls.h
83 return !READ_ONCE(h->pprev); in hlist_nulls_unhashed_lockless()
88 return is_a_nulls(READ_ONCE(h->first)); in hlist_nulls_empty()
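hlist_nulls_unhashed_lockless() shows another common shape: a single racy load used as a boolean hint. The answer may already be stale by the time it is returned, but READ_ONCE() guarantees the load itself is neither torn nor duplicated. A sketch with a simplified, hypothetical node type:

    #include <stdio.h>
    #include <stddef.h>

    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    /* Simplified stand-in for struct hlist_nulls_node. */
    struct nulls_node_like {
        struct nulls_node_like *next;
        struct nulls_node_like **pprev;
    };

    /* A node counts as hashed once its pprev back-pointer has been
     * published; callers running without the bucket lock get a
     * best-effort answer. */
    static int unhashed_lockless(struct nulls_node_like *h)
    {
        return !READ_ONCE(h->pprev);
    }

    int main(void)
    {
        struct nulls_node_like n = { .next = NULL, .pprev = NULL };

        printf("unhashed: %d\n", unhashed_lockless(&n));
        return 0;
    }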
average.h
56 unsigned long internal = READ_ONCE(e->internal); \
seqlock.h
215 unsigned seq = READ_ONCE(s->seqcount.sequence); \
228 seq = READ_ONCE(s->seqcount.sequence); \
261 return READ_ONCE(s->sequence); in __seqprop_sequence()
433 return unlikely(READ_ONCE(s->sequence) != start); in do___read_seqcount_retry()
681 return READ_ONCE(s->seqcount.sequence); in raw_read_seqcount_latch()
rculist.h
316 container_of(READ_ONCE(ptr), type, member)
354 struct list_head *__next = READ_ONCE(__ptr->next); \
374 struct list_head *__next = READ_ONCE(__ptr->next); \
429 container_of((typeof(ptr))READ_ONCE(ptr), type, member)
/include/drm/
spsc_queue.h
96 node = READ_ONCE(queue->head); in spsc_queue_pop()
101 next = READ_ONCE(node->next); in spsc_queue_pop()
112 } while (unlikely(!(queue->head = READ_ONCE(node->next)))); in spsc_queue_pop()
/include/asm-generic/bitops/
lock.h
25 if (READ_ONCE(*p) & mask) in test_and_set_bit_lock()
63 old = READ_ONCE(*p); in __clear_bit_unlock()
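The bitops lock.h hits are a fast-path optimization: probe the word with a plain READ_ONCE() first, and only fall back to the expensive atomic read-modify-write when the bit might actually need changing. A sketch using GCC/Clang __atomic builtins in place of the kernel's atomic ops:

    #include <stdio.h>

    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    /* Like test_and_set_bit_lock(): if the bit is already set, the
     * cheap racy probe lets us skip the atomic RMW entirely; if the
     * probe races, the atomic below still gives the correct answer. */
    static int test_and_set_bit_lock_like(unsigned long *word, unsigned int bit)
    {
        unsigned long mask = 1UL << bit;

        if (READ_ONCE(*word) & mask)
            return 1;
        return !!(__atomic_fetch_or(word, mask, __ATOMIC_ACQUIRE) & mask);
    }

    int main(void)
    {
        unsigned long w = 0;
        int first  = test_and_set_bit_lock_like(&w, 3);  /* 0: bit was clear */
        int second = test_and_set_bit_lock_like(&w, 3);  /* 1: already set */

        printf("%d %d\n", first, second);
        return 0;
    }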
/include/vdso/
helpers.h
13 while (unlikely((seq = READ_ONCE(vd->seq)) & 1)) in vdso_read_begin()
26 seq = READ_ONCE(vd->seq); in vdso_read_retry()
/include/trace/events/
sock.h
86 __entry->sk_rcvbuf = READ_ONCE(sk->sk_rcvbuf);
113 __entry->sysctl_mem[0] = READ_ONCE(prot->sysctl_mem[0]);
114 __entry->sysctl_mem[1] = READ_ONCE(prot->sysctl_mem[1]);
115 __entry->sysctl_mem[2] = READ_ONCE(prot->sysctl_mem[2]);
121 __entry->wmem_queued = READ_ONCE(sk->sk_wmem_queued);
/include/asm-generic/
barrier.h
116 __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p); \
171 __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p); \
218 VAL = READ_ONCE(*__PTR); \
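barrier.h layers ordering on top of READ_ONCE(): smp_load_acquire() (lines 116 and 171) is READ_ONCE() plus a barrier, and the smp_cond_load loop (line 218) spins with READ_ONCE() until a condition holds. A message-passing sketch with __atomic builtins standing in for the kernel primitives:

    #include <stdio.h>

    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    static int payload;
    static int flag;

    /* Consumer half of the message-passing idiom: the acquire load of
     * flag orders the payload read after it; READ_ONCE() alone would
     * make the load atomic but would not provide this ordering on
     * weakly ordered CPUs. */
    static int try_consume(void)
    {
        if (__atomic_load_n(&flag, __ATOMIC_ACQUIRE))  /* ~ smp_load_acquire() */
            return READ_ONCE(payload);
        return -1;
    }

    int main(void)
    {
        payload = 42;
        __atomic_store_n(&flag, 1, __ATOMIC_RELEASE);  /* ~ smp_store_release() */
        printf("%d\n", try_consume());
        return 0;
    }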
rwonce.h
47 #define READ_ONCE(x) \ (macro definition)
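The definition indexed above is where every hit in this list bottoms out. The kernel version also type-checks its argument and strips qualifiers with __unqual_scalar_typeof; a user-space approximation, together with the classic case where it is indispensable:

    #include <stdio.h>

    /* Approximation of rwonce.h's READ_ONCE(): the volatile access
     * forces one real load per use, so the compiler can neither cache
     * the value across loop iterations (load fusing) nor split one
     * logical read into several (load tearing). */
    #define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

    static int stop_flag;

    static void wait_for_stop(void)
    {
        /* Without READ_ONCE the compiler may hoist the load out of the
         * loop and spin forever on a stale zero. */
        while (!READ_ONCE(stop_flag))
            ;
    }

    int main(void)
    {
        stop_flag = 1;   /* normally set by another thread */
        wait_for_stop();
        puts("stopped");
        return 0;
    }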
preempt.h
11 return READ_ONCE(current_thread_info()->preempt_count); in preempt_count()
/include/net/tc_act/
tc_gact.h
59 return READ_ONCE(a->tcfa_action) & TC_ACT_EXT_VAL_MASK; in tcf_gact_goto_chain_index()
