Searched refs:smp_load_acquire (Results 1 – 16 of 16) sorted by relevance
175 #ifndef smp_load_acquire
176 #define smp_load_acquire(p) __smp_load_acquire(p) macro
202 #ifndef smp_load_acquire
203 #define smp_load_acquire(p) \ macro
81 struct freelist_node *prev, *next, *head = smp_load_acquire(&list->head); in freelist_try_get()
89 head = smp_load_acquire(&list->head); in freelist_try_get()
85 return smp_load_acquire(&mnt->mnt_userns); in mnt_user_ns()
126 return smp_load_acquire(&cookie->state); in fscache_cookie_state()
135 return smp_load_acquire(&inode->i_verity_info); in fsverity_get_info()
286 cookie->locked = smp_load_acquire(&inode->i_state) & I_WB_SWITCH; in unlocked_inode_to_wb_begin()
453 return smp_load_acquire(&key->state); in key_read_state()
209 return smp_load_acquire(&inode->i_crypt_info); in fscrypt_get_info()
328 struct list_head *next = smp_load_acquire(&head->next); in list_empty_careful()
664 if (unlikely(vma->vm_lock_seq == smp_load_acquire(&vma->vm_mm->mm_lock_seq))) { in vma_start_read()
23 #define RDMA_READ_UAPI_ATOMIC(member) smp_load_acquire(&(member).val)
140 return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1))); in generic_test_bit_acquire()
381 (smp_load_acquire(&sk->sk_validate_xmit_skb) == in tls_is_sk_tx_device_offloaded()
517 smp_load_acquire(&sk->sk_destruct) != tls_device_sk_destruct) in tls_is_sk_rx_device_offloaded()
333 return smp_load_acquire(&sk->sk_state); in inet_sk_state_load()
1328 return smp_load_acquire(&sk->sk_pacing_status) == SK_PACING_NEEDED; in tcp_needs_internal_pacing()
227 ret = smp_load_acquire(&(v)->counter); in arch_atomic_read_acquire()
1348 ret = smp_load_acquire(&(v)->counter); in arch_atomic64_read_acquire()