
Searched refs:atomic_read (Results 1 – 25 of 1763) sorted by relevance

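A quick orientation before the hits: atomic_read() returns the current value of an atomic_t counter. It is a plain lock-free load (essentially READ_ONCE() of the counter) with no implied memory ordering, which is why the results below mostly use it for statistics, sanity checks and scheduling heuristics. A minimal usage sketch, assuming nothing beyond the stock <linux/atomic.h> API; the counter and function names are illustrative and do not come from any of the files listed below:

    #include <linux/atomic.h>
    #include <linux/printk.h>

    static atomic_t nr_active = ATOMIC_INIT(0);     /* illustrative counter */

    static void job_start(void)
    {
            atomic_inc(&nr_active);                 /* atomic read-modify-write */
    }

    static void job_done(void)
    {
            if (atomic_dec_and_test(&nr_active))    /* dropped to zero? */
                    pr_info("all jobs drained\n");
    }

    static int jobs_pending(void)
    {
            return atomic_read(&nr_active);         /* lock-free snapshot, no barrier */
    }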

/kernel/linux/linux-5.10/fs/fscache/
stats.c 142 atomic_read(&fscache_n_cookie_index), in fscache_stats_show()
143 atomic_read(&fscache_n_cookie_data), in fscache_stats_show()
144 atomic_read(&fscache_n_cookie_special)); in fscache_stats_show()
147 atomic_read(&fscache_n_object_alloc), in fscache_stats_show()
148 atomic_read(&fscache_n_object_no_alloc), in fscache_stats_show()
149 atomic_read(&fscache_n_object_avail), in fscache_stats_show()
150 atomic_read(&fscache_n_object_dead)); in fscache_stats_show()
152 atomic_read(&fscache_n_checkaux_none), in fscache_stats_show()
153 atomic_read(&fscache_n_checkaux_okay), in fscache_stats_show()
154 atomic_read(&fscache_n_checkaux_update), in fscache_stats_show()
[all …]
histogram.c 37 n[0] = atomic_read(&fscache_obj_instantiate_histogram[index]); in fscache_histogram_show()
38 n[1] = atomic_read(&fscache_ops_histogram[index]); in fscache_histogram_show()
39 n[2] = atomic_read(&fscache_objs_histogram[index]); in fscache_histogram_show()
40 n[3] = atomic_read(&fscache_retrieval_delay_histogram[index]); in fscache_histogram_show()
41 n[4] = atomic_read(&fscache_retrieval_histogram[index]); in fscache_histogram_show()
/kernel/linux/linux-5.10/net/netfilter/ipvs/
ip_vs_nq.c 45 return atomic_read(&dest->activeconns) + 1; in ip_vs_nq_dest_overhead()
77 !atomic_read(&dest->weight)) in ip_vs_nq_schedule()
83 if (atomic_read(&dest->activeconns) == 0) { in ip_vs_nq_schedule()
90 ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_nq_schedule()
91 (__s64)doh * atomic_read(&least->weight))) { in ip_vs_nq_schedule()
107 atomic_read(&least->activeconns), in ip_vs_nq_schedule()
109 atomic_read(&least->weight), loh); in ip_vs_nq_schedule()
ip_vs_lblcr.c 173 if ((atomic_read(&least->weight) > 0) in ip_vs_dest_set_min()
189 if (((__s64)loh * atomic_read(&dest->weight) > in ip_vs_dest_set_min()
190 (__s64)doh * atomic_read(&least->weight)) in ip_vs_dest_set_min()
202 atomic_read(&least->activeconns), in ip_vs_dest_set_min()
204 atomic_read(&least->weight), loh); in ip_vs_dest_set_min()
222 if (atomic_read(&most->weight) > 0) { in ip_vs_dest_set_max()
235 if (((__s64)moh * atomic_read(&dest->weight) < in ip_vs_dest_set_max()
236 (__s64)doh * atomic_read(&most->weight)) in ip_vs_dest_set_max()
237 && (atomic_read(&dest->weight) > 0)) { in ip_vs_dest_set_max()
247 atomic_read(&most->activeconns), in ip_vs_dest_set_max()
[all …]
ip_vs_sed.c 49 return atomic_read(&dest->activeconns) + 1; in ip_vs_sed_dest_overhead()
80 atomic_read(&dest->weight) > 0) { in ip_vs_sed_schedule()
97 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_sed_schedule()
98 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_sed_schedule()
108 atomic_read(&least->activeconns), in ip_vs_sed_schedule()
110 atomic_read(&least->weight), loh); in ip_vs_sed_schedule()
ip_vs_wlc.c 52 atomic_read(&dest->weight) > 0) { in ip_vs_wlc_schedule()
69 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_wlc_schedule()
70 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_wlc_schedule()
80 atomic_read(&least->activeconns), in ip_vs_wlc_schedule()
82 atomic_read(&least->weight), loh); in ip_vs_wlc_schedule()
ip_vs_fo.c 34 atomic_read(&dest->weight) > hw) { in ip_vs_fo_schedule()
36 hw = atomic_read(&dest->weight); in ip_vs_fo_schedule()
44 atomic_read(&hweight->activeconns), in ip_vs_fo_schedule()
45 atomic_read(&hweight->weight)); in ip_vs_fo_schedule()
ip_vs_ovf.c 36 w = atomic_read(&dest->weight); in ip_vs_ovf_schedule()
38 atomic_read(&dest->activeconns) > w || in ip_vs_ovf_schedule()
51 atomic_read(&h->activeconns), in ip_vs_ovf_schedule()
52 atomic_read(&h->weight)); in ip_vs_ovf_schedule()
ip_vs_lblc.c 311 if (atomic_read(&tbl->entries) <= tbl->max_size) { in ip_vs_lblc_check_expire()
316 goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3; in ip_vs_lblc_check_expire()
420 if (atomic_read(&dest->weight) > 0) { in __ip_vs_lblc_schedule()
437 if ((__s64)loh * atomic_read(&dest->weight) > in __ip_vs_lblc_schedule()
438 (__s64)doh * atomic_read(&least->weight)) { in __ip_vs_lblc_schedule()
448 atomic_read(&least->activeconns), in __ip_vs_lblc_schedule()
450 atomic_read(&least->weight), loh); in __ip_vs_lblc_schedule()
463 if (atomic_read(&dest->activeconns) > atomic_read(&dest->weight)) { in is_overloaded()
467 if (atomic_read(&d->activeconns)*2 in is_overloaded()
468 < atomic_read(&d->weight)) { in is_overloaded()
[all …]
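
The IPVS hits above (ip_vs_nq.c, ip_vs_lblcr.c, ip_vs_sed.c, ip_vs_wlc.c, ip_vs_fo.c, ip_vs_ovf.c, ip_vs_lblc.c) share one pattern: each scheduler snapshots a destination's activeconns and weight with atomic_read() and keeps the server with the lowest overhead per unit of weight, comparing 64-bit cross-products instead of dividing. A simplified sketch of just that comparison, with illustrative types modelled on the snippets above; the real schedulers also handle zero weights, overload flags and per-scheme overhead formulas:

    #include <linux/atomic.h>
    #include <linux/types.h>

    /* Sketch of the weighted least-connection pick used by the IPVS
     * schedulers listed above; struct and helper names are illustrative. */
    struct dest {
            atomic_t activeconns;
            atomic_t weight;
    };

    static inline __s64 overhead(struct dest *d)
    {
            return atomic_read(&d->activeconns) + 1;        /* as in ip_vs_nq/ip_vs_sed */
    }

    /* True if @cand is less loaded per unit weight than @best, i.e.
     * overhead(cand)/weight(cand) < overhead(best)/weight(best),
     * evaluated as a cross-product to avoid division and overflow. */
    static bool better_pick(struct dest *cand, struct dest *best)
    {
            return overhead(best) * (__s64)atomic_read(&cand->weight) >
                   overhead(cand) * (__s64)atomic_read(&best->weight);
    }
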
/kernel/linux/linux-5.10/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_ring.h 71 const unsigned int idx = atomic_read(var); in pvrdma_idx()
80 __u32 idx = atomic_read(var) + 1; /* Increment. */ in pvrdma_idx_ring_inc()
89 const __u32 tail = atomic_read(&r->prod_tail); in pvrdma_idx_ring_has_space()
90 const __u32 head = atomic_read(&r->cons_head); in pvrdma_idx_ring_has_space()
103 const __u32 tail = atomic_read(&r->prod_tail); in pvrdma_idx_ring_has_data()
104 const __u32 head = atomic_read(&r->cons_head); in pvrdma_idx_ring_has_data()
/kernel/linux/linux-5.10/drivers/lightnvm/
pblk-rl.c 31 rb_space = atomic_read(&rl->rb_space); in pblk_rl_is_limit()
38 int rb_user_cnt = atomic_read(&rl->rb_user_cnt); in pblk_rl_user_may_insert()
39 int rb_space = atomic_read(&rl->rb_space); in pblk_rl_user_may_insert()
52 int rb_space = atomic_read(&rl->rb_space); in pblk_rl_inserted()
60 int rb_gc_cnt = atomic_read(&rl->rb_gc_cnt); in pblk_rl_gc_may_insert()
100 return atomic_read(&rl->free_blocks); in pblk_rl_nr_free_blks()
105 return atomic_read(&rl->free_user_blocks); in pblk_rl_nr_user_free_blks()
113 int werr_gc_needed = atomic_read(&rl->werr_lines); in __pblk_rl_update_rates()
161 int blk_in_line = atomic_read(&line->blk_in_line); in pblk_rl_free_lines_inc()
173 int blk_in_line = atomic_read(&line->blk_in_line); in pblk_rl_free_lines_dec()
[all …]
/kernel/linux/linux-5.10/drivers/crypto/bcm/
util.c 374 atomic_read(&ipriv->session_count)); in spu_debugfs_read()
377 atomic_read(&ipriv->stream_count)); in spu_debugfs_read()
380 atomic_read(&ipriv->setkey_cnt[SPU_OP_CIPHER])); in spu_debugfs_read()
383 atomic_read(&ipriv->op_counts[SPU_OP_CIPHER])); in spu_debugfs_read()
386 op_cnt = atomic_read(&ipriv->cipher_cnt[alg][mode]); in spu_debugfs_read()
397 atomic_read(&ipriv->op_counts[SPU_OP_HASH])); in spu_debugfs_read()
399 op_cnt = atomic_read(&ipriv->hash_cnt[alg]); in spu_debugfs_read()
409 atomic_read(&ipriv->setkey_cnt[SPU_OP_HMAC])); in spu_debugfs_read()
412 atomic_read(&ipriv->op_counts[SPU_OP_HMAC])); in spu_debugfs_read()
414 op_cnt = atomic_read(&ipriv->hmac_cnt[alg]); in spu_debugfs_read()
[all …]
/kernel/linux/linux-5.10/sound/core/seq/
seq_lock.c 16 if (atomic_read(lockp) < 0) { in snd_use_lock_sync_helper()
17 pr_warn("ALSA: seq_lock: lock trouble [counter = %d] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
20 while (atomic_read(lockp) > 0) { in snd_use_lock_sync_helper()
22 pr_warn("ALSA: seq_lock: waiting [%d left] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
/kernel/linux/linux-5.10/kernel/sched/
membarrier.c 79 atomic_read(&mm->membarrier_state)); in ipi_sync_rq_state()
180 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
187 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
193 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
199 (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1)) in membarrier_private_expedited()
289 int membarrier_state = atomic_read(&mm->membarrier_state); in sync_runqueues_membarrier_state()
293 if (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1) { in sync_runqueues_membarrier_state()
350 if (atomic_read(&mm->membarrier_state) & in membarrier_register_global_expedited()
390 if ((atomic_read(&mm->membarrier_state) & ready_state) == ready_state) in membarrier_register_private_expedited()
/kernel/linux/linux-5.10/mm/
zswapd_control.c 67 return atomic_read(&zram_wm_ratio); in get_zram_wm_ratio()
72 return atomic_read(&compress_ratio); in get_compress_ratio()
77 return atomic_read(&inactive_file_ratio); in get_inactive_file_ratio()
82 return atomic_read(&active_file_ratio); in get_active_file_ratio()
87 return atomic_read(&avail_buffers); in get_avail_buffers()
92 return atomic_read(&min_avail_buffers); in get_min_avail_buffers()
97 return atomic_read(&high_avail_buffers); in get_high_avail_buffers()
102 return atomic_read(&max_reclaim_size); in get_zswapd_max_reclaim_size()
163 if (atomic_read(&min_avail_buffers) == 0) in avail_buffers_params_write()
531 seq_printf(m, "avail_buffers: %u\n", atomic_read(&avail_buffers)); in avail_buffers_params_show()
[all …]
/kernel/linux/linux-5.10/arch/openrisc/kernel/
sync-timer.c 53 while (atomic_read(&count_count_start) != 1) in synchronise_count_master()
74 while (atomic_read(&count_count_stop) != 1) in synchronise_count_master()
104 while (atomic_read(&count_count_start) != 2) in synchronise_count_slave()
114 while (atomic_read(&count_count_stop) != 2) in synchronise_count_slave()
/kernel/linux/linux-5.10/arch/mips/kernel/
sync-r4k.c 50 while (atomic_read(&count_count_start) != 1) in synchronise_count_master()
71 while (atomic_read(&count_count_stop) != 1) in synchronise_count_master()
104 while (atomic_read(&count_count_start) != 2) in synchronise_count_slave()
114 while (atomic_read(&count_count_stop) != 2) in synchronise_count_slave()
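
The sync-timer.c (openrisc) and sync-r4k.c (mips) hits above are two copies of the same counter-synchronisation rendezvous: each CPU bumps a shared atomic counter and then busy-waits with atomic_read() until the other side has checked in, once at the start and once at the end of the measurement window. A symmetric, stripped-down sketch of that barrier for exactly two CPUs; the in-tree versions stage the master and slave steps differently (hence the comparisons against 1 on the master and 2 on the slave) and reset the counters for reuse:

    #include <linux/atomic.h>
    #include <asm/processor.h>

    static atomic_t arrived = ATOMIC_INIT(0);       /* illustrative, single-use counter */

    /* Each of the two CPUs calls this once; neither returns until both have. */
    static void two_cpu_rendezvous(void)
    {
            atomic_inc(&arrived);                   /* announce arrival */
            while (atomic_read(&arrived) != 2)      /* spin until the peer shows up */
                    cpu_relax();
    }
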
/kernel/linux/linux-5.10/fs/afs/
cell.c 288 trace_afs_cell(cell->debug_id, atomic_read(&cell->ref), 2, afs_cell_trace_insert); in afs_lookup_cell()
296 trace_afs_cell(cell->debug_id, atomic_read(&cell->ref), atomic_read(&cell->active), in afs_lookup_cell()
495 u = atomic_read(&cell->ref); in afs_cell_destroy()
497 trace_afs_cell(cell->debug_id, u, atomic_read(&cell->active), afs_cell_trace_free); in afs_cell_destroy()
542 if (atomic_read(&cell->ref) <= 0) in afs_get_cell()
546 trace_afs_cell(cell->debug_id, u, atomic_read(&cell->active), reason); in afs_get_cell()
559 a = atomic_read(&cell->active); in afs_put_cell()
563 a = atomic_read(&cell->active); in afs_put_cell()
577 if (atomic_read(&cell->ref) <= 0) in afs_use_cell()
580 u = atomic_read(&cell->ref); in afs_use_cell()
[all …]
proc.c 50 atomic_read(&cell->ref), in afs_proc_cells_show()
51 atomic_read(&cell->active), in afs_proc_cells_show()
220 atomic_read(&vol->usage), vol->vid, in afs_proc_cell_volumes_show()
316 atomic_read(&vlserver->probe_outstanding)); in afs_proc_cell_vlservers_show()
391 atomic_read(&server->ref), in afs_proc_servers_show()
392 atomic_read(&server->active)); in afs_proc_servers_show()
397 atomic_read(&server->probe_outstanding)); in afs_proc_servers_show()
588 atomic_read(&net->n_lookup), in afs_proc_stats_show()
589 atomic_read(&net->n_reval), in afs_proc_stats_show()
590 atomic_read(&net->n_inval), in afs_proc_stats_show()
[all …]
/kernel/linux/linux-5.10/drivers/s390/scsi/
zfcp_erp.c 88 if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_lun()
96 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_port()
111 if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_adapter()
131 if (atomic_read(&zsdev->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_erp_handle_failed()
135 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_erp_handle_failed()
139 if (atomic_read(&port->status) & in zfcp_erp_handle_failed()
148 if (atomic_read(&adapter->status) & in zfcp_erp_handle_failed()
173 l_status = atomic_read(&zfcp_sdev->status); in zfcp_erp_required_act()
176 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
184 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
[all …]
/kernel/linux/linux-5.10/kernel/
cred.c 75 return atomic_read(&cred->subscribers); in read_cred_subscribers()
101 atomic_read(&cred->usage) != 0 || in put_cred_rcu()
106 atomic_read(&cred->usage), in put_cred_rcu()
109 if (atomic_read(&cred->usage) != 0) in put_cred_rcu()
111 cred, atomic_read(&cred->usage)); in put_cred_rcu()
135 atomic_read(&cred->usage), in __put_cred()
138 BUG_ON(atomic_read(&cred->usage) != 0); in __put_cred()
162 atomic_read(&tsk->cred->usage), in exit_creds()
352 p->cred, atomic_read(&p->cred->usage), in copy_creds()
443 atomic_read(&new->usage), in commit_creds()
[all …]
/kernel/linux/linux-5.10/include/asm-generic/
qspinlock.h 28 return atomic_read(&lock->val); in queued_spin_is_locked()
44 return !atomic_read(&lock.val); in queued_spin_value_unlocked()
54 return atomic_read(&lock->val) & ~_Q_LOCKED_MASK; in queued_spin_is_contended()
63 u32 val = atomic_read(&lock->val); in queued_spin_trylock()
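
The asm-generic/qspinlock.h hits above treat the whole lock word as one atomic_t: a non-zero value means the lock is held, and bits above _Q_LOCKED_MASK mean waiters are queued. The trylock path follows the classic "peek with atomic_read(), then cmpxchg" shape; roughly the following, simplified to a bare atomic_t and a locked value of 1 (the in-tree helper operates on struct qspinlock and uses _Q_LOCKED_VAL):

    #include <linux/atomic.h>
    #include <linux/types.h>

    /* Simplified sketch of the queued_spin_trylock() fast path above. */
    static inline bool trylock_sketch(atomic_t *val)
    {
            int old = atomic_read(val);     /* cheap unlocked peek */

            if (old)                        /* held, or waiters queued */
                    return false;

            /* attempt the 0 -> 1 (locked) transition with acquire ordering */
            return atomic_try_cmpxchg_acquire(val, &old, 1);
    }
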
/kernel/linux/linux-5.10/net/mac80211/
led.h 18 if (!atomic_read(&local->rx_led_active)) in ieee80211_led_rx()
29 if (!atomic_read(&local->tx_led_active)) in ieee80211_led_tx()
78 if (ieee80211_is_data(fc) && atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_tx()
87 if (ieee80211_is_data(fc) && atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_rx()
/kernel/linux/linux-5.10/fs/btrfs/
locking.c 145 WARN_ON(atomic_read(&eb->spinning_readers) == 0); in btrfs_assert_spinning_readers_put()
161 BUG_ON(!atomic_read(&eb->read_locks)); in btrfs_assert_tree_read_locked()
344 if (READ_ONCE(eb->blocking_writers) || atomic_read(&eb->blocking_readers)) in btrfs_try_tree_write_lock()
349 if (READ_ONCE(eb->blocking_writers) || atomic_read(&eb->blocking_readers)) { in btrfs_try_tree_write_lock()
406 WARN_ON(atomic_read(&eb->blocking_readers) == 0); in btrfs_tree_read_unlock_blocking()
429 wait_event(eb->read_lock_wq, atomic_read(&eb->blocking_readers) == 0); in __btrfs_tree_lock()
433 if (atomic_read(&eb->blocking_readers) || in __btrfs_tree_lock()
620 if (atomic_read(&lock->readers)) in btrfs_drew_try_write_lock()
627 if (atomic_read(&lock->readers)) { in btrfs_drew_try_write_lock()
640 wait_event(lock->pending_writers, !atomic_read(&lock->readers)); in btrfs_drew_write_lock()
/kernel/linux/linux-5.10/net/batman-adv/
gateway_common.c 140 gw_mode = atomic_read(&bat_priv->gw.mode); in batadv_gw_tvlv_container_update()
148 down = atomic_read(&bat_priv->gw.bandwidth_down); in batadv_gw_tvlv_container_update()
149 up = atomic_read(&bat_priv->gw.bandwidth_up); in batadv_gw_tvlv_container_update()
177 down_curr = (unsigned int)atomic_read(&bat_priv->gw.bandwidth_down); in batadv_gw_bandwidth_set()
178 up_curr = (unsigned int)atomic_read(&bat_priv->gw.bandwidth_up); in batadv_gw_bandwidth_set()
246 atomic_read(&bat_priv->gw.mode) == BATADV_GW_MODE_CLIENT) in batadv_gw_tvlv_ogm_handler_v1()
