Searched refs:atomic_read (Results 1 – 25 of 1663) sorted by relevance

/linux/fs/netfs/
fscache_stats.c
61 atomic_read(&fscache_n_cookies), in fscache_stats_show()
62 atomic_read(&fscache_n_volumes), in fscache_stats_show()
63 atomic_read(&fscache_n_volumes_collision), in fscache_stats_show()
64 atomic_read(&fscache_n_volumes_nomem) in fscache_stats_show()
68 atomic_read(&fscache_n_acquires), in fscache_stats_show()
69 atomic_read(&fscache_n_acquires_ok), in fscache_stats_show()
70 atomic_read(&fscache_n_acquires_oom)); in fscache_stats_show()
73 atomic_read(&fscache_n_cookies_lru), in fscache_stats_show()
74 atomic_read(&fscache_n_cookies_lru_expired), in fscache_stats_show()
75 atomic_read(&fscache_n_cookies_lru_removed), in fscache_stats_show()
[all …]
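The fscache hits above are a statistics dump: fscache_stats_show() snapshots a set of global atomic event counters with atomic_read() and formats them into the seq_file output. A loose userspace analogue of that pattern, using C11 atomics (counter names are illustrative, not the kernel's):

    #include <stdatomic.h>
    #include <stdio.h>

    /* Illustrative event counters, bumped from other threads as events occur. */
    static atomic_int n_cookies;
    static atomic_int n_volumes;
    static atomic_int n_acquires;

    /* Snapshot and print the counters, as a *_stats_show() routine would. */
    static void stats_show(FILE *out)
    {
        fprintf(out, "Cookies : n=%d vol=%d\n",
                atomic_load(&n_cookies), atomic_load(&n_volumes));
        fprintf(out, "Acquire : n=%d\n", atomic_load(&n_acquires));
    }

    int main(void)
    {
        atomic_fetch_add(&n_cookies, 3);
        atomic_fetch_add(&n_volumes, 1);
        atomic_fetch_add(&n_acquires, 3);
        stats_show(stdout);
        return 0;
    }

Each read here is a plain snapshot; the counters may keep moving while the output is generated, which is acceptable for statistics.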
/linux/net/rxrpc/
proc.c
179 atomic_read(&conn->active), in rxrpc_connection_seq_show()
254 atomic_read(&bundle->active), in rxrpc_bundle_seq_show()
421 atomic_read(&local->active_users), in rxrpc_local_seq_show()
477 atomic_read(&rxnet->stat_tx_data_send), in rxrpc_stats_show()
478 atomic_read(&rxnet->stat_tx_data_send_frag), in rxrpc_stats_show()
479 atomic_read(&rxnet->stat_tx_data_send_fail), in rxrpc_stats_show()
480 atomic_read(&rxnet->stat_tx_data_send_msgsize)); in rxrpc_stats_show()
483 atomic_read(&rxnet->stat_tx_data), in rxrpc_stats_show()
484 atomic_read(&rxnet->stat_tx_data_retrans), in rxrpc_stats_show()
485 atomic_read(&rxnet->stat_tx_data_underflow), in rxrpc_stats_show()
[all …]
/linux/net/netfilter/ipvs/
ip_vs_nq.c
45 return atomic_read(&dest->activeconns) + 1; in ip_vs_nq_dest_overhead()
77 !atomic_read(&dest->weight)) in ip_vs_nq_schedule()
83 if (atomic_read(&dest->activeconns) == 0) { in ip_vs_nq_schedule()
90 ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_nq_schedule()
91 (__s64)doh * atomic_read(&least->weight))) { in ip_vs_nq_schedule()
107 atomic_read(&least->activeconns), in ip_vs_nq_schedule()
109 atomic_read(&least->weight), loh); in ip_vs_nq_schedule()
ip_vs_lblcr.c
173 if ((atomic_read(&least->weight) > 0) in ip_vs_dest_set_min()
189 if (((__s64)loh * atomic_read(&dest->weight) > in ip_vs_dest_set_min()
190 (__s64)doh * atomic_read(&least->weight)) in ip_vs_dest_set_min()
202 atomic_read(&least->activeconns), in ip_vs_dest_set_min()
204 atomic_read(&least->weight), loh); in ip_vs_dest_set_min()
222 if (atomic_read(&most->weight) > 0) { in ip_vs_dest_set_max()
235 if (((__s64)moh * atomic_read(&dest->weight) < in ip_vs_dest_set_max()
236 (__s64)doh * atomic_read(&most->weight)) in ip_vs_dest_set_max()
237 && (atomic_read(&dest->weight) > 0)) { in ip_vs_dest_set_max()
247 atomic_read(&most->activeconns), in ip_vs_dest_set_max()
[all …]
ip_vs_sed.c
49 return atomic_read(&dest->activeconns) + 1; in ip_vs_sed_dest_overhead()
80 atomic_read(&dest->weight) > 0) { in ip_vs_sed_schedule()
97 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_sed_schedule()
98 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_sed_schedule()
108 atomic_read(&least->activeconns), in ip_vs_sed_schedule()
110 atomic_read(&least->weight), loh); in ip_vs_sed_schedule()
ip_vs_wlc.c
52 atomic_read(&dest->weight) > 0) { in ip_vs_wlc_schedule()
69 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_wlc_schedule()
70 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_wlc_schedule()
80 atomic_read(&least->activeconns), in ip_vs_wlc_schedule()
82 atomic_read(&least->weight), loh); in ip_vs_wlc_schedule()
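Taken together, the ip_vs_nq.c, ip_vs_sed.c and ip_vs_wlc.c hits show the same selection idea: each scheduler walks the destination list and keeps the server with the smallest overhead/weight ratio, reading weights and connection counts with atomic_read() because they change under traffic, and cross-multiplying (loh * weight(dest) > doh * weight(least)) instead of dividing. A self-contained sketch of that loop in plain C11, with a simplified overhead and illustrative types (not the kernel code):

    #include <stdatomic.h>
    #include <stdio.h>

    struct dest {
        atomic_int activeconns;
        atomic_int weight;
    };

    /* Simplified overhead; the NQ/SED hits above use activeconns + 1. */
    static long long overhead(struct dest *d)
    {
        return atomic_load(&d->activeconns);
    }

    /* Pick the destination with the smallest overhead/weight ratio,
     * comparing loh * w(dest) > doh * w(least) to avoid a division. */
    static struct dest *pick(struct dest *tbl, int n)
    {
        struct dest *least = NULL;
        long long loh = 0;

        for (int i = 0; i < n; i++) {
            int w = atomic_load(&tbl[i].weight);
            if (w <= 0)
                continue;   /* skip disabled servers, as the weight test above does */
            long long doh = overhead(&tbl[i]);
            if (!least || loh * w > doh * atomic_load(&least->weight)) {
                least = &tbl[i];
                loh = doh;
            }
        }
        return least;
    }

    int main(void)
    {
        struct dest tbl[2];
        atomic_init(&tbl[0].activeconns, 10); atomic_init(&tbl[0].weight, 1);
        atomic_init(&tbl[1].activeconns, 30); atomic_init(&tbl[1].weight, 5);
        printf("picked dest %ld\n", (long)(pick(tbl, 2) - tbl));   /* prints 1 */
        return 0;
    }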
ip_vs_ovf.c
36 w = atomic_read(&dest->weight); in ip_vs_ovf_schedule()
38 atomic_read(&dest->activeconns) > w || in ip_vs_ovf_schedule()
51 atomic_read(&h->activeconns), in ip_vs_ovf_schedule()
52 atomic_read(&h->weight)); in ip_vs_ovf_schedule()
ip_vs_lblc.c
311 if (atomic_read(&tbl->entries) <= tbl->max_size) { in ip_vs_lblc_check_expire()
316 goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3; in ip_vs_lblc_check_expire()
420 if (atomic_read(&dest->weight) > 0) { in __ip_vs_lblc_schedule()
437 if ((__s64)loh * atomic_read(&dest->weight) > in __ip_vs_lblc_schedule()
438 (__s64)doh * atomic_read(&least->weight)) { in __ip_vs_lblc_schedule()
448 atomic_read(&least->activeconns), in __ip_vs_lblc_schedule()
450 atomic_read(&least->weight), loh); in __ip_vs_lblc_schedule()
463 if (atomic_read(&dest->activeconns) > atomic_read(&dest->weight)) { in is_overloaded()
467 if (atomic_read(&d->activeconns)*2 in is_overloaded()
468 < atomic_read(&d->weight)) { in is_overloaded()
[all …]
/linux/drivers/pinctrl/qcom/
tlmm-test.c
182 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 0); in tlmm_test_silent()
239 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_rising()
263 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_falling()
288 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_low()
313 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_high()
335 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_falling_in_handler()
357 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_rising_in_handler()
383 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_thread_rising()
384 KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10); in tlmm_test_thread_rising()
409 KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10); in tlmm_test_thread_falling()
[all …]
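The tlmm-test.c hits are KUnit assertions: the test fires a known number of GPIO interrupts and then checks that the handler's atomic counter, read with atomic_read(), has reached the expected value. A rough userspace analogue, with threads standing in for interrupt handlers (illustrative only):

    #include <assert.h>
    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int intr_count;

    /* Stand-in for an interrupt handler: one increment per simulated IRQ. */
    static void *irq_handler(void *arg)
    {
        (void)arg;
        atomic_fetch_add(&intr_count, 1);
        return NULL;
    }

    int main(void)
    {
        pthread_t t[10];

        for (int i = 0; i < 10; i++)
            pthread_create(&t[i], NULL, irq_handler, NULL);
        for (int i = 0; i < 10; i++)
            pthread_join(t[i], NULL);

        /* The moral equivalent of KUNIT_ASSERT_EQ(test, atomic_read(...), 10). */
        assert(atomic_load(&intr_count) == 10);
        puts("ok");
        return 0;
    }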
/linux/fs/bcachefs/
six.c
74 if ((atomic_read(&lock->state) & mask) != mask) in six_set_bitmask()
80 if (atomic_read(&lock->state) & mask) in six_clear_bitmask()
125 (try != !(atomic_read(&lock->state) & SIX_LOCK_HELD_write))); in __do_six_trylock()
160 old = atomic_read(&lock->state); in __do_six_trylock()
168 if (atomic_read(&lock->state) & SIX_LOCK_WAITING_write) in __do_six_trylock()
191 old = atomic_read(&lock->state); in __do_six_trylock()
200 EBUG_ON(ret && !(atomic_read(&lock->state) & l[type].held_mask)); in __do_six_trylock()
207 (atomic_read(&lock->state) & SIX_LOCK_HELD_write)); in __do_six_trylock()
360 if (atomic_read(&lock->state) & SIX_LOCK_NOSPIN) in six_optimistic_spin()
415 EBUG_ON(atomic_read(&lock->state) & SIX_LOCK_HELD_write); in six_lock_slowpath()
[all …]
nocow_locking.c
17 if (l->b[i] == dev_bucket && atomic_read(&l->l[i])) in bch2_bucket_nocow_is_locked()
57 if (!atomic_read(&l->l[i])) { in __bch2_bucket_nocow_trylock()
65 v = atomic_read(&l->l[i]); in __bch2_bucket_nocow_trylock()
69 v = atomic_read(&l->l[i]); in __bch2_bucket_nocow_trylock()
102 v |= atomic_read(&l->l[i]); in bch2_nocow_locks_to_text()
114 int v = atomic_read(&l->l[i]); in bch2_nocow_locks_to_text()
133 BUG_ON(atomic_read(&l->l[j])); in bch2_fs_nocow_locking_exit()
/linux/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_ring.h
71 const unsigned int idx = atomic_read(var); in pvrdma_idx()
80 __u32 idx = atomic_read(var) + 1; /* Increment. */ in pvrdma_idx_ring_inc()
89 const __u32 tail = atomic_read(&r->prod_tail); in pvrdma_idx_ring_has_space()
90 const __u32 head = atomic_read(&r->cons_head); in pvrdma_idx_ring_has_space()
103 const __u32 tail = atomic_read(&r->prod_tail); in pvrdma_idx_ring_has_data()
104 const __u32 head = atomic_read(&r->cons_head); in pvrdma_idx_ring_has_data()
/linux/fs/afs/
validation.c
130 if (atomic_read(&volume->cb_v_check) != atomic_read(&volume->cb_v_break)) in afs_check_validity()
138 else if (vnode->cb_ro_snapshot != atomic_read(&volume->cb_ro_snapshot)) in afs_check_validity()
140 else if (vnode->cb_scrub != atomic_read(&volume->cb_scrub)) in afs_check_validity()
243 snap = atomic_read(&volume->cb_ro_snapshot); in afs_update_volume_creation_time()
334 unsigned int cb_v_break = atomic_read(&volume->cb_v_break); in afs_update_volume_state()
335 unsigned int cb_v_check = atomic_read(&volume->cb_v_check); in afs_update_volume_state()
421 atomic_read(&volume->cb_v_check) != atomic_read(&volume->cb_v_break)) { in afs_validate()
428 cb_ro_snapshot = atomic_read(&volume->cb_ro_snapshot); in afs_validate()
429 cb_scrub = atomic_read(&volume->cb_scrub); in afs_validate()
437 atomic_read(&volume->cb_v_check) != atomic_read(&volume->cb_v_break) || in afs_validate()
[all …]
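The afs/validation.c hits are generation-counter comparisons: each vnode remembers the volume generation numbers (cb_ro_snapshot, cb_scrub, cb_v_break) it saw when it was last validated, and afs_check_validity()/afs_validate() reread the current values with atomic_read(); any mismatch forces revalidation. A stripped-down sketch of that check (two field names are kept from the listing, the rest is illustrative):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct volume {
        atomic_int cb_ro_snapshot;    /* volume-wide generation counters, */
        atomic_int cb_scrub;          /* bumped when cached state goes stale */
    };

    struct vnode {
        int cb_ro_snapshot;           /* generations seen at the last validation */
        int cb_scrub;
    };

    /* Cached state is usable only if every generation number still matches. */
    static bool vnode_is_valid(const struct vnode *vn, struct volume *vol)
    {
        return vn->cb_ro_snapshot == atomic_load(&vol->cb_ro_snapshot) &&
               vn->cb_scrub == atomic_load(&vol->cb_scrub);
    }

    int main(void)
    {
        struct volume vol;
        struct vnode vn = { .cb_ro_snapshot = 4, .cb_scrub = 1 };

        atomic_init(&vol.cb_ro_snapshot, 4);
        atomic_init(&vol.cb_scrub, 1);
        printf("valid=%d\n", vnode_is_valid(&vn, &vol));    /* 1 */

        atomic_fetch_add(&vol.cb_scrub, 1);                 /* volume-side event */
        printf("valid=%d\n", vnode_is_valid(&vn, &vol));    /* 0: revalidate */
        return 0;
    }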
/linux/drivers/crypto/bcm/
util.c
376 atomic_read(&ipriv->session_count)); in spu_debugfs_read()
379 atomic_read(&ipriv->stream_count)); in spu_debugfs_read()
382 atomic_read(&ipriv->setkey_cnt[SPU_OP_CIPHER])); in spu_debugfs_read()
385 atomic_read(&ipriv->op_counts[SPU_OP_CIPHER])); in spu_debugfs_read()
388 op_cnt = atomic_read(&ipriv->cipher_cnt[alg][mode]); in spu_debugfs_read()
399 atomic_read(&ipriv->op_counts[SPU_OP_HASH])); in spu_debugfs_read()
401 op_cnt = atomic_read(&ipriv->hash_cnt[alg]); in spu_debugfs_read()
411 atomic_read(&ipriv->setkey_cnt[SPU_OP_HMAC])); in spu_debugfs_read()
414 atomic_read(&ipriv->op_counts[SPU_OP_HMAC])); in spu_debugfs_read()
416 op_cnt = atomic_read(&ipriv->hmac_cnt[alg]); in spu_debugfs_read()
[all …]
/linux/sound/core/seq/
seq_lock.c
16 if (atomic_read(lockp) < 0) { in snd_use_lock_sync_helper()
17 pr_warn("ALSA: seq_lock: lock trouble [counter = %d] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
20 while (atomic_read(lockp) > 0) { in snd_use_lock_sync_helper()
22 pr_warn("ALSA: seq_lock: waiting [%d left] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
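seq_lock.c waits for a use counter to drain: snd_use_lock_sync_helper() warns if the counter read by atomic_read() has gone negative (an unbalanced release) and otherwise loops until it drops back to zero. A minimal userspace version of that drain-wait (sleep interval and messages are my own):

    #include <stdatomic.h>
    #include <stdio.h>
    #include <unistd.h>

    static atomic_int use_count;

    /* Block until every outstanding user has dropped its reference. */
    static void use_lock_sync(void)
    {
        if (atomic_load(&use_count) < 0) {
            fprintf(stderr, "lock trouble: counter = %d\n",
                    atomic_load(&use_count));
            return;
        }
        while (atomic_load(&use_count) > 0)
            usleep(1000);    /* the kernel helper schedules out instead */
    }

    int main(void)
    {
        atomic_fetch_add(&use_count, 1);    /* acquire a use reference */
        atomic_fetch_sub(&use_count, 1);    /* release it */
        use_lock_sync();                    /* returns at once: count is 0 */
        puts("drained");
        return 0;
    }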
/linux/kernel/sched/
membarrier.c
212 atomic_read(&mm->membarrier_state)); in ipi_sync_rq_state()
244 membarrier_state = atomic_read(&next_mm->membarrier_state); in membarrier_update_current_mm()
325 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
333 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
339 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
345 (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1)) in membarrier_private_expedited()
440 int membarrier_state = atomic_read(&mm->membarrier_state); in sync_runqueues_membarrier_state()
444 if (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1) { in sync_runqueues_membarrier_state()
502 if (atomic_read(&mm->membarrier_state) & in membarrier_register_global_expedited()
542 if ((atomic_read(&mm->membarrier_state) & ready_state) == ready_state) in membarrier_register_private_expedited()
[all …]
/linux/arch/mips/kernel/
sync-r4k.c
116 while (atomic_read(&start_count) != cpus - 1) in check_counter_sync_source()
126 while (atomic_read(&stop_count) != cpus-1) in check_counter_sync_source()
167 if (atomic_read(&test_runs) > 0) in check_counter_sync_source()
192 while (atomic_read(&start_count) != cpus) in synchronise_count_slave()
210 while (atomic_read(&stop_count) != cpus) in synchronise_count_slave()
223 if (!atomic_read(&test_runs)) { in synchronise_count_slave()
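sync-r4k.c uses shared atomic counters as rendezvous points: each CPU increments start_count (or stop_count) and then spins until atomic_read() shows that all expected CPUs have arrived, so they proceed through the counter-synchronisation steps in lockstep. The same spin-barrier idea in portable C, with threads standing in for CPUs (the counter name is borrowed from the listing, everything else is illustrative):

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    #define NCPUS 4

    static atomic_int start_count;

    /* Arrive at the barrier, then spin until every "CPU" has arrived. */
    static void *cpu_thread(void *arg)
    {
        long id = (long)arg;

        atomic_fetch_add(&start_count, 1);
        while (atomic_load(&start_count) != NCPUS)
            ;    /* busy-wait, as the kernel code does here */
        printf("cpu %ld past the barrier\n", id);
        return NULL;
    }

    int main(void)
    {
        pthread_t t[NCPUS];

        for (long i = 0; i < NCPUS; i++)
            pthread_create(&t[i], NULL, cpu_thread, (void *)i);
        for (int i = 0; i < NCPUS; i++)
            pthread_join(t[i], NULL);
        return 0;
    }

The openrisc sync-timer.c hits further down use the same arrangement with count_count_start/count_count_stop.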
/linux/fs/xfs/libxfs/
xfs_group.c
43 ASSERT(atomic_read(&xg->xg_ref) >= 0); in xfs_group_get()
54 ASSERT(atomic_read(&xg->xg_ref) > 0 || in xfs_group_hold()
55 atomic_read(&xg->xg_active_ref) > 0); in xfs_group_hold()
68 ASSERT(atomic_read(&xg->xg_ref) > 0); in xfs_group_put()
162 XFS_IS_CORRUPT(mp, atomic_read(&xg->xg_ref) != 0); in xfs_group_free()
175 XFS_IS_CORRUPT(mp, atomic_read(&xg->xg_active_ref) > 0); in xfs_group_free()
176 XFS_IS_CORRUPT(mp, atomic_read(&xg->xg_active_ref) < 0); in xfs_group_free()
/linux/arch/openrisc/kernel/
sync-timer.c
53 while (atomic_read(&count_count_start) != 1) in synchronise_count_master()
74 while (atomic_read(&count_count_stop) != 1) in synchronise_count_master()
104 while (atomic_read(&count_count_start) != 2) in synchronise_count_slave()
114 while (atomic_read(&count_count_stop) != 2) in synchronise_count_slave()
/linux/drivers/s390/scsi/
zfcp_erp.c
88 if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_lun()
96 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_port()
111 if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_adapter()
131 if (atomic_read(&zsdev->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_erp_handle_failed()
135 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_erp_handle_failed()
139 if (atomic_read(&port->status) & in zfcp_erp_handle_failed()
148 if (atomic_read(&adapter->status) & in zfcp_erp_handle_failed()
173 l_status = atomic_read(&zfcp_sdev->status); in zfcp_erp_required_act()
176 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
184 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
[all …]
/linux/net/mac80211/
led.h
16 if (!atomic_read(&local->rx_led_active)) in ieee80211_led_rx()
25 if (!atomic_read(&local->tx_led_active)) in ieee80211_led_tx()
74 if (atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_tx()
83 if (atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_rx()
/linux/fs/xfs/
xfs_trans_buf.c
149 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_get_buf_map()
184 ASSERT(atomic_read(&bip->bli_refcount) > 0); in __xfs_trans_getsb()
297 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_read_buf_map()
375 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_brelse()
425 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bdetach()
469 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bhold()
490 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bhold_release()
518 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_dirty_buf()
605 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_binval()
659 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_inode_buf()
[all …]
/linux/net/batman-adv/
gateway_common.c
31 gw_mode = atomic_read(&bat_priv->gw.mode); in batadv_gw_tvlv_container_update()
39 down = atomic_read(&bat_priv->gw.bandwidth_down); in batadv_gw_tvlv_container_update()
40 up = atomic_read(&bat_priv->gw.bandwidth_up); in batadv_gw_tvlv_container_update()
86 atomic_read(&bat_priv->gw.mode) == BATADV_GW_MODE_CLIENT) in batadv_gw_tvlv_ogm_handler_v1()
/linux/drivers/net/ethernet/aquantia/atlantic/
aq_utils.h
19 flags_old = atomic_read(flags); in aq_utils_obj_set()
29 flags_old = atomic_read(flags); in aq_utils_obj_clear()
36 return atomic_read(flags) & mask; in aq_utils_obj_test()
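aq_utils.h sets and clears flag bits with a read/retry loop: atomic_read() fetches the current word, the new value is computed, and an atomic compare-and-swap is retried until no other CPU modified the word in between; aq_utils_obj_test() is just a masked read. The equivalent loop with C11 atomics (a sketch of the pattern, not the driver code):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    static atomic_uint flags;

    /* OR a mask into the flag word; retry if another thread raced with us. */
    static void obj_set(atomic_uint *f, unsigned int mask)
    {
        unsigned int old = atomic_load(f);

        while (!atomic_compare_exchange_weak(f, &old, old | mask))
            ;    /* a failed CAS reloads 'old'; just retry */
    }

    static void obj_clear(atomic_uint *f, unsigned int mask)
    {
        unsigned int old = atomic_load(f);

        while (!atomic_compare_exchange_weak(f, &old, old & ~mask))
            ;
    }

    static bool obj_test(atomic_uint *f, unsigned int mask)
    {
        return atomic_load(f) & mask;
    }

    int main(void)
    {
        obj_set(&flags, 0x4);
        printf("set: %d\n", obj_test(&flags, 0x4));
        obj_clear(&flags, 0x4);
        printf("cleared: %d\n", !obj_test(&flags, 0x4));
        return 0;
    }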
/linux/include/linux/
rwbase_rt.h
31 return atomic_read(&rwb->readers) != READER_BIAS; in rw_base_is_locked()
36 return atomic_read(&rwb->readers) == WRITER_BIAS; in rw_base_is_write_locked()
41 return atomic_read(&rwb->readers) > 0; in rw_base_is_contended()
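rwbase_rt.h keeps the whole reader/writer state in a single atomic counter, so each predicate is one atomic_read() comparison: the lock is untouched while the counter equals READER_BIAS, write-locked when it equals WRITER_BIAS, and the contention check is a simple sign test. A compact illustration of those three helpers over a C11 atomic (bias values here are arbitrary stand-ins, not the kernel's):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Arbitrary illustrative bias values. */
    #define READER_BIAS  (-0x40000000)
    #define WRITER_BIAS  (-0x20000000)

    static atomic_int readers = READER_BIAS;    /* unlocked state */

    static bool is_locked(void)       { return atomic_load(&readers) != READER_BIAS; }
    static bool is_write_locked(void) { return atomic_load(&readers) == WRITER_BIAS; }
    static bool is_contended(void)    { return atomic_load(&readers) > 0; }

    int main(void)
    {
        printf("locked=%d write=%d contended=%d\n",
               is_locked(), is_write_locked(), is_contended());
        return 0;
    }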
