/linux-6.6.21/fs/fscache/stats.c
    59 atomic_read(&fscache_n_cookies), in fscache_stats_show()
    60 atomic_read(&fscache_n_volumes), in fscache_stats_show()
    61 atomic_read(&fscache_n_volumes_collision), in fscache_stats_show()
    62 atomic_read(&fscache_n_volumes_nomem) in fscache_stats_show()
    66 atomic_read(&fscache_n_acquires), in fscache_stats_show()
    67 atomic_read(&fscache_n_acquires_ok), in fscache_stats_show()
    68 atomic_read(&fscache_n_acquires_oom)); in fscache_stats_show()
    71 atomic_read(&fscache_n_cookies_lru), in fscache_stats_show()
    72 atomic_read(&fscache_n_cookies_lru_expired), in fscache_stats_show()
    73 atomic_read(&fscache_n_cookies_lru_removed), in fscache_stats_show()
    [all …]
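Everything fscache prints here follows one pattern: writers bump atomic_t counters as events happen, and the stats show routine samples each counter with atomic_read() for display. A minimal user-space sketch of that pattern using C11 atomics (the counter names are invented for illustration, not fscache's):

#include <stdatomic.h>
#include <stdio.h>

/* Illustrative counters only; they stand in for fscache_n_cookies etc. */
static atomic_int n_cookies;
static atomic_int n_acquires;
static atomic_int n_acquires_ok;

static void stats_show(FILE *m)
{
	/* Each counter is sampled independently; the snapshot as a whole
	 * is not atomic, which is acceptable for statistics output. */
	fprintf(m, "Cookies: n=%d\n", atomic_load(&n_cookies));
	fprintf(m, "Acquire: n=%d ok=%d\n",
		atomic_load(&n_acquires),
		atomic_load(&n_acquires_ok));
}

int main(void)
{
	atomic_fetch_add(&n_cookies, 1);
	atomic_fetch_add(&n_acquires, 1);
	atomic_fetch_add(&n_acquires_ok, 1);
	stats_show(stdout);
	return 0;
}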
/linux-6.6.21/fs/netfs/stats.c
    34 atomic_read(&netfs_n_rh_readahead), in netfs_stats_show()
    35 atomic_read(&netfs_n_rh_readpage), in netfs_stats_show()
    36 atomic_read(&netfs_n_rh_write_begin), in netfs_stats_show()
    37 atomic_read(&netfs_n_rh_write_zskip), in netfs_stats_show()
    38 atomic_read(&netfs_n_rh_rreq), in netfs_stats_show()
    39 atomic_read(&netfs_n_rh_sreq)); in netfs_stats_show()
    41 atomic_read(&netfs_n_rh_zero), in netfs_stats_show()
    42 atomic_read(&netfs_n_rh_short_read), in netfs_stats_show()
    43 atomic_read(&netfs_n_rh_write_zskip)); in netfs_stats_show()
    45 atomic_read(&netfs_n_rh_download), in netfs_stats_show()
    [all …]
/linux-6.6.21/net/rxrpc/proc.c
    181 atomic_read(&conn->active), in rxrpc_connection_seq_show()
    350 atomic_read(&local->active_users), in rxrpc_local_seq_show()
    406 atomic_read(&rxnet->stat_tx_data_send), in rxrpc_stats_show()
    407 atomic_read(&rxnet->stat_tx_data_send_frag), in rxrpc_stats_show()
    408 atomic_read(&rxnet->stat_tx_data_send_fail)); in rxrpc_stats_show()
    411 atomic_read(&rxnet->stat_tx_data), in rxrpc_stats_show()
    412 atomic_read(&rxnet->stat_tx_data_retrans), in rxrpc_stats_show()
    413 atomic_read(&rxnet->stat_tx_data_underflow), in rxrpc_stats_show()
    414 atomic_read(&rxnet->stat_tx_data_cwnd_reset)); in rxrpc_stats_show()
    417 atomic_read(&rxnet->stat_rx_data), in rxrpc_stats_show()
    [all …]
/linux-6.6.21/net/netfilter/ipvs/ip_vs_nq.c
    45 return atomic_read(&dest->activeconns) + 1; in ip_vs_nq_dest_overhead()
    77 !atomic_read(&dest->weight)) in ip_vs_nq_schedule()
    83 if (atomic_read(&dest->activeconns) == 0) { in ip_vs_nq_schedule()
    90 ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_nq_schedule()
    91 (__s64)doh * atomic_read(&least->weight))) { in ip_vs_nq_schedule()
    107 atomic_read(&least->activeconns), in ip_vs_nq_schedule()
    109 atomic_read(&least->weight), loh); in ip_vs_nq_schedule()
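The recurring test ((__s64)loh * atomic_read(&dest->weight) > (__s64)doh * atomic_read(&least->weight)) is a cross-multiplied form of loh/weight(least) > doh/weight(dest): it picks the destination with the smallest overhead-to-weight ratio without integer division, and the __s64 casts keep the products from overflowing 32-bit arithmetic. The same comparison recurs in the other IPVS schedulers listed below. A simplified, stand-alone sketch of such a selection loop (the struct and helper names are invented, not the ip_vs ones):

#include <stdatomic.h>
#include <stddef.h>
#include <stdint.h>

struct dest {
	atomic_int activeconns;
	atomic_int weight;
};

/* Overhead in the style of ip_vs_nq/sed: active connections + 1. */
static int64_t overhead(struct dest *d)
{
	return atomic_load(&d->activeconns) + 1;
}

/* Pick the destination with the lowest overhead/weight ratio.
 * loh * weight(dest) > doh * weight(least) is the division-free form
 * of loh/weight(least) > doh/weight(dest). */
static struct dest *pick_least(struct dest *dests, size_t n)
{
	struct dest *least = NULL;
	int64_t loh = 0;

	for (size_t i = 0; i < n; i++) {
		struct dest *d = &dests[i];
		int w = atomic_load(&d->weight);
		int64_t doh = overhead(d);

		if (w <= 0)
			continue;	/* weight 0 means "do not use" */
		if (!least || loh * w > doh * atomic_load(&least->weight)) {
			least = d;
			loh = doh;
		}
	}
	return least;
}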
/linux-6.6.21/net/netfilter/ipvs/ip_vs_lblcr.c
    173 if ((atomic_read(&least->weight) > 0) in ip_vs_dest_set_min()
    189 if (((__s64)loh * atomic_read(&dest->weight) > in ip_vs_dest_set_min()
    190 (__s64)doh * atomic_read(&least->weight)) in ip_vs_dest_set_min()
    202 atomic_read(&least->activeconns), in ip_vs_dest_set_min()
    204 atomic_read(&least->weight), loh); in ip_vs_dest_set_min()
    222 if (atomic_read(&most->weight) > 0) { in ip_vs_dest_set_max()
    235 if (((__s64)moh * atomic_read(&dest->weight) < in ip_vs_dest_set_max()
    236 (__s64)doh * atomic_read(&most->weight)) in ip_vs_dest_set_max()
    237 && (atomic_read(&dest->weight) > 0)) { in ip_vs_dest_set_max()
    247 atomic_read(&most->activeconns), in ip_vs_dest_set_max()
    [all …]
/linux-6.6.21/net/netfilter/ipvs/ip_vs_sed.c
    49 return atomic_read(&dest->activeconns) + 1; in ip_vs_sed_dest_overhead()
    80 atomic_read(&dest->weight) > 0) { in ip_vs_sed_schedule()
    97 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_sed_schedule()
    98 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_sed_schedule()
    108 atomic_read(&least->activeconns), in ip_vs_sed_schedule()
    110 atomic_read(&least->weight), loh); in ip_vs_sed_schedule()
/linux-6.6.21/net/netfilter/ipvs/ip_vs_wlc.c
    52 atomic_read(&dest->weight) > 0) { in ip_vs_wlc_schedule()
    69 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_wlc_schedule()
    70 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_wlc_schedule()
    80 atomic_read(&least->activeconns), in ip_vs_wlc_schedule()
    82 atomic_read(&least->weight), loh); in ip_vs_wlc_schedule()
/linux-6.6.21/net/netfilter/ipvs/ip_vs_fo.c
    34 atomic_read(&dest->weight) > hw) { in ip_vs_fo_schedule()
    36 hw = atomic_read(&dest->weight); in ip_vs_fo_schedule()
    44 atomic_read(&hweight->activeconns), in ip_vs_fo_schedule()
    45 atomic_read(&hweight->weight)); in ip_vs_fo_schedule()
/linux-6.6.21/net/netfilter/ipvs/ip_vs_ovf.c
    36 w = atomic_read(&dest->weight); in ip_vs_ovf_schedule()
    38 atomic_read(&dest->activeconns) > w || in ip_vs_ovf_schedule()
    51 atomic_read(&h->activeconns), in ip_vs_ovf_schedule()
    52 atomic_read(&h->weight)); in ip_vs_ovf_schedule()
/linux-6.6.21/net/netfilter/ipvs/ip_vs_lblc.c
    311 if (atomic_read(&tbl->entries) <= tbl->max_size) { in ip_vs_lblc_check_expire()
    316 goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3; in ip_vs_lblc_check_expire()
    420 if (atomic_read(&dest->weight) > 0) { in __ip_vs_lblc_schedule()
    437 if ((__s64)loh * atomic_read(&dest->weight) > in __ip_vs_lblc_schedule()
    438 (__s64)doh * atomic_read(&least->weight)) { in __ip_vs_lblc_schedule()
    448 atomic_read(&least->activeconns), in __ip_vs_lblc_schedule()
    450 atomic_read(&least->weight), loh); in __ip_vs_lblc_schedule()
    463 if (atomic_read(&dest->activeconns) > atomic_read(&dest->weight)) { in is_overloaded()
    467 if (atomic_read(&d->activeconns)*2 in is_overloaded()
    468 < atomic_read(&d->weight)) { in is_overloaded()
    [all …]
/linux-6.6.21/drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
    71 const unsigned int idx = atomic_read(var); in pvrdma_idx()
    80 __u32 idx = atomic_read(var) + 1; /* Increment. */ in pvrdma_idx_ring_inc()
    89 const __u32 tail = atomic_read(&r->prod_tail); in pvrdma_idx_ring_has_space()
    90 const __u32 head = atomic_read(&r->cons_head); in pvrdma_idx_ring_has_space()
    103 const __u32 tail = atomic_read(&r->prod_tail); in pvrdma_idx_ring_has_data()
    104 const __u32 head = atomic_read(&r->cons_head); in pvrdma_idx_ring_has_data()
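pvrdma keeps the producer and consumer indices of its shared rings in atomics and derives "has space" / "has data" from two plain atomic_read() calls (the real helpers additionally validate the indices and use a wrap bit to tell a full ring from an empty one). A simplified single-producer/single-consumer sketch of that bookkeeping, with free-running indices instead of pvrdma's wrapped ones:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

/* Simplified SPSC ring bookkeeping: indices grow without bound and are
 * reduced modulo the ring size only when a slot is actually addressed. */
struct ring {
	atomic_uint prod_tail;	/* next slot the producer will fill */
	atomic_uint cons_head;	/* next slot the consumer will drain */
	unsigned int size;	/* number of slots */
};

static bool ring_has_space(struct ring *r)
{
	uint32_t tail = atomic_load(&r->prod_tail);
	uint32_t head = atomic_load(&r->cons_head);

	/* Unsigned subtraction handles index wrap-around. */
	return (uint32_t)(tail - head) < r->size;
}

static bool ring_has_data(struct ring *r)
{
	return atomic_load(&r->prod_tail) != atomic_load(&r->cons_head);
}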
/linux-6.6.21/drivers/crypto/bcm/util.c
    376 atomic_read(&ipriv->session_count)); in spu_debugfs_read()
    379 atomic_read(&ipriv->stream_count)); in spu_debugfs_read()
    382 atomic_read(&ipriv->setkey_cnt[SPU_OP_CIPHER])); in spu_debugfs_read()
    385 atomic_read(&ipriv->op_counts[SPU_OP_CIPHER])); in spu_debugfs_read()
    388 op_cnt = atomic_read(&ipriv->cipher_cnt[alg][mode]); in spu_debugfs_read()
    399 atomic_read(&ipriv->op_counts[SPU_OP_HASH])); in spu_debugfs_read()
    401 op_cnt = atomic_read(&ipriv->hash_cnt[alg]); in spu_debugfs_read()
    411 atomic_read(&ipriv->setkey_cnt[SPU_OP_HMAC])); in spu_debugfs_read()
    414 atomic_read(&ipriv->op_counts[SPU_OP_HMAC])); in spu_debugfs_read()
    416 op_cnt = atomic_read(&ipriv->hmac_cnt[alg]); in spu_debugfs_read()
    [all …]
/linux-6.6.21/sound/core/seq/seq_lock.c
    16 if (atomic_read(lockp) < 0) { in snd_use_lock_sync_helper()
    17 pr_warn("ALSA: seq_lock: lock trouble [counter = %d] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
    20 while (atomic_read(lockp) > 0) { in snd_use_lock_sync_helper()
    22 pr_warn("ALSA: seq_lock: waiting [%d left] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
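snd_use_lock_sync_helper() drains a use counter: users of the object hold it by incrementing the counter, and the teardown path polls it with atomic_read() until it reaches zero, warning if it appears stuck. A rough user-space analogue (the poll interval and messages are illustrative):

#include <stdatomic.h>
#include <stdio.h>
#include <unistd.h>

/* Readers bump the counter while they use the object; teardown waits
 * for it to drain back to zero. */
static atomic_int use_count;

static void use_lock_sync(void)
{
	int patience = 1000;	/* roughly 5 s at 5 ms per poll */

	if (atomic_load(&use_count) < 0) {
		/* A negative count means an unbalanced release somewhere. */
		fprintf(stderr, "lock trouble [counter = %d]\n",
			atomic_load(&use_count));
		return;
	}
	while (atomic_load(&use_count) > 0) {
		if (--patience == 0) {
			fprintf(stderr, "still waiting [%d left]\n",
				atomic_load(&use_count));
			patience = 1000;
		}
		usleep(5000);	/* the kernel code schedules with a timeout */
	}
}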
/linux-6.6.21/kernel/sched/membarrier.c
    210 atomic_read(&mm->membarrier_state)); in ipi_sync_rq_state()
    242 membarrier_state = atomic_read(&next_mm->membarrier_state); in membarrier_update_current_mm()
    323 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
    330 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
    336 if (!(atomic_read(&mm->membarrier_state) & in membarrier_private_expedited()
    342 (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1)) in membarrier_private_expedited()
    433 int membarrier_state = atomic_read(&mm->membarrier_state); in sync_runqueues_membarrier_state()
    437 if (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1) { in sync_runqueues_membarrier_state()
    495 if (atomic_read(&mm->membarrier_state) & in membarrier_register_global_expedited()
    535 if ((atomic_read(&mm->membarrier_state) & ready_state) == ready_state) in membarrier_register_private_expedited()
    [all …]
/linux-6.6.21/arch/openrisc/kernel/sync-timer.c
    53 while (atomic_read(&count_count_start) != 1) in synchronise_count_master()
    74 while (atomic_read(&count_count_stop) != 1) in synchronise_count_master()
    104 while (atomic_read(&count_count_start) != 2) in synchronise_count_slave()
    114 while (atomic_read(&count_count_stop) != 2) in synchronise_count_slave()
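Master and slave each bump a shared counter and then spin on atomic_read() until everyone has checked in, which aligns the instant at which the cycle counters are sampled. A user-space sketch of that rendezvous with two threads (the kernel code performs it twice, once for start and once for stop; build with cc -pthread):

#include <stdatomic.h>
#include <pthread.h>
#include <stdio.h>

#define NR_PARTIES 2

/* Each participant increments the counter, then spins until every
 * party has arrived, mirroring count_count_start/count_count_stop. */
static atomic_int arrived;

static void rendezvous(void)
{
	atomic_fetch_add(&arrived, 1);
	while (atomic_load(&arrived) != NR_PARTIES)
		;	/* busy-wait, just as the kernel code does */
}

static void *slave(void *arg)
{
	(void)arg;
	rendezvous();
	printf("slave past the barrier\n");
	return NULL;
}

int main(void)
{
	pthread_t t;

	pthread_create(&t, NULL, slave, NULL);
	rendezvous();
	printf("master past the barrier\n");
	pthread_join(t, NULL);
	return 0;
}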
/linux-6.6.21/drivers/s390/scsi/zfcp_erp.c
    88 if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_lun()
    96 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_port()
    111 if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_adapter()
    131 if (atomic_read(&zsdev->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_erp_handle_failed()
    135 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_erp_handle_failed()
    139 if (atomic_read(&port->status) & in zfcp_erp_handle_failed()
    148 if (atomic_read(&adapter->status) & in zfcp_erp_handle_failed()
    173 l_status = atomic_read(&zfcp_sdev->status); in zfcp_erp_required_act()
    176 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
    184 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
    [all …]
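The zfcp status words are atomic_t bit masks, set and cleared elsewhere with atomic OR/AND helpers and tested here with atomic_read(&...->status) & FLAG. A small stand-alone sketch of that pattern (the flag values and structure are invented for illustration):

#include <stdatomic.h>
#include <stdbool.h>

/* Illustrative flags standing in for ZFCP_STATUS_COMMON_ERP_INUSE etc. */
#define STATUS_ERP_INUSE	0x01u
#define STATUS_ERP_FAILED	0x02u

struct port {
	atomic_uint status;	/* bit mask of status flags */
};

static bool port_erp_inuse(struct port *p)
{
	/* A plain load and mask, like atomic_read(&port->status) & FLAG. */
	return atomic_load(&p->status) & STATUS_ERP_INUSE;
}

static void port_mark_failed(struct port *p)
{
	atomic_fetch_or(&p->status, STATUS_ERP_FAILED);
}

static void port_clear_failed(struct port *p)
{
	atomic_fetch_and(&p->status, ~STATUS_ERP_FAILED);
}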
/linux-6.6.21/include/asm-generic/spinlock.h
    55 u32 old = atomic_read(lock); in arch_spin_trylock()
    66 u32 val = atomic_read(lock); in arch_spin_unlock()
    73 u32 val = atomic_read(lock); in arch_spin_is_locked()
    80 u32 val = atomic_read(lock); in arch_spin_is_contended()
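The asm-generic spinlock is a ticket lock packed into one 32-bit atomic: the high half is the next ticket to hand out, the low half is the ticket currently being served, so arch_spin_is_locked() and friends need only an atomic_read() and a comparison of the two halves. A stand-alone user-space sketch of the same layout with C11 atomics (the unlock below uses a compare-and-swap loop where the kernel does a release store to the low half-word):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

/* Ticket lock in one 32-bit word: next ticket in the high half,
 * currently served ticket in the low half. */
typedef struct { atomic_uint val; } ticket_lock_t;

static void ticket_lock(ticket_lock_t *l)
{
	uint32_t old = atomic_fetch_add(&l->val, 1u << 16);
	uint16_t ticket = (uint16_t)(old >> 16);

	while ((uint16_t)atomic_load(&l->val) != ticket)
		;	/* spin until our ticket is served */
}

static bool ticket_trylock(ticket_lock_t *l)
{
	uint32_t old = atomic_load(&l->val);

	if ((old >> 16) != (old & 0xffffu))
		return false;	/* a ticket is already outstanding */
	return atomic_compare_exchange_strong(&l->val, &old, old + (1u << 16));
}

static void ticket_unlock(ticket_lock_t *l)
{
	uint32_t old = atomic_load(&l->val);

	/* Bump only the served (low) half; retry if another thread took a
	 * ticket and changed the high half between the load and the CAS. */
	while (!atomic_compare_exchange_weak(&l->val, &old,
			(old & 0xffff0000u) | ((old + 1) & 0xffffu)))
		;
}

static bool ticket_is_locked(ticket_lock_t *l)
{
	uint32_t val = atomic_load(&l->val);

	return (val >> 16) != (val & 0xffffu);
}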
/linux-6.6.21/arch/mips/kernel/sync-r4k.c
    50 while (atomic_read(&count_count_start) != 1) in synchronise_count_master()
    71 while (atomic_read(&count_count_stop) != 1) in synchronise_count_master()
    104 while (atomic_read(&count_count_start) != 2) in synchronise_count_slave()
    114 while (atomic_read(&count_count_stop) != 2) in synchronise_count_slave()
/linux-6.6.21/net/mac80211/led.h
    16 if (!atomic_read(&local->rx_led_active)) in ieee80211_led_rx()
    25 if (!atomic_read(&local->tx_led_active)) in ieee80211_led_tx()
    74 if (atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_tx()
    83 if (atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_rx()
/linux-6.6.21/fs/f2fs/debug.c
    83 si->ext_tree[i] = atomic_read(&eti->total_ext_tree); in update_general_status()
    84 si->zombie_tree[i] = atomic_read(&eti->total_zombie_tree); in update_general_status()
    85 si->ext_node[i] = atomic_read(&eti->total_ext_node); in update_general_status()
    105 si->aw_cnt = atomic_read(&sbi->atomic_files); in update_general_status()
    106 si->max_aw_cnt = atomic_read(&sbi->max_aw_cnt); in update_general_status()
    116 atomic_read(&SM_I(sbi)->fcc_info->issued_flush); in update_general_status()
    118 atomic_read(&SM_I(sbi)->fcc_info->queued_flush); in update_general_status()
    124 atomic_read(&SM_I(sbi)->dcc_info->issued_discard); in update_general_status()
    126 atomic_read(&SM_I(sbi)->dcc_info->queued_discard); in update_general_status()
    128 atomic_read(&SM_I(sbi)->dcc_info->discard_cmd_cnt); in update_general_status()
    [all …]
/linux-6.6.21/fs/xfs/xfs_trans_buf.c
    149 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_get_buf_map()
    185 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_getsb()
    282 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_read_buf_map()
    360 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_brelse()
    412 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bhold()
    433 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bhold_release()
    461 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_dirty_buf()
    548 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_binval()
    602 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_inode_buf()
    627 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_stale_inode_buf()
    [all …]
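All of these are the same defensive check: before the transaction code touches a buf log item it asserts that the item's reference count is still positive, so a refcounting bug fires immediately in a debug build. Reduced to a stand-alone sketch:

#include <assert.h>
#include <stdatomic.h>

struct buf_log_item {
	atomic_int refcount;	/* stands in for bip->bli_refcount */
};

/* Callers must already hold a reference; in a debug build this catches a
 * use-after-release the moment it happens. */
static void assert_item_held(struct buf_log_item *bip)
{
	assert(atomic_load(&bip->refcount) > 0);
}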
/linux-6.6.21/net/batman-adv/gateway_common.c
    31 gw_mode = atomic_read(&bat_priv->gw.mode); in batadv_gw_tvlv_container_update()
    39 down = atomic_read(&bat_priv->gw.bandwidth_down); in batadv_gw_tvlv_container_update()
    40 up = atomic_read(&bat_priv->gw.bandwidth_up); in batadv_gw_tvlv_container_update()
    86 atomic_read(&bat_priv->gw.mode) == BATADV_GW_MODE_CLIENT) in batadv_gw_tvlv_ogm_handler_v1()
/linux-6.6.21/include/linux/maple_tree.h
    731 atomic_read(&maple_tree_tests_passed), \
    732 atomic_read(&maple_tree_tests_run)); \
    747 atomic_read(&maple_tree_tests_passed), \
    748 atomic_read(&maple_tree_tests_run)); \
    764 atomic_read(&maple_tree_tests_passed), \
    765 atomic_read(&maple_tree_tests_run)); \
    780 atomic_read(&maple_tree_tests_passed), \
    781 atomic_read(&maple_tree_tests_run)); \
    798 atomic_read(&maple_tree_tests_passed), \
    799 atomic_read(&maple_tree_tests_run)); \
    [all …]
/linux-6.6.21/drivers/net/ethernet/aquantia/atlantic/aq_utils.h
    19 flags_old = atomic_read(flags); in aq_utils_obj_set()
    29 flags_old = atomic_read(flags); in aq_utils_obj_clear()
    36 return atomic_read(flags) & mask; in aq_utils_obj_test()
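aq_utils_obj_set() and aq_utils_obj_clear() are read-modify-write loops: read the flag word with atomic_read(), OR in (or mask out) the bits, and publish the result with a compare-and-swap, retrying if another writer slipped in between; aq_utils_obj_test() is a plain read plus a mask. A stand-alone C11 sketch of the same three helpers:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

/* Set bits in the flag word; the CAS fails and reloads 'old' whenever
 * another writer changed the word between the read and the exchange. */
static void obj_set(atomic_uint *flags, uint32_t mask)
{
	uint32_t old = atomic_load(flags);

	while (!atomic_compare_exchange_weak(flags, &old, old | mask))
		;	/* 'old' is refreshed by the failed exchange */
}

static void obj_clear(atomic_uint *flags, uint32_t mask)
{
	uint32_t old = atomic_load(flags);

	while (!atomic_compare_exchange_weak(flags, &old, old & ~mask))
		;
}

static bool obj_test(atomic_uint *flags, uint32_t mask)
{
	return atomic_load(flags) & mask;
}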
/linux-6.6.21/arch/x86/platform/uv/uv_nmi.c
    521 nmi = atomic_read(&hub_nmi->in_nmi); in uv_check_nmi()
    549 nmi = atomic_read(&hub_nmi->in_nmi); in uv_check_nmi()
    559 nmi = atomic_read(&uv_in_nmi); in uv_check_nmi()
    581 if (cpu == atomic_read(&hub_nmi->cpu_owner)) { in uv_clear_nmi()
    699 atomic_read(&uv_nmi_cpus_in_nmi), num_online_cpus()); in uv_nmi_wait()
    767 while (atomic_read(&uv_nmi_cpus_in_nmi) > 0) in uv_nmi_sync_exit()
    771 while (atomic_read(&uv_nmi_slave_continue)) in uv_nmi_sync_exit()
    780 int in = atomic_read(&uv_nmi_cpus_in_nmi); in uv_nmi_action_health()
    786 while (!atomic_read(&uv_nmi_slave_continue)) in uv_nmi_action_health()
    802 atomic_read(&uv_nmi_cpus_in_nmi), cpu); in uv_nmi_dump_state()
    [all …]