Searched refs:num_flows (Results 1 – 8 of 8) sorted by relevance
1681 bnxt_hwrm_cfa_flow_stats_get(struct bnxt *bp, int num_flows, in bnxt_hwrm_cfa_flow_stats_get() argument
1697 req->num_flows = cpu_to_le16(num_flows); in bnxt_hwrm_cfa_flow_stats_get()
1698 for (i = 0; i < num_flows; i++) { in bnxt_hwrm_cfa_flow_stats_get()
1714 for (i = 0; i < num_flows; i++) { in bnxt_hwrm_cfa_flow_stats_get()
1757 bnxt_tc_flow_stats_batch_update(struct bnxt *bp, int num_flows, in bnxt_tc_flow_stats_batch_update() argument
1763 rc = bnxt_hwrm_cfa_flow_stats_get(bp, num_flows, stats_batch); in bnxt_tc_flow_stats_batch_update()
1767 for (i = 0; i < num_flows; i++) { in bnxt_tc_flow_stats_batch_update()
1785 int *num_flows) in bnxt_tc_flow_stats_batch_prep() argument
1815 *num_flows = i; in bnxt_tc_flow_stats_batch_prep()
1822 int num_flows, rc; in bnxt_tc_flow_stats_work() local
[all …]
7757 __le16 num_flows; member
28 u32 num_flows; member
509 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
1691 if (!dev->hw->num_flows) in mtk_wed_irq_get()
1721 if (hw->num_flows) { in mtk_wed_flow_add()
1722 hw->num_flows++; in mtk_wed_flow_add()
1734 hw->num_flows++; in mtk_wed_flow_add()
1750 if (--hw->num_flows) in mtk_wed_flow_remove()
764 atomic64_inc(&esw->offloads.num_flows); in mlx5_eswitch_add_offloaded_rule()
850 atomic64_inc(&esw->offloads.num_flows); in mlx5_eswitch_add_fwd_rule()
885 atomic64_dec(&esw->offloads.num_flows); in __mlx5_eswitch_del_rule()
3867 if (atomic64_read(&esw->offloads.num_flows) > 0) { in mlx5_devlink_eswitch_inline_mode_set()
3971 if (atomic64_read(&esw->offloads.num_flows) > 0) { in mlx5_devlink_eswitch_encap_mode_set()
296 atomic64_t num_flows; member
1880 atomic64_set(&esw->offloads.num_flows, 0); in mlx5_eswitch_init()
1125 int num_flows, bool disable) in prueth_reset_rx_chan() argument
1129 for (i = 0; i < num_flows; i++) in prueth_reset_rx_chan()