/linux-5.19.10/drivers/net/ethernet/mscc/ |
D | ocelot_fdma.c |
    61   if (rx_ring->next_to_use >= rx_ring->next_to_clean)  in ocelot_fdma_rx_ring_free()
    63   (rx_ring->next_to_use - rx_ring->next_to_clean) - 1;  in ocelot_fdma_rx_ring_free()
    65   return rx_ring->next_to_clean - rx_ring->next_to_use - 1;  in ocelot_fdma_rx_ring_free()
    72   if (tx_ring->next_to_use >= tx_ring->next_to_clean)  in ocelot_fdma_tx_ring_free()
    74   (tx_ring->next_to_use - tx_ring->next_to_clean) - 1;  in ocelot_fdma_tx_ring_free()
    76   return tx_ring->next_to_clean - tx_ring->next_to_use - 1;  in ocelot_fdma_tx_ring_free()
    83   return tx_ring->next_to_clean == tx_ring->next_to_use;  in ocelot_fdma_tx_ring_empty()
    158  idx = rx_ring->next_to_use;  in ocelot_fdma_alloc_rx_buffs()
    181  rx_ring->next_to_use = idx;  in ocelot_fdma_alloc_rx_buffs()
    229  idx = ocelot_fdma_idx_prev(rx_ring->next_to_use,  in ocelot_fdma_rx_set_llp()
    [all …]
|
D | ocelot_fdma.h |
    98   u16 next_to_use;  member
    130  u16 next_to_use;  member
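The ocelot_fdma_rx_ring_free()/ocelot_fdma_tx_ring_free() hits above show the usual two-index free-space computation: one slot is always left empty so that equal indices mean "empty" rather than "full". A minimal user-space sketch of that arithmetic, assuming a fixed ring length; demo_ring, RING_SIZE and ring_free() are invented names, not driver code:

#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 128u          /* assumed ring length for the demo */

struct demo_ring {
    uint16_t next_to_use;   /* producer: next slot software will fill */
    uint16_t next_to_clean; /* consumer: next slot software will reclaim */
};

/* Free slots; one slot stays empty so use == clean means "empty", not "full". */
static unsigned int ring_free(const struct demo_ring *r)
{
    if (r->next_to_use >= r->next_to_clean)
        return RING_SIZE - (r->next_to_use - r->next_to_clean) - 1;

    return r->next_to_clean - r->next_to_use - 1;
}

int main(void)
{
    struct demo_ring r = { .next_to_use = 10, .next_to_clean = 4 };

    printf("free = %u\n", ring_free(&r)); /* 128 - 6 - 1 = 121 */
    r.next_to_use = 3;
    printf("free = %u\n", ring_free(&r)); /* 4 - 3 - 1 = 0: ring is full */
    return 0;
}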
|
/linux-5.19.10/drivers/net/ethernet/intel/i40e/ |
D | i40e_xsk.c |
    198  u16 ntu = rx_ring->next_to_use;  in i40e_alloc_rx_buffers_zc()
    393  cleaned_count = (next_to_clean - rx_ring->next_to_use - 1) & count_mask;  in i40e_clean_rx_irq_zc()
    402  if (failure || next_to_clean == rx_ring->next_to_use)  in i40e_clean_rx_irq_zc()
    421  tx_desc = I40E_TX_DESC(xdp_ring, xdp_ring->next_to_use++);  in i40e_xmit_pkt()
    432  u16 ntu = xdp_ring->next_to_use;  in i40e_xmit_pkt_batch()
    450  xdp_ring->next_to_use = ntu;  in i40e_xmit_pkt_batch()
    468  u16 ntu = xdp_ring->next_to_use ? xdp_ring->next_to_use - 1 : xdp_ring->count - 1;  in i40e_set_rs_bit()
    492  if (xdp_ring->next_to_use + nb_pkts >= xdp_ring->count) {  in i40e_xmit_zc()
    493  nb_processed = xdp_ring->count - xdp_ring->next_to_use;  in i40e_xmit_zc()
    495  xdp_ring->next_to_use = 0;  in i40e_xmit_zc()
    [all …]
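The i40e_xmit_zc() lines above (492, 493, 495) split a transmit batch at the ring wrap point: fill descriptors up to the physical end of the ring, reset next_to_use to 0, then fill the remainder. A standalone sketch of that control flow; xmit_ring, fill_one() and xmit_batch() are stand-ins, not driver code:

#include <stdint.h>
#include <stdio.h>

struct xmit_ring {
    uint16_t next_to_use;
    uint16_t count;
};

static void fill_one(struct xmit_ring *ring, unsigned int batch_idx)
{
    /* A real driver would write a TX descriptor at next_to_use here. */
    printf("desc[%u] <- batch item %u\n", (unsigned)ring->next_to_use, batch_idx);
    ring->next_to_use++;
}

static void xmit_batch(struct xmit_ring *ring, unsigned int nb_pkts)
{
    unsigned int nb_processed = 0;
    unsigned int i;

    if (ring->next_to_use + nb_pkts >= ring->count) {
        /* First chunk: up to the physical end of the ring. */
        nb_processed = ring->count - ring->next_to_use;
        for (i = 0; i < nb_processed; i++)
            fill_one(ring, i);
        ring->next_to_use = 0;
    }

    /* Second chunk, or the whole batch if no wrap was needed. */
    for (i = nb_processed; i < nb_pkts; i++)
        fill_one(ring, i);
}

int main(void)
{
    struct xmit_ring ring = { .next_to_use = 6, .count = 8 };

    xmit_batch(&ring, 4); /* fills 6, 7, then wraps and fills 0, 1 */
    return 0;
}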
|
D | i40e_adminq.c |
    354  hw->aq.asq.next_to_use = 0;  in i40e_init_asq()
    413  hw->aq.arq.next_to_use = 0;  in i40e_init_arq()
    767  return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use;  in i40e_asq_done()
    816  details = I40E_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use);  in i40e_asq_send_command_atomic_exec()
    871  desc_on_ring = I40E_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);  in i40e_asq_send_command_atomic_exec()
    878  dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]);  in i40e_asq_send_command_atomic_exec()
    896  (hw->aq.asq.next_to_use)++;  in i40e_asq_send_command_atomic_exec()
    897  if (hw->aq.asq.next_to_use == hw->aq.asq.count)  in i40e_asq_send_command_atomic_exec()
    898  hw->aq.asq.next_to_use = 0;  in i40e_asq_send_command_atomic_exec()
    900  wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use);  in i40e_asq_send_command_atomic_exec()
    [all …]
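The i40e_asq_send_command_atomic_exec()/i40e_asq_done() hits above outline the admin-send-queue pattern: place the command at next_to_use, advance with wraparound, ring the tail doorbell, and treat the queue as drained once the hardware head register equals next_to_use. A condensed user-space sketch under those assumptions; the struct layout and the register stand-ins are invented, not the driver's types:

#include <stdint.h>
#include <stdio.h>

#define ASQ_LEN 32u

struct aq_desc { uint16_t opcode; uint16_t flags; };

struct asq {
    struct aq_desc ring[ASQ_LEN];
    uint16_t next_to_use;
    uint16_t count;
    uint16_t head_reg;  /* stand-in for the hardware head register */
    uint16_t tail_reg;  /* stand-in for the hardware tail register */
};

static void wr32_tail(struct asq *q, uint16_t val) { q->tail_reg = val; }
static uint16_t rd32_head(const struct asq *q)     { return q->head_reg; }

/* Queue is fully drained once hardware's head has caught up to next_to_use. */
static int asq_done(const struct asq *q)
{
    return rd32_head(q) == q->next_to_use;
}

static void asq_send(struct asq *q, const struct aq_desc *cmd)
{
    /* Place the command on the slot the producer index points at. */
    q->ring[q->next_to_use] = *cmd;

    /* Advance the producer index, wrapping at the end of the ring. */
    q->next_to_use++;
    if (q->next_to_use == q->count)
        q->next_to_use = 0;

    /* Tell hardware there is work: tail doorbell = new producer index. */
    wr32_tail(q, q->next_to_use);
}

int main(void)
{
    struct asq q = { .count = ASQ_LEN };
    struct aq_desc cmd = { .opcode = 0x0001 };

    asq_send(&q, &cmd);
    printf("tail=%u done=%d\n", (unsigned)q.tail_reg, asq_done(&q));
    q.head_reg = q.tail_reg;    /* pretend hardware consumed the command */
    printf("done=%d\n", asq_done(&q));
    return 0;
}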
|
D | i40e_txrx.c |
    31    i = tx_ring->next_to_use;  in i40e_fdir()
    35    tx_ring->next_to_use = (i < tx_ring->count) ? i : 0;  in i40e_fdir()
    120   i = tx_ring->next_to_use;  in i40e_program_fdir_filter()
    125   i = tx_ring->next_to_use;  in i40e_program_fdir_filter()
    129   tx_ring->next_to_use = ((i + 1) < tx_ring->count) ? i + 1 : 0;  in i40e_program_fdir_filter()
    154   writel(tx_ring->next_to_use, tx_ring->tail);  in i40e_program_fdir_filter()
    813   tx_ring->next_to_use = 0;  in i40e_clean_tx_ring()
    859   tail = ring->next_to_use;  in i40e_get_tx_pending()
    1450  tx_ring->next_to_use = 0;  in i40e_setup_tx_descriptors()
    1535  rx_ring->next_to_use = 0;  in i40e_clean_rx_ring()
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/intel/ixgbe/ |
D | ixgbe_xsk.c |
    155  u16 i = rx_ring->next_to_use;  in ixgbe_alloc_rx_buffers_zc()
    198  if (rx_ring->next_to_use != i) {  in ixgbe_alloc_rx_buffers_zc()
    199  rx_ring->next_to_use = i;  in ixgbe_alloc_rx_buffers_zc()
    370  if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)  in ixgbe_clean_rx_irq_zc()
    421  tx_bi = &xdp_ring->tx_buffer_info[xdp_ring->next_to_use];  in ixgbe_xmit_zc()
    426  tx_desc = IXGBE_TX_DESC(xdp_ring, xdp_ring->next_to_use);  in ixgbe_xmit_zc()
    438  xdp_ring->next_to_use++;  in ixgbe_xmit_zc()
    439  if (xdp_ring->next_to_use == xdp_ring->count)  in ixgbe_xmit_zc()
    440  xdp_ring->next_to_use = 0;  in ixgbe_xmit_zc()
    464  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_clean_xdp_tx_irq()
    [all …]
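ixgbe_alloc_rx_buffers_zc() above walks forward from next_to_use, wraps at the end of the ring, and publishes the new index only when it actually moved. A sketch of that refill loop with invented types; writel_stub() stands in for the MMIO tail write, which in the driver is preceded by a write barrier:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RX_RING_LEN 16u

struct rx_ring {
    bool     populated[RX_RING_LEN]; /* stands in for real descriptors */
    uint16_t next_to_use;
    uint16_t count;
    uint16_t tail;                   /* stand-in for the device tail register */
};

static void writel_stub(uint16_t val, uint16_t *reg)
{
    /* The driver orders descriptor writes before this store with a barrier. */
    *reg = val;
}

static bool alloc_buf(struct rx_ring *ring, uint16_t idx)
{
    ring->populated[idx] = true;     /* a real driver maps a buffer here */
    return true;
}

static void alloc_rx_buffers(struct rx_ring *ring, uint16_t cleaned_count)
{
    uint16_t i = ring->next_to_use;

    while (cleaned_count--) {
        if (!alloc_buf(ring, i))
            break;                   /* out of buffers: stop early */

        i++;
        if (i == ring->count)
            i = 0;                   /* wrap the producer index */
    }

    if (ring->next_to_use != i) {
        ring->next_to_use = i;
        writel_stub(i, &ring->tail); /* publish only when the index moved */
    }
}

int main(void)
{
    struct rx_ring ring = { .count = RX_RING_LEN, .next_to_use = 14 };

    alloc_rx_buffers(&ring, 4);      /* fills 14, 15, 0, 1 */
    printf("next_to_use=%u tail=%u\n",
           (unsigned)ring.next_to_use, (unsigned)ring.tail);
    return 0;
}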
|
/linux-5.19.10/drivers/net/ethernet/hisilicon/hns3/hns3_common/ |
D | hclge_comm_cmd.c |
    260  int ntu = ring->next_to_use;  in hclge_comm_ring_space()
    273  desc_to_use = &hw->cmq.csq.desc[hw->cmq.csq.next_to_use];  in hclge_comm_cmd_copy_desc()
    275  (hw->cmq.csq.next_to_use)++;  in hclge_comm_cmd_copy_desc()
    276  if (hw->cmq.csq.next_to_use >= hw->cmq.csq.desc_num)  in hclge_comm_cmd_copy_desc()
    277  hw->cmq.csq.next_to_use = 0;  in hclge_comm_cmd_copy_desc()
    286  int ntu = ring->next_to_use;  in hclge_comm_is_valid_csq_clean_head()
    305  head, csq->next_to_use, csq->next_to_clean);  in hclge_comm_cmd_csq_clean()
    322  return head == hw->cmq.csq.next_to_use;  in hclge_comm_cmd_csq_done()
    455  ntc = hw->cmq.csq.next_to_use;  in hclge_comm_cmd_send()
    461  hw->cmq.csq.next_to_use);  in hclge_comm_cmd_send()
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/intel/ice/ |
D | ice_controlq.h |
    18   ((u16)((((R)->next_to_clean > (R)->next_to_use) ? 0 : (R)->count) + \
    19   (R)->next_to_clean - (R)->next_to_use - 1))
    55   u16 next_to_use;  member
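The macro at lines 18-19 above is a branch-light form of the same free-space computation: add count only when the producer index has wrapped past the consumer, then subtract the in-flight span and the one permanently reserved slot. A small exhaustive check, with invented names, that it matches the if/else form used in other drivers in this listing:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define COUNT 64

static int desc_unused_branch(uint16_t ntc, uint16_t ntu)
{
    if (ntu >= ntc)
        return COUNT - (ntu - ntc) - 1;
    return ntc - ntu - 1;
}

static int desc_unused_macro_form(uint16_t ntc, uint16_t ntu)
{
    /* Same shape as the header macro: ((ntc > ntu) ? 0 : count) + ntc - ntu - 1 */
    return ((ntc > ntu) ? 0 : COUNT) + ntc - ntu - 1;
}

int main(void)
{
    for (uint16_t ntc = 0; ntc < COUNT; ntc++)
        for (uint16_t ntu = 0; ntu < COUNT; ntu++)
            assert(desc_unused_branch(ntc, ntu) ==
                   desc_unused_macro_form(ntc, ntu));

    printf("both forms agree for all %d x %d index pairs\n", COUNT, COUNT);
    return 0;
}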
|
D | ice_xsk.c |
    489  u16 ntu = rx_ring->next_to_use;  in __ice_alloc_rx_bufs_zc()
    518  if (rx_ring->next_to_use != ntu)  in __ice_alloc_rx_bufs_zc()
    693  if (unlikely(rx_ring->next_to_clean == rx_ring->next_to_use))  in ice_clean_rx_irq_zc()
    764  if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)  in ice_clean_rx_irq_zc()
    871  tx_desc = ICE_TX_DESC(xdp_ring, xdp_ring->next_to_use++);  in ice_xmit_pkt()
    889  u16 ntu = xdp_ring->next_to_use;  in ice_xmit_pkt_batch()
    907  xdp_ring->next_to_use = ntu;  in ice_xmit_pkt_batch()
    909  if (xdp_ring->next_to_use > xdp_ring->next_rs) {  in ice_xmit_pkt_batch()
    937  if (xdp_ring->next_to_use > xdp_ring->next_rs) {  in ice_fill_tx_hw_ring()
    969  if (xdp_ring->next_to_use + nb_pkts >= xdp_ring->count) {  in ice_xmit_zc()
    [all …]
|
D | ice_txrx_lib.c |
    17   u16 prev_ntu = rx_ring->next_to_use & ~0x7;  in ice_release_rx_desc()
    19   rx_ring->next_to_use = val;  in ice_release_rx_desc()
    279  u16 i = xdp_ring->next_to_use;  in ice_xmit_xdp_ring()
    319  xdp_ring->next_to_use = i;  in ice_xmit_xdp_ring()
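The `& ~0x7` at line 17 above masks next_to_use down to a multiple of 8; the apparent intent (an assumption here, not verified from this listing) is that the tail register is only worth bumping when the index crosses an 8-descriptor boundary. A sketch under that assumption, with an invented ring type and a plain variable standing in for the MMIO tail:

#include <stdint.h>
#include <stdio.h>

struct rx_ring {
    uint16_t next_to_use;
    uint16_t tail;          /* stand-in for the device tail register */
};

static void release_rx_desc(struct rx_ring *ring, uint16_t val)
{
    uint16_t prev_ntu = ring->next_to_use & ~0x7; /* previous 8-aligned mark */

    ring->next_to_use = val;

    val &= ~0x7;            /* round the new index down to a multiple of 8 */
    if (prev_ntu != val) {
        /* In the driver a write barrier precedes the MMIO store. */
        ring->tail = val;
    }
}

int main(void)
{
    struct rx_ring ring = { .next_to_use = 3, .tail = 0 };

    release_rx_desc(&ring, 6);   /* still below 8: tail untouched */
    printf("ntu=%u tail=%u\n", (unsigned)ring.next_to_use, (unsigned)ring.tail);

    release_rx_desc(&ring, 11);  /* crossed 8: tail becomes 8 */
    printf("ntu=%u tail=%u\n", (unsigned)ring.next_to_use, (unsigned)ring.tail);
    return 0;
}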
|
D | ice_txrx.h |
    110  (u16)((((R)->next_to_clean > (R)->next_to_use) ? 0 : (R)->count) + \
    111  (R)->next_to_clean - (R)->next_to_use - 1)
    279  u16 next_to_use;  member
    318  u16 next_to_use;  member
|
D | ice_controlq.c |
    378   cq->sq.next_to_use = 0;  in ice_init_sq()
    438   cq->rq.next_to_use = 0;  in ice_init_rq()
    946   return rd32(hw, cq->sq.head) == cq->sq.next_to_use;  in ice_sq_done()
    1014  details = ICE_CTL_Q_DETAILS(cq->sq, cq->sq.next_to_use);  in ice_sq_send_cmd()
    1032  desc_on_ring = ICE_CTL_Q_DESC(cq->sq, cq->sq.next_to_use);  in ice_sq_send_cmd()
    1039  dma_buf = &cq->sq.r.sq_bi[cq->sq.next_to_use];  in ice_sq_send_cmd()
    1058  (cq->sq.next_to_use)++;  in ice_sq_send_cmd()
    1059  if (cq->sq.next_to_use == cq->sq.count)  in ice_sq_send_cmd()
    1060  cq->sq.next_to_use = 0;  in ice_sq_send_cmd()
    1061  wr32(hw, cq->sq.tail, cq->sq.next_to_use);  in ice_sq_send_cmd()
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/intel/ixgb/ |
D | ixgb.h |
    93   unsigned int next_to_use;  member
    101  ((((R)->next_to_clean > (R)->next_to_use) ? 0 : (R)->count) + \
    102  (R)->next_to_clean - (R)->next_to_use - 1)
|
/linux-5.19.10/drivers/net/ethernet/freescale/enetc/ |
D | enetc_cbdr.c |
    24   cbdr->next_to_use = 0;  in enetc_setup_cbdr()
    41   enetc_wr_reg(cbdr->cir, cbdr->next_to_use);  in enetc_setup_cbdr()
    85   return (r->next_to_clean - r->next_to_use - 1 + r->bd_count) %  in enetc_cbd_unused()
    102  i = ring->next_to_use;  in enetc_send_cmd()
    109  ring->next_to_use = i;  in enetc_send_cmd()
|
D | enetc.h |
    99   int next_to_use;  member
    133  if (bdr->next_to_clean > bdr->next_to_use)  in enetc_bd_unused()
    134  return bdr->next_to_clean - bdr->next_to_use - 1;  in enetc_bd_unused()
    136  return bdr->bd_count + bdr->next_to_clean - bdr->next_to_use - 1;  in enetc_bd_unused()
    156  int next_to_use;  member
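enetc_cbd_unused() above uses a modulo where enetc_bd_unused() uses a branch; both leave one descriptor unused. A quick exhaustive check, with names invented here, that the two forms agree for every index pair:

#include <assert.h>
#include <stdio.h>

#define BD_COUNT 64

static int bd_unused_branch(int ntc, int ntu)
{
    if (ntc > ntu)
        return ntc - ntu - 1;
    return BD_COUNT + ntc - ntu - 1;
}

static int bd_unused_modulo(int ntc, int ntu)
{
    /* Adding BD_COUNT keeps the dividend non-negative before the modulo. */
    return (ntc - ntu - 1 + BD_COUNT) % BD_COUNT;
}

int main(void)
{
    for (int ntc = 0; ntc < BD_COUNT; ntc++)
        for (int ntu = 0; ntu < BD_COUNT; ntu++)
            assert(bd_unused_branch(ntc, ntu) == bd_unused_modulo(ntc, ntu));

    printf("branch and modulo forms agree\n");
    return 0;
}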
|
/linux-5.19.10/drivers/net/ethernet/hisilicon/hns3/hns3vf/ |
D | hclgevf_mbx.c |
    159  return tail == hw->hw.cmq.crq.next_to_use;  in hclgevf_cmd_crq_empty()
    234  desc = &crq->desc[crq->next_to_use];  in hclgevf_mbx_handler()
    237  flag = le16_to_cpu(crq->desc[crq->next_to_use].flag);  in hclgevf_mbx_handler()
    245  crq->desc[crq->next_to_use].flag = 0;  in hclgevf_mbx_handler()
    275  crq->desc[crq->next_to_use].flag = 0;  in hclgevf_mbx_handler()
    281  crq->next_to_use);  in hclgevf_mbx_handler()
|
/linux-5.19.10/drivers/net/ethernet/intel/igc/ |
D | igc_dump.c |
    142  n, tx_ring->next_to_use, tx_ring->next_to_clean,  in igc_rings_dump()
    181  if (i == tx_ring->next_to_use &&  in igc_rings_dump()
    184  else if (i == tx_ring->next_to_use)  in igc_rings_dump()
    215  netdev_info(netdev, "%5d %5X %5X\n", n, rx_ring->next_to_use,  in igc_rings_dump()
    264  if (i == rx_ring->next_to_use)  in igc_rings_dump()
|
/linux-5.19.10/drivers/net/ethernet/atheros/atlx/ |
D | atl1.c |
    1112  atomic_set(&tpd_ring->next_to_use, 0);  in atl1_init_ring_ptrs()
    1116  atomic_set(&rfd_ring->next_to_use, 0);  in atl1_init_ring_ptrs()
    1118  rrd_ring->next_to_use = 0;  in atl1_init_ring_ptrs()
    1156  atomic_set(&rfd_ring->next_to_use, 0);  in atl1_clean_rx_ring()
    1158  rrd_ring->next_to_use = 0;  in atl1_clean_rx_ring()
    1198  atomic_set(&tpd_ring->next_to_use, 0);  in atl1_clean_tx_ring()
    1497  value = ((atomic_read(&adapter->tpd_ring.next_to_use)  in atl1_configure()
    1501  ((atomic_read(&adapter->rfd_ring.next_to_use)  in atl1_configure()
    1743  tpd_next_to_use = atomic_read(&adapter->tpd_ring.next_to_use);  in atl1_update_mailbox()
    1744  rfd_next_to_use = atomic_read(&adapter->rfd_ring.next_to_use);  in atl1_update_mailbox()
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/intel/iavf/ |
D | iavf_adminq.c |
    342  hw->aq.asq.next_to_use = 0;  in iavf_init_asq()
    407  hw->aq.arq.next_to_use = 0;  in iavf_init_arq()
    627  return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use;  in iavf_asq_done()
    674  details = IAVF_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use);  in iavf_asq_send_command()
    729  desc_on_ring = IAVF_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);  in iavf_asq_send_command()
    736  dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use];  in iavf_asq_send_command()
    754  (hw->aq.asq.next_to_use)++;  in iavf_asq_send_command()
    755  if (hw->aq.asq.next_to_use == hw->aq.asq.count)  in iavf_asq_send_command()
    756  hw->aq.asq.next_to_use = 0;  in iavf_asq_send_command()
    758  wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use);  in iavf_asq_send_command()
    [all …]
|
D | iavf_txrx.c |
    76    tx_ring->next_to_use = 0;  in iavf_clean_tx_ring()
    672   tx_ring->next_to_use = 0;  in iavf_setup_tx_descriptors()
    737   rx_ring->next_to_use = 0;  in iavf_clean_rx_ring()
    793   rx_ring->next_to_use = 0;  in iavf_setup_rx_descriptors()
    809   rx_ring->next_to_use = val;  in iavf_release_rx_desc()
    916   u16 ntu = rx_ring->next_to_use;  in iavf_alloc_rx_buffers()
    957   if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    963   if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    2140  int i = tx_ring->next_to_use;  in iavf_create_tx_ctx()
    2150  tx_ring->next_to_use = (i < tx_ring->count) ? i : 0;  in iavf_create_tx_ctx()
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/hisilicon/hns/ |
D | hns_enet.c |
    41   struct hnae_desc *desc = &ring->desc[ring->next_to_use];  in fill_v2_desc_hw()
    42   struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];  in fill_v2_desc_hw()
    127  ring_ptr_move_fw(ring, next_to_use);  in fill_v2_desc_hw()
    149  struct hnae_desc *desc = &ring->desc[ring->next_to_use];  in fill_desc()
    150  struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];  in fill_desc()
    203  ring_ptr_move_fw(ring, next_to_use);  in fill_desc()
    208  ring_ptr_move_bw(ring, next_to_use);  in unfill_desc()
    317  int size, next_to_use;  in hns_nic_net_xmit_hw() local
    334  next_to_use = ring->next_to_use;  in hns_nic_net_xmit_hw()
    378  while (ring->next_to_use != next_to_use) {  in hns_nic_net_xmit_hw()
    [all …]
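hns_nic_net_xmit_hw() above saves next_to_use before filling a multi-fragment packet and, on failure, walks the index backwards (ring_ptr_move_bw) until it reaches the saved value. A toy version of that fill/unwind pattern; the ring type and the failure injection are invented for the demo:

#include <stdbool.h>
#include <stdio.h>

#define RING_LEN 8

struct tx_demo_ring {
    int desc[RING_LEN]; /* 0 = free, 1 = filled; stands in for real descriptors */
    int next_to_use;
};

static void ptr_move_fw(struct tx_demo_ring *r) { r->next_to_use = (r->next_to_use + 1) % RING_LEN; }
static void ptr_move_bw(struct tx_demo_ring *r) { r->next_to_use = (r->next_to_use - 1 + RING_LEN) % RING_LEN; }

static bool xmit_frags(struct tx_demo_ring *r, int nfrags, int fail_at)
{
    int saved_ntu = r->next_to_use;     /* rollback point */

    for (int i = 0; i < nfrags; i++) {
        if (i == fail_at)
            goto unfill;                /* e.g. a DMA mapping failed */
        r->desc[r->next_to_use] = 1;
        ptr_move_fw(r);
    }
    return true;

unfill:
    /* Undo every descriptor filled for this packet, newest first. */
    while (r->next_to_use != saved_ntu) {
        ptr_move_bw(r);
        r->desc[r->next_to_use] = 0;
    }
    return false;
}

int main(void)
{
    struct tx_demo_ring r = { .next_to_use = 6 };

    if (!xmit_frags(&r, 4, 2))
        printf("rolled back, next_to_use=%d\n", r.next_to_use); /* back to 6 */
    return 0;
}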
|
/linux-5.19.10/drivers/net/ethernet/atheros/atl1e/ |
D | atl1e_main.c |
    751   tx_ring->next_to_use = 0;  in atl1e_init_ring_ptrs()
    1559  u16 next_to_use = 0;  in atl1e_tpd_avail() local
    1563  next_to_use = tx_ring->next_to_use;  in atl1e_tpd_avail()
    1565  return (u16)(next_to_clean > next_to_use) ?  in atl1e_tpd_avail()
    1566  (next_to_clean - next_to_use - 1) :  in atl1e_tpd_avail()
    1567  (tx_ring->count + next_to_clean - next_to_use - 1);  in atl1e_tpd_avail()
    1578  u16 next_to_use = 0;  in atl1e_get_tpd() local
    1580  next_to_use = tx_ring->next_to_use;  in atl1e_get_tpd()
    1581  if (++tx_ring->next_to_use == tx_ring->count)  in atl1e_get_tpd()
    1582  tx_ring->next_to_use = 0;  in atl1e_get_tpd()
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/hisilicon/hns3/ |
D | hns3_enet.c |
    968   ntu = tx_spare->next_to_use;  in hns3_tx_spare_space()
    1091  u32 ntu = tx_spare->next_to_use;  in hns3_tx_spare_alloc()
    1104  tx_spare->next_to_use = ntu + size;  in hns3_tx_spare_alloc()
    1105  if (tx_spare->next_to_use == tx_spare->len)  in hns3_tx_spare_alloc()
    1106  tx_spare->next_to_use = 0;  in hns3_tx_spare_alloc()
    1117  if (len > tx_spare->next_to_use) {  in hns3_tx_spare_rollback()
    1118  len -= tx_spare->next_to_use;  in hns3_tx_spare_rollback()
    1119  tx_spare->next_to_use = tx_spare->len - len;  in hns3_tx_spare_rollback()
    1121  tx_spare->next_to_use -= len;  in hns3_tx_spare_rollback()
    1682  struct hns3_desc *desc = &ring->desc[ring->next_to_use];  in hns3_fill_desc()
    [all …]
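hns3_tx_spare_alloc()/hns3_tx_spare_rollback() above manage a byte-granular circular "spare" buffer under the same next_to_use naming: the offset advances by the allocation size, wraps exactly at len, and rollback has to undo a wrap. A minimal sketch of just that arithmetic; the function names are invented, and the real driver also checks free space against next_to_clean, which is omitted here:

#include <stdio.h>

#define SPARE_LEN 4096u

struct tx_spare {
    unsigned int next_to_use;   /* byte offset of the next free region */
    unsigned int len;           /* total size of the spare buffer */
};

static unsigned int spare_alloc(struct tx_spare *s, unsigned int size)
{
    unsigned int start = s->next_to_use;

    s->next_to_use = start + size;
    if (s->next_to_use == s->len)
        s->next_to_use = 0;     /* exact wrap back to the start */

    return start;               /* caller copies its payload at this offset */
}

static void spare_rollback(struct tx_spare *s, unsigned int len)
{
    /* Undo an allocation, handling the case where it wrapped to 0. */
    if (len > s->next_to_use) {
        len -= s->next_to_use;
        s->next_to_use = s->len - len;
    } else {
        s->next_to_use -= len;
    }
}

int main(void)
{
    struct tx_spare s = { .next_to_use = SPARE_LEN - 256, .len = SPARE_LEN };

    unsigned int off = spare_alloc(&s, 256); /* lands at the end: wraps to 0 */
    printf("alloc at %u, next_to_use=%u\n", off, s.next_to_use);

    spare_rollback(&s, 256);                 /* back to SPARE_LEN - 256 */
    printf("after rollback, next_to_use=%u\n", s.next_to_use);
    return 0;
}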
|
/linux-5.19.10/drivers/net/ethernet/atheros/atl1c/ |
D | atl1c_main.c |
    909   tpd_ring->next_to_use = 0;  in atl1c_clean_tx_ring()
    932   rfd_ring->next_to_use = 0;  in atl1c_clean_rx_ring()
    933   rrd_ring->next_to_use = 0;  in atl1c_clean_rx_ring()
    949   tpd_ring[i].next_to_use = 0;  in atl1c_init_ring_ptrs()
    957   rfd_ring[i].next_to_use = 0;  in atl1c_init_ring_ptrs()
    959   rrd_ring[i].next_to_use = 0;  in atl1c_init_ring_ptrs()
    1818  next_next = rfd_next_to_use = rfd_ring->next_to_use;  in atl1c_alloc_rx_buffer()
    1868  rfd_ring->next_to_use = rfd_next_to_use;  in atl1c_alloc_rx_buffer()
    1870  rfd_ring->next_to_use & MB_RFDX_PROD_IDX_MASK);  in atl1c_alloc_rx_buffer()
    2026  u16 next_to_use = 0;  in atl1c_tpd_avail() local
    [all …]
|
/linux-5.19.10/drivers/net/ethernet/intel/e1000/ |
D | e1000.h |
    153  unsigned int next_to_use;  member
    174  unsigned int next_to_use;  member
    191  unsigned int use = READ_ONCE((R)->next_to_use); \
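The macro line 191 above snapshots next_to_use with READ_ONCE() because one path reads it while another updates it, and the free-space formula uses the value twice. A user-space analogue that uses a C11 relaxed atomic load as a stand-in for READ_ONCE(); the ring type and desc_unused() are illustrative only:

#include <stdatomic.h>
#include <stdio.h>

#define RING_COUNT 256u

struct demo_ring {
    _Atomic unsigned int next_to_use; /* updated by the transmit path */
    unsigned int next_to_clean;       /* owned by the cleanup path */
};

static unsigned int desc_unused(struct demo_ring *r)
{
    /* One snapshot per computation, so both uses below see the same value. */
    unsigned int use = atomic_load_explicit(&r->next_to_use,
                                            memory_order_relaxed);
    unsigned int clean = r->next_to_clean;

    return ((clean > use) ? 0 : RING_COUNT) + clean - use - 1;
}

int main(void)
{
    struct demo_ring r = { .next_to_clean = 10 };

    atomic_store_explicit(&r.next_to_use, 20, memory_order_relaxed);
    printf("unused = %u\n", desc_unused(&r)); /* 256 + 10 - 20 - 1 = 245 */
    return 0;
}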
|