/linux-6.1.9/drivers/net/ethernet/intel/i40e/ |
D | i40e_xsk.c |
    246  u16 ntu = rx_ring->next_to_use;  in i40e_alloc_rx_buffers_zc() local
    252  rx_desc = I40E_RX_DESC(rx_ring, ntu);  in i40e_alloc_rx_buffers_zc()
    253  xdp = i40e_rx_bi(rx_ring, ntu);  in i40e_alloc_rx_buffers_zc()
    255  nb_buffs = min_t(u16, count, rx_ring->count - ntu);  in i40e_alloc_rx_buffers_zc()
    270  ntu += nb_buffs;  in i40e_alloc_rx_buffers_zc()
    271  if (ntu == rx_ring->count) {  in i40e_alloc_rx_buffers_zc()
    273  ntu = 0;  in i40e_alloc_rx_buffers_zc()
    278  i40e_release_rx_desc(rx_ring, ntu);  in i40e_alloc_rx_buffers_zc()
    486  u16 ntu = xdp_ring->next_to_use;  in i40e_xmit_pkt_batch() local
    495  tx_desc = I40E_TX_DESC(xdp_ring, ntu++);  in i40e_xmit_pkt_batch()
    [all …]
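The i40e_alloc_rx_buffers_zc() hits show the zero-copy refill path advancing next_to_use (ntu) by a whole batch at a time, capping the batch at the end of the ring (line 255) and wrapping back to zero (lines 271-273) before the tail write. A minimal standalone sketch of that index handling, assuming an illustrative ring struct rather than the driver's real types:

#include <stdint.h>

struct rx_ring {                       /* illustrative stand-in */
	uint16_t count;                /* descriptors in the ring */
	uint16_t next_to_use;          /* ntu: next slot to refill */
};

/* Refill up to 'count' buffers without crossing the end of the ring in a
 * single batch; returns how many slots were covered.  The caller may loop
 * for the remainder after the wrap, as the zero-copy path does. */
static uint16_t refill_batch_zc(struct rx_ring *ring, uint16_t count)
{
	uint16_t ntu = ring->next_to_use;
	uint16_t nb_buffs = count;

	if (nb_buffs > (uint16_t)(ring->count - ntu))
		nb_buffs = ring->count - ntu;   /* mirrors the min_t() bound */

	/* ... post descriptors [ntu, ntu + nb_buffs) here ... */

	ntu += nb_buffs;
	if (ntu == ring->count)
		ntu = 0;                        /* wrap to the ring start */

	ring->next_to_use = ntu;   /* the driver then bumps the tail register */
	return nb_buffs;
}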
|
D | i40e_adminq.c |
    1098  u16 ntu;  in i40e_clean_arq_element() local
    1114  ntu = rd32(hw, hw->aq.arq.head) & I40E_PF_ARQH_ARQH_MASK;  in i40e_clean_arq_element()
    1115  if (ntu == ntc) {  in i40e_clean_arq_element()
    1168  hw->aq.arq.next_to_use = ntu;  in i40e_clean_arq_element()
    1174  *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);  in i40e_clean_arq_element()
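Line 1174 is the wrap-aware count of admin receive queue entries still pending between the software next_to_clean (ntc) and the hardware head (ntu). The same arithmetic in isolation, as a sketch with illustrative names:

#include <stdint.h>

/* Entries left to clean in a circular queue of 'count' slots: if the head
 * (ntu) has wrapped behind ntc, add a full ring before subtracting, exactly
 * as in *pending = (ntc > ntu ? count : 0) + (ntu - ntc). */
static uint16_t arq_pending(uint16_t ntc, uint16_t ntu, uint16_t count)
{
	return (uint16_t)((ntc > ntu ? count : 0) + (ntu - ntc));
}

For example, with count = 1024, ntc = 1020 and ntu = 4 the head has wrapped and the helper returns 1024 + (4 - 1020) = 8 pending descriptors.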
|
D | i40e_txrx.c |
    1695  u16 ntu = rx_ring->next_to_use;  in i40e_alloc_rx_buffers() local
    1703  rx_desc = I40E_RX_DESC(rx_ring, ntu);  in i40e_alloc_rx_buffers()
    1704  bi = i40e_rx_bi(rx_ring, ntu);  in i40e_alloc_rx_buffers()
    1723  ntu++;  in i40e_alloc_rx_buffers()
    1724  if (unlikely(ntu == rx_ring->count)) {  in i40e_alloc_rx_buffers()
    1727  ntu = 0;  in i40e_alloc_rx_buffers()
    1736  if (rx_ring->next_to_use != ntu)  in i40e_alloc_rx_buffers()
    1737  i40e_release_rx_desc(rx_ring, ntu);  in i40e_alloc_rx_buffers()
    1742  if (rx_ring->next_to_use != ntu)  in i40e_alloc_rx_buffers()
    1743  i40e_release_rx_desc(rx_ring, ntu);  in i40e_alloc_rx_buffers()
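i40e_alloc_rx_buffers() takes the other approach: it posts one buffer per iteration, wraps ntu when it reaches count, and only touches the tail register (i40e_release_rx_desc()) when the index actually moved (lines 1736-1743). A condensed, self-contained sketch of that loop shape; the buffer mapping itself is elided and all names are illustrative:

#include <stdbool.h>
#include <stdint.h>

struct rx_ring {
	uint16_t count;
	uint16_t next_to_use;
};

/* Stand-in for mapping a page and writing its DMA address into the
 * descriptor at 'ntu'; pretend it always succeeds here. */
static bool post_one_buffer(struct rx_ring *ring, uint16_t ntu)
{
	(void)ring;
	(void)ntu;
	return true;
}

/* Stand-in for i40e_release_rx_desc(): publish 'ntu' to hardware. */
static void release_rx_desc(struct rx_ring *ring, uint16_t ntu)
{
	ring->next_to_use = ntu;
}

static void alloc_rx_buffers(struct rx_ring *ring, uint16_t cleaned_count)
{
	uint16_t ntu = ring->next_to_use;

	while (cleaned_count--) {
		if (!post_one_buffer(ring, ntu))
			break;          /* publish whatever was posted */

		ntu++;
		if (ntu == ring->count)
			ntu = 0;        /* wrap to the start of the ring */
	}

	/* Only bump the tail if the index advanced, matching the
	 * rx_ring->next_to_use != ntu checks above. */
	if (ring->next_to_use != ntu)
		release_rx_desc(ring, ntu);
}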
|
/linux-6.1.9/drivers/net/ethernet/intel/ice/ |
D | ice_xsk.c |
    482  u16 ntu = rx_ring->next_to_use;  in __ice_alloc_rx_bufs_zc() local
    486  rx_desc = ICE_RX_DESC(rx_ring, ntu);  in __ice_alloc_rx_bufs_zc()
    487  xdp = ice_xdp_buf(rx_ring, ntu);  in __ice_alloc_rx_bufs_zc()
    489  if (ntu + count >= rx_ring->count) {  in __ice_alloc_rx_bufs_zc()
    492  rx_ring->count - ntu);  in __ice_alloc_rx_bufs_zc()
    493  if (nb_buffs_extra != rx_ring->count - ntu) {  in __ice_alloc_rx_bufs_zc()
    494  ntu += nb_buffs_extra;  in __ice_alloc_rx_bufs_zc()
    499  ntu = 0;  in __ice_alloc_rx_bufs_zc()
    506  ntu += nb_buffs;  in __ice_alloc_rx_bufs_zc()
    507  if (ntu == rx_ring->count)  in __ice_alloc_rx_bufs_zc()
    [all …]
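__ice_alloc_rx_bufs_zc() handles a refill that would cross the end of the ring by splitting it in two: fill up to rx_ring->count first, reset ntu to 0, then fill the remainder (lines 489-506). A sketch of that two-step split; fill_batch() is an illustrative stand-in for the batched XSK buffer allocation plus descriptor writes:

#include <stdint.h>

struct rx_ring {
	uint16_t count;
	uint16_t next_to_use;
};

/* Pretend every requested buffer is available; the real batch allocator
 * can return fewer, which is why the first leg's result is checked. */
static uint16_t fill_batch(struct rx_ring *ring, uint16_t start, uint16_t n)
{
	(void)ring;
	(void)start;
	return n;
}

static void alloc_rx_bufs_zc(struct rx_ring *ring, uint16_t count)
{
	uint16_t ntu = ring->next_to_use;
	uint16_t nb_buffs_extra, nb_buffs;

	if (ntu + count >= ring->count) {
		/* First leg: only up to the end of the ring. */
		nb_buffs_extra = fill_batch(ring, ntu, ring->count - ntu);
		if (nb_buffs_extra != ring->count - ntu) {
			/* Ran out of buffers: stop where we got to. */
			ntu += nb_buffs_extra;
			goto out;
		}
		count -= nb_buffs_extra;
		ntu = 0;                /* second leg restarts at index 0 */
	}

	nb_buffs = fill_batch(ring, ntu, count);
	ntu += nb_buffs;
	if (ntu == ring->count)
		ntu = 0;
out:
	ring->next_to_use = ntu;        /* then write the tail register */
}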
|
D | ice_controlq.c |
    1165  u16 ntu;  in ice_clean_rq_elem() local
    1180  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);  in ice_clean_rq_elem()
    1182  if (ntu == ntc) {  in ice_clean_rq_elem()
    1229  cq->rq.next_to_use = ntu;  in ice_clean_rq_elem()
    1235  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);  in ice_clean_rq_elem()
    1236  *pending = (u16)((ntc > ntu ? cq->rq.count : 0) + (ntu - ntc));  in ice_clean_rq_elem()
|
D | ice_txrx.c |
    712  u16 ntu = rx_ring->next_to_use;  in ice_alloc_rx_bufs() local
    721  rx_desc = ICE_RX_DESC(rx_ring, ntu);  in ice_alloc_rx_bufs()
    722  bi = &rx_ring->rx_buf[ntu];  in ice_alloc_rx_bufs()
    742  ntu++;  in ice_alloc_rx_bufs()
    743  if (unlikely(ntu == rx_ring->count)) {  in ice_alloc_rx_bufs()
    746  ntu = 0;  in ice_alloc_rx_bufs()
    755  if (rx_ring->next_to_use != ntu)  in ice_alloc_rx_bufs()
    756  ice_release_rx_desc(rx_ring, ntu);  in ice_alloc_rx_bufs()
|
D | ice_main.c |
    1526  u16 ntu;  in ice_ctrlq_pending() local
    1528  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);  in ice_ctrlq_pending()
    1529  return cq->rq.next_to_clean != ntu;  in ice_ctrlq_pending()
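ice_ctrlq_pending() only needs to know whether anything is waiting, so it compares the software next_to_clean against the freshly read hardware head. In isolation, assuming the head has already been read and masked:

#include <stdbool.h>
#include <stdint.h>

/* Work is pending whenever the hardware head ('ntu') has moved past what
 * software has already cleaned ('ntc'). */
static bool ctrlq_pending(uint16_t ntc, uint16_t ntu)
{
	return ntc != ntu;
}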
|
/linux-6.1.9/drivers/net/ethernet/hisilicon/hns3/ |
D | hns3_trace.h |
    73   __field(int, ntu)
    82   __entry->ntu = ring->next_to_use;
    92   __get_str(devname), __entry->index, __entry->ntu,
    104  __field(int, ntu)
    114  __entry->ntu = ring->next_to_use;
    125  __get_str(devname), __entry->index, __entry->ntu,
|
D | hns3_enet.c |
    964   u32 ntc, ntu;  in hns3_tx_spare_space() local
    970   ntu = tx_spare->next_to_use;  in hns3_tx_spare_space()
    972   if (ntc > ntu)  in hns3_tx_spare_space()
    973   return ntc - ntu - 1;  in hns3_tx_spare_space()
    978   return max(ntc, tx_spare->len - ntu) - 1;  in hns3_tx_spare_space()
    1093  u32 ntu = tx_spare->next_to_use;  in hns3_tx_spare_alloc() local
    1101  if (ntu + size > tx_spare->len) {  in hns3_tx_spare_alloc()
    1102  *cb_len += (tx_spare->len - ntu);  in hns3_tx_spare_alloc()
    1103  ntu = 0;  in hns3_tx_spare_alloc()
    1106  tx_spare->next_to_use = ntu + size;  in hns3_tx_spare_alloc()
    [all …]
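hns3_tx_spare_space() treats the TX spare area as a circular byte buffer from which only contiguous regions are handed out: when ntc is ahead of ntu the free space is the single gap between them, otherwise it is the larger of the tail chunk [ntu, len) and the head chunk [0, ntc), with one byte always reserved. hns3_tx_spare_alloc() then wraps to offset 0 when a request would not fit before the end, charging the skipped tail to the caller. A standalone sketch of both helpers, using an illustrative struct that only keeps the fields visible in the hits above:

#include <stdint.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

struct tx_spare {
	uint32_t len;           /* total spare buffer size in bytes */
	uint32_t next_to_use;   /* ntu: producer offset */
	uint32_t next_to_clean; /* ntc: consumer offset */
};

/* Largest contiguous allocation that can still be promised. */
static uint32_t tx_spare_space(const struct tx_spare *s)
{
	uint32_t ntc = s->next_to_clean;
	uint32_t ntu = s->next_to_use;

	if (ntc > ntu)
		return ntc - ntu - 1;

	/* Free space is split in two; only the larger chunk is usable
	 * for a contiguous region, and one byte stays reserved. */
	return MAX(ntc, s->len - ntu) - 1;
}

/* Allocate 'size' contiguous bytes; if they do not fit before the end of
 * the buffer, account the skipped tail in *cb_len and wrap to offset 0. */
static uint32_t tx_spare_alloc(struct tx_spare *s, uint32_t size,
			       uint32_t *cb_len)
{
	uint32_t ntu = s->next_to_use;

	if (ntu + size > s->len) {
		*cb_len += s->len - ntu;
		ntu = 0;
	}

	s->next_to_use = ntu + size;
	return ntu;             /* offset of the region handed out */
}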
|
/linux-6.1.9/drivers/net/ethernet/hisilicon/hns3/hns3_common/ |
D | hclge_comm_cmd.c |
    268  int ntu = ring->next_to_use;  in hclge_comm_ring_space() local
    269  int used = (ntu - ntc + ring->desc_num) % ring->desc_num;  in hclge_comm_ring_space()
    294  int ntu = ring->next_to_use;  in hclge_comm_is_valid_csq_clean_head() local
    296  if (ntu > ntc)  in hclge_comm_is_valid_csq_clean_head()
    297  return head >= ntc && head <= ntu;  in hclge_comm_is_valid_csq_clean_head()
    299  return head >= ntc || head <= ntu;  in hclge_comm_is_valid_csq_clean_head()
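hclge_comm_ring_space() derives free command descriptors from the modulo difference between ntu and ntc, and hclge_comm_is_valid_csq_clean_head() sanity-checks that the head reported by hardware lies between them, flipping the comparison when the ring has wrapped. Both checks are condensed below; the final "minus one reserved slot" in ring_space() is not visible in the hits above and is an assumption of this sketch:

#include <stdbool.h>

struct cmq_ring {               /* illustrative stand-in */
	int desc_num;           /* descriptors in the ring */
	int next_to_use;        /* ntu */
	int next_to_clean;      /* ntc */
};

/* Free slots = ring size minus the in-flight count (computed modulo the
 * ring size) minus one reserved slot (assumed here). */
static int ring_space(const struct cmq_ring *ring)
{
	int ntc = ring->next_to_clean;
	int ntu = ring->next_to_use;
	int used = (ntu - ntc + ring->desc_num) % ring->desc_num;

	return ring->desc_num - used - 1;
}

/* The hardware head is plausible only if it falls between ntc and ntu.
 * When ntu has wrapped behind ntc the valid region itself wraps, so the
 * bounds are combined with || instead of &&. */
static bool is_valid_clean_head(const struct cmq_ring *ring, int head)
{
	int ntc = ring->next_to_clean;
	int ntu = ring->next_to_use;

	if (ntu > ntc)
		return head >= ntc && head <= ntu;

	return head >= ntc || head <= ntu;
}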
|
/linux-6.1.9/drivers/net/ethernet/intel/iavf/ |
D | iavf_adminq.c |
    865  u16 ntu;  in iavf_clean_arq_element() local
    881  ntu = rd32(hw, hw->aq.arq.head) & IAVF_VF_ARQH1_ARQH_MASK;  in iavf_clean_arq_element()
    882  if (ntu == ntc) {  in iavf_clean_arq_element()
    935  hw->aq.arq.next_to_use = ntu;  in iavf_clean_arq_element()
    940  *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);  in iavf_clean_arq_element()
|
D | iavf_txrx.c |
    919  u16 ntu = rx_ring->next_to_use;  in iavf_alloc_rx_buffers() local
    927  rx_desc = IAVF_RX_DESC(rx_ring, ntu);  in iavf_alloc_rx_buffers()
    928  bi = &rx_ring->rx_bi[ntu];  in iavf_alloc_rx_buffers()
    947  ntu++;  in iavf_alloc_rx_buffers()
    948  if (unlikely(ntu == rx_ring->count)) {  in iavf_alloc_rx_buffers()
    951  ntu = 0;  in iavf_alloc_rx_buffers()
    960  if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    961  iavf_release_rx_desc(rx_ring, ntu);  in iavf_alloc_rx_buffers()
    966  if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    967  iavf_release_rx_desc(rx_ring, ntu);  in iavf_alloc_rx_buffers()
|
/linux-6.1.9/drivers/net/ethernet/intel/ixgbe/ |
D | ixgbe_xsk.c |
    464  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_clean_xdp_tx_irq() local
    474  while (ntc != ntu) {  in ixgbe_clean_xdp_tx_irq()
    552  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_xsk_clean_tx_ring() local
    557  while (ntc != ntu) {  in ixgbe_xsk_clean_tx_ring()
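Both ixgbe_clean_xdp_tx_irq() and ixgbe_xsk_clean_tx_ring() walk the TX ring from next_to_clean toward next_to_use, wrapping as they go; the IRQ variant additionally stops at descriptors the hardware has not finished yet, which is elided here. A minimal sketch of the index walk with illustrative names:

#include <stdint.h>

struct tx_ring {
	uint16_t count;
	uint16_t next_to_clean;
	uint16_t next_to_use;
};

/* Stand-in for releasing one descriptor's buffer and bookkeeping. */
static void clean_one_desc(struct tx_ring *ring, uint16_t idx)
{
	(void)ring;
	(void)idx;
}

/* Reclaim everything from ntc up to (but not including) ntu. */
static void clean_tx_ring(struct tx_ring *ring)
{
	uint16_t ntc = ring->next_to_clean;
	uint16_t ntu = ring->next_to_use;

	while (ntc != ntu) {
		clean_one_desc(ring, ntc);
		ntc++;
		if (ntc == ring->count)
			ntc = 0;        /* wrap at the end of the ring */
	}

	ring->next_to_clean = ntc;
}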
|
D | ixgbe.h |
    550  u16 ntu = ring->next_to_use;  in ixgbe_desc_unused() local
    552  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in ixgbe_desc_unused()
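ixgbe_desc_unused() (and the identical ixgbevf and igc helpers below) turn ntc and ntu into a free-descriptor count while keeping one slot permanently unused, so a completely full ring is never mistaken for an empty one. The same expression as a standalone function:

#include <stdint.h>

/* Unused descriptors in a ring of 'count' slots.  When ntc has not wrapped
 * past ntu, a full ring length is added before subtracting; the trailing -1
 * reserves one slot so next_to_use can never catch up to next_to_clean. */
static uint16_t desc_unused(uint16_t ntc, uint16_t ntu, uint16_t count)
{
	return (uint16_t)(((ntc > ntu) ? 0 : count) + ntc - ntu - 1);
}

For instance, with count = 512, ntc = 10 and ntu = 500 this gives 512 + 10 - 500 - 1 = 21 free slots.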
|
/linux-6.1.9/drivers/net/ethernet/intel/ixgbevf/ |
D | ixgbevf.h |
    289  u16 ntu = ring->next_to_use;  in ixgbevf_desc_unused() local
    291  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in ixgbevf_desc_unused()
|
/linux-6.1.9/drivers/net/ethernet/intel/igc/ |
D | igc.h |
    547  u16 ntu = ring->next_to_use;  in igc_desc_unused() local
    549  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in igc_desc_unused()
|
D | igc_main.c |
    2770  u16 ntu = ring->next_to_use;  in igc_xdp_xmit_zc() local
    2794  tx_desc = IGC_TX_DESC(ring, ntu);  in igc_xdp_xmit_zc()
    2799  bi = &ring->tx_buffer_info[ntu];  in igc_xdp_xmit_zc()
    2809  ntu++;  in igc_xdp_xmit_zc()
    2810  if (ntu == ring->count)  in igc_xdp_xmit_zc()
    2811  ntu = 0;  in igc_xdp_xmit_zc()
    2814  ring->next_to_use = ntu;  in igc_xdp_xmit_zc()
|
/linux-6.1.9/drivers/net/ethernet/hisilicon/hns/ |
D | hns_enet.c |
    704  int ntu = ring->next_to_use;  in hns_desc_unused() local
    706  return ((ntc >= ntu) ? 0 : ring->desc_num) + ntc - ntu;  in hns_desc_unused()
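hns_desc_unused() is the same wrap-aware subtraction but without the reserved slot and with >= instead of >: when ntc equals ntu the expression yields zero unused descriptors, whereas the Intel helpers above would report a full ring minus one. In isolation, with illustrative parameter names:

/* Variant without the one-slot reservation: equal indices yield zero. */
static int hns_style_desc_unused(int ntc, int ntu, int desc_num)
{
	return ((ntc >= ntu) ? 0 : desc_num) + ntc - ntu;
}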
|