Home
last modified time | relevance | path

Searched refs: TX_DESC_NUM (Results 1 – 9 of 9) sorted by relevance

/linux-5.19.10/drivers/net/ethernet/moxa/
Dmoxart_ether.h57 #define TX_DESC_NUM 64 macro
58 #define TX_DESC_NUM_MASK (TX_DESC_NUM - 1)
311 dma_addr_t tx_mapping[TX_DESC_NUM];
319 unsigned int tx_len[TX_DESC_NUM];
320 struct sk_buff *tx_skb[TX_DESC_NUM];
Dmoxart_ether.c80 TX_REG_DESC_SIZE * TX_DESC_NUM, in moxart_mac_free_memory()
126 for (i = 0; i < TX_DESC_NUM; i++) { in moxart_mac_setup_desc_ring()
288 return CIRC_SPACE(priv->tx_head, priv->tx_tail, TX_DESC_NUM); in moxart_tx_queue_space()
498 TX_DESC_NUM, &priv->tx_base, in moxart_mac_probe()
513 priv->tx_buf_base = kmalloc_array(priv->tx_buf_size, TX_DESC_NUM, in moxart_mac_probe()
/linux-5.19.10/drivers/net/ethernet/sunplus/
Dspl2sw_desc.c40 for (i = 0; i < TX_DESC_NUM; i++) { in spl2sw_tx_descs_clean()
120 (TX_DESC_NUM + MAC_GUARD_DESC_NUM)); in spl2sw_tx_descs_init()
175 desc_size = (TX_DESC_NUM + MAC_GUARD_DESC_NUM) * sizeof(struct spl2sw_mac_desc); in spl2sw_descs_alloc()
190 comm->rx_desc[0] = &comm->tx_desc[TX_DESC_NUM + MAC_GUARD_DESC_NUM]; in spl2sw_descs_alloc()
217 for (i = 0; i < TX_DESC_NUM; i++) in spl2sw_descs_init()
Dspl2sw_mac.c135 writel(comm->desc_dma + sizeof(struct spl2sw_mac_desc) * TX_DESC_NUM, in spl2sw_mac_hw_init()
137 writel(comm->desc_dma + sizeof(struct spl2sw_mac_desc) * (TX_DESC_NUM + in spl2sw_mac_hw_init()
139 writel(comm->desc_dma + sizeof(struct spl2sw_mac_desc) * (TX_DESC_NUM + in spl2sw_mac_hw_init()
Dspl2sw_define.h169 #define TX_DESC_NUM 16 /* # of descriptors in TX queue */ macro
239 struct spl2sw_skb_info tx_temp_skb_info[TX_DESC_NUM];
Dspl2sw_driver.c113 if (tx_pos == (TX_DESC_NUM - 1)) in spl2sw_ethernet_start_xmit()
122 tx_pos = ((tx_pos + 1) == TX_DESC_NUM) ? 0 : tx_pos + 1; in spl2sw_ethernet_start_xmit()
Dspl2sw_int.c184 tx_done_pos = ((tx_done_pos + 1) == TX_DESC_NUM) ? 0 : tx_done_pos + 1; in spl2sw_tx_poll()
/linux-5.19.10/drivers/net/ethernet/hisilicon/
Dhip04_eth.c154 #define TX_DESC_NUM 256 macro
155 #define TX_NEXT(N) (((N) + 1) & (TX_DESC_NUM-1))
228 struct sk_buff *tx_skb[TX_DESC_NUM];
229 dma_addr_t tx_phys[TX_DESC_NUM];
253 return (head - tail) % TX_DESC_NUM; in tx_count()
490 if (unlikely(netif_queue_stopped(ndev)) && (count < (TX_DESC_NUM - 1))) in hip04_tx_reclaim()
516 if (count == (TX_DESC_NUM - 1)) { in hip04_mac_start_xmit()
860 TX_DESC_NUM * sizeof(struct tx_desc), in hip04_alloc_ring()
885 for (i = 0; i < TX_DESC_NUM; i++) in hip04_free_ring()
889 dma_free_coherent(d, TX_DESC_NUM * sizeof(struct tx_desc), in hip04_free_ring()
[all …]
Dhix5hd2_gmac.c173 #define TX_DESC_NUM 1024 macro
251 struct sk_buff *tx_skb[TX_DESC_NUM];
403 hix5hd2_set_desc_depth(priv, RX_DESC_NUM, TX_DESC_NUM); in hix5hd2_hw_init()
602 num = CIRC_CNT(end, start, TX_DESC_NUM); in hix5hd2_xmit_reclaim()
625 pos = dma_ring_incr(pos, TX_DESC_NUM); in hix5hd2_xmit_reclaim()
782 pos = dma_ring_incr(pos, TX_DESC_NUM); in hix5hd2_net_xmit()
812 for (i = 0; i < TX_DESC_NUM; i++) { in hix5hd2_free_dma_desc_rings()
1000 priv->tx_bq.count = TX_DESC_NUM; in hix5hd2_init_hw_desc_queue()
1001 priv->tx_rq.count = TX_DESC_NUM; in hix5hd2_init_hw_desc_queue()
1028 TX_DESC_NUM * sizeof(struct sg_desc), in hix5hd2_init_sg_desc_queue()
[all …]