
Searched refs:core_txq (Results 1 – 12 of 12) sorted by relevance

/linux-6.1.9/drivers/net/ethernet/sfc/siena/
tx.c
  69    netif_tx_stop_queue(txq1->core_txq);   in efx_tx_maybe_stop_queue()
  79    netif_tx_start_queue(txq1->core_txq);   in efx_tx_maybe_stop_queue()
  181   if (__netdev_tx_sent_queue(tx_queue->core_txq, skb_len, xmit_more))   in __efx_siena_enqueue_skb()
  237   HARD_TX_LOCK(efx->net_dev, tx_queue->core_txq, cpu);   in efx_siena_xdp_tx_buffers()
  243   if (netif_tx_queue_stopped(tx_queue->core_txq))   in efx_siena_xdp_tx_buffers()
  288   HARD_TX_UNLOCK(efx->net_dev, tx_queue->core_txq);   in efx_siena_xdp_tx_buffers()
  351   tx_queue->core_txq =   in efx_siena_init_tx_queue_core_txq()
tx_common.c
  189   netdev_tx_reset_queue(tx_queue->core_txq);   in efx_siena_fini_tx_queue()
  258   if (unlikely(netif_tx_queue_stopped(tx_queue->core_txq)) &&   in efx_siena_xmit_done()
  263   netif_tx_wake_queue(tx_queue->core_txq);   in efx_siena_xmit_done()
net_driver.h
  266   struct netdev_queue *core_txq;   (member)
efx_channels.c
  1231  netdev_tx_completed_queue(tx_queue->core_txq,   in efx_process_channel()
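
The siena hits trace the usual pattern for core_txq: the driver resolves the stack-level TX queue once at init time with netdev_get_tx_queue() (tx.c:351) and then stops and restarts that queue as ring space comes and goes (tx.c:69 and 79). Below is a minimal sketch of that pattern, not the driver's code: my_nic, my_tx_queue, queue_index and the low_space flag are hypothetical stand-ins for the driver's own structures and fill-level bookkeeping.

    #include <linux/netdevice.h>

    /* Simplified stand-ins for the driver's efx_nic / efx_tx_queue. */
    struct my_nic {
            struct net_device *net_dev;
    };

    struct my_tx_queue {
            struct my_nic *efx;
            unsigned int queue_index;        /* assumed index of this TX queue */
            struct netdev_queue *core_txq;   /* stack-level queue backing it */
    };

    /* Bind the hardware TX queue to the core netdev_queue, as the
     * efx_siena_init_tx_queue_core_txq() hit at tx.c:351 suggests. */
    static void my_init_tx_queue_core_txq(struct my_tx_queue *tx_queue)
    {
            tx_queue->core_txq = netdev_get_tx_queue(tx_queue->efx->net_dev,
                                                     tx_queue->queue_index);
    }

    /* Pause the queue when descriptors run low, resume it once there is room
     * again (compare tx.c:69 and tx.c:79); the real driver derives this from
     * its own fill level rather than a boolean flag. */
    static void my_tx_maybe_stop_queue(struct my_tx_queue *txq1, bool low_space)
    {
            if (low_space)
                    netif_tx_stop_queue(txq1->core_txq);
            else if (netif_tx_queue_stopped(txq1->core_txq))
                    netif_tx_start_queue(txq1->core_txq);
    }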
/linux-6.1.9/drivers/net/ethernet/sfc/falcon/
tx.c
  133   netif_tx_stop_queue(txq1->core_txq);   in ef4_tx_maybe_stop_queue()
  144   netif_tx_start_queue(txq1->core_txq);   in ef4_tx_maybe_stop_queue()
  319   netdev_tx_sent_queue(tx_queue->core_txq, skb_len);   in ef4_enqueue_skb()
  322   if (!netdev_xmit_more() || netif_xmit_stopped(tx_queue->core_txq)) {   in ef4_enqueue_skb()
  419   tx_queue->core_txq =   in ef4_init_tx_queue_core_txq()
  513   if (unlikely(netif_tx_queue_stopped(tx_queue->core_txq)) &&   in ef4_xmit_done()
  520   netif_tx_wake_queue(tx_queue->core_txq);   in ef4_xmit_done()
  626   netdev_tx_reset_queue(tx_queue->core_txq);   in ef4_fini_tx_queue()
net_driver.h
  217   struct netdev_queue *core_txq;   (member)
efx.c
  258   netdev_tx_completed_queue(tx_queue->core_txq,   in ef4_process_channel()
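
The falcon hits add the byte queue limit (BQL) bookkeeping around core_txq: bytes are reported with netdev_tx_sent_queue() when an skb is queued (tx.c:319), credited back with netdev_tx_completed_queue() from the completion path (efx.c:258), and cleared with netdev_tx_reset_queue() at teardown (tx.c:626), while a stopped queue is woken once completions free space (tx.c:513 and 520). A minimal sketch of that lifecycle follows; the pkts_compl/bytes_compl fields and the have_room flag are assumptions standing in for the driver's own accounting.

    #include <linux/netdevice.h>

    struct my_tx_queue {
            struct netdev_queue *core_txq;
            unsigned int pkts_compl;    /* packets completed since last report */
            unsigned int bytes_compl;   /* bytes completed since last report */
    };

    /* Enqueue side: report the queued bytes to BQL (compare tx.c:319). */
    static void my_enqueue_skb_done(struct my_tx_queue *tx_queue,
                                    unsigned int skb_len)
    {
            netdev_tx_sent_queue(tx_queue->core_txq, skb_len);
    }

    /* Completion side: credit completed work back to BQL and wake the queue
     * if it had been stopped for lack of ring space (compare efx.c:258 and
     * tx.c:513/520). */
    static void my_xmit_done(struct my_tx_queue *tx_queue, bool have_room)
    {
            netdev_tx_completed_queue(tx_queue->core_txq,
                                      tx_queue->pkts_compl,
                                      tx_queue->bytes_compl);
            tx_queue->pkts_compl = 0;
            tx_queue->bytes_compl = 0;

            if (unlikely(netif_tx_queue_stopped(tx_queue->core_txq)) && have_room)
                    netif_tx_wake_queue(tx_queue->core_txq);
    }

    /* Teardown: reset BQL state so a re-initialised queue starts clean
     * (compare tx.c:626). */
    static void my_fini_tx_queue(struct my_tx_queue *tx_queue)
    {
            netdev_tx_reset_queue(tx_queue->core_txq);
    }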
/linux-6.1.9/drivers/net/ethernet/sfc/
tx.c
  85    netif_tx_stop_queue(txq1->core_txq);   in efx_tx_maybe_stop_queue()
  95    netif_tx_start_queue(txq1->core_txq);   in efx_tx_maybe_stop_queue()
  387   if (__netdev_tx_sent_queue(tx_queue->core_txq, skb_len, xmit_more))   in __efx_enqueue_skb()
  450   HARD_TX_LOCK(efx->net_dev, tx_queue->core_txq, cpu);   in efx_xdp_tx_buffers()
  456   if (netif_tx_queue_stopped(tx_queue->core_txq))   in efx_xdp_tx_buffers()
  501   HARD_TX_UNLOCK(efx->net_dev, tx_queue->core_txq);   in efx_xdp_tx_buffers()
  604   tx_queue->core_txq =   in efx_init_tx_queue_core_txq()
ef100_tx.c
  35    tx_queue->core_txq =   in ef100_tx_init()
  412   if (netif_tx_queue_stopped(tx_queue->core_txq) ||   in __ef100_enqueue_skb()
  461   netif_tx_stop_queue(tx_queue->core_txq);   in __ef100_enqueue_skb()
  471   netif_tx_start_queue(tx_queue->core_txq);   in __ef100_enqueue_skb()
  484   __netdev_tx_sent_queue(tx_queue->core_txq, skb->len, xmit_more) ||   in __ef100_enqueue_skb()
tx_common.c
  121   netdev_tx_reset_queue(tx_queue->core_txq);   in efx_fini_tx_queue()
  273   if (unlikely(netif_tx_queue_stopped(tx_queue->core_txq)) &&   in efx_xmit_done()
  278   netif_tx_wake_queue(tx_queue->core_txq);   in efx_xmit_done()
net_driver.h
  268   struct netdev_queue *core_txq;   (member)
efx_channels.c
  1227  netdev_tx_completed_queue(tx_queue->core_txq,   in efx_process_channel()
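
The sfc (EF10/EF100) hits also show the XDP transmit path sharing core_txq with the regular stack path: the driver takes the per-queue TX lock with HARD_TX_LOCK() (tx.c:450), skips transmission if the stack has stopped the queue (tx.c:456), and drops the lock when done (tx.c:501). A minimal sketch under those assumptions; my_xdp_tx() and its structures are hypothetical, and the descriptor writes are elided.

    #include <linux/errno.h>
    #include <linux/netdevice.h>
    #include <linux/smp.h>

    struct my_nic {
            struct net_device *net_dev;
    };

    struct my_tx_queue {
            struct my_nic *efx;
            struct netdev_queue *core_txq;
    };

    /* Push XDP frames through the same netdev_queue the stack transmits on,
     * serialising against concurrent ndo_start_xmit() calls. */
    static int my_xdp_tx(struct my_tx_queue *tx_queue, int n_frames)
    {
            struct my_nic *efx = tx_queue->efx;
            int cpu = raw_smp_processor_id();
            int sent = -ENOSPC;

            HARD_TX_LOCK(efx->net_dev, tx_queue->core_txq, cpu);

            /* If the stack stopped this queue (BQL pressure or a full ring),
             * do not queue XDP frames behind it. */
            if (!netif_tx_queue_stopped(tx_queue->core_txq)) {
                    /* ... write n_frames descriptors to the hardware ring ... */
                    sent = n_frames;
            }

            HARD_TX_UNLOCK(efx->net_dev, tx_queue->core_txq);
            return sent;
    }

The __netdev_tx_sent_queue() hits at tx.c:387 and ef100_tx.c:484 combine the same BQL reporting with the xmit_more hint, letting the driver defer ringing the doorbell while more packets are still pending.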