/linux-6.6.21/net/core/
  xdp.c
     410  void xdp_return_frame(struct xdp_frame *xdpf)  [in xdp_return_frame(), argument]
     415  if (likely(!xdp_frame_has_frags(xdpf)))  [in xdp_return_frame()]
     418  sinfo = xdp_get_shared_info_from_frame(xdpf);  [in xdp_return_frame()]
     422  __xdp_return(page_address(page), &xdpf->mem, false, NULL);  [in xdp_return_frame()]
     425  __xdp_return(xdpf->data, &xdpf->mem, false, NULL);  [in xdp_return_frame()]
     429  void xdp_return_frame_rx_napi(struct xdp_frame *xdpf)  [in xdp_return_frame_rx_napi(), argument]
     434  if (likely(!xdp_frame_has_frags(xdpf)))  [in xdp_return_frame_rx_napi()]
     437  sinfo = xdp_get_shared_info_from_frame(xdpf);  [in xdp_return_frame_rx_napi()]
     441  __xdp_return(page_address(page), &xdpf->mem, true, NULL);  [in xdp_return_frame_rx_napi()]
     444  __xdp_return(xdpf->data, &xdpf->mem, true, NULL);  [in xdp_return_frame_rx_napi()]
     [all …]
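The two helpers excerpted above are identical except for the boolean they pass to __xdp_return(): xdp_return_frame() passes false and is safe from any context, while xdp_return_frame_rx_napi() passes true so the page_pool may recycle the page directly, which is only valid while the caller is running inside its own NAPI poll. A minimal sketch of a driver-side drop helper built on that distinction; mydrv_drop_frame() is a hypothetical name, the two xdp_* calls are the ones above.

/*
 * Hedged sketch (not from the kernel tree): a hypothetical helper a driver
 * could use when it has to drop a frame it owns.  xdp_return_frame() passes
 * napi_direct=false to __xdp_return() and is safe from any context;
 * xdp_return_frame_rx_napi() passes true and may only be used while running
 * inside the caller's own NAPI poll, where direct page_pool recycling is safe.
 */
#include <linux/types.h>
#include <net/xdp.h>

static void mydrv_drop_frame(struct xdp_frame *xdpf, bool in_own_napi)
{
        if (in_own_napi)
                xdp_return_frame_rx_napi(xdpf); /* e.g. TX completion in NAPI */
        else
                xdp_return_frame(xdpf);         /* e.g. ndo_xdp_xmit error path */
}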
/linux-6.6.21/kernel/bpf/
  cpumap.c
     189  struct xdp_frame *xdpf = frames[i];  [in cpu_map_bpf_prog_run_xdp(), local]
     193  rxq.dev = xdpf->dev_rx;  [in cpu_map_bpf_prog_run_xdp()]
     194  rxq.mem = xdpf->mem;  [in cpu_map_bpf_prog_run_xdp()]
     197  xdp_convert_frame_to_buff(xdpf, &xdp);  [in cpu_map_bpf_prog_run_xdp()]
     202  err = xdp_update_frame_from_buff(&xdp, xdpf);  [in cpu_map_bpf_prog_run_xdp()]
     204  xdp_return_frame(xdpf);  [in cpu_map_bpf_prog_run_xdp()]
     207  frames[nframes++] = xdpf;  [in cpu_map_bpf_prog_run_xdp()]
     212  err = xdp_do_redirect(xdpf->dev_rx, &xdp,  [in cpu_map_bpf_prog_run_xdp()]
     215  xdp_return_frame(xdpf);  [in cpu_map_bpf_prog_run_xdp()]
     225  xdp_return_frame(xdpf);  [in cpu_map_bpf_prog_run_xdp()]
     [all …]
  devmap.c
     331  struct xdp_frame *xdpf = frames[i];  [in dev_map_bpf_prog_run(), local]
     335  xdp_convert_frame_to_buff(xdpf, &xdp);  [in dev_map_bpf_prog_run()]
     341  err = xdp_update_frame_from_buff(&xdp, xdpf);  [in dev_map_bpf_prog_run()]
     343  xdp_return_frame_rx_napi(xdpf);  [in dev_map_bpf_prog_run()]
     345  frames[nframes++] = xdpf;  [in dev_map_bpf_prog_run()]
     354  xdp_return_frame_rx_napi(xdpf);  [in dev_map_bpf_prog_run()]
     373  struct xdp_frame *xdpf = bq->q[i];  [in bq_xmit_all(), local]
     375  prefetch(xdpf);  [in bq_xmit_all()]
     442  static void bq_enqueue(struct net_device *dev, struct xdp_frame *xdpf,  [in bq_enqueue(), argument]
     464  bq->q[bq->count++] = xdpf;  [in bq_enqueue()]
     [all …]
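Both cpumap and devmap run a BPF program on frames that have already left the driver, so they temporarily rebuild an xdp_buff view with xdp_convert_frame_to_buff(), run the program, and write any head/tail adjustments back with xdp_update_frame_from_buff(), returning the frame when anything fails. A simplified sketch of that round trip; run_prog_on_frame() is a hypothetical name, RCU/migration requirements of bpf_prog_run_xdp() are left to the caller, and unlike the excerpts every verdict other than XDP_PASS is treated as a drop (the real code also handles XDP_REDIRECT via xdp_do_redirect()).

/*
 * Simplified sketch of the frame -> buff -> frame round trip used by
 * cpumap/devmap; run_prog_on_frame() is a hypothetical name, and RCU/
 * migration requirements of bpf_prog_run_xdp() are left to the caller.
 */
#include <linux/bpf.h>
#include <linux/filter.h>
#include <net/xdp.h>

static bool run_prog_on_frame(struct bpf_prog *prog, struct xdp_frame *xdpf,
                              struct xdp_rxq_info *rxq)
{
        struct xdp_buff xdp;
        u32 act;

        xdp.rxq = rxq;                          /* carries dev_rx and mem info */
        xdp_convert_frame_to_buff(xdpf, &xdp);  /* rebuild data/data_end/headroom */

        act = bpf_prog_run_xdp(prog, &xdp);
        if (act != XDP_PASS || xdp_update_frame_from_buff(&xdp, xdpf) < 0) {
                xdp_return_frame(xdpf);         /* dropped, or head grew past the
                                                 * space reserved for the frame */
                return false;
        }
        return true;    /* xdpf now reflects any head/tail adjustments */
}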
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/en/
  xdp.c
      67  struct xdp_frame *xdpf;  [in mlx5e_xmit_xdp_buff(), local]
      71  xdpf = xdp_convert_buff_to_frame(xdp);  [in mlx5e_xmit_xdp_buff()]
      72  if (unlikely(!xdpf))  [in mlx5e_xmit_xdp_buff()]
      76  xdptxd->data = xdpf->data;  [in mlx5e_xmit_xdp_buff()]
      77  xdptxd->len = xdpf->len;  [in mlx5e_xmit_xdp_buff()]
      78  xdptxd->has_frags = xdp_frame_has_frags(xdpf);  [in mlx5e_xmit_xdp_buff()]
      99  xdp_return_frame(xdpf);  [in mlx5e_xmit_xdp_buff()]
     113  (union mlx5e_xdp_info) { .frame.xdpf = xdpf });  [in mlx5e_xmit_xdp_buff()]
     125  dma_addr = page_pool_get_dma_addr(page) + (xdpf->data - (void *)xdpf);  [in mlx5e_xmit_xdp_buff()]
     129  xdptxdf.sinfo = xdp_get_shared_info_from_frame(xdpf);  [in mlx5e_xmit_xdp_buff()]
     [all …]
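mlx5e_xmit_xdp_buff() shows the usual XDP_TX shape: the received xdp_buff is turned into an xdp_frame with xdp_convert_buff_to_frame(), which stores the frame metadata in the buffer's own headroom and returns NULL when there is not enough room, and the frame is then kept on the TX ring until completion. A hedged, driver-agnostic sketch of that pattern; struct mydrv_txq and mydrv_post_tx_desc() are hypothetical stand-ins for the real ring code.

/*
 * Hedged, driver-agnostic sketch of an XDP_TX helper in the spirit of
 * mlx5e_xmit_xdp_buff(); struct mydrv_txq and mydrv_post_tx_desc() are
 * hypothetical stand-ins for the real ring code.
 */
#include <linux/types.h>
#include <net/xdp.h>

struct mydrv_txq;                                       /* hypothetical */
bool mydrv_post_tx_desc(struct mydrv_txq *txq, void *data,
                        unsigned int len, struct xdp_frame *xdpf);

static bool mydrv_xdp_tx(struct mydrv_txq *txq, struct xdp_buff *xdp)
{
        struct xdp_frame *xdpf;

        /* Stores the frame metadata in the buffer's own headroom; returns
         * NULL when head/tailroom is too small, in which case the caller
         * simply recycles the buffer. */
        xdpf = xdp_convert_buff_to_frame(xdp);
        if (unlikely(!xdpf))
                return false;

        if (!mydrv_post_tx_desc(txq, xdpf->data, xdpf->len, xdpf)) {
                xdp_return_frame(xdpf);         /* ring full: free it now */
                return false;
        }
        return true;    /* the TX completion handler frees xdpf later */
}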
/linux-6.6.21/drivers/net/vmxnet3/
  vmxnet3_xdp.c
     116  struct xdp_frame *xdpf,  [in vmxnet3_xdp_xmit_frame(), argument]
     128  dw2 |= xdpf->len;  [in vmxnet3_xdp_xmit_frame()]
     132  buf_size = xdpf->len;  [in vmxnet3_xdp_xmit_frame()]
     143  xdpf->data, buf_size,  [in vmxnet3_xdp_xmit_frame()]
     149  page = virt_to_page(xdpf->data);  [in vmxnet3_xdp_xmit_frame()]
     156  tbi->xdpf = xdpf;  [in vmxnet3_xdp_xmit_frame()]
     202  struct xdp_frame *xdpf)  [in vmxnet3_xdp_xmit_back(), argument]
     215  err = vmxnet3_xdp_xmit_frame(adapter, xdpf, tq, false);  [in vmxnet3_xdp_xmit_back()]
     254  struct xdp_frame *xdpf;  [in vmxnet3_run_xdp(), local]
     276  xdpf = xdp_convert_buff_to_frame(xdp);  [in vmxnet3_run_xdp()]
     [all …]
/linux-6.6.21/include/net/
  xdp.h
     243  struct sk_buff *__xdp_build_skb_from_frame(struct xdp_frame *xdpf,
     246  struct sk_buff *xdp_build_skb_from_frame(struct xdp_frame *xdpf,
     249  struct xdp_frame *xdpf_clone(struct xdp_frame *xdpf);
     313  void xdp_return_frame(struct xdp_frame *xdpf);
     314  void xdp_return_frame_rx_napi(struct xdp_frame *xdpf);
     317  void xdp_return_frame_bulk(struct xdp_frame *xdpf,
     320  static __always_inline unsigned int xdp_get_frame_len(struct xdp_frame *xdpf)  [in xdp_get_frame_len(), argument]
     323  unsigned int len = xdpf->len;  [in xdp_get_frame_len()]
     325  if (likely(!xdp_frame_has_frags(xdpf)))  [in xdp_get_frame_len()]
     328  sinfo = xdp_get_shared_info_from_frame(xdpf);  [in xdp_get_frame_len()]
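The header also defines xdp_get_frame_len(), which returns xdpf->len for a linear frame and adds the fragment bytes from the shared_info when xdp_frame_has_frags() is set; aq_ring.c further down uses it exactly that way for TX byte accounting. A small sketch of that use, with a hypothetical stats structure.

/*
 * Hedged sketch: TX byte accounting that stays correct for multi-buffer
 * frames.  The stats structure is hypothetical; xdp_get_frame_len() and
 * xdp_frame_has_frags() are the helpers from the header above.
 */
#include <linux/types.h>
#include <net/xdp.h>

struct mydrv_tx_stats {         /* hypothetical */
        u64 packets;
        u64 bytes;
};

static void mydrv_account_tx_frame(struct mydrv_tx_stats *stats,
                                   struct xdp_frame *xdpf)
{
        stats->packets++;
        /* Adds the fragment bytes as well when xdp_frame_has_frags(xdpf). */
        stats->bytes += xdp_get_frame_len(xdpf);
}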
/linux-6.6.21/drivers/net/ethernet/intel/ixgbe/
  ixgbe_xsk.c
     104  struct xdp_frame *xdpf;  [in ixgbe_run_xdp_zc(), local]
     125  xdpf = xdp_convert_buff_to_frame(xdp);  [in ixgbe_run_xdp_zc()]
     126  if (unlikely(!xdpf))  [in ixgbe_run_xdp_zc()]
     131  result = ixgbe_xmit_xdp_ring(ring, xdpf);  [in ixgbe_run_xdp_zc()]
     423  tx_bi->xdpf = NULL;  [in ixgbe_xmit_zc()]
     454  xdp_return_frame(tx_bi->xdpf);  [in ixgbe_clean_xdp_tx_buffer()]
     481  if (tx_bi->xdpf)  [in ixgbe_clean_xdp_tx_irq()]
     486  tx_bi->xdpf = NULL;  [in ixgbe_clean_xdp_tx_irq()]
     560  if (tx_bi->xdpf)  [in ixgbe_xsk_clean_tx_ring()]
     565  tx_bi->xdpf = NULL;  [in ixgbe_xsk_clean_tx_ring()]
  ixgbe_txrx_common.h
      17  struct xdp_frame *xdpf);
/linux-6.6.21/drivers/net/ethernet/sfc/siena/
  tx.c
     213  struct xdp_frame *xdpf;  [in efx_siena_xdp_tx_buffers(), local]
     255  xdpf = xdpfs[i];  [in efx_siena_xdp_tx_buffers()]
     263  len = xdpf->len;  [in efx_siena_xdp_tx_buffers()]
     267  xdpf->data, len,  [in efx_siena_xdp_tx_buffers()]
     274  tx_buffer->xdpf = xdpf;  [in efx_siena_xdp_tx_buffers()]
  rx.c
     257  struct xdp_frame *xdpf;  [in efx_do_xdp(), local]
     312  xdpf = xdp_convert_buff_to_frame(&xdp);  [in efx_do_xdp()]
     313  err = efx_siena_xdp_tx_buffers(efx, 1, &xdpf, true);  [in efx_do_xdp()]
/linux-6.6.21/drivers/net/ethernet/freescale/dpaa/
  dpaa_eth.c
    1717  xdp_return_frame(swbp->xdpf);  [in dpaa_cleanup_tx_fd()]
    2198  struct xdp_frame *new_xdpf, *xdpf = *init_xdpf;  [in dpaa_a050385_wa_xdpf(), local]
    2212  if (PTR_IS_ALIGNED(xdpf->data, DPAA_FD_DATA_ALIGNMENT) &&  [in dpaa_a050385_wa_xdpf()]
    2213  xdpf->headroom >= priv->tx_headroom) {  [in dpaa_a050385_wa_xdpf()]
    2214  xdpf->headroom = priv->tx_headroom;  [in dpaa_a050385_wa_xdpf()]
    2222  aligned_data = PTR_ALIGN_DOWN(xdpf->data, DPAA_FD_DATA_ALIGNMENT);  [in dpaa_a050385_wa_xdpf()]
    2223  data_shift = xdpf->data - aligned_data;  [in dpaa_a050385_wa_xdpf()]
    2228  if (xdpf->headroom >= data_shift + priv->tx_headroom) {  [in dpaa_a050385_wa_xdpf()]
    2229  memmove(aligned_data, xdpf->data, xdpf->len);  [in dpaa_a050385_wa_xdpf()]
    2230  xdpf->data = aligned_data;  [in dpaa_a050385_wa_xdpf()]
     [all …]
/linux-6.6.21/drivers/net/ethernet/fungible/funeth/
  funeth_tx.c
     500  xdp_return_frame(q->info[reclaim_idx].xdpf);  [in fun_xdpq_clean()]
     514  bool fun_xdp_tx(struct funeth_txq *q, struct xdp_frame *xdpf)  [in fun_xdp_tx(), argument]
     516  unsigned int idx, nfrags = 1, ndesc = 1, tot_len = xdpf->len;  [in fun_xdp_tx()]
     525  if (unlikely(xdp_frame_has_frags(xdpf))) {  [in fun_xdp_tx()]
     526  si = xdp_get_shared_info_from_frame(xdpf);  [in fun_xdp_tx()]
     527  tot_len = xdp_get_frame_len(xdpf);  [in fun_xdp_tx()]
     539  if (unlikely(fun_map_pkt(q->dma_dev, si, xdpf->data, xdpf->len, dma,  [in fun_xdp_tx()]
     558  q->info[idx].xdpf = xdpf;  [in fun_xdp_tx()]
     618  xdp_return_frame(q->info[idx].xdpf);  [in fun_xdpq_purge()]
  funeth_txrx.h
     101  struct xdp_frame *xdpf; /* associated XDP frame (XDP path) */  [member]
     250  bool fun_xdp_tx(struct funeth_txq *q, struct xdp_frame *xdpf);
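fun_xdp_tx() above maps the linear part of the frame and, for multi-buffer frames, the page fragments described by the skb_shared_info obtained via xdp_get_shared_info_from_frame() (aq_nic_map_xdp() further down does the same). A hedged sketch of such a walk; mydrv_map() is a hypothetical wrapper around the driver's mapping primitive (for example dma_map_single()/skb_frag_dma_map()).

/*
 * Hedged sketch of walking a (possibly multi-buffer) xdp_frame for DMA
 * mapping.  mydrv_map() and mydrv_map_xdp_frame() are hypothetical names.
 */
#include <linux/skbuff.h>
#include <net/xdp.h>

int mydrv_map(struct device *dev, void *vaddr, unsigned int len);  /* hypothetical */

static int mydrv_map_xdp_frame(struct device *dev, struct xdp_frame *xdpf)
{
        struct skb_shared_info *sinfo;
        unsigned int i;
        int err;

        /* Linear part: xdpf->data / xdpf->len, just like an skb head. */
        err = mydrv_map(dev, xdpf->data, xdpf->len);
        if (err)
                return err;

        if (likely(!xdp_frame_has_frags(xdpf)))
                return 0;

        /* Fragments are described by the shared_info stored in the buffer. */
        sinfo = xdp_get_shared_info_from_frame(xdpf);
        for (i = 0; i < sinfo->nr_frags; i++) {
                skb_frag_t *frag = &sinfo->frags[i];

                err = mydrv_map(dev, skb_frag_address(frag),
                                skb_frag_size(frag));
                if (err)
                        return err;     /* caller unmaps what was mapped */
        }
        return 0;
}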
/linux-6.6.21/drivers/net/ethernet/aquantia/atlantic/
  aq_ring.c
     320  } else if (buff->xdpf) {  [in aq_ring_tx_clean()]
     323  self->stats.tx.bytes += xdp_get_frame_len(buff->xdpf);  [in aq_ring_tx_clean()]
     325  xdp_return_frame_rx_napi(buff->xdpf);  [in aq_ring_tx_clean()]
     330  buff->xdpf = NULL;  [in aq_ring_tx_clean()]
     377  struct xdp_frame *xdpf = frames[i];  [in aq_xdp_xmit(), local]
     379  if (aq_nic_xmit_xdpf(aq_nic, ring, xdpf) == NETDEV_TX_BUSY)  [in aq_xdp_xmit()]
     390  struct xdp_frame *xdpf;  [in aq_xdp_build_skb(), local]
     393  xdpf = xdp_convert_buff_to_frame(xdp);  [in aq_xdp_build_skb()]
     394  if (unlikely(!xdpf))  [in aq_xdp_build_skb()]
     397  skb = xdp_build_skb_from_frame(xdpf, dev);  [in aq_xdp_build_skb()]
     [all …]
  aq_nic.c
     576  struct xdp_frame *xdpf,  [in aq_nic_map_xdp(), argument]
     592  sinfo = xdp_get_shared_info_from_frame(xdpf);  [in aq_nic_map_xdp()]
     593  total_len = xdpf->len;  [in aq_nic_map_xdp()]
     595  if (xdp_frame_has_frags(xdpf)) {  [in aq_nic_map_xdp()]
     599  dx_buff->pa = dma_map_single(dev, xdpf->data, dx_buff->len,  [in aq_nic_map_xdp()]
     649  dx_buff->xdpf = xdpf;  [in aq_nic_map_xdp()]
     800  dx_buff->xdpf = NULL;  [in aq_nic_map_skb()]
     830  struct xdp_frame *xdpf)  [in aq_nic_xmit_xdpf(), argument]
     840  if (xdp_frame_has_frags(xdpf)) {  [in aq_nic_xmit_xdpf()]
     841  sinfo = xdp_get_shared_info_from_frame(xdpf);  [in aq_nic_xmit_xdpf()]
     [all …]
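aq_xdp_build_skb() above shows the XDP_PASS path for a driver that works on frames: the buffer is converted to an xdp_frame and then wrapped in an sk_buff with xdp_build_skb_from_frame() before being handed to the stack. A minimal sketch of that conversion, with error handling reduced to returning NULL; pass_frame_to_stack() is a hypothetical name.

/*
 * Minimal sketch of an XDP_PASS path that builds an sk_buff from the frame,
 * following aq_xdp_build_skb() above.  Only core helpers are used.
 */
#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <net/xdp.h>

static struct sk_buff *pass_frame_to_stack(struct xdp_buff *xdp,
                                           struct net_device *dev)
{
        struct xdp_frame *xdpf;

        xdpf = xdp_convert_buff_to_frame(xdp);
        if (unlikely(!xdpf))
                return NULL;

        /* Wraps the frame's data (and frags, if present) in a new skb. */
        return xdp_build_skb_from_frame(xdpf, dev);
}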
/linux-6.6.21/drivers/net/ethernet/ti/
  cpsw_priv.c
      55  struct xdp_frame *xdpf;  [in cpsw_tx_handler(), local]
      62  xdpf = cpsw_handle_to_xdpf(token);  [in cpsw_tx_handler()]
      63  xmeta = (void *)xdpf + CPSW_XMETA_OFFSET;  [in cpsw_tx_handler()]
      66  xdp_return_frame(xdpf);  [in cpsw_tx_handler()]
    1291  int cpsw_xdp_tx_frame(struct cpsw_priv *priv, struct xdp_frame *xdpf,  [in cpsw_xdp_tx_frame(), argument]
    1300  xmeta = (void *)xdpf + CPSW_XMETA_OFFSET;  [in cpsw_xdp_tx_frame()]
    1307  dma += xdpf->headroom + sizeof(struct xdp_frame);  [in cpsw_xdp_tx_frame()]
    1308  ret = cpdma_chan_submit_mapped(txch, cpsw_xdpf_to_handle(xdpf),  [in cpsw_xdp_tx_frame()]
    1309  dma, xdpf->len, port);  [in cpsw_xdp_tx_frame()]
    1311  if (sizeof(*xmeta) > xdpf->headroom)  [in cpsw_xdp_tx_frame()]
     [all …]
/linux-6.6.21/drivers/net/ethernet/sfc/
  tx.c
     426  struct xdp_frame *xdpf;  [in efx_xdp_tx_buffers(), local]
     468  xdpf = xdpfs[i];  [in efx_xdp_tx_buffers()]
     476  len = xdpf->len;  [in efx_xdp_tx_buffers()]
     480  xdpf->data, len,  [in efx_xdp_tx_buffers()]
     487  tx_buffer->xdpf = xdpf;  [in efx_xdp_tx_buffers()]
  rx.c
     257  struct xdp_frame *xdpf;  [in efx_do_xdp(), local]
     312  xdpf = xdp_convert_buff_to_frame(&xdp);  [in efx_do_xdp()]
     313  err = efx_xdp_tx_buffers(efx, 1, &xdpf, true);  [in efx_do_xdp()]
/linux-6.6.21/drivers/net/ethernet/socionext/
  netsec.c
     272  struct xdp_frame *xdpf;  [member]
     675  bytes += desc->xdpf->len;  [in netsec_clean_tx_dring()]
     677  xdp_return_frame_rx_napi(desc->xdpf);  [in netsec_clean_tx_dring()]
     679  xdp_return_frame_bulk(desc->xdpf, &bq);  [in netsec_clean_tx_dring()]
     821  dring->desc[idx].xdpf = buf;  [in netsec_set_tx_de()]
     829  struct xdp_frame *xdpf, bool is_ndo)  [in netsec_xdp_queue_one(), argument]
     833  struct page *page = virt_to_page(xdpf->data);  [in netsec_xdp_queue_one()]
     851  dma_handle = dma_map_single(priv->dev, xdpf->data, xdpf->len,  [in netsec_xdp_queue_one()]
     865  dma_handle = page_pool_get_dma_addr(page) + xdpf->headroom +  [in netsec_xdp_queue_one()]
     866  sizeof(*xdpf);  [in netsec_xdp_queue_one()]
     [all …]
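netsec_clean_tx_dring() frees completed frames either one at a time with xdp_return_frame_rx_napi() or, for ndo_xdp_xmit frames, through the bulk API, which batches page_pool returns; lan966x_fdma_tx_clear_buf() below uses the same bulk path. A hedged sketch of the bulk variant, assuming the completed frames have already been collected into an array; the bulk helpers are the ones declared in <net/xdp.h>, the queue walk is hypothetical, and rcu_read_lock() is taken because the bulk return path looks up the page_pool under RCU, matching the drivers in this listing.

/*
 * Hedged sketch of bulk-freeing completed frames on the TX clean path.
 * mydrv_free_completed() and the "done" array are hypothetical.
 */
#include <linux/rcupdate.h>
#include <net/xdp.h>

static void mydrv_free_completed(struct xdp_frame **done, unsigned int n)
{
        struct xdp_frame_bulk bq;
        unsigned int i;

        xdp_frame_bulk_init(&bq);

        rcu_read_lock();
        for (i = 0; i < n; i++)
                xdp_return_frame_bulk(done[i], &bq);    /* batches page_pool returns */
        xdp_flush_frame_bulk(&bq);
        rcu_read_unlock();
}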
/linux-6.6.21/drivers/net/ethernet/microchip/lan966x/
  lan966x_fdma.c
     435  xdp_return_frame_bulk(dcb_buf->data.xdpf, &bq);  [in lan966x_fdma_tx_clear_buf()]
     710  struct xdp_frame *xdpf;  [in lan966x_fdma_xmit_xdpf(), local]
     732  xdpf = ptr;  [in lan966x_fdma_xmit_xdpf()]
     734  if (xdpf->headroom < IFH_LEN_BYTES) {  [in lan966x_fdma_xmit_xdpf()]
     739  ifh = xdpf->data - IFH_LEN_BYTES;  [in lan966x_fdma_xmit_xdpf()]
     745  xdpf->data - IFH_LEN_BYTES,  [in lan966x_fdma_xmit_xdpf()]
     746  xdpf->len + IFH_LEN_BYTES,  [in lan966x_fdma_xmit_xdpf()]
     753  next_dcb_buf->data.xdpf = xdpf;  [in lan966x_fdma_xmit_xdpf()]
     754  next_dcb_buf->len = xdpf->len + IFH_LEN_BYTES;  [in lan966x_fdma_xmit_xdpf()]
     758  xdpf->len + IFH_LEN_BYTES,  [in lan966x_fdma_xmit_xdpf()]
  lan966x_xdp.c
      62  struct xdp_frame *xdpf = frames[i];  [in lan966x_xdp_xmit(), local]
      65  err = lan966x_fdma_xmit_xdpf(port, xdpf, 0);  [in lan966x_xdp_xmit()]
/linux-6.6.21/drivers/net/ethernet/qlogic/qede/
  qede_fp.c
     305  u16 len, struct page *page, struct xdp_frame *xdpf)  [in qede_xdp_xmit(), argument]
     332  xdp->xdpf = xdpf;  [in qede_xdp_xmit()]
     345  struct xdp_frame *xdpf;  [in qede_xdp_transmit(), local]
     362  xdpf = frames[i];  [in qede_xdp_transmit()]
     364  mapping = dma_map_single(dmadev, xdpf->data, xdpf->len,  [in qede_xdp_transmit()]
     369  if (unlikely(qede_xdp_xmit(xdp_tx, mapping, 0, xdpf->len,  [in qede_xdp_transmit()]
     370  NULL, xdpf)))  [in qede_xdp_transmit()]
     404  struct xdp_frame *xdpf;  [in qede_xdp_tx_int(), local]
     412  xdpf = xdp_info->xdpf;  [in qede_xdp_tx_int()]
     414  if (xdpf) {  [in qede_xdp_tx_int()]
     [all …]
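qede_xdp_transmit() is the driver's ndo_xdp_xmit(): each redirected frame's data is DMA-mapped with dma_map_single() and posted to a dedicated TX ring, and the function returns how many frames it consumed so the core can free the rest. A hedged sketch of a minimal implementation of that contract; all mydrv_* names are hypothetical and the DMA-device lookup is driver specific.

/*
 * Hedged sketch of a minimal .ndo_xdp_xmit implementation: map each
 * redirected frame, post it, and return how many frames were consumed so
 * the core frees the rest.  All mydrv_* names are hypothetical.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/netdevice.h>
#include <net/xdp.h>

struct mydrv_txq;                                               /* hypothetical */
struct mydrv_txq *mydrv_get_xdp_txq(struct net_device *dev);    /* hypothetical */
bool mydrv_post_frame(struct mydrv_txq *txq, dma_addr_t dma,
                      struct xdp_frame *xdpf);                  /* hypothetical */
void mydrv_kick(struct mydrv_txq *txq);                         /* hypothetical */

static int mydrv_xdp_xmit(struct net_device *dev, int n,
                          struct xdp_frame **frames, u32 flags)
{
        struct mydrv_txq *txq = mydrv_get_xdp_txq(dev);
        struct device *dmadev = dev->dev.parent;        /* driver specific */
        int i, sent = 0;

        if (unlikely(flags & ~XDP_XMIT_FLAGS_MASK))
                return -EINVAL;

        for (i = 0; i < n; i++) {
                struct xdp_frame *xdpf = frames[i];
                dma_addr_t dma;

                dma = dma_map_single(dmadev, xdpf->data, xdpf->len,
                                     DMA_TO_DEVICE);
                if (dma_mapping_error(dmadev, dma))
                        break;          /* unsent frames are freed by the caller */

                if (!mydrv_post_frame(txq, dma, xdpf)) {
                        dma_unmap_single(dmadev, dma, xdpf->len, DMA_TO_DEVICE);
                        break;
                }
                sent++;
        }

        if (flags & XDP_XMIT_FLUSH)
                mydrv_kick(txq);        /* ring the doorbell once per batch */

        return sent;
}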
/linux-6.6.21/drivers/net/ethernet/broadcom/bnxt/
  bnxt_xdp.c
     110  struct xdp_frame *xdpf)  [in __bnxt_xmit_xdp_redirect(), argument]
     116  tx_buf->xdpf = xdpf;  [in __bnxt_xmit_xdp_redirect()]
     145  xdp_return_frame(tx_buf->xdpf);  [in bnxt_tx_int_xdp()]
     147  tx_buf->xdpf = NULL;  [in bnxt_tx_int_xdp()]
/linux-6.6.21/drivers/net/ethernet/engleder/
  tsnep_main.c
     565  static int tsnep_xdp_tx_map(struct xdp_frame *xdpf, struct tsnep_tx *tx,  [in tsnep_xdp_tx_map(), argument]
     579  len = xdpf->len;  [in tsnep_xdp_tx_map()]
     584  xdpf->data;  [in tsnep_xdp_tx_map()]
     592  virt_to_page(xdpf->data);  [in tsnep_xdp_tx_map()]
     597  dma += sizeof(*xdpf) + xdpf->headroom;  [in tsnep_xdp_tx_map()]
     621  static bool tsnep_xdp_xmit_frame_ring(struct xdp_frame *xdpf,  [in tsnep_xdp_xmit_frame_ring(), argument]
     624  struct skb_shared_info *shinfo = xdp_get_shared_info_from_frame(xdpf);  [in tsnep_xdp_xmit_frame_ring()]
     629  if (unlikely(xdp_frame_has_frags(xdpf)))  [in tsnep_xdp_xmit_frame_ring()]
     640  entry->xdpf = xdpf;  [in tsnep_xdp_xmit_frame_ring()]
     642  retval = tsnep_xdp_tx_map(xdpf, tx, shinfo, count, type);  [in tsnep_xdp_xmit_frame_ring()]
     [all …]
/linux-6.6.21/drivers/net/ethernet/intel/i40e/
  i40e_xsk.c
     643  xdp_return_frame(tx_bi->xdpf);  [in i40e_clean_xdp_tx_buffer()]
     683  if (tx_bi->xdpf) {  [in i40e_clean_xdp_tx_irq()]
     685  tx_bi->xdpf = NULL;  [in i40e_clean_xdp_tx_irq()]
     784  if (tx_bi->xdpf)  [in i40e_xsk_clean_tx_ring()]
     789  tx_bi->xdpf = NULL;  [in i40e_xsk_clean_tx_ring()]