
Searched refs:PKT_BUF_SZ (Results 1 – 25 of 36) sorted by relevance

/linux-6.6.21/drivers/net/ethernet/sun/
sunqe.h:322 #define PKT_BUF_SZ 1664 macro
326 u8 tx_buf[TX_RING_SIZE][PKT_BUF_SZ];
328 u8 rx_buf[RX_RING_SIZE][PKT_BUF_SZ];
/linux-6.6.21/drivers/net/ethernet/seeq/
sgiseeq.c:46 #define PKT_BUF_SZ 1584 macro
181 #define RCNTINFO_INIT (RCNTCFG_INIT | (PKT_BUF_SZ & HPCDMA_BCNT))
204 struct sk_buff *skb = netdev_alloc_skb(dev, PKT_BUF_SZ); in seeq_init_ring()
211 PKT_BUF_SZ, DMA_FROM_DEVICE); in seeq_init_ring()
356 len = PKT_BUF_SZ - (rd->rdma.cntinfo & HPCDMA_BCNT) - 3; in sgiseeq_rx()
358 PKT_BUF_SZ, DMA_FROM_DEVICE); in sgiseeq_rx()
366 newskb = netdev_alloc_skb(dev, PKT_BUF_SZ); in sgiseeq_rx()
401 PKT_BUF_SZ, DMA_FROM_DEVICE); in sgiseeq_rx()
/linux-6.6.21/drivers/staging/vt6655/
device_cfg.h:42 #define PKT_BUF_SZ 2390 macro
device_main.c:529 priv->opts.tx_descs[0] * PKT_BUF_SZ + in device_init_rings()
530 priv->opts.tx_descs[1] * PKT_BUF_SZ + in device_init_rings()
563 priv->opts.tx_descs[0] * PKT_BUF_SZ; in device_init_rings()
566 priv->opts.tx_descs[1] * PKT_BUF_SZ; in device_init_rings()
572 priv->opts.tx_descs[0] * PKT_BUF_SZ; in device_init_rings()
575 priv->opts.tx_descs[1] * PKT_BUF_SZ; in device_init_rings()
590 priv->opts.tx_descs[0] * PKT_BUF_SZ + in device_free_rings()
591 priv->opts.tx_descs[1] * PKT_BUF_SZ + in device_free_rings()
730 desc->td_info->buf = priv->tx0_bufs + i * PKT_BUF_SZ; in device_init_td0_ring()
731 desc->td_info->buf_dma = priv->tx_bufs_dma0 + i * PKT_BUF_SZ; in device_init_td0_ring()
[all …]
/linux-6.6.21/drivers/net/ethernet/amd/
lance.c:202 #define PKT_BUF_SZ 1544 macro
248 char (*tx_bounce_buffs)[PKT_BUF_SZ];
559 lp->rx_buffs = (unsigned long)kmalloc_array(RX_RING_SIZE, PKT_BUF_SZ, in lance_probe1()
564 lp->tx_bounce_buffs = kmalloc_array(TX_RING_SIZE, PKT_BUF_SZ, in lance_probe1()
878 skb = alloc_skb(PKT_BUF_SZ, GFP_DMA | gfp); in lance_init_ring()
883 rx_buff = kmalloc(PKT_BUF_SZ, GFP_DMA | gfp); in lance_init_ring()
888 lp->rx_ring[i].buf_length = -PKT_BUF_SZ; in lance_init_ring()
1218 lp->rx_ring[entry].buf_length = -PKT_BUF_SZ; in lance_rx()
atarilance.c:239 #define PKT_BUF_SZ 1544 macro
698 if (((o) < RIEBL_RSVD_START) ? (o)+PKT_BUF_SZ > RIEBL_RSVD_START \ in lance_init_ring()
711 offset += PKT_BUF_SZ; in lance_init_ring()
719 MEM->rx_head[i].buf_length = -PKT_BUF_SZ; in lance_init_ring()
721 offset += PKT_BUF_SZ; in lance_init_ring()
sun3lance.c:103 #define PKT_BUF_SZ 1544 macro
144 char rx_data[RX_RING_SIZE][PKT_BUF_SZ];
145 char tx_data[TX_RING_SIZE][PKT_BUF_SZ];
473 MEM->rx_head[i].buf_length = -PKT_BUF_SZ | 0xf000; in lance_init_ring()
declance.c:162 #define PKT_BUF_SZ 1536 macro
163 #define RX_BUFF_SIZE PKT_BUF_SZ
164 #define TX_BUFF_SIZE PKT_BUF_SZ
sunlance.c:186 #define PKT_BUF_SZ 1544 macro
187 #define RX_BUFF_SIZE PKT_BUF_SZ
188 #define TX_BUFF_SIZE PKT_BUF_SZ
/linux-6.6.21/drivers/net/ethernet/i825xx/
lib82596.c:130 #define PKT_BUF_SZ 1536 macro
501 skb = netdev_alloc_skb_ip_align(dev, PKT_BUF_SZ); in init_rx_bufs()
505 PKT_BUF_SZ, DMA_FROM_DEVICE); in init_rx_bufs()
512 rbd->size = SWAP16(PKT_BUF_SZ); in init_rx_bufs()
553 PKT_BUF_SZ, DMA_FROM_DEVICE); in remove_rx_bufs()
724 PKT_BUF_SZ, DMA_FROM_DEVICE); in i596_rx()
727 PKT_BUF_SZ); in i596_rx()
739 PKT_BUF_SZ, in i596_rx()
756 PKT_BUF_SZ, DMA_FROM_DEVICE); in i596_rx()
761 PKT_BUF_SZ, DMA_FROM_DEVICE); in i596_rx()
82596.c:164 #define PKT_BUF_SZ 1536 macro
549 struct sk_buff *skb = netdev_alloc_skb(dev, PKT_BUF_SZ); in init_rx_bufs()
562 rbd->size = PKT_BUF_SZ; in init_rx_bufs()
564 cache_clear(virt_to_phys(skb->data), PKT_BUF_SZ); in init_rx_bufs()
786 newskb = netdev_alloc_skb(dev, PKT_BUF_SZ); in i596_rx()
798 cache_clear(virt_to_phys(newskb->data), PKT_BUF_SZ); in i596_rx()
/linux-6.6.21/drivers/net/ethernet/dec/tulip/
interrupt.c:73 netdev_alloc_skb(dev, PKT_BUF_SZ); in tulip_refill_rx()
78 PKT_BUF_SZ, DMA_FROM_DEVICE); in tulip_refill_rx()
247 PKT_BUF_SZ, in tulip_poll()
477 PKT_BUF_SZ, DMA_FROM_DEVICE); in tulip_rx()
tulip_core.c:614 tp->rx_ring[i].length = cpu_to_le32(PKT_BUF_SZ); in tulip_init_ring()
620 tp->rx_ring[i-1].length = cpu_to_le32(PKT_BUF_SZ | DESC_RING_WRAP); in tulip_init_ring()
629 struct sk_buff *skb = netdev_alloc_skb(dev, PKT_BUF_SZ); in tulip_init_ring()
634 PKT_BUF_SZ, DMA_FROM_DEVICE); in tulip_init_ring()
799 dma_unmap_single(&tp->pdev->dev, mapping, PKT_BUF_SZ, in tulip_free_ring()
tulip.h:335 #define PKT_BUF_SZ (1536 + 4) /* Size of each temporary Rx buffer. */ macro
winbond-840.c:136 #undef PKT_BUF_SZ /* tulip.h also defines this */
137 #define PKT_BUF_SZ 1536 /* Size of each temporary Rx buffer.*/ macro
962 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in alloc_ringdesc()
/linux-6.6.21/drivers/net/ethernet/smsc/
smsc9420.h:35 #define PKT_BUF_SZ (VLAN_ETH_FRAME_LEN + NET_IP_ALIGN + 4) macro
smsc9420.c:538 PKT_BUF_SZ, DMA_FROM_DEVICE); in smsc9420_free_rx_ring()
757 PKT_BUF_SZ, DMA_FROM_DEVICE); in smsc9420_rx_handoff()
780 struct sk_buff *skb = netdev_alloc_skb(pd->dev, PKT_BUF_SZ); in smsc9420_alloc_rx_buffer()
790 PKT_BUF_SZ, DMA_FROM_DEVICE); in smsc9420_alloc_rx_buffer()
1223 pd->rx_ring[i].length = PKT_BUF_SZ; in smsc9420_alloc_rx_ring()
1228 pd->rx_ring[RX_RING_SIZE - 1].length = (PKT_BUF_SZ | RDES1_RER_); in smsc9420_alloc_rx_ring()
epic100.c:63 #define PKT_BUF_SZ 1536 /* Size of each temporary Rx buffer.*/ macro
907 ep->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in epic_init_ring()
1181 if (pkt_len > PKT_BUF_SZ - 4) { in epic_rx()
/linux-6.6.21/drivers/net/ethernet/3com/
3c59x.c:40 #define PKT_BUF_SZ 1536 /* Size of each temporary Rx buffer.*/ macro
1681 iowrite8(PKT_BUF_SZ>>8, ioaddr + TxFreeThreshold); /* Room for a packet. */ in vortex_up()
1740 vp->rx_ring[i].length = cpu_to_le32(PKT_BUF_SZ | LAST_FRAG); in vortex_open()
1742 skb = __netdev_alloc_skb(dev, PKT_BUF_SZ + NET_IP_ALIGN, in vortex_open()
1750 PKT_BUF_SZ, DMA_FROM_DEVICE); in vortex_open()
1922 iowrite8(PKT_BUF_SZ>>8, ioaddr + TxFreeThreshold); in vortex_tx_timeout()
2631 dma_sync_single_for_cpu(vp->gendev, dma, PKT_BUF_SZ, DMA_FROM_DEVICE); in boomerang_rx()
2635 dma_sync_single_for_device(vp->gendev, dma, PKT_BUF_SZ, DMA_FROM_DEVICE); in boomerang_rx()
2642 newskb = netdev_alloc_skb_ip_align(dev, PKT_BUF_SZ); in boomerang_rx()
2648 PKT_BUF_SZ, DMA_FROM_DEVICE); in boomerang_rx()
[all …]
3c515.c:53 #define PKT_BUF_SZ 1536 /* Size of each temporary Rx buffer. */ macro
818 vp->rx_ring[i].length = PKT_BUF_SZ | 0x80000000; in corkscrew_open()
819 skb = netdev_alloc_skb(dev, PKT_BUF_SZ); in corkscrew_open()
833 outb(PKT_BUF_SZ >> 8, ioaddr + TxFreeThreshold); /* Room for a packet. */ in corkscrew_open()
1392 skb = netdev_alloc_skb(dev, PKT_BUF_SZ); in boomerang_rx()
typhoon.c:96 #define PKT_BUF_SZ 1536 macro
1597 skb = netdev_alloc_skb(tp->dev, PKT_BUF_SZ); in typhoon_alloc_rx_skb()
1608 dma_addr = dma_map_single(&tp->pdev->dev, skb->data, PKT_BUF_SZ, in typhoon_alloc_rx_skb()
1665 PKT_BUF_SZ, DMA_FROM_DEVICE); in typhoon_rx()
1668 PKT_BUF_SZ, in typhoon_rx()
1675 dma_unmap_single(&tp->pdev->dev, dma_addr, PKT_BUF_SZ, in typhoon_rx()
1791 PKT_BUF_SZ, DMA_FROM_DEVICE); in typhoon_free_rx_rings()
1888 xp_cmd.parm1 = cpu_to_le16(PKT_BUF_SZ); in typhoon_start_runtime()
/linux-6.6.21/drivers/net/ethernet/renesas/
rswitch.c:242 PKT_BUF_SZ + RSWITCH_ALIGN - 1); in rswitch_gwca_queue_alloc_skb()
363 gq->skbs[i]->data, PKT_BUF_SZ, in rswitch_gwca_queue_format()
368 desc->desc.info_ds = cpu_to_le16(PKT_BUF_SZ); in rswitch_gwca_queue_format()
391 dma_unmap_single(ndev->dev.parent, dma_addr, PKT_BUF_SZ, in rswitch_gwca_queue_format()
427 gq->skbs[index]->data, PKT_BUF_SZ, in rswitch_gwca_queue_ext_ts_fill()
432 desc->desc.info_ds = cpu_to_le16(PKT_BUF_SZ); in rswitch_gwca_queue_ext_ts_fill()
450 dma_unmap_single(ndev->dev.parent, dma_addr, PKT_BUF_SZ, in rswitch_gwca_queue_ext_ts_fill()
719 dma_unmap_single(ndev->dev.parent, dma_addr, PKT_BUF_SZ, DMA_FROM_DEVICE); in rswitch_rx()
sh_eth.h:19 #define PKT_BUF_SZ 1538 macro
/linux-6.6.21/drivers/net/ethernet/via/
via-rhine.c:86 #define PKT_BUF_SZ 1536 /* Size of each temporary Rx buffer.*/ macro
1156 PKT_BUF_SZ * TX_RING_SIZE, in alloc_ring()
1188 dma_free_coherent(hwdev, PKT_BUF_SZ * TX_RING_SIZE, in free_ring()
1249 rp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in alloc_rbufs()
1316 rp->tx_buf[i] = &rp->tx_bufs[i * PKT_BUF_SZ]; in alloc_tbufs()
1791 if (skb->len > PKT_BUF_SZ) { in rhine_start_tx()
/linux-6.6.21/drivers/net/ethernet/adaptec/
starfire.c:87 #define PKT_BUF_SZ 1536 /* Size of each temporary Rx buffer.*/ macro
103 static int rx_copybreak = PKT_BUF_SZ;
1135 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in init_ring()
1697 if (np->tx_threshold <= PKT_BUF_SZ / 16) { in netdev_error()
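The hits above share one pattern: each of these (mostly legacy) Ethernet drivers defines its own PKT_BUF_SZ, usually 1536-1544 bytes (a full Ethernet frame plus a little slack; vt6655 uses 2390 and smsc9420 derives it from VLAN_ETH_FRAME_LEN), allocates one sk_buff of that fixed size per Rx descriptor, and DMA-maps it for the device to fill. The sketch below illustrates that Rx-ring fill pattern only; the names my_priv, my_fill_rx_ring and the ring size MY_RX_RING_SIZE are illustrative assumptions and do not appear in any of the drivers listed.

#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/dma-mapping.h>

#define PKT_BUF_SZ      1536    /* representative value, e.g. 3c59x, tulip, epic100 */
#define MY_RX_RING_SIZE 16      /* illustrative ring size, not from any listed driver */

struct my_priv {                /* hypothetical per-device state */
    struct sk_buff *rx_skb[MY_RX_RING_SIZE];
    dma_addr_t      rx_dma[MY_RX_RING_SIZE];
};

static int my_fill_rx_ring(struct net_device *dev, struct device *dmadev)
{
    struct my_priv *p = netdev_priv(dev);
    int i;

    for (i = 0; i < MY_RX_RING_SIZE; i++) {
        /* one fixed-size buffer per Rx descriptor, as in tulip_init_ring() */
        struct sk_buff *skb = netdev_alloc_skb_ip_align(dev, PKT_BUF_SZ);

        if (!skb)
            return -ENOMEM;
        p->rx_skb[i] = skb;

        /* hand the buffer to the NIC; the device later DMAs a received frame into it */
        p->rx_dma[i] = dma_map_single(dmadev, skb->data,
                                      PKT_BUF_SZ, DMA_FROM_DEVICE);
        if (dma_mapping_error(dmadev, p->rx_dma[i])) {
            dev_kfree_skb(skb);
            p->rx_skb[i] = NULL;
            return -ENOMEM;
        }
    }
    return 0;
}

Note that several of the drivers listed (winbond-840.c, via-rhine.c, epic100.c, starfire.c) fall back to dev->mtu + 32 when the MTU exceeds 1500, so PKT_BUF_SZ is the default buffer size there rather than a hard limit.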
