Searched refs:tx_dma (Results 1 – 12 of 12) sorted by relevance

/linux-2.4.37.9/drivers/net/
znet.c  126  int rx_dma, tx_dma; member
251 zn.tx_dma = netinfo->dma2; in znet_probe()
257 || request_dma(zn.tx_dma,"ZNet tx")) { in znet_probe()
381 short dma_port = ((zn.tx_dma&3)<<2) + IO_DMA2_BASE; in znet_send_packet()
628 disable_dma(zn.tx_dma); in znet_close()
693 short dma_port = ((zn.tx_dma&3)<<2) + IO_DMA2_BASE; in show_dma()
698 printk("Addr: %04x cnt:%3x...", addr<<1, get_dma_residue(zn.tx_dma)); in show_dma()
723 disable_dma(zn.tx_dma); in hardware_init()
724 clear_dma_ff(zn.tx_dma); in hardware_init()
725 set_dma_mode(zn.tx_dma, DMA_TX_MODE); in hardware_init()
[all …]
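znet programs a plain ISA (8237) DMA channel, so tx_dma above is just a channel number handed to request_dma()/disable_dma()/set_dma_mode() and friends, with progress read back through IO_DMA2_BASE port arithmetic. A minimal sketch of that style of TX setup, assuming only the standard <asm/dma.h> helpers and the zn.tx_dma field shown above (the helper name and buf/len are hypothetical):

    #include <asm/dma.h>            /* ISA DMA channel helpers */
    #include <asm/io.h>             /* virt_to_bus() */

    /* Hedged sketch, not znet's exact code: arm the TX channel for one
     * memory-to-device transfer. */
    static void znet_arm_tx_dma(void *buf, unsigned int len)
    {
            unsigned long flags = claim_dma_lock();   /* serialize 8237 access */

            disable_dma(zn.tx_dma);
            clear_dma_ff(zn.tx_dma);                  /* reset byte-pointer flip-flop */
            set_dma_mode(zn.tx_dma, DMA_MODE_WRITE);  /* memory -> device */
            set_dma_addr(zn.tx_dma, virt_to_bus(buf));
            set_dma_count(zn.tx_dma, len);
            enable_dma(zn.tx_dma);
            release_dma_lock(flags);
    }

The real driver uses its own DMA_TX_MODE value and polls get_dma_residue(zn.tx_dma) (line 698); the sketch substitutes the generic DMA_MODE_WRITE.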
titan_ge.c  827  (unsigned long) titan_port->tx_dma);
1255 titan_ge_eth->tx_dma = (TITAN_GE_SRAM_BASE_PHYSICAL);
1262 titan_ge_eth->tx_dma = (TITAN_GE_SRAM_BASE_PHYSICAL + 0x100);
1280 (unsigned long) titan_ge_eth->tx_dma);
1311 titan_ge_eth->tx_dma);
1862 titan_ge_eth->tx_dma);
2104 unsigned long tx_dma) argument
2108 unsigned long titan_ge_tx_desc_bus = tx_dma;
2133 titan_ge_port->tx_dma = tx_dma;
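In titan_ge, tx_dma is not a mapping handle at all: it is the fixed bus address of the port's TX descriptor ring inside the controller's on-chip SRAM (port 0 at the SRAM base, the next port 0x100 higher, per lines 1255/1262), and the helper at line 2104 receives it as titan_ge_tx_desc_bus. A hedged one-line sketch of that carving (port_num is a hypothetical name):

    /* Sketch: per-port TX rings live at fixed offsets in device SRAM. */
    titan_ge_eth->tx_dma = TITAN_GE_SRAM_BASE_PHYSICAL + port_num * 0x100;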
bmac.c  65  volatile struct dbdma_regs *tx_dma; member
242 volatile struct dbdma_regs *td = bp->tx_dma; in bmac_enable_and_reset_chip()
504 volatile struct dbdma_regs *td = bp->tx_dma; in bmac_sleep_notify()
623 volatile struct dbdma_regs *td = bp->tx_dma; in bmac_init_tx_ring()
678 volatile struct dbdma_regs *td = bp->tx_dma; in bmac_transmit_packet()
814 if (cp == bus_to_virt(in_le32(&bp->tx_dma->cmdptr))) in bmac_txdma_intr()
1397 bp->tx_dma = (volatile struct dbdma_regs *) in bmac_probe1()
1399 if (!bp->tx_dma) in bmac_probe1()
1450 iounmap((void *)bp->tx_dma); in bmac_probe1()
1530 volatile struct dbdma_regs *td = bp->tx_dma; in bmac_close()
[all …]
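bmac's tx_dma is an ioremapped pointer to the TX DBDMA channel's register block; transmitting amounts to appending dbdma_cmd descriptors and kicking the channel. A minimal sketch of the usual kick sequence, assuming <asm/dbdma.h> and a cmd_bus holding the bus address of the first descriptor (not bmac's exact code):

    #include <asm/dbdma.h>
    #include <asm/io.h>

    /* The DBDMA control register takes a bit mask in the top 16 bits and
     * the new bit values in the bottom 16. */
    static void tx_dbdma_kick(volatile struct dbdma_regs *td, unsigned int cmd_bus)
    {
            out_le32(&td->control, (RUN | PAUSE | FLUSH | WAKE) << 16); /* stop */
            out_le32(&td->cmdptr, cmd_bus);    /* point at the command list */
            out_le32(&td->control, (RUN << 16) | RUN);                  /* go */
    }

The interrupt handler then walks the same list and compares descriptors against in_le32(&bp->tx_dma->cmdptr), as at line 814.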
mace.c  49  volatile struct dbdma_regs *tx_dma; member
201 mp->tx_dma = (volatile struct dbdma_regs *) in mace_probe1()
464 volatile struct dbdma_regs *td = mp->tx_dma; in mace_open()
552 volatile struct dbdma_regs *td = mp->tx_dma; in mace_close()
583 volatile struct dbdma_regs *td = mp->tx_dma; in mace_xmit_start()
719 volatile struct dbdma_regs *td = mp->tx_dma; in mace_interrupt()
860 volatile struct dbdma_regs *td = mp->tx_dma; in mace_restart()
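mace obtains tx_dma the same DBDMA way, casting an ioremap() of the channel's register window at probe time (line 201). A hedged sketch of the shape of that mapping; the OF node variable name, the address-range index, the 0x1000 size and the error handling are assumptions, not shown in the listing:

    /* Sketch: map the TX DBDMA channel registers for CPU access. */
    mp->tx_dma = (volatile struct dbdma_regs *)
            ioremap(mace->addrs[1].address, 0x1000);    /* TX channel regs */
    if (mp->tx_dma == NULL)
            return -ENOMEM;                             /* hypothetical error path */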
forcedeth.c  558  dma_addr_t tx_dma[TX_RING]; member
963 np->tx_dma[i] = 0; in nv_init_tx()
981 if (np->tx_dma[skbnr]) { in nv_release_txskb()
982 pci_unmap_page(np->pci_dev, np->tx_dma[skbnr], in nv_release_txskb()
985 np->tx_dma[skbnr] = 0; in nv_release_txskb()
1076 np->tx_dma[nr] = pci_map_single(np->pci_dev, skb->data + offset, bcnt, in nv_start_xmit()
1081 np->tx_ring.orig[nr].PacketBuffer = cpu_to_le32(np->tx_dma[nr]); in nv_start_xmit()
1084 np->tx_ring.ex[nr].PacketBufferHigh = cpu_to_le64(np->tx_dma[nr]) >> 32; in nv_start_xmit()
1085 np->tx_ring.ex[nr].PacketBufferLow = cpu_to_le64(np->tx_dma[nr]) & 0x0FFFFFFFF; in nv_start_xmit()
1103 np->tx_dma[nr] = pci_map_page(np->pci_dev, frag->page, frag->page_offset+offset, bcnt, in nv_start_xmit()
[all …]
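forcedeth keeps a dma_addr_t per TX slot precisely so the cleanup path (nv_release_txskb(), lines 981-985) can unmap exactly what nv_start_xmit() mapped. A hedged sketch of the mapping side for a simple linear skb, mirroring lines 1076/1081; the extended ring format instead splits the 64-bit address across PacketBufferHigh/Low as at lines 1084/1085:

    /* Sketch: map the skb data for device reads, remember the handle for
     * the later unmap, and publish it in the 32-bit descriptor format. */
    np->tx_dma[nr] = pci_map_single(np->pci_dev, skb->data, skb->len,
                                    PCI_DMA_TODEVICE);
    np->tx_ring.orig[nr].PacketBuffer = cpu_to_le32(np->tx_dma[nr]);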
au1000_eth.h  171  typedef struct tx_dma { struct
titan_ge.h  231  dma_addr_t tx_dma; member
natsemi.c  644  dma_addr_t tx_dma[TX_RING_SIZE]; member
1536 np->tx_dma[i], np->tx_skbuff[i]->len, in drain_tx()
1608 np->tx_dma[entry] = pci_map_single(np->pci_dev, in start_tx()
1611 np->tx_ring[entry].addr = cpu_to_le32(np->tx_dma[entry]); in start_tx()
1672 pci_unmap_single(np->pci_dev,np->tx_dma[entry], in netdev_tx_done()
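natsemi follows the same bookkeeping pattern, and the TX-done path shows the other half: unmap with the same handle and length that start_tx() recorded, then free the skb. A short sketch of that completion-side cleanup (as at lines 1672 and 1536; assumed to run from interrupt context, hence dev_kfree_skb_irq()):

    /* Sketch: release one completed ring entry. */
    pci_unmap_single(np->pci_dev, np->tx_dma[entry],
                     np->tx_skbuff[entry]->len, PCI_DMA_TODEVICE);
    dev_kfree_skb_irq(np->tx_skbuff[entry]);
    np->tx_skbuff[entry] = NULL;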
/linux-2.4.37.9/drivers/atm/
eni.h  80  unsigned long tx_dma; /* TX DMA queue */ member
eni.c  1166  writel(eni_dev->dma[i*2],eni_dev->tx_dma+dma_wr*8); in do_tx()
1167 writel(eni_dev->dma[i*2+1],eni_dev->tx_dma+dma_wr*8+4); in do_tx()
1825 eni_dev->tx_dma = eni_dev->rx_dma+NR_DMA_RX*8; in eni_start()
1826 eni_dev->service = eni_dev->tx_dma+NR_DMA_TX*8; in eni_start()
1829 eni_dev->vci,eni_dev->rx_dma,eni_dev->tx_dma, in eni_start()
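In the ENI ATM driver, tx_dma is the ioremapped base of the adapter's TX DMA descriptor queue (carved out right after the RX queue at line 1825), and do_tx() posts one entry as two 32-bit writel()s at the current write index (lines 1166/1167). A hedged sketch of posting an entry; the wrap arithmetic is an assumption, only NR_DMA_TX itself appears in the listing:

    /* Sketch: write the two descriptor words, then advance the index. */
    writel(desc_lo, eni_dev->tx_dma + dma_wr * 8);
    writel(desc_hi, eni_dev->tx_dma + dma_wr * 8 + 4);
    dma_wr = (dma_wr + 1) % NR_DMA_TX;       /* assumed wrap handling */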
/linux-2.4.37.9/drivers/macintosh/
macserial.h  169  volatile struct dbdma_regs *tx_dma; member
macserial.c  1143  dbdma_reset(info->tx_dma); in shutdown()
2416 zss->tx_dma = (volatile struct dbdma_regs *) in chan_init()
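macserial reuses the same DBDMA channel idea for the SCC's transmit side: chan_init() (line 2416) ioremaps the channel register block into tx_dma and shutdown() (line 1143) quiesces it with dbdma_reset(). The usual shape of such a reset, sketched from the generic <asm/dbdma.h> bit definitions rather than copied from macserial.c:

    /* Sketch: clear the channel's run/pause/flush/wake bits and spin until
     * it reports stopped. */
    static inline void dbdma_quiesce(volatile struct dbdma_regs *dma)
    {
            out_le32(&dma->control,
                     (ACTIVE | DEAD | WAKE | FLUSH | PAUSE | RUN) << 16);
            while (in_le32(&dma->status) & RUN)
                    ;   /* wait for the channel to stop */
    }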