Lines matching references to 'c' (struct z8530_channel *) in the Zilog Z8530/Z85230 SCC driver, z85230.c. Each entry gives the source line number, the line itself, and the function that contains it.

107 static void z8530_rx_done(struct z8530_channel *c);
108 static void z8530_tx_done(struct z8530_channel *c);
122 static inline u8 read_zsreg(struct z8530_channel *c, u8 reg) in read_zsreg() argument
125 z8530_write_port(c->ctrlio, reg); in read_zsreg()
126 return z8530_read_port(c->ctrlio); in read_zsreg()
137 static inline u8 read_zsdata(struct z8530_channel *c) in read_zsdata() argument
140 r=z8530_read_port(c->dataio); in read_zsdata()
155 static inline void write_zsreg(struct z8530_channel *c, u8 reg, u8 val) in write_zsreg() argument
159 spin_lock_irqsave(c->lock, flags); in write_zsreg()
162 z8530_write_port(c->ctrlio, reg); in write_zsreg()
163 z8530_write_port(c->ctrlio, val); in write_zsreg()
165 spin_unlock_irqrestore(c->lock, flags); in write_zsreg()
176 static inline void write_zsctrl(struct z8530_channel *c, u8 val) in write_zsctrl() argument
178 z8530_write_port(c->ctrlio, val); in write_zsctrl()
190 static inline void write_zsdata(struct z8530_channel *c, u8 val) in write_zsdata() argument
192 z8530_write_port(c->dataio, val); in write_zsdata()
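
Taken together, the helpers above spell out the SCC access convention: a register access is the register index written to the control port followed by a data transfer on the same port, with write_zsreg() serialising the two-step sequence under c->lock. A minimal caller sketch, assuming the z85230.h declarations are in scope; only names that appear in these excerpts are used, and the function name example_poke is made up:

static void example_poke(struct z8530_channel *c)
{
        u8 status = read_zsreg(c, R0);          /* RR0: live channel status */

        if (status & Tx_BUF_EMP)                /* transmitter buffer empty */
                write_zsreg(c, R5, c->regs[R5] | TxENAB);   /* shadow copy of WR5 lives in c->regs[] */
}
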
276 static void z8530_flush_fifo(struct z8530_channel *c) in z8530_flush_fifo() argument
278 read_zsreg(c, R1); in z8530_flush_fifo()
279 read_zsreg(c, R1); in z8530_flush_fifo()
280 read_zsreg(c, R1); in z8530_flush_fifo()
281 read_zsreg(c, R1); in z8530_flush_fifo()
282 if(c->dev->type==Z85230) in z8530_flush_fifo()
284 read_zsreg(c, R1); in z8530_flush_fifo()
285 read_zsreg(c, R1); in z8530_flush_fifo()
286 read_zsreg(c, R1); in z8530_flush_fifo()
287 read_zsreg(c, R1); in z8530_flush_fifo()
302 static void z8530_rtsdtr(struct z8530_channel *c, int set) in z8530_rtsdtr() argument
305 c->regs[5] |= (RTS | DTR); in z8530_rtsdtr()
307 c->regs[5] &= ~(RTS | DTR); in z8530_rtsdtr()
308 write_zsreg(c, R5, c->regs[5]); in z8530_rtsdtr()
335 static void z8530_rx(struct z8530_channel *c) in z8530_rx() argument
342 if(!(read_zsreg(c, R0)&1)) in z8530_rx()
344 ch=read_zsdata(c); in z8530_rx()
345 stat=read_zsreg(c, R1); in z8530_rx()
350 if(c->count < c->max) in z8530_rx()
352 *c->dptr++=ch; in z8530_rx()
353 c->count++; in z8530_rx()
365 if(c->skb) in z8530_rx()
366 c->dptr=c->skb->data; in z8530_rx()
367 c->count=0; in z8530_rx()
370 printk(KERN_WARNING "%s: overrun\n", c->dev->name); in z8530_rx()
371 c->rx_overrun++; in z8530_rx()
375 c->rx_crc_err++; in z8530_rx()
386 z8530_rx_done(c); in z8530_rx()
387 write_zsctrl(c, RES_Rx_CRC); in z8530_rx()
394 write_zsctrl(c, ERR_RES); in z8530_rx()
395 write_zsctrl(c, RES_H_IUS); in z8530_rx()
409 static void z8530_tx(struct z8530_channel *c) in z8530_tx() argument
411 while(c->txcount) { in z8530_tx()
413 if(!(read_zsreg(c, R0)&4)) in z8530_tx()
415 c->txcount--; in z8530_tx()
419 write_zsreg(c, R8, *c->tx_ptr++); in z8530_tx()
420 write_zsctrl(c, RES_H_IUS); in z8530_tx()
422 if(c->txcount==0) in z8530_tx()
424 write_zsctrl(c, RES_EOM_L); in z8530_tx()
425 write_zsreg(c, R10, c->regs[10]&~ABUNDER); in z8530_tx()
434 write_zsctrl(c, RES_Tx_P); in z8530_tx()
436 z8530_tx_done(c); in z8530_tx()
437 write_zsctrl(c, RES_H_IUS); in z8530_tx()
642 static void z8530_rx_clear(struct z8530_channel *c) in z8530_rx_clear() argument
649 read_zsdata(c); in z8530_rx_clear()
650 stat=read_zsreg(c, R1); in z8530_rx_clear()
653 write_zsctrl(c, RES_Rx_CRC); in z8530_rx_clear()
657 write_zsctrl(c, ERR_RES); in z8530_rx_clear()
658 write_zsctrl(c, RES_H_IUS); in z8530_rx_clear()
670 static void z8530_tx_clear(struct z8530_channel *c) in z8530_tx_clear() argument
672 write_zsctrl(c, RES_Tx_P); in z8530_tx_clear()
673 write_zsctrl(c, RES_H_IUS); in z8530_tx_clear()
800 int z8530_sync_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_open() argument
804 spin_lock_irqsave(c->lock, flags); in z8530_sync_open()
806 c->sync = 1; in z8530_sync_open()
807 c->mtu = dev->mtu+64; in z8530_sync_open()
808 c->count = 0; in z8530_sync_open()
809 c->skb = NULL; in z8530_sync_open()
810 c->skb2 = NULL; in z8530_sync_open()
811 c->irqs = &z8530_sync; in z8530_sync_open()
814 z8530_rx_done(c); /* Load the frame ring */ in z8530_sync_open()
815 z8530_rx_done(c); /* Load the backup frame */ in z8530_sync_open()
816 z8530_rtsdtr(c,1); in z8530_sync_open()
817 c->dma_tx = 0; in z8530_sync_open()
818 c->regs[R1]|=TxINT_ENAB; in z8530_sync_open()
819 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_open()
820 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_sync_open()
822 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_open()
838 int z8530_sync_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_close() argument
843 spin_lock_irqsave(c->lock, flags); in z8530_sync_close()
844 c->irqs = &z8530_nop; in z8530_sync_close()
845 c->max = 0; in z8530_sync_close()
846 c->sync = 0; in z8530_sync_close()
848 chk=read_zsreg(c,R0); in z8530_sync_close()
849 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_close()
850 z8530_rtsdtr(c,0); in z8530_sync_close()
852 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_close()
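
The open/close pair above is the programmed-I/O servicing path. A hedged sketch of how a card driver might wrap it in its net_device open and stop methods; dev_to_chan() is a made-up accessor standing in for however the driver locates its struct z8530_channel:

static int mycard_open(struct net_device *dev)
{
        struct z8530_channel *c = dev_to_chan(dev);     /* hypothetical accessor */
        int err;

        err = z8530_sync_open(dev, c);                  /* interrupt-driven PIO mode */
        if (err)
                return err;

        /* register table loading and rx hook installation omitted;
         * see z8530_channel_load() further down the listing */
        netif_start_queue(dev);
        return 0;
}

static int mycard_close(struct net_device *dev)
{
        struct z8530_channel *c = dev_to_chan(dev);

        netif_stop_queue(dev);
        return z8530_sync_close(dev, c);
}
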
868 int z8530_sync_dma_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_dma_open() argument
872 c->sync = 1; in z8530_sync_dma_open()
873 c->mtu = dev->mtu+64; in z8530_sync_dma_open()
874 c->count = 0; in z8530_sync_dma_open()
875 c->skb = NULL; in z8530_sync_dma_open()
876 c->skb2 = NULL; in z8530_sync_dma_open()
880 c->rxdma_on = 0; in z8530_sync_dma_open()
881 c->txdma_on = 0; in z8530_sync_dma_open()
889 if(c->mtu > PAGE_SIZE/2) in z8530_sync_dma_open()
892 c->rx_buf[0]=(void *)get_free_page(GFP_KERNEL|GFP_DMA); in z8530_sync_dma_open()
893 if(c->rx_buf[0]==NULL) in z8530_sync_dma_open()
895 c->rx_buf[1]=c->rx_buf[0]+PAGE_SIZE/2; in z8530_sync_dma_open()
897 c->tx_dma_buf[0]=(void *)get_free_page(GFP_KERNEL|GFP_DMA); in z8530_sync_dma_open()
898 if(c->tx_dma_buf[0]==NULL) in z8530_sync_dma_open()
900 free_page((unsigned long)c->rx_buf[0]); in z8530_sync_dma_open()
901 c->rx_buf[0]=NULL; in z8530_sync_dma_open()
904 c->tx_dma_buf[1]=c->tx_dma_buf[0]+PAGE_SIZE/2; in z8530_sync_dma_open()
906 c->tx_dma_used=0; in z8530_sync_dma_open()
907 c->dma_tx = 1; in z8530_sync_dma_open()
908 c->dma_num=0; in z8530_sync_dma_open()
909 c->dma_ready=1; in z8530_sync_dma_open()
915 spin_lock_irqsave(c->lock, flags); in z8530_sync_dma_open()
921 c->regs[R14]|= DTRREQ; in z8530_sync_dma_open()
922 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_dma_open()
924 c->regs[R1]&= ~TxINT_ENAB; in z8530_sync_dma_open()
925 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
931 c->regs[R1]|= WT_FN_RDYFN; in z8530_sync_dma_open()
932 c->regs[R1]|= WT_RDY_RT; in z8530_sync_dma_open()
933 c->regs[R1]|= INT_ERR_Rx; in z8530_sync_dma_open()
934 c->regs[R1]&= ~TxINT_ENAB; in z8530_sync_dma_open()
935 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
936 c->regs[R1]|= WT_RDY_ENAB; in z8530_sync_dma_open()
937 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
949 disable_dma(c->rxdma); in z8530_sync_dma_open()
950 clear_dma_ff(c->rxdma); in z8530_sync_dma_open()
951 set_dma_mode(c->rxdma, DMA_MODE_READ|0x10); in z8530_sync_dma_open()
952 set_dma_addr(c->rxdma, virt_to_bus(c->rx_buf[0])); in z8530_sync_dma_open()
953 set_dma_count(c->rxdma, c->mtu); in z8530_sync_dma_open()
954 enable_dma(c->rxdma); in z8530_sync_dma_open()
956 disable_dma(c->txdma); in z8530_sync_dma_open()
957 clear_dma_ff(c->txdma); in z8530_sync_dma_open()
958 set_dma_mode(c->txdma, DMA_MODE_WRITE); in z8530_sync_dma_open()
959 disable_dma(c->txdma); in z8530_sync_dma_open()
967 c->rxdma_on = 1; in z8530_sync_dma_open()
968 c->txdma_on = 1; in z8530_sync_dma_open()
969 c->tx_dma_used = 1; in z8530_sync_dma_open()
971 c->irqs = &z8530_dma_sync; in z8530_sync_dma_open()
972 z8530_rtsdtr(c,1); in z8530_sync_dma_open()
973 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_sync_dma_open()
975 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_dma_open()
991 int z8530_sync_dma_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_dma_close() argument
996 c->irqs = &z8530_nop; in z8530_sync_dma_close()
997 c->max = 0; in z8530_sync_dma_close()
998 c->sync = 0; in z8530_sync_dma_close()
1005 disable_dma(c->rxdma); in z8530_sync_dma_close()
1006 clear_dma_ff(c->rxdma); in z8530_sync_dma_close()
1008 c->rxdma_on = 0; in z8530_sync_dma_close()
1010 disable_dma(c->txdma); in z8530_sync_dma_close()
1011 clear_dma_ff(c->txdma); in z8530_sync_dma_close()
1014 c->txdma_on = 0; in z8530_sync_dma_close()
1015 c->tx_dma_used = 0; in z8530_sync_dma_close()
1017 spin_lock_irqsave(c->lock, flags); in z8530_sync_dma_close()
1023 c->regs[R1]&= ~WT_RDY_ENAB; in z8530_sync_dma_close()
1024 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_close()
1025 c->regs[R1]&= ~(WT_RDY_RT|WT_FN_RDYFN|INT_ERR_Rx); in z8530_sync_dma_close()
1026 c->regs[R1]|= INT_ALL_Rx; in z8530_sync_dma_close()
1027 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_close()
1028 c->regs[R14]&= ~DTRREQ; in z8530_sync_dma_close()
1029 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_dma_close()
1031 if(c->rx_buf[0]) in z8530_sync_dma_close()
1033 free_page((unsigned long)c->rx_buf[0]); in z8530_sync_dma_close()
1034 c->rx_buf[0]=NULL; in z8530_sync_dma_close()
1036 if(c->tx_dma_buf[0]) in z8530_sync_dma_close()
1038 free_page((unsigned long)c->tx_dma_buf[0]); in z8530_sync_dma_close()
1039 c->tx_dma_buf[0]=NULL; in z8530_sync_dma_close()
1041 chk=read_zsreg(c,R0); in z8530_sync_dma_close()
1042 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_dma_close()
1043 z8530_rtsdtr(c,0); in z8530_sync_dma_close()
1045 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_dma_close()
1062 int z8530_sync_txdma_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_txdma_open() argument
1067 c->sync = 1; in z8530_sync_txdma_open()
1068 c->mtu = dev->mtu+64; in z8530_sync_txdma_open()
1069 c->count = 0; in z8530_sync_txdma_open()
1070 c->skb = NULL; in z8530_sync_txdma_open()
1071 c->skb2 = NULL; in z8530_sync_txdma_open()
1079 if(c->mtu > PAGE_SIZE/2) in z8530_sync_txdma_open()
1082 c->tx_dma_buf[0]=(void *)get_free_page(GFP_KERNEL|GFP_DMA); in z8530_sync_txdma_open()
1083 if(c->tx_dma_buf[0]==NULL) in z8530_sync_txdma_open()
1086 c->tx_dma_buf[1] = c->tx_dma_buf[0] + PAGE_SIZE/2; in z8530_sync_txdma_open()
1089 spin_lock_irqsave(c->lock, flags); in z8530_sync_txdma_open()
1095 z8530_rx_done(c); in z8530_sync_txdma_open()
1096 z8530_rx_done(c); in z8530_sync_txdma_open()
1102 c->rxdma_on = 0; in z8530_sync_txdma_open()
1103 c->txdma_on = 0; in z8530_sync_txdma_open()
1105 c->tx_dma_used=0; in z8530_sync_txdma_open()
1106 c->dma_num=0; in z8530_sync_txdma_open()
1107 c->dma_ready=1; in z8530_sync_txdma_open()
1108 c->dma_tx = 1; in z8530_sync_txdma_open()
1117 c->regs[R14]|= DTRREQ; in z8530_sync_txdma_open()
1118 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_txdma_open()
1120 c->regs[R1]&= ~TxINT_ENAB; in z8530_sync_txdma_open()
1121 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_open()
1129 disable_dma(c->txdma); in z8530_sync_txdma_open()
1130 clear_dma_ff(c->txdma); in z8530_sync_txdma_open()
1131 set_dma_mode(c->txdma, DMA_MODE_WRITE); in z8530_sync_txdma_open()
1132 disable_dma(c->txdma); in z8530_sync_txdma_open()
1140 c->rxdma_on = 0; in z8530_sync_txdma_open()
1141 c->txdma_on = 1; in z8530_sync_txdma_open()
1142 c->tx_dma_used = 1; in z8530_sync_txdma_open()
1144 c->irqs = &z8530_txdma_sync; in z8530_sync_txdma_open()
1145 z8530_rtsdtr(c,1); in z8530_sync_txdma_open()
1146 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_sync_txdma_open()
1147 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_txdma_open()
1163 int z8530_sync_txdma_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_txdma_close() argument
1169 spin_lock_irqsave(c->lock, flags); in z8530_sync_txdma_close()
1171 c->irqs = &z8530_nop; in z8530_sync_txdma_close()
1172 c->max = 0; in z8530_sync_txdma_close()
1173 c->sync = 0; in z8530_sync_txdma_close()
1181 disable_dma(c->txdma); in z8530_sync_txdma_close()
1182 clear_dma_ff(c->txdma); in z8530_sync_txdma_close()
1183 c->txdma_on = 0; in z8530_sync_txdma_close()
1184 c->tx_dma_used = 0; in z8530_sync_txdma_close()
1192 c->regs[R1]&= ~WT_RDY_ENAB; in z8530_sync_txdma_close()
1193 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_close()
1194 c->regs[R1]&= ~(WT_RDY_RT|WT_FN_RDYFN|INT_ERR_Rx); in z8530_sync_txdma_close()
1195 c->regs[R1]|= INT_ALL_Rx; in z8530_sync_txdma_close()
1196 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_close()
1197 c->regs[R14]&= ~DTRREQ; in z8530_sync_txdma_close()
1198 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_txdma_close()
1200 if(c->tx_dma_buf[0]) in z8530_sync_txdma_close()
1202 free_page((unsigned long)c->tx_dma_buf[0]); in z8530_sync_txdma_close()
1203 c->tx_dma_buf[0]=NULL; in z8530_sync_txdma_close()
1205 chk=read_zsreg(c,R0); in z8530_sync_txdma_close()
1206 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_txdma_close()
1207 z8530_rtsdtr(c,0); in z8530_sync_txdma_close()
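
Between them, z8530_sync_open(), z8530_sync_dma_open() and z8530_sync_txdma_open() (with their matching close routines) cover the three servicing models: pure PIO, full DMA on two ISA channels, and transmit-only DMA. A hedged sketch of choosing one at open time; the have_rx_dma/have_tx_dma flags are illustrative, and for the DMA variants the channel's rxdma/txdma fields are assumed to have been set and the ISA DMA channels claimed beforehand:

static int mycard_open_channel(struct net_device *dev, struct z8530_channel *c,
                               int have_rx_dma, int have_tx_dma)
{
        if (have_rx_dma && have_tx_dma)
                return z8530_sync_dma_open(dev, c);     /* RX and TX both by DMA */
        if (have_tx_dma)
                return z8530_sync_txdma_open(dev, c);   /* DMA transmit, PIO receive */
        return z8530_sync_open(dev, c);                 /* everything by PIO */
}
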
1396 int z8530_channel_load(struct z8530_channel *c, u8 *rtable) in z8530_channel_load() argument
1400 spin_lock_irqsave(c->lock, flags); in z8530_channel_load()
1406 write_zsreg(c, R15, c->regs[15]|1); in z8530_channel_load()
1407 write_zsreg(c, reg&0x0F, *rtable); in z8530_channel_load()
1409 write_zsreg(c, R15, c->regs[15]&~1); in z8530_channel_load()
1410 c->regs[reg]=*rtable++; in z8530_channel_load()
1412 c->rx_function=z8530_null_rx; in z8530_channel_load()
1413 c->skb=NULL; in z8530_channel_load()
1414 c->tx_skb=NULL; in z8530_channel_load()
1415 c->tx_next_skb=NULL; in z8530_channel_load()
1416 c->mtu=1500; in z8530_channel_load()
1417 c->max=0; in z8530_channel_load()
1418 c->count=0; in z8530_channel_load()
1419 c->status=read_zsreg(c, R0); in z8530_channel_load()
1420 c->sync=1; in z8530_channel_load()
1421 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_channel_load()
1423 spin_unlock_irqrestore(c->lock, flags); in z8530_channel_load()
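
z8530_channel_load() walks a table of register/value byte pairs, writing each value through write_zsreg() (with the WR15 accesses around the write handling the chip's extended register set) and mirroring it into c->regs[]. A hedged sketch of the table shape and the call; the values are placeholders rather than a usable line configuration, and the 255 end marker is an assumption about the table format:

static void mycard_program_line(struct z8530_channel *c)
{
        static u8 example_table[] = {
                R4,  0x20,      /* placeholder value, not a real mode setting */
                R10, 0x00,      /* placeholder value */
                255             /* assumed end-of-table marker */
        };

        z8530_channel_load(c, example_table);   /* programs the SCC and refreshes the c->regs[] shadow */
}
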
1444 static void z8530_tx_begin(struct z8530_channel *c) in z8530_tx_begin() argument
1447 if(c->tx_skb) in z8530_tx_begin()
1450 c->tx_skb=c->tx_next_skb; in z8530_tx_begin()
1451 c->tx_next_skb=NULL; in z8530_tx_begin()
1452 c->tx_ptr=c->tx_next_ptr; in z8530_tx_begin()
1454 if(c->tx_skb==NULL) in z8530_tx_begin()
1457 if(c->dma_tx) in z8530_tx_begin()
1460 disable_dma(c->txdma); in z8530_tx_begin()
1464 if(get_dma_residue(c->txdma)) in z8530_tx_begin()
1466 c->stats.tx_dropped++; in z8530_tx_begin()
1467 c->stats.tx_fifo_errors++; in z8530_tx_begin()
1471 c->txcount=0; in z8530_tx_begin()
1475 c->txcount=c->tx_skb->len; in z8530_tx_begin()
1478 if(c->dma_tx) in z8530_tx_begin()
1488 disable_dma(c->txdma); in z8530_tx_begin()
1495 if(c->dev->type!=Z85230) in z8530_tx_begin()
1497 write_zsctrl(c, RES_Tx_CRC); in z8530_tx_begin()
1498 write_zsctrl(c, RES_EOM_L); in z8530_tx_begin()
1500 write_zsreg(c, R10, c->regs[10]&~ABUNDER); in z8530_tx_begin()
1501 clear_dma_ff(c->txdma); in z8530_tx_begin()
1502 set_dma_addr(c->txdma, virt_to_bus(c->tx_ptr)); in z8530_tx_begin()
1503 set_dma_count(c->txdma, c->txcount); in z8530_tx_begin()
1504 enable_dma(c->txdma); in z8530_tx_begin()
1506 write_zsctrl(c, RES_EOM_L); in z8530_tx_begin()
1507 write_zsreg(c, R5, c->regs[R5]|TxENAB); in z8530_tx_begin()
1513 write_zsreg(c, R10, c->regs[10]); in z8530_tx_begin()
1514 write_zsctrl(c, RES_Tx_CRC); in z8530_tx_begin()
1516 while(c->txcount && (read_zsreg(c,R0)&Tx_BUF_EMP)) in z8530_tx_begin()
1518 write_zsreg(c, R8, *c->tx_ptr++); in z8530_tx_begin()
1519 c->txcount--; in z8530_tx_begin()
1527 netif_wake_queue(c->netdevice); in z8530_tx_begin()
1541 static void z8530_tx_done(struct z8530_channel *c) in z8530_tx_done() argument
1546 if(c->tx_skb==NULL) in z8530_tx_done()
1549 skb=c->tx_skb; in z8530_tx_done()
1550 c->tx_skb=NULL; in z8530_tx_done()
1551 z8530_tx_begin(c); in z8530_tx_done()
1552 c->stats.tx_packets++; in z8530_tx_done()
1553 c->stats.tx_bytes+=skb->len; in z8530_tx_done()
1566 void z8530_null_rx(struct z8530_channel *c, struct sk_buff *skb) in z8530_null_rx() argument
1586 static void z8530_rx_done(struct z8530_channel *c) in z8530_rx_done() argument
1595 if(c->rxdma_on) in z8530_rx_done()
1602 int ready=c->dma_ready; in z8530_rx_done()
1603 unsigned char *rxb=c->rx_buf[c->dma_num]; in z8530_rx_done()
1612 disable_dma(c->rxdma); in z8530_rx_done()
1613 clear_dma_ff(c->rxdma); in z8530_rx_done()
1614 c->rxdma_on=0; in z8530_rx_done()
1615 ct=c->mtu-get_dma_residue(c->rxdma); in z8530_rx_done()
1618 c->dma_ready=0; in z8530_rx_done()
1627 c->dma_num^=1; in z8530_rx_done()
1628 set_dma_mode(c->rxdma, DMA_MODE_READ|0x10); in z8530_rx_done()
1629 set_dma_addr(c->rxdma, virt_to_bus(c->rx_buf[c->dma_num])); in z8530_rx_done()
1630 set_dma_count(c->rxdma, c->mtu); in z8530_rx_done()
1631 c->rxdma_on = 1; in z8530_rx_done()
1632 enable_dma(c->rxdma); in z8530_rx_done()
1635 write_zsreg(c, R0, RES_Rx_CRC); in z8530_rx_done()
1640 printk(KERN_WARNING "%s: DMA flip overrun!\n", c->netdevice->name); in z8530_rx_done()
1655 c->stats.rx_dropped++; in z8530_rx_done()
1656 printk(KERN_WARNING "%s: Memory squeeze.\n", c->netdevice->name); in z8530_rx_done()
1662 c->stats.rx_packets++; in z8530_rx_done()
1663 c->stats.rx_bytes+=ct; in z8530_rx_done()
1665 c->dma_ready=1; in z8530_rx_done()
1670 skb=c->skb; in z8530_rx_done()
1684 ct=c->count; in z8530_rx_done()
1686 c->skb = c->skb2; in z8530_rx_done()
1687 c->count = 0; in z8530_rx_done()
1688 c->max = c->mtu; in z8530_rx_done()
1689 if(c->skb) in z8530_rx_done()
1691 c->dptr = c->skb->data; in z8530_rx_done()
1692 c->max = c->mtu; in z8530_rx_done()
1696 c->count= 0; in z8530_rx_done()
1697 c->max = 0; in z8530_rx_done()
1701 c->skb2 = dev_alloc_skb(c->mtu); in z8530_rx_done()
1702 if(c->skb2==NULL) in z8530_rx_done()
1704 c->netdevice->name); in z8530_rx_done()
1707 skb_put(c->skb2,c->mtu); in z8530_rx_done()
1709 c->stats.rx_packets++; in z8530_rx_done()
1710 c->stats.rx_bytes+=ct; in z8530_rx_done()
1719 c->rx_function(c,skb); in z8530_rx_done()
1723 c->stats.rx_dropped++; in z8530_rx_done()
1724 printk(KERN_ERR "%s: Lost a frame\n", c->netdevice->name); in z8530_rx_done()
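
Completed frames are handed to c->rx_function(c, skb), the hook that z8530_channel_load() above defaults to z8530_null_rx, so the card or protocol driver supplies the delivery routine. A hedged sketch with the same signature as z8530_null_rx(); the protocol constant is a placeholder that depends on the line discipline:

static void mycard_rx(struct z8530_channel *c, struct sk_buff *skb)
{
        skb->dev = c->netdevice;
        skb->protocol = htons(ETH_P_HDLC);      /* placeholder protocol */
        netif_rx(skb);                          /* called from IRQ context, so netif_rx() */
}

A driver would install the hook once the channel is loaded, e.g. c->rx_function = mycard_rx;
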
1759 int z8530_queue_xmit(struct z8530_channel *c, struct sk_buff *skb) in z8530_queue_xmit() argument
1763 netif_stop_queue(c->netdevice); in z8530_queue_xmit()
1764 if(c->tx_next_skb) in z8530_queue_xmit()
1776 …if(c->dma_tx && ((unsigned long)(virt_to_bus(skb->data+skb->len))>=16*1024*1024 || spans_boundary(… in z8530_queue_xmit()
1785 c->tx_next_ptr=c->tx_dma_buf[c->tx_dma_used]; in z8530_queue_xmit()
1786 c->tx_dma_used^=1; /* Flip temp buffer */ in z8530_queue_xmit()
1787 memcpy(c->tx_next_ptr, skb->data, skb->len); in z8530_queue_xmit()
1790 c->tx_next_ptr=skb->data; in z8530_queue_xmit()
1792 c->tx_next_skb=skb; in z8530_queue_xmit()
1795 spin_lock_irqsave(c->lock, flags); in z8530_queue_xmit()
1796 z8530_tx_begin(c); in z8530_queue_xmit()
1797 spin_unlock_irqrestore(c->lock, flags); in z8530_queue_xmit()
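
z8530_queue_xmit() queues at most one frame ahead (tx_next_skb), stops the device queue itself, and bounces the data through one of the two tx_dma_buf halves when the buffer sits above the 16MB ISA DMA limit or crosses a boundary. A card driver's transmit method can therefore be a thin wrapper; dev_to_chan() is again the made-up accessor from the earlier sketches:

static int mycard_start_xmit(struct sk_buff *skb, struct net_device *dev)
{
        return z8530_queue_xmit(dev_to_chan(dev), skb);
}
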
1815 struct net_device_stats *z8530_get_stats(struct z8530_channel *c) in z8530_get_stats() argument
1817 return &c->stats; in z8530_get_stats()
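
A matching sketch for the driver's get_stats method, again using the hypothetical dev_to_chan():

static struct net_device_stats *mycard_get_stats(struct net_device *dev)
{
        return z8530_get_stats(dev_to_chan(dev));
}
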