Lines matching refs: self
81 static int via_ircc_dma_receive(struct via_ircc_cb *self);
82 static int via_ircc_dma_receive_complete(struct via_ircc_cb *self,
88 static void via_hw_init(struct via_ircc_cb *self);
89 static void via_ircc_change_speed(struct via_ircc_cb *self, __u32 baud);
91 static int via_ircc_is_receiving(struct via_ircc_cb *self);
100 static int RxTimerHandler(struct via_ircc_cb *self, int iobase);
101 static void hwreset(struct via_ircc_cb *self);
102 static int via_ircc_dma_xmit(struct via_ircc_cb *self, u16 iobase);
103 static int upload_rxdata(struct via_ircc_cb *self, int iobase);
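The prototypes above are the driver's internal API around a single per-device control block, struct via_ircc_cb. Below is a user-space model of that struct, reconstructed purely from the field references in this listing; the member types are stand-ins for kernel types such as spinlock_t and dma_addr_t, so treat the exact layout as an assumption:

    #include <stdint.h>

    /* User-space model of the driver's control block.  Field names come
     * from the references in this listing; types are simplified. */
    struct via_io {                 /* models the io member */
            int cfg_base, fir_base, fir_ext;
            int irq, dma, dma2;
            int fifo_size, dongle_id;
            uint32_t speed;
            int direction;          /* IO_XMIT or IO_RECV */
    };

    struct via_buf {                /* models the rx_buff/tx_buff member */
            int truesize, len;
            uint8_t *head, *data;   /* head = buffer start, data = cursor */
            int in_frame, state;
    };

    struct via_ircc_cb_model {
            void *netdev;                        /* struct net_device * */
            struct via_io io;
            struct via_buf rx_buff, tx_buff;
            uintptr_t rx_buff_dma, tx_buff_dma;  /* dma_addr_t in the driver */
            int chip_id;                         /* 0x3076 or 0x3096 */
            int RxDataReady, RxLastCount, RetryCount;
            uint32_t new_speed;
    };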
292 struct via_ircc_cb *self; in via_ircc_open() local
302 self = netdev_priv(dev); in via_ircc_open()
303 self->netdev = dev; in via_ircc_open()
304 spin_lock_init(&self->lock); in via_ircc_open()
306 pci_set_drvdata(pdev, self); in via_ircc_open()
309 self->io.cfg_base = info->cfg_base; in via_ircc_open()
310 self->io.fir_base = info->fir_base; in via_ircc_open()
311 self->io.irq = info->irq; in via_ircc_open()
312 self->io.fir_ext = CHIP_IO_EXTENT; in via_ircc_open()
313 self->io.dma = info->dma; in via_ircc_open()
314 self->io.dma2 = info->dma2; in via_ircc_open()
315 self->io.fifo_size = 32; in via_ircc_open()
316 self->chip_id = id; in via_ircc_open()
317 self->st_fifo.len = 0; in via_ircc_open()
318 self->RxDataReady = 0; in via_ircc_open()
321 if (!request_region(self->io.fir_base, self->io.fir_ext, driver_name)) { in via_ircc_open()
323 __func__, self->io.fir_base); in via_ircc_open()
329 irda_init_max_qos_capabilies(&self->qos); in via_ircc_open()
333 dongle_id = via_ircc_read_dongle_id(self->io.fir_base); in via_ircc_open()
334 self->io.dongle_id = dongle_id; in via_ircc_open()
338 switch( self->io.dongle_id ){ in via_ircc_open()
340 self->qos.baud_rate.bits = in via_ircc_open()
345 self->qos.baud_rate.bits = in via_ircc_open()
357 self->qos.min_turn_time.bits = qos_mtt_bits; in via_ircc_open()
358 irda_qos_bits_to_value(&self->qos); in via_ircc_open()
361 self->rx_buff.truesize = 14384 + 2048; in via_ircc_open()
362 self->tx_buff.truesize = 14384 + 2048; in via_ircc_open()
365 self->rx_buff.head = in via_ircc_open()
366 dma_alloc_coherent(&pdev->dev, self->rx_buff.truesize, in via_ircc_open()
367 &self->rx_buff_dma, GFP_KERNEL); in via_ircc_open()
368 if (self->rx_buff.head == NULL) { in via_ircc_open()
372 memset(self->rx_buff.head, 0, self->rx_buff.truesize); in via_ircc_open()
374 self->tx_buff.head = in via_ircc_open()
375 dma_alloc_coherent(&pdev->dev, self->tx_buff.truesize, in via_ircc_open()
376 &self->tx_buff_dma, GFP_KERNEL); in via_ircc_open()
377 if (self->tx_buff.head == NULL) { in via_ircc_open()
381 memset(self->tx_buff.head, 0, self->tx_buff.truesize); in via_ircc_open()
383 self->rx_buff.in_frame = FALSE; in via_ircc_open()
384 self->rx_buff.state = OUTSIDE_FRAME; in via_ircc_open()
385 self->tx_buff.data = self->tx_buff.head; in via_ircc_open()
386 self->rx_buff.data = self->rx_buff.head; in via_ircc_open()
389 self->tx_fifo.len = self->tx_fifo.ptr = self->tx_fifo.free = 0; in via_ircc_open()
390 self->tx_fifo.tail = self->tx_buff.head; in via_ircc_open()
403 self->io.speed = 9600; in via_ircc_open()
404 via_hw_init(self); in via_ircc_open()
407 dma_free_coherent(&pdev->dev, self->tx_buff.truesize, in via_ircc_open()
408 self->tx_buff.head, self->tx_buff_dma); in via_ircc_open()
410 dma_free_coherent(&pdev->dev, self->rx_buff.truesize, in via_ircc_open()
411 self->rx_buff.head, self->rx_buff_dma); in via_ircc_open()
413 release_region(self->io.fir_base, self->io.fir_ext); in via_ircc_open()
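Lines 292-413 are the classic probe-time pattern: claim the FIR I/O region, allocate two DMA-coherent buffers, initialise the hardware, and unwind in reverse order on any failure. (On recent kernels dma_alloc_coherent() already returns zeroed memory, making the explicit memset redundant.) A minimal user-space sketch of the same goto-based unwinding ladder, with hypothetical names:

    #include <stdlib.h>

    /* Models the err_out paths of via_ircc_open(): each failure label
     * frees exactly what was acquired before it, in reverse order. */
    static int open_sketch(size_t truesize)
    {
            void *rx, *tx;

            rx = calloc(1, truesize);       /* dma_alloc_coherent(), rx side */
            if (!rx)
                    goto err_out1;
            tx = calloc(1, truesize);       /* dma_alloc_coherent(), tx side */
            if (!tx)
                    goto err_out2;
            return 0;                       /* success: netdev registered next */

    err_out2:
            free(rx);                       /* undo the rx allocation */
    err_out1:
            return -1;                      /* driver also release_region()s here */
    }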
428 struct via_ircc_cb *self = pci_get_drvdata(pdev); in via_remove_one() local
433 iobase = self->io.fir_base; in via_remove_one()
437 unregister_netdev(self->netdev); in via_remove_one()
441 __func__, self->io.fir_base); in via_remove_one()
442 release_region(self->io.fir_base, self->io.fir_ext); in via_remove_one()
443 if (self->tx_buff.head) in via_remove_one()
444 dma_free_coherent(&pdev->dev, self->tx_buff.truesize, in via_remove_one()
445 self->tx_buff.head, self->tx_buff_dma); in via_remove_one()
446 if (self->rx_buff.head) in via_remove_one()
447 dma_free_coherent(&pdev->dev, self->rx_buff.truesize, in via_remove_one()
448 self->rx_buff.head, self->rx_buff_dma); in via_remove_one()
451 free_netdev(self->netdev); in via_remove_one()
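via_remove_one() is the mirror image: unregister the netdev first so no new I/O can start, then release the I/O region and DMA buffers, and free the netdev last (free_netdev() also frees the private via_ircc_cb, so it must come after every self-> access). The ordering in outline, with the kernel calls shown as comments:

    /* Teardown order modelled on via_remove_one() */
    static void remove_sketch(void)
    {
            /* 1. unregister_netdev(self->netdev)  - stop the network stack  */
            /* 2. release_region(fir_base, ext)    - give back the I/O ports */
            /* 3. dma_free_coherent(tx), then (rx) - only if head != NULL    */
            /* 4. free_netdev(self->netdev)        - frees self as well      */
    }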
463 static void via_hw_init(struct via_ircc_cb *self) in via_hw_init() argument
465 int iobase = self->io.fir_base; in via_hw_init()
505 self->io.speed = 9600; in via_hw_init()
506 self->st_fifo.len = 0; in via_hw_init()
508 via_ircc_change_dongle_speed(iobase, self->io.speed, in via_hw_init()
509 self->io.dongle_id); in via_hw_init()
675 static void via_ircc_change_speed(struct via_ircc_cb *self, __u32 speed) in via_ircc_change_speed() argument
677 struct net_device *dev = self->netdev; in via_ircc_change_speed()
681 iobase = self->io.fir_base; in via_ircc_change_speed()
683 self->io.speed = speed; in via_ircc_change_speed()
736 via_ircc_change_dongle_speed(iobase, speed, self->io.dongle_id); in via_ircc_change_speed()
760 via_ircc_dma_receive(self); in via_ircc_change_speed()
777 struct via_ircc_cb *self; in via_ircc_hard_xmit_sir() local
782 self = netdev_priv(dev); in via_ircc_hard_xmit_sir()
783 IRDA_ASSERT(self != NULL, return NETDEV_TX_OK;); in via_ircc_hard_xmit_sir()
784 iobase = self->io.fir_base; in via_ircc_hard_xmit_sir()
789 if ((speed != self->io.speed) && (speed != -1)) { in via_ircc_hard_xmit_sir()
792 via_ircc_change_speed(self, speed); in via_ircc_hard_xmit_sir()
797 self->new_speed = speed; in via_ircc_hard_xmit_sir()
807 spin_lock_irqsave(&self->lock, flags); in via_ircc_hard_xmit_sir()
808 self->tx_buff.data = self->tx_buff.head; in via_ircc_hard_xmit_sir()
809 self->tx_buff.len = in via_ircc_hard_xmit_sir()
810 async_wrap_skb(skb, self->tx_buff.data, in via_ircc_hard_xmit_sir()
811 self->tx_buff.truesize); in via_ircc_hard_xmit_sir()
813 dev->stats.tx_bytes += self->tx_buff.len; in via_ircc_hard_xmit_sir()
815 SetBaudRate(iobase, self->io.speed); in via_ircc_hard_xmit_sir()
833 irda_setup_dma(self->io.dma, self->tx_buff_dma, self->tx_buff.len, in via_ircc_hard_xmit_sir()
836 SetSendByte(iobase, self->tx_buff.len); in via_ircc_hard_xmit_sir()
841 spin_unlock_irqrestore(&self->lock, flags); in via_ircc_hard_xmit_sir()
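The SIR transmit path (lines 777-841) never DMAs the skb directly: async_wrap_skb() first encodes the frame into tx_buff (adding the flag bytes and byte stuffing), and only the wrapped copy is handed to the DMA engine, with SetSendByte() told the wrapped length. A user-space model of that wrap step, using a stand-in wrapper that only performs the escape-byte stuffing (the real IrDA wrapper also adds BOF/EOF flags and a CRC):

    #include <stdint.h>
    #include <stddef.h>

    /* Stand-in for async_wrap_skb(): copies src into dst while escaping
     * the IrDA control byte 0x7d, returning the wrapped length. */
    static size_t wrap_sketch(const uint8_t *src, size_t len,
                              uint8_t *dst, size_t dstsize)
    {
            size_t out = 0;

            for (size_t i = 0; i < len && out + 2 <= dstsize; i++) {
                    if (src[i] == 0x7d) {           /* CE, the escape byte */
                            dst[out++] = 0x7d;
                            dst[out++] = src[i] ^ 0x20;
                    } else {
                            dst[out++] = src[i];
                    }
            }
            return out;   /* becomes tx_buff.len, then SetSendByte(iobase, len) */
    }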
849 struct via_ircc_cb *self; in via_ircc_hard_xmit_fir() local
854 self = netdev_priv(dev); in via_ircc_hard_xmit_fir()
855 iobase = self->io.fir_base; in via_ircc_hard_xmit_fir()
857 if (self->st_fifo.len) in via_ircc_hard_xmit_fir()
859 if (self->chip_id == 0x3076) in via_ircc_hard_xmit_fir()
865 if ((speed != self->io.speed) && (speed != -1)) { in via_ircc_hard_xmit_fir()
867 via_ircc_change_speed(self, speed); in via_ircc_hard_xmit_fir()
872 self->new_speed = speed; in via_ircc_hard_xmit_fir()
874 spin_lock_irqsave(&self->lock, flags); in via_ircc_hard_xmit_fir()
875 self->tx_fifo.queue[self->tx_fifo.free].start = self->tx_fifo.tail; in via_ircc_hard_xmit_fir()
876 self->tx_fifo.queue[self->tx_fifo.free].len = skb->len; in via_ircc_hard_xmit_fir()
878 self->tx_fifo.tail += skb->len; in via_ircc_hard_xmit_fir()
881 self->tx_fifo.queue[self->tx_fifo.free].start, skb->len); in via_ircc_hard_xmit_fir()
882 self->tx_fifo.len++; in via_ircc_hard_xmit_fir()
883 self->tx_fifo.free++; in via_ircc_hard_xmit_fir()
885 via_ircc_dma_xmit(self, iobase); in via_ircc_hard_xmit_fir()
890 spin_unlock_irqrestore(&self->lock, flags); in via_ircc_hard_xmit_fir()
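The FIR path (lines 849-890) instead queues whole frames into a software tx_fifo inside one big coherent buffer: each entry records a start pointer and a length, the tail cursor advances by skb->len, and len/free count the entries. A minimal model of that bookkeeping; the field names follow the listing, while the buffer and queue sizes are assumptions:

    #include <stdint.h>
    #include <string.h>

    #define TX_BUF_SZ   14384       /* truesize from the listing, minus slack */
    #define MAX_TX_SLOT 8           /* assumed queue depth */

    struct tx_slot { uint8_t *start; int len; };

    static uint8_t tx_buff[TX_BUF_SZ];

    static struct {
            int len, ptr, free;     /* queued count, consume index, produce index */
            uint8_t *tail;          /* next free byte in the big tx buffer */
            struct tx_slot queue[MAX_TX_SLOT];
    } tx_fifo = { .tail = tx_buff };

    /* Models the enqueue in via_ircc_hard_xmit_fir() */
    static void tx_enqueue(const uint8_t *frame, int len)
    {
            tx_fifo.queue[tx_fifo.free].start = tx_fifo.tail;
            tx_fifo.queue[tx_fifo.free].len = len;
            memcpy(tx_fifo.tail, frame, len);   /* the skb copy in the driver */
            tx_fifo.tail += len;
            tx_fifo.len++;
            tx_fifo.free++;
    }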
895 static int via_ircc_dma_xmit(struct via_ircc_cb *self, u16 iobase) in via_ircc_dma_xmit() argument
898 self->io.direction = IO_XMIT; in via_ircc_dma_xmit()
910 irda_setup_dma(self->io.dma, in via_ircc_dma_xmit()
911 ((u8 *)self->tx_fifo.queue[self->tx_fifo.ptr].start - in via_ircc_dma_xmit()
912 self->tx_buff.head) + self->tx_buff_dma, in via_ircc_dma_xmit()
913 self->tx_fifo.queue[self->tx_fifo.ptr].len, DMA_TX_MODE); in via_ircc_dma_xmit()
915 __func__, self->tx_fifo.ptr, in via_ircc_dma_xmit()
916 self->tx_fifo.queue[self->tx_fifo.ptr].len, in via_ircc_dma_xmit()
917 self->tx_fifo.len); in via_ircc_dma_xmit()
919 SetSendByte(iobase, self->tx_fifo.queue[self->tx_fifo.ptr].len); in via_ircc_dma_xmit()
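via_ircc_dma_xmit() (lines 895-919) then converts the queued CPU pointer back into a bus address for the DMA controller: the offset of queue[ptr].start within tx_buff.head is added to tx_buff_dma, the base handle returned by dma_alloc_coherent(). The same arithmetic in isolation, with simplified types:

    #include <stdint.h>

    /* Bus address of one queued frame: its offset inside the coherent
     * buffer plus the DMA base of that buffer. */
    static uintptr_t slot_bus_addr(const uint8_t *slot_start,
                                   const uint8_t *buf_head,
                                   uintptr_t buf_dma)
    {
            return (uintptr_t)(slot_start - buf_head) + buf_dma;
    }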
933 static int via_ircc_dma_xmit_complete(struct via_ircc_cb *self) in via_ircc_dma_xmit_complete() argument
941 iobase = self->io.fir_base; in via_ircc_dma_xmit_complete()
948 self->netdev->stats.tx_errors++; in via_ircc_dma_xmit_complete()
949 self->netdev->stats.tx_fifo_errors++; in via_ircc_dma_xmit_complete()
950 hwreset(self); in via_ircc_dma_xmit_complete()
953 self->netdev->stats.tx_packets++; in via_ircc_dma_xmit_complete()
958 if (self->new_speed) { in via_ircc_dma_xmit_complete()
959 via_ircc_change_speed(self, self->new_speed); in via_ircc_dma_xmit_complete()
960 self->new_speed = 0; in via_ircc_dma_xmit_complete()
965 if (self->tx_fifo.len) { in via_ircc_dma_xmit_complete()
966 self->tx_fifo.len--; in via_ircc_dma_xmit_complete()
967 self->tx_fifo.ptr++; in via_ircc_dma_xmit_complete()
973 self->tx_fifo.len, self->tx_fifo.ptr, self->tx_fifo.free); in via_ircc_dma_xmit_complete()
983 self->tx_fifo.len = self->tx_fifo.ptr = self->tx_fifo.free = 0; in via_ircc_dma_xmit_complete()
984 self->tx_fifo.tail = self->tx_buff.head; in via_ircc_dma_xmit_complete()
991 netif_wake_queue(self->netdev); in via_ircc_dma_xmit_complete()
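On transmit completion (lines 933-991) the driver consumes one slot (len--, ptr++) and kicks the next queued frame; only when the queue drains does it reset len/ptr/free and rewind tail to tx_buff.head, reclaiming the buffer space in one go before waking the netif queue. The matching dequeue, reusing the tx_fifo/tx_buff model from the enqueue sketch above:

    /* Models the consume step in via_ircc_dma_xmit_complete() */
    static int tx_complete(void)
    {
            if (tx_fifo.len) {              /* more frames queued: consume one */
                    tx_fifo.len--;
                    tx_fifo.ptr++;
                    return 1;               /* caller starts the next DMA */
            }
            /* queue empty: recycle the whole buffer at once */
            tx_fifo.len = tx_fifo.ptr = tx_fifo.free = 0;
            tx_fifo.tail = tx_buff;
            return 0;                       /* caller wakes the netif queue */
    }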
1002 static int via_ircc_dma_receive(struct via_ircc_cb *self) in via_ircc_dma_receive() argument
1006 iobase = self->io.fir_base; in via_ircc_dma_receive()
1010 self->tx_fifo.len = self->tx_fifo.ptr = self->tx_fifo.free = 0; in via_ircc_dma_receive()
1011 self->tx_fifo.tail = self->tx_buff.head; in via_ircc_dma_receive()
1012 self->RxDataReady = 0; in via_ircc_dma_receive()
1013 self->io.direction = IO_RECV; in via_ircc_dma_receive()
1014 self->rx_buff.data = self->rx_buff.head; in via_ircc_dma_receive()
1015 self->st_fifo.len = self->st_fifo.pending_bytes = 0; in via_ircc_dma_receive()
1016 self->st_fifo.tail = self->st_fifo.head = 0; in via_ircc_dma_receive()
1031 irda_setup_dma(self->io.dma2, self->rx_buff_dma, in via_ircc_dma_receive()
1032 self->rx_buff.truesize, DMA_RX_MODE); in via_ircc_dma_receive()
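Arming receive (lines 1002-1032) is a full reset: both software fifos and RxDataReady are cleared, the direction flips to IO_RECV, the data cursor rewinds to rx_buff.head, and the second DMA channel (dma2) is programmed with the entire truesize buffer. A condensed model:

    struct rx_state { unsigned char *data, *head; int ready, direction; };

    /* Models via_ircc_dma_receive(): all software state is rewound
     * before the DMA channel is given the whole coherent rx buffer. */
    static void rx_arm(struct rx_state *rx, int io_recv)
    {
            rx->ready = 0;                  /* self->RxDataReady = 0 */
            rx->direction = io_recv;        /* self->io.direction = IO_RECV */
            rx->data = rx->head;            /* cursor back to buffer start */
            /* irda_setup_dma(dma2, rx_buff_dma, truesize, DMA_RX_MODE); */
    }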
1046 static int via_ircc_dma_receive_complete(struct via_ircc_cb *self, in via_ircc_dma_receive_complete() argument
1054 iobase = self->io.fir_base; in via_ircc_dma_receive_complete()
1055 st_fifo = &self->st_fifo; in via_ircc_dma_receive_complete()
1057 if (self->io.speed < 4000000) { //Speed below FIR in via_ircc_dma_receive_complete()
1058 len = GetRecvByte(iobase, self); in via_ircc_dma_receive_complete()
1065 if (self->chip_id == 0x3076) { in via_ircc_dma_receive_complete()
1067 skb->data[i] = self->rx_buff.data[i * 2]; in via_ircc_dma_receive_complete()
1069 if (self->chip_id == 0x3096) { in via_ircc_dma_receive_complete()
1072 self->rx_buff.data[i]; in via_ircc_dma_receive_complete()
1076 self->rx_buff.data += len; in via_ircc_dma_receive_complete()
1077 self->netdev->stats.rx_bytes += len; in via_ircc_dma_receive_complete()
1078 self->netdev->stats.rx_packets++; in via_ircc_dma_receive_complete()
1079 skb->dev = self->netdev; in via_ircc_dma_receive_complete()
1087 len = GetRecvByte(iobase, self); in via_ircc_dma_receive_complete()
1092 __func__, len, RxCurCount(iobase, self), in via_ircc_dma_receive_complete()
1093 self->RxLastCount); in via_ircc_dma_receive_complete()
1094 hwreset(self); in via_ircc_dma_receive_complete()
1099 st_fifo->len, len - 4, RxCurCount(iobase, self)); in via_ircc_dma_receive_complete()
1108 self->RxDataReady = 0; in via_ircc_dma_receive_complete()
1137 (self->rx_buff.data == NULL) || (len < 6)) { in via_ircc_dma_receive_complete()
1138 self->netdev->stats.rx_dropped++; in via_ircc_dma_receive_complete()
1145 skb_copy_to_linear_data(skb, self->rx_buff.data, len - 4); in via_ircc_dma_receive_complete()
1147 len - 4, self->rx_buff.data); in via_ircc_dma_receive_complete()
1150 self->rx_buff.data += len; in via_ircc_dma_receive_complete()
1151 self->netdev->stats.rx_bytes += len; in via_ircc_dma_receive_complete()
1152 self->netdev->stats.rx_packets++; in via_ircc_dma_receive_complete()
1153 skb->dev = self->netdev; in via_ircc_dma_receive_complete()
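The receive-completion path below 4 Mb/s (lines 1057-1079) contains the one real chip quirk in this listing: on the 0x3076 part each payload byte arrives interleaved with a padding byte, so the copy reads rx_buff.data[i * 2], while the 0x3096 copies bytes 1:1. A model of that de-interleaving copy; the stride is taken from the listing, everything else is assumption:

    /* Models the chip_id-dependent copy in via_ircc_dma_receive_complete() */
    static void rx_copy(unsigned char *dst, const unsigned char *src,
                        int len, int chip_id)
    {
            if (chip_id == 0x3076) {
                    for (int i = 0; i < len; i++)
                            dst[i] = src[i * 2];    /* skip the padding byte */
            } else {                                /* 0x3096: plain copy */
                    for (int i = 0; i < len; i++)
                            dst[i] = src[i];
            }
    }

In the FIR branch (lines 1087-1153) the driver copies len - 4 bytes into the skb, apparently dropping the four trailing CRC bytes of the hardware frame.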
1167 static int upload_rxdata(struct via_ircc_cb *self, int iobase) in upload_rxdata() argument
1172 st_fifo = &self->st_fifo; in upload_rxdata()
1174 len = GetRecvByte(iobase, self); in upload_rxdata()
1179 self->netdev->stats.rx_dropped++; in upload_rxdata()
1185 self->netdev->stats.rx_dropped++; in upload_rxdata()
1190 skb_copy_to_linear_data(skb, self->rx_buff.data, len - 4 + 1); in upload_rxdata()
1196 self->rx_buff.data += len; in upload_rxdata()
1197 self->netdev->stats.rx_bytes += len; in upload_rxdata()
1198 self->netdev->stats.rx_packets++; in upload_rxdata()
1199 skb->dev = self->netdev; in upload_rxdata()
1217 static int RxTimerHandler(struct via_ircc_cb *self, int iobase) in RxTimerHandler() argument
1224 st_fifo = &self->st_fifo; in RxTimerHandler()
1226 if (CkRxRecv(iobase, self)) { in RxTimerHandler()
1228 self->RetryCount = 0; in RxTimerHandler()
1230 self->RxDataReady++; in RxTimerHandler()
1233 self->RetryCount++; in RxTimerHandler()
1235 if ((self->RetryCount >= 1) || in RxTimerHandler()
1236 ((st_fifo->pending_bytes + 2048) > self->rx_buff.truesize) || in RxTimerHandler()
1253 (self->rx_buff.data == NULL) || (len < 6)) { in RxTimerHandler()
1254 self->netdev->stats.rx_dropped++; in RxTimerHandler()
1259 skb_copy_to_linear_data(skb, self->rx_buff.data, len - 4); in RxTimerHandler()
1265 self->rx_buff.data += len; in RxTimerHandler()
1266 self->netdev->stats.rx_bytes += len; in RxTimerHandler()
1267 self->netdev->stats.rx_packets++; in RxTimerHandler()
1268 skb->dev = self->netdev; in RxTimerHandler()
1273 self->RetryCount = 0; in RxTimerHandler()
1285 (RxCurCount(iobase, self) != self->RxLastCount)) { in RxTimerHandler()
1286 upload_rxdata(self, iobase); in RxTimerHandler()
1287 if (irda_device_txqueue_empty(self->netdev)) in RxTimerHandler()
1288 via_ircc_dma_receive(self); in RxTimerHandler()
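RxTimerHandler() (lines 1217-1288) is a polling fallback: while the chip still signals reception it clears RetryCount and bumps RxDataReady; once a retry has elapsed, or the pending bytes would no longer leave 2048 bytes of headroom in rx_buff, it drains the status fifo into skbs and re-arms DMA receive. The flush condition in isolation:

    /* Models the flush test in RxTimerHandler(): flush once we retried
     * at least once or the next frame could overrun the rx buffer. */
    static int should_flush(int retry_count, int pending_bytes, int truesize)
    {
            return retry_count >= 1 || pending_bytes + 2048 > truesize;
    }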
1308 struct via_ircc_cb *self = netdev_priv(dev); in via_ircc_interrupt() local
1312 iobase = self->io.fir_base; in via_ircc_interrupt()
1313 spin_lock(&self->lock); in via_ircc_interrupt()
1324 self->EventFlag.TimeOut++; in via_ircc_interrupt()
1326 if (self->io.direction == IO_XMIT) { in via_ircc_interrupt()
1327 via_ircc_dma_xmit(self, iobase); in via_ircc_interrupt()
1329 if (self->io.direction == IO_RECV) { in via_ircc_interrupt()
1333 if (self->RxDataReady > 30) { in via_ircc_interrupt()
1334 hwreset(self); in via_ircc_interrupt()
1335 if (irda_device_txqueue_empty(self->netdev)) { in via_ircc_interrupt()
1336 via_ircc_dma_receive(self); in via_ircc_interrupt()
1339 RxTimerHandler(self, iobase); in via_ircc_interrupt()
1354 self->EventFlag.EOMessage++; // read and will auto clean in via_ircc_interrupt()
1355 if (via_ircc_dma_xmit_complete(self)) { in via_ircc_interrupt()
1357 (self->netdev)) { in via_ircc_interrupt()
1358 via_ircc_dma_receive(self); in via_ircc_interrupt()
1361 self->EventFlag.Unknown++; in via_ircc_interrupt()
1383 if (via_ircc_dma_receive_complete(self, iobase)) { in via_ircc_interrupt()
1385 via_ircc_dma_receive(self); in via_ircc_interrupt()
1391 RxCurCount(iobase, self), in via_ircc_interrupt()
1392 self->RxLastCount); in via_ircc_interrupt()
1400 hwreset(self); //F01 in via_ircc_interrupt()
1402 via_ircc_dma_receive(self); in via_ircc_interrupt()
1406 spin_unlock(&self->lock); in via_ircc_interrupt()
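The interrupt handler (lines 1308-1406) is the dispatcher tying all of this together: under self->lock it reads the event flags and branches on self->io.direction. Timeouts on the transmit side re-kick via_ircc_dma_xmit(); on the receive side they either hwreset() (after more than 30 ready events) or fall through to RxTimerHandler(), and every completed operation re-arms receive once the tx queue is empty. A control-flow skeleton, with the driver's own helpers shown as comments:

    /* Skeleton of the timeout dispatch in via_ircc_interrupt() */
    static void irq_sketch(int direction, int rx_data_ready)
    {
            /* spin_lock(&self->lock); read the interrupt status */
            if (direction == 0 /* IO_XMIT */) {
                    /* via_ircc_dma_xmit(self, iobase); */
            } else {        /* IO_RECV */
                    if (rx_data_ready > 30) {
                            /* hwreset(self); then re-arm dma receive */
                    } else {
                            /* RxTimerHandler(self, iobase); */
                    }
            }
            /* spin_unlock(&self->lock); */
    }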
1410 static void hwreset(struct via_ircc_cb *self) in hwreset() argument
1413 iobase = self->io.fir_base; in hwreset()
1438 via_ircc_change_speed(self, self->io.speed); in hwreset()
1440 self->st_fifo.len = 0; in hwreset()
1449 static int via_ircc_is_receiving(struct via_ircc_cb *self) in via_ircc_is_receiving() argument
1454 IRDA_ASSERT(self != NULL, return FALSE;); in via_ircc_is_receiving()
1456 iobase = self->io.fir_base; in via_ircc_is_receiving()
1457 if (CkRxRecv(iobase, self)) in via_ircc_is_receiving()
1474 struct via_ircc_cb *self; in via_ircc_net_open() local
1481 self = netdev_priv(dev); in via_ircc_net_open()
1483 IRDA_ASSERT(self != NULL, return 0;); in via_ircc_net_open()
1484 iobase = self->io.fir_base; in via_ircc_net_open()
1485 if (request_irq(self->io.irq, via_ircc_interrupt, 0, dev->name, dev)) { in via_ircc_net_open()
1487 self->io.irq); in via_ircc_net_open()
1494 if (request_dma(self->io.dma, dev->name)) { in via_ircc_net_open()
1496 self->io.dma); in via_ircc_net_open()
1497 free_irq(self->io.irq, self); in via_ircc_net_open()
1500 if (self->io.dma2 != self->io.dma) { in via_ircc_net_open()
1501 if (request_dma(self->io.dma2, dev->name)) { in via_ircc_net_open()
1503 driver_name, self->io.dma2); in via_ircc_net_open()
1504 free_irq(self->io.irq, self); in via_ircc_net_open()
1505 free_dma(self->io.dma); in via_ircc_net_open()
1517 via_ircc_dma_receive(self); in via_ircc_net_open()
1527 self->irlap = irlap_open(dev, &self->qos, hwname); in via_ircc_net_open()
1529 self->RxLastCount = 0; in via_ircc_net_open()
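via_ircc_net_open() (lines 1474-1529) acquires the runtime resources in order, IRQ first, then one or two DMA channels, and each later failure frees everything acquired earlier. Note the self->io.dma2 != self->io.dma guard: the second channel is requested only when it is genuinely distinct. The acquisition ladder as a sketch, with hypothetical acquire/release stand-ins for request_irq()/request_dma():

    static int acquire_stub(int resource) { (void)resource; return 0; }
    static void release_stub(int resource) { (void)resource; }

    /* Models the irq/dma/dma2 ladder in via_ircc_net_open() */
    static int net_open_sketch(int irq, int dma, int dma2)
    {
            if (acquire_stub(irq))
                    return -1;
            if (acquire_stub(dma)) {
                    release_stub(irq);
                    return -1;
            }
            if (dma2 != dma && acquire_stub(dma2)) {
                    release_stub(irq);
                    release_stub(dma);
                    return -1;
            }
            return 0;       /* driver then arms receive and opens IrLAP */
    }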
1542 struct via_ircc_cb *self; in via_ircc_net_close() local
1548 self = netdev_priv(dev); in via_ircc_net_close()
1549 IRDA_ASSERT(self != NULL, return 0;); in via_ircc_net_close()
1554 if (self->irlap) in via_ircc_net_close()
1555 irlap_close(self->irlap); in via_ircc_net_close()
1556 self->irlap = NULL; in via_ircc_net_close()
1557 iobase = self->io.fir_base; in via_ircc_net_close()
1560 DisableDmaChannel(self->io.dma); in via_ircc_net_close()
1564 free_irq(self->io.irq, dev); in via_ircc_net_close()
1565 free_dma(self->io.dma); in via_ircc_net_close()
1566 if (self->io.dma2 != self->io.dma) in via_ircc_net_close()
1567 free_dma(self->io.dma2); in via_ircc_net_close()
1582 struct via_ircc_cb *self; in via_ircc_net_ioctl() local
1587 self = netdev_priv(dev); in via_ircc_net_ioctl()
1588 IRDA_ASSERT(self != NULL, return -1;); in via_ircc_net_ioctl()
1592 spin_lock_irqsave(&self->lock, flags); in via_ircc_net_ioctl()
1599 via_ircc_change_speed(self, irq->ifr_baudrate); in via_ircc_net_ioctl()
1606 irda_device_set_media_busy(self->netdev, TRUE); in via_ircc_net_ioctl()
1609 irq->ifr_receiving = via_ircc_is_receiving(self); in via_ircc_net_ioctl()
1615 spin_unlock_irqrestore(&self->lock, flags); in via_ircc_net_ioctl()
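Finally, via_ircc_net_ioctl() (lines 1582-1615) multiplexes the IrDA controls under spin_lock_irqsave: a bandwidth change calls via_ircc_change_speed() with irq->ifr_baudrate, a media-busy request calls irda_device_set_media_busy(), and a receive query reports via_ircc_is_receiving(). A dispatch skeleton; the cmd constants are stand-ins for the standard IrDA ioctl numbers, and the case bodies are compressed to comments:

    enum { CMD_SET_SPEED, CMD_MEDIA_BUSY, CMD_GET_RECEIVING };

    /* Dispatch skeleton modelled on via_ircc_net_ioctl() */
    static int ioctl_sketch(int cmd)
    {
            int ret = 0;

            /* spin_lock_irqsave(&self->lock, flags); */
            switch (cmd) {
            case CMD_SET_SPEED:     /* via_ircc_change_speed(self, baudrate) */
                    break;
            case CMD_MEDIA_BUSY:    /* irda_device_set_media_busy(dev, TRUE) */
                    break;
            case CMD_GET_RECEIVING: /* via_ircc_is_receiving(self) */
                    break;
            default:
                    ret = -1;       /* unsupported command */
            }
            /* spin_unlock_irqrestore(&self->lock, flags); */
            return ret;
    }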