Lines Matching refs:bp

164 static inline unsigned long br32(const struct b44 *bp, unsigned long reg)  in br32()  argument
166 return ssb_read32(bp->sdev, reg); in br32()
169 static inline void bw32(const struct b44 *bp, in bw32() argument
172 ssb_write32(bp->sdev, reg, val); in bw32()
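
These two matches cover the whole register-access layer: br32() and bw32() are thin wrappers around the SSB bus accessors. A minimal reconstruction from the fragments above; the trailing parameter names of bw32() are assumed from the ssb_write32() call it forwards to.

static inline unsigned long br32(const struct b44 *bp, unsigned long reg)
{
        return ssb_read32(bp->sdev, reg);
}

static inline void bw32(const struct b44 *bp,
                        unsigned long reg, unsigned long val)
{
        /* Parameter names beyond bp are assumed from the wrapped call. */
        ssb_write32(bp->sdev, reg, val);
}
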
175 static int b44_wait_bit(struct b44 *bp, unsigned long reg, in b44_wait_bit() argument
181 u32 val = br32(bp, reg); in b44_wait_bit()
191 netdev_err(bp->dev, "BUG! Timeout waiting for bit %08x of register %lx to %s\n", in b44_wait_bit()
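
The b44_wait_bit() matches show a bounded polling helper built on br32(). A sketch of the full body; the parameter names (bit, timeout, clear), the udelay(10) poll interval, and the -ENODEV timeout return do not appear in the listing and are assumptions.

static int b44_wait_bit(struct b44 *bp, unsigned long reg,
                        u32 bit, unsigned long timeout, const int clear)
{
        unsigned long i;

        for (i = 0; i < timeout; i++) {
                u32 val = br32(bp, reg);

                /* Wait for the bit to clear or to become set, as requested. */
                if (clear && !(val & bit))
                        break;
                if (!clear && (val & bit))
                        break;
                udelay(10);     /* assumed poll interval */
        }
        if (i == timeout) {
                netdev_err(bp->dev, "BUG! Timeout waiting for bit %08x of register %lx to %s\n",
                           bit, reg, clear ? "clear" : "set");
                return -ENODEV; /* assumed error code */
        }
        return 0;
}
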
199 static inline void __b44_cam_read(struct b44 *bp, unsigned char *data, int index) in __b44_cam_read() argument
203 bw32(bp, B44_CAM_CTRL, (CAM_CTRL_READ | in __b44_cam_read()
206 b44_wait_bit(bp, B44_CAM_CTRL, CAM_CTRL_BUSY, 100, 1); in __b44_cam_read()
208 val = br32(bp, B44_CAM_DATA_LO); in __b44_cam_read()
215 val = br32(bp, B44_CAM_DATA_HI); in __b44_cam_read()
221 static inline void __b44_cam_write(struct b44 *bp, in __b44_cam_write() argument
230 bw32(bp, B44_CAM_DATA_LO, val); in __b44_cam_write()
234 bw32(bp, B44_CAM_DATA_HI, val); in __b44_cam_write()
235 bw32(bp, B44_CAM_CTRL, (CAM_CTRL_WRITE | in __b44_cam_write()
237 b44_wait_bit(bp, B44_CAM_CTRL, CAM_CTRL_BUSY, 100, 1); in __b44_cam_write()
240 static inline void __b44_disable_ints(struct b44 *bp) in __b44_disable_ints() argument
242 bw32(bp, B44_IMASK, 0); in __b44_disable_ints()
245 static void b44_disable_ints(struct b44 *bp) in b44_disable_ints() argument
247 __b44_disable_ints(bp); in b44_disable_ints()
250 br32(bp, B44_IMASK); in b44_disable_ints()
253 static void b44_enable_ints(struct b44 *bp) in b44_enable_ints() argument
255 bw32(bp, B44_IMASK, bp->imask); in b44_enable_ints()
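
The three interrupt helpers follow the usual mask/flush pattern: __b44_disable_ints() clears B44_IMASK, b44_disable_ints() adds a read-back to flush the posted write, and b44_enable_ints() restores the saved bp->imask. Reconstructed from the matches above; only the comments are added.

static inline void __b44_disable_ints(struct b44 *bp)
{
        bw32(bp, B44_IMASK, 0);
}

static void b44_disable_ints(struct b44 *bp)
{
        __b44_disable_ints(bp);

        /* Read back to flush posted writes so the mask takes effect. */
        br32(bp, B44_IMASK);
}

static void b44_enable_ints(struct b44 *bp)
{
        bw32(bp, B44_IMASK, bp->imask);
}
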
258 static int __b44_readphy(struct b44 *bp, int phy_addr, int reg, u32 *val) in __b44_readphy() argument
262 bw32(bp, B44_EMAC_ISTAT, EMAC_INT_MII); in __b44_readphy()
263 bw32(bp, B44_MDIO_DATA, (MDIO_DATA_SB_START | in __b44_readphy()
268 err = b44_wait_bit(bp, B44_EMAC_ISTAT, EMAC_INT_MII, 100, 0); in __b44_readphy()
269 *val = br32(bp, B44_MDIO_DATA) & MDIO_DATA_DATA; in __b44_readphy()
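
The __b44_readphy() matches outline an MDIO read: acknowledge the MII interrupt, write a read command to B44_MDIO_DATA, wait for EMAC_INT_MII, then mask out the data. A sketch of the command word; the MDIO_* field shifts in the middle are assumed from b44.h and are not part of the listing.

static int __b44_readphy(struct b44 *bp, int phy_addr, int reg, u32 *val)
{
        int err;

        bw32(bp, B44_EMAC_ISTAT, EMAC_INT_MII);
        bw32(bp, B44_MDIO_DATA, (MDIO_DATA_SB_START |
                                 (MDIO_OP_READ << MDIO_DATA_OP_SHIFT) |   /* assumed */
                                 (phy_addr << MDIO_DATA_PMD_SHIFT) |      /* assumed */
                                 (reg << MDIO_DATA_RA_SHIFT) |            /* assumed */
                                 (MDIO_TA_VALID << MDIO_DATA_TA_SHIFT))); /* assumed */
        err = b44_wait_bit(bp, B44_EMAC_ISTAT, EMAC_INT_MII, 100, 0);
        *val = br32(bp, B44_MDIO_DATA) & MDIO_DATA_DATA;

        return err;
}
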
274 static int __b44_writephy(struct b44 *bp, int phy_addr, int reg, u32 val) in __b44_writephy() argument
276 bw32(bp, B44_EMAC_ISTAT, EMAC_INT_MII); in __b44_writephy()
277 bw32(bp, B44_MDIO_DATA, (MDIO_DATA_SB_START | in __b44_writephy()
283 return b44_wait_bit(bp, B44_EMAC_ISTAT, EMAC_INT_MII, 100, 0); in __b44_writephy()
286 static inline int b44_readphy(struct b44 *bp, int reg, u32 *val) in b44_readphy() argument
288 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_readphy()
291 return __b44_readphy(bp, bp->phy_addr, reg, val); in b44_readphy()
294 static inline int b44_writephy(struct b44 *bp, int reg, u32 val) in b44_writephy() argument
296 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_writephy()
299 return __b44_writephy(bp, bp->phy_addr, reg, val); in b44_writephy()
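
The two wrappers skip the internal MDIO path when an external PHY is in use; the early-return value for that case is not shown in the listing and is assumed to be 0.

static inline int b44_readphy(struct b44 *bp, int reg, u32 *val)
{
        if (bp->flags & B44_FLAG_EXTERNAL_PHY)
                return 0;       /* assumed: external PHY handled via phylib */

        return __b44_readphy(bp, bp->phy_addr, reg, val);
}

static inline int b44_writephy(struct b44 *bp, int reg, u32 val)
{
        if (bp->flags & B44_FLAG_EXTERNAL_PHY)
                return 0;       /* assumed, as above */

        return __b44_writephy(bp, bp->phy_addr, reg, val);
}
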
306 struct b44 *bp = netdev_priv(dev); in b44_mdio_read_mii() local
307 int rc = __b44_readphy(bp, phy_id, location, &val); in b44_mdio_read_mii()
316 struct b44 *bp = netdev_priv(dev); in b44_mdio_write_mii() local
317 __b44_writephy(bp, phy_id, location, val); in b44_mdio_write_mii()
323 struct b44 *bp = bus->priv; in b44_mdio_read_phylib() local
324 int rc = __b44_readphy(bp, phy_id, location, &val); in b44_mdio_read_phylib()
333 struct b44 *bp = bus->priv; in b44_mdio_write_phylib() local
334 return __b44_writephy(bp, phy_id, location, val); in b44_mdio_write_phylib()
337 static int b44_phy_reset(struct b44 *bp) in b44_phy_reset() argument
342 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_phy_reset()
344 err = b44_writephy(bp, MII_BMCR, BMCR_RESET); in b44_phy_reset()
348 err = b44_readphy(bp, MII_BMCR, &val); in b44_phy_reset()
351 netdev_err(bp->dev, "PHY Reset would not complete\n"); in b44_phy_reset()
359 static void __b44_set_flow_ctrl(struct b44 *bp, u32 pause_flags) in __b44_set_flow_ctrl() argument
363 bp->flags &= ~(B44_FLAG_TX_PAUSE | B44_FLAG_RX_PAUSE); in __b44_set_flow_ctrl()
364 bp->flags |= pause_flags; in __b44_set_flow_ctrl()
366 val = br32(bp, B44_RXCONFIG); in __b44_set_flow_ctrl()
371 bw32(bp, B44_RXCONFIG, val); in __b44_set_flow_ctrl()
373 val = br32(bp, B44_MAC_FLOW); in __b44_set_flow_ctrl()
379 bw32(bp, B44_MAC_FLOW, val); in __b44_set_flow_ctrl()
382 static void b44_set_flow_ctrl(struct b44 *bp, u32 local, u32 remote) in b44_set_flow_ctrl() argument
398 __b44_set_flow_ctrl(bp, pause_enab); in b44_set_flow_ctrl()
403 static void b44_wap54g10_workaround(struct b44 *bp) in b44_wap54g10_workaround() argument
417 err = __b44_readphy(bp, 0, MII_BMCR, &val); in b44_wap54g10_workaround()
423 err = __b44_writephy(bp, 0, MII_BMCR, val); in b44_wap54g10_workaround()
432 static inline void b44_wap54g10_workaround(struct b44 *bp) in b44_wap54g10_workaround() argument
437 static int b44_setup_phy(struct b44 *bp) in b44_setup_phy() argument
442 b44_wap54g10_workaround(bp); in b44_setup_phy()
444 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_setup_phy()
446 if ((err = b44_readphy(bp, B44_MII_ALEDCTRL, &val)) != 0) in b44_setup_phy()
448 if ((err = b44_writephy(bp, B44_MII_ALEDCTRL, in b44_setup_phy()
451 if ((err = b44_readphy(bp, B44_MII_TLEDCTRL, &val)) != 0) in b44_setup_phy()
453 if ((err = b44_writephy(bp, B44_MII_TLEDCTRL, in b44_setup_phy()
457 if (!(bp->flags & B44_FLAG_FORCE_LINK)) { in b44_setup_phy()
460 if (bp->flags & B44_FLAG_ADV_10HALF) in b44_setup_phy()
462 if (bp->flags & B44_FLAG_ADV_10FULL) in b44_setup_phy()
464 if (bp->flags & B44_FLAG_ADV_100HALF) in b44_setup_phy()
466 if (bp->flags & B44_FLAG_ADV_100FULL) in b44_setup_phy()
469 if (bp->flags & B44_FLAG_PAUSE_AUTO) in b44_setup_phy()
472 if ((err = b44_writephy(bp, MII_ADVERTISE, adv)) != 0) in b44_setup_phy()
474 if ((err = b44_writephy(bp, MII_BMCR, (BMCR_ANENABLE | in b44_setup_phy()
480 if ((err = b44_readphy(bp, MII_BMCR, &bmcr)) != 0) in b44_setup_phy()
483 if (bp->flags & B44_FLAG_100_BASE_T) in b44_setup_phy()
485 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_setup_phy()
487 if ((err = b44_writephy(bp, MII_BMCR, bmcr)) != 0) in b44_setup_phy()
494 b44_set_flow_ctrl(bp, 0, 0); in b44_setup_phy()
501 static void b44_stats_update(struct b44 *bp) in b44_stats_update() argument
506 val = &bp->hw_stats.tx_good_octets; in b44_stats_update()
507 u64_stats_update_begin(&bp->hw_stats.syncp); in b44_stats_update()
510 *val++ += br32(bp, reg); in b44_stats_update()
514 *val++ += br32(bp, reg); in b44_stats_update()
517 u64_stats_update_end(&bp->hw_stats.syncp); in b44_stats_update()
520 static void b44_link_report(struct b44 *bp) in b44_link_report() argument
522 if (!netif_carrier_ok(bp->dev)) { in b44_link_report()
523 netdev_info(bp->dev, "Link is down\n"); in b44_link_report()
525 netdev_info(bp->dev, "Link is up at %d Mbps, %s duplex\n", in b44_link_report()
526 (bp->flags & B44_FLAG_100_BASE_T) ? 100 : 10, in b44_link_report()
527 (bp->flags & B44_FLAG_FULL_DUPLEX) ? "full" : "half"); in b44_link_report()
529 netdev_info(bp->dev, "Flow control is %s for TX and %s for RX\n", in b44_link_report()
530 (bp->flags & B44_FLAG_TX_PAUSE) ? "on" : "off", in b44_link_report()
531 (bp->flags & B44_FLAG_RX_PAUSE) ? "on" : "off"); in b44_link_report()
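
Taken together, the b44_link_report() matches give the whole function except braces; only the if/else glue is filled in here.

static void b44_link_report(struct b44 *bp)
{
        if (!netif_carrier_ok(bp->dev)) {
                netdev_info(bp->dev, "Link is down\n");
        } else {
                netdev_info(bp->dev, "Link is up at %d Mbps, %s duplex\n",
                            (bp->flags & B44_FLAG_100_BASE_T) ? 100 : 10,
                            (bp->flags & B44_FLAG_FULL_DUPLEX) ? "full" : "half");

                netdev_info(bp->dev, "Flow control is %s for TX and %s for RX\n",
                            (bp->flags & B44_FLAG_TX_PAUSE) ? "on" : "off",
                            (bp->flags & B44_FLAG_RX_PAUSE) ? "on" : "off");
        }
}
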
535 static void b44_check_phy(struct b44 *bp) in b44_check_phy() argument
539 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_check_phy()
540 bp->flags |= B44_FLAG_100_BASE_T; in b44_check_phy()
541 if (!netif_carrier_ok(bp->dev)) { in b44_check_phy()
542 u32 val = br32(bp, B44_TX_CTRL); in b44_check_phy()
543 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_check_phy()
547 bw32(bp, B44_TX_CTRL, val); in b44_check_phy()
548 netif_carrier_on(bp->dev); in b44_check_phy()
549 b44_link_report(bp); in b44_check_phy()
554 if (!b44_readphy(bp, MII_BMSR, &bmsr) && in b44_check_phy()
555 !b44_readphy(bp, B44_MII_AUXCTRL, &aux) && in b44_check_phy()
558 bp->flags |= B44_FLAG_100_BASE_T; in b44_check_phy()
560 bp->flags &= ~B44_FLAG_100_BASE_T; in b44_check_phy()
562 bp->flags |= B44_FLAG_FULL_DUPLEX; in b44_check_phy()
564 bp->flags &= ~B44_FLAG_FULL_DUPLEX; in b44_check_phy()
566 if (!netif_carrier_ok(bp->dev) && in b44_check_phy()
568 u32 val = br32(bp, B44_TX_CTRL); in b44_check_phy()
571 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_check_phy()
575 bw32(bp, B44_TX_CTRL, val); in b44_check_phy()
577 if (!(bp->flags & B44_FLAG_FORCE_LINK) && in b44_check_phy()
578 !b44_readphy(bp, MII_ADVERTISE, &local_adv) && in b44_check_phy()
579 !b44_readphy(bp, MII_LPA, &remote_adv)) in b44_check_phy()
580 b44_set_flow_ctrl(bp, local_adv, remote_adv); in b44_check_phy()
583 netif_carrier_on(bp->dev); in b44_check_phy()
584 b44_link_report(bp); in b44_check_phy()
585 } else if (netif_carrier_ok(bp->dev) && !(bmsr & BMSR_LSTATUS)) { in b44_check_phy()
587 netif_carrier_off(bp->dev); in b44_check_phy()
588 b44_link_report(bp); in b44_check_phy()
592 netdev_warn(bp->dev, "Remote fault detected in PHY\n"); in b44_check_phy()
594 netdev_warn(bp->dev, "Jabber detected in PHY\n"); in b44_check_phy()
600 struct b44 *bp = from_timer(bp, t, timer); in b44_timer() local
602 spin_lock_irq(&bp->lock); in b44_timer()
604 b44_check_phy(bp); in b44_timer()
606 b44_stats_update(bp); in b44_timer()
608 spin_unlock_irq(&bp->lock); in b44_timer()
610 mod_timer(&bp->timer, round_jiffies(jiffies + HZ)); in b44_timer()
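
Every statement of the periodic timer is present in the matches above; only the timer_list signature is assumed.

static void b44_timer(struct timer_list *t)
{
        struct b44 *bp = from_timer(bp, t, timer);

        spin_lock_irq(&bp->lock);

        b44_check_phy(bp);

        b44_stats_update(bp);

        spin_unlock_irq(&bp->lock);

        /* Re-arm for roughly once per second. */
        mod_timer(&bp->timer, round_jiffies(jiffies + HZ));
}
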
613 static void b44_tx(struct b44 *bp) in b44_tx() argument
618 cur = br32(bp, B44_DMATX_STAT) & DMATX_STAT_CDMASK; in b44_tx()
622 for (cons = bp->tx_cons; cons != cur; cons = NEXT_TX(cons)) { in b44_tx()
623 struct ring_info *rp = &bp->tx_buffers[cons]; in b44_tx()
628 dma_unmap_single(bp->sdev->dma_dev, in b44_tx()
640 netdev_completed_queue(bp->dev, pkts_compl, bytes_compl); in b44_tx()
641 bp->tx_cons = cons; in b44_tx()
642 if (netif_queue_stopped(bp->dev) && in b44_tx()
643 TX_BUFFS_AVAIL(bp) > B44_TX_WAKEUP_THRESH) in b44_tx()
644 netif_wake_queue(bp->dev); in b44_tx()
646 bw32(bp, B44_GPTIMER, 0); in b44_tx()
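
The b44_tx() matches sketch the TX-completion path: read the hardware consumer index, unmap each completed buffer, report the completed work to the stack, and wake the queue once enough descriptors are free. A reconstruction; the byte/packet accounting, the skb free primitive, and the conversion of the DMATX_STAT byte offset to a descriptor index are assumptions.

static void b44_tx(struct b44 *bp)
{
        u32 cur, cons;
        unsigned int pkts_compl = 0, bytes_compl = 0;

        cur = br32(bp, B44_DMATX_STAT) & DMATX_STAT_CDMASK;
        cur /= sizeof(struct dma_desc); /* assumed: STAT holds a byte offset */

        for (cons = bp->tx_cons; cons != cur; cons = NEXT_TX(cons)) {
                struct ring_info *rp = &bp->tx_buffers[cons];
                struct sk_buff *skb = rp->skb;

                dma_unmap_single(bp->sdev->dma_dev, rp->mapping,
                                 skb->len, DMA_TO_DEVICE);
                rp->skb = NULL;

                bytes_compl += skb->len;
                pkts_compl++;
                dev_consume_skb_irq(skb);       /* assumed free primitive */
        }

        netdev_completed_queue(bp->dev, pkts_compl, bytes_compl);
        bp->tx_cons = cons;
        if (netif_queue_stopped(bp->dev) &&
            TX_BUFFS_AVAIL(bp) > B44_TX_WAKEUP_THRESH)
                netif_wake_queue(bp->dev);

        bw32(bp, B44_GPTIMER, 0);
}
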
654 static int b44_alloc_rx_skb(struct b44 *bp, int src_idx, u32 dest_idx_unmasked) in b44_alloc_rx_skb() argument
666 src_map = &bp->rx_buffers[src_idx]; in b44_alloc_rx_skb()
668 map = &bp->rx_buffers[dest_idx]; in b44_alloc_rx_skb()
669 skb = netdev_alloc_skb(bp->dev, RX_PKT_BUF_SZ); in b44_alloc_rx_skb()
673 mapping = dma_map_single(bp->sdev->dma_dev, skb->data, in b44_alloc_rx_skb()
679 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || in b44_alloc_rx_skb()
682 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_alloc_rx_skb()
683 dma_unmap_single(bp->sdev->dma_dev, mapping, in b44_alloc_rx_skb()
689 mapping = dma_map_single(bp->sdev->dma_dev, skb->data, in b44_alloc_rx_skb()
692 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || in b44_alloc_rx_skb()
694 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_alloc_rx_skb()
695 dma_unmap_single(bp->sdev->dma_dev, mapping, RX_PKT_BUF_SZ, DMA_FROM_DEVICE); in b44_alloc_rx_skb()
699 bp->force_copybreak = 1; in b44_alloc_rx_skb()
717 dp = &bp->rx_ring[dest_idx]; in b44_alloc_rx_skb()
719 dp->addr = cpu_to_le32((u32) mapping + bp->dma_offset); in b44_alloc_rx_skb()
721 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_alloc_rx_skb()
722 b44_sync_dma_desc_for_device(bp->sdev, bp->rx_ring_dma, in b44_alloc_rx_skb()
729 static void b44_recycle_rx(struct b44 *bp, int src_idx, u32 dest_idx_unmasked) in b44_recycle_rx() argument
738 dest_desc = &bp->rx_ring[dest_idx]; in b44_recycle_rx()
739 dest_map = &bp->rx_buffers[dest_idx]; in b44_recycle_rx()
740 src_desc = &bp->rx_ring[src_idx]; in b44_recycle_rx()
741 src_map = &bp->rx_buffers[src_idx]; in b44_recycle_rx()
749 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_recycle_rx()
750 b44_sync_dma_desc_for_cpu(bp->sdev, bp->rx_ring_dma, in b44_recycle_rx()
765 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_recycle_rx()
766 b44_sync_dma_desc_for_device(bp->sdev, bp->rx_ring_dma, in b44_recycle_rx()
770 dma_sync_single_for_device(bp->sdev->dma_dev, dest_map->mapping, in b44_recycle_rx()
775 static int b44_rx(struct b44 *bp, int budget) in b44_rx() argument
781 prod = br32(bp, B44_DMARX_STAT) & DMARX_STAT_CDMASK; in b44_rx()
783 cons = bp->rx_cons; in b44_rx()
786 struct ring_info *rp = &bp->rx_buffers[cons]; in b44_rx()
792 dma_sync_single_for_cpu(bp->sdev->dma_dev, map, in b44_rx()
800 b44_recycle_rx(bp, cons, bp->rx_prod); in b44_rx()
802 bp->dev->stats.rx_dropped++; in b44_rx()
821 if (!bp->force_copybreak && len > RX_COPY_THRESHOLD) { in b44_rx()
823 skb_size = b44_alloc_rx_skb(bp, cons, bp->rx_prod); in b44_rx()
826 dma_unmap_single(bp->sdev->dma_dev, map, in b44_rx()
834 b44_recycle_rx(bp, cons, bp->rx_prod); in b44_rx()
835 copy_skb = napi_alloc_skb(&bp->napi, len); in b44_rx()
846 skb->protocol = eth_type_trans(skb, bp->dev); in b44_rx()
851 bp->rx_prod = (bp->rx_prod + 1) & in b44_rx()
856 bp->rx_cons = cons; in b44_rx()
857 bw32(bp, B44_DMARX_PTR, cons * sizeof(struct dma_desc)); in b44_rx()
864 struct b44 *bp = container_of(napi, struct b44, napi); in b44_poll() local
868 spin_lock_irqsave(&bp->lock, flags); in b44_poll()
870 if (bp->istat & (ISTAT_TX | ISTAT_TO)) { in b44_poll()
872 b44_tx(bp); in b44_poll()
875 if (bp->istat & ISTAT_RFO) { /* fast recovery, in ~20msec */ in b44_poll()
876 bp->istat &= ~ISTAT_RFO; in b44_poll()
877 b44_disable_ints(bp); in b44_poll()
878 ssb_device_enable(bp->sdev, 0); /* resets ISTAT_RFO */ in b44_poll()
879 b44_init_rings(bp); in b44_poll()
880 b44_init_hw(bp, B44_FULL_RESET_SKIP_PHY); in b44_poll()
881 netif_wake_queue(bp->dev); in b44_poll()
884 spin_unlock_irqrestore(&bp->lock, flags); in b44_poll()
887 if (bp->istat & ISTAT_RX) in b44_poll()
888 work_done += b44_rx(bp, budget); in b44_poll()
890 if (bp->istat & ISTAT_ERRORS) { in b44_poll()
891 spin_lock_irqsave(&bp->lock, flags); in b44_poll()
892 b44_halt(bp); in b44_poll()
893 b44_init_rings(bp); in b44_poll()
894 b44_init_hw(bp, B44_FULL_RESET_SKIP_PHY); in b44_poll()
895 netif_wake_queue(bp->dev); in b44_poll()
896 spin_unlock_irqrestore(&bp->lock, flags); in b44_poll()
902 b44_enable_ints(bp); in b44_poll()
911 struct b44 *bp = netdev_priv(dev); in b44_interrupt() local
915 spin_lock(&bp->lock); in b44_interrupt()
917 istat = br32(bp, B44_ISTAT); in b44_interrupt()
918 imask = br32(bp, B44_IMASK); in b44_interrupt()
933 if (napi_schedule_prep(&bp->napi)) { in b44_interrupt()
937 bp->istat = istat; in b44_interrupt()
938 __b44_disable_ints(bp); in b44_interrupt()
939 __napi_schedule(&bp->napi); in b44_interrupt()
943 bw32(bp, B44_ISTAT, istat); in b44_interrupt()
944 br32(bp, B44_ISTAT); in b44_interrupt()
946 spin_unlock(&bp->lock); in b44_interrupt()
952 struct b44 *bp = netdev_priv(dev); in b44_tx_timeout() local
956 spin_lock_irq(&bp->lock); in b44_tx_timeout()
958 b44_halt(bp); in b44_tx_timeout()
959 b44_init_rings(bp); in b44_tx_timeout()
960 b44_init_hw(bp, B44_FULL_RESET); in b44_tx_timeout()
962 spin_unlock_irq(&bp->lock); in b44_tx_timeout()
964 b44_enable_ints(bp); in b44_tx_timeout()
971 struct b44 *bp = netdev_priv(dev); in b44_start_xmit() local
978 spin_lock_irqsave(&bp->lock, flags); in b44_start_xmit()
981 if (unlikely(TX_BUFFS_AVAIL(bp) < 1)) { in b44_start_xmit()
987 mapping = dma_map_single(bp->sdev->dma_dev, skb->data, len, DMA_TO_DEVICE); in b44_start_xmit()
988 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || mapping + len > DMA_BIT_MASK(30)) { in b44_start_xmit()
992 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_start_xmit()
993 dma_unmap_single(bp->sdev->dma_dev, mapping, len, in b44_start_xmit()
1000 mapping = dma_map_single(bp->sdev->dma_dev, bounce_skb->data, in b44_start_xmit()
1002 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || mapping + len > DMA_BIT_MASK(30)) { in b44_start_xmit()
1003 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_start_xmit()
1004 dma_unmap_single(bp->sdev->dma_dev, mapping, in b44_start_xmit()
1015 entry = bp->tx_prod; in b44_start_xmit()
1016 bp->tx_buffers[entry].skb = skb; in b44_start_xmit()
1017 bp->tx_buffers[entry].mapping = mapping; in b44_start_xmit()
1024 bp->tx_ring[entry].ctrl = cpu_to_le32(ctrl); in b44_start_xmit()
1025 bp->tx_ring[entry].addr = cpu_to_le32((u32) mapping + bp->dma_offset); in b44_start_xmit()
1027 if (bp->flags & B44_FLAG_TX_RING_HACK) in b44_start_xmit()
1028 b44_sync_dma_desc_for_device(bp->sdev, bp->tx_ring_dma, in b44_start_xmit()
1029 entry * sizeof(bp->tx_ring[0]), in b44_start_xmit()
1034 bp->tx_prod = entry; in b44_start_xmit()
1038 bw32(bp, B44_DMATX_PTR, entry * sizeof(struct dma_desc)); in b44_start_xmit()
1039 if (bp->flags & B44_FLAG_BUGGY_TXPTR) in b44_start_xmit()
1040 bw32(bp, B44_DMATX_PTR, entry * sizeof(struct dma_desc)); in b44_start_xmit()
1041 if (bp->flags & B44_FLAG_REORDER_BUG) in b44_start_xmit()
1042 br32(bp, B44_DMATX_PTR); in b44_start_xmit()
1046 if (TX_BUFFS_AVAIL(bp) < 1) in b44_start_xmit()
1050 spin_unlock_irqrestore(&bp->lock, flags); in b44_start_xmit()
1061 struct b44 *bp = netdev_priv(dev); in b44_change_mtu() local
1071 spin_lock_irq(&bp->lock); in b44_change_mtu()
1072 b44_halt(bp); in b44_change_mtu()
1074 b44_init_rings(bp); in b44_change_mtu()
1075 b44_init_hw(bp, B44_FULL_RESET); in b44_change_mtu()
1076 spin_unlock_irq(&bp->lock); in b44_change_mtu()
1078 b44_enable_ints(bp); in b44_change_mtu()
1090 static void b44_free_rings(struct b44 *bp) in b44_free_rings() argument
1096 rp = &bp->rx_buffers[i]; in b44_free_rings()
1100 dma_unmap_single(bp->sdev->dma_dev, rp->mapping, RX_PKT_BUF_SZ, in b44_free_rings()
1108 rp = &bp->tx_buffers[i]; in b44_free_rings()
1112 dma_unmap_single(bp->sdev->dma_dev, rp->mapping, rp->skb->len, in b44_free_rings()
1125 static void b44_init_rings(struct b44 *bp) in b44_init_rings() argument
1129 b44_free_rings(bp); in b44_init_rings()
1131 memset(bp->rx_ring, 0, B44_RX_RING_BYTES); in b44_init_rings()
1132 memset(bp->tx_ring, 0, B44_TX_RING_BYTES); in b44_init_rings()
1134 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_init_rings()
1135 dma_sync_single_for_device(bp->sdev->dma_dev, bp->rx_ring_dma, in b44_init_rings()
1138 if (bp->flags & B44_FLAG_TX_RING_HACK) in b44_init_rings()
1139 dma_sync_single_for_device(bp->sdev->dma_dev, bp->tx_ring_dma, in b44_init_rings()
1142 for (i = 0; i < bp->rx_pending; i++) { in b44_init_rings()
1143 if (b44_alloc_rx_skb(bp, -1, i) < 0) in b44_init_rings()
1152 static void b44_free_consistent(struct b44 *bp) in b44_free_consistent() argument
1154 kfree(bp->rx_buffers); in b44_free_consistent()
1155 bp->rx_buffers = NULL; in b44_free_consistent()
1156 kfree(bp->tx_buffers); in b44_free_consistent()
1157 bp->tx_buffers = NULL; in b44_free_consistent()
1158 if (bp->rx_ring) { in b44_free_consistent()
1159 if (bp->flags & B44_FLAG_RX_RING_HACK) { in b44_free_consistent()
1160 dma_unmap_single(bp->sdev->dma_dev, bp->rx_ring_dma, in b44_free_consistent()
1162 kfree(bp->rx_ring); in b44_free_consistent()
1164 dma_free_coherent(bp->sdev->dma_dev, DMA_TABLE_BYTES, in b44_free_consistent()
1165 bp->rx_ring, bp->rx_ring_dma); in b44_free_consistent()
1166 bp->rx_ring = NULL; in b44_free_consistent()
1167 bp->flags &= ~B44_FLAG_RX_RING_HACK; in b44_free_consistent()
1169 if (bp->tx_ring) { in b44_free_consistent()
1170 if (bp->flags & B44_FLAG_TX_RING_HACK) { in b44_free_consistent()
1171 dma_unmap_single(bp->sdev->dma_dev, bp->tx_ring_dma, in b44_free_consistent()
1173 kfree(bp->tx_ring); in b44_free_consistent()
1175 dma_free_coherent(bp->sdev->dma_dev, DMA_TABLE_BYTES, in b44_free_consistent()
1176 bp->tx_ring, bp->tx_ring_dma); in b44_free_consistent()
1177 bp->tx_ring = NULL; in b44_free_consistent()
1178 bp->flags &= ~B44_FLAG_TX_RING_HACK; in b44_free_consistent()
1186 static int b44_alloc_consistent(struct b44 *bp, gfp_t gfp) in b44_alloc_consistent() argument
1191 bp->rx_buffers = kzalloc(size, gfp); in b44_alloc_consistent()
1192 if (!bp->rx_buffers) in b44_alloc_consistent()
1196 bp->tx_buffers = kzalloc(size, gfp); in b44_alloc_consistent()
1197 if (!bp->tx_buffers) in b44_alloc_consistent()
1201 bp->rx_ring = dma_alloc_coherent(bp->sdev->dma_dev, size, in b44_alloc_consistent()
1202 &bp->rx_ring_dma, gfp); in b44_alloc_consistent()
1203 if (!bp->rx_ring) { in b44_alloc_consistent()
1214 rx_ring_dma = dma_map_single(bp->sdev->dma_dev, rx_ring, in b44_alloc_consistent()
1218 if (dma_mapping_error(bp->sdev->dma_dev, rx_ring_dma) || in b44_alloc_consistent()
1224 bp->rx_ring = rx_ring; in b44_alloc_consistent()
1225 bp->rx_ring_dma = rx_ring_dma; in b44_alloc_consistent()
1226 bp->flags |= B44_FLAG_RX_RING_HACK; in b44_alloc_consistent()
1229 bp->tx_ring = dma_alloc_coherent(bp->sdev->dma_dev, size, in b44_alloc_consistent()
1230 &bp->tx_ring_dma, gfp); in b44_alloc_consistent()
1231 if (!bp->tx_ring) { in b44_alloc_consistent()
1242 tx_ring_dma = dma_map_single(bp->sdev->dma_dev, tx_ring, in b44_alloc_consistent()
1246 if (dma_mapping_error(bp->sdev->dma_dev, tx_ring_dma) || in b44_alloc_consistent()
1252 bp->tx_ring = tx_ring; in b44_alloc_consistent()
1253 bp->tx_ring_dma = tx_ring_dma; in b44_alloc_consistent()
1254 bp->flags |= B44_FLAG_TX_RING_HACK; in b44_alloc_consistent()
1260 b44_free_consistent(bp); in b44_alloc_consistent()
1265 static void b44_clear_stats(struct b44 *bp) in b44_clear_stats() argument
1269 bw32(bp, B44_MIB_CTRL, MIB_CTRL_CLR_ON_READ); in b44_clear_stats()
1271 br32(bp, reg); in b44_clear_stats()
1273 br32(bp, reg); in b44_clear_stats()
1277 static void b44_chip_reset(struct b44 *bp, int reset_kind) in b44_chip_reset() argument
1279 struct ssb_device *sdev = bp->sdev; in b44_chip_reset()
1282 was_enabled = ssb_device_is_enabled(bp->sdev); in b44_chip_reset()
1284 ssb_device_enable(bp->sdev, 0); in b44_chip_reset()
1288 bw32(bp, B44_RCV_LAZY, 0); in b44_chip_reset()
1289 bw32(bp, B44_ENET_CTRL, ENET_CTRL_DISABLE); in b44_chip_reset()
1290 b44_wait_bit(bp, B44_ENET_CTRL, ENET_CTRL_DISABLE, 200, 1); in b44_chip_reset()
1291 bw32(bp, B44_DMATX_CTRL, 0); in b44_chip_reset()
1292 bp->tx_prod = bp->tx_cons = 0; in b44_chip_reset()
1293 if (br32(bp, B44_DMARX_STAT) & DMARX_STAT_EMASK) { in b44_chip_reset()
1294 b44_wait_bit(bp, B44_DMARX_STAT, DMARX_STAT_SIDLE, in b44_chip_reset()
1297 bw32(bp, B44_DMARX_CTRL, 0); in b44_chip_reset()
1298 bp->rx_prod = bp->rx_cons = 0; in b44_chip_reset()
1301 b44_clear_stats(bp); in b44_chip_reset()
1312 bw32(bp, B44_MDIO_CTRL, (MDIO_CTRL_PREAMBLE | in b44_chip_reset()
1318 bw32(bp, B44_MDIO_CTRL, (MDIO_CTRL_PREAMBLE | in b44_chip_reset()
1327 br32(bp, B44_MDIO_CTRL); in b44_chip_reset()
1329 if (!(br32(bp, B44_DEVCTRL) & DEVCTRL_IPP)) { in b44_chip_reset()
1330 bw32(bp, B44_ENET_CTRL, ENET_CTRL_EPSEL); in b44_chip_reset()
1331 br32(bp, B44_ENET_CTRL); in b44_chip_reset()
1332 bp->flags |= B44_FLAG_EXTERNAL_PHY; in b44_chip_reset()
1334 u32 val = br32(bp, B44_DEVCTRL); in b44_chip_reset()
1337 bw32(bp, B44_DEVCTRL, (val & ~DEVCTRL_EPR)); in b44_chip_reset()
1338 br32(bp, B44_DEVCTRL); in b44_chip_reset()
1341 bp->flags &= ~B44_FLAG_EXTERNAL_PHY; in b44_chip_reset()
1346 static void b44_halt(struct b44 *bp) in b44_halt() argument
1348 b44_disable_ints(bp); in b44_halt()
1350 b44_phy_reset(bp); in b44_halt()
1352 netdev_info(bp->dev, "powering down PHY\n"); in b44_halt()
1353 bw32(bp, B44_MAC_CTRL, MAC_CTRL_PHY_PDOWN); in b44_halt()
1356 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_halt()
1357 b44_chip_reset(bp, B44_CHIP_RESET_FULL); in b44_halt()
1359 b44_chip_reset(bp, B44_CHIP_RESET_PARTIAL); in b44_halt()
1363 static void __b44_set_mac_addr(struct b44 *bp) in __b44_set_mac_addr() argument
1365 bw32(bp, B44_CAM_CTRL, 0); in __b44_set_mac_addr()
1366 if (!(bp->dev->flags & IFF_PROMISC)) { in __b44_set_mac_addr()
1369 __b44_cam_write(bp, bp->dev->dev_addr, 0); in __b44_set_mac_addr()
1370 val = br32(bp, B44_CAM_CTRL); in __b44_set_mac_addr()
1371 bw32(bp, B44_CAM_CTRL, val | CAM_CTRL_ENABLE); in __b44_set_mac_addr()
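
The CAM programming for the unicast address is fully visible above; only the local variable declaration and braces are filled in.

static void __b44_set_mac_addr(struct b44 *bp)
{
        bw32(bp, B44_CAM_CTRL, 0);
        if (!(bp->dev->flags & IFF_PROMISC)) {
                u32 val;

                /* Write the address into CAM slot 0, then enable the CAM. */
                __b44_cam_write(bp, bp->dev->dev_addr, 0);
                val = br32(bp, B44_CAM_CTRL);
                bw32(bp, B44_CAM_CTRL, val | CAM_CTRL_ENABLE);
        }
}
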
1377 struct b44 *bp = netdev_priv(dev); in b44_set_mac_addr() local
1389 spin_lock_irq(&bp->lock); in b44_set_mac_addr()
1391 val = br32(bp, B44_RXCONFIG); in b44_set_mac_addr()
1393 __b44_set_mac_addr(bp); in b44_set_mac_addr()
1395 spin_unlock_irq(&bp->lock); in b44_set_mac_addr()
1404 static void b44_init_hw(struct b44 *bp, int reset_kind) in b44_init_hw() argument
1408 b44_chip_reset(bp, B44_CHIP_RESET_FULL); in b44_init_hw()
1410 b44_phy_reset(bp); in b44_init_hw()
1411 b44_setup_phy(bp); in b44_init_hw()
1415 bw32(bp, B44_MAC_CTRL, MAC_CTRL_CRC32_ENAB | MAC_CTRL_PHY_LEDCTRL); in b44_init_hw()
1416 bw32(bp, B44_RCV_LAZY, (1 << RCV_LAZY_FC_SHIFT)); in b44_init_hw()
1419 __b44_set_rx_mode(bp->dev); in b44_init_hw()
1422 bw32(bp, B44_RXMAXLEN, bp->dev->mtu + ETH_HLEN + 8 + RX_HEADER_LEN); in b44_init_hw()
1423 bw32(bp, B44_TXMAXLEN, bp->dev->mtu + ETH_HLEN + 8 + RX_HEADER_LEN); in b44_init_hw()
1425 bw32(bp, B44_TX_WMARK, 56); /* XXX magic */ in b44_init_hw()
1427 bw32(bp, B44_DMARX_CTRL, (DMARX_CTRL_ENABLE | in b44_init_hw()
1430 bw32(bp, B44_DMATX_CTRL, DMATX_CTRL_ENABLE); in b44_init_hw()
1431 bw32(bp, B44_DMATX_ADDR, bp->tx_ring_dma + bp->dma_offset); in b44_init_hw()
1432 bw32(bp, B44_DMARX_CTRL, (DMARX_CTRL_ENABLE | in b44_init_hw()
1434 bw32(bp, B44_DMARX_ADDR, bp->rx_ring_dma + bp->dma_offset); in b44_init_hw()
1436 bw32(bp, B44_DMARX_PTR, bp->rx_pending); in b44_init_hw()
1437 bp->rx_prod = bp->rx_pending; in b44_init_hw()
1439 bw32(bp, B44_MIB_CTRL, MIB_CTRL_CLR_ON_READ); in b44_init_hw()
1442 val = br32(bp, B44_ENET_CTRL); in b44_init_hw()
1443 bw32(bp, B44_ENET_CTRL, (val | ENET_CTRL_ENABLE)); in b44_init_hw()
1445 netdev_reset_queue(bp->dev); in b44_init_hw()
1450 struct b44 *bp = netdev_priv(dev); in b44_open() local
1453 err = b44_alloc_consistent(bp, GFP_KERNEL); in b44_open()
1457 napi_enable(&bp->napi); in b44_open()
1459 b44_init_rings(bp); in b44_open()
1460 b44_init_hw(bp, B44_FULL_RESET); in b44_open()
1462 b44_check_phy(bp); in b44_open()
1466 napi_disable(&bp->napi); in b44_open()
1467 b44_chip_reset(bp, B44_CHIP_RESET_PARTIAL); in b44_open()
1468 b44_free_rings(bp); in b44_open()
1469 b44_free_consistent(bp); in b44_open()
1473 timer_setup(&bp->timer, b44_timer, 0); in b44_open()
1474 bp->timer.expires = jiffies + HZ; in b44_open()
1475 add_timer(&bp->timer); in b44_open()
1477 b44_enable_ints(bp); in b44_open()
1479 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_open()
1500 static void bwfilter_table(struct b44 *bp, u8 *pp, u32 bytes, u32 table_offset) in bwfilter_table() argument
1506 bw32(bp, B44_FILT_ADDR, table_offset + i); in bwfilter_table()
1507 bw32(bp, B44_FILT_DATA, pattern[i / sizeof(u32)]); in bwfilter_table()
1544 static void b44_setup_pseudo_magicp(struct b44 *bp) in b44_setup_pseudo_magicp() argument
1558 plen0 = b44_magic_pattern(bp->dev->dev_addr, pwol_pattern, pwol_mask, in b44_setup_pseudo_magicp()
1561 bwfilter_table(bp, pwol_pattern, B44_PATTERN_SIZE, B44_PATTERN_BASE); in b44_setup_pseudo_magicp()
1562 bwfilter_table(bp, pwol_mask, B44_PMASK_SIZE, B44_PMASK_BASE); in b44_setup_pseudo_magicp()
1567 plen1 = b44_magic_pattern(bp->dev->dev_addr, pwol_pattern, pwol_mask, in b44_setup_pseudo_magicp()
1570 bwfilter_table(bp, pwol_pattern, B44_PATTERN_SIZE, in b44_setup_pseudo_magicp()
1572 bwfilter_table(bp, pwol_mask, B44_PMASK_SIZE, in b44_setup_pseudo_magicp()
1578 plen2 = b44_magic_pattern(bp->dev->dev_addr, pwol_pattern, pwol_mask, in b44_setup_pseudo_magicp()
1581 bwfilter_table(bp, pwol_pattern, B44_PATTERN_SIZE, in b44_setup_pseudo_magicp()
1583 bwfilter_table(bp, pwol_mask, B44_PMASK_SIZE, in b44_setup_pseudo_magicp()
1590 bw32(bp, B44_WKUP_LEN, val); in b44_setup_pseudo_magicp()
1593 val = br32(bp, B44_DEVCTRL); in b44_setup_pseudo_magicp()
1594 bw32(bp, B44_DEVCTRL, val | DEVCTRL_PFE); in b44_setup_pseudo_magicp()
1599 static void b44_setup_wol_pci(struct b44 *bp) in b44_setup_wol_pci() argument
1603 if (bp->sdev->bus->bustype != SSB_BUSTYPE_SSB) { in b44_setup_wol_pci()
1604 bw32(bp, SSB_TMSLOW, br32(bp, SSB_TMSLOW) | SSB_TMSLOW_PE); in b44_setup_wol_pci()
1605 pci_read_config_word(bp->sdev->bus->host_pci, SSB_PMCSR, &val); in b44_setup_wol_pci()
1606 pci_write_config_word(bp->sdev->bus->host_pci, SSB_PMCSR, val | SSB_PE); in b44_setup_wol_pci()
1610 static inline void b44_setup_wol_pci(struct b44 *bp) { } in b44_setup_wol_pci() argument
1613 static void b44_setup_wol(struct b44 *bp) in b44_setup_wol() argument
1617 bw32(bp, B44_RXCONFIG, RXCONFIG_ALLMULTI); in b44_setup_wol()
1619 if (bp->flags & B44_FLAG_B0_ANDLATER) { in b44_setup_wol()
1621 bw32(bp, B44_WKUP_LEN, WKUP_LEN_DISABLE); in b44_setup_wol()
1623 val = bp->dev->dev_addr[2] << 24 | in b44_setup_wol()
1624 bp->dev->dev_addr[3] << 16 | in b44_setup_wol()
1625 bp->dev->dev_addr[4] << 8 | in b44_setup_wol()
1626 bp->dev->dev_addr[5]; in b44_setup_wol()
1627 bw32(bp, B44_ADDR_LO, val); in b44_setup_wol()
1629 val = bp->dev->dev_addr[0] << 8 | in b44_setup_wol()
1630 bp->dev->dev_addr[1]; in b44_setup_wol()
1631 bw32(bp, B44_ADDR_HI, val); in b44_setup_wol()
1633 val = br32(bp, B44_DEVCTRL); in b44_setup_wol()
1634 bw32(bp, B44_DEVCTRL, val | DEVCTRL_MPM | DEVCTRL_PFE); in b44_setup_wol()
1637 b44_setup_pseudo_magicp(bp); in b44_setup_wol()
1639 b44_setup_wol_pci(bp); in b44_setup_wol()
1644 struct b44 *bp = netdev_priv(dev); in b44_close() local
1648 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_close()
1651 napi_disable(&bp->napi); in b44_close()
1653 del_timer_sync(&bp->timer); in b44_close()
1655 spin_lock_irq(&bp->lock); in b44_close()
1657 b44_halt(bp); in b44_close()
1658 b44_free_rings(bp); in b44_close()
1661 spin_unlock_irq(&bp->lock); in b44_close()
1665 if (bp->flags & B44_FLAG_WOL_ENABLE) { in b44_close()
1666 b44_init_hw(bp, B44_PARTIAL_RESET); in b44_close()
1667 b44_setup_wol(bp); in b44_close()
1670 b44_free_consistent(bp); in b44_close()
1678 struct b44 *bp = netdev_priv(dev); in b44_get_stats64() local
1679 struct b44_hw_stats *hwstat = &bp->hw_stats; in b44_get_stats64()
1721 static int __b44_load_mcast(struct b44 *bp, struct net_device *dev) in __b44_load_mcast() argument
1731 __b44_cam_write(bp, ha->addr, i++ + 1); in __b44_load_mcast()
1738 struct b44 *bp = netdev_priv(dev); in __b44_set_rx_mode() local
1741 val = br32(bp, B44_RXCONFIG); in __b44_set_rx_mode()
1745 bw32(bp, B44_RXCONFIG, val); in __b44_set_rx_mode()
1750 __b44_set_mac_addr(bp); in __b44_set_rx_mode()
1756 i = __b44_load_mcast(bp, dev); in __b44_set_rx_mode()
1759 __b44_cam_write(bp, zero, i); in __b44_set_rx_mode()
1761 bw32(bp, B44_RXCONFIG, val); in __b44_set_rx_mode()
1762 val = br32(bp, B44_CAM_CTRL); in __b44_set_rx_mode()
1763 bw32(bp, B44_CAM_CTRL, val | CAM_CTRL_ENABLE); in __b44_set_rx_mode()
1769 struct b44 *bp = netdev_priv(dev); in b44_set_rx_mode() local
1771 spin_lock_irq(&bp->lock); in b44_set_rx_mode()
1773 spin_unlock_irq(&bp->lock); in b44_set_rx_mode()
1778 struct b44 *bp = netdev_priv(dev); in b44_get_msglevel() local
1779 return bp->msg_enable; in b44_get_msglevel()
1784 struct b44 *bp = netdev_priv(dev); in b44_set_msglevel() local
1785 bp->msg_enable = value; in b44_set_msglevel()
1790 struct b44 *bp = netdev_priv(dev); in b44_get_drvinfo() local
1791 struct ssb_bus *bus = bp->sdev->bus; in b44_get_drvinfo()
1810 struct b44 *bp = netdev_priv(dev); in b44_nway_reset() local
1814 spin_lock_irq(&bp->lock); in b44_nway_reset()
1815 b44_readphy(bp, MII_BMCR, &bmcr); in b44_nway_reset()
1816 b44_readphy(bp, MII_BMCR, &bmcr); in b44_nway_reset()
1819 b44_writephy(bp, MII_BMCR, in b44_nway_reset()
1823 spin_unlock_irq(&bp->lock); in b44_nway_reset()
1831 struct b44 *bp = netdev_priv(dev); in b44_get_link_ksettings() local
1834 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_get_link_ksettings()
1849 if (bp->flags & B44_FLAG_ADV_10HALF) in b44_get_link_ksettings()
1851 if (bp->flags & B44_FLAG_ADV_10FULL) in b44_get_link_ksettings()
1853 if (bp->flags & B44_FLAG_ADV_100HALF) in b44_get_link_ksettings()
1855 if (bp->flags & B44_FLAG_ADV_100FULL) in b44_get_link_ksettings()
1858 cmd->base.speed = (bp->flags & B44_FLAG_100_BASE_T) ? in b44_get_link_ksettings()
1860 cmd->base.duplex = (bp->flags & B44_FLAG_FULL_DUPLEX) ? in b44_get_link_ksettings()
1863 cmd->base.phy_address = bp->phy_addr; in b44_get_link_ksettings()
1864 cmd->base.autoneg = (bp->flags & B44_FLAG_FORCE_LINK) ? in b44_get_link_ksettings()
1885 struct b44 *bp = netdev_priv(dev); in b44_set_link_ksettings() local
1890 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_set_link_ksettings()
1892 spin_lock_irq(&bp->lock); in b44_set_link_ksettings()
1894 b44_setup_phy(bp); in b44_set_link_ksettings()
1898 spin_unlock_irq(&bp->lock); in b44_set_link_ksettings()
1921 spin_lock_irq(&bp->lock); in b44_set_link_ksettings()
1924 bp->flags &= ~(B44_FLAG_FORCE_LINK | in b44_set_link_ksettings()
1932 bp->flags |= (B44_FLAG_ADV_10HALF | in b44_set_link_ksettings()
1938 bp->flags |= B44_FLAG_ADV_10HALF; in b44_set_link_ksettings()
1940 bp->flags |= B44_FLAG_ADV_10FULL; in b44_set_link_ksettings()
1942 bp->flags |= B44_FLAG_ADV_100HALF; in b44_set_link_ksettings()
1944 bp->flags |= B44_FLAG_ADV_100FULL; in b44_set_link_ksettings()
1947 bp->flags |= B44_FLAG_FORCE_LINK; in b44_set_link_ksettings()
1948 bp->flags &= ~(B44_FLAG_100_BASE_T | B44_FLAG_FULL_DUPLEX); in b44_set_link_ksettings()
1950 bp->flags |= B44_FLAG_100_BASE_T; in b44_set_link_ksettings()
1952 bp->flags |= B44_FLAG_FULL_DUPLEX; in b44_set_link_ksettings()
1956 b44_setup_phy(bp); in b44_set_link_ksettings()
1958 spin_unlock_irq(&bp->lock); in b44_set_link_ksettings()
1968 struct b44 *bp = netdev_priv(dev); in b44_get_ringparam() local
1971 ering->rx_pending = bp->rx_pending; in b44_get_ringparam()
1981 struct b44 *bp = netdev_priv(dev); in b44_set_ringparam() local
1989 spin_lock_irq(&bp->lock); in b44_set_ringparam()
1991 bp->rx_pending = ering->rx_pending; in b44_set_ringparam()
1992 bp->tx_pending = ering->tx_pending; in b44_set_ringparam()
1994 b44_halt(bp); in b44_set_ringparam()
1995 b44_init_rings(bp); in b44_set_ringparam()
1996 b44_init_hw(bp, B44_FULL_RESET); in b44_set_ringparam()
1997 netif_wake_queue(bp->dev); in b44_set_ringparam()
1998 spin_unlock_irq(&bp->lock); in b44_set_ringparam()
2000 b44_enable_ints(bp); in b44_set_ringparam()
2008 struct b44 *bp = netdev_priv(dev); in b44_get_pauseparam() local
2011 (bp->flags & B44_FLAG_PAUSE_AUTO) != 0; in b44_get_pauseparam()
2013 (bp->flags & B44_FLAG_RX_PAUSE) != 0; in b44_get_pauseparam()
2015 (bp->flags & B44_FLAG_TX_PAUSE) != 0; in b44_get_pauseparam()
2021 struct b44 *bp = netdev_priv(dev); in b44_set_pauseparam() local
2023 spin_lock_irq(&bp->lock); in b44_set_pauseparam()
2025 bp->flags |= B44_FLAG_PAUSE_AUTO; in b44_set_pauseparam()
2027 bp->flags &= ~B44_FLAG_PAUSE_AUTO; in b44_set_pauseparam()
2029 bp->flags |= B44_FLAG_RX_PAUSE; in b44_set_pauseparam()
2031 bp->flags &= ~B44_FLAG_RX_PAUSE; in b44_set_pauseparam()
2033 bp->flags |= B44_FLAG_TX_PAUSE; in b44_set_pauseparam()
2035 bp->flags &= ~B44_FLAG_TX_PAUSE; in b44_set_pauseparam()
2036 if (bp->flags & B44_FLAG_PAUSE_AUTO) { in b44_set_pauseparam()
2037 b44_halt(bp); in b44_set_pauseparam()
2038 b44_init_rings(bp); in b44_set_pauseparam()
2039 b44_init_hw(bp, B44_FULL_RESET); in b44_set_pauseparam()
2041 __b44_set_flow_ctrl(bp, bp->flags); in b44_set_pauseparam()
2043 spin_unlock_irq(&bp->lock); in b44_set_pauseparam()
2045 b44_enable_ints(bp); in b44_set_pauseparam()
2072 struct b44 *bp = netdev_priv(dev); in b44_get_ethtool_stats() local
2073 struct b44_hw_stats *hwstat = &bp->hw_stats; in b44_get_ethtool_stats()
2078 spin_lock_irq(&bp->lock); in b44_get_ethtool_stats()
2079 b44_stats_update(bp); in b44_get_ethtool_stats()
2080 spin_unlock_irq(&bp->lock); in b44_get_ethtool_stats()
2095 struct b44 *bp = netdev_priv(dev); in b44_get_wol() local
2098 if (bp->flags & B44_FLAG_WOL_ENABLE) in b44_get_wol()
2107 struct b44 *bp = netdev_priv(dev); in b44_set_wol() local
2109 spin_lock_irq(&bp->lock); in b44_set_wol()
2111 bp->flags |= B44_FLAG_WOL_ENABLE; in b44_set_wol()
2113 bp->flags &= ~B44_FLAG_WOL_ENABLE; in b44_set_wol()
2114 spin_unlock_irq(&bp->lock); in b44_set_wol()
2116 device_set_wakeup_enable(bp->sdev->dev, wol->wolopts & WAKE_MAGIC); in b44_set_wol()
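
The ethtool WoL setter is almost fully present in the matches; the WAKE_MAGIC test and the return value are assumptions.

static int b44_set_wol(struct net_device *dev, struct ethtool_wolinfo *wol)
{
        struct b44 *bp = netdev_priv(dev);

        spin_lock_irq(&bp->lock);
        if (wol->wolopts & WAKE_MAGIC)  /* assumed: only magic-packet WoL */
                bp->flags |= B44_FLAG_WOL_ENABLE;
        else
                bp->flags &= ~B44_FLAG_WOL_ENABLE;
        spin_unlock_irq(&bp->lock);

        device_set_wakeup_enable(bp->sdev->dev, wol->wolopts & WAKE_MAGIC);
        return 0;
}
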
2141 struct b44 *bp = netdev_priv(dev); in b44_ioctl() local
2147 spin_lock_irq(&bp->lock); in b44_ioctl()
2148 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_ioctl()
2152 err = generic_mii_ioctl(&bp->mii_if, if_mii(ifr), cmd, NULL); in b44_ioctl()
2154 spin_unlock_irq(&bp->lock); in b44_ioctl()
2159 static int b44_get_invariants(struct b44 *bp) in b44_get_invariants() argument
2161 struct ssb_device *sdev = bp->sdev; in b44_get_invariants()
2165 bp->dma_offset = ssb_dma_translation(sdev); in b44_get_invariants()
2170 bp->phy_addr = sdev->bus->sprom.et1phyaddr; in b44_get_invariants()
2173 bp->phy_addr = sdev->bus->sprom.et0phyaddr; in b44_get_invariants()
2178 bp->phy_addr &= 0x1F; in b44_get_invariants()
2180 eth_hw_addr_set(bp->dev, addr); in b44_get_invariants()
2182 if (!is_valid_ether_addr(&bp->dev->dev_addr[0])) { in b44_get_invariants()
2187 bp->imask = IMASK_DEF; in b44_get_invariants()
2193 if (bp->sdev->id.revision >= 7) in b44_get_invariants()
2194 bp->flags |= B44_FLAG_B0_ANDLATER; in b44_get_invariants()
2217 struct b44 *bp = netdev_priv(dev); in b44_adjust_link() local
2223 if (bp->old_link != phydev->link) { in b44_adjust_link()
2225 bp->old_link = phydev->link; in b44_adjust_link()
2231 (bp->flags & B44_FLAG_FULL_DUPLEX)) { in b44_adjust_link()
2233 bp->flags &= ~B44_FLAG_FULL_DUPLEX; in b44_adjust_link()
2235 !(bp->flags & B44_FLAG_FULL_DUPLEX)) { in b44_adjust_link()
2237 bp->flags |= B44_FLAG_FULL_DUPLEX; in b44_adjust_link()
2242 u32 val = br32(bp, B44_TX_CTRL); in b44_adjust_link()
2243 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_adjust_link()
2247 bw32(bp, B44_TX_CTRL, val); in b44_adjust_link()
2252 static int b44_register_phy_one(struct b44 *bp) in b44_register_phy_one() argument
2256 struct ssb_device *sdev = bp->sdev; in b44_register_phy_one()
2269 mii_bus->priv = bp; in b44_register_phy_one()
2274 mii_bus->phy_mask = ~(1 << bp->phy_addr); in b44_register_phy_one()
2277 bp->mii_bus = mii_bus; in b44_register_phy_one()
2285 if (!mdiobus_is_registered_device(bp->mii_bus, bp->phy_addr) && in b44_register_phy_one()
2290 bp->phy_addr); in b44_register_phy_one()
2292 bp->phy_addr = 0; in b44_register_phy_one()
2294 bp->phy_addr); in b44_register_phy_one()
2297 bp->phy_addr); in b44_register_phy_one()
2300 phydev = phy_connect(bp->dev, bus_id, &b44_adjust_link, in b44_register_phy_one()
2304 bp->phy_addr); in b44_register_phy_one()
2317 bp->old_link = 0; in b44_register_phy_one()
2318 bp->phy_addr = phydev->mdio.addr; in b44_register_phy_one()
2334 static void b44_unregister_phy_one(struct b44 *bp) in b44_unregister_phy_one() argument
2336 struct net_device *dev = bp->dev; in b44_unregister_phy_one()
2337 struct mii_bus *mii_bus = bp->mii_bus; in b44_unregister_phy_one()
2348 struct b44 *bp; in b44_init_one() local
2353 dev = alloc_etherdev(sizeof(*bp)); in b44_init_one()
2364 bp = netdev_priv(dev); in b44_init_one()
2365 bp->sdev = sdev; in b44_init_one()
2366 bp->dev = dev; in b44_init_one()
2367 bp->force_copybreak = 0; in b44_init_one()
2369 bp->msg_enable = netif_msg_init(b44_debug, B44_DEF_MSG_ENABLE); in b44_init_one()
2371 spin_lock_init(&bp->lock); in b44_init_one()
2372 u64_stats_init(&bp->hw_stats.syncp); in b44_init_one()
2374 bp->rx_pending = B44_DEF_RX_RING_PENDING; in b44_init_one()
2375 bp->tx_pending = B44_DEF_TX_RING_PENDING; in b44_init_one()
2378 netif_napi_add(dev, &bp->napi, b44_poll); in b44_init_one()
2399 err = b44_get_invariants(bp); in b44_init_one()
2406 if (bp->phy_addr == B44_PHY_ADDR_NO_PHY) { in b44_init_one()
2412 bp->mii_if.dev = dev; in b44_init_one()
2413 bp->mii_if.mdio_read = b44_mdio_read_mii; in b44_init_one()
2414 bp->mii_if.mdio_write = b44_mdio_write_mii; in b44_init_one()
2415 bp->mii_if.phy_id = bp->phy_addr; in b44_init_one()
2416 bp->mii_if.phy_id_mask = 0x1f; in b44_init_one()
2417 bp->mii_if.reg_num_mask = 0x1f; in b44_init_one()
2420 bp->flags |= (B44_FLAG_ADV_10HALF | B44_FLAG_ADV_10FULL | in b44_init_one()
2424 bp->flags |= B44_FLAG_PAUSE_AUTO; in b44_init_one()
2439 b44_chip_reset(bp, B44_CHIP_RESET_FULL); in b44_init_one()
2442 err = b44_phy_reset(bp); in b44_init_one()
2448 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_init_one()
2449 err = b44_register_phy_one(bp); in b44_init_one()
2467 netif_napi_del(&bp->napi); in b44_init_one()
2477 struct b44 *bp = netdev_priv(dev); in b44_remove_one() local
2480 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_remove_one()
2481 b44_unregister_phy_one(bp); in b44_remove_one()
2484 netif_napi_del(&bp->napi); in b44_remove_one()
2493 struct b44 *bp = netdev_priv(dev); in b44_suspend() local
2498 del_timer_sync(&bp->timer); in b44_suspend()
2500 spin_lock_irq(&bp->lock); in b44_suspend()
2502 b44_halt(bp); in b44_suspend()
2503 netif_carrier_off(bp->dev); in b44_suspend()
2504 netif_device_detach(bp->dev); in b44_suspend()
2505 b44_free_rings(bp); in b44_suspend()
2507 spin_unlock_irq(&bp->lock); in b44_suspend()
2510 if (bp->flags & B44_FLAG_WOL_ENABLE) { in b44_suspend()
2511 b44_init_hw(bp, B44_PARTIAL_RESET); in b44_suspend()
2512 b44_setup_wol(bp); in b44_suspend()
2522 struct b44 *bp = netdev_priv(dev); in b44_resume() local
2535 spin_lock_irq(&bp->lock); in b44_resume()
2536 b44_init_rings(bp); in b44_resume()
2537 b44_init_hw(bp, B44_FULL_RESET); in b44_resume()
2538 spin_unlock_irq(&bp->lock); in b44_resume()
2548 spin_lock_irq(&bp->lock); in b44_resume()
2549 b44_halt(bp); in b44_resume()
2550 b44_free_rings(bp); in b44_resume()
2551 spin_unlock_irq(&bp->lock); in b44_resume()
2555 netif_device_attach(bp->dev); in b44_resume()
2557 b44_enable_ints(bp); in b44_resume()
2560 mod_timer(&bp->timer, jiffies + 1); in b44_resume()