Lines Matching refs:bp

244 static void bnx2_init_napi(struct bnx2 *bp);
245 static void bnx2_del_napi(struct bnx2 *bp);
247 static inline u32 bnx2_tx_avail(struct bnx2 *bp, struct bnx2_tx_ring_info *txr) in bnx2_tx_avail() argument
260 return bp->tx_ring_size - diff; in bnx2_tx_avail()
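
A minimal model of the producer/consumer arithmetic behind bnx2_tx_avail(): with free-running 16-bit indices, prod - cons wraps naturally modulo 2^16, so the count of descriptors still in flight needs no explicit wrap handling and the free count is simply the ring size minus it. The real function applies an additional correction that this sketch omits, and the standalone form and names below are illustrative only.

#include <stdint.h>

/* ring_size stands in for bp->tx_ring_size; prod/cons for txr->tx_prod/tx_cons. */
static uint16_t tx_avail(uint16_t prod, uint16_t cons, uint16_t ring_size)
{
	uint16_t in_flight = (uint16_t)(prod - cons);	/* modulo-2^16 distance, wrap-safe */

	return ring_size - in_flight;			/* descriptors still free for new frames */
}
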
264 bnx2_reg_rd_ind(struct bnx2 *bp, u32 offset) in bnx2_reg_rd_ind() argument
269 spin_lock_irqsave(&bp->indirect_lock, flags); in bnx2_reg_rd_ind()
270 BNX2_WR(bp, BNX2_PCICFG_REG_WINDOW_ADDRESS, offset); in bnx2_reg_rd_ind()
271 val = BNX2_RD(bp, BNX2_PCICFG_REG_WINDOW); in bnx2_reg_rd_ind()
272 spin_unlock_irqrestore(&bp->indirect_lock, flags); in bnx2_reg_rd_ind()
277 bnx2_reg_wr_ind(struct bnx2 *bp, u32 offset, u32 val) in bnx2_reg_wr_ind() argument
281 spin_lock_irqsave(&bp->indirect_lock, flags); in bnx2_reg_wr_ind()
282 BNX2_WR(bp, BNX2_PCICFG_REG_WINDOW_ADDRESS, offset); in bnx2_reg_wr_ind()
283 BNX2_WR(bp, BNX2_PCICFG_REG_WINDOW, val); in bnx2_reg_wr_ind()
284 spin_unlock_irqrestore(&bp->indirect_lock, flags); in bnx2_reg_wr_ind()
288 bnx2_shmem_wr(struct bnx2 *bp, u32 offset, u32 val) in bnx2_shmem_wr() argument
290 bnx2_reg_wr_ind(bp, bp->shmem_base + offset, val); in bnx2_shmem_wr()
294 bnx2_shmem_rd(struct bnx2 *bp, u32 offset) in bnx2_shmem_rd() argument
296 return bnx2_reg_rd_ind(bp, bp->shmem_base + offset); in bnx2_shmem_rd()
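
The four helpers above layer shared-memory access on top of a windowed register interface: bnx2_reg_rd_ind()/bnx2_reg_wr_ind() first write the target offset to a window-address register and then touch the window-data register, with bp->indirect_lock keeping the two steps from interleaving, and bnx2_shmem_rd()/bnx2_shmem_wr() simply add bp->shmem_base before calling them. A user-space sketch of that shape, with fake registers, an assumed shmem_base, and a mutex standing in for the spin_lock_irqsave() pair:

#include <pthread.h>
#include <stdint.h>

static uint32_t internal_mem[0x10000 / 4];	/* fake on-chip space reachable only via the window */
static uint32_t window_addr;			/* fake window-address register */
static pthread_mutex_t indirect_lock = PTHREAD_MUTEX_INITIALIZER;

static void wr_window_address(uint32_t offset) { window_addr = offset; }
static uint32_t rd_window_data(void) { return internal_mem[window_addr / 4]; }
static void wr_window_data(uint32_t val) { internal_mem[window_addr / 4] = val; }

static uint32_t reg_rd_ind(uint32_t offset)
{
	uint32_t val;

	pthread_mutex_lock(&indirect_lock);	/* the two-step sequence must stay atomic */
	wr_window_address(offset);		/* 1: point the window at the target register */
	val = rd_window_data();			/* 2: read it back through the window */
	pthread_mutex_unlock(&indirect_lock);
	return val;
}

static void reg_wr_ind(uint32_t offset, uint32_t val)
{
	pthread_mutex_lock(&indirect_lock);
	wr_window_address(offset);
	wr_window_data(val);
	pthread_mutex_unlock(&indirect_lock);
}

static const uint32_t shmem_base = 0x8000;	/* assumed value; the driver keeps this in bp->shmem_base */

static uint32_t shmem_rd(uint32_t offset)
{
	return reg_rd_ind(shmem_base + offset);	/* shared memory is just indirect space at a base offset */
}

static void shmem_wr(uint32_t offset, uint32_t val)
{
	reg_wr_ind(shmem_base + offset, val);
}
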
300 bnx2_ctx_wr(struct bnx2 *bp, u32 cid_addr, u32 offset, u32 val) in bnx2_ctx_wr() argument
305 spin_lock_irqsave(&bp->indirect_lock, flags); in bnx2_ctx_wr()
306 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_ctx_wr()
309 BNX2_WR(bp, BNX2_CTX_CTX_DATA, val); in bnx2_ctx_wr()
310 BNX2_WR(bp, BNX2_CTX_CTX_CTRL, in bnx2_ctx_wr()
313 val = BNX2_RD(bp, BNX2_CTX_CTX_CTRL); in bnx2_ctx_wr()
319 BNX2_WR(bp, BNX2_CTX_DATA_ADR, offset); in bnx2_ctx_wr()
320 BNX2_WR(bp, BNX2_CTX_DATA, val); in bnx2_ctx_wr()
322 spin_unlock_irqrestore(&bp->indirect_lock, flags); in bnx2_ctx_wr()
329 struct bnx2 *bp = netdev_priv(dev); in bnx2_drv_ctl() local
334 bnx2_reg_wr_ind(bp, io->offset, io->data); in bnx2_drv_ctl()
337 io->data = bnx2_reg_rd_ind(bp, io->offset); in bnx2_drv_ctl()
340 bnx2_ctx_wr(bp, io->cid_addr, io->offset, io->data); in bnx2_drv_ctl()
348 static void bnx2_setup_cnic_irq_info(struct bnx2 *bp) in bnx2_setup_cnic_irq_info() argument
350 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2_setup_cnic_irq_info()
351 struct bnx2_napi *bnapi = &bp->bnx2_napi[0]; in bnx2_setup_cnic_irq_info()
354 if (bp->flags & BNX2_FLAG_USING_MSIX) { in bnx2_setup_cnic_irq_info()
357 sb_id = bp->irq_nvecs; in bnx2_setup_cnic_irq_info()
367 cp->irq_arr[0].vector = bp->irq_tbl[sb_id].vector; in bnx2_setup_cnic_irq_info()
378 struct bnx2 *bp = netdev_priv(dev); in bnx2_register_cnic() local
379 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2_register_cnic()
387 if (!bnx2_reg_rd_ind(bp, BNX2_FW_MAX_ISCSI_CONN)) in bnx2_register_cnic()
390 bp->cnic_data = data; in bnx2_register_cnic()
391 rcu_assign_pointer(bp->cnic_ops, ops); in bnx2_register_cnic()
396 bnx2_setup_cnic_irq_info(bp); in bnx2_register_cnic()
403 struct bnx2 *bp = netdev_priv(dev); in bnx2_unregister_cnic() local
404 struct bnx2_napi *bnapi = &bp->bnx2_napi[0]; in bnx2_unregister_cnic()
405 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2_unregister_cnic()
407 mutex_lock(&bp->cnic_lock); in bnx2_unregister_cnic()
410 RCU_INIT_POINTER(bp->cnic_ops, NULL); in bnx2_unregister_cnic()
411 mutex_unlock(&bp->cnic_lock); in bnx2_unregister_cnic()
418 struct bnx2 *bp = netdev_priv(dev); in bnx2_cnic_probe() local
419 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2_cnic_probe()
425 cp->chip_id = bp->chip_id; in bnx2_cnic_probe()
426 cp->pdev = bp->pdev; in bnx2_cnic_probe()
427 cp->io_base = bp->regview; in bnx2_cnic_probe()
436 bnx2_cnic_stop(struct bnx2 *bp) in bnx2_cnic_stop() argument
441 mutex_lock(&bp->cnic_lock); in bnx2_cnic_stop()
442 c_ops = rcu_dereference_protected(bp->cnic_ops, in bnx2_cnic_stop()
443 lockdep_is_held(&bp->cnic_lock)); in bnx2_cnic_stop()
446 c_ops->cnic_ctl(bp->cnic_data, &info); in bnx2_cnic_stop()
448 mutex_unlock(&bp->cnic_lock); in bnx2_cnic_stop()
452 bnx2_cnic_start(struct bnx2 *bp) in bnx2_cnic_start() argument
457 mutex_lock(&bp->cnic_lock); in bnx2_cnic_start()
458 c_ops = rcu_dereference_protected(bp->cnic_ops, in bnx2_cnic_start()
459 lockdep_is_held(&bp->cnic_lock)); in bnx2_cnic_start()
461 if (!(bp->flags & BNX2_FLAG_USING_MSIX)) { in bnx2_cnic_start()
462 struct bnx2_napi *bnapi = &bp->bnx2_napi[0]; in bnx2_cnic_start()
467 c_ops->cnic_ctl(bp->cnic_data, &info); in bnx2_cnic_start()
469 mutex_unlock(&bp->cnic_lock); in bnx2_cnic_start()
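
The cnic registration lines above use the usual RCU publish/retire pattern: bnx2_register_cnic() publishes the ops pointer with rcu_assign_pointer(), bnx2_unregister_cnic() clears it with RCU_INIT_POINTER() under bp->cnic_lock, and the stop/start paths read it with rcu_dereference_protected(..., lockdep_is_held(&bp->cnic_lock)) because they already hold the mutex. A condensed user-space model with C11 atomics in place of the RCU primitives; the locking is simplified and the names are illustrative:

#include <pthread.h>
#include <stdatomic.h>
#include <stddef.h>

struct cnic_ops {
	void (*cnic_ctl)(void *data, int cmd);	/* stand-in for the real cnic_ctl() hook */
};

static _Atomic(struct cnic_ops *) cnic_ops;
static pthread_mutex_t cnic_lock = PTHREAD_MUTEX_INITIALIZER;

static void register_cnic(struct cnic_ops *ops)
{
	/* publish with release semantics, as rcu_assign_pointer() does */
	atomic_store_explicit(&cnic_ops, ops, memory_order_release);
}

static void unregister_cnic(void)
{
	pthread_mutex_lock(&cnic_lock);
	atomic_store_explicit(&cnic_ops, NULL, memory_order_relaxed);	/* like RCU_INIT_POINTER(..., NULL) */
	pthread_mutex_unlock(&cnic_lock);
	/* the kernel would additionally wait for readers before freeing
	 * anything the old pointer referenced */
}

static void cnic_ctl_locked(void *data, int cmd)
{
	struct cnic_ops *ops;

	pthread_mutex_lock(&cnic_lock);		/* writers excluded, so a plain load is enough */
	ops = atomic_load_explicit(&cnic_ops, memory_order_relaxed);
	if (ops)
		ops->cnic_ctl(data, cmd);
	pthread_mutex_unlock(&cnic_lock);
}
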
475 bnx2_cnic_stop(struct bnx2 *bp) in bnx2_cnic_stop() argument
480 bnx2_cnic_start(struct bnx2 *bp) in bnx2_cnic_start() argument
487 bnx2_read_phy(struct bnx2 *bp, u32 reg, u32 *val) in bnx2_read_phy() argument
492 if (bp->phy_flags & BNX2_PHY_FLAG_INT_MODE_AUTO_POLLING) { in bnx2_read_phy()
493 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_read_phy()
496 BNX2_WR(bp, BNX2_EMAC_MDIO_MODE, val1); in bnx2_read_phy()
497 BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_read_phy()
502 val1 = (bp->phy_addr << 21) | (reg << 16) | in bnx2_read_phy()
505 BNX2_WR(bp, BNX2_EMAC_MDIO_COMM, val1); in bnx2_read_phy()
510 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_COMM); in bnx2_read_phy()
514 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_COMM); in bnx2_read_phy()
530 if (bp->phy_flags & BNX2_PHY_FLAG_INT_MODE_AUTO_POLLING) { in bnx2_read_phy()
531 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_read_phy()
534 BNX2_WR(bp, BNX2_EMAC_MDIO_MODE, val1); in bnx2_read_phy()
535 BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_read_phy()
544 bnx2_write_phy(struct bnx2 *bp, u32 reg, u32 val) in bnx2_write_phy() argument
549 if (bp->phy_flags & BNX2_PHY_FLAG_INT_MODE_AUTO_POLLING) { in bnx2_write_phy()
550 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_write_phy()
553 BNX2_WR(bp, BNX2_EMAC_MDIO_MODE, val1); in bnx2_write_phy()
554 BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_write_phy()
559 val1 = (bp->phy_addr << 21) | (reg << 16) | val | in bnx2_write_phy()
562 BNX2_WR(bp, BNX2_EMAC_MDIO_COMM, val1); in bnx2_write_phy()
567 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_COMM); in bnx2_write_phy()
579 if (bp->phy_flags & BNX2_PHY_FLAG_INT_MODE_AUTO_POLLING) { in bnx2_write_phy()
580 val1 = BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_write_phy()
583 BNX2_WR(bp, BNX2_EMAC_MDIO_MODE, val1); in bnx2_write_phy()
584 BNX2_RD(bp, BNX2_EMAC_MDIO_MODE); in bnx2_write_phy()
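
bnx2_read_phy() and bnx2_write_phy() share the same MDIO shape: optionally drop out of hardware auto-polling, compose a command word from (bp->phy_addr << 21) | (reg << 16) plus the opcode and data, start it, poll the COMM register until the controller reports completion, then restore auto-polling. A compressed model of the read side; the busy/fail bit positions, the fake "instant completion" device, and the omission of the auto-polling dance are all simplifications:

#include <stdint.h>

#define MDIO_BUSY	(1u << 29)	/* placeholder "transaction in progress" bit */
#define MDIO_FAIL	(1u << 28)	/* placeholder "transaction failed" bit */
#define MDIO_START_READ	(1u << 27)	/* placeholder read opcode */

static uint32_t mdio_comm;		/* fake MDIO command/status register */

static void mdio_comm_write(uint32_t v) { mdio_comm = v & ~MDIO_BUSY; }	/* fake chip completes instantly */
static uint32_t mdio_comm_read(void) { return mdio_comm; }
static void udelay(unsigned int usec) { (void)usec; }

static int phy_read(uint32_t phy_addr, uint32_t reg, uint32_t *val)
{
	uint32_t cmd = (phy_addr << 21) | (reg << 16) | MDIO_START_READ;
	uint32_t status = MDIO_BUSY;
	int i;

	mdio_comm_write(cmd | MDIO_BUSY);	/* kick off the transaction */

	for (i = 0; i < 50; i++) {		/* bounded poll for completion */
		udelay(10);
		status = mdio_comm_read();
		if (!(status & MDIO_BUSY))
			break;
	}

	if (status & (MDIO_BUSY | MDIO_FAIL)) {
		*val = 0;
		return -1;			/* timed out or the PHY never answered */
	}
	*val = status & 0xffff;			/* in this model the data sits in the low 16 bits */
	return 0;
}
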
593 bnx2_disable_int(struct bnx2 *bp) in bnx2_disable_int() argument
598 for (i = 0; i < bp->irq_nvecs; i++) { in bnx2_disable_int()
599 bnapi = &bp->bnx2_napi[i]; in bnx2_disable_int()
600 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, bnapi->int_num | in bnx2_disable_int()
603 BNX2_RD(bp, BNX2_PCICFG_INT_ACK_CMD); in bnx2_disable_int()
607 bnx2_enable_int(struct bnx2 *bp) in bnx2_enable_int() argument
612 for (i = 0; i < bp->irq_nvecs; i++) { in bnx2_enable_int()
613 bnapi = &bp->bnx2_napi[i]; in bnx2_enable_int()
615 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, bnapi->int_num | in bnx2_enable_int()
620 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, bnapi->int_num | in bnx2_enable_int()
624 BNX2_WR(bp, BNX2_HC_COMMAND, bp->hc_cmd | BNX2_HC_COMMAND_COAL_NOW); in bnx2_enable_int()
628 bnx2_disable_int_sync(struct bnx2 *bp) in bnx2_disable_int_sync() argument
632 atomic_inc(&bp->intr_sem); in bnx2_disable_int_sync()
633 if (!netif_running(bp->dev)) in bnx2_disable_int_sync()
636 bnx2_disable_int(bp); in bnx2_disable_int_sync()
637 for (i = 0; i < bp->irq_nvecs; i++) in bnx2_disable_int_sync()
638 synchronize_irq(bp->irq_tbl[i].vector); in bnx2_disable_int_sync()
642 bnx2_napi_disable(struct bnx2 *bp) in bnx2_napi_disable() argument
646 for (i = 0; i < bp->irq_nvecs; i++) in bnx2_napi_disable()
647 napi_disable(&bp->bnx2_napi[i].napi); in bnx2_napi_disable()
651 bnx2_napi_enable(struct bnx2 *bp) in bnx2_napi_enable() argument
655 for (i = 0; i < bp->irq_nvecs; i++) in bnx2_napi_enable()
656 napi_enable(&bp->bnx2_napi[i].napi); in bnx2_napi_enable()
660 bnx2_netif_stop(struct bnx2 *bp, bool stop_cnic) in bnx2_netif_stop() argument
663 bnx2_cnic_stop(bp); in bnx2_netif_stop()
664 if (netif_running(bp->dev)) { in bnx2_netif_stop()
665 bnx2_napi_disable(bp); in bnx2_netif_stop()
666 netif_tx_disable(bp->dev); in bnx2_netif_stop()
668 bnx2_disable_int_sync(bp); in bnx2_netif_stop()
669 netif_carrier_off(bp->dev); /* prevent tx timeout */ in bnx2_netif_stop()
673 bnx2_netif_start(struct bnx2 *bp, bool start_cnic) in bnx2_netif_start() argument
675 if (atomic_dec_and_test(&bp->intr_sem)) { in bnx2_netif_start()
676 if (netif_running(bp->dev)) { in bnx2_netif_start()
677 netif_tx_wake_all_queues(bp->dev); in bnx2_netif_start()
678 spin_lock_bh(&bp->phy_lock); in bnx2_netif_start()
679 if (bp->link_up) in bnx2_netif_start()
680 netif_carrier_on(bp->dev); in bnx2_netif_start()
681 spin_unlock_bh(&bp->phy_lock); in bnx2_netif_start()
682 bnx2_napi_enable(bp); in bnx2_netif_start()
683 bnx2_enable_int(bp); in bnx2_netif_start()
685 bnx2_cnic_start(bp); in bnx2_netif_start()
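
bnx2_disable_int_sync() and bnx2_netif_start() pair up through bp->intr_sem: every stop bumps the counter, masks the hardware and waits for in-flight handlers with synchronize_irq(), and only the start call that drops the counter back to zero (atomic_dec_and_test()) re-enables NAPI and interrupts, so nested stop/start sections compose. A small model of that counting, with empty stubs where the real enable/disable work happens:

#include <stdatomic.h>

static atomic_int intr_sem;

static void hw_disable_interrupts(void) { }	/* stand-in for bnx2_disable_int() + synchronize_irq() */
static void hw_enable_interrupts(void) { }	/* stand-in for bnx2_napi_enable() + bnx2_enable_int() */

static void netif_stop(void)
{
	atomic_fetch_add(&intr_sem, 1);		/* like atomic_inc(&bp->intr_sem) */
	hw_disable_interrupts();
}

static void netif_start(void)
{
	/* like atomic_dec_and_test(): only the call that returns the counter
	 * to zero actually re-arms the hardware */
	if (atomic_fetch_sub(&intr_sem, 1) == 1)
		hw_enable_interrupts();
}
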
691 bnx2_free_tx_mem(struct bnx2 *bp) in bnx2_free_tx_mem() argument
695 for (i = 0; i < bp->num_tx_rings; i++) { in bnx2_free_tx_mem()
696 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_free_tx_mem()
700 dma_free_coherent(&bp->pdev->dev, TXBD_RING_SIZE, in bnx2_free_tx_mem()
711 bnx2_free_rx_mem(struct bnx2 *bp) in bnx2_free_rx_mem() argument
715 for (i = 0; i < bp->num_rx_rings; i++) { in bnx2_free_rx_mem()
716 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_free_rx_mem()
720 for (j = 0; j < bp->rx_max_ring; j++) { in bnx2_free_rx_mem()
722 dma_free_coherent(&bp->pdev->dev, RXBD_RING_SIZE, in bnx2_free_rx_mem()
730 for (j = 0; j < bp->rx_max_pg_ring; j++) { in bnx2_free_rx_mem()
732 dma_free_coherent(&bp->pdev->dev, RXBD_RING_SIZE, in bnx2_free_rx_mem()
743 bnx2_alloc_tx_mem(struct bnx2 *bp) in bnx2_alloc_tx_mem() argument
747 for (i = 0; i < bp->num_tx_rings; i++) { in bnx2_alloc_tx_mem()
748 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_alloc_tx_mem()
756 dma_alloc_coherent(&bp->pdev->dev, TXBD_RING_SIZE, in bnx2_alloc_tx_mem()
765 bnx2_alloc_rx_mem(struct bnx2 *bp) in bnx2_alloc_rx_mem() argument
769 for (i = 0; i < bp->num_rx_rings; i++) { in bnx2_alloc_rx_mem()
770 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_alloc_rx_mem()
775 vzalloc(array_size(SW_RXBD_RING_SIZE, bp->rx_max_ring)); in bnx2_alloc_rx_mem()
779 for (j = 0; j < bp->rx_max_ring; j++) { in bnx2_alloc_rx_mem()
781 dma_alloc_coherent(&bp->pdev->dev, in bnx2_alloc_rx_mem()
790 if (bp->rx_pg_ring_size) { in bnx2_alloc_rx_mem()
793 bp->rx_max_pg_ring)); in bnx2_alloc_rx_mem()
799 for (j = 0; j < bp->rx_max_pg_ring; j++) { in bnx2_alloc_rx_mem()
801 dma_alloc_coherent(&bp->pdev->dev, in bnx2_alloc_rx_mem()
816 struct bnx2 *bp = netdev_priv(dev); in bnx2_free_stats_blk() local
818 if (bp->status_blk) { in bnx2_free_stats_blk()
819 dma_free_coherent(&bp->pdev->dev, bp->status_stats_size, in bnx2_free_stats_blk()
820 bp->status_blk, in bnx2_free_stats_blk()
821 bp->status_blk_mapping); in bnx2_free_stats_blk()
822 bp->status_blk = NULL; in bnx2_free_stats_blk()
823 bp->stats_blk = NULL; in bnx2_free_stats_blk()
832 struct bnx2 *bp = netdev_priv(dev); in bnx2_alloc_stats_blk() local
836 if (bp->flags & BNX2_FLAG_MSIX_CAP) in bnx2_alloc_stats_blk()
839 bp->status_stats_size = status_blk_size + in bnx2_alloc_stats_blk()
841 status_blk = dma_alloc_coherent(&bp->pdev->dev, bp->status_stats_size, in bnx2_alloc_stats_blk()
842 &bp->status_blk_mapping, GFP_KERNEL); in bnx2_alloc_stats_blk()
846 bp->status_blk = status_blk; in bnx2_alloc_stats_blk()
847 bp->stats_blk = status_blk + status_blk_size; in bnx2_alloc_stats_blk()
848 bp->stats_blk_mapping = bp->status_blk_mapping + status_blk_size; in bnx2_alloc_stats_blk()
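
bnx2_alloc_stats_blk() gets the status block and the statistics block from one DMA-coherent allocation and derives the second block's CPU pointer and DMA address with the same byte offset, as the lines above show. A kernel-style condensation of that shape; it assumes a struct device * and caller-supplied sizes, so it is a sketch rather than the driver's function:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

static int alloc_status_and_stats(struct device *dev, size_t status_blk_size,
				  size_t stats_blk_size, void **status_blk,
				  void **stats_blk, dma_addr_t *status_map,
				  dma_addr_t *stats_map)
{
	size_t total = status_blk_size + stats_blk_size;
	void *blk = dma_alloc_coherent(dev, total, status_map, GFP_KERNEL);

	if (!blk)
		return -ENOMEM;

	*status_blk = blk;				/* status block sits at the front */
	*stats_blk  = (char *)blk + status_blk_size;	/* stats block follows it ... */
	*stats_map  = *status_map + status_blk_size;	/* ... at the same offset in DMA space */
	return 0;
}
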
854 bnx2_free_mem(struct bnx2 *bp) in bnx2_free_mem() argument
857 struct bnx2_napi *bnapi = &bp->bnx2_napi[0]; in bnx2_free_mem()
859 bnx2_free_tx_mem(bp); in bnx2_free_mem()
860 bnx2_free_rx_mem(bp); in bnx2_free_mem()
862 for (i = 0; i < bp->ctx_pages; i++) { in bnx2_free_mem()
863 if (bp->ctx_blk[i]) { in bnx2_free_mem()
864 dma_free_coherent(&bp->pdev->dev, BNX2_PAGE_SIZE, in bnx2_free_mem()
865 bp->ctx_blk[i], in bnx2_free_mem()
866 bp->ctx_blk_mapping[i]); in bnx2_free_mem()
867 bp->ctx_blk[i] = NULL; in bnx2_free_mem()
876 bnx2_alloc_mem(struct bnx2 *bp) in bnx2_alloc_mem() argument
881 bnapi = &bp->bnx2_napi[0]; in bnx2_alloc_mem()
882 bnapi->status_blk.msi = bp->status_blk; in bnx2_alloc_mem()
887 if (bp->flags & BNX2_FLAG_MSIX_CAP) { in bnx2_alloc_mem()
888 for (i = 1; i < bp->irq_nvecs; i++) { in bnx2_alloc_mem()
891 bnapi = &bp->bnx2_napi[i]; in bnx2_alloc_mem()
893 sblk = (bp->status_blk + BNX2_SBLK_MSIX_ALIGN_SIZE * i); in bnx2_alloc_mem()
903 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_alloc_mem()
904 bp->ctx_pages = 0x2000 / BNX2_PAGE_SIZE; in bnx2_alloc_mem()
905 if (bp->ctx_pages == 0) in bnx2_alloc_mem()
906 bp->ctx_pages = 1; in bnx2_alloc_mem()
907 for (i = 0; i < bp->ctx_pages; i++) { in bnx2_alloc_mem()
908 bp->ctx_blk[i] = dma_alloc_coherent(&bp->pdev->dev, in bnx2_alloc_mem()
910 &bp->ctx_blk_mapping[i], in bnx2_alloc_mem()
912 if (!bp->ctx_blk[i]) in bnx2_alloc_mem()
917 err = bnx2_alloc_rx_mem(bp); in bnx2_alloc_mem()
921 err = bnx2_alloc_tx_mem(bp); in bnx2_alloc_mem()
928 bnx2_free_mem(bp); in bnx2_alloc_mem()
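
The allocation path above is written so that a single failure point unwinds everything: bnx2_alloc_mem() falls back to the bnx2_free_mem() call listed inside it, and the free side tolerates partially built state by checking each bp->ctx_blk[i] before releasing it. A loose user-space model of that discipline, with calloc()/free() standing in for the coherent DMA allocator:

#include <stdlib.h>

#define CTX_PAGES 4			/* placeholder for bp->ctx_pages */

static void *ctx_blk[CTX_PAGES];

static void free_mem(void)
{
	int i;

	for (i = 0; i < CTX_PAGES; i++) {
		free(ctx_blk[i]);	/* free(NULL) is a no-op, like the NULL checks in bnx2_free_mem() */
		ctx_blk[i] = NULL;
	}
}

static int alloc_mem(void)
{
	int i;

	for (i = 0; i < CTX_PAGES; i++) {
		ctx_blk[i] = calloc(1, 4096);	/* stand-in for a BNX2_PAGE_SIZE coherent allocation */
		if (!ctx_blk[i])
			goto alloc_mem_err;
	}
	return 0;

alloc_mem_err:
	free_mem();			/* one unwind point releases whatever was built so far */
	return -1;
}
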
933 bnx2_report_fw_link(struct bnx2 *bp) in bnx2_report_fw_link() argument
937 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_report_fw_link()
940 if (bp->link_up) { in bnx2_report_fw_link()
943 switch (bp->line_speed) { in bnx2_report_fw_link()
945 if (bp->duplex == DUPLEX_HALF) in bnx2_report_fw_link()
951 if (bp->duplex == DUPLEX_HALF) in bnx2_report_fw_link()
957 if (bp->duplex == DUPLEX_HALF) in bnx2_report_fw_link()
963 if (bp->duplex == DUPLEX_HALF) in bnx2_report_fw_link()
972 if (bp->autoneg) { in bnx2_report_fw_link()
975 bnx2_read_phy(bp, bp->mii_bmsr, &bmsr); in bnx2_report_fw_link()
976 bnx2_read_phy(bp, bp->mii_bmsr, &bmsr); in bnx2_report_fw_link()
979 bp->phy_flags & BNX2_PHY_FLAG_PARALLEL_DETECT) in bnx2_report_fw_link()
988 bnx2_shmem_wr(bp, BNX2_LINK_STATUS, fw_link_status); in bnx2_report_fw_link()
992 bnx2_xceiver_str(struct bnx2 *bp) in bnx2_xceiver_str() argument
994 return (bp->phy_port == PORT_FIBRE) ? "SerDes" : in bnx2_xceiver_str()
995 ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) ? "Remote Copper" : in bnx2_xceiver_str()
1000 bnx2_report_link(struct bnx2 *bp) in bnx2_report_link() argument
1002 if (bp->link_up) { in bnx2_report_link()
1003 netif_carrier_on(bp->dev); in bnx2_report_link()
1004 netdev_info(bp->dev, "NIC %s Link is Up, %d Mbps %s duplex", in bnx2_report_link()
1005 bnx2_xceiver_str(bp), in bnx2_report_link()
1006 bp->line_speed, in bnx2_report_link()
1007 bp->duplex == DUPLEX_FULL ? "full" : "half"); in bnx2_report_link()
1009 if (bp->flow_ctrl) { in bnx2_report_link()
1010 if (bp->flow_ctrl & FLOW_CTRL_RX) { in bnx2_report_link()
1012 if (bp->flow_ctrl & FLOW_CTRL_TX) in bnx2_report_link()
1022 netif_carrier_off(bp->dev); in bnx2_report_link()
1023 netdev_err(bp->dev, "NIC %s Link is Down\n", in bnx2_report_link()
1024 bnx2_xceiver_str(bp)); in bnx2_report_link()
1027 bnx2_report_fw_link(bp); in bnx2_report_link()
1031 bnx2_resolve_flow_ctrl(struct bnx2 *bp) in bnx2_resolve_flow_ctrl() argument
1035 bp->flow_ctrl = 0; in bnx2_resolve_flow_ctrl()
1036 if ((bp->autoneg & (AUTONEG_SPEED | AUTONEG_FLOW_CTRL)) != in bnx2_resolve_flow_ctrl()
1039 if (bp->duplex == DUPLEX_FULL) { in bnx2_resolve_flow_ctrl()
1040 bp->flow_ctrl = bp->req_flow_ctrl; in bnx2_resolve_flow_ctrl()
1045 if (bp->duplex != DUPLEX_FULL) { in bnx2_resolve_flow_ctrl()
1049 if ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) && in bnx2_resolve_flow_ctrl()
1050 (BNX2_CHIP(bp) == BNX2_CHIP_5708)) { in bnx2_resolve_flow_ctrl()
1053 bnx2_read_phy(bp, BCM5708S_1000X_STAT1, &val); in bnx2_resolve_flow_ctrl()
1055 bp->flow_ctrl |= FLOW_CTRL_TX; in bnx2_resolve_flow_ctrl()
1057 bp->flow_ctrl |= FLOW_CTRL_RX; in bnx2_resolve_flow_ctrl()
1061 bnx2_read_phy(bp, bp->mii_adv, &local_adv); in bnx2_resolve_flow_ctrl()
1062 bnx2_read_phy(bp, bp->mii_lpa, &remote_adv); in bnx2_resolve_flow_ctrl()
1064 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_resolve_flow_ctrl()
1085 bp->flow_ctrl = FLOW_CTRL_TX | FLOW_CTRL_RX; in bnx2_resolve_flow_ctrl()
1088 bp->flow_ctrl = FLOW_CTRL_RX; in bnx2_resolve_flow_ctrl()
1093 bp->flow_ctrl = FLOW_CTRL_TX | FLOW_CTRL_RX; in bnx2_resolve_flow_ctrl()
1101 bp->flow_ctrl = FLOW_CTRL_TX; in bnx2_resolve_flow_ctrl()
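
The branch outcomes in bnx2_resolve_flow_ctrl() (FLOW_CTRL_TX | FLOW_CTRL_RX, RX only, TX only) are the standard IEEE 802.3 pause-autonegotiation resolution applied to the local and link-partner advertisement words. A compact version of that decision in the spirit of the generic MII helper; it is not a line-for-line transcription of the driver, and the bit positions are placeholders:

#include <stdint.h>

#define FLOW_CTRL_TX	0x1		/* we send pause frames */
#define FLOW_CTRL_RX	0x2		/* we honor pause frames we receive */

#define ADV_PAUSE	0x1		/* PAUSE bit in the advertisement word (position illustrative) */
#define ADV_ASYM	0x2		/* ASM_DIR (asymmetric pause) bit */

static uint32_t resolve_pause(uint32_t lcladv, uint32_t rmtadv)
{
	uint32_t cap = 0;

	if (lcladv & rmtadv & ADV_PAUSE)
		cap = FLOW_CTRL_TX | FLOW_CTRL_RX;	/* both ends symmetric: pause in both directions */
	else if (lcladv & rmtadv & ADV_ASYM) {
		if (lcladv & ADV_PAUSE)
			cap = FLOW_CTRL_RX;		/* we asked to honor pause, peer only sends it */
		else if (rmtadv & ADV_PAUSE)
			cap = FLOW_CTRL_TX;		/* we only send pause, peer honors it */
	}
	return cap;
}
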
1107 bnx2_5709s_linkup(struct bnx2 *bp) in bnx2_5709s_linkup() argument
1111 bp->link_up = 1; in bnx2_5709s_linkup()
1113 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_GP_STATUS); in bnx2_5709s_linkup()
1114 bnx2_read_phy(bp, MII_BNX2_GP_TOP_AN_STATUS1, &val); in bnx2_5709s_linkup()
1115 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_COMBO_IEEEB0); in bnx2_5709s_linkup()
1117 if ((bp->autoneg & AUTONEG_SPEED) == 0) { in bnx2_5709s_linkup()
1118 bp->line_speed = bp->req_line_speed; in bnx2_5709s_linkup()
1119 bp->duplex = bp->req_duplex; in bnx2_5709s_linkup()
1125 bp->line_speed = SPEED_10; in bnx2_5709s_linkup()
1128 bp->line_speed = SPEED_100; in bnx2_5709s_linkup()
1132 bp->line_speed = SPEED_1000; in bnx2_5709s_linkup()
1135 bp->line_speed = SPEED_2500; in bnx2_5709s_linkup()
1139 bp->duplex = DUPLEX_FULL; in bnx2_5709s_linkup()
1141 bp->duplex = DUPLEX_HALF; in bnx2_5709s_linkup()
1146 bnx2_5708s_linkup(struct bnx2 *bp) in bnx2_5708s_linkup() argument
1150 bp->link_up = 1; in bnx2_5708s_linkup()
1151 bnx2_read_phy(bp, BCM5708S_1000X_STAT1, &val); in bnx2_5708s_linkup()
1154 bp->line_speed = SPEED_10; in bnx2_5708s_linkup()
1157 bp->line_speed = SPEED_100; in bnx2_5708s_linkup()
1160 bp->line_speed = SPEED_1000; in bnx2_5708s_linkup()
1163 bp->line_speed = SPEED_2500; in bnx2_5708s_linkup()
1167 bp->duplex = DUPLEX_FULL; in bnx2_5708s_linkup()
1169 bp->duplex = DUPLEX_HALF; in bnx2_5708s_linkup()
1175 bnx2_5706s_linkup(struct bnx2 *bp) in bnx2_5706s_linkup() argument
1179 bp->link_up = 1; in bnx2_5706s_linkup()
1180 bp->line_speed = SPEED_1000; in bnx2_5706s_linkup()
1182 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_5706s_linkup()
1184 bp->duplex = DUPLEX_FULL; in bnx2_5706s_linkup()
1187 bp->duplex = DUPLEX_HALF; in bnx2_5706s_linkup()
1194 bnx2_read_phy(bp, bp->mii_adv, &local_adv); in bnx2_5706s_linkup()
1195 bnx2_read_phy(bp, bp->mii_lpa, &remote_adv); in bnx2_5706s_linkup()
1201 bp->duplex = DUPLEX_FULL; in bnx2_5706s_linkup()
1204 bp->duplex = DUPLEX_HALF; in bnx2_5706s_linkup()
1212 bnx2_copper_linkup(struct bnx2 *bp) in bnx2_copper_linkup() argument
1216 bp->phy_flags &= ~BNX2_PHY_FLAG_MDIX; in bnx2_copper_linkup()
1218 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_copper_linkup()
1222 bnx2_read_phy(bp, MII_CTRL1000, &local_adv); in bnx2_copper_linkup()
1223 bnx2_read_phy(bp, MII_STAT1000, &remote_adv); in bnx2_copper_linkup()
1227 bp->line_speed = SPEED_1000; in bnx2_copper_linkup()
1228 bp->duplex = DUPLEX_FULL; in bnx2_copper_linkup()
1231 bp->line_speed = SPEED_1000; in bnx2_copper_linkup()
1232 bp->duplex = DUPLEX_HALF; in bnx2_copper_linkup()
1235 bnx2_read_phy(bp, bp->mii_adv, &local_adv); in bnx2_copper_linkup()
1236 bnx2_read_phy(bp, bp->mii_lpa, &remote_adv); in bnx2_copper_linkup()
1240 bp->line_speed = SPEED_100; in bnx2_copper_linkup()
1241 bp->duplex = DUPLEX_FULL; in bnx2_copper_linkup()
1244 bp->line_speed = SPEED_100; in bnx2_copper_linkup()
1245 bp->duplex = DUPLEX_HALF; in bnx2_copper_linkup()
1248 bp->line_speed = SPEED_10; in bnx2_copper_linkup()
1249 bp->duplex = DUPLEX_FULL; in bnx2_copper_linkup()
1252 bp->line_speed = SPEED_10; in bnx2_copper_linkup()
1253 bp->duplex = DUPLEX_HALF; in bnx2_copper_linkup()
1256 bp->line_speed = 0; in bnx2_copper_linkup()
1257 bp->link_up = 0; in bnx2_copper_linkup()
1263 bp->line_speed = SPEED_100; in bnx2_copper_linkup()
1266 bp->line_speed = SPEED_10; in bnx2_copper_linkup()
1269 bp->duplex = DUPLEX_FULL; in bnx2_copper_linkup()
1272 bp->duplex = DUPLEX_HALF; in bnx2_copper_linkup()
1276 if (bp->link_up) { in bnx2_copper_linkup()
1279 bnx2_read_phy(bp, MII_BNX2_EXT_STATUS, &ext_status); in bnx2_copper_linkup()
1281 bp->phy_flags |= BNX2_PHY_FLAG_MDIX; in bnx2_copper_linkup()
1288 bnx2_init_rx_context(struct bnx2 *bp, u32 cid) in bnx2_init_rx_context() argument
1296 if (bp->flow_ctrl & FLOW_CTRL_TX) in bnx2_init_rx_context()
1299 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_CTX_TYPE, val); in bnx2_init_rx_context()
1303 bnx2_init_all_rx_contexts(struct bnx2 *bp) in bnx2_init_all_rx_contexts() argument
1308 for (i = 0, cid = RX_CID; i < bp->num_rx_rings; i++, cid++) { in bnx2_init_all_rx_contexts()
1311 bnx2_init_rx_context(bp, cid); in bnx2_init_all_rx_contexts()
1316 bnx2_set_mac_link(struct bnx2 *bp) in bnx2_set_mac_link() argument
1320 BNX2_WR(bp, BNX2_EMAC_TX_LENGTHS, 0x2620); in bnx2_set_mac_link()
1321 if (bp->link_up && (bp->line_speed == SPEED_1000) && in bnx2_set_mac_link()
1322 (bp->duplex == DUPLEX_HALF)) { in bnx2_set_mac_link()
1323 BNX2_WR(bp, BNX2_EMAC_TX_LENGTHS, 0x26ff); in bnx2_set_mac_link()
1327 val = BNX2_RD(bp, BNX2_EMAC_MODE); in bnx2_set_mac_link()
1333 if (bp->link_up) { in bnx2_set_mac_link()
1334 switch (bp->line_speed) { in bnx2_set_mac_link()
1336 if (BNX2_CHIP(bp) != BNX2_CHIP_5706) { in bnx2_set_mac_link()
1357 if (bp->duplex == DUPLEX_HALF) in bnx2_set_mac_link()
1359 BNX2_WR(bp, BNX2_EMAC_MODE, val); in bnx2_set_mac_link()
1362 bp->rx_mode &= ~BNX2_EMAC_RX_MODE_FLOW_EN; in bnx2_set_mac_link()
1364 if (bp->flow_ctrl & FLOW_CTRL_RX) in bnx2_set_mac_link()
1365 bp->rx_mode |= BNX2_EMAC_RX_MODE_FLOW_EN; in bnx2_set_mac_link()
1366 BNX2_WR(bp, BNX2_EMAC_RX_MODE, bp->rx_mode); in bnx2_set_mac_link()
1369 val = BNX2_RD(bp, BNX2_EMAC_TX_MODE); in bnx2_set_mac_link()
1372 if (bp->flow_ctrl & FLOW_CTRL_TX) in bnx2_set_mac_link()
1374 BNX2_WR(bp, BNX2_EMAC_TX_MODE, val); in bnx2_set_mac_link()
1377 BNX2_WR(bp, BNX2_EMAC_STATUS, BNX2_EMAC_STATUS_LINK_CHANGE); in bnx2_set_mac_link()
1379 bnx2_init_all_rx_contexts(bp); in bnx2_set_mac_link()
1383 bnx2_enable_bmsr1(struct bnx2 *bp) in bnx2_enable_bmsr1() argument
1385 if ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) && in bnx2_enable_bmsr1()
1386 (BNX2_CHIP(bp) == BNX2_CHIP_5709)) in bnx2_enable_bmsr1()
1387 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_enable_bmsr1()
1392 bnx2_disable_bmsr1(struct bnx2 *bp) in bnx2_disable_bmsr1() argument
1394 if ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) && in bnx2_disable_bmsr1()
1395 (BNX2_CHIP(bp) == BNX2_CHIP_5709)) in bnx2_disable_bmsr1()
1396 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_disable_bmsr1()
1401 bnx2_test_and_enable_2g5(struct bnx2 *bp) in bnx2_test_and_enable_2g5() argument
1406 if (!(bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE)) in bnx2_test_and_enable_2g5()
1409 if (bp->autoneg & AUTONEG_SPEED) in bnx2_test_and_enable_2g5()
1410 bp->advertising |= ADVERTISED_2500baseX_Full; in bnx2_test_and_enable_2g5()
1412 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_test_and_enable_2g5()
1413 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_OVER1G); in bnx2_test_and_enable_2g5()
1415 bnx2_read_phy(bp, bp->mii_up1, &up1); in bnx2_test_and_enable_2g5()
1418 bnx2_write_phy(bp, bp->mii_up1, up1); in bnx2_test_and_enable_2g5()
1422 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_test_and_enable_2g5()
1423 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_test_and_enable_2g5()
1430 bnx2_test_and_disable_2g5(struct bnx2 *bp) in bnx2_test_and_disable_2g5() argument
1435 if (!(bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE)) in bnx2_test_and_disable_2g5()
1438 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_test_and_disable_2g5()
1439 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_OVER1G); in bnx2_test_and_disable_2g5()
1441 bnx2_read_phy(bp, bp->mii_up1, &up1); in bnx2_test_and_disable_2g5()
1444 bnx2_write_phy(bp, bp->mii_up1, up1); in bnx2_test_and_disable_2g5()
1448 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_test_and_disable_2g5()
1449 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_test_and_disable_2g5()
1456 bnx2_enable_forced_2g5(struct bnx2 *bp) in bnx2_enable_forced_2g5() argument
1461 if (!(bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE)) in bnx2_enable_forced_2g5()
1464 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_enable_forced_2g5()
1467 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_enable_forced_2g5()
1469 if (!bnx2_read_phy(bp, MII_BNX2_SERDES_DIG_MISC1, &val)) { in bnx2_enable_forced_2g5()
1473 bnx2_write_phy(bp, MII_BNX2_SERDES_DIG_MISC1, val); in bnx2_enable_forced_2g5()
1476 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_enable_forced_2g5()
1478 err = bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_enable_forced_2g5()
1480 } else if (BNX2_CHIP(bp) == BNX2_CHIP_5708) { in bnx2_enable_forced_2g5()
1481 err = bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_enable_forced_2g5()
1491 if (bp->autoneg & AUTONEG_SPEED) { in bnx2_enable_forced_2g5()
1493 if (bp->req_duplex == DUPLEX_FULL) in bnx2_enable_forced_2g5()
1496 bnx2_write_phy(bp, bp->mii_bmcr, bmcr); in bnx2_enable_forced_2g5()
1500 bnx2_disable_forced_2g5(struct bnx2 *bp) in bnx2_disable_forced_2g5() argument
1505 if (!(bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE)) in bnx2_disable_forced_2g5()
1508 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_disable_forced_2g5()
1511 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_disable_forced_2g5()
1513 if (!bnx2_read_phy(bp, MII_BNX2_SERDES_DIG_MISC1, &val)) { in bnx2_disable_forced_2g5()
1515 bnx2_write_phy(bp, MII_BNX2_SERDES_DIG_MISC1, val); in bnx2_disable_forced_2g5()
1518 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, in bnx2_disable_forced_2g5()
1520 err = bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_disable_forced_2g5()
1522 } else if (BNX2_CHIP(bp) == BNX2_CHIP_5708) { in bnx2_disable_forced_2g5()
1523 err = bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_disable_forced_2g5()
1533 if (bp->autoneg & AUTONEG_SPEED) in bnx2_disable_forced_2g5()
1535 bnx2_write_phy(bp, bp->mii_bmcr, bmcr); in bnx2_disable_forced_2g5()
1539 bnx2_5706s_force_link_dn(struct bnx2 *bp, int start) in bnx2_5706s_force_link_dn() argument
1543 bnx2_write_phy(bp, MII_BNX2_DSP_ADDRESS, MII_EXPAND_SERDES_CTL); in bnx2_5706s_force_link_dn()
1544 bnx2_read_phy(bp, MII_BNX2_DSP_RW_PORT, &val); in bnx2_5706s_force_link_dn()
1546 bnx2_write_phy(bp, MII_BNX2_DSP_RW_PORT, val & 0xff0f); in bnx2_5706s_force_link_dn()
1548 bnx2_write_phy(bp, MII_BNX2_DSP_RW_PORT, val | 0xc0); in bnx2_5706s_force_link_dn()
1552 bnx2_set_link(struct bnx2 *bp) in bnx2_set_link() argument
1557 if (bp->loopback == MAC_LOOPBACK || bp->loopback == PHY_LOOPBACK) { in bnx2_set_link()
1558 bp->link_up = 1; in bnx2_set_link()
1562 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_set_link()
1565 link_up = bp->link_up; in bnx2_set_link()
1567 bnx2_enable_bmsr1(bp); in bnx2_set_link()
1568 bnx2_read_phy(bp, bp->mii_bmsr1, &bmsr); in bnx2_set_link()
1569 bnx2_read_phy(bp, bp->mii_bmsr1, &bmsr); in bnx2_set_link()
1570 bnx2_disable_bmsr1(bp); in bnx2_set_link()
1572 if ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) && in bnx2_set_link()
1573 (BNX2_CHIP(bp) == BNX2_CHIP_5706)) { in bnx2_set_link()
1576 if (bp->phy_flags & BNX2_PHY_FLAG_FORCED_DOWN) { in bnx2_set_link()
1577 bnx2_5706s_force_link_dn(bp, 0); in bnx2_set_link()
1578 bp->phy_flags &= ~BNX2_PHY_FLAG_FORCED_DOWN; in bnx2_set_link()
1580 val = BNX2_RD(bp, BNX2_EMAC_STATUS); in bnx2_set_link()
1582 bnx2_write_phy(bp, MII_BNX2_MISC_SHADOW, MISC_SHDW_AN_DBG); in bnx2_set_link()
1583 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &an_dbg); in bnx2_set_link()
1584 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &an_dbg); in bnx2_set_link()
1594 bp->link_up = 1; in bnx2_set_link()
1596 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_set_link()
1597 if (BNX2_CHIP(bp) == BNX2_CHIP_5706) in bnx2_set_link()
1598 bnx2_5706s_linkup(bp); in bnx2_set_link()
1599 else if (BNX2_CHIP(bp) == BNX2_CHIP_5708) in bnx2_set_link()
1600 bnx2_5708s_linkup(bp); in bnx2_set_link()
1601 else if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_set_link()
1602 bnx2_5709s_linkup(bp); in bnx2_set_link()
1605 bnx2_copper_linkup(bp); in bnx2_set_link()
1607 bnx2_resolve_flow_ctrl(bp); in bnx2_set_link()
1610 if ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) && in bnx2_set_link()
1611 (bp->autoneg & AUTONEG_SPEED)) in bnx2_set_link()
1612 bnx2_disable_forced_2g5(bp); in bnx2_set_link()
1614 if (bp->phy_flags & BNX2_PHY_FLAG_PARALLEL_DETECT) { in bnx2_set_link()
1617 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_set_link()
1619 bnx2_write_phy(bp, bp->mii_bmcr, bmcr); in bnx2_set_link()
1621 bp->phy_flags &= ~BNX2_PHY_FLAG_PARALLEL_DETECT; in bnx2_set_link()
1623 bp->link_up = 0; in bnx2_set_link()
1626 if (bp->link_up != link_up) { in bnx2_set_link()
1627 bnx2_report_link(bp); in bnx2_set_link()
1630 bnx2_set_mac_link(bp); in bnx2_set_link()
1636 bnx2_reset_phy(struct bnx2 *bp) in bnx2_reset_phy() argument
1641 bnx2_write_phy(bp, bp->mii_bmcr, BMCR_RESET); in bnx2_reset_phy()
1647 bnx2_read_phy(bp, bp->mii_bmcr, &reg); in bnx2_reset_phy()
1660 bnx2_phy_get_pause_adv(struct bnx2 *bp) in bnx2_phy_get_pause_adv() argument
1664 if ((bp->req_flow_ctrl & (FLOW_CTRL_RX | FLOW_CTRL_TX)) == in bnx2_phy_get_pause_adv()
1667 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_phy_get_pause_adv()
1674 else if (bp->req_flow_ctrl & FLOW_CTRL_TX) { in bnx2_phy_get_pause_adv()
1675 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_phy_get_pause_adv()
1682 else if (bp->req_flow_ctrl & FLOW_CTRL_RX) { in bnx2_phy_get_pause_adv()
1683 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_phy_get_pause_adv()
1696 bnx2_setup_remote_phy(struct bnx2 *bp, u8 port) in bnx2_setup_remote_phy() argument
1697 __releases(&bp->phy_lock) in bnx2_setup_remote_phy()
1698 __acquires(&bp->phy_lock) in bnx2_setup_remote_phy()
1702 pause_adv = bnx2_phy_get_pause_adv(bp); in bnx2_setup_remote_phy()
1704 if (bp->autoneg & AUTONEG_SPEED) { in bnx2_setup_remote_phy()
1706 if (bp->advertising & ADVERTISED_10baseT_Half) in bnx2_setup_remote_phy()
1708 if (bp->advertising & ADVERTISED_10baseT_Full) in bnx2_setup_remote_phy()
1710 if (bp->advertising & ADVERTISED_100baseT_Half) in bnx2_setup_remote_phy()
1712 if (bp->advertising & ADVERTISED_100baseT_Full) in bnx2_setup_remote_phy()
1714 if (bp->advertising & ADVERTISED_1000baseT_Full) in bnx2_setup_remote_phy()
1716 if (bp->advertising & ADVERTISED_2500baseX_Full) in bnx2_setup_remote_phy()
1719 if (bp->req_line_speed == SPEED_2500) in bnx2_setup_remote_phy()
1721 else if (bp->req_line_speed == SPEED_1000) in bnx2_setup_remote_phy()
1723 else if (bp->req_line_speed == SPEED_100) { in bnx2_setup_remote_phy()
1724 if (bp->req_duplex == DUPLEX_FULL) in bnx2_setup_remote_phy()
1728 } else if (bp->req_line_speed == SPEED_10) { in bnx2_setup_remote_phy()
1729 if (bp->req_duplex == DUPLEX_FULL) in bnx2_setup_remote_phy()
1745 bnx2_shmem_wr(bp, BNX2_DRV_MB_ARG0, speed_arg); in bnx2_setup_remote_phy()
1747 spin_unlock_bh(&bp->phy_lock); in bnx2_setup_remote_phy()
1748 bnx2_fw_sync(bp, BNX2_DRV_MSG_CODE_CMD_SET_LINK, 1, 0); in bnx2_setup_remote_phy()
1749 spin_lock_bh(&bp->phy_lock); in bnx2_setup_remote_phy()
1755 bnx2_setup_serdes_phy(struct bnx2 *bp, u8 port) in bnx2_setup_serdes_phy() argument
1756 __releases(&bp->phy_lock) in bnx2_setup_serdes_phy()
1757 __acquires(&bp->phy_lock) in bnx2_setup_serdes_phy()
1762 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_setup_serdes_phy()
1763 return bnx2_setup_remote_phy(bp, port); in bnx2_setup_serdes_phy()
1765 if (!(bp->autoneg & AUTONEG_SPEED)) { in bnx2_setup_serdes_phy()
1769 if (bp->req_line_speed == SPEED_2500) { in bnx2_setup_serdes_phy()
1770 if (!bnx2_test_and_enable_2g5(bp)) in bnx2_setup_serdes_phy()
1772 } else if (bp->req_line_speed == SPEED_1000) { in bnx2_setup_serdes_phy()
1773 if (bnx2_test_and_disable_2g5(bp)) in bnx2_setup_serdes_phy()
1776 bnx2_read_phy(bp, bp->mii_adv, &adv); in bnx2_setup_serdes_phy()
1779 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_setup_serdes_phy()
1783 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_setup_serdes_phy()
1784 if (bp->req_line_speed == SPEED_2500) in bnx2_setup_serdes_phy()
1785 bnx2_enable_forced_2g5(bp); in bnx2_setup_serdes_phy()
1786 else if (bp->req_line_speed == SPEED_1000) { in bnx2_setup_serdes_phy()
1787 bnx2_disable_forced_2g5(bp); in bnx2_setup_serdes_phy()
1791 } else if (BNX2_CHIP(bp) == BNX2_CHIP_5708) { in bnx2_setup_serdes_phy()
1792 if (bp->req_line_speed == SPEED_2500) in bnx2_setup_serdes_phy()
1798 if (bp->req_duplex == DUPLEX_FULL) { in bnx2_setup_serdes_phy()
1808 if (bp->link_up) { in bnx2_setup_serdes_phy()
1809 bnx2_write_phy(bp, bp->mii_adv, adv & in bnx2_setup_serdes_phy()
1812 bnx2_write_phy(bp, bp->mii_bmcr, bmcr | in bnx2_setup_serdes_phy()
1815 bp->link_up = 0; in bnx2_setup_serdes_phy()
1816 netif_carrier_off(bp->dev); in bnx2_setup_serdes_phy()
1817 bnx2_write_phy(bp, bp->mii_bmcr, new_bmcr); in bnx2_setup_serdes_phy()
1818 bnx2_report_link(bp); in bnx2_setup_serdes_phy()
1820 bnx2_write_phy(bp, bp->mii_adv, adv); in bnx2_setup_serdes_phy()
1821 bnx2_write_phy(bp, bp->mii_bmcr, new_bmcr); in bnx2_setup_serdes_phy()
1823 bnx2_resolve_flow_ctrl(bp); in bnx2_setup_serdes_phy()
1824 bnx2_set_mac_link(bp); in bnx2_setup_serdes_phy()
1829 bnx2_test_and_enable_2g5(bp); in bnx2_setup_serdes_phy()
1831 if (bp->advertising & ADVERTISED_1000baseT_Full) in bnx2_setup_serdes_phy()
1834 new_adv |= bnx2_phy_get_pause_adv(bp); in bnx2_setup_serdes_phy()
1836 bnx2_read_phy(bp, bp->mii_adv, &adv); in bnx2_setup_serdes_phy()
1837 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_setup_serdes_phy()
1839 bp->serdes_an_pending = 0; in bnx2_setup_serdes_phy()
1842 if (bp->link_up) { in bnx2_setup_serdes_phy()
1843 bnx2_write_phy(bp, bp->mii_bmcr, BMCR_LOOPBACK); in bnx2_setup_serdes_phy()
1844 spin_unlock_bh(&bp->phy_lock); in bnx2_setup_serdes_phy()
1846 spin_lock_bh(&bp->phy_lock); in bnx2_setup_serdes_phy()
1849 bnx2_write_phy(bp, bp->mii_adv, new_adv); in bnx2_setup_serdes_phy()
1850 bnx2_write_phy(bp, bp->mii_bmcr, bmcr | BMCR_ANRESTART | in bnx2_setup_serdes_phy()
1860 bp->current_interval = BNX2_SERDES_AN_TIMEOUT; in bnx2_setup_serdes_phy()
1861 bp->serdes_an_pending = 1; in bnx2_setup_serdes_phy()
1862 mod_timer(&bp->timer, jiffies + bp->current_interval); in bnx2_setup_serdes_phy()
1864 bnx2_resolve_flow_ctrl(bp); in bnx2_setup_serdes_phy()
1865 bnx2_set_mac_link(bp); in bnx2_setup_serdes_phy()
1872 (bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE) ? \
1887 bnx2_set_default_remote_link(struct bnx2 *bp) in bnx2_set_default_remote_link() argument
1891 if (bp->phy_port == PORT_TP) in bnx2_set_default_remote_link()
1892 link = bnx2_shmem_rd(bp, BNX2_RPHY_COPPER_LINK); in bnx2_set_default_remote_link()
1894 link = bnx2_shmem_rd(bp, BNX2_RPHY_SERDES_LINK); in bnx2_set_default_remote_link()
1897 bp->req_line_speed = 0; in bnx2_set_default_remote_link()
1898 bp->autoneg |= AUTONEG_SPEED; in bnx2_set_default_remote_link()
1899 bp->advertising = ADVERTISED_Autoneg; in bnx2_set_default_remote_link()
1901 bp->advertising |= ADVERTISED_10baseT_Half; in bnx2_set_default_remote_link()
1903 bp->advertising |= ADVERTISED_10baseT_Full; in bnx2_set_default_remote_link()
1905 bp->advertising |= ADVERTISED_100baseT_Half; in bnx2_set_default_remote_link()
1907 bp->advertising |= ADVERTISED_100baseT_Full; in bnx2_set_default_remote_link()
1909 bp->advertising |= ADVERTISED_1000baseT_Full; in bnx2_set_default_remote_link()
1911 bp->advertising |= ADVERTISED_2500baseX_Full; in bnx2_set_default_remote_link()
1913 bp->autoneg = 0; in bnx2_set_default_remote_link()
1914 bp->advertising = 0; in bnx2_set_default_remote_link()
1915 bp->req_duplex = DUPLEX_FULL; in bnx2_set_default_remote_link()
1917 bp->req_line_speed = SPEED_10; in bnx2_set_default_remote_link()
1919 bp->req_duplex = DUPLEX_HALF; in bnx2_set_default_remote_link()
1922 bp->req_line_speed = SPEED_100; in bnx2_set_default_remote_link()
1924 bp->req_duplex = DUPLEX_HALF; in bnx2_set_default_remote_link()
1927 bp->req_line_speed = SPEED_1000; in bnx2_set_default_remote_link()
1929 bp->req_line_speed = SPEED_2500; in bnx2_set_default_remote_link()
1934 bnx2_set_default_link(struct bnx2 *bp) in bnx2_set_default_link() argument
1936 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) { in bnx2_set_default_link()
1937 bnx2_set_default_remote_link(bp); in bnx2_set_default_link()
1941 bp->autoneg = AUTONEG_SPEED | AUTONEG_FLOW_CTRL; in bnx2_set_default_link()
1942 bp->req_line_speed = 0; in bnx2_set_default_link()
1943 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_set_default_link()
1946 bp->advertising = ETHTOOL_ALL_FIBRE_SPEED | ADVERTISED_Autoneg; in bnx2_set_default_link()
1948 reg = bnx2_shmem_rd(bp, BNX2_PORT_HW_CFG_CONFIG); in bnx2_set_default_link()
1951 bp->autoneg = 0; in bnx2_set_default_link()
1952 bp->req_line_speed = bp->line_speed = SPEED_1000; in bnx2_set_default_link()
1953 bp->req_duplex = DUPLEX_FULL; in bnx2_set_default_link()
1956 bp->advertising = ETHTOOL_ALL_COPPER_SPEED | ADVERTISED_Autoneg; in bnx2_set_default_link()
1960 bnx2_send_heart_beat(struct bnx2 *bp) in bnx2_send_heart_beat() argument
1965 spin_lock(&bp->indirect_lock); in bnx2_send_heart_beat()
1966 msg = (u32) (++bp->fw_drv_pulse_wr_seq & BNX2_DRV_PULSE_SEQ_MASK); in bnx2_send_heart_beat()
1967 addr = bp->shmem_base + BNX2_DRV_PULSE_MB; in bnx2_send_heart_beat()
1968 BNX2_WR(bp, BNX2_PCICFG_REG_WINDOW_ADDRESS, addr); in bnx2_send_heart_beat()
1969 BNX2_WR(bp, BNX2_PCICFG_REG_WINDOW, msg); in bnx2_send_heart_beat()
1970 spin_unlock(&bp->indirect_lock); in bnx2_send_heart_beat()
1974 bnx2_remote_phy_event(struct bnx2 *bp) in bnx2_remote_phy_event() argument
1977 u8 link_up = bp->link_up; in bnx2_remote_phy_event()
1980 msg = bnx2_shmem_rd(bp, BNX2_LINK_STATUS); in bnx2_remote_phy_event()
1983 bnx2_send_heart_beat(bp); in bnx2_remote_phy_event()
1988 bp->link_up = 0; in bnx2_remote_phy_event()
1992 bp->link_up = 1; in bnx2_remote_phy_event()
1994 bp->duplex = DUPLEX_FULL; in bnx2_remote_phy_event()
1997 bp->duplex = DUPLEX_HALF; in bnx2_remote_phy_event()
2000 bp->line_speed = SPEED_10; in bnx2_remote_phy_event()
2003 bp->duplex = DUPLEX_HALF; in bnx2_remote_phy_event()
2007 bp->line_speed = SPEED_100; in bnx2_remote_phy_event()
2010 bp->duplex = DUPLEX_HALF; in bnx2_remote_phy_event()
2013 bp->line_speed = SPEED_1000; in bnx2_remote_phy_event()
2016 bp->duplex = DUPLEX_HALF; in bnx2_remote_phy_event()
2019 bp->line_speed = SPEED_2500; in bnx2_remote_phy_event()
2022 bp->line_speed = 0; in bnx2_remote_phy_event()
2026 bp->flow_ctrl = 0; in bnx2_remote_phy_event()
2027 if ((bp->autoneg & (AUTONEG_SPEED | AUTONEG_FLOW_CTRL)) != in bnx2_remote_phy_event()
2029 if (bp->duplex == DUPLEX_FULL) in bnx2_remote_phy_event()
2030 bp->flow_ctrl = bp->req_flow_ctrl; in bnx2_remote_phy_event()
2033 bp->flow_ctrl |= FLOW_CTRL_TX; in bnx2_remote_phy_event()
2035 bp->flow_ctrl |= FLOW_CTRL_RX; in bnx2_remote_phy_event()
2038 old_port = bp->phy_port; in bnx2_remote_phy_event()
2040 bp->phy_port = PORT_FIBRE; in bnx2_remote_phy_event()
2042 bp->phy_port = PORT_TP; in bnx2_remote_phy_event()
2044 if (old_port != bp->phy_port) in bnx2_remote_phy_event()
2045 bnx2_set_default_link(bp); in bnx2_remote_phy_event()
2048 if (bp->link_up != link_up) in bnx2_remote_phy_event()
2049 bnx2_report_link(bp); in bnx2_remote_phy_event()
2051 bnx2_set_mac_link(bp); in bnx2_remote_phy_event()
2055 bnx2_set_remote_link(struct bnx2 *bp) in bnx2_set_remote_link() argument
2059 evt_code = bnx2_shmem_rd(bp, BNX2_FW_EVT_CODE_MB); in bnx2_set_remote_link()
2062 bnx2_remote_phy_event(bp); in bnx2_set_remote_link()
2066 bnx2_send_heart_beat(bp); in bnx2_set_remote_link()
2073 bnx2_setup_copper_phy(struct bnx2 *bp) in bnx2_setup_copper_phy() argument
2074 __releases(&bp->phy_lock) in bnx2_setup_copper_phy()
2075 __acquires(&bp->phy_lock) in bnx2_setup_copper_phy()
2080 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_setup_copper_phy()
2082 bnx2_read_phy(bp, bp->mii_adv, &adv_reg); in bnx2_setup_copper_phy()
2086 new_adv = ADVERTISE_CSMA | ethtool_adv_to_mii_adv_t(bp->advertising); in bnx2_setup_copper_phy()
2088 if (bp->autoneg & AUTONEG_SPEED) { in bnx2_setup_copper_phy()
2092 new_adv |= bnx2_phy_get_pause_adv(bp); in bnx2_setup_copper_phy()
2094 bnx2_read_phy(bp, MII_CTRL1000, &adv1000_reg); in bnx2_setup_copper_phy()
2097 new_adv1000 |= ethtool_adv_to_mii_ctrl1000_t(bp->advertising); in bnx2_setup_copper_phy()
2102 bnx2_write_phy(bp, bp->mii_adv, new_adv); in bnx2_setup_copper_phy()
2103 bnx2_write_phy(bp, MII_CTRL1000, new_adv1000); in bnx2_setup_copper_phy()
2104 bnx2_write_phy(bp, bp->mii_bmcr, BMCR_ANRESTART | in bnx2_setup_copper_phy()
2107 else if (bp->link_up) { in bnx2_setup_copper_phy()
2111 bnx2_resolve_flow_ctrl(bp); in bnx2_setup_copper_phy()
2112 bnx2_set_mac_link(bp); in bnx2_setup_copper_phy()
2119 bnx2_write_phy(bp, bp->mii_adv, new_adv); in bnx2_setup_copper_phy()
2122 if (bp->req_line_speed == SPEED_100) { in bnx2_setup_copper_phy()
2125 if (bp->req_duplex == DUPLEX_FULL) { in bnx2_setup_copper_phy()
2131 bnx2_read_phy(bp, bp->mii_bmsr, &bmsr); in bnx2_setup_copper_phy()
2132 bnx2_read_phy(bp, bp->mii_bmsr, &bmsr); in bnx2_setup_copper_phy()
2136 bnx2_write_phy(bp, bp->mii_bmcr, BMCR_LOOPBACK); in bnx2_setup_copper_phy()
2137 spin_unlock_bh(&bp->phy_lock); in bnx2_setup_copper_phy()
2139 spin_lock_bh(&bp->phy_lock); in bnx2_setup_copper_phy()
2141 bnx2_read_phy(bp, bp->mii_bmsr, &bmsr); in bnx2_setup_copper_phy()
2142 bnx2_read_phy(bp, bp->mii_bmsr, &bmsr); in bnx2_setup_copper_phy()
2145 bnx2_write_phy(bp, bp->mii_bmcr, new_bmcr); in bnx2_setup_copper_phy()
2152 bp->line_speed = bp->req_line_speed; in bnx2_setup_copper_phy()
2153 bp->duplex = bp->req_duplex; in bnx2_setup_copper_phy()
2154 bnx2_resolve_flow_ctrl(bp); in bnx2_setup_copper_phy()
2155 bnx2_set_mac_link(bp); in bnx2_setup_copper_phy()
2158 bnx2_resolve_flow_ctrl(bp); in bnx2_setup_copper_phy()
2159 bnx2_set_mac_link(bp); in bnx2_setup_copper_phy()
2165 bnx2_setup_phy(struct bnx2 *bp, u8 port) in bnx2_setup_phy() argument
2166 __releases(&bp->phy_lock) in bnx2_setup_phy()
2167 __acquires(&bp->phy_lock) in bnx2_setup_phy()
2169 if (bp->loopback == MAC_LOOPBACK) in bnx2_setup_phy()
2172 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_setup_phy()
2173 return bnx2_setup_serdes_phy(bp, port); in bnx2_setup_phy()
2176 return bnx2_setup_copper_phy(bp); in bnx2_setup_phy()
2181 bnx2_init_5709s_phy(struct bnx2 *bp, int reset_phy) in bnx2_init_5709s_phy() argument
2185 bp->mii_bmcr = MII_BMCR + 0x10; in bnx2_init_5709s_phy()
2186 bp->mii_bmsr = MII_BMSR + 0x10; in bnx2_init_5709s_phy()
2187 bp->mii_bmsr1 = MII_BNX2_GP_TOP_AN_STATUS1; in bnx2_init_5709s_phy()
2188 bp->mii_adv = MII_ADVERTISE + 0x10; in bnx2_init_5709s_phy()
2189 bp->mii_lpa = MII_LPA + 0x10; in bnx2_init_5709s_phy()
2190 bp->mii_up1 = MII_BNX2_OVER1G_UP1; in bnx2_init_5709s_phy()
2192 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_AER); in bnx2_init_5709s_phy()
2193 bnx2_write_phy(bp, MII_BNX2_AER_AER, MII_BNX2_AER_AER_AN_MMD); in bnx2_init_5709s_phy()
2195 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_COMBO_IEEEB0); in bnx2_init_5709s_phy()
2197 bnx2_reset_phy(bp); in bnx2_init_5709s_phy()
2199 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_SERDES_DIG); in bnx2_init_5709s_phy()
2201 bnx2_read_phy(bp, MII_BNX2_SERDES_DIG_1000XCTL1, &val); in bnx2_init_5709s_phy()
2204 bnx2_write_phy(bp, MII_BNX2_SERDES_DIG_1000XCTL1, val); in bnx2_init_5709s_phy()
2206 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_OVER1G); in bnx2_init_5709s_phy()
2207 bnx2_read_phy(bp, MII_BNX2_OVER1G_UP1, &val); in bnx2_init_5709s_phy()
2208 if (bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE) in bnx2_init_5709s_phy()
2212 bnx2_write_phy(bp, MII_BNX2_OVER1G_UP1, val); in bnx2_init_5709s_phy()
2214 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_BAM_NXTPG); in bnx2_init_5709s_phy()
2215 bnx2_read_phy(bp, MII_BNX2_BAM_NXTPG_CTL, &val); in bnx2_init_5709s_phy()
2217 bnx2_write_phy(bp, MII_BNX2_BAM_NXTPG_CTL, val); in bnx2_init_5709s_phy()
2219 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_CL73_USERB0); in bnx2_init_5709s_phy()
2223 bnx2_write_phy(bp, MII_BNX2_CL73_BAM_CTL1, val); in bnx2_init_5709s_phy()
2225 bnx2_write_phy(bp, MII_BNX2_BLK_ADDR, MII_BNX2_BLK_ADDR_COMBO_IEEEB0); in bnx2_init_5709s_phy()
2231 bnx2_init_5708s_phy(struct bnx2 *bp, int reset_phy) in bnx2_init_5708s_phy() argument
2236 bnx2_reset_phy(bp); in bnx2_init_5708s_phy()
2238 bp->mii_up1 = BCM5708S_UP1; in bnx2_init_5708s_phy()
2240 bnx2_write_phy(bp, BCM5708S_BLK_ADDR, BCM5708S_BLK_ADDR_DIG3); in bnx2_init_5708s_phy()
2241 bnx2_write_phy(bp, BCM5708S_DIG_3_0, BCM5708S_DIG_3_0_USE_IEEE); in bnx2_init_5708s_phy()
2242 bnx2_write_phy(bp, BCM5708S_BLK_ADDR, BCM5708S_BLK_ADDR_DIG); in bnx2_init_5708s_phy()
2244 bnx2_read_phy(bp, BCM5708S_1000X_CTL1, &val); in bnx2_init_5708s_phy()
2246 bnx2_write_phy(bp, BCM5708S_1000X_CTL1, val); in bnx2_init_5708s_phy()
2248 bnx2_read_phy(bp, BCM5708S_1000X_CTL2, &val); in bnx2_init_5708s_phy()
2250 bnx2_write_phy(bp, BCM5708S_1000X_CTL2, val); in bnx2_init_5708s_phy()
2252 if (bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE) { in bnx2_init_5708s_phy()
2253 bnx2_read_phy(bp, BCM5708S_UP1, &val); in bnx2_init_5708s_phy()
2255 bnx2_write_phy(bp, BCM5708S_UP1, val); in bnx2_init_5708s_phy()
2258 if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_A0) || in bnx2_init_5708s_phy()
2259 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_B0) || in bnx2_init_5708s_phy()
2260 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_B1)) { in bnx2_init_5708s_phy()
2262 bnx2_write_phy(bp, BCM5708S_BLK_ADDR, in bnx2_init_5708s_phy()
2264 bnx2_read_phy(bp, BCM5708S_TX_ACTL1, &val); in bnx2_init_5708s_phy()
2266 bnx2_write_phy(bp, BCM5708S_TX_ACTL1, val); in bnx2_init_5708s_phy()
2267 bnx2_write_phy(bp, BCM5708S_BLK_ADDR, BCM5708S_BLK_ADDR_DIG); in bnx2_init_5708s_phy()
2270 val = bnx2_shmem_rd(bp, BNX2_PORT_HW_CFG_CONFIG) & in bnx2_init_5708s_phy()
2276 is_backplane = bnx2_shmem_rd(bp, BNX2_SHARED_HW_CFG_CONFIG); in bnx2_init_5708s_phy()
2278 bnx2_write_phy(bp, BCM5708S_BLK_ADDR, in bnx2_init_5708s_phy()
2280 bnx2_write_phy(bp, BCM5708S_TX_ACTL3, val); in bnx2_init_5708s_phy()
2281 bnx2_write_phy(bp, BCM5708S_BLK_ADDR, in bnx2_init_5708s_phy()
2289 bnx2_init_5706s_phy(struct bnx2 *bp, int reset_phy) in bnx2_init_5706s_phy() argument
2292 bnx2_reset_phy(bp); in bnx2_init_5706s_phy()
2294 bp->phy_flags &= ~BNX2_PHY_FLAG_PARALLEL_DETECT; in bnx2_init_5706s_phy()
2296 if (BNX2_CHIP(bp) == BNX2_CHIP_5706) in bnx2_init_5706s_phy()
2297 BNX2_WR(bp, BNX2_MISC_GP_HW_CTL0, 0x300); in bnx2_init_5706s_phy()
2299 if (bp->dev->mtu > ETH_DATA_LEN) { in bnx2_init_5706s_phy()
2303 bnx2_write_phy(bp, 0x18, 0x7); in bnx2_init_5706s_phy()
2304 bnx2_read_phy(bp, 0x18, &val); in bnx2_init_5706s_phy()
2305 bnx2_write_phy(bp, 0x18, (val & 0xfff8) | 0x4000); in bnx2_init_5706s_phy()
2307 bnx2_write_phy(bp, 0x1c, 0x6c00); in bnx2_init_5706s_phy()
2308 bnx2_read_phy(bp, 0x1c, &val); in bnx2_init_5706s_phy()
2309 bnx2_write_phy(bp, 0x1c, (val & 0x3ff) | 0xec02); in bnx2_init_5706s_phy()
2314 bnx2_write_phy(bp, 0x18, 0x7); in bnx2_init_5706s_phy()
2315 bnx2_read_phy(bp, 0x18, &val); in bnx2_init_5706s_phy()
2316 bnx2_write_phy(bp, 0x18, val & ~0x4007); in bnx2_init_5706s_phy()
2318 bnx2_write_phy(bp, 0x1c, 0x6c00); in bnx2_init_5706s_phy()
2319 bnx2_read_phy(bp, 0x1c, &val); in bnx2_init_5706s_phy()
2320 bnx2_write_phy(bp, 0x1c, (val & 0x3fd) | 0xec00); in bnx2_init_5706s_phy()
2327 bnx2_init_copper_phy(struct bnx2 *bp, int reset_phy) in bnx2_init_copper_phy() argument
2332 bnx2_reset_phy(bp); in bnx2_init_copper_phy()
2334 if (bp->phy_flags & BNX2_PHY_FLAG_CRC_FIX) { in bnx2_init_copper_phy()
2335 bnx2_write_phy(bp, 0x18, 0x0c00); in bnx2_init_copper_phy()
2336 bnx2_write_phy(bp, 0x17, 0x000a); in bnx2_init_copper_phy()
2337 bnx2_write_phy(bp, 0x15, 0x310b); in bnx2_init_copper_phy()
2338 bnx2_write_phy(bp, 0x17, 0x201f); in bnx2_init_copper_phy()
2339 bnx2_write_phy(bp, 0x15, 0x9506); in bnx2_init_copper_phy()
2340 bnx2_write_phy(bp, 0x17, 0x401f); in bnx2_init_copper_phy()
2341 bnx2_write_phy(bp, 0x15, 0x14e2); in bnx2_init_copper_phy()
2342 bnx2_write_phy(bp, 0x18, 0x0400); in bnx2_init_copper_phy()
2345 if (bp->phy_flags & BNX2_PHY_FLAG_DIS_EARLY_DAC) { in bnx2_init_copper_phy()
2346 bnx2_write_phy(bp, MII_BNX2_DSP_ADDRESS, in bnx2_init_copper_phy()
2348 bnx2_read_phy(bp, MII_BNX2_DSP_RW_PORT, &val); in bnx2_init_copper_phy()
2350 bnx2_write_phy(bp, MII_BNX2_DSP_RW_PORT, val); in bnx2_init_copper_phy()
2353 if (bp->dev->mtu > ETH_DATA_LEN) { in bnx2_init_copper_phy()
2355 bnx2_write_phy(bp, 0x18, 0x7); in bnx2_init_copper_phy()
2356 bnx2_read_phy(bp, 0x18, &val); in bnx2_init_copper_phy()
2357 bnx2_write_phy(bp, 0x18, val | 0x4000); in bnx2_init_copper_phy()
2359 bnx2_read_phy(bp, 0x10, &val); in bnx2_init_copper_phy()
2360 bnx2_write_phy(bp, 0x10, val | 0x1); in bnx2_init_copper_phy()
2363 bnx2_write_phy(bp, 0x18, 0x7); in bnx2_init_copper_phy()
2364 bnx2_read_phy(bp, 0x18, &val); in bnx2_init_copper_phy()
2365 bnx2_write_phy(bp, 0x18, val & ~0x4007); in bnx2_init_copper_phy()
2367 bnx2_read_phy(bp, 0x10, &val); in bnx2_init_copper_phy()
2368 bnx2_write_phy(bp, 0x10, val & ~0x1); in bnx2_init_copper_phy()
2372 bnx2_write_phy(bp, MII_BNX2_AUX_CTL, AUX_CTL_MISC_CTL); in bnx2_init_copper_phy()
2373 bnx2_read_phy(bp, MII_BNX2_AUX_CTL, &val); in bnx2_init_copper_phy()
2377 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_init_copper_phy()
2380 bnx2_write_phy(bp, MII_BNX2_AUX_CTL, val); in bnx2_init_copper_phy()
2386 bnx2_init_phy(struct bnx2 *bp, int reset_phy) in bnx2_init_phy() argument
2387 __releases(&bp->phy_lock) in bnx2_init_phy()
2388 __acquires(&bp->phy_lock) in bnx2_init_phy()
2393 bp->phy_flags &= ~BNX2_PHY_FLAG_INT_MODE_MASK; in bnx2_init_phy()
2394 bp->phy_flags |= BNX2_PHY_FLAG_INT_MODE_LINK_READY; in bnx2_init_phy()
2396 bp->mii_bmcr = MII_BMCR; in bnx2_init_phy()
2397 bp->mii_bmsr = MII_BMSR; in bnx2_init_phy()
2398 bp->mii_bmsr1 = MII_BMSR; in bnx2_init_phy()
2399 bp->mii_adv = MII_ADVERTISE; in bnx2_init_phy()
2400 bp->mii_lpa = MII_LPA; in bnx2_init_phy()
2402 BNX2_WR(bp, BNX2_EMAC_ATTENTION_ENA, BNX2_EMAC_ATTENTION_ENA_LINK); in bnx2_init_phy()
2404 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_init_phy()
2407 bnx2_read_phy(bp, MII_PHYSID1, &val); in bnx2_init_phy()
2408 bp->phy_id = val << 16; in bnx2_init_phy()
2409 bnx2_read_phy(bp, MII_PHYSID2, &val); in bnx2_init_phy()
2410 bp->phy_id |= val & 0xffff; in bnx2_init_phy()
2412 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_init_phy()
2413 if (BNX2_CHIP(bp) == BNX2_CHIP_5706) in bnx2_init_phy()
2414 rc = bnx2_init_5706s_phy(bp, reset_phy); in bnx2_init_phy()
2415 else if (BNX2_CHIP(bp) == BNX2_CHIP_5708) in bnx2_init_phy()
2416 rc = bnx2_init_5708s_phy(bp, reset_phy); in bnx2_init_phy()
2417 else if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_init_phy()
2418 rc = bnx2_init_5709s_phy(bp, reset_phy); in bnx2_init_phy()
2421 rc = bnx2_init_copper_phy(bp, reset_phy); in bnx2_init_phy()
2426 rc = bnx2_setup_phy(bp, bp->phy_port); in bnx2_init_phy()
2432 bnx2_set_mac_loopback(struct bnx2 *bp) in bnx2_set_mac_loopback() argument
2436 mac_mode = BNX2_RD(bp, BNX2_EMAC_MODE); in bnx2_set_mac_loopback()
2439 BNX2_WR(bp, BNX2_EMAC_MODE, mac_mode); in bnx2_set_mac_loopback()
2440 bp->link_up = 1; in bnx2_set_mac_loopback()
2447 bnx2_set_phy_loopback(struct bnx2 *bp) in bnx2_set_phy_loopback() argument
2452 spin_lock_bh(&bp->phy_lock); in bnx2_set_phy_loopback()
2453 rc = bnx2_write_phy(bp, bp->mii_bmcr, BMCR_LOOPBACK | BMCR_FULLDPLX | in bnx2_set_phy_loopback()
2455 spin_unlock_bh(&bp->phy_lock); in bnx2_set_phy_loopback()
2460 if (bnx2_test_link(bp) == 0) in bnx2_set_phy_loopback()
2465 mac_mode = BNX2_RD(bp, BNX2_EMAC_MODE); in bnx2_set_phy_loopback()
2471 BNX2_WR(bp, BNX2_EMAC_MODE, mac_mode); in bnx2_set_phy_loopback()
2472 bp->link_up = 1; in bnx2_set_phy_loopback()
2477 bnx2_dump_mcp_state(struct bnx2 *bp) in bnx2_dump_mcp_state() argument
2479 struct net_device *dev = bp->dev; in bnx2_dump_mcp_state()
2483 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_dump_mcp_state()
2491 bnx2_reg_rd_ind(bp, mcp_p0), bnx2_reg_rd_ind(bp, mcp_p1)); in bnx2_dump_mcp_state()
2493 bnx2_reg_rd_ind(bp, BNX2_MCP_CPU_MODE), in bnx2_dump_mcp_state()
2494 bnx2_reg_rd_ind(bp, BNX2_MCP_CPU_STATE), in bnx2_dump_mcp_state()
2495 bnx2_reg_rd_ind(bp, BNX2_MCP_CPU_EVENT_MASK)); in bnx2_dump_mcp_state()
2497 bnx2_reg_rd_ind(bp, BNX2_MCP_CPU_PROGRAM_COUNTER), in bnx2_dump_mcp_state()
2498 bnx2_reg_rd_ind(bp, BNX2_MCP_CPU_PROGRAM_COUNTER), in bnx2_dump_mcp_state()
2499 bnx2_reg_rd_ind(bp, BNX2_MCP_CPU_INSTRUCTION)); in bnx2_dump_mcp_state()
2502 bnx2_shmem_rd(bp, BNX2_DRV_MB), in bnx2_dump_mcp_state()
2503 bnx2_shmem_rd(bp, BNX2_FW_MB), in bnx2_dump_mcp_state()
2504 bnx2_shmem_rd(bp, BNX2_LINK_STATUS)); in bnx2_dump_mcp_state()
2505 pr_cont(" drv_pulse_mb[%08x]\n", bnx2_shmem_rd(bp, BNX2_DRV_PULSE_MB)); in bnx2_dump_mcp_state()
2507 bnx2_shmem_rd(bp, BNX2_DEV_INFO_SIGNATURE), in bnx2_dump_mcp_state()
2508 bnx2_shmem_rd(bp, BNX2_BC_STATE_RESET_TYPE)); in bnx2_dump_mcp_state()
2510 bnx2_shmem_rd(bp, BNX2_BC_STATE_CONDITION)); in bnx2_dump_mcp_state()
2511 DP_SHMEM_LINE(bp, BNX2_BC_RESET_TYPE); in bnx2_dump_mcp_state()
2512 DP_SHMEM_LINE(bp, 0x3cc); in bnx2_dump_mcp_state()
2513 DP_SHMEM_LINE(bp, 0x3dc); in bnx2_dump_mcp_state()
2514 DP_SHMEM_LINE(bp, 0x3ec); in bnx2_dump_mcp_state()
2515 netdev_err(dev, "DEBUG: 0x3fc[%08x]\n", bnx2_shmem_rd(bp, 0x3fc)); in bnx2_dump_mcp_state()
2520 bnx2_fw_sync(struct bnx2 *bp, u32 msg_data, int ack, int silent) in bnx2_fw_sync() argument
2525 bp->fw_wr_seq++; in bnx2_fw_sync()
2526 msg_data |= bp->fw_wr_seq; in bnx2_fw_sync()
2527 bp->fw_last_msg = msg_data; in bnx2_fw_sync()
2529 bnx2_shmem_wr(bp, BNX2_DRV_MB, msg_data); in bnx2_fw_sync()
2538 val = bnx2_shmem_rd(bp, BNX2_FW_MB); in bnx2_fw_sync()
2551 bnx2_shmem_wr(bp, BNX2_DRV_MB, msg_data); in bnx2_fw_sync()
2554 bnx2_dump_mcp_state(bp); in bnx2_fw_sync()
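
bnx2_fw_sync() is the driver-to-firmware handshake: each command is tagged with the next value of bp->fw_wr_seq, written to the driver mailbox in shared memory, and, when an acknowledgement is requested, the firmware mailbox is polled until the same sequence number comes back; on timeout the driver dumps MCP state. A self-contained sketch with local stand-ins for the shmem helpers and assumed mailbox offsets and mask:

#include <stdint.h>

#define SEQ_MASK	0x0000ffff	/* assumed acknowledgement mask; the driver has its own */
#define DRV_MB		0x00		/* assumed offset of the driver mailbox */
#define FW_MB		0x04		/* assumed offset of the firmware mailbox */

static uint32_t shmem[64];		/* fake shared memory */
static void shmem_wr(uint32_t off, uint32_t val) { shmem[off / 4] = val; }
static uint32_t shmem_rd(uint32_t off) { return shmem[off / 4]; }

static uint32_t fw_wr_seq;

static int fw_sync(uint32_t msg_data, int ack)
{
	int i;

	fw_wr_seq++;
	msg_data |= fw_wr_seq & SEQ_MASK;	/* tag the command with a sequence number */
	shmem_wr(DRV_MB, msg_data);
	if (!ack)
		return 0;

	for (i = 0; i < 100; i++) {		/* bounded wait for the firmware to echo the sequence */
		if ((shmem_rd(FW_MB) & SEQ_MASK) == (msg_data & SEQ_MASK))
			return 0;
		/* the driver sleeps between polls and dumps MCP state on timeout */
	}
	return -1;				/* no acknowledgement: firmware is unresponsive */
}
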
2567 bnx2_init_5709_context(struct bnx2 *bp) in bnx2_init_5709_context() argument
2574 BNX2_WR(bp, BNX2_CTX_COMMAND, val); in bnx2_init_5709_context()
2576 val = BNX2_RD(bp, BNX2_CTX_COMMAND); in bnx2_init_5709_context()
2584 for (i = 0; i < bp->ctx_pages; i++) { in bnx2_init_5709_context()
2587 if (bp->ctx_blk[i]) in bnx2_init_5709_context()
2588 memset(bp->ctx_blk[i], 0, BNX2_PAGE_SIZE); in bnx2_init_5709_context()
2592 BNX2_WR(bp, BNX2_CTX_HOST_PAGE_TBL_DATA0, in bnx2_init_5709_context()
2593 (bp->ctx_blk_mapping[i] & 0xffffffff) | in bnx2_init_5709_context()
2595 BNX2_WR(bp, BNX2_CTX_HOST_PAGE_TBL_DATA1, in bnx2_init_5709_context()
2596 (u64) bp->ctx_blk_mapping[i] >> 32); in bnx2_init_5709_context()
2597 BNX2_WR(bp, BNX2_CTX_HOST_PAGE_TBL_CTRL, i | in bnx2_init_5709_context()
2601 val = BNX2_RD(bp, BNX2_CTX_HOST_PAGE_TBL_CTRL); in bnx2_init_5709_context()
2615 bnx2_init_context(struct bnx2 *bp) in bnx2_init_context() argument
2626 if (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) { in bnx2_init_context()
2647 BNX2_WR(bp, BNX2_CTX_VIRT_ADDR, vcid_addr); in bnx2_init_context()
2648 BNX2_WR(bp, BNX2_CTX_PAGE_TBL, pcid_addr); in bnx2_init_context()
2652 bnx2_ctx_wr(bp, vcid_addr, offset, 0); in bnx2_init_context()
2658 bnx2_alloc_bad_rbuf(struct bnx2 *bp) in bnx2_alloc_bad_rbuf() argument
2668 BNX2_WR(bp, BNX2_MISC_ENABLE_SET_BITS, in bnx2_alloc_bad_rbuf()
2674 val = bnx2_reg_rd_ind(bp, BNX2_RBUF_STATUS1); in bnx2_alloc_bad_rbuf()
2676 bnx2_reg_wr_ind(bp, BNX2_RBUF_COMMAND, in bnx2_alloc_bad_rbuf()
2679 val = bnx2_reg_rd_ind(bp, BNX2_RBUF_FW_BUF_ALLOC); in bnx2_alloc_bad_rbuf()
2689 val = bnx2_reg_rd_ind(bp, BNX2_RBUF_STATUS1); in bnx2_alloc_bad_rbuf()
2700 bnx2_reg_wr_ind(bp, BNX2_RBUF_FW_BUF_FREE, val); in bnx2_alloc_bad_rbuf()
2707 bnx2_set_mac_addr(struct bnx2 *bp, const u8 *mac_addr, u32 pos) in bnx2_set_mac_addr() argument
2713 BNX2_WR(bp, BNX2_EMAC_MAC_MATCH0 + (pos * 8), val); in bnx2_set_mac_addr()
2718 BNX2_WR(bp, BNX2_EMAC_MAC_MATCH1 + (pos * 8), val); in bnx2_set_mac_addr()
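
bnx2_set_mac_addr() programs one of the MAC-match slots by splitting the six address bytes across a MATCH0/MATCH1 register pair, with pos * 8 selecting the slot. In the sketch below the exact byte placement (two bytes in the first word, four in the second) is an assumption for illustration, not taken from the listing:

#include <stdint.h>

static uint32_t emac_mac_match[32];	/* fake register file: MATCH0/MATCH1 per slot */

static void set_mac_addr(const uint8_t *mac, unsigned int pos)
{
	uint32_t hi = (mac[0] << 8) | mac[1];
	uint32_t lo = ((uint32_t)mac[2] << 24) | (mac[3] << 16) |
		      (mac[4] << 8) | mac[5];

	emac_mac_match[pos * 2]     = hi;	/* like BNX2_EMAC_MAC_MATCH0 + (pos * 8) */
	emac_mac_match[pos * 2 + 1] = lo;	/* like BNX2_EMAC_MAC_MATCH1 + (pos * 8) */
}
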
2722 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_page() argument
2732 mapping = dma_map_page(&bp->pdev->dev, page, 0, PAGE_SIZE, in bnx2_alloc_rx_page()
2734 if (dma_mapping_error(&bp->pdev->dev, mapping)) { in bnx2_alloc_rx_page()
2747 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index) in bnx2_free_rx_page() argument
2755 dma_unmap_page(&bp->pdev->dev, dma_unmap_addr(rx_pg, mapping), in bnx2_free_rx_page()
2763 bnx2_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_data() argument
2771 data = kmalloc(bp->rx_buf_size, gfp); in bnx2_alloc_rx_data()
2775 mapping = dma_map_single(&bp->pdev->dev, in bnx2_alloc_rx_data()
2777 bp->rx_buf_use_size, in bnx2_alloc_rx_data()
2779 if (dma_mapping_error(&bp->pdev->dev, mapping)) { in bnx2_alloc_rx_data()
2790 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_alloc_rx_data()
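bnx2_alloc_rx_data() follows the usual allocate-then-map pattern: kmalloc a receive buffer, dma_map_single() it toward the device, and back out if dma_mapping_error() reports a failure before advancing the producer byte sequence. A simplified sketch of that sequence (ring bookkeeping and the exact mapped offset are trimmed):

        static int alloc_rx_data_sketch(struct bnx2 *bp,
                                        struct bnx2_rx_ring_info *rxr,
                                        u16 index, gfp_t gfp)
        {
                u8 *data;
                dma_addr_t mapping;

                data = kmalloc(bp->rx_buf_size, gfp);
                if (!data)
                        return -ENOMEM;

                mapping = dma_map_single(&bp->pdev->dev, data,
                                         bp->rx_buf_use_size, DMA_FROM_DEVICE);
                if (dma_mapping_error(&bp->pdev->dev, mapping)) {
                        kfree(data);
                        return -EIO;
                }

                /* record data + mapping in the sw ring at 'index', fill the
                 * rx BD, then credit the bytes to the producer sequence
                 */
                rxr->rx_prod_bseq += bp->rx_buf_use_size;
                return 0;
        }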
2796 bnx2_phy_event_is_set(struct bnx2 *bp, struct bnx2_napi *bnapi, u32 event) in bnx2_phy_event_is_set() argument
2806 BNX2_WR(bp, BNX2_PCICFG_STATUS_BIT_SET_CMD, event); in bnx2_phy_event_is_set()
2808 BNX2_WR(bp, BNX2_PCICFG_STATUS_BIT_CLEAR_CMD, event); in bnx2_phy_event_is_set()
2816 bnx2_phy_int(struct bnx2 *bp, struct bnx2_napi *bnapi) in bnx2_phy_int() argument
2818 spin_lock(&bp->phy_lock); in bnx2_phy_int()
2820 if (bnx2_phy_event_is_set(bp, bnapi, STATUS_ATTN_BITS_LINK_STATE)) in bnx2_phy_int()
2821 bnx2_set_link(bp); in bnx2_phy_int()
2822 if (bnx2_phy_event_is_set(bp, bnapi, STATUS_ATTN_BITS_TIMER_ABORT)) in bnx2_phy_int()
2823 bnx2_set_remote_link(bp); in bnx2_phy_int()
2825 spin_unlock(&bp->phy_lock); in bnx2_phy_int()
2842 bnx2_tx_int(struct bnx2 *bp, struct bnx2_napi *bnapi, int budget) in bnx2_tx_int() argument
2850 index = (bnapi - bp->bnx2_napi); in bnx2_tx_int()
2851 txq = netdev_get_tx_queue(bp->dev, index); in bnx2_tx_int()
2883 dma_unmap_single(&bp->pdev->dev, dma_unmap_addr(tx_buf, mapping), in bnx2_tx_int()
2895 dma_unmap_page(&bp->pdev->dev, in bnx2_tx_int()
2925 (bnx2_tx_avail(bp, txr) > bp->tx_wake_thresh)) { in bnx2_tx_int()
2928 (bnx2_tx_avail(bp, txr) > bp->tx_wake_thresh)) in bnx2_tx_int()
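The tail of bnx2_tx_int() shows the double-checked queue-wake idiom: wake the transmit queue only if it is stopped and the ring has drained past tx_wake_thresh, re-checking both conditions under the tx queue lock to close the race with the transmit path. A sketch of that idiom using the helpers visible in the listing:

        static void tx_wake_check_sketch(struct bnx2 *bp,
                                         struct bnx2_tx_ring_info *txr,
                                         struct netdev_queue *txq)
        {
                if (unlikely(netif_tx_queue_stopped(txq)) &&
                    bnx2_tx_avail(bp, txr) > bp->tx_wake_thresh) {
                        __netif_tx_lock(txq, smp_processor_id());
                        /* re-test under the lock: the xmit path may have
                         * stopped the queue again since the first check
                         */
                        if (netif_tx_queue_stopped(txq) &&
                            bnx2_tx_avail(bp, txr) > bp->tx_wake_thresh)
                                netif_tx_wake_queue(txq);
                        __netif_tx_unlock(txq);
                }
        }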
2937 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_skb_pages() argument
2995 bnx2_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_data() argument
3004 dma_sync_single_for_device(&bp->pdev->dev, in bnx2_reuse_rx_data()
3008 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_reuse_rx_data()
3025 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u8 *data, in bnx2_rx_skb() argument
3033 err = bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); in bnx2_rx_skb()
3035 bnx2_reuse_rx_data(bp, rxr, data, (u16) (ring_idx >> 16), prod); in bnx2_rx_skb()
3041 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_skb()
3046 dma_unmap_single(&bp->pdev->dev, dma_addr, bp->rx_buf_use_size, in bnx2_rx_skb()
3076 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, in bnx2_rx_skb()
3101 err = bnx2_alloc_rx_page(bp, rxr, in bnx2_rx_skb()
3107 bnx2_reuse_rx_skb_pages(bp, rxr, skb, in bnx2_rx_skb()
3112 dma_unmap_page(&bp->pdev->dev, mapping_old, in bnx2_rx_skb()
3142 bnx2_rx_int(struct bnx2 *bp, struct bnx2_napi *bnapi, int budget) in bnx2_rx_int() argument
3181 dma_sync_single_for_cpu(&bp->pdev->dev, dma_addr, in bnx2_rx_int()
3196 } else if (len > bp->rx_jumbo_thresh) { in bnx2_rx_int()
3197 hdr_len = bp->rx_jumbo_thresh; in bnx2_rx_int()
3207 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3214 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_int()
3221 if (len <= bp->rx_copy_thresh) { in bnx2_rx_int()
3222 skb = netdev_alloc_skb(bp->dev, len + 6); in bnx2_rx_int()
3224 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3236 bnx2_reuse_rx_data(bp, rxr, data, in bnx2_rx_int()
3240 skb = bnx2_rx_skb(bp, rxr, data, len, hdr_len, dma_addr, in bnx2_rx_int()
3246 !(bp->rx_mode & BNX2_EMAC_RX_MODE_KEEP_VLAN_TAG)) in bnx2_rx_int()
3249 skb->protocol = eth_type_trans(skb, bp->dev); in bnx2_rx_int()
3251 if (len > (bp->dev->mtu + ETH_HLEN) && in bnx2_rx_int()
3261 if ((bp->dev->features & NETIF_F_RXCSUM) && in bnx2_rx_int()
3269 if ((bp->dev->features & NETIF_F_RXHASH) && in bnx2_rx_int()
3275 skb_record_rx_queue(skb, bnapi - &bp->bnx2_napi[0]); in bnx2_rx_int()
3296 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_rx_int()
3298 BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod); in bnx2_rx_int()
3300 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_rx_int()
3313 struct bnx2 *bp = bnapi->bp; in bnx2_msi() local
3316 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, in bnx2_msi()
3321 if (unlikely(atomic_read(&bp->intr_sem) != 0)) in bnx2_msi()
3333 struct bnx2 *bp = bnapi->bp; in bnx2_msi_1shot() local
3338 if (unlikely(atomic_read(&bp->intr_sem) != 0)) in bnx2_msi_1shot()
3350 struct bnx2 *bp = bnapi->bp; in bnx2_interrupt() local
3360 (BNX2_RD(bp, BNX2_PCICFG_MISC_STATUS) & in bnx2_interrupt()
3364 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, in bnx2_interrupt()
3371 BNX2_RD(bp, BNX2_PCICFG_INT_ACK_CMD); in bnx2_interrupt()
3374 if (unlikely(atomic_read(&bp->intr_sem) != 0)) in bnx2_interrupt()
3421 bnx2_chk_missed_msi(struct bnx2 *bp) in bnx2_chk_missed_msi() argument
3423 struct bnx2_napi *bnapi = &bp->bnx2_napi[0]; in bnx2_chk_missed_msi()
3427 msi_ctrl = BNX2_RD(bp, BNX2_PCICFG_MSI_CONTROL); in bnx2_chk_missed_msi()
3431 if (bnapi->last_status_idx == bp->idle_chk_status_idx) { in bnx2_chk_missed_msi()
3432 BNX2_WR(bp, BNX2_PCICFG_MSI_CONTROL, msi_ctrl & in bnx2_chk_missed_msi()
3434 BNX2_WR(bp, BNX2_PCICFG_MSI_CONTROL, msi_ctrl); in bnx2_chk_missed_msi()
3435 bnx2_msi(bp->irq_tbl[0].vector, bnapi); in bnx2_chk_missed_msi()
3439 bp->idle_chk_status_idx = bnapi->last_status_idx; in bnx2_chk_missed_msi()
3443 static void bnx2_poll_cnic(struct bnx2 *bp, struct bnx2_napi *bnapi) in bnx2_poll_cnic() argument
3451 c_ops = rcu_dereference(bp->cnic_ops); in bnx2_poll_cnic()
3453 bnapi->cnic_tag = c_ops->cnic_handler(bp->cnic_data, in bnx2_poll_cnic()
3459 static void bnx2_poll_link(struct bnx2 *bp, struct bnx2_napi *bnapi) in bnx2_poll_link() argument
3468 bnx2_phy_int(bp, bnapi); in bnx2_poll_link()
3473 BNX2_WR(bp, BNX2_HC_COMMAND, in bnx2_poll_link()
3474 bp->hc_cmd | BNX2_HC_COMMAND_COAL_NOW_WO_INT); in bnx2_poll_link()
3475 BNX2_RD(bp, BNX2_HC_COMMAND); in bnx2_poll_link()
3479 static int bnx2_poll_work(struct bnx2 *bp, struct bnx2_napi *bnapi, in bnx2_poll_work() argument
3486 bnx2_tx_int(bp, bnapi, 0); in bnx2_poll_work()
3489 work_done += bnx2_rx_int(bp, bnapi, budget - work_done); in bnx2_poll_work()
3497 struct bnx2 *bp = bnapi->bp; in bnx2_poll_msix() local
3502 work_done = bnx2_poll_work(bp, bnapi, work_done, budget); in bnx2_poll_msix()
3512 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, bnapi->int_num | in bnx2_poll_msix()
3524 struct bnx2 *bp = bnapi->bp; in bnx2_poll() local
3529 bnx2_poll_link(bp, bnapi); in bnx2_poll()
3531 work_done = bnx2_poll_work(bp, bnapi, work_done, budget); in bnx2_poll()
3534 bnx2_poll_cnic(bp, bnapi); in bnx2_poll()
3549 if (likely(bp->flags & BNX2_FLAG_USING_MSI_OR_MSIX)) { in bnx2_poll()
3550 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, in bnx2_poll()
3555 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, in bnx2_poll()
3560 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, in bnx2_poll()
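bnx2_poll() ends with the usual NAPI contract: once rx/tx work falls below budget and the status index has settled, complete NAPI and re-arm the interrupt, which this hardware does through BNX2_PCICFG_INT_ACK_CMD. A generic sketch of that completion step; reenable_int_sketch() is a hypothetical stand-in for the driver's ack-command write sequence:

        static int poll_sketch(struct napi_struct *napi, int budget)
        {
                struct bnx2_napi *bnapi = container_of(napi, struct bnx2_napi, napi);
                struct bnx2 *bp = bnapi->bp;
                int work_done;

                work_done = bnx2_poll_work(bp, bnapi, 0, budget);

                if (work_done < budget && napi_complete_done(napi, work_done))
                        reenable_int_sketch(bp, bnapi);  /* placeholder: ack
                                                          * last_status_idx via
                                                          * the int-ack register
                                                          */
                return work_done;
        }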
3576 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_rx_mode() local
3584 spin_lock_bh(&bp->phy_lock); in bnx2_set_rx_mode()
3586 rx_mode = bp->rx_mode & ~(BNX2_EMAC_RX_MODE_PROMISCUOUS | in bnx2_set_rx_mode()
3590 (bp->flags & BNX2_FLAG_CAN_KEEP_VLAN)) in bnx2_set_rx_mode()
3600 BNX2_WR(bp, BNX2_EMAC_MULTICAST_HASH0 + (i * 4), in bnx2_set_rx_mode()
3623 BNX2_WR(bp, BNX2_EMAC_MULTICAST_HASH0 + (i * 4), in bnx2_set_rx_mode()
3638 bnx2_set_mac_addr(bp, ha->addr, in bnx2_set_rx_mode()
3647 if (rx_mode != bp->rx_mode) { in bnx2_set_rx_mode()
3648 bp->rx_mode = rx_mode; in bnx2_set_rx_mode()
3649 BNX2_WR(bp, BNX2_EMAC_RX_MODE, rx_mode); in bnx2_set_rx_mode()
3652 BNX2_WR(bp, BNX2_RPM_SORT_USER0, 0x0); in bnx2_set_rx_mode()
3653 BNX2_WR(bp, BNX2_RPM_SORT_USER0, sort_mode); in bnx2_set_rx_mode()
3654 BNX2_WR(bp, BNX2_RPM_SORT_USER0, sort_mode | BNX2_RPM_SORT_USER0_ENA); in bnx2_set_rx_mode()
3656 spin_unlock_bh(&bp->phy_lock); in bnx2_set_rx_mode()
3686 static void bnx2_release_firmware(struct bnx2 *bp) in bnx2_release_firmware() argument
3688 if (bp->rv2p_firmware) { in bnx2_release_firmware()
3689 release_firmware(bp->mips_firmware); in bnx2_release_firmware()
3690 release_firmware(bp->rv2p_firmware); in bnx2_release_firmware()
3691 bp->rv2p_firmware = NULL; in bnx2_release_firmware()
3695 static int bnx2_request_uncached_firmware(struct bnx2 *bp) in bnx2_request_uncached_firmware() argument
3702 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_request_uncached_firmware()
3704 if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5709_A0) || in bnx2_request_uncached_firmware()
3705 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5709_A1)) in bnx2_request_uncached_firmware()
3714 rc = request_firmware(&bp->mips_firmware, mips_fw_file, &bp->pdev->dev); in bnx2_request_uncached_firmware()
3720 rc = request_firmware(&bp->rv2p_firmware, rv2p_fw_file, &bp->pdev->dev); in bnx2_request_uncached_firmware()
3725 mips_fw = (const struct bnx2_mips_fw_file *) bp->mips_firmware->data; in bnx2_request_uncached_firmware()
3726 rv2p_fw = (const struct bnx2_rv2p_fw_file *) bp->rv2p_firmware->data; in bnx2_request_uncached_firmware()
3727 if (bp->mips_firmware->size < sizeof(*mips_fw) || in bnx2_request_uncached_firmware()
3728 check_mips_fw_entry(bp->mips_firmware, &mips_fw->com) || in bnx2_request_uncached_firmware()
3729 check_mips_fw_entry(bp->mips_firmware, &mips_fw->cp) || in bnx2_request_uncached_firmware()
3730 check_mips_fw_entry(bp->mips_firmware, &mips_fw->rxp) || in bnx2_request_uncached_firmware()
3731 check_mips_fw_entry(bp->mips_firmware, &mips_fw->tpat) || in bnx2_request_uncached_firmware()
3732 check_mips_fw_entry(bp->mips_firmware, &mips_fw->txp)) { in bnx2_request_uncached_firmware()
3737 if (bp->rv2p_firmware->size < sizeof(*rv2p_fw) || in bnx2_request_uncached_firmware()
3738 check_fw_section(bp->rv2p_firmware, &rv2p_fw->proc1.rv2p, 8, true) || in bnx2_request_uncached_firmware()
3739 check_fw_section(bp->rv2p_firmware, &rv2p_fw->proc2.rv2p, 8, true)) { in bnx2_request_uncached_firmware()
3748 release_firmware(bp->rv2p_firmware); in bnx2_request_uncached_firmware()
3749 bp->rv2p_firmware = NULL; in bnx2_request_uncached_firmware()
3751 release_firmware(bp->mips_firmware); in bnx2_request_uncached_firmware()
3755 static int bnx2_request_firmware(struct bnx2 *bp) in bnx2_request_firmware() argument
3757 return bp->rv2p_firmware ? 0 : bnx2_request_uncached_firmware(bp); in bnx2_request_firmware()
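bnx2_request_uncached_firmware() is a straightforward request_firmware() consumer: pick file names by chip revision, request both the MIPS and RV2P images, sanity-check sizes and section entries, and release everything on failure; bnx2_request_firmware() then reuses the cached copies on later opens. A minimal sketch of the request/validate/release flow, where fw_name and validate_image() are hypothetical, not driver symbols:

        static int request_fw_sketch(struct bnx2 *bp, const char *fw_name)
        {
                const struct firmware *fw;
                int rc;

                rc = request_firmware(&fw, fw_name, &bp->pdev->dev);
                if (rc) {
                        pr_err("Can't load firmware file \"%s\"\n", fw_name);
                        return rc;
                }

                if (fw->size < sizeof(struct bnx2_mips_fw_file) ||
                    validate_image(fw)) {       /* per-section offset/length checks */
                        release_firmware(fw);
                        return -EINVAL;
                }

                bp->mips_firmware = fw;         /* keep cached until teardown */
                return 0;
        }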
3773 load_rv2p_fw(struct bnx2 *bp, u32 rv2p_proc, in load_rv2p_fw() argument
3784 rv2p_code = (__be32 *)(bp->rv2p_firmware->data + file_offset); in load_rv2p_fw()
3795 BNX2_WR(bp, BNX2_RV2P_INSTR_HIGH, be32_to_cpu(*rv2p_code)); in load_rv2p_fw()
3797 BNX2_WR(bp, BNX2_RV2P_INSTR_LOW, be32_to_cpu(*rv2p_code)); in load_rv2p_fw()
3801 BNX2_WR(bp, addr, val); in load_rv2p_fw()
3804 rv2p_code = (__be32 *)(bp->rv2p_firmware->data + file_offset); in load_rv2p_fw()
3811 BNX2_WR(bp, BNX2_RV2P_INSTR_HIGH, code); in load_rv2p_fw()
3814 BNX2_WR(bp, BNX2_RV2P_INSTR_LOW, code); in load_rv2p_fw()
3817 BNX2_WR(bp, addr, val); in load_rv2p_fw()
3823 BNX2_WR(bp, BNX2_RV2P_COMMAND, BNX2_RV2P_COMMAND_PROC1_RESET); in load_rv2p_fw()
3826 BNX2_WR(bp, BNX2_RV2P_COMMAND, BNX2_RV2P_COMMAND_PROC2_RESET); in load_rv2p_fw()
3833 load_cpu_fw(struct bnx2 *bp, const struct cpu_reg *cpu_reg, in load_cpu_fw() argument
3842 val = bnx2_reg_rd_ind(bp, cpu_reg->mode); in load_cpu_fw()
3844 bnx2_reg_wr_ind(bp, cpu_reg->mode, val); in load_cpu_fw()
3845 bnx2_reg_wr_ind(bp, cpu_reg->state, cpu_reg->state_value_clear); in load_cpu_fw()
3851 data = (__be32 *)(bp->mips_firmware->data + file_offset); in load_cpu_fw()
3858 bnx2_reg_wr_ind(bp, offset, be32_to_cpu(data[j])); in load_cpu_fw()
3865 data = (__be32 *)(bp->mips_firmware->data + file_offset); in load_cpu_fw()
3872 bnx2_reg_wr_ind(bp, offset, be32_to_cpu(data[j])); in load_cpu_fw()
3879 data = (__be32 *)(bp->mips_firmware->data + file_offset); in load_cpu_fw()
3886 bnx2_reg_wr_ind(bp, offset, be32_to_cpu(data[j])); in load_cpu_fw()
3890 bnx2_reg_wr_ind(bp, cpu_reg->inst, 0); in load_cpu_fw()
3893 bnx2_reg_wr_ind(bp, cpu_reg->pc, val); in load_cpu_fw()
3896 val = bnx2_reg_rd_ind(bp, cpu_reg->mode); in load_cpu_fw()
3898 bnx2_reg_wr_ind(bp, cpu_reg->state, cpu_reg->state_value_clear); in load_cpu_fw()
3899 bnx2_reg_wr_ind(bp, cpu_reg->mode, val); in load_cpu_fw()
3905 bnx2_init_cpus(struct bnx2 *bp) in bnx2_init_cpus() argument
3908 (const struct bnx2_mips_fw_file *) bp->mips_firmware->data; in bnx2_init_cpus()
3910 (const struct bnx2_rv2p_fw_file *) bp->rv2p_firmware->data; in bnx2_init_cpus()
3914 load_rv2p_fw(bp, RV2P_PROC1, &rv2p_fw->proc1); in bnx2_init_cpus()
3915 load_rv2p_fw(bp, RV2P_PROC2, &rv2p_fw->proc2); in bnx2_init_cpus()
3918 rc = load_cpu_fw(bp, &cpu_reg_rxp, &mips_fw->rxp); in bnx2_init_cpus()
3923 rc = load_cpu_fw(bp, &cpu_reg_txp, &mips_fw->txp); in bnx2_init_cpus()
3928 rc = load_cpu_fw(bp, &cpu_reg_tpat, &mips_fw->tpat); in bnx2_init_cpus()
3933 rc = load_cpu_fw(bp, &cpu_reg_com, &mips_fw->com); in bnx2_init_cpus()
3938 rc = load_cpu_fw(bp, &cpu_reg_cp, &mips_fw->cp); in bnx2_init_cpus()
3945 bnx2_setup_wol(struct bnx2 *bp) in bnx2_setup_wol() argument
3950 if (bp->wol) { in bnx2_setup_wol()
3954 autoneg = bp->autoneg; in bnx2_setup_wol()
3955 advertising = bp->advertising; in bnx2_setup_wol()
3957 if (bp->phy_port == PORT_TP) { in bnx2_setup_wol()
3958 bp->autoneg = AUTONEG_SPEED; in bnx2_setup_wol()
3959 bp->advertising = ADVERTISED_10baseT_Half | in bnx2_setup_wol()
3966 spin_lock_bh(&bp->phy_lock); in bnx2_setup_wol()
3967 bnx2_setup_phy(bp, bp->phy_port); in bnx2_setup_wol()
3968 spin_unlock_bh(&bp->phy_lock); in bnx2_setup_wol()
3970 bp->autoneg = autoneg; in bnx2_setup_wol()
3971 bp->advertising = advertising; in bnx2_setup_wol()
3973 bnx2_set_mac_addr(bp, bp->dev->dev_addr, 0); in bnx2_setup_wol()
3975 val = BNX2_RD(bp, BNX2_EMAC_MODE); in bnx2_setup_wol()
3982 if (bp->phy_port == PORT_TP) { in bnx2_setup_wol()
3986 if (bp->line_speed == SPEED_2500) in bnx2_setup_wol()
3990 BNX2_WR(bp, BNX2_EMAC_MODE, val); in bnx2_setup_wol()
3994 BNX2_WR(bp, BNX2_EMAC_MULTICAST_HASH0 + (i * 4), in bnx2_setup_wol()
3997 BNX2_WR(bp, BNX2_EMAC_RX_MODE, BNX2_EMAC_RX_MODE_SORT_MODE); in bnx2_setup_wol()
4000 BNX2_WR(bp, BNX2_RPM_SORT_USER0, 0x0); in bnx2_setup_wol()
4001 BNX2_WR(bp, BNX2_RPM_SORT_USER0, val); in bnx2_setup_wol()
4002 BNX2_WR(bp, BNX2_RPM_SORT_USER0, val | BNX2_RPM_SORT_USER0_ENA); in bnx2_setup_wol()
4005 BNX2_WR(bp, BNX2_MISC_ENABLE_SET_BITS, in bnx2_setup_wol()
4010 val = BNX2_RD(bp, BNX2_RPM_CONFIG); in bnx2_setup_wol()
4012 BNX2_WR(bp, BNX2_RPM_CONFIG, val); in bnx2_setup_wol()
4019 if (!(bp->flags & BNX2_FLAG_NO_WOL)) { in bnx2_setup_wol()
4023 if (bp->fw_last_msg || BNX2_CHIP(bp) != BNX2_CHIP_5709) { in bnx2_setup_wol()
4024 bnx2_fw_sync(bp, wol_msg, 1, 0); in bnx2_setup_wol()
4030 val = bnx2_shmem_rd(bp, BNX2_PORT_FEATURE); in bnx2_setup_wol()
4031 bnx2_shmem_wr(bp, BNX2_PORT_FEATURE, in bnx2_setup_wol()
4033 bnx2_fw_sync(bp, wol_msg, 1, 0); in bnx2_setup_wol()
4034 bnx2_shmem_wr(bp, BNX2_PORT_FEATURE, val); in bnx2_setup_wol()
4040 bnx2_set_power_state(struct bnx2 *bp, pci_power_t state) in bnx2_set_power_state() argument
4046 pci_enable_wake(bp->pdev, PCI_D0, false); in bnx2_set_power_state()
4047 pci_set_power_state(bp->pdev, PCI_D0); in bnx2_set_power_state()
4049 val = BNX2_RD(bp, BNX2_EMAC_MODE); in bnx2_set_power_state()
4052 BNX2_WR(bp, BNX2_EMAC_MODE, val); in bnx2_set_power_state()
4054 val = BNX2_RD(bp, BNX2_RPM_CONFIG); in bnx2_set_power_state()
4056 BNX2_WR(bp, BNX2_RPM_CONFIG, val); in bnx2_set_power_state()
4060 bnx2_setup_wol(bp); in bnx2_set_power_state()
4061 pci_wake_from_d3(bp->pdev, bp->wol); in bnx2_set_power_state()
4062 if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) || in bnx2_set_power_state()
4063 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A1)) { in bnx2_set_power_state()
4065 if (bp->wol) in bnx2_set_power_state()
4066 pci_set_power_state(bp->pdev, PCI_D3hot); in bnx2_set_power_state()
4070 if (!bp->fw_last_msg && BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_set_power_state()
4077 val = bnx2_shmem_rd(bp, BNX2_BC_STATE_CONDITION); in bnx2_set_power_state()
4080 bnx2_shmem_wr(bp, BNX2_BC_STATE_CONDITION, val); in bnx2_set_power_state()
4082 pci_set_power_state(bp->pdev, PCI_D3hot); in bnx2_set_power_state()
4096 bnx2_acquire_nvram_lock(struct bnx2 *bp) in bnx2_acquire_nvram_lock() argument
4102 BNX2_WR(bp, BNX2_NVM_SW_ARB, BNX2_NVM_SW_ARB_ARB_REQ_SET2); in bnx2_acquire_nvram_lock()
4104 val = BNX2_RD(bp, BNX2_NVM_SW_ARB); in bnx2_acquire_nvram_lock()
4118 bnx2_release_nvram_lock(struct bnx2 *bp) in bnx2_release_nvram_lock() argument
4124 BNX2_WR(bp, BNX2_NVM_SW_ARB, BNX2_NVM_SW_ARB_ARB_REQ_CLR2); in bnx2_release_nvram_lock()
4127 val = BNX2_RD(bp, BNX2_NVM_SW_ARB); in bnx2_release_nvram_lock()
4142 bnx2_enable_nvram_write(struct bnx2 *bp) in bnx2_enable_nvram_write() argument
4146 val = BNX2_RD(bp, BNX2_MISC_CFG); in bnx2_enable_nvram_write()
4147 BNX2_WR(bp, BNX2_MISC_CFG, val | BNX2_MISC_CFG_NVM_WR_EN_PCI); in bnx2_enable_nvram_write()
4149 if (bp->flash_info->flags & BNX2_NV_WREN) { in bnx2_enable_nvram_write()
4152 BNX2_WR(bp, BNX2_NVM_COMMAND, BNX2_NVM_COMMAND_DONE); in bnx2_enable_nvram_write()
4153 BNX2_WR(bp, BNX2_NVM_COMMAND, in bnx2_enable_nvram_write()
4159 val = BNX2_RD(bp, BNX2_NVM_COMMAND); in bnx2_enable_nvram_write()
4171 bnx2_disable_nvram_write(struct bnx2 *bp) in bnx2_disable_nvram_write() argument
4175 val = BNX2_RD(bp, BNX2_MISC_CFG); in bnx2_disable_nvram_write()
4176 BNX2_WR(bp, BNX2_MISC_CFG, val & ~BNX2_MISC_CFG_NVM_WR_EN); in bnx2_disable_nvram_write()
4181 bnx2_enable_nvram_access(struct bnx2 *bp) in bnx2_enable_nvram_access() argument
4185 val = BNX2_RD(bp, BNX2_NVM_ACCESS_ENABLE); in bnx2_enable_nvram_access()
4187 BNX2_WR(bp, BNX2_NVM_ACCESS_ENABLE, in bnx2_enable_nvram_access()
4192 bnx2_disable_nvram_access(struct bnx2 *bp) in bnx2_disable_nvram_access() argument
4196 val = BNX2_RD(bp, BNX2_NVM_ACCESS_ENABLE); in bnx2_disable_nvram_access()
4198 BNX2_WR(bp, BNX2_NVM_ACCESS_ENABLE, in bnx2_disable_nvram_access()
4204 bnx2_nvram_erase_page(struct bnx2 *bp, u32 offset) in bnx2_nvram_erase_page() argument
4209 if (bp->flash_info->flags & BNX2_NV_BUFFERED) in bnx2_nvram_erase_page()
4218 BNX2_WR(bp, BNX2_NVM_COMMAND, BNX2_NVM_COMMAND_DONE); in bnx2_nvram_erase_page()
4221 BNX2_WR(bp, BNX2_NVM_ADDR, offset & BNX2_NVM_ADDR_NVM_ADDR_VALUE); in bnx2_nvram_erase_page()
4224 BNX2_WR(bp, BNX2_NVM_COMMAND, cmd); in bnx2_nvram_erase_page()
4232 val = BNX2_RD(bp, BNX2_NVM_COMMAND); in bnx2_nvram_erase_page()
4244 bnx2_nvram_read_dword(struct bnx2 *bp, u32 offset, u8 *ret_val, u32 cmd_flags) in bnx2_nvram_read_dword() argument
4253 if (bp->flash_info->flags & BNX2_NV_TRANSLATE) { in bnx2_nvram_read_dword()
4254 offset = ((offset / bp->flash_info->page_size) << in bnx2_nvram_read_dword()
4255 bp->flash_info->page_bits) + in bnx2_nvram_read_dword()
4256 (offset % bp->flash_info->page_size); in bnx2_nvram_read_dword()
4260 BNX2_WR(bp, BNX2_NVM_COMMAND, BNX2_NVM_COMMAND_DONE); in bnx2_nvram_read_dword()
4263 BNX2_WR(bp, BNX2_NVM_ADDR, offset & BNX2_NVM_ADDR_NVM_ADDR_VALUE); in bnx2_nvram_read_dword()
4266 BNX2_WR(bp, BNX2_NVM_COMMAND, cmd); in bnx2_nvram_read_dword()
4274 val = BNX2_RD(bp, BNX2_NVM_COMMAND); in bnx2_nvram_read_dword()
4276 __be32 v = cpu_to_be32(BNX2_RD(bp, BNX2_NVM_READ)); in bnx2_nvram_read_dword()
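Both the NVRAM read and write paths apply the same BNX2_NV_TRANSLATE fix-up: flash parts whose page size is not a power of two are addressed by a shifted page number plus the byte offset within the page. For example, assuming a 264-byte page and 9 page bits, linear offset 1000 lands on page 3, byte 208, so the device offset becomes (3 << 9) + 208 = 1744. The arithmetic, pulled out as a hypothetical helper for clarity:

        static u32 translate_nvram_offset(u32 offset, u32 page_size, u32 page_bits)
        {
                /* non-power-of-two page: page number goes in the high bits,
                 * byte-within-page in the low bits
                 */
                return ((offset / page_size) << page_bits) +
                       (offset % page_size);
        }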
4289 bnx2_nvram_write_dword(struct bnx2 *bp, u32 offset, u8 *val, u32 cmd_flags) in bnx2_nvram_write_dword() argument
4299 if (bp->flash_info->flags & BNX2_NV_TRANSLATE) { in bnx2_nvram_write_dword()
4300 offset = ((offset / bp->flash_info->page_size) << in bnx2_nvram_write_dword()
4301 bp->flash_info->page_bits) + in bnx2_nvram_write_dword()
4302 (offset % bp->flash_info->page_size); in bnx2_nvram_write_dword()
4306 BNX2_WR(bp, BNX2_NVM_COMMAND, BNX2_NVM_COMMAND_DONE); in bnx2_nvram_write_dword()
4311 BNX2_WR(bp, BNX2_NVM_WRITE, be32_to_cpu(val32)); in bnx2_nvram_write_dword()
4314 BNX2_WR(bp, BNX2_NVM_ADDR, offset & BNX2_NVM_ADDR_NVM_ADDR_VALUE); in bnx2_nvram_write_dword()
4317 BNX2_WR(bp, BNX2_NVM_COMMAND, cmd); in bnx2_nvram_write_dword()
4323 if (BNX2_RD(bp, BNX2_NVM_COMMAND) & BNX2_NVM_COMMAND_DONE) in bnx2_nvram_write_dword()
4333 bnx2_init_nvram(struct bnx2 *bp) in bnx2_init_nvram() argument
4339 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_nvram()
4340 bp->flash_info = &flash_5709; in bnx2_init_nvram()
4345 val = BNX2_RD(bp, BNX2_NVM_CFG1); in bnx2_init_nvram()
4356 bp->flash_info = flash; in bnx2_init_nvram()
4374 bp->flash_info = flash; in bnx2_init_nvram()
4377 if ((rc = bnx2_acquire_nvram_lock(bp)) != 0) in bnx2_init_nvram()
4381 bnx2_enable_nvram_access(bp); in bnx2_init_nvram()
4384 BNX2_WR(bp, BNX2_NVM_CFG1, flash->config1); in bnx2_init_nvram()
4385 BNX2_WR(bp, BNX2_NVM_CFG2, flash->config2); in bnx2_init_nvram()
4386 BNX2_WR(bp, BNX2_NVM_CFG3, flash->config3); in bnx2_init_nvram()
4387 BNX2_WR(bp, BNX2_NVM_WRITE1, flash->write1); in bnx2_init_nvram()
4390 bnx2_disable_nvram_access(bp); in bnx2_init_nvram()
4391 bnx2_release_nvram_lock(bp); in bnx2_init_nvram()
4399 bp->flash_info = NULL; in bnx2_init_nvram()
4405 val = bnx2_shmem_rd(bp, BNX2_SHARED_HW_CFG_CONFIG2); in bnx2_init_nvram()
4408 bp->flash_size = val; in bnx2_init_nvram()
4410 bp->flash_size = bp->flash_info->total_size; in bnx2_init_nvram()
4416 bnx2_nvram_read(struct bnx2 *bp, u32 offset, u8 *ret_buf, in bnx2_nvram_read() argument
4426 if ((rc = bnx2_acquire_nvram_lock(bp)) != 0) in bnx2_nvram_read()
4430 bnx2_enable_nvram_access(bp); in bnx2_nvram_read()
4454 rc = bnx2_nvram_read_dword(bp, offset32, buf, cmd_flags); in bnx2_nvram_read()
4479 rc = bnx2_nvram_read_dword(bp, offset32, buf, cmd_flags); in bnx2_nvram_read()
4492 rc = bnx2_nvram_read_dword(bp, offset32, ret_buf, cmd_flags); in bnx2_nvram_read()
4500 rc = bnx2_nvram_read_dword(bp, offset32, ret_buf, 0); in bnx2_nvram_read()
4512 rc = bnx2_nvram_read_dword(bp, offset32, buf, cmd_flags); in bnx2_nvram_read()
4518 bnx2_disable_nvram_access(bp); in bnx2_nvram_read()
4520 bnx2_release_nvram_lock(bp); in bnx2_nvram_read()
4526 bnx2_nvram_write(struct bnx2 *bp, u32 offset, u8 *data_buf, in bnx2_nvram_write() argument
4544 if ((rc = bnx2_nvram_read(bp, offset32, start, 4))) in bnx2_nvram_write()
4551 if ((rc = bnx2_nvram_read(bp, offset32 + len32 - 4, end, 4))) in bnx2_nvram_write()
4569 if (!(bp->flash_info->flags & BNX2_NV_BUFFERED)) { in bnx2_nvram_write()
4585 page_start -= (page_start % bp->flash_info->page_size); in bnx2_nvram_write()
4587 page_end = page_start + bp->flash_info->page_size; in bnx2_nvram_write()
4595 if ((rc = bnx2_acquire_nvram_lock(bp)) != 0) in bnx2_nvram_write()
4599 bnx2_enable_nvram_access(bp); in bnx2_nvram_write()
4602 if (!(bp->flash_info->flags & BNX2_NV_BUFFERED)) { in bnx2_nvram_write()
4607 for (j = 0; j < bp->flash_info->page_size; j += 4) { in bnx2_nvram_write()
4608 if (j == (bp->flash_info->page_size - 4)) { in bnx2_nvram_write()
4611 rc = bnx2_nvram_read_dword(bp, in bnx2_nvram_write()
4624 if ((rc = bnx2_enable_nvram_write(bp)) != 0) in bnx2_nvram_write()
4630 if (!(bp->flash_info->flags & BNX2_NV_BUFFERED)) { in bnx2_nvram_write()
4632 if ((rc = bnx2_nvram_erase_page(bp, page_start)) != 0) in bnx2_nvram_write()
4636 bnx2_enable_nvram_write(bp); in bnx2_nvram_write()
4641 rc = bnx2_nvram_write_dword(bp, addr, in bnx2_nvram_write()
4654 ((bp->flash_info->flags & BNX2_NV_BUFFERED) && in bnx2_nvram_write()
4659 rc = bnx2_nvram_write_dword(bp, addr, buf, in bnx2_nvram_write()
4671 if (!(bp->flash_info->flags & BNX2_NV_BUFFERED)) { in bnx2_nvram_write()
4678 rc = bnx2_nvram_write_dword(bp, addr, in bnx2_nvram_write()
4689 bnx2_disable_nvram_write(bp); in bnx2_nvram_write()
4692 bnx2_disable_nvram_access(bp); in bnx2_nvram_write()
4693 bnx2_release_nvram_lock(bp); in bnx2_nvram_write()
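bnx2_nvram_write() only ever programs whole 32-bit words, so an unaligned request first reads the dwords bracketing the range, merges the caller's bytes into page-sized chunks, erases pages where the part is unbuffered, and writes everything back under the NVRAM lock. A small sketch of just the alignment step, with read_dword() as a hypothetical stand-in for bnx2_nvram_read() and the locking omitted:

        static int nvram_write_unaligned_sketch(struct bnx2 *bp, u32 offset,
                                                const u8 *buf, int len)
        {
                u32 start = offset & ~3;                /* round down to a dword */
                u32 end = (offset + len + 3) & ~3;      /* round up to a dword   */
                u8 head[4], tail[4];
                int rc;

                if (offset != start) {                  /* partial leading dword */
                        rc = read_dword(bp, start, head);
                        if (rc)
                                return rc;
                }
                if ((offset + len) != end) {            /* partial trailing dword */
                        rc = read_dword(bp, end - 4, tail);
                        if (rc)
                                return rc;
                }
                /* merge head, buf and tail into an aligned scratch buffer,
                 * then program it one dword at a time
                 */
                return 0;
        }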
4706 bnx2_init_fw_cap(struct bnx2 *bp) in bnx2_init_fw_cap() argument
4710 bp->phy_flags &= ~BNX2_PHY_FLAG_REMOTE_PHY_CAP; in bnx2_init_fw_cap()
4711 bp->flags &= ~BNX2_FLAG_CAN_KEEP_VLAN; in bnx2_init_fw_cap()
4713 if (!(bp->flags & BNX2_FLAG_ASF_ENABLE)) in bnx2_init_fw_cap()
4714 bp->flags |= BNX2_FLAG_CAN_KEEP_VLAN; in bnx2_init_fw_cap()
4716 val = bnx2_shmem_rd(bp, BNX2_FW_CAP_MB); in bnx2_init_fw_cap()
4721 bp->flags |= BNX2_FLAG_CAN_KEEP_VLAN; in bnx2_init_fw_cap()
4725 if ((bp->phy_flags & BNX2_PHY_FLAG_SERDES) && in bnx2_init_fw_cap()
4729 bp->phy_flags |= BNX2_PHY_FLAG_REMOTE_PHY_CAP; in bnx2_init_fw_cap()
4731 link = bnx2_shmem_rd(bp, BNX2_LINK_STATUS); in bnx2_init_fw_cap()
4733 bp->phy_port = PORT_FIBRE; in bnx2_init_fw_cap()
4735 bp->phy_port = PORT_TP; in bnx2_init_fw_cap()
4741 if (netif_running(bp->dev) && sig) in bnx2_init_fw_cap()
4742 bnx2_shmem_wr(bp, BNX2_DRV_ACK_CAP_MB, sig); in bnx2_init_fw_cap()
4746 bnx2_setup_msix_tbl(struct bnx2 *bp) in bnx2_setup_msix_tbl() argument
4748 BNX2_WR(bp, BNX2_PCI_GRC_WINDOW_ADDR, BNX2_PCI_GRC_WINDOW_ADDR_SEP_WIN); in bnx2_setup_msix_tbl()
4750 BNX2_WR(bp, BNX2_PCI_GRC_WINDOW2_ADDR, BNX2_MSIX_TABLE_ADDR); in bnx2_setup_msix_tbl()
4751 BNX2_WR(bp, BNX2_PCI_GRC_WINDOW3_ADDR, BNX2_MSIX_PBA_ADDR); in bnx2_setup_msix_tbl()
4755 bnx2_wait_dma_complete(struct bnx2 *bp) in bnx2_wait_dma_complete() argument
4764 if ((BNX2_CHIP(bp) == BNX2_CHIP_5706) || in bnx2_wait_dma_complete()
4765 (BNX2_CHIP(bp) == BNX2_CHIP_5708)) { in bnx2_wait_dma_complete()
4766 BNX2_WR(bp, BNX2_MISC_ENABLE_CLR_BITS, in bnx2_wait_dma_complete()
4771 val = BNX2_RD(bp, BNX2_MISC_ENABLE_CLR_BITS); in bnx2_wait_dma_complete()
4774 val = BNX2_RD(bp, BNX2_MISC_NEW_CORE_CTL); in bnx2_wait_dma_complete()
4776 BNX2_WR(bp, BNX2_MISC_NEW_CORE_CTL, val); in bnx2_wait_dma_complete()
4777 val = BNX2_RD(bp, BNX2_MISC_NEW_CORE_CTL); in bnx2_wait_dma_complete()
4781 val = BNX2_RD(bp, BNX2_PCICFG_DEVICE_CONTROL); in bnx2_wait_dma_complete()
4792 bnx2_reset_chip(struct bnx2 *bp, u32 reset_code) in bnx2_reset_chip() argument
4800 bnx2_wait_dma_complete(bp); in bnx2_reset_chip()
4803 bnx2_fw_sync(bp, BNX2_DRV_MSG_DATA_WAIT0 | reset_code, 1, 1); in bnx2_reset_chip()
4807 bnx2_shmem_wr(bp, BNX2_DRV_RESET_SIGNATURE, in bnx2_reset_chip()
4812 val = BNX2_RD(bp, BNX2_MISC_ID); in bnx2_reset_chip()
4814 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_reset_chip()
4815 BNX2_WR(bp, BNX2_MISC_COMMAND, BNX2_MISC_COMMAND_SW_RESET); in bnx2_reset_chip()
4816 BNX2_RD(bp, BNX2_MISC_COMMAND); in bnx2_reset_chip()
4822 BNX2_WR(bp, BNX2_PCICFG_MISC_CONFIG, val); in bnx2_reset_chip()
4830 BNX2_WR(bp, BNX2_PCICFG_MISC_CONFIG, val); in bnx2_reset_chip()
4836 if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) || in bnx2_reset_chip()
4837 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A1)) in bnx2_reset_chip()
4842 val = BNX2_RD(bp, BNX2_PCICFG_MISC_CONFIG); in bnx2_reset_chip()
4857 val = BNX2_RD(bp, BNX2_PCI_SWAP_DIAG0); in bnx2_reset_chip()
4864 rc = bnx2_fw_sync(bp, BNX2_DRV_MSG_DATA_WAIT1 | reset_code, 1, 0); in bnx2_reset_chip()
4868 spin_lock_bh(&bp->phy_lock); in bnx2_reset_chip()
4869 old_port = bp->phy_port; in bnx2_reset_chip()
4870 bnx2_init_fw_cap(bp); in bnx2_reset_chip()
4871 if ((bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) && in bnx2_reset_chip()
4872 old_port != bp->phy_port) in bnx2_reset_chip()
4873 bnx2_set_default_remote_link(bp); in bnx2_reset_chip()
4874 spin_unlock_bh(&bp->phy_lock); in bnx2_reset_chip()
4876 if (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) { in bnx2_reset_chip()
4879 BNX2_WR(bp, BNX2_MISC_VREG_CONTROL, 0x000000fa); in bnx2_reset_chip()
4882 rc = bnx2_alloc_bad_rbuf(bp); in bnx2_reset_chip()
4885 if (bp->flags & BNX2_FLAG_USING_MSIX) { in bnx2_reset_chip()
4886 bnx2_setup_msix_tbl(bp); in bnx2_reset_chip()
4888 BNX2_WR(bp, BNX2_MISC_ECO_HW_CTL, in bnx2_reset_chip()
4896 bnx2_init_chip(struct bnx2 *bp) in bnx2_init_chip() argument
4902 BNX2_WR(bp, BNX2_PCICFG_INT_ACK_CMD, BNX2_PCICFG_INT_ACK_CMD_MASK_INT); in bnx2_init_chip()
4915 if ((bp->flags & BNX2_FLAG_PCIX) && (bp->bus_speed_mhz == 133)) in bnx2_init_chip()
4918 if ((BNX2_CHIP(bp) == BNX2_CHIP_5706) && in bnx2_init_chip()
4919 (BNX2_CHIP_ID(bp) != BNX2_CHIP_ID_5706_A0) && in bnx2_init_chip()
4920 !(bp->flags & BNX2_FLAG_PCIX)) in bnx2_init_chip()
4923 BNX2_WR(bp, BNX2_DMA_CONFIG, val); in bnx2_init_chip()
4925 if (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) { in bnx2_init_chip()
4926 val = BNX2_RD(bp, BNX2_TDMA_CONFIG); in bnx2_init_chip()
4928 BNX2_WR(bp, BNX2_TDMA_CONFIG, val); in bnx2_init_chip()
4931 if (bp->flags & BNX2_FLAG_PCIX) { in bnx2_init_chip()
4934 pci_read_config_word(bp->pdev, bp->pcix_cap + PCI_X_CMD, in bnx2_init_chip()
4936 pci_write_config_word(bp->pdev, bp->pcix_cap + PCI_X_CMD, in bnx2_init_chip()
4940 BNX2_WR(bp, BNX2_MISC_ENABLE_SET_BITS, in bnx2_init_chip()
4947 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_chip()
4948 rc = bnx2_init_5709_context(bp); in bnx2_init_chip()
4952 bnx2_init_context(bp); in bnx2_init_chip()
4954 if ((rc = bnx2_init_cpus(bp)) != 0) in bnx2_init_chip()
4957 bnx2_init_nvram(bp); in bnx2_init_chip()
4959 bnx2_set_mac_addr(bp, bp->dev->dev_addr, 0); in bnx2_init_chip()
4961 val = BNX2_RD(bp, BNX2_MQ_CONFIG); in bnx2_init_chip()
4964 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_chip()
4966 if (BNX2_CHIP_REV(bp) == BNX2_CHIP_REV_Ax) in bnx2_init_chip()
4970 BNX2_WR(bp, BNX2_MQ_CONFIG, val); in bnx2_init_chip()
4973 BNX2_WR(bp, BNX2_MQ_KNL_BYP_WIND_START, val); in bnx2_init_chip()
4974 BNX2_WR(bp, BNX2_MQ_KNL_WIND_END, val); in bnx2_init_chip()
4977 BNX2_WR(bp, BNX2_RV2P_CONFIG, val); in bnx2_init_chip()
4980 val = BNX2_RD(bp, BNX2_TBDR_CONFIG); in bnx2_init_chip()
4983 BNX2_WR(bp, BNX2_TBDR_CONFIG, val); in bnx2_init_chip()
4985 val = bp->mac_addr[0] + in bnx2_init_chip()
4986 (bp->mac_addr[1] << 8) + in bnx2_init_chip()
4987 (bp->mac_addr[2] << 16) + in bnx2_init_chip()
4988 bp->mac_addr[3] + in bnx2_init_chip()
4989 (bp->mac_addr[4] << 8) + in bnx2_init_chip()
4990 (bp->mac_addr[5] << 16); in bnx2_init_chip()
4991 BNX2_WR(bp, BNX2_EMAC_BACKOFF_SEED, val); in bnx2_init_chip()
4994 mtu = bp->dev->mtu; in bnx2_init_chip()
4998 BNX2_WR(bp, BNX2_EMAC_RX_MTU_SIZE, val); in bnx2_init_chip()
5003 bnx2_reg_wr_ind(bp, BNX2_RBUF_CONFIG, BNX2_RBUF_CONFIG_VAL(mtu)); in bnx2_init_chip()
5004 bnx2_reg_wr_ind(bp, BNX2_RBUF_CONFIG2, BNX2_RBUF_CONFIG2_VAL(mtu)); in bnx2_init_chip()
5005 bnx2_reg_wr_ind(bp, BNX2_RBUF_CONFIG3, BNX2_RBUF_CONFIG3_VAL(mtu)); in bnx2_init_chip()
5007 memset(bp->bnx2_napi[0].status_blk.msi, 0, bp->status_stats_size); in bnx2_init_chip()
5009 bp->bnx2_napi[i].last_status_idx = 0; in bnx2_init_chip()
5011 bp->idle_chk_status_idx = 0xffff; in bnx2_init_chip()
5014 BNX2_WR(bp, BNX2_EMAC_ATTENTION_ENA, BNX2_EMAC_ATTENTION_ENA_LINK); in bnx2_init_chip()
5016 BNX2_WR(bp, BNX2_HC_STATUS_ADDR_L, in bnx2_init_chip()
5017 (u64) bp->status_blk_mapping & 0xffffffff); in bnx2_init_chip()
5018 BNX2_WR(bp, BNX2_HC_STATUS_ADDR_H, (u64) bp->status_blk_mapping >> 32); in bnx2_init_chip()
5020 BNX2_WR(bp, BNX2_HC_STATISTICS_ADDR_L, in bnx2_init_chip()
5021 (u64) bp->stats_blk_mapping & 0xffffffff); in bnx2_init_chip()
5022 BNX2_WR(bp, BNX2_HC_STATISTICS_ADDR_H, in bnx2_init_chip()
5023 (u64) bp->stats_blk_mapping >> 32); in bnx2_init_chip()
5025 BNX2_WR(bp, BNX2_HC_TX_QUICK_CONS_TRIP, in bnx2_init_chip()
5026 (bp->tx_quick_cons_trip_int << 16) | bp->tx_quick_cons_trip); in bnx2_init_chip()
5028 BNX2_WR(bp, BNX2_HC_RX_QUICK_CONS_TRIP, in bnx2_init_chip()
5029 (bp->rx_quick_cons_trip_int << 16) | bp->rx_quick_cons_trip); in bnx2_init_chip()
5031 BNX2_WR(bp, BNX2_HC_COMP_PROD_TRIP, in bnx2_init_chip()
5032 (bp->comp_prod_trip_int << 16) | bp->comp_prod_trip); in bnx2_init_chip()
5034 BNX2_WR(bp, BNX2_HC_TX_TICKS, (bp->tx_ticks_int << 16) | bp->tx_ticks); in bnx2_init_chip()
5036 BNX2_WR(bp, BNX2_HC_RX_TICKS, (bp->rx_ticks_int << 16) | bp->rx_ticks); in bnx2_init_chip()
5038 BNX2_WR(bp, BNX2_HC_COM_TICKS, in bnx2_init_chip()
5039 (bp->com_ticks_int << 16) | bp->com_ticks); in bnx2_init_chip()
5041 BNX2_WR(bp, BNX2_HC_CMD_TICKS, in bnx2_init_chip()
5042 (bp->cmd_ticks_int << 16) | bp->cmd_ticks); in bnx2_init_chip()
5044 if (bp->flags & BNX2_FLAG_BROKEN_STATS) in bnx2_init_chip()
5045 BNX2_WR(bp, BNX2_HC_STATS_TICKS, 0); in bnx2_init_chip()
5047 BNX2_WR(bp, BNX2_HC_STATS_TICKS, bp->stats_ticks); in bnx2_init_chip()
5048 BNX2_WR(bp, BNX2_HC_STAT_COLLECT_TICKS, 0xbb8); /* 3ms */ in bnx2_init_chip()
5050 if (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A1) in bnx2_init_chip()
5057 if (bp->flags & BNX2_FLAG_USING_MSIX) { in bnx2_init_chip()
5058 BNX2_WR(bp, BNX2_HC_MSIX_BIT_VECTOR, in bnx2_init_chip()
5064 if (bp->flags & BNX2_FLAG_ONE_SHOT_MSI) in bnx2_init_chip()
5067 BNX2_WR(bp, BNX2_HC_CONFIG, val); in bnx2_init_chip()
5069 if (bp->rx_ticks < 25) in bnx2_init_chip()
5070 bnx2_reg_wr_ind(bp, BNX2_FW_RX_LOW_LATENCY, 1); in bnx2_init_chip()
5072 bnx2_reg_wr_ind(bp, BNX2_FW_RX_LOW_LATENCY, 0); in bnx2_init_chip()
5074 for (i = 1; i < bp->irq_nvecs; i++) { in bnx2_init_chip()
5078 BNX2_WR(bp, base, in bnx2_init_chip()
5083 BNX2_WR(bp, base + BNX2_HC_TX_QUICK_CONS_TRIP_OFF, in bnx2_init_chip()
5084 (bp->tx_quick_cons_trip_int << 16) | in bnx2_init_chip()
5085 bp->tx_quick_cons_trip); in bnx2_init_chip()
5087 BNX2_WR(bp, base + BNX2_HC_TX_TICKS_OFF, in bnx2_init_chip()
5088 (bp->tx_ticks_int << 16) | bp->tx_ticks); in bnx2_init_chip()
5090 BNX2_WR(bp, base + BNX2_HC_RX_QUICK_CONS_TRIP_OFF, in bnx2_init_chip()
5091 (bp->rx_quick_cons_trip_int << 16) | in bnx2_init_chip()
5092 bp->rx_quick_cons_trip); in bnx2_init_chip()
5094 BNX2_WR(bp, base + BNX2_HC_RX_TICKS_OFF, in bnx2_init_chip()
5095 (bp->rx_ticks_int << 16) | bp->rx_ticks); in bnx2_init_chip()
5099 BNX2_WR(bp, BNX2_HC_COMMAND, BNX2_HC_COMMAND_CLR_STAT_NOW); in bnx2_init_chip()
5101 BNX2_WR(bp, BNX2_HC_ATTN_BITS_ENABLE, STATUS_ATTN_EVENTS); in bnx2_init_chip()
5104 bnx2_set_rx_mode(bp->dev); in bnx2_init_chip()
5106 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_chip()
5107 val = BNX2_RD(bp, BNX2_MISC_NEW_CORE_CTL); in bnx2_init_chip()
5109 BNX2_WR(bp, BNX2_MISC_NEW_CORE_CTL, val); in bnx2_init_chip()
5111 rc = bnx2_fw_sync(bp, BNX2_DRV_MSG_DATA_WAIT2 | BNX2_DRV_MSG_CODE_RESET, in bnx2_init_chip()
5114 BNX2_WR(bp, BNX2_MISC_ENABLE_SET_BITS, BNX2_MISC_ENABLE_DEFAULT); in bnx2_init_chip()
5115 BNX2_RD(bp, BNX2_MISC_ENABLE_SET_BITS); in bnx2_init_chip()
5119 bp->hc_cmd = BNX2_RD(bp, BNX2_HC_COMMAND); in bnx2_init_chip()
5125 bnx2_clear_ring_states(struct bnx2 *bp) in bnx2_clear_ring_states() argument
5133 bnapi = &bp->bnx2_napi[i]; in bnx2_clear_ring_states()
5148 bnx2_init_tx_context(struct bnx2 *bp, u32 cid, struct bnx2_tx_ring_info *txr) in bnx2_init_tx_context() argument
5153 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_tx_context()
5165 bnx2_ctx_wr(bp, cid_addr, offset0, val); in bnx2_init_tx_context()
5168 bnx2_ctx_wr(bp, cid_addr, offset1, val); in bnx2_init_tx_context()
5171 bnx2_ctx_wr(bp, cid_addr, offset2, val); in bnx2_init_tx_context()
5174 bnx2_ctx_wr(bp, cid_addr, offset3, val); in bnx2_init_tx_context()
5178 bnx2_init_tx_ring(struct bnx2 *bp, int ring_num) in bnx2_init_tx_ring() argument
5185 bnapi = &bp->bnx2_napi[ring_num]; in bnx2_init_tx_ring()
5193 bp->tx_wake_thresh = bp->tx_ring_size / 2; in bnx2_init_tx_ring()
5206 bnx2_init_tx_context(bp, cid, txr); in bnx2_init_tx_ring()
5234 bnx2_init_rx_ring(struct bnx2 *bp, int ring_num) in bnx2_init_rx_ring() argument
5239 struct bnx2_napi *bnapi = &bp->bnx2_napi[ring_num]; in bnx2_init_rx_ring()
5250 bp->rx_buf_use_size, bp->rx_max_ring); in bnx2_init_rx_ring()
5252 bnx2_init_rx_context(bp, cid); in bnx2_init_rx_ring()
5254 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_rx_ring()
5255 val = BNX2_RD(bp, BNX2_MQ_MAP_L2_5); in bnx2_init_rx_ring()
5256 BNX2_WR(bp, BNX2_MQ_MAP_L2_5, val | BNX2_MQ_MAP_L2_5_ARM); in bnx2_init_rx_ring()
5259 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_PG_BUF_SIZE, 0); in bnx2_init_rx_ring()
5260 if (bp->rx_pg_ring_size) { in bnx2_init_rx_ring()
5263 PAGE_SIZE, bp->rx_max_pg_ring); in bnx2_init_rx_ring()
5264 val = (bp->rx_buf_use_size << 16) | PAGE_SIZE; in bnx2_init_rx_ring()
5265 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_PG_BUF_SIZE, val); in bnx2_init_rx_ring()
5266 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_RBDC_KEY, in bnx2_init_rx_ring()
5270 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_NX_PG_BDHADDR_HI, val); in bnx2_init_rx_ring()
5273 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_NX_PG_BDHADDR_LO, val); in bnx2_init_rx_ring()
5275 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_init_rx_ring()
5276 BNX2_WR(bp, BNX2_MQ_MAP_L2_3, BNX2_MQ_MAP_L2_3_DEFAULT); in bnx2_init_rx_ring()
5280 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_NX_BDHADDR_HI, val); in bnx2_init_rx_ring()
5283 bnx2_ctx_wr(bp, rx_cid_addr, BNX2_L2CTX_NX_BDHADDR_LO, val); in bnx2_init_rx_ring()
5286 for (i = 0; i < bp->rx_pg_ring_size; i++) { in bnx2_init_rx_ring()
5287 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5288 netdev_warn(bp->dev, "init'ed rx page ring %d with %d/%d pages only\n", in bnx2_init_rx_ring()
5289 ring_num, i, bp->rx_pg_ring_size); in bnx2_init_rx_ring()
5298 for (i = 0; i < bp->rx_ring_size; i++) { in bnx2_init_rx_ring()
5299 if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5300 netdev_warn(bp->dev, "init'ed rx ring %d with %d/%d skbs only\n", in bnx2_init_rx_ring()
5301 ring_num, i, bp->rx_ring_size); in bnx2_init_rx_ring()
5313 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_init_rx_ring()
5314 BNX2_WR16(bp, rxr->rx_bidx_addr, prod); in bnx2_init_rx_ring()
5316 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_init_rx_ring()
5320 bnx2_init_all_rings(struct bnx2 *bp) in bnx2_init_all_rings() argument
5325 bnx2_clear_ring_states(bp); in bnx2_init_all_rings()
5327 BNX2_WR(bp, BNX2_TSCH_TSS_CFG, 0); in bnx2_init_all_rings()
5328 for (i = 0; i < bp->num_tx_rings; i++) in bnx2_init_all_rings()
5329 bnx2_init_tx_ring(bp, i); in bnx2_init_all_rings()
5331 if (bp->num_tx_rings > 1) in bnx2_init_all_rings()
5332 BNX2_WR(bp, BNX2_TSCH_TSS_CFG, ((bp->num_tx_rings - 1) << 24) | in bnx2_init_all_rings()
5335 BNX2_WR(bp, BNX2_RLUP_RSS_CONFIG, 0); in bnx2_init_all_rings()
5336 bnx2_reg_wr_ind(bp, BNX2_RXP_SCRATCH_RSS_TBL_SZ, 0); in bnx2_init_all_rings()
5338 for (i = 0; i < bp->num_rx_rings; i++) in bnx2_init_all_rings()
5339 bnx2_init_rx_ring(bp, i); in bnx2_init_all_rings()
5341 if (bp->num_rx_rings > 1) { in bnx2_init_all_rings()
5347 tbl_32 |= (i % (bp->num_rx_rings - 1)) << shift; in bnx2_init_all_rings()
5349 BNX2_WR(bp, BNX2_RLUP_RSS_DATA, tbl_32); in bnx2_init_all_rings()
5350 BNX2_WR(bp, BNX2_RLUP_RSS_COMMAND, (i >> 3) | in bnx2_init_all_rings()
5361 BNX2_WR(bp, BNX2_RLUP_RSS_CONFIG, val); in bnx2_init_all_rings()
5386 bnx2_set_rx_ring_size(struct bnx2 *bp, u32 size) in bnx2_set_rx_ring_size() argument
5391 rx_size = bp->dev->mtu + ETH_HLEN + BNX2_RX_OFFSET + 8; in bnx2_set_rx_ring_size()
5396 bp->rx_copy_thresh = BNX2_RX_COPY_THRESH; in bnx2_set_rx_ring_size()
5397 bp->rx_pg_ring_size = 0; in bnx2_set_rx_ring_size()
5398 bp->rx_max_pg_ring = 0; in bnx2_set_rx_ring_size()
5399 bp->rx_max_pg_ring_idx = 0; in bnx2_set_rx_ring_size()
5400 if ((rx_space > PAGE_SIZE) && !(bp->flags & BNX2_FLAG_JUMBO_BROKEN)) { in bnx2_set_rx_ring_size()
5401 int pages = PAGE_ALIGN(bp->dev->mtu - 40) >> PAGE_SHIFT; in bnx2_set_rx_ring_size()
5407 bp->rx_pg_ring_size = jumbo_size; in bnx2_set_rx_ring_size()
5408 bp->rx_max_pg_ring = bnx2_find_max_ring(jumbo_size, in bnx2_set_rx_ring_size()
5410 bp->rx_max_pg_ring_idx = in bnx2_set_rx_ring_size()
5411 (bp->rx_max_pg_ring * BNX2_RX_DESC_CNT) - 1; in bnx2_set_rx_ring_size()
5413 bp->rx_copy_thresh = 0; in bnx2_set_rx_ring_size()
5416 bp->rx_buf_use_size = rx_size; in bnx2_set_rx_ring_size()
5418 bp->rx_buf_size = kmalloc_size_roundup( in bnx2_set_rx_ring_size()
5419 SKB_DATA_ALIGN(bp->rx_buf_use_size + BNX2_RX_ALIGN) + in bnx2_set_rx_ring_size()
5421 bp->rx_jumbo_thresh = rx_size - BNX2_RX_OFFSET; in bnx2_set_rx_ring_size()
5422 bp->rx_ring_size = size; in bnx2_set_rx_ring_size()
5423 bp->rx_max_ring = bnx2_find_max_ring(size, BNX2_MAX_RX_RINGS); in bnx2_set_rx_ring_size()
5424 bp->rx_max_ring_idx = (bp->rx_max_ring * BNX2_RX_DESC_CNT) - 1; in bnx2_set_rx_ring_size()
5428 bnx2_free_tx_skbs(struct bnx2 *bp) in bnx2_free_tx_skbs() argument
5432 for (i = 0; i < bp->num_tx_rings; i++) { in bnx2_free_tx_skbs()
5433 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_free_tx_skbs()
5450 dma_unmap_single(&bp->pdev->dev, in bnx2_free_tx_skbs()
5461 dma_unmap_page(&bp->pdev->dev, in bnx2_free_tx_skbs()
5468 netdev_tx_reset_queue(netdev_get_tx_queue(bp->dev, i)); in bnx2_free_tx_skbs()
5473 bnx2_free_rx_skbs(struct bnx2 *bp) in bnx2_free_rx_skbs() argument
5477 for (i = 0; i < bp->num_rx_rings; i++) { in bnx2_free_rx_skbs()
5478 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_free_rx_skbs()
5485 for (j = 0; j < bp->rx_max_ring_idx; j++) { in bnx2_free_rx_skbs()
5492 dma_unmap_single(&bp->pdev->dev, in bnx2_free_rx_skbs()
5494 bp->rx_buf_use_size, in bnx2_free_rx_skbs()
5501 for (j = 0; j < bp->rx_max_pg_ring_idx; j++) in bnx2_free_rx_skbs()
5502 bnx2_free_rx_page(bp, rxr, j); in bnx2_free_rx_skbs()
5507 bnx2_free_skbs(struct bnx2 *bp) in bnx2_free_skbs() argument
5509 bnx2_free_tx_skbs(bp); in bnx2_free_skbs()
5510 bnx2_free_rx_skbs(bp); in bnx2_free_skbs()
5514 bnx2_reset_nic(struct bnx2 *bp, u32 reset_code) in bnx2_reset_nic() argument
5518 rc = bnx2_reset_chip(bp, reset_code); in bnx2_reset_nic()
5519 bnx2_free_skbs(bp); in bnx2_reset_nic()
5523 if ((rc = bnx2_init_chip(bp)) != 0) in bnx2_reset_nic()
5526 bnx2_init_all_rings(bp); in bnx2_reset_nic()
5531 bnx2_init_nic(struct bnx2 *bp, int reset_phy) in bnx2_init_nic() argument
5535 if ((rc = bnx2_reset_nic(bp, BNX2_DRV_MSG_CODE_RESET)) != 0) in bnx2_init_nic()
5538 spin_lock_bh(&bp->phy_lock); in bnx2_init_nic()
5539 bnx2_init_phy(bp, reset_phy); in bnx2_init_nic()
5540 bnx2_set_link(bp); in bnx2_init_nic()
5541 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_init_nic()
5542 bnx2_remote_phy_event(bp); in bnx2_init_nic()
5543 spin_unlock_bh(&bp->phy_lock); in bnx2_init_nic()
5548 bnx2_shutdown_chip(struct bnx2 *bp) in bnx2_shutdown_chip() argument
5552 if (bp->flags & BNX2_FLAG_NO_WOL) in bnx2_shutdown_chip()
5554 else if (bp->wol) in bnx2_shutdown_chip()
5559 return bnx2_reset_chip(bp, reset_code); in bnx2_shutdown_chip()
5563 bnx2_test_registers(struct bnx2 *bp) in bnx2_test_registers() argument
5684 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_test_registers()
5698 save_val = readl(bp->regview + offset); in bnx2_test_registers()
5700 writel(0, bp->regview + offset); in bnx2_test_registers()
5702 val = readl(bp->regview + offset); in bnx2_test_registers()
5711 writel(0xffffffff, bp->regview + offset); in bnx2_test_registers()
5713 val = readl(bp->regview + offset); in bnx2_test_registers()
5722 writel(save_val, bp->regview + offset); in bnx2_test_registers()
5726 writel(save_val, bp->regview + offset); in bnx2_test_registers()
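bnx2_test_registers() walks a table of register offsets with read-write and read-only masks: save the register, write zero and then all-ones, verify that only the read-write bits respond while the read-only bits keep their value, and restore the saved contents either way. A condensed sketch of one iteration, with rw_mask/ro_mask taken as the per-entry fields the table appears to carry:

        static int test_one_register_sketch(struct bnx2 *bp, u32 offset,
                                            u32 rw_mask, u32 ro_mask)
        {
                u32 save_val, val;

                save_val = readl(bp->regview + offset);

                writel(0, bp->regview + offset);
                val = readl(bp->regview + offset);
                if ((val & rw_mask) != 0)               /* rw bits must clear */
                        goto fail;

                writel(0xffffffff, bp->regview + offset);
                val = readl(bp->regview + offset);
                if ((val & ro_mask) != (save_val & ro_mask))    /* ro bits untouched */
                        goto fail;

                writel(save_val, bp->regview + offset);
                return 0;

        fail:
                writel(save_val, bp->regview + offset);
                return -ENODEV;
        }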
5734 bnx2_do_mem_test(struct bnx2 *bp, u32 start, u32 size) in bnx2_do_mem_test() argument
5745 bnx2_reg_wr_ind(bp, start + offset, test_pattern[i]); in bnx2_do_mem_test()
5747 if (bnx2_reg_rd_ind(bp, start + offset) != in bnx2_do_mem_test()
5757 bnx2_test_memory(struct bnx2 *bp) in bnx2_test_memory() argument
5783 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_test_memory()
5789 if ((ret = bnx2_do_mem_test(bp, mem_tbl[i].offset, in bnx2_test_memory()
5802 bnx2_run_loopback(struct bnx2 *bp, int loopback_mode) in bnx2_run_loopback() argument
5814 struct bnx2_napi *bnapi = &bp->bnx2_napi[0], *tx_napi; in bnx2_run_loopback()
5823 bp->loopback = MAC_LOOPBACK; in bnx2_run_loopback()
5824 bnx2_set_mac_loopback(bp); in bnx2_run_loopback()
5827 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_run_loopback()
5830 bp->loopback = PHY_LOOPBACK; in bnx2_run_loopback()
5831 bnx2_set_phy_loopback(bp); in bnx2_run_loopback()
5836 pkt_size = min(bp->dev->mtu + ETH_HLEN, bp->rx_jumbo_thresh - 4); in bnx2_run_loopback()
5837 skb = netdev_alloc_skb(bp->dev, pkt_size); in bnx2_run_loopback()
5841 memcpy(packet, bp->dev->dev_addr, ETH_ALEN); in bnx2_run_loopback()
5846 map = dma_map_single(&bp->pdev->dev, skb->data, pkt_size, in bnx2_run_loopback()
5848 if (dma_mapping_error(&bp->pdev->dev, map)) { in bnx2_run_loopback()
5853 BNX2_WR(bp, BNX2_HC_COMMAND, in bnx2_run_loopback()
5854 bp->hc_cmd | BNX2_HC_COMMAND_COAL_NOW_WO_INT); in bnx2_run_loopback()
5856 BNX2_RD(bp, BNX2_HC_COMMAND); in bnx2_run_loopback()
5874 BNX2_WR16(bp, txr->tx_bidx_addr, txr->tx_prod); in bnx2_run_loopback()
5875 BNX2_WR(bp, txr->tx_bseq_addr, txr->tx_prod_bseq); in bnx2_run_loopback()
5879 BNX2_WR(bp, BNX2_HC_COMMAND, in bnx2_run_loopback()
5880 bp->hc_cmd | BNX2_HC_COMMAND_COAL_NOW_WO_INT); in bnx2_run_loopback()
5882 BNX2_RD(bp, BNX2_HC_COMMAND); in bnx2_run_loopback()
5886 dma_unmap_single(&bp->pdev->dev, map, pkt_size, DMA_TO_DEVICE); in bnx2_run_loopback()
5903 dma_sync_single_for_cpu(&bp->pdev->dev, in bnx2_run_loopback()
5905 bp->rx_buf_use_size, DMA_FROM_DEVICE); in bnx2_run_loopback()
5930 bp->loopback = 0; in bnx2_run_loopback()
5940 bnx2_test_loopback(struct bnx2 *bp) in bnx2_test_loopback() argument
5944 if (!netif_running(bp->dev)) in bnx2_test_loopback()
5947 bnx2_reset_nic(bp, BNX2_DRV_MSG_CODE_RESET); in bnx2_test_loopback()
5948 spin_lock_bh(&bp->phy_lock); in bnx2_test_loopback()
5949 bnx2_init_phy(bp, 1); in bnx2_test_loopback()
5950 spin_unlock_bh(&bp->phy_lock); in bnx2_test_loopback()
5951 if (bnx2_run_loopback(bp, BNX2_MAC_LOOPBACK)) in bnx2_test_loopback()
5953 if (bnx2_run_loopback(bp, BNX2_PHY_LOOPBACK)) in bnx2_test_loopback()
5962 bnx2_test_nvram(struct bnx2 *bp) in bnx2_test_nvram() argument
5969 if ((rc = bnx2_nvram_read(bp, 0, data, 4)) != 0) in bnx2_test_nvram()
5978 if ((rc = bnx2_nvram_read(bp, 0x100, data, NVRAM_SIZE)) != 0) in bnx2_test_nvram()
5997 bnx2_test_link(struct bnx2 *bp) in bnx2_test_link() argument
6001 if (!netif_running(bp->dev)) in bnx2_test_link()
6004 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) { in bnx2_test_link()
6005 if (bp->link_up) in bnx2_test_link()
6009 spin_lock_bh(&bp->phy_lock); in bnx2_test_link()
6010 bnx2_enable_bmsr1(bp); in bnx2_test_link()
6011 bnx2_read_phy(bp, bp->mii_bmsr1, &bmsr); in bnx2_test_link()
6012 bnx2_read_phy(bp, bp->mii_bmsr1, &bmsr); in bnx2_test_link()
6013 bnx2_disable_bmsr1(bp); in bnx2_test_link()
6014 spin_unlock_bh(&bp->phy_lock); in bnx2_test_link()
6023 bnx2_test_intr(struct bnx2 *bp) in bnx2_test_intr() argument
6028 if (!netif_running(bp->dev)) in bnx2_test_intr()
6031 status_idx = BNX2_RD(bp, BNX2_PCICFG_INT_ACK_CMD) & 0xffff; in bnx2_test_intr()
6034 BNX2_WR(bp, BNX2_HC_COMMAND, bp->hc_cmd | BNX2_HC_COMMAND_COAL_NOW); in bnx2_test_intr()
6035 BNX2_RD(bp, BNX2_HC_COMMAND); in bnx2_test_intr()
6038 if ((BNX2_RD(bp, BNX2_PCICFG_INT_ACK_CMD) & 0xffff) != in bnx2_test_intr()
6054 bnx2_5706_serdes_has_link(struct bnx2 *bp) in bnx2_5706_serdes_has_link() argument
6058 if (bp->phy_flags & BNX2_PHY_FLAG_NO_PARALLEL) in bnx2_5706_serdes_has_link()
6061 bnx2_write_phy(bp, MII_BNX2_MISC_SHADOW, MISC_SHDW_MODE_CTL); in bnx2_5706_serdes_has_link()
6062 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &mode_ctl); in bnx2_5706_serdes_has_link()
6067 bnx2_write_phy(bp, MII_BNX2_MISC_SHADOW, MISC_SHDW_AN_DBG); in bnx2_5706_serdes_has_link()
6068 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &an_dbg); in bnx2_5706_serdes_has_link()
6069 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &an_dbg); in bnx2_5706_serdes_has_link()
6074 bnx2_write_phy(bp, MII_BNX2_DSP_ADDRESS, MII_EXPAND_REG1); in bnx2_5706_serdes_has_link()
6075 bnx2_read_phy(bp, MII_BNX2_DSP_RW_PORT, &exp); in bnx2_5706_serdes_has_link()
6076 bnx2_read_phy(bp, MII_BNX2_DSP_RW_PORT, &exp); in bnx2_5706_serdes_has_link()
6085 bnx2_5706_serdes_timer(struct bnx2 *bp) in bnx2_5706_serdes_timer() argument
6089 spin_lock(&bp->phy_lock); in bnx2_5706_serdes_timer()
6090 if (bp->serdes_an_pending) { in bnx2_5706_serdes_timer()
6091 bp->serdes_an_pending--; in bnx2_5706_serdes_timer()
6093 } else if ((bp->link_up == 0) && (bp->autoneg & AUTONEG_SPEED)) { in bnx2_5706_serdes_timer()
6096 bp->current_interval = BNX2_TIMER_INTERVAL; in bnx2_5706_serdes_timer()
6098 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_5706_serdes_timer()
6101 if (bnx2_5706_serdes_has_link(bp)) { in bnx2_5706_serdes_timer()
6104 bnx2_write_phy(bp, bp->mii_bmcr, bmcr); in bnx2_5706_serdes_timer()
6105 bp->phy_flags |= BNX2_PHY_FLAG_PARALLEL_DETECT; in bnx2_5706_serdes_timer()
6109 else if ((bp->link_up) && (bp->autoneg & AUTONEG_SPEED) && in bnx2_5706_serdes_timer()
6110 (bp->phy_flags & BNX2_PHY_FLAG_PARALLEL_DETECT)) { in bnx2_5706_serdes_timer()
6113 bnx2_write_phy(bp, 0x17, 0x0f01); in bnx2_5706_serdes_timer()
6114 bnx2_read_phy(bp, 0x15, &phy2); in bnx2_5706_serdes_timer()
6118 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_5706_serdes_timer()
6120 bnx2_write_phy(bp, bp->mii_bmcr, bmcr); in bnx2_5706_serdes_timer()
6122 bp->phy_flags &= ~BNX2_PHY_FLAG_PARALLEL_DETECT; in bnx2_5706_serdes_timer()
6125 bp->current_interval = BNX2_TIMER_INTERVAL; in bnx2_5706_serdes_timer()
6130 bnx2_write_phy(bp, MII_BNX2_MISC_SHADOW, MISC_SHDW_AN_DBG); in bnx2_5706_serdes_timer()
6131 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &val); in bnx2_5706_serdes_timer()
6132 bnx2_read_phy(bp, MII_BNX2_MISC_SHADOW, &val); in bnx2_5706_serdes_timer()
6134 if (bp->link_up && (val & MISC_SHDW_AN_DBG_NOSYNC)) { in bnx2_5706_serdes_timer()
6135 if (!(bp->phy_flags & BNX2_PHY_FLAG_FORCED_DOWN)) { in bnx2_5706_serdes_timer()
6136 bnx2_5706s_force_link_dn(bp, 1); in bnx2_5706_serdes_timer()
6137 bp->phy_flags |= BNX2_PHY_FLAG_FORCED_DOWN; in bnx2_5706_serdes_timer()
6139 bnx2_set_link(bp); in bnx2_5706_serdes_timer()
6140 } else if (!bp->link_up && !(val & MISC_SHDW_AN_DBG_NOSYNC)) in bnx2_5706_serdes_timer()
6141 bnx2_set_link(bp); in bnx2_5706_serdes_timer()
6143 spin_unlock(&bp->phy_lock); in bnx2_5706_serdes_timer()
6147 bnx2_5708_serdes_timer(struct bnx2 *bp) in bnx2_5708_serdes_timer() argument
6149 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_5708_serdes_timer()
6152 if ((bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE) == 0) { in bnx2_5708_serdes_timer()
6153 bp->serdes_an_pending = 0; in bnx2_5708_serdes_timer()
6157 spin_lock(&bp->phy_lock); in bnx2_5708_serdes_timer()
6158 if (bp->serdes_an_pending) in bnx2_5708_serdes_timer()
6159 bp->serdes_an_pending--; in bnx2_5708_serdes_timer()
6160 else if ((bp->link_up == 0) && (bp->autoneg & AUTONEG_SPEED)) { in bnx2_5708_serdes_timer()
6163 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_5708_serdes_timer()
6165 bnx2_enable_forced_2g5(bp); in bnx2_5708_serdes_timer()
6166 bp->current_interval = BNX2_SERDES_FORCED_TIMEOUT; in bnx2_5708_serdes_timer()
6168 bnx2_disable_forced_2g5(bp); in bnx2_5708_serdes_timer()
6169 bp->serdes_an_pending = 2; in bnx2_5708_serdes_timer()
6170 bp->current_interval = BNX2_TIMER_INTERVAL; in bnx2_5708_serdes_timer()
6174 bp->current_interval = BNX2_TIMER_INTERVAL; in bnx2_5708_serdes_timer()
6176 spin_unlock(&bp->phy_lock); in bnx2_5708_serdes_timer()
6182 struct bnx2 *bp = from_timer(bp, t, timer); in bnx2_timer() local
6184 if (!netif_running(bp->dev)) in bnx2_timer()
6187 if (atomic_read(&bp->intr_sem) != 0) in bnx2_timer()
6190 if ((bp->flags & (BNX2_FLAG_USING_MSI | BNX2_FLAG_ONE_SHOT_MSI)) == in bnx2_timer()
6192 bnx2_chk_missed_msi(bp); in bnx2_timer()
6194 bnx2_send_heart_beat(bp); in bnx2_timer()
6196 bp->stats_blk->stat_FwRxDrop = in bnx2_timer()
6197 bnx2_reg_rd_ind(bp, BNX2_FW_RX_DROP_COUNT); in bnx2_timer()
6200 if ((bp->flags & BNX2_FLAG_BROKEN_STATS) && bp->stats_ticks) in bnx2_timer()
6201 BNX2_WR(bp, BNX2_HC_COMMAND, bp->hc_cmd | in bnx2_timer()
6204 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_timer()
6205 if (BNX2_CHIP(bp) == BNX2_CHIP_5706) in bnx2_timer()
6206 bnx2_5706_serdes_timer(bp); in bnx2_timer()
6208 bnx2_5708_serdes_timer(bp); in bnx2_timer()
6212 mod_timer(&bp->timer, jiffies + bp->current_interval); in bnx2_timer()
6216 bnx2_request_irq(struct bnx2 *bp) in bnx2_request_irq() argument
6222 if (bp->flags & BNX2_FLAG_USING_MSI_OR_MSIX) in bnx2_request_irq()
6227 for (i = 0; i < bp->irq_nvecs; i++) { in bnx2_request_irq()
6228 irq = &bp->irq_tbl[i]; in bnx2_request_irq()
6230 &bp->bnx2_napi[i]); in bnx2_request_irq()
6239 __bnx2_free_irq(struct bnx2 *bp) in __bnx2_free_irq() argument
6244 for (i = 0; i < bp->irq_nvecs; i++) { in __bnx2_free_irq()
6245 irq = &bp->irq_tbl[i]; in __bnx2_free_irq()
6247 free_irq(irq->vector, &bp->bnx2_napi[i]); in __bnx2_free_irq()
6253 bnx2_free_irq(struct bnx2 *bp) in bnx2_free_irq() argument
6256 __bnx2_free_irq(bp); in bnx2_free_irq()
6257 if (bp->flags & BNX2_FLAG_USING_MSI) in bnx2_free_irq()
6258 pci_disable_msi(bp->pdev); in bnx2_free_irq()
6259 else if (bp->flags & BNX2_FLAG_USING_MSIX) in bnx2_free_irq()
6260 pci_disable_msix(bp->pdev); in bnx2_free_irq()
6262 bp->flags &= ~(BNX2_FLAG_USING_MSI_OR_MSIX | BNX2_FLAG_ONE_SHOT_MSI); in bnx2_free_irq()
6266 bnx2_enable_msix(struct bnx2 *bp, int msix_vecs) in bnx2_enable_msix() argument
6270 struct net_device *dev = bp->dev; in bnx2_enable_msix()
6271 const int len = sizeof(bp->irq_tbl[0].name); in bnx2_enable_msix()
6273 bnx2_setup_msix_tbl(bp); in bnx2_enable_msix()
6274 BNX2_WR(bp, BNX2_PCI_MSIX_CONTROL, BNX2_MAX_MSIX_HW_VEC - 1); in bnx2_enable_msix()
6275 BNX2_WR(bp, BNX2_PCI_MSIX_TBL_OFF_BIR, BNX2_PCI_GRC_WINDOW2_BASE); in bnx2_enable_msix()
6276 BNX2_WR(bp, BNX2_PCI_MSIX_PBA_OFF_BIT, BNX2_PCI_GRC_WINDOW3_BASE); in bnx2_enable_msix()
6280 BNX2_RD(bp, BNX2_PCI_MSIX_CONTROL); in bnx2_enable_msix()
6291 total_vecs = pci_enable_msix_range(bp->pdev, msix_ent, in bnx2_enable_msix()
6300 bp->irq_nvecs = msix_vecs; in bnx2_enable_msix()
6301 bp->flags |= BNX2_FLAG_USING_MSIX | BNX2_FLAG_ONE_SHOT_MSI; in bnx2_enable_msix()
6303 bp->irq_tbl[i].vector = msix_ent[i].vector; in bnx2_enable_msix()
6304 snprintf(bp->irq_tbl[i].name, len, "%s-%d", dev->name, i); in bnx2_enable_msix()
6305 bp->irq_tbl[i].handler = bnx2_msi_1shot; in bnx2_enable_msix()
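bnx2_enable_msix() allocates its vectors with pci_enable_msix_range() and, if it gets at least the minimum count, records each vector, builds a per-vector name, and points the handler at the one-shot MSI ISR. A trimmed sketch of that allocation step; BNX2_MIN_MSIX_VEC is an assumed name for whatever minimum the real call passes:

        static int enable_msix_sketch(struct bnx2 *bp, int msix_vecs)
        {
                struct msix_entry msix_ent[BNX2_MAX_MSIX_HW_VEC];
                int i, total_vecs;

                for (i = 0; i < BNX2_MAX_MSIX_HW_VEC; i++)
                        msix_ent[i].entry = i;

                total_vecs = pci_enable_msix_range(bp->pdev, msix_ent,
                                                   BNX2_MIN_MSIX_VEC, msix_vecs);
                if (total_vecs < 0)
                        return total_vecs;      /* fall back to MSI/INTx */

                bp->irq_nvecs = total_vecs;
                bp->flags |= BNX2_FLAG_USING_MSIX | BNX2_FLAG_ONE_SHOT_MSI;
                for (i = 0; i < total_vecs; i++) {
                        bp->irq_tbl[i].vector = msix_ent[i].vector;
                        snprintf(bp->irq_tbl[i].name, sizeof(bp->irq_tbl[i].name),
                                 "%s-%d", bp->dev->name, i);
                        bp->irq_tbl[i].handler = bnx2_msi_1shot;
                }
                return 0;
        }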
6310 bnx2_setup_int_mode(struct bnx2 *bp, int dis_msi) in bnx2_setup_int_mode() argument
6315 if (!bp->num_req_rx_rings) in bnx2_setup_int_mode()
6316 msix_vecs = max(cpus + 1, bp->num_req_tx_rings); in bnx2_setup_int_mode()
6317 else if (!bp->num_req_tx_rings) in bnx2_setup_int_mode()
6318 msix_vecs = max(cpus, bp->num_req_rx_rings); in bnx2_setup_int_mode()
6320 msix_vecs = max(bp->num_req_rx_rings, bp->num_req_tx_rings); in bnx2_setup_int_mode()
6324 bp->irq_tbl[0].handler = bnx2_interrupt; in bnx2_setup_int_mode()
6325 strcpy(bp->irq_tbl[0].name, bp->dev->name); in bnx2_setup_int_mode()
6326 bp->irq_nvecs = 1; in bnx2_setup_int_mode()
6327 bp->irq_tbl[0].vector = bp->pdev->irq; in bnx2_setup_int_mode()
6329 if ((bp->flags & BNX2_FLAG_MSIX_CAP) && !dis_msi) in bnx2_setup_int_mode()
6330 bnx2_enable_msix(bp, msix_vecs); in bnx2_setup_int_mode()
6332 if ((bp->flags & BNX2_FLAG_MSI_CAP) && !dis_msi && in bnx2_setup_int_mode()
6333 !(bp->flags & BNX2_FLAG_USING_MSIX)) { in bnx2_setup_int_mode()
6334 if (pci_enable_msi(bp->pdev) == 0) { in bnx2_setup_int_mode()
6335 bp->flags |= BNX2_FLAG_USING_MSI; in bnx2_setup_int_mode()
6336 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_setup_int_mode()
6337 bp->flags |= BNX2_FLAG_ONE_SHOT_MSI; in bnx2_setup_int_mode()
6338 bp->irq_tbl[0].handler = bnx2_msi_1shot; in bnx2_setup_int_mode()
6340 bp->irq_tbl[0].handler = bnx2_msi; in bnx2_setup_int_mode()
6342 bp->irq_tbl[0].vector = bp->pdev->irq; in bnx2_setup_int_mode()
6346 if (!bp->num_req_tx_rings) in bnx2_setup_int_mode()
6347 bp->num_tx_rings = rounddown_pow_of_two(bp->irq_nvecs); in bnx2_setup_int_mode()
6349 bp->num_tx_rings = min(bp->irq_nvecs, bp->num_req_tx_rings); in bnx2_setup_int_mode()
6351 if (!bp->num_req_rx_rings) in bnx2_setup_int_mode()
6352 bp->num_rx_rings = bp->irq_nvecs; in bnx2_setup_int_mode()
6354 bp->num_rx_rings = min(bp->irq_nvecs, bp->num_req_rx_rings); in bnx2_setup_int_mode()
6356 netif_set_real_num_tx_queues(bp->dev, bp->num_tx_rings); in bnx2_setup_int_mode()
6358 return netif_set_real_num_rx_queues(bp->dev, bp->num_rx_rings); in bnx2_setup_int_mode()
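bnx2_setup_int_mode() picks a vector count from the CPU count and any user-requested ring counts, then walks the usual fallback ladder: try MSI-X, fall back to plain MSI, and finally to the legacy INTx vector, before sizing the real tx/rx queue counts from whatever was granted. A compressed sketch of the ladder; details such as one-shot MSI selection on the 5709 and the requested-ring clamping are omitted:

        static int setup_int_mode_sketch(struct bnx2 *bp, int dis_msi, int msix_vecs)
        {
                bp->irq_nvecs = 1;
                bp->irq_tbl[0].vector = bp->pdev->irq;  /* legacy INTx default */
                bp->irq_tbl[0].handler = bnx2_interrupt;

                if ((bp->flags & BNX2_FLAG_MSIX_CAP) && !dis_msi)
                        bnx2_enable_msix(bp, msix_vecs);

                if (!(bp->flags & BNX2_FLAG_USING_MSIX) &&
                    (bp->flags & BNX2_FLAG_MSI_CAP) && !dis_msi &&
                    pci_enable_msi(bp->pdev) == 0) {
                        bp->flags |= BNX2_FLAG_USING_MSI;
                        bp->irq_tbl[0].vector = bp->pdev->irq;  /* MSI reuses entry 0 */
                        bp->irq_tbl[0].handler = bnx2_msi;
                }

                bp->num_tx_rings = rounddown_pow_of_two(bp->irq_nvecs);
                bp->num_rx_rings = bp->irq_nvecs;
                netif_set_real_num_tx_queues(bp->dev, bp->num_tx_rings);
                return netif_set_real_num_rx_queues(bp->dev, bp->num_rx_rings);
        }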
6365 struct bnx2 *bp = netdev_priv(dev); in bnx2_open() local
6368 rc = bnx2_request_firmware(bp); in bnx2_open()
6374 bnx2_disable_int(bp); in bnx2_open()
6376 rc = bnx2_setup_int_mode(bp, disable_msi); in bnx2_open()
6379 bnx2_init_napi(bp); in bnx2_open()
6380 bnx2_napi_enable(bp); in bnx2_open()
6381 rc = bnx2_alloc_mem(bp); in bnx2_open()
6385 rc = bnx2_request_irq(bp); in bnx2_open()
6389 rc = bnx2_init_nic(bp, 1); in bnx2_open()
6393 mod_timer(&bp->timer, jiffies + bp->current_interval); in bnx2_open()
6395 atomic_set(&bp->intr_sem, 0); in bnx2_open()
6397 memset(bp->temp_stats_blk, 0, sizeof(struct statistics_block)); in bnx2_open()
6399 bnx2_enable_int(bp); in bnx2_open()
6401 if (bp->flags & BNX2_FLAG_USING_MSI) { in bnx2_open()
6405 if (bnx2_test_intr(bp) != 0) { in bnx2_open()
6406 …netdev_warn(bp->dev, "No interrupt was generated using MSI, switching to INTx mode. Please report … in bnx2_open()
6408 bnx2_disable_int(bp); in bnx2_open()
6409 bnx2_free_irq(bp); in bnx2_open()
6411 bnx2_setup_int_mode(bp, 1); in bnx2_open()
6413 rc = bnx2_init_nic(bp, 0); in bnx2_open()
6416 rc = bnx2_request_irq(bp); in bnx2_open()
6419 del_timer_sync(&bp->timer); in bnx2_open()
6422 bnx2_enable_int(bp); in bnx2_open()
6425 if (bp->flags & BNX2_FLAG_USING_MSI) in bnx2_open()
6427 else if (bp->flags & BNX2_FLAG_USING_MSIX) in bnx2_open()
6435 bnx2_napi_disable(bp); in bnx2_open()
6436 bnx2_free_skbs(bp); in bnx2_open()
6437 bnx2_free_irq(bp); in bnx2_open()
6438 bnx2_free_mem(bp); in bnx2_open()
6439 bnx2_del_napi(bp); in bnx2_open()
6440 bnx2_release_firmware(bp); in bnx2_open()
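
The error path at the end of bnx2_open() above tears down NAPI, skbs, the IRQ, memory and firmware before returning. A common way to structure that kind of unwinding in kernel-style C is goto labels that release progressively more state; the standalone sketch below shows the idiom with two invented resources (open_device() and close_device() are illustrative names, not driver functions, and this is not the driver's exact label layout).

/* Minimal sketch of a goto-based unwind: each failure jumps to a label that
 * releases everything acquired so far.  Resource names are invented. */
#include <stdio.h>
#include <stdlib.h>

static void *rings, *irq_state;

static int open_device(void)
{
	rings = malloc(64);		/* stands in for ring/memory allocation */
	if (!rings)
		goto err_out;

	irq_state = malloc(16);		/* stands in for requesting the IRQ */
	if (!irq_state)
		goto err_free_rings;

	printf("device up\n");		/* stands in for bringing up the NIC */
	return 0;

err_free_rings:				/* unwind in reverse order of setup */
	free(rings);
err_out:
	return -1;
}

static void close_device(void)
{
	free(irq_state);
	free(rings);
}

int main(void)
{
	if (open_device())
		return 1;
	close_device();
	return 0;
}
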
6447 struct bnx2 *bp = container_of(work, struct bnx2, reset_task); in bnx2_reset_task() local
6452 if (!netif_running(bp->dev)) { in bnx2_reset_task()
6457 bnx2_netif_stop(bp, true); in bnx2_reset_task()
6459 pci_read_config_word(bp->pdev, PCI_COMMAND, &pcicmd); in bnx2_reset_task()
6462 pci_restore_state(bp->pdev); in bnx2_reset_task()
6463 pci_save_state(bp->pdev); in bnx2_reset_task()
6465 rc = bnx2_init_nic(bp, 1); in bnx2_reset_task()
6467 netdev_err(bp->dev, "failed to reset NIC, closing\n"); in bnx2_reset_task()
6468 bnx2_napi_enable(bp); in bnx2_reset_task()
6469 dev_close(bp->dev); in bnx2_reset_task()
6474 atomic_set(&bp->intr_sem, 1); in bnx2_reset_task()
6475 bnx2_netif_start(bp, true); in bnx2_reset_task()
6482 bnx2_dump_ftq(struct bnx2 *bp) in bnx2_dump_ftq() argument
6486 struct net_device *dev = bp->dev; in bnx2_dump_ftq()
6510 bnx2_reg_rd_ind(bp, ftq_arr[i].off)); in bnx2_dump_ftq()
6515 reg, bnx2_reg_rd_ind(bp, reg), in bnx2_dump_ftq()
6516 bnx2_reg_rd_ind(bp, reg + 4), in bnx2_dump_ftq()
6517 bnx2_reg_rd_ind(bp, reg + 8), in bnx2_dump_ftq()
6518 bnx2_reg_rd_ind(bp, reg + 0x1c), in bnx2_dump_ftq()
6519 bnx2_reg_rd_ind(bp, reg + 0x1c), in bnx2_dump_ftq()
6520 bnx2_reg_rd_ind(bp, reg + 0x20)); in bnx2_dump_ftq()
6525 BNX2_RD(bp, BNX2_TBDC_STATUS) & BNX2_TBDC_STATUS_FREE_CNT); in bnx2_dump_ftq()
6530 BNX2_WR(bp, BNX2_TBDC_BD_ADDR, i); in bnx2_dump_ftq()
6531 BNX2_WR(bp, BNX2_TBDC_CAM_OPCODE, in bnx2_dump_ftq()
6533 BNX2_WR(bp, BNX2_TBDC_COMMAND, BNX2_TBDC_COMMAND_CMD_REG_ARB); in bnx2_dump_ftq()
6534 while ((BNX2_RD(bp, BNX2_TBDC_COMMAND) & in bnx2_dump_ftq()
6538 cid = BNX2_RD(bp, BNX2_TBDC_CID); in bnx2_dump_ftq()
6539 bdidx = BNX2_RD(bp, BNX2_TBDC_BIDX); in bnx2_dump_ftq()
6540 valid = BNX2_RD(bp, BNX2_TBDC_CAM_OPCODE); in bnx2_dump_ftq()
6549 bnx2_dump_state(struct bnx2 *bp) in bnx2_dump_state() argument
6551 struct net_device *dev = bp->dev; in bnx2_dump_state()
6554 pci_read_config_dword(bp->pdev, PCI_COMMAND, &val1); in bnx2_dump_state()
6556 atomic_read(&bp->intr_sem), val1); in bnx2_dump_state()
6557 pci_read_config_dword(bp->pdev, bp->pm_cap + PCI_PM_CTRL, &val1); in bnx2_dump_state()
6558 pci_read_config_dword(bp->pdev, BNX2_PCICFG_MISC_CONFIG, &val2); in bnx2_dump_state()
6561 BNX2_RD(bp, BNX2_EMAC_TX_STATUS), in bnx2_dump_state()
6562 BNX2_RD(bp, BNX2_EMAC_RX_STATUS)); in bnx2_dump_state()
6564 BNX2_RD(bp, BNX2_RPM_MGMT_PKT_CTRL)); in bnx2_dump_state()
6566 BNX2_RD(bp, BNX2_HC_STATS_INTERRUPT_STATUS)); in bnx2_dump_state()
6567 if (bp->flags & BNX2_FLAG_USING_MSIX) in bnx2_dump_state()
6569 BNX2_RD(bp, BNX2_PCI_GRC_WINDOW3_BASE)); in bnx2_dump_state()
6575 struct bnx2 *bp = netdev_priv(dev); in bnx2_tx_timeout() local
6577 bnx2_dump_ftq(bp); in bnx2_tx_timeout()
6578 bnx2_dump_state(bp); in bnx2_tx_timeout()
6579 bnx2_dump_mcp_state(bp); in bnx2_tx_timeout()
6582 schedule_work(&bp->reset_task); in bnx2_tx_timeout()
6592 struct bnx2 *bp = netdev_priv(dev); in bnx2_start_xmit() local
6605 bnapi = &bp->bnx2_napi[i]; in bnx2_start_xmit()
6609 if (unlikely(bnx2_tx_avail(bp, txr) < in bnx2_start_xmit()
6664 mapping = dma_map_single(&bp->pdev->dev, skb->data, len, in bnx2_start_xmit()
6666 if (dma_mapping_error(&bp->pdev->dev, mapping)) { in bnx2_start_xmit()
6694 mapping = skb_frag_dma_map(&bp->pdev->dev, frag, 0, len, in bnx2_start_xmit()
6696 if (dma_mapping_error(&bp->pdev->dev, mapping)) in bnx2_start_xmit()
6717 BNX2_WR16(bp, txr->tx_bidx_addr, prod); in bnx2_start_xmit()
6718 BNX2_WR(bp, txr->tx_bseq_addr, txr->tx_prod_bseq); in bnx2_start_xmit()
6722 if (unlikely(bnx2_tx_avail(bp, txr) <= MAX_SKB_FRAGS)) { in bnx2_start_xmit()
6731 if (bnx2_tx_avail(bp, txr) > bp->tx_wake_thresh) in bnx2_start_xmit()
6745 dma_unmap_single(&bp->pdev->dev, dma_unmap_addr(tx_buf, mapping), in bnx2_start_xmit()
6753 dma_unmap_page(&bp->pdev->dev, dma_unmap_addr(tx_buf, mapping), in bnx2_start_xmit()
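
The bnx2_start_xmit() fragments above DMA-map the skb head (dma_map_single) and then each page fragment (skb_frag_dma_map); if a later mapping fails, the head and every fragment mapped so far are unmapped again before the packet is dropped. The userspace sketch below mimics only that unwind ordering; fake_map() and fake_unmap() are invented stand-ins, not DMA API calls.

/* Userspace sketch of the mapping-failure unwind in bnx2_start_xmit():
 * index 0 is the skb head, 1..NR_FRAGS are page fragments. */
#include <stdbool.h>
#include <stdio.h>

#define NR_FRAGS 4

static bool fake_map(int i)
{
	return i != 2;			/* pretend the third mapping fails */
}

static void fake_unmap(int i)
{
	printf("unmap %d\n", i);
}

static int map_packet(void)
{
	int i;

	for (i = 0; i <= NR_FRAGS; i++) {
		if (!fake_map(i))
			goto unwind;
		printf("map %d\n", i);
	}
	return 0;

unwind:
	for (int j = 0; j < i; j++)	/* head first, then the mapped frags */
		fake_unmap(j);
	return -1;
}

int main(void)
{
	return map_packet() ? 1 : 0;
}
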
6766 struct bnx2 *bp = netdev_priv(dev); in bnx2_close() local
6768 bnx2_disable_int_sync(bp); in bnx2_close()
6769 bnx2_napi_disable(bp); in bnx2_close()
6771 del_timer_sync(&bp->timer); in bnx2_close()
6772 bnx2_shutdown_chip(bp); in bnx2_close()
6773 bnx2_free_irq(bp); in bnx2_close()
6774 bnx2_free_skbs(bp); in bnx2_close()
6775 bnx2_free_mem(bp); in bnx2_close()
6776 bnx2_del_napi(bp); in bnx2_close()
6777 bp->link_up = 0; in bnx2_close()
6778 netif_carrier_off(bp->dev); in bnx2_close()
6783 bnx2_save_stats(struct bnx2 *bp) in bnx2_save_stats() argument
6785 u32 *hw_stats = (u32 *) bp->stats_blk; in bnx2_save_stats()
6786 u32 *temp_stats = (u32 *) bp->temp_stats_blk; in bnx2_save_stats()
6810 GET_64BIT_NET_STATS64(bp->stats_blk->ctr) + \
6811 GET_64BIT_NET_STATS64(bp->temp_stats_blk->ctr)
6814 (unsigned long) (bp->stats_blk->ctr + \
6815 bp->temp_stats_blk->ctr)
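
The GET_64BIT_NET_STATS / GET_32BIT_NET_STATS macro lines above add the live hardware statistics block to the software copy that bnx2_save_stats() accumulates across resets (bp->temp_stats_blk). A standalone sketch of combining a counter split into _hi/_lo words with such a saved copy is below; the struct and field names are invented for illustration.

/* Sketch of combining a 64-bit hardware counter kept as two 32-bit words
 * with a software-saved snapshot.  Not driver code; names are illustrative. */
#include <stdint.h>
#include <stdio.h>

struct stats_blk {
	uint32_t in_octets_hi;		/* hypothetical counter, high 32 bits */
	uint32_t in_octets_lo;		/* hypothetical counter, low 32 bits  */
};

static uint64_t get_64bit_stat(const struct stats_blk *s)
{
	return ((uint64_t)s->in_octets_hi << 32) + s->in_octets_lo;
}

int main(void)
{
	/* "hw" is what the chip reports now; "saved" is what was accumulated
	 * before the last reset, as bnx2_save_stats() does. */
	struct stats_blk hw    = { .in_octets_hi = 1, .in_octets_lo = 42 };
	struct stats_blk saved = { .in_octets_hi = 0, .in_octets_lo = 100 };

	printf("total octets: %llu\n",
	       (unsigned long long)(get_64bit_stat(&hw) + get_64bit_stat(&saved)));
	return 0;
}
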
6820 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_stats64() local
6822 if (!bp->stats_blk) in bnx2_get_stats64()
6869 if ((BNX2_CHIP(bp) == BNX2_CHIP_5706) || in bnx2_get_stats64()
6870 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_A0)) in bnx2_get_stats64()
6895 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_link_ksettings() local
6900 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) { in bnx2_get_link_ksettings()
6903 } else if (bp->phy_port == PORT_FIBRE) in bnx2_get_link_ksettings()
6911 if (bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE) in bnx2_get_link_ksettings()
6923 spin_lock_bh(&bp->phy_lock); in bnx2_get_link_ksettings()
6924 cmd->base.port = bp->phy_port; in bnx2_get_link_ksettings()
6925 advertising = bp->advertising; in bnx2_get_link_ksettings()
6927 if (bp->autoneg & AUTONEG_SPEED) { in bnx2_get_link_ksettings()
6934 cmd->base.speed = bp->line_speed; in bnx2_get_link_ksettings()
6935 cmd->base.duplex = bp->duplex; in bnx2_get_link_ksettings()
6936 if (!(bp->phy_flags & BNX2_PHY_FLAG_SERDES)) { in bnx2_get_link_ksettings()
6937 if (bp->phy_flags & BNX2_PHY_FLAG_MDIX) in bnx2_get_link_ksettings()
6947 spin_unlock_bh(&bp->phy_lock); in bnx2_get_link_ksettings()
6949 cmd->base.phy_address = bp->phy_addr; in bnx2_get_link_ksettings()
6963 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_link_ksettings() local
6964 u8 autoneg = bp->autoneg; in bnx2_set_link_ksettings()
6965 u8 req_duplex = bp->req_duplex; in bnx2_set_link_ksettings()
6966 u16 req_line_speed = bp->req_line_speed; in bnx2_set_link_ksettings()
6967 u32 advertising = bp->advertising; in bnx2_set_link_ksettings()
6970 spin_lock_bh(&bp->phy_lock); in bnx2_set_link_ksettings()
6975 if (cmd->base.port != bp->phy_port && in bnx2_set_link_ksettings()
6976 !(bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP)) in bnx2_set_link_ksettings()
6982 if (!netif_running(dev) && cmd->base.port != bp->phy_port) in bnx2_set_link_ksettings()
7012 !(bp->phy_flags & BNX2_PHY_FLAG_2_5G_CAPABLE)) in bnx2_set_link_ksettings()
7023 bp->autoneg = autoneg; in bnx2_set_link_ksettings()
7024 bp->advertising = advertising; in bnx2_set_link_ksettings()
7025 bp->req_line_speed = req_line_speed; in bnx2_set_link_ksettings()
7026 bp->req_duplex = req_duplex; in bnx2_set_link_ksettings()
7033 err = bnx2_setup_phy(bp, cmd->base.port); in bnx2_set_link_ksettings()
7036 spin_unlock_bh(&bp->phy_lock); in bnx2_set_link_ksettings()
7044 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_drvinfo() local
7047 strscpy(info->bus_info, pci_name(bp->pdev), sizeof(info->bus_info)); in bnx2_get_drvinfo()
7048 strscpy(info->fw_version, bp->fw_version, sizeof(info->fw_version)); in bnx2_get_drvinfo()
7064 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_regs() local
7094 if (!netif_running(bp->dev)) in bnx2_get_regs()
7101 *p++ = BNX2_RD(bp, offset); in bnx2_get_regs()
7114 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_wol() local
7116 if (bp->flags & BNX2_FLAG_NO_WOL) { in bnx2_get_wol()
7122 if (bp->wol) in bnx2_get_wol()
7133 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_wol() local
7139 if (bp->flags & BNX2_FLAG_NO_WOL) in bnx2_set_wol()
7142 bp->wol = 1; in bnx2_set_wol()
7145 bp->wol = 0; in bnx2_set_wol()
7148 device_set_wakeup_enable(&bp->pdev->dev, bp->wol); in bnx2_set_wol()
7156 struct bnx2 *bp = netdev_priv(dev); in bnx2_nway_reset() local
7162 if (!(bp->autoneg & AUTONEG_SPEED)) { in bnx2_nway_reset()
7166 spin_lock_bh(&bp->phy_lock); in bnx2_nway_reset()
7168 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) { in bnx2_nway_reset()
7171 rc = bnx2_setup_remote_phy(bp, bp->phy_port); in bnx2_nway_reset()
7172 spin_unlock_bh(&bp->phy_lock); in bnx2_nway_reset()
7177 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_nway_reset()
7178 bnx2_write_phy(bp, bp->mii_bmcr, BMCR_LOOPBACK); in bnx2_nway_reset()
7179 spin_unlock_bh(&bp->phy_lock); in bnx2_nway_reset()
7183 spin_lock_bh(&bp->phy_lock); in bnx2_nway_reset()
7185 bp->current_interval = BNX2_SERDES_AN_TIMEOUT; in bnx2_nway_reset()
7186 bp->serdes_an_pending = 1; in bnx2_nway_reset()
7187 mod_timer(&bp->timer, jiffies + bp->current_interval); in bnx2_nway_reset()
7190 bnx2_read_phy(bp, bp->mii_bmcr, &bmcr); in bnx2_nway_reset()
7192 bnx2_write_phy(bp, bp->mii_bmcr, bmcr | BMCR_ANRESTART | BMCR_ANENABLE); in bnx2_nway_reset()
7194 spin_unlock_bh(&bp->phy_lock); in bnx2_nway_reset()
7202 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_link() local
7204 return bp->link_up; in bnx2_get_link()
7210 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_eeprom_len() local
7212 if (!bp->flash_info) in bnx2_get_eeprom_len()
7215 return (int) bp->flash_size; in bnx2_get_eeprom_len()
7222 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_eeprom() local
7227 rc = bnx2_nvram_read(bp, eeprom->offset, eebuf, eeprom->len); in bnx2_get_eeprom()
7236 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_eeprom() local
7241 rc = bnx2_nvram_write(bp, eeprom->offset, eebuf, eeprom->len); in bnx2_set_eeprom()
7251 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_coalesce() local
7255 coal->rx_coalesce_usecs = bp->rx_ticks; in bnx2_get_coalesce()
7256 coal->rx_max_coalesced_frames = bp->rx_quick_cons_trip; in bnx2_get_coalesce()
7257 coal->rx_coalesce_usecs_irq = bp->rx_ticks_int; in bnx2_get_coalesce()
7258 coal->rx_max_coalesced_frames_irq = bp->rx_quick_cons_trip_int; in bnx2_get_coalesce()
7260 coal->tx_coalesce_usecs = bp->tx_ticks; in bnx2_get_coalesce()
7261 coal->tx_max_coalesced_frames = bp->tx_quick_cons_trip; in bnx2_get_coalesce()
7262 coal->tx_coalesce_usecs_irq = bp->tx_ticks_int; in bnx2_get_coalesce()
7263 coal->tx_max_coalesced_frames_irq = bp->tx_quick_cons_trip_int; in bnx2_get_coalesce()
7265 coal->stats_block_coalesce_usecs = bp->stats_ticks; in bnx2_get_coalesce()
7275 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_coalesce() local
7277 bp->rx_ticks = (u16) coal->rx_coalesce_usecs; in bnx2_set_coalesce()
7278 if (bp->rx_ticks > 0x3ff) bp->rx_ticks = 0x3ff; in bnx2_set_coalesce()
7280 bp->rx_quick_cons_trip = (u16) coal->rx_max_coalesced_frames; in bnx2_set_coalesce()
7281 if (bp->rx_quick_cons_trip > 0xff) bp->rx_quick_cons_trip = 0xff; in bnx2_set_coalesce()
7283 bp->rx_ticks_int = (u16) coal->rx_coalesce_usecs_irq; in bnx2_set_coalesce()
7284 if (bp->rx_ticks_int > 0x3ff) bp->rx_ticks_int = 0x3ff; in bnx2_set_coalesce()
7286 bp->rx_quick_cons_trip_int = (u16) coal->rx_max_coalesced_frames_irq; in bnx2_set_coalesce()
7287 if (bp->rx_quick_cons_trip_int > 0xff) in bnx2_set_coalesce()
7288 bp->rx_quick_cons_trip_int = 0xff; in bnx2_set_coalesce()
7290 bp->tx_ticks = (u16) coal->tx_coalesce_usecs; in bnx2_set_coalesce()
7291 if (bp->tx_ticks > 0x3ff) bp->tx_ticks = 0x3ff; in bnx2_set_coalesce()
7293 bp->tx_quick_cons_trip = (u16) coal->tx_max_coalesced_frames; in bnx2_set_coalesce()
7294 if (bp->tx_quick_cons_trip > 0xff) bp->tx_quick_cons_trip = 0xff; in bnx2_set_coalesce()
7296 bp->tx_ticks_int = (u16) coal->tx_coalesce_usecs_irq; in bnx2_set_coalesce()
7297 if (bp->tx_ticks_int > 0x3ff) bp->tx_ticks_int = 0x3ff; in bnx2_set_coalesce()
7299 bp->tx_quick_cons_trip_int = (u16) coal->tx_max_coalesced_frames_irq; in bnx2_set_coalesce()
7300 if (bp->tx_quick_cons_trip_int > 0xff) bp->tx_quick_cons_trip_int = in bnx2_set_coalesce()
7303 bp->stats_ticks = coal->stats_block_coalesce_usecs; in bnx2_set_coalesce()
7304 if (bp->flags & BNX2_FLAG_BROKEN_STATS) { in bnx2_set_coalesce()
7305 if (bp->stats_ticks != 0 && bp->stats_ticks != USEC_PER_SEC) in bnx2_set_coalesce()
7306 bp->stats_ticks = USEC_PER_SEC; in bnx2_set_coalesce()
7308 if (bp->stats_ticks > BNX2_HC_STATS_TICKS_HC_STAT_TICKS) in bnx2_set_coalesce()
7309 bp->stats_ticks = BNX2_HC_STATS_TICKS_HC_STAT_TICKS; in bnx2_set_coalesce()
7310 bp->stats_ticks &= BNX2_HC_STATS_TICKS_HC_STAT_TICKS; in bnx2_set_coalesce()
7312 if (netif_running(bp->dev)) { in bnx2_set_coalesce()
7313 bnx2_netif_stop(bp, true); in bnx2_set_coalesce()
7314 bnx2_init_nic(bp, 0); in bnx2_set_coalesce()
7315 bnx2_netif_start(bp, true); in bnx2_set_coalesce()
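
bnx2_set_coalesce() above clamps each coalescing tick value to a 10-bit hardware field (0x3ff) and each frame-count trip point to 8 bits (0xff), with a special case forcing a one-second statistics interval on chips flagged BNX2_FLAG_BROKEN_STATS. A minimal sketch of the clamping step, with hypothetical names, follows.

/* Standalone sketch of the coalescing clamping: tick values fit a 10-bit
 * field, frame-count trip points an 8-bit one.  clamp_u32() and the local
 * names are illustrative only. */
#include <stdint.h>
#include <stdio.h>

static uint32_t clamp_u32(uint32_t val, uint32_t max)
{
	return val > max ? max : val;
}

int main(void)
{
	uint32_t rx_usecs = 5000, rx_frames = 300;	/* user-requested values */

	uint32_t rx_ticks = clamp_u32(rx_usecs, 0x3ff);
	uint32_t rx_trip  = clamp_u32(rx_frames, 0xff);

	printf("rx_ticks=%u rx_quick_cons_trip=%u\n", rx_ticks, rx_trip);
	return 0;
}
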
7326 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_ringparam() local
7331 ering->rx_pending = bp->rx_ring_size; in bnx2_get_ringparam()
7332 ering->rx_jumbo_pending = bp->rx_pg_ring_size; in bnx2_get_ringparam()
7335 ering->tx_pending = bp->tx_ring_size; in bnx2_get_ringparam()
7339 bnx2_change_ring_size(struct bnx2 *bp, u32 rx, u32 tx, bool reset_irq) in bnx2_change_ring_size() argument
7341 if (netif_running(bp->dev)) { in bnx2_change_ring_size()
7343 bnx2_save_stats(bp); in bnx2_change_ring_size()
7345 bnx2_netif_stop(bp, true); in bnx2_change_ring_size()
7346 bnx2_reset_chip(bp, BNX2_DRV_MSG_CODE_RESET); in bnx2_change_ring_size()
7348 bnx2_free_irq(bp); in bnx2_change_ring_size()
7349 bnx2_del_napi(bp); in bnx2_change_ring_size()
7351 __bnx2_free_irq(bp); in bnx2_change_ring_size()
7353 bnx2_free_skbs(bp); in bnx2_change_ring_size()
7354 bnx2_free_mem(bp); in bnx2_change_ring_size()
7357 bnx2_set_rx_ring_size(bp, rx); in bnx2_change_ring_size()
7358 bp->tx_ring_size = tx; in bnx2_change_ring_size()
7360 if (netif_running(bp->dev)) { in bnx2_change_ring_size()
7364 rc = bnx2_setup_int_mode(bp, disable_msi); in bnx2_change_ring_size()
7365 bnx2_init_napi(bp); in bnx2_change_ring_size()
7369 rc = bnx2_alloc_mem(bp); in bnx2_change_ring_size()
7372 rc = bnx2_request_irq(bp); in bnx2_change_ring_size()
7375 rc = bnx2_init_nic(bp, 0); in bnx2_change_ring_size()
7378 bnx2_napi_enable(bp); in bnx2_change_ring_size()
7379 dev_close(bp->dev); in bnx2_change_ring_size()
7383 mutex_lock(&bp->cnic_lock); in bnx2_change_ring_size()
7385 if (bp->cnic_eth_dev.drv_state & CNIC_DRV_STATE_REGD) in bnx2_change_ring_size()
7386 bnx2_setup_cnic_irq_info(bp); in bnx2_change_ring_size()
7387 mutex_unlock(&bp->cnic_lock); in bnx2_change_ring_size()
7389 bnx2_netif_start(bp, true); in bnx2_change_ring_size()
7399 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_ringparam() local
7408 rc = bnx2_change_ring_size(bp, ering->rx_pending, ering->tx_pending, in bnx2_set_ringparam()
7416 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_pauseparam() local
7418 epause->autoneg = ((bp->autoneg & AUTONEG_FLOW_CTRL) != 0); in bnx2_get_pauseparam()
7419 epause->rx_pause = ((bp->flow_ctrl & FLOW_CTRL_RX) != 0); in bnx2_get_pauseparam()
7420 epause->tx_pause = ((bp->flow_ctrl & FLOW_CTRL_TX) != 0); in bnx2_get_pauseparam()
7426 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_pauseparam() local
7428 bp->req_flow_ctrl = 0; in bnx2_set_pauseparam()
7430 bp->req_flow_ctrl |= FLOW_CTRL_RX; in bnx2_set_pauseparam()
7432 bp->req_flow_ctrl |= FLOW_CTRL_TX; in bnx2_set_pauseparam()
7435 bp->autoneg |= AUTONEG_FLOW_CTRL; in bnx2_set_pauseparam()
7438 bp->autoneg &= ~AUTONEG_FLOW_CTRL; in bnx2_set_pauseparam()
7442 spin_lock_bh(&bp->phy_lock); in bnx2_set_pauseparam()
7443 bnx2_setup_phy(bp, bp->phy_port); in bnx2_set_pauseparam()
7444 spin_unlock_bh(&bp->phy_lock); in bnx2_set_pauseparam()
7604 struct bnx2 *bp = netdev_priv(dev); in bnx2_self_test() local
7610 bnx2_netif_stop(bp, true); in bnx2_self_test()
7611 bnx2_reset_chip(bp, BNX2_DRV_MSG_CODE_DIAG); in bnx2_self_test()
7612 bnx2_free_skbs(bp); in bnx2_self_test()
7614 if (bnx2_test_registers(bp) != 0) { in bnx2_self_test()
7618 if (bnx2_test_memory(bp) != 0) { in bnx2_self_test()
7622 if ((buf[2] = bnx2_test_loopback(bp)) != 0) in bnx2_self_test()
7625 if (!netif_running(bp->dev)) in bnx2_self_test()
7626 bnx2_shutdown_chip(bp); in bnx2_self_test()
7628 bnx2_init_nic(bp, 1); in bnx2_self_test()
7629 bnx2_netif_start(bp, true); in bnx2_self_test()
7634 if (bp->link_up) in bnx2_self_test()
7640 if (bnx2_test_nvram(bp) != 0) { in bnx2_self_test()
7644 if (bnx2_test_intr(bp) != 0) { in bnx2_self_test()
7649 if (bnx2_test_link(bp) != 0) { in bnx2_self_test()
7675 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_ethtool_stats() local
7677 u32 *hw_stats = (u32 *) bp->stats_blk; in bnx2_get_ethtool_stats()
7678 u32 *temp_stats = (u32 *) bp->temp_stats_blk; in bnx2_get_ethtool_stats()
7686 if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) || in bnx2_get_ethtool_stats()
7687 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A1) || in bnx2_get_ethtool_stats()
7688 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A2) || in bnx2_get_ethtool_stats()
7689 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_A0)) in bnx2_get_ethtool_stats()
7721 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_phys_id() local
7725 bp->leds_save = BNX2_RD(bp, BNX2_MISC_CFG); in bnx2_set_phys_id()
7726 BNX2_WR(bp, BNX2_MISC_CFG, BNX2_MISC_CFG_LEDMODE_MAC); in bnx2_set_phys_id()
7730 BNX2_WR(bp, BNX2_EMAC_LED, BNX2_EMAC_LED_OVERRIDE | in bnx2_set_phys_id()
7739 BNX2_WR(bp, BNX2_EMAC_LED, BNX2_EMAC_LED_OVERRIDE); in bnx2_set_phys_id()
7743 BNX2_WR(bp, BNX2_EMAC_LED, 0); in bnx2_set_phys_id()
7744 BNX2_WR(bp, BNX2_MISC_CFG, bp->leds_save); in bnx2_set_phys_id()
7754 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_features() local
7763 !!(bp->rx_mode & BNX2_EMAC_RX_MODE_KEEP_VLAN_TAG)) && in bnx2_set_features()
7765 bnx2_netif_stop(bp, false); in bnx2_set_features()
7768 bnx2_fw_sync(bp, BNX2_DRV_MSG_CODE_KEEP_VLAN_UPDATE, 0, 1); in bnx2_set_features()
7769 bnx2_netif_start(bp, false); in bnx2_set_features()
7779 struct bnx2 *bp = netdev_priv(dev); in bnx2_get_channels() local
7783 if ((bp->flags & BNX2_FLAG_MSIX_CAP) && !disable_msi) { in bnx2_get_channels()
7792 channels->rx_count = bp->num_rx_rings; in bnx2_get_channels()
7793 channels->tx_count = bp->num_tx_rings; in bnx2_get_channels()
7801 struct bnx2 *bp = netdev_priv(dev); in bnx2_set_channels() local
7806 if ((bp->flags & BNX2_FLAG_MSIX_CAP) && !disable_msi) { in bnx2_set_channels()
7814 bp->num_req_rx_rings = channels->rx_count; in bnx2_set_channels()
7815 bp->num_req_tx_rings = channels->tx_count; in bnx2_set_channels()
7818 rc = bnx2_change_ring_size(bp, bp->rx_ring_size, in bnx2_set_channels()
7819 bp->tx_ring_size, true); in bnx2_set_channels()
7862 struct bnx2 *bp = netdev_priv(dev); in bnx2_ioctl() local
7867 data->phy_id = bp->phy_addr; in bnx2_ioctl()
7873 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_ioctl()
7879 spin_lock_bh(&bp->phy_lock); in bnx2_ioctl()
7880 err = bnx2_read_phy(bp, data->reg_num & 0x1f, &mii_regval); in bnx2_ioctl()
7881 spin_unlock_bh(&bp->phy_lock); in bnx2_ioctl()
7889 if (bp->phy_flags & BNX2_PHY_FLAG_REMOTE_PHY_CAP) in bnx2_ioctl()
7895 spin_lock_bh(&bp->phy_lock); in bnx2_ioctl()
7896 err = bnx2_write_phy(bp, data->reg_num & 0x1f, data->val_in); in bnx2_ioctl()
7897 spin_unlock_bh(&bp->phy_lock); in bnx2_ioctl()
7913 struct bnx2 *bp = netdev_priv(dev); in bnx2_change_mac_addr() local
7920 bnx2_set_mac_addr(bp, bp->dev->dev_addr, 0); in bnx2_change_mac_addr()
7929 struct bnx2 *bp = netdev_priv(dev); in bnx2_change_mtu() local
7932 return bnx2_change_ring_size(bp, bp->rx_ring_size, bp->tx_ring_size, in bnx2_change_mtu()
7940 struct bnx2 *bp = netdev_priv(dev); in poll_bnx2() local
7943 for (i = 0; i < bp->irq_nvecs; i++) { in poll_bnx2()
7944 struct bnx2_irq *irq = &bp->irq_tbl[i]; in poll_bnx2()
7947 irq->handler(irq->vector, &bp->bnx2_napi[i]); in poll_bnx2()
7954 bnx2_get_5709_media(struct bnx2 *bp) in bnx2_get_5709_media() argument
7956 u32 val = BNX2_RD(bp, BNX2_MISC_DUAL_MEDIA_CTRL); in bnx2_get_5709_media()
7963 bp->phy_flags |= BNX2_PHY_FLAG_SERDES; in bnx2_get_5709_media()
7972 if (bp->func == 0) { in bnx2_get_5709_media()
7977 bp->phy_flags |= BNX2_PHY_FLAG_SERDES; in bnx2_get_5709_media()
7985 bp->phy_flags |= BNX2_PHY_FLAG_SERDES; in bnx2_get_5709_media()
7992 bnx2_get_pci_speed(struct bnx2 *bp) in bnx2_get_pci_speed() argument
7996 reg = BNX2_RD(bp, BNX2_PCICFG_MISC_STATUS); in bnx2_get_pci_speed()
8000 bp->flags |= BNX2_FLAG_PCIX; in bnx2_get_pci_speed()
8002 clkreg = BNX2_RD(bp, BNX2_PCICFG_PCI_CLOCK_CONTROL_BITS); in bnx2_get_pci_speed()
8007 bp->bus_speed_mhz = 133; in bnx2_get_pci_speed()
8011 bp->bus_speed_mhz = 100; in bnx2_get_pci_speed()
8016 bp->bus_speed_mhz = 66; in bnx2_get_pci_speed()
8021 bp->bus_speed_mhz = 50; in bnx2_get_pci_speed()
8027 bp->bus_speed_mhz = 33; in bnx2_get_pci_speed()
8033 bp->bus_speed_mhz = 66; in bnx2_get_pci_speed()
8035 bp->bus_speed_mhz = 33; in bnx2_get_pci_speed()
8039 bp->flags |= BNX2_FLAG_PCI_32BIT; in bnx2_get_pci_speed()
8044 bnx2_read_vpd_fw_ver(struct bnx2 *bp) in bnx2_read_vpd_fw_ver() argument
8058 rc = bnx2_nvram_read(bp, BNX2_VPD_NVRAM_OFFSET, data, BNX2_VPD_LEN); in bnx2_read_vpd_fw_ver()
8082 memcpy(bp->fw_version, &data[j], len); in bnx2_read_vpd_fw_ver()
8083 bp->fw_version[len] = ' '; in bnx2_read_vpd_fw_ver()
8092 struct bnx2 *bp; in bnx2_init_board() local
8099 bp = netdev_priv(dev); in bnx2_init_board()
8101 bp->flags = 0; in bnx2_init_board()
8102 bp->phy_flags = 0; in bnx2_init_board()
8104 bp->temp_stats_blk = in bnx2_init_board()
8107 if (!bp->temp_stats_blk) { in bnx2_init_board()
8134 bp->pm_cap = pdev->pm_cap; in bnx2_init_board()
8135 if (bp->pm_cap == 0) { in bnx2_init_board()
8142 bp->dev = dev; in bnx2_init_board()
8143 bp->pdev = pdev; in bnx2_init_board()
8145 spin_lock_init(&bp->phy_lock); in bnx2_init_board()
8146 spin_lock_init(&bp->indirect_lock); in bnx2_init_board()
8148 mutex_init(&bp->cnic_lock); in bnx2_init_board()
8150 INIT_WORK(&bp->reset_task, bnx2_reset_task); in bnx2_init_board()
8152 bp->regview = pci_iomap(pdev, 0, MB_GET_CID_ADDR(TX_TSS_CID + in bnx2_init_board()
8154 if (!bp->regview) { in bnx2_init_board()
8164 BNX2_WR(bp, BNX2_PCICFG_MISC_CONFIG, in bnx2_init_board()
8168 bp->chip_id = BNX2_RD(bp, BNX2_MISC_ID); in bnx2_init_board()
8170 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) { in bnx2_init_board()
8176 bp->flags |= BNX2_FLAG_PCIE; in bnx2_init_board()
8177 if (BNX2_CHIP_REV(bp) == BNX2_CHIP_REV_Ax) in bnx2_init_board()
8178 bp->flags |= BNX2_FLAG_JUMBO_BROKEN; in bnx2_init_board()
8183 bp->flags |= BNX2_FLAG_AER_ENABLED; in bnx2_init_board()
8186 bp->pcix_cap = pci_find_capability(pdev, PCI_CAP_ID_PCIX); in bnx2_init_board()
8187 if (bp->pcix_cap == 0) { in bnx2_init_board()
8193 bp->flags |= BNX2_FLAG_BROKEN_STATS; in bnx2_init_board()
8196 if (BNX2_CHIP(bp) == BNX2_CHIP_5709 && in bnx2_init_board()
8197 BNX2_CHIP_REV(bp) != BNX2_CHIP_REV_Ax) { in bnx2_init_board()
8199 bp->flags |= BNX2_FLAG_MSIX_CAP; in bnx2_init_board()
8202 if (BNX2_CHIP_ID(bp) != BNX2_CHIP_ID_5706_A0 && in bnx2_init_board()
8203 BNX2_CHIP_ID(bp) != BNX2_CHIP_ID_5706_A1) { in bnx2_init_board()
8205 bp->flags |= BNX2_FLAG_MSI_CAP; in bnx2_init_board()
8209 if (BNX2_CHIP(bp) == BNX2_CHIP_5708) in bnx2_init_board()
8228 if (!(bp->flags & BNX2_FLAG_PCIE)) in bnx2_init_board()
8229 bnx2_get_pci_speed(bp); in bnx2_init_board()
8232 if (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) { in bnx2_init_board()
8233 reg = BNX2_RD(bp, PCI_COMMAND); in bnx2_init_board()
8235 BNX2_WR(bp, PCI_COMMAND, reg); in bnx2_init_board()
8236 } else if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A1) && in bnx2_init_board()
8237 !(bp->flags & BNX2_FLAG_PCIX)) { in bnx2_init_board()
8244 bnx2_init_nvram(bp); in bnx2_init_board()
8246 reg = bnx2_reg_rd_ind(bp, BNX2_SHM_HDR_SIGNATURE); in bnx2_init_board()
8248 if (bnx2_reg_rd_ind(bp, BNX2_MCP_TOE_ID) & BNX2_MCP_TOE_ID_FUNCTION_ID) in bnx2_init_board()
8249 bp->func = 1; in bnx2_init_board()
8253 u32 off = bp->func << 2; in bnx2_init_board()
8255 bp->shmem_base = bnx2_reg_rd_ind(bp, BNX2_SHM_HDR_ADDR_0 + off); in bnx2_init_board()
8257 bp->shmem_base = HOST_VIEW_SHMEM_BASE; in bnx2_init_board()
8262 reg = bnx2_shmem_rd(bp, BNX2_DEV_INFO_SIGNATURE); in bnx2_init_board()
8271 bnx2_read_vpd_fw_ver(bp); in bnx2_init_board()
8273 j = strlen(bp->fw_version); in bnx2_init_board()
8274 reg = bnx2_shmem_rd(bp, BNX2_DEV_INFO_BC_REV); in bnx2_init_board()
8279 bp->fw_version[j++] = 'b'; in bnx2_init_board()
8280 bp->fw_version[j++] = 'c'; in bnx2_init_board()
8281 bp->fw_version[j++] = ' '; in bnx2_init_board()
8286 bp->fw_version[j++] = (num / k) + '0'; in bnx2_init_board()
8291 bp->fw_version[j++] = '.'; in bnx2_init_board()
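
The fw_version lines above append a "bc major.minor.fix" string decoded from the packed BNX2_DEV_INFO_BC_REV word, emitting the digits by hand so leading zeros are skipped. The sketch below shows the same decode with snprintf() and an invented revision word; it is illustrative, not the driver's exact formatting.

/* Sketch of rendering a packed bootcode revision word (major/minor/fix in
 * the top three bytes) as "bc X.Y.Z".  The example value is invented. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t reg = 0x06020300;	/* hypothetical: major 6, minor 2, fix 3 */
	char ver[16];

	snprintf(ver, sizeof(ver), "bc %u.%u.%u",
		 (reg >> 24) & 0xff, (reg >> 16) & 0xff, (reg >> 8) & 0xff);
	printf("%s\n", ver);		/* -> "bc 6.2.3" */
	return 0;
}
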
8293 reg = bnx2_shmem_rd(bp, BNX2_PORT_FEATURE); in bnx2_init_board()
8295 bp->wol = 1; in bnx2_init_board()
8298 bp->flags |= BNX2_FLAG_ASF_ENABLE; in bnx2_init_board()
8301 reg = bnx2_shmem_rd(bp, BNX2_BC_STATE_CONDITION); in bnx2_init_board()
8307 reg = bnx2_shmem_rd(bp, BNX2_BC_STATE_CONDITION); in bnx2_init_board()
8311 u32 addr = bnx2_shmem_rd(bp, BNX2_MFW_VER_PTR); in bnx2_init_board()
8314 bp->fw_version[j++] = ' '; in bnx2_init_board()
8316 reg = bnx2_reg_rd_ind(bp, addr + i * 4); in bnx2_init_board()
8318 memcpy(&bp->fw_version[j], &reg, 4); in bnx2_init_board()
8323 reg = bnx2_shmem_rd(bp, BNX2_PORT_HW_CFG_MAC_UPPER); in bnx2_init_board()
8324 bp->mac_addr[0] = (u8) (reg >> 8); in bnx2_init_board()
8325 bp->mac_addr[1] = (u8) reg; in bnx2_init_board()
8327 reg = bnx2_shmem_rd(bp, BNX2_PORT_HW_CFG_MAC_LOWER); in bnx2_init_board()
8328 bp->mac_addr[2] = (u8) (reg >> 24); in bnx2_init_board()
8329 bp->mac_addr[3] = (u8) (reg >> 16); in bnx2_init_board()
8330 bp->mac_addr[4] = (u8) (reg >> 8); in bnx2_init_board()
8331 bp->mac_addr[5] = (u8) reg; in bnx2_init_board()
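
The lines above recover the permanent MAC address from two 32-bit shared-memory words: the upper word supplies bytes 0-1 in its low 16 bits and the lower word supplies bytes 2-5. A standalone sketch of that decode, using invented example values, is below.

/* Sketch of the MAC-address decode from the shared-memory MAC_UPPER and
 * MAC_LOWER words.  The word values here are made up. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t upper = 0x00000010;	/* example BNX2_PORT_HW_CFG_MAC_UPPER */
	uint32_t lower = 0x18a7b2c4;	/* example BNX2_PORT_HW_CFG_MAC_LOWER */
	uint8_t mac[6];

	mac[0] = (uint8_t)(upper >> 8);
	mac[1] = (uint8_t)upper;
	mac[2] = (uint8_t)(lower >> 24);
	mac[3] = (uint8_t)(lower >> 16);
	mac[4] = (uint8_t)(lower >> 8);
	mac[5] = (uint8_t)lower;

	printf("%02x:%02x:%02x:%02x:%02x:%02x\n",
	       mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
	return 0;
}
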
8333 bp->tx_ring_size = BNX2_MAX_TX_DESC_CNT; in bnx2_init_board()
8334 bnx2_set_rx_ring_size(bp, 255); in bnx2_init_board()
8336 bp->tx_quick_cons_trip_int = 2; in bnx2_init_board()
8337 bp->tx_quick_cons_trip = 20; in bnx2_init_board()
8338 bp->tx_ticks_int = 18; in bnx2_init_board()
8339 bp->tx_ticks = 80; in bnx2_init_board()
8341 bp->rx_quick_cons_trip_int = 2; in bnx2_init_board()
8342 bp->rx_quick_cons_trip = 12; in bnx2_init_board()
8343 bp->rx_ticks_int = 18; in bnx2_init_board()
8344 bp->rx_ticks = 18; in bnx2_init_board()
8346 bp->stats_ticks = USEC_PER_SEC & BNX2_HC_STATS_TICKS_HC_STAT_TICKS; in bnx2_init_board()
8348 bp->current_interval = BNX2_TIMER_INTERVAL; in bnx2_init_board()
8350 bp->phy_addr = 1; in bnx2_init_board()
8358 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_init_board()
8359 bnx2_get_5709_media(bp); in bnx2_init_board()
8360 else if (BNX2_CHIP_BOND(bp) & BNX2_CHIP_BOND_SERDES_BIT) in bnx2_init_board()
8361 bp->phy_flags |= BNX2_PHY_FLAG_SERDES; in bnx2_init_board()
8363 bp->phy_port = PORT_TP; in bnx2_init_board()
8364 if (bp->phy_flags & BNX2_PHY_FLAG_SERDES) { in bnx2_init_board()
8365 bp->phy_port = PORT_FIBRE; in bnx2_init_board()
8366 reg = bnx2_shmem_rd(bp, BNX2_SHARED_HW_CFG_CONFIG); in bnx2_init_board()
8368 bp->flags |= BNX2_FLAG_NO_WOL; in bnx2_init_board()
8369 bp->wol = 0; in bnx2_init_board()
8371 if (BNX2_CHIP(bp) == BNX2_CHIP_5706) { in bnx2_init_board()
8378 bp->phy_flags |= BNX2_PHY_FLAG_NO_PARALLEL; in bnx2_init_board()
8380 bp->phy_addr = 2; in bnx2_init_board()
8382 bp->phy_flags |= BNX2_PHY_FLAG_2_5G_CAPABLE; in bnx2_init_board()
8384 } else if (BNX2_CHIP(bp) == BNX2_CHIP_5706 || in bnx2_init_board()
8385 BNX2_CHIP(bp) == BNX2_CHIP_5708) in bnx2_init_board()
8386 bp->phy_flags |= BNX2_PHY_FLAG_CRC_FIX; in bnx2_init_board()
8387 else if (BNX2_CHIP(bp) == BNX2_CHIP_5709 && in bnx2_init_board()
8388 (BNX2_CHIP_REV(bp) == BNX2_CHIP_REV_Ax || in bnx2_init_board()
8389 BNX2_CHIP_REV(bp) == BNX2_CHIP_REV_Bx)) in bnx2_init_board()
8390 bp->phy_flags |= BNX2_PHY_FLAG_DIS_EARLY_DAC; in bnx2_init_board()
8392 bnx2_init_fw_cap(bp); in bnx2_init_board()
8394 if ((BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_A0) || in bnx2_init_board()
8395 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_B0) || in bnx2_init_board()
8396 (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5708_B1) || in bnx2_init_board()
8397 !(BNX2_RD(bp, BNX2_PCI_CONFIG_3) & BNX2_PCI_CONFIG_3_VAUX_PRESET)) { in bnx2_init_board()
8398 bp->flags |= BNX2_FLAG_NO_WOL; in bnx2_init_board()
8399 bp->wol = 0; in bnx2_init_board()
8402 if (bp->flags & BNX2_FLAG_NO_WOL) in bnx2_init_board()
8403 device_set_wakeup_capable(&bp->pdev->dev, false); in bnx2_init_board()
8405 device_set_wakeup_enable(&bp->pdev->dev, bp->wol); in bnx2_init_board()
8407 if (BNX2_CHIP_ID(bp) == BNX2_CHIP_ID_5706_A0) { in bnx2_init_board()
8408 bp->tx_quick_cons_trip_int = in bnx2_init_board()
8409 bp->tx_quick_cons_trip; in bnx2_init_board()
8410 bp->tx_ticks_int = bp->tx_ticks; in bnx2_init_board()
8411 bp->rx_quick_cons_trip_int = in bnx2_init_board()
8412 bp->rx_quick_cons_trip; in bnx2_init_board()
8413 bp->rx_ticks_int = bp->rx_ticks; in bnx2_init_board()
8414 bp->comp_prod_trip_int = bp->comp_prod_trip; in bnx2_init_board()
8415 bp->com_ticks_int = bp->com_ticks; in bnx2_init_board()
8416 bp->cmd_ticks_int = bp->cmd_ticks; in bnx2_init_board()
8429 if (BNX2_CHIP(bp) == BNX2_CHIP_5706 && disable_msi == 0) { in bnx2_init_board()
8445 bnx2_set_default_link(bp); in bnx2_init_board()
8446 bp->req_flow_ctrl = FLOW_CTRL_RX | FLOW_CTRL_TX; in bnx2_init_board()
8448 timer_setup(&bp->timer, bnx2_timer, 0); in bnx2_init_board()
8449 bp->timer.expires = RUN_AT(BNX2_TIMER_INTERVAL); in bnx2_init_board()
8452 if (bnx2_shmem_rd(bp, BNX2_ISCSI_INITIATOR) & BNX2_ISCSI_INITIATOR_EN) in bnx2_init_board()
8453 bp->cnic_eth_dev.max_iscsi_conn = in bnx2_init_board()
8454 (bnx2_shmem_rd(bp, BNX2_ISCSI_MAX_CONN) & in bnx2_init_board()
8456 bp->cnic_probe = bnx2_cnic_probe; in bnx2_init_board()
8463 if (bp->flags & BNX2_FLAG_AER_ENABLED) { in bnx2_init_board()
8465 bp->flags &= ~BNX2_FLAG_AER_ENABLED; in bnx2_init_board()
8468 pci_iounmap(pdev, bp->regview); in bnx2_init_board()
8469 bp->regview = NULL; in bnx2_init_board()
8478 kfree(bp->temp_stats_blk); in bnx2_init_board()
8484 bnx2_bus_string(struct bnx2 *bp, char *str) in bnx2_bus_string() argument
8488 if (bp->flags & BNX2_FLAG_PCIE) { in bnx2_bus_string()
8492 if (bp->flags & BNX2_FLAG_PCIX) in bnx2_bus_string()
8494 if (bp->flags & BNX2_FLAG_PCI_32BIT) in bnx2_bus_string()
8498 s += sprintf(s, " %dMHz", bp->bus_speed_mhz); in bnx2_bus_string()
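
bnx2_bus_string() above builds a human-readable bus description by repeatedly advancing a char pointer with s += sprintf(s, ...). A minimal standalone example of that chaining idiom, with invented flag values and simplified wording, is below.

/* Sketch of the s += sprintf() chaining used to assemble a string such as
 * "PCI-X 64-bit 133MHz".  Flags and wording are illustrative only. */
#include <stdio.h>

int main(void)
{
	char str[32], *s = str;
	int pcix = 1, is_32bit = 0, mhz = 133;

	s += sprintf(s, "PCI%s", pcix ? "-X" : "");
	s += sprintf(s, " %s-bit", is_32bit ? "32" : "64");
	s += sprintf(s, " %dMHz", mhz);

	printf("%s\n", str);
	return 0;
}
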
8504 bnx2_del_napi(struct bnx2 *bp) in bnx2_del_napi() argument
8508 for (i = 0; i < bp->irq_nvecs; i++) in bnx2_del_napi()
8509 netif_napi_del(&bp->bnx2_napi[i].napi); in bnx2_del_napi()
8513 bnx2_init_napi(struct bnx2 *bp) in bnx2_init_napi() argument
8517 for (i = 0; i < bp->irq_nvecs; i++) { in bnx2_init_napi()
8518 struct bnx2_napi *bnapi = &bp->bnx2_napi[i]; in bnx2_init_napi()
8526 netif_napi_add(bp->dev, &bp->bnx2_napi[i].napi, poll); in bnx2_init_napi()
8527 bnapi->bp = bp; in bnx2_init_napi()
8552 struct bnx2 *bp; in bnx2_init_one() local
8557 dev = alloc_etherdev_mq(sizeof(*bp), TX_MAX_RINGS); in bnx2_init_one()
8569 bp = netdev_priv(dev); in bnx2_init_one()
8580 bnx2_wait_dma_complete(bp); in bnx2_init_one()
8582 eth_hw_addr_set(dev, bp->mac_addr); in bnx2_init_one()
8588 if (BNX2_CHIP(bp) == BNX2_CHIP_5709) in bnx2_init_one()
8598 if (!(bp->flags & BNX2_FLAG_CAN_KEEP_VLAN)) in bnx2_init_one()
8608 ((BNX2_CHIP_ID(bp) & 0xf000) >> 12) + 'A', in bnx2_init_one()
8609 ((BNX2_CHIP_ID(bp) & 0x0ff0) >> 4), in bnx2_init_one()
8610 bnx2_bus_string(bp, str), (long)pci_resource_start(pdev, 0), in bnx2_init_one()
8616 pci_iounmap(pdev, bp->regview); in bnx2_init_one()
8629 struct bnx2 *bp = netdev_priv(dev); in bnx2_remove_one() local
8633 del_timer_sync(&bp->timer); in bnx2_remove_one()
8634 cancel_work_sync(&bp->reset_task); in bnx2_remove_one()
8636 pci_iounmap(bp->pdev, bp->regview); in bnx2_remove_one()
8639 kfree(bp->temp_stats_blk); in bnx2_remove_one()
8641 if (bp->flags & BNX2_FLAG_AER_ENABLED) { in bnx2_remove_one()
8643 bp->flags &= ~BNX2_FLAG_AER_ENABLED; in bnx2_remove_one()
8646 bnx2_release_firmware(bp); in bnx2_remove_one()
8659 struct bnx2 *bp = netdev_priv(dev); in bnx2_suspend() local
8662 cancel_work_sync(&bp->reset_task); in bnx2_suspend()
8663 bnx2_netif_stop(bp, true); in bnx2_suspend()
8665 del_timer_sync(&bp->timer); in bnx2_suspend()
8666 bnx2_shutdown_chip(bp); in bnx2_suspend()
8667 __bnx2_free_irq(bp); in bnx2_suspend()
8668 bnx2_free_skbs(bp); in bnx2_suspend()
8670 bnx2_setup_wol(bp); in bnx2_suspend()
8678 struct bnx2 *bp = netdev_priv(dev); in bnx2_resume() local
8683 bnx2_set_power_state(bp, PCI_D0); in bnx2_resume()
8685 bnx2_request_irq(bp); in bnx2_resume()
8686 bnx2_init_nic(bp, 1); in bnx2_resume()
8687 bnx2_netif_start(bp, true); in bnx2_resume()
8711 struct bnx2 *bp = netdev_priv(dev); in bnx2_io_error_detected() local
8722 bnx2_netif_stop(bp, true); in bnx2_io_error_detected()
8723 del_timer_sync(&bp->timer); in bnx2_io_error_detected()
8724 bnx2_reset_nic(bp, BNX2_DRV_MSG_CODE_RESET); in bnx2_io_error_detected()
8743 struct bnx2 *bp = netdev_priv(dev); in bnx2_io_slot_reset() local
8757 err = bnx2_init_nic(bp, 1); in bnx2_io_slot_reset()
8764 bnx2_napi_enable(bp); in bnx2_io_slot_reset()
8769 if (!(bp->flags & BNX2_FLAG_AER_ENABLED)) in bnx2_io_slot_reset()
8785 struct bnx2 *bp = netdev_priv(dev); in bnx2_io_resume() local
8789 bnx2_netif_start(bp, true); in bnx2_io_resume()
8798 struct bnx2 *bp; in bnx2_shutdown() local
8803 bp = netdev_priv(dev); in bnx2_shutdown()
8804 if (!bp) in bnx2_shutdown()
8809 dev_close(bp->dev); in bnx2_shutdown()
8812 bnx2_set_power_state(bp, PCI_D3hot); in bnx2_shutdown()