Lines matching refs:bp (references to the driver's struct bnx2x device pointer in drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c)
307 static int bnx2x_set_storm_rx_mode(struct bnx2x *bp);
313 static int bnx2x_hwtstamp_ioctl(struct bnx2x *bp, struct ifreq *ifr);
315 static void __storm_memset_dma_mapping(struct bnx2x *bp, in __storm_memset_dma_mapping() argument
318 REG_WR(bp, addr, U64_LO(mapping)); in __storm_memset_dma_mapping()
319 REG_WR(bp, addr + 4, U64_HI(mapping)); in __storm_memset_dma_mapping()
322 static void storm_memset_spq_addr(struct bnx2x *bp, in storm_memset_spq_addr() argument
328 __storm_memset_dma_mapping(bp, addr, mapping); in storm_memset_spq_addr()
331 static void storm_memset_vf_to_pf(struct bnx2x *bp, u16 abs_fid, in storm_memset_vf_to_pf() argument
334 REG_WR8(bp, BAR_XSTRORM_INTMEM + XSTORM_VF_TO_PF_OFFSET(abs_fid), in storm_memset_vf_to_pf()
336 REG_WR8(bp, BAR_CSTRORM_INTMEM + CSTORM_VF_TO_PF_OFFSET(abs_fid), in storm_memset_vf_to_pf()
338 REG_WR8(bp, BAR_TSTRORM_INTMEM + TSTORM_VF_TO_PF_OFFSET(abs_fid), in storm_memset_vf_to_pf()
340 REG_WR8(bp, BAR_USTRORM_INTMEM + USTORM_VF_TO_PF_OFFSET(abs_fid), in storm_memset_vf_to_pf()
344 static void storm_memset_func_en(struct bnx2x *bp, u16 abs_fid, in storm_memset_func_en() argument
347 REG_WR8(bp, BAR_XSTRORM_INTMEM + XSTORM_FUNC_EN_OFFSET(abs_fid), in storm_memset_func_en()
349 REG_WR8(bp, BAR_CSTRORM_INTMEM + CSTORM_FUNC_EN_OFFSET(abs_fid), in storm_memset_func_en()
351 REG_WR8(bp, BAR_TSTRORM_INTMEM + TSTORM_FUNC_EN_OFFSET(abs_fid), in storm_memset_func_en()
353 REG_WR8(bp, BAR_USTRORM_INTMEM + USTORM_FUNC_EN_OFFSET(abs_fid), in storm_memset_func_en()
357 static void storm_memset_eq_data(struct bnx2x *bp, in storm_memset_eq_data() argument
365 __storm_memset_struct(bp, addr, size, (u32 *)eq_data); in storm_memset_eq_data()
368 static void storm_memset_eq_prod(struct bnx2x *bp, u16 eq_prod, in storm_memset_eq_prod() argument
372 REG_WR16(bp, addr, eq_prod); in storm_memset_eq_prod()
378 static void bnx2x_reg_wr_ind(struct bnx2x *bp, u32 addr, u32 val) in bnx2x_reg_wr_ind() argument
380 pci_write_config_dword(bp->pdev, PCICFG_GRC_ADDRESS, addr); in bnx2x_reg_wr_ind()
381 pci_write_config_dword(bp->pdev, PCICFG_GRC_DATA, val); in bnx2x_reg_wr_ind()
382 pci_write_config_dword(bp->pdev, PCICFG_GRC_ADDRESS, in bnx2x_reg_wr_ind()
386 static u32 bnx2x_reg_rd_ind(struct bnx2x *bp, u32 addr) in bnx2x_reg_rd_ind() argument
390 pci_write_config_dword(bp->pdev, PCICFG_GRC_ADDRESS, addr); in bnx2x_reg_rd_ind()
391 pci_read_config_dword(bp->pdev, PCICFG_GRC_DATA, &val); in bnx2x_reg_rd_ind()
392 pci_write_config_dword(bp->pdev, PCICFG_GRC_ADDRESS, in bnx2x_reg_rd_ind()
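
The bnx2x_reg_wr_ind()/bnx2x_reg_rd_ind() entries above show the indirect GRC access pattern: the target address is written to the PCICFG_GRC_ADDRESS config-space register, the value moves through PCICFG_GRC_DATA, and the address register is rewritten afterwards (the restore value is truncated out of the listing). A minimal standalone sketch of that address/data window, assuming a simulated device and invented offsets rather than the driver's real PCI helpers, is below.

/* Standalone sketch of an "address/data window" indirect register access,
 * modelled on the PCICFG_GRC_ADDRESS / PCICFG_GRC_DATA pair in the listing.
 * The fake device below stands in for PCI config space; offsets are invented. */
#include <stdint.h>
#include <stdio.h>

#define CFG_GRC_ADDRESS 0x78            /* hypothetical window address register */
#define CFG_GRC_DATA    0x80            /* hypothetical window data register    */

struct fake_dev {
        uint32_t cfg[64];               /* simulated config space (dword indexed) */
        uint32_t grc[1024];             /* simulated GRC register file            */
};

static void cfg_write(struct fake_dev *d, uint32_t off, uint32_t val)
{
        d->cfg[off / 4] = val;
        if (off == CFG_GRC_DATA)        /* data write lands at the windowed address */
                d->grc[d->cfg[CFG_GRC_ADDRESS / 4] / 4] = val;
}

static uint32_t cfg_read(struct fake_dev *d, uint32_t off)
{
        if (off == CFG_GRC_DATA)        /* data read comes from the windowed address */
                return d->grc[d->cfg[CFG_GRC_ADDRESS / 4] / 4];
        return d->cfg[off / 4];
}

static void reg_wr_ind(struct fake_dev *d, uint32_t addr, uint32_t val)
{
        cfg_write(d, CFG_GRC_ADDRESS, addr);    /* 1. point the window          */
        cfg_write(d, CFG_GRC_DATA, val);        /* 2. move the value through it */
        cfg_write(d, CFG_GRC_ADDRESS, 0);       /* 3. park the window again     */
}

static uint32_t reg_rd_ind(struct fake_dev *d, uint32_t addr)
{
        uint32_t val;

        cfg_write(d, CFG_GRC_ADDRESS, addr);
        val = cfg_read(d, CFG_GRC_DATA);
        cfg_write(d, CFG_GRC_ADDRESS, 0);
        return val;
}

int main(void)
{
        struct fake_dev d = { { 0 }, { 0 } };

        reg_wr_ind(&d, 0x100, 0xdeadbeef);
        printf("0x%x\n", reg_rd_ind(&d, 0x100));        /* prints 0xdeadbeef */
        return 0;
}
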
404 static void bnx2x_dp_dmae(struct bnx2x *bp, in bnx2x_dp_dmae() argument
471 void bnx2x_post_dmae(struct bnx2x *bp, struct dmae_command *dmae, int idx) in bnx2x_post_dmae() argument
478 REG_WR(bp, cmd_offset + i*4, *(((u32 *)dmae) + i)); in bnx2x_post_dmae()
480 REG_WR(bp, dmae_reg_go_c[idx], 1); in bnx2x_post_dmae()
494 u32 bnx2x_dmae_opcode(struct bnx2x *bp, u8 src_type, u8 dst_type, in bnx2x_dmae_opcode() argument
504 opcode |= (BP_PORT(bp) ? DMAE_CMD_PORT_1 : DMAE_CMD_PORT_0); in bnx2x_dmae_opcode()
505 opcode |= ((BP_VN(bp) << DMAE_CMD_E1HVN_SHIFT) | in bnx2x_dmae_opcode()
506 (BP_VN(bp) << DMAE_COMMAND_DST_VN_SHIFT)); in bnx2x_dmae_opcode()
519 void bnx2x_prep_dmae_with_comp(struct bnx2x *bp, in bnx2x_prep_dmae_with_comp() argument
526 dmae->opcode = bnx2x_dmae_opcode(bp, src_type, dst_type, in bnx2x_prep_dmae_with_comp()
530 dmae->comp_addr_lo = U64_LO(bnx2x_sp_mapping(bp, wb_comp)); in bnx2x_prep_dmae_with_comp()
531 dmae->comp_addr_hi = U64_HI(bnx2x_sp_mapping(bp, wb_comp)); in bnx2x_prep_dmae_with_comp()
536 int bnx2x_issue_dmae_with_comp(struct bnx2x *bp, struct dmae_command *dmae, in bnx2x_issue_dmae_with_comp() argument
539 int cnt = CHIP_REV_IS_SLOW(bp) ? (400000) : 4000; in bnx2x_issue_dmae_with_comp()
542 bnx2x_dp_dmae(bp, dmae, BNX2X_MSG_DMAE); in bnx2x_issue_dmae_with_comp()
549 spin_lock_bh(&bp->dmae_lock); in bnx2x_issue_dmae_with_comp()
555 bnx2x_post_dmae(bp, dmae, INIT_DMAE_C(bp)); in bnx2x_issue_dmae_with_comp()
562 (bp->recovery_state != BNX2X_RECOVERY_DONE && in bnx2x_issue_dmae_with_comp()
563 bp->recovery_state != BNX2X_RECOVERY_NIC_LOADING)) { in bnx2x_issue_dmae_with_comp()
578 spin_unlock_bh(&bp->dmae_lock); in bnx2x_issue_dmae_with_comp()
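
bnx2x_issue_dmae_with_comp() serializes commands with bp->dmae_lock, posts the command via bnx2x_post_dmae(), and then spins on a write-back completion word, with a much larger budget on slow emulated chips (the CHIP_REV_IS_SLOW() line). A simplified standalone sketch of that post-then-poll shape follows; the completion magic, the instantly-completing fake engine, and the helper names are assumptions for illustration only.

#include <stdint.h>
#include <stdio.h>

#define DMAE_COMP_MAGIC 0xd0ae0001u     /* invented completion value */

static volatile uint32_t wb_comp;       /* write-back word the engine updates */

/* Fake "engine": posting the command completes it immediately. */
static void post_dmae(uint32_t src, uint32_t dst, uint32_t len)
{
        (void)src; (void)dst; (void)len;
        wb_comp = DMAE_COMP_MAGIC;
}

/* Post, then poll the completion word with a bounded budget,
 * mirroring the cnt/poll loop shape of issue-with-completion. */
static int issue_dmae_with_comp(uint32_t src, uint32_t dst, uint32_t len)
{
        int cnt = 4000;                 /* ASIC budget from the listing; emulation uses 400000 */

        wb_comp = 0;
        post_dmae(src, dst, len);

        while (wb_comp != DMAE_COMP_MAGIC) {
                if (cnt-- <= 0)
                        return -1;      /* DMAE timed out */
                /* real code would delay/relax between reads here */
        }
        return 0;
}

int main(void)
{
        printf("dmae rc=%d\n", issue_dmae_with_comp(0x1000, 0x2000, 16));
        return 0;
}
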
583 void bnx2x_write_dmae(struct bnx2x *bp, dma_addr_t dma_addr, u32 dst_addr, in bnx2x_write_dmae() argument
589 if (!bp->dmae_ready) { in bnx2x_write_dmae()
590 u32 *data = bnx2x_sp(bp, wb_data[0]); in bnx2x_write_dmae()
592 if (CHIP_IS_E1(bp)) in bnx2x_write_dmae()
593 bnx2x_init_ind_wr(bp, dst_addr, data, len32); in bnx2x_write_dmae()
595 bnx2x_init_str_wr(bp, dst_addr, data, len32); in bnx2x_write_dmae()
600 bnx2x_prep_dmae_with_comp(bp, &dmae, DMAE_SRC_PCI, DMAE_DST_GRC); in bnx2x_write_dmae()
610 rc = bnx2x_issue_dmae_with_comp(bp, &dmae, bnx2x_sp(bp, wb_comp)); in bnx2x_write_dmae()
619 void bnx2x_read_dmae(struct bnx2x *bp, u32 src_addr, u32 len32) in bnx2x_read_dmae() argument
624 if (!bp->dmae_ready) { in bnx2x_read_dmae()
625 u32 *data = bnx2x_sp(bp, wb_data[0]); in bnx2x_read_dmae()
628 if (CHIP_IS_E1(bp)) in bnx2x_read_dmae()
630 data[i] = bnx2x_reg_rd_ind(bp, src_addr + i*4); in bnx2x_read_dmae()
633 data[i] = REG_RD(bp, src_addr + i*4); in bnx2x_read_dmae()
639 bnx2x_prep_dmae_with_comp(bp, &dmae, DMAE_SRC_GRC, DMAE_DST_PCI); in bnx2x_read_dmae()
644 dmae.dst_addr_lo = U64_LO(bnx2x_sp_mapping(bp, wb_data)); in bnx2x_read_dmae()
645 dmae.dst_addr_hi = U64_HI(bnx2x_sp_mapping(bp, wb_data)); in bnx2x_read_dmae()
649 rc = bnx2x_issue_dmae_with_comp(bp, &dmae, bnx2x_sp(bp, wb_comp)); in bnx2x_read_dmae()
658 static void bnx2x_write_dmae_phys_len(struct bnx2x *bp, dma_addr_t phys_addr, in bnx2x_write_dmae_phys_len() argument
661 int dmae_wr_max = DMAE_LEN32_WR_MAX(bp); in bnx2x_write_dmae_phys_len()
665 bnx2x_write_dmae(bp, phys_addr + offset, in bnx2x_write_dmae_phys_len()
671 bnx2x_write_dmae(bp, phys_addr + offset, addr + offset, len); in bnx2x_write_dmae_phys_len()
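
bnx2x_write_dmae_phys_len() splits a long write into DMAE_LEN32_WR_MAX-sized chunks, advancing both the DMA source and the GRC destination by the same offset and issuing the remainder last. The standalone sketch below reproduces just that chunking arithmetic, with memcpy standing in for the DMAE engine and a made-up maximum.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define WR_MAX_DWORDS 4                 /* stand-in for DMAE_LEN32_WR_MAX; real value is chip dependent */

/* Copy len32 dwords from src to dst in WR_MAX_DWORDS chunks, remainder last,
 * with the same offset/len bookkeeping as the phys_len helper. */
static void write_chunked(const uint32_t *src, uint32_t *dst, int len32)
{
        int offset = 0;

        while (len32 > WR_MAX_DWORDS) {
                memcpy(dst + offset, src + offset, WR_MAX_DWORDS * sizeof(uint32_t));
                offset += WR_MAX_DWORDS;
                len32 -= WR_MAX_DWORDS;
        }
        memcpy(dst + offset, src + offset, len32 * sizeof(uint32_t));
}

int main(void)
{
        uint32_t src[10], dst[10] = { 0 };
        int i;

        for (i = 0; i < 10; i++)
                src[i] = i;
        write_chunked(src, dst, 10);
        printf("dst[9]=%u\n", dst[9]);  /* prints 9 */
        return 0;
}
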
685 static inline int bnx2x_get_assert_list_entry(struct bnx2x *bp, in bnx2x_get_assert_list_entry() argument
705 static int bnx2x_mc_assert(struct bnx2x *bp) in bnx2x_mc_assert() argument
731 last_idx = REG_RD8(bp, bar_storm_intmem[storm] + in bnx2x_mc_assert()
741 regs[j] = REG_RD(bp, bar_storm_intmem[storm] + in bnx2x_mc_assert()
742 bnx2x_get_assert_list_entry(bp, in bnx2x_mc_assert()
760 CHIP_IS_E1(bp) ? "everest1" : in bnx2x_mc_assert()
761 CHIP_IS_E1H(bp) ? "everest1h" : in bnx2x_mc_assert()
762 CHIP_IS_E2(bp) ? "everest2" : "everest3", in bnx2x_mc_assert()
763 bp->fw_major, bp->fw_minor, bp->fw_rev); in bnx2x_mc_assert()
769 #define SCRATCH_BUFFER_SIZE(bp) \ argument
770 (CHIP_IS_E1(bp) ? 0x10000 : (CHIP_IS_E1H(bp) ? 0x20000 : 0x28000))
772 void bnx2x_fw_dump_lvl(struct bnx2x *bp, const char *lvl) in bnx2x_fw_dump_lvl() argument
779 if (BP_NOMCP(bp)) { in bnx2x_fw_dump_lvl()
783 netdev_printk(lvl, bp->dev, "bc %d.%d.%d\n", in bnx2x_fw_dump_lvl()
784 (bp->common.bc_ver & 0xff0000) >> 16, in bnx2x_fw_dump_lvl()
785 (bp->common.bc_ver & 0xff00) >> 8, in bnx2x_fw_dump_lvl()
786 (bp->common.bc_ver & 0xff)); in bnx2x_fw_dump_lvl()
788 if (pci_channel_offline(bp->pdev)) { in bnx2x_fw_dump_lvl()
793 val = REG_RD(bp, MCP_REG_MCPR_CPU_PROGRAM_COUNTER); in bnx2x_fw_dump_lvl()
794 if (val == REG_RD(bp, MCP_REG_MCPR_CPU_PROGRAM_COUNTER)) in bnx2x_fw_dump_lvl()
797 if (BP_PATH(bp) == 0) in bnx2x_fw_dump_lvl()
798 trace_shmem_base = bp->common.shmem_base; in bnx2x_fw_dump_lvl()
800 trace_shmem_base = SHMEM2_RD(bp, other_shmem_base_addr); in bnx2x_fw_dump_lvl()
803 if (trace_shmem_base < MCPR_SCRATCH_BASE(bp) + MCPR_TRACE_BUFFER_SIZE || in bnx2x_fw_dump_lvl()
804 trace_shmem_base >= MCPR_SCRATCH_BASE(bp) + in bnx2x_fw_dump_lvl()
805 SCRATCH_BUFFER_SIZE(bp)) { in bnx2x_fw_dump_lvl()
814 mark = REG_RD(bp, addr); in bnx2x_fw_dump_lvl()
822 mark = REG_RD(bp, addr); in bnx2x_fw_dump_lvl()
823 mark = MCPR_SCRATCH_BASE(bp) + ((mark + 0x3) & ~0x3) - 0x08000000; in bnx2x_fw_dump_lvl()
835 data[word] = htonl(REG_RD(bp, offset + 4*word)); in bnx2x_fw_dump_lvl()
843 data[word] = htonl(REG_RD(bp, offset + 4*word)); in bnx2x_fw_dump_lvl()
850 static void bnx2x_fw_dump(struct bnx2x *bp) in bnx2x_fw_dump() argument
852 bnx2x_fw_dump_lvl(bp, KERN_ERR); in bnx2x_fw_dump()
855 static void bnx2x_hc_int_disable(struct bnx2x *bp) in bnx2x_hc_int_disable() argument
857 int port = BP_PORT(bp); in bnx2x_hc_int_disable()
859 u32 val = REG_RD(bp, addr); in bnx2x_hc_int_disable()
865 if (CHIP_IS_E1(bp)) { in bnx2x_hc_int_disable()
870 REG_WR(bp, HC_REG_INT_MASK + port*4, 0); in bnx2x_hc_int_disable()
885 REG_WR(bp, addr, val); in bnx2x_hc_int_disable()
886 if (REG_RD(bp, addr) != val) in bnx2x_hc_int_disable()
890 static void bnx2x_igu_int_disable(struct bnx2x *bp) in bnx2x_igu_int_disable() argument
892 u32 val = REG_RD(bp, IGU_REG_PF_CONFIGURATION); in bnx2x_igu_int_disable()
900 REG_WR(bp, IGU_REG_PF_CONFIGURATION, val); in bnx2x_igu_int_disable()
901 if (REG_RD(bp, IGU_REG_PF_CONFIGURATION) != val) in bnx2x_igu_int_disable()
905 static void bnx2x_int_disable(struct bnx2x *bp) in bnx2x_int_disable() argument
907 if (bp->common.int_block == INT_BLOCK_HC) in bnx2x_int_disable()
908 bnx2x_hc_int_disable(bp); in bnx2x_int_disable()
910 bnx2x_igu_int_disable(bp); in bnx2x_int_disable()
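
Both interrupt-disable paths above end the same way: write the new configuration value, read it back, and warn if the hardware did not latch it, with bnx2x_int_disable() picking the HC or IGU variant from bp->common.int_block. A minimal write-and-verify sketch, with a fake register and invented enable bits, is below.

#include <stdint.h>
#include <stdio.h>

static uint32_t fake_int_cfg;           /* stands in for the HC/IGU configuration register */

static void reg_wr(uint32_t val)        { fake_int_cfg = val; }
static uint32_t reg_rd(void)            { return fake_int_cfg; }

/* Clear the enable bits, then read back to confirm the write actually stuck. */
static void int_disable(void)
{
        uint32_t val = reg_rd();

        val &= ~0xfu;                   /* hypothetical enable bits to clear */
        reg_wr(val);

        if (reg_rd() != val)
                fprintf(stderr, "interrupt config register did not latch the new value\n");
}

int main(void)
{
        fake_int_cfg = 0x1f;
        int_disable();
        printf("cfg=0x%x\n", fake_int_cfg);     /* prints 0x10 */
        return 0;
}
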
913 void bnx2x_panic_dump(struct bnx2x *bp, bool disable_int) in bnx2x_panic_dump() argument
918 int func = BP_FUNC(bp); in bnx2x_panic_dump()
923 if (IS_PF(bp) && disable_int) in bnx2x_panic_dump()
924 bnx2x_int_disable(bp); in bnx2x_panic_dump()
926 bp->stats_state = STATS_STATE_DISABLED; in bnx2x_panic_dump()
927 bp->eth_stats.unrecoverable_error++; in bnx2x_panic_dump()
934 if (IS_PF(bp)) { in bnx2x_panic_dump()
935 struct host_sp_status_block *def_sb = bp->def_status_blk; in bnx2x_panic_dump()
939 bp->def_idx, bp->def_att_idx, bp->attn_state, in bnx2x_panic_dump()
940 bp->spq_prod_idx, bp->stats_counter); in bnx2x_panic_dump()
957 REG_RD(bp, BAR_CSTRORM_INTMEM + cstorm_offset + in bnx2x_panic_dump()
970 for_each_eth_queue(bp, i) { in bnx2x_panic_dump()
971 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_panic_dump()
976 CHIP_IS_E1x(bp) ? in bnx2x_panic_dump()
980 CHIP_IS_E1x(bp) ? in bnx2x_panic_dump()
987 if (!bp->fp) in bnx2x_panic_dump()
1020 loop = CHIP_IS_E1x(bp) ? in bnx2x_panic_dump()
1041 if (IS_VF(bp)) in bnx2x_panic_dump()
1045 data_size = CHIP_IS_E1x(bp) ? in bnx2x_panic_dump()
1049 sb_data_p = CHIP_IS_E1x(bp) ? in bnx2x_panic_dump()
1054 *(sb_data_p + j) = REG_RD(bp, BAR_CSTRORM_INTMEM + in bnx2x_panic_dump()
1058 if (!CHIP_IS_E1x(bp)) { in bnx2x_panic_dump()
1095 if (IS_PF(bp)) { in bnx2x_panic_dump()
1097 BNX2X_ERR("eq cons %x prod %x\n", bp->eq_cons, bp->eq_prod); in bnx2x_panic_dump()
1099 u32 *data = (u32 *)&bp->eq_ring[i].message.data; in bnx2x_panic_dump()
1102 i, bp->eq_ring[i].message.opcode, in bnx2x_panic_dump()
1103 bp->eq_ring[i].message.error); in bnx2x_panic_dump()
1111 for_each_valid_rx_queue(bp, i) { in bnx2x_panic_dump()
1112 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_panic_dump()
1114 if (!bp->fp) in bnx2x_panic_dump()
1151 for_each_valid_tx_queue(bp, i) { in bnx2x_panic_dump()
1152 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_panic_dump()
1154 if (!bp->fp) in bnx2x_panic_dump()
1189 if (IS_PF(bp)) { in bnx2x_panic_dump()
1190 int tmp_msg_en = bp->msg_enable; in bnx2x_panic_dump()
1192 bnx2x_fw_dump(bp); in bnx2x_panic_dump()
1193 bp->msg_enable |= NETIF_MSG_HW; in bnx2x_panic_dump()
1195 bnx2x_idle_chk(bp); in bnx2x_panic_dump()
1197 bnx2x_idle_chk(bp); in bnx2x_panic_dump()
1198 bp->msg_enable = tmp_msg_en; in bnx2x_panic_dump()
1199 bnx2x_mc_assert(bp); in bnx2x_panic_dump()
1228 static void bnx2x_pbf_pN_buf_flushed(struct bnx2x *bp, in bnx2x_pbf_pN_buf_flushed() argument
1235 crd_freed = crd_freed_start = REG_RD(bp, regs->crd_freed); in bnx2x_pbf_pN_buf_flushed()
1236 crd = crd_start = REG_RD(bp, regs->crd); in bnx2x_pbf_pN_buf_flushed()
1237 init_crd = REG_RD(bp, regs->init_crd); in bnx2x_pbf_pN_buf_flushed()
1247 crd = REG_RD(bp, regs->crd); in bnx2x_pbf_pN_buf_flushed()
1248 crd_freed = REG_RD(bp, regs->crd_freed); in bnx2x_pbf_pN_buf_flushed()
1263 static void bnx2x_pbf_pN_cmd_flushed(struct bnx2x *bp, in bnx2x_pbf_pN_cmd_flushed() argument
1270 occup = to_free = REG_RD(bp, regs->lines_occup); in bnx2x_pbf_pN_cmd_flushed()
1271 freed = freed_start = REG_RD(bp, regs->lines_freed); in bnx2x_pbf_pN_cmd_flushed()
1279 occup = REG_RD(bp, regs->lines_occup); in bnx2x_pbf_pN_cmd_flushed()
1280 freed = REG_RD(bp, regs->lines_freed); in bnx2x_pbf_pN_cmd_flushed()
1295 static u32 bnx2x_flr_clnup_reg_poll(struct bnx2x *bp, u32 reg, in bnx2x_flr_clnup_reg_poll() argument
1301 while ((val = REG_RD(bp, reg)) != expected && cur_cnt--) in bnx2x_flr_clnup_reg_poll()
1307 int bnx2x_flr_clnup_poll_hw_counter(struct bnx2x *bp, u32 reg, in bnx2x_flr_clnup_poll_hw_counter() argument
1310 u32 val = bnx2x_flr_clnup_reg_poll(bp, reg, 0, poll_cnt); in bnx2x_flr_clnup_poll_hw_counter()
1319 u32 bnx2x_flr_clnup_poll_count(struct bnx2x *bp) in bnx2x_flr_clnup_poll_count() argument
1322 if (CHIP_REV_IS_EMUL(bp)) in bnx2x_flr_clnup_poll_count()
1325 if (CHIP_REV_IS_FPGA(bp)) in bnx2x_flr_clnup_poll_count()
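
bnx2x_flr_clnup_reg_poll() re-reads a register until it reaches the expected value or the poll budget runs out, bnx2x_flr_clnup_poll_hw_counter() uses it to wait for usage counters to drain to zero, and bnx2x_flr_clnup_poll_count() merely adjusts the budget for emulation and FPGA. A small standalone sketch of the poll-until-expected helper, with a fake draining counter, is below.

#include <stdint.h>
#include <stdio.h>

static volatile uint32_t fake_counter = 3;      /* pretend HW usage counter draining to 0 */

static uint32_t read_reg(void)
{
        uint32_t v = fake_counter;

        if (fake_counter)               /* simulate the counter draining one step per read */
                fake_counter--;
        return v;
}

/* Poll until the register reads `expected` or `poll_cnt` attempts are used up;
 * return the last value read, like the flr cleanup poll helper. */
static uint32_t poll_reg(uint32_t expected, uint32_t poll_cnt)
{
        uint32_t val;

        while ((val = read_reg()) != expected && poll_cnt--)
                ;                       /* real code sleeps between reads */
        return val;
}

int main(void)
{
        uint32_t left = poll_reg(0, 10);

        if (left)
                printf("counter still busy: %u\n", left);
        else
                printf("drained\n");
        return 0;
}
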
1331 void bnx2x_tx_hw_flushed(struct bnx2x *bp, u32 poll_count) in bnx2x_tx_hw_flushed() argument
1334 {0, (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1337 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1340 {1, (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1343 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1346 {4, (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1349 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1355 {0, (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1358 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1361 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1364 {1, (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1367 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1370 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1373 {4, (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1376 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1379 (CHIP_IS_E3B0(bp)) ? in bnx2x_tx_hw_flushed()
1388 bnx2x_pbf_pN_cmd_flushed(bp, &cmd_regs[i], poll_count); in bnx2x_tx_hw_flushed()
1392 bnx2x_pbf_pN_buf_flushed(bp, &buf_regs[i], poll_count); in bnx2x_tx_hw_flushed()
1404 int bnx2x_send_final_clnup(struct bnx2x *bp, u8 clnup_func, u32 poll_cnt) in bnx2x_send_final_clnup() argument
1410 if (REG_RD(bp, comp_addr)) { in bnx2x_send_final_clnup()
1421 REG_WR(bp, XSDM_REG_OPERATION_GEN, op_gen_command); in bnx2x_send_final_clnup()
1423 if (bnx2x_flr_clnup_reg_poll(bp, comp_addr, 1, poll_cnt) != 1) { in bnx2x_send_final_clnup()
1426 (REG_RD(bp, comp_addr))); in bnx2x_send_final_clnup()
1431 REG_WR(bp, comp_addr, 0); in bnx2x_send_final_clnup()
1446 static int bnx2x_poll_hw_usage_counters(struct bnx2x *bp, u32 poll_cnt) in bnx2x_poll_hw_usage_counters() argument
1449 if (bnx2x_flr_clnup_poll_hw_counter(bp, in bnx2x_poll_hw_usage_counters()
1456 if (bnx2x_flr_clnup_poll_hw_counter(bp, in bnx2x_poll_hw_usage_counters()
1463 if (bnx2x_flr_clnup_poll_hw_counter(bp, in bnx2x_poll_hw_usage_counters()
1464 QM_REG_PF_USG_CNT_0 + 4*BP_FUNC(bp), in bnx2x_poll_hw_usage_counters()
1470 if (bnx2x_flr_clnup_poll_hw_counter(bp, in bnx2x_poll_hw_usage_counters()
1471 TM_REG_LIN0_VNIC_UC + 4*BP_PORT(bp), in bnx2x_poll_hw_usage_counters()
1475 if (bnx2x_flr_clnup_poll_hw_counter(bp, in bnx2x_poll_hw_usage_counters()
1476 TM_REG_LIN0_NUM_SCANS + 4*BP_PORT(bp), in bnx2x_poll_hw_usage_counters()
1482 if (bnx2x_flr_clnup_poll_hw_counter(bp, in bnx2x_poll_hw_usage_counters()
1483 dmae_reg_go_c[INIT_DMAE_C(bp)], in bnx2x_poll_hw_usage_counters()
1491 static void bnx2x_hw_enable_status(struct bnx2x *bp) in bnx2x_hw_enable_status() argument
1495 val = REG_RD(bp, CFC_REG_WEAK_ENABLE_PF); in bnx2x_hw_enable_status()
1498 val = REG_RD(bp, PBF_REG_DISABLE_PF); in bnx2x_hw_enable_status()
1501 val = REG_RD(bp, IGU_REG_PCI_PF_MSI_EN); in bnx2x_hw_enable_status()
1504 val = REG_RD(bp, IGU_REG_PCI_PF_MSIX_EN); in bnx2x_hw_enable_status()
1507 val = REG_RD(bp, IGU_REG_PCI_PF_MSIX_FUNC_MASK); in bnx2x_hw_enable_status()
1510 val = REG_RD(bp, PGLUE_B_REG_SHADOW_BME_PF_7_0_CLR); in bnx2x_hw_enable_status()
1513 val = REG_RD(bp, PGLUE_B_REG_FLR_REQUEST_PF_7_0_CLR); in bnx2x_hw_enable_status()
1516 val = REG_RD(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_MASTER); in bnx2x_hw_enable_status()
1521 static int bnx2x_pf_flr_clnup(struct bnx2x *bp) in bnx2x_pf_flr_clnup() argument
1523 u32 poll_cnt = bnx2x_flr_clnup_poll_count(bp); in bnx2x_pf_flr_clnup()
1525 DP(BNX2X_MSG_SP, "Cleanup after FLR PF[%d]\n", BP_ABS_FUNC(bp)); in bnx2x_pf_flr_clnup()
1528 REG_WR(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_TARGET_READ, 1); in bnx2x_pf_flr_clnup()
1532 if (bnx2x_poll_hw_usage_counters(bp, poll_cnt)) in bnx2x_pf_flr_clnup()
1538 if (bnx2x_send_final_clnup(bp, (u8)BP_FUNC(bp), poll_cnt)) in bnx2x_pf_flr_clnup()
1544 bnx2x_tx_hw_flushed(bp, poll_cnt); in bnx2x_pf_flr_clnup()
1550 if (bnx2x_is_pcie_pending(bp->pdev)) in bnx2x_pf_flr_clnup()
1554 bnx2x_hw_enable_status(bp); in bnx2x_pf_flr_clnup()
1560 REG_WR(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_MASTER, 1); in bnx2x_pf_flr_clnup()
1565 static void bnx2x_hc_int_enable(struct bnx2x *bp) in bnx2x_hc_int_enable() argument
1567 int port = BP_PORT(bp); in bnx2x_hc_int_enable()
1569 u32 val = REG_RD(bp, addr); in bnx2x_hc_int_enable()
1570 bool msix = (bp->flags & USING_MSIX_FLAG) ? true : false; in bnx2x_hc_int_enable()
1571 bool single_msix = (bp->flags & USING_SINGLE_MSIX_FLAG) ? true : false; in bnx2x_hc_int_enable()
1572 bool msi = (bp->flags & USING_MSI_FLAG) ? true : false; in bnx2x_hc_int_enable()
1592 if (!CHIP_IS_E1(bp)) { in bnx2x_hc_int_enable()
1596 REG_WR(bp, addr, val); in bnx2x_hc_int_enable()
1602 if (CHIP_IS_E1(bp)) in bnx2x_hc_int_enable()
1603 REG_WR(bp, HC_REG_INT_MASK + port*4, 0x1FFFF); in bnx2x_hc_int_enable()
1609 REG_WR(bp, addr, val); in bnx2x_hc_int_enable()
1615 if (!CHIP_IS_E1(bp)) { in bnx2x_hc_int_enable()
1617 if (IS_MF(bp)) { in bnx2x_hc_int_enable()
1618 val = (0xee0f | (1 << (BP_VN(bp) + 4))); in bnx2x_hc_int_enable()
1619 if (bp->port.pmf) in bnx2x_hc_int_enable()
1625 REG_WR(bp, HC_REG_TRAILING_EDGE_0 + port*8, val); in bnx2x_hc_int_enable()
1626 REG_WR(bp, HC_REG_LEADING_EDGE_0 + port*8, val); in bnx2x_hc_int_enable()
1630 static void bnx2x_igu_int_enable(struct bnx2x *bp) in bnx2x_igu_int_enable() argument
1633 bool msix = (bp->flags & USING_MSIX_FLAG) ? true : false; in bnx2x_igu_int_enable()
1634 bool single_msix = (bp->flags & USING_SINGLE_MSIX_FLAG) ? true : false; in bnx2x_igu_int_enable()
1635 bool msi = (bp->flags & USING_MSI_FLAG) ? true : false; in bnx2x_igu_int_enable()
1637 val = REG_RD(bp, IGU_REG_PF_CONFIGURATION); in bnx2x_igu_int_enable()
1661 REG_WR(bp, IGU_REG_PF_CONFIGURATION, val); in bnx2x_igu_int_enable()
1662 bnx2x_ack_int(bp); in bnx2x_igu_int_enable()
1670 REG_WR(bp, IGU_REG_PF_CONFIGURATION, val); in bnx2x_igu_int_enable()
1673 pci_intx(bp->pdev, true); in bnx2x_igu_int_enable()
1678 if (IS_MF(bp)) { in bnx2x_igu_int_enable()
1679 val = (0xee0f | (1 << (BP_VN(bp) + 4))); in bnx2x_igu_int_enable()
1680 if (bp->port.pmf) in bnx2x_igu_int_enable()
1686 REG_WR(bp, IGU_REG_TRAILING_EDGE_LATCH, val); in bnx2x_igu_int_enable()
1687 REG_WR(bp, IGU_REG_LEADING_EDGE_LATCH, val); in bnx2x_igu_int_enable()
1690 void bnx2x_int_enable(struct bnx2x *bp) in bnx2x_int_enable() argument
1692 if (bp->common.int_block == INT_BLOCK_HC) in bnx2x_int_enable()
1693 bnx2x_hc_int_enable(bp); in bnx2x_int_enable()
1695 bnx2x_igu_int_enable(bp); in bnx2x_int_enable()
1698 void bnx2x_int_disable_sync(struct bnx2x *bp, int disable_hw) in bnx2x_int_disable_sync() argument
1700 int msix = (bp->flags & USING_MSIX_FLAG) ? 1 : 0; in bnx2x_int_disable_sync()
1705 bnx2x_int_disable(bp); in bnx2x_int_disable_sync()
1709 synchronize_irq(bp->msix_table[0].vector); in bnx2x_int_disable_sync()
1711 if (CNIC_SUPPORT(bp)) in bnx2x_int_disable_sync()
1713 for_each_eth_queue(bp, i) in bnx2x_int_disable_sync()
1714 synchronize_irq(bp->msix_table[offset++].vector); in bnx2x_int_disable_sync()
1716 synchronize_irq(bp->pdev->irq); in bnx2x_int_disable_sync()
1719 cancel_delayed_work(&bp->sp_task); in bnx2x_int_disable_sync()
1720 cancel_delayed_work(&bp->period_task); in bnx2x_int_disable_sync()
1731 static bool bnx2x_trylock_hw_lock(struct bnx2x *bp, u32 resource) in bnx2x_trylock_hw_lock() argument
1735 int func = BP_FUNC(bp); in bnx2x_trylock_hw_lock()
1756 REG_WR(bp, hw_lock_control_reg + 4, resource_bit); in bnx2x_trylock_hw_lock()
1757 lock_status = REG_RD(bp, hw_lock_control_reg); in bnx2x_trylock_hw_lock()
1774 static int bnx2x_get_leader_lock_resource(struct bnx2x *bp) in bnx2x_get_leader_lock_resource() argument
1776 if (BP_PATH(bp)) in bnx2x_get_leader_lock_resource()
1789 static bool bnx2x_trylock_leader_lock(struct bnx2x *bp) in bnx2x_trylock_leader_lock() argument
1791 return bnx2x_trylock_hw_lock(bp, bnx2x_get_leader_lock_resource(bp)); in bnx2x_trylock_leader_lock()
1794 static void bnx2x_cnic_cfc_comp(struct bnx2x *bp, int cid, u8 err);
1797 static int bnx2x_schedule_sp_task(struct bnx2x *bp) in bnx2x_schedule_sp_task() argument
1803 atomic_set(&bp->interrupt_occurred, 1); in bnx2x_schedule_sp_task()
1812 return queue_delayed_work(bnx2x_wq, &bp->sp_task, 0); in bnx2x_schedule_sp_task()
1817 struct bnx2x *bp = fp->bp; in bnx2x_sp_event() local
1821 struct bnx2x_queue_sp_obj *q_obj = &bnx2x_sp_obj(bp, fp).q_obj; in bnx2x_sp_event()
1825 fp->index, cid, command, bp->state, in bnx2x_sp_event()
1833 bnx2x_iov_set_queue_sp_obj(bp, cid, &q_obj); in bnx2x_sp_event()
1878 q_obj->complete_cmd(bp, q_obj, drv_cmd)) in bnx2x_sp_event()
1893 atomic_inc(&bp->cq_spq_left); in bnx2x_sp_event()
1897 DP(BNX2X_MSG_SP, "bp->cq_spq_left %x\n", atomic_read(&bp->cq_spq_left)); in bnx2x_sp_event()
1900 (!!test_bit(BNX2X_AFEX_FCOE_Q_UPDATE_PENDING, &bp->sp_state))) { in bnx2x_sp_event()
1911 set_bit(BNX2X_AFEX_PENDING_VIFSET_MCP_ACK, &bp->sp_state); in bnx2x_sp_event()
1913 clear_bit(BNX2X_AFEX_FCOE_Q_UPDATE_PENDING, &bp->sp_state); in bnx2x_sp_event()
1917 bnx2x_schedule_sp_task(bp); in bnx2x_sp_event()
1925 struct bnx2x *bp = netdev_priv(dev_instance); in bnx2x_interrupt() local
1926 u16 status = bnx2x_ack_int(bp); in bnx2x_interrupt()
1939 if (unlikely(bp->panic)) in bnx2x_interrupt()
1943 for_each_eth_queue(bp, i) { in bnx2x_interrupt()
1944 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_interrupt()
1946 mask = 0x2 << (fp->index + CNIC_SUPPORT(bp)); in bnx2x_interrupt()
1952 napi_schedule_irqoff(&bnx2x_fp(bp, fp->index, napi)); in bnx2x_interrupt()
1957 if (CNIC_SUPPORT(bp)) { in bnx2x_interrupt()
1963 c_ops = rcu_dereference(bp->cnic_ops); in bnx2x_interrupt()
1964 if (c_ops && (bp->cnic_eth_dev.drv_state & in bnx2x_interrupt()
1966 c_ops->cnic_handler(bp->cnic_data, NULL); in bnx2x_interrupt()
1978 bnx2x_schedule_sp_task(bp); in bnx2x_interrupt()
1998 int bnx2x_acquire_hw_lock(struct bnx2x *bp, u32 resource) in bnx2x_acquire_hw_lock() argument
2002 int func = BP_FUNC(bp); in bnx2x_acquire_hw_lock()
2021 lock_status = REG_RD(bp, hw_lock_control_reg); in bnx2x_acquire_hw_lock()
2031 REG_WR(bp, hw_lock_control_reg + 4, resource_bit); in bnx2x_acquire_hw_lock()
2032 lock_status = REG_RD(bp, hw_lock_control_reg); in bnx2x_acquire_hw_lock()
2042 int bnx2x_release_leader_lock(struct bnx2x *bp) in bnx2x_release_leader_lock() argument
2044 return bnx2x_release_hw_lock(bp, bnx2x_get_leader_lock_resource(bp)); in bnx2x_release_leader_lock()
2047 int bnx2x_release_hw_lock(struct bnx2x *bp, u32 resource) in bnx2x_release_hw_lock() argument
2051 int func = BP_FUNC(bp); in bnx2x_release_hw_lock()
2069 lock_status = REG_RD(bp, hw_lock_control_reg); in bnx2x_release_hw_lock()
2076 REG_WR(bp, hw_lock_control_reg, resource_bit); in bnx2x_release_hw_lock()
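
The hardware-lock entries share one shape: pick the per-function control register, read lock_status, try to take the resource by writing its bit to control_reg + 4, and read back to confirm ownership; releasing writes the same bit to the control register itself, and bnx2x_trylock_hw_lock() makes only a single attempt. The sketch below models that set/clear-register lock with an in-memory bitmask; the register model and names are illustrative only.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Fake lock block: writes to the "set" address take bits, writes to the "clear"
 * address drop them, and reads return the bits currently held. */
static uint32_t lock_bits;

static void lock_set_write(uint32_t bit)        { lock_bits |= bit;  }
static void lock_clear_write(uint32_t bit)      { lock_bits &= ~bit; }
static uint32_t lock_read(void)                 { return lock_bits;  }

/* Try once: write the bit to the set register, then read back to see if we own it. */
static bool trylock_resource(uint32_t resource_bit)
{
        if (lock_read() & resource_bit)         /* already taken */
                return false;
        lock_set_write(resource_bit);
        return (lock_read() & resource_bit) != 0;
}

static void release_resource(uint32_t resource_bit)
{
        lock_clear_write(resource_bit);
}

int main(void)
{
        uint32_t gpio_bit = 1u << 5;            /* hypothetical resource bit */

        printf("first try:     %d\n", trylock_resource(gpio_bit));      /* 1 */
        printf("second try:    %d\n", trylock_resource(gpio_bit));      /* 0: still held */
        release_resource(gpio_bit);
        printf("after release: %d\n", trylock_resource(gpio_bit));      /* 1 */
        return 0;
}
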
2080 int bnx2x_get_gpio(struct bnx2x *bp, int gpio_num, u8 port) in bnx2x_get_gpio() argument
2083 int gpio_port = (REG_RD(bp, NIG_REG_PORT_SWAP) && in bnx2x_get_gpio()
2084 REG_RD(bp, NIG_REG_STRAP_OVERRIDE)) ^ port; in bnx2x_get_gpio()
2097 gpio_reg = REG_RD(bp, MISC_REG_GPIO); in bnx2x_get_gpio()
2108 int bnx2x_set_gpio(struct bnx2x *bp, int gpio_num, u32 mode, u8 port) in bnx2x_set_gpio() argument
2111 int gpio_port = (REG_RD(bp, NIG_REG_PORT_SWAP) && in bnx2x_set_gpio()
2112 REG_RD(bp, NIG_REG_STRAP_OVERRIDE)) ^ port; in bnx2x_set_gpio()
2123 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_GPIO); in bnx2x_set_gpio()
2125 gpio_reg = (REG_RD(bp, MISC_REG_GPIO) & MISC_REGISTERS_GPIO_FLOAT); in bnx2x_set_gpio()
2158 REG_WR(bp, MISC_REG_GPIO, gpio_reg); in bnx2x_set_gpio()
2159 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_GPIO); in bnx2x_set_gpio()
2164 int bnx2x_set_mult_gpio(struct bnx2x *bp, u8 pins, u32 mode) in bnx2x_set_mult_gpio() argument
2171 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_GPIO); in bnx2x_set_mult_gpio()
2173 gpio_reg = REG_RD(bp, MISC_REG_GPIO); in bnx2x_set_mult_gpio()
2204 REG_WR(bp, MISC_REG_GPIO, gpio_reg); in bnx2x_set_mult_gpio()
2206 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_GPIO); in bnx2x_set_mult_gpio()
2211 int bnx2x_set_gpio_int(struct bnx2x *bp, int gpio_num, u32 mode, u8 port) in bnx2x_set_gpio_int() argument
2214 int gpio_port = (REG_RD(bp, NIG_REG_PORT_SWAP) && in bnx2x_set_gpio_int()
2215 REG_RD(bp, NIG_REG_STRAP_OVERRIDE)) ^ port; in bnx2x_set_gpio_int()
2226 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_GPIO); in bnx2x_set_gpio_int()
2228 gpio_reg = REG_RD(bp, MISC_REG_GPIO_INT); in bnx2x_set_gpio_int()
2253 REG_WR(bp, MISC_REG_GPIO_INT, gpio_reg); in bnx2x_set_gpio_int()
2254 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_GPIO); in bnx2x_set_gpio_int()
2259 static int bnx2x_set_spio(struct bnx2x *bp, int spio, u32 mode) in bnx2x_set_spio() argument
2269 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_SPIO); in bnx2x_set_spio()
2271 spio_reg = (REG_RD(bp, MISC_REG_SPIO) & MISC_SPIO_FLOAT); in bnx2x_set_spio()
2298 REG_WR(bp, MISC_REG_SPIO, spio_reg); in bnx2x_set_spio()
2299 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_SPIO); in bnx2x_set_spio()
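
The GPIO and SPIO setters all follow read-modify-write under the HW_LOCK_RESOURCE_GPIO/_SPIO lock: read MISC_REG_GPIO or MISC_REG_SPIO, touch only the requested pin's bits, write the merged value back, and drop the lock. A standalone sketch of that pattern under a plain mutex is below; the bit layout is invented.

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static pthread_mutex_t pin_lock = PTHREAD_MUTEX_INITIALIZER;    /* stands in for the HW lock */
static uint32_t pin_reg;                /* fake MISC_REG_GPIO-style register */

enum pin_mode { PIN_OUTPUT_LOW, PIN_OUTPUT_HIGH, PIN_INPUT_FLOAT };

/* Read-modify-write one pin's field without disturbing the others. */
static void set_pin(int pin, enum pin_mode mode)
{
        uint32_t set_bit   = 1u << pin;         /* invented layout: bit n drives pin n   */
        uint32_t float_bit = 1u << (pin + 16);  /* invented layout: bit n+16 floats pin n */
        uint32_t reg;

        pthread_mutex_lock(&pin_lock);
        reg = pin_reg;

        switch (mode) {
        case PIN_OUTPUT_LOW:
                reg &= ~(set_bit | float_bit);
                break;
        case PIN_OUTPUT_HIGH:
                reg |= set_bit;
                reg &= ~float_bit;
                break;
        case PIN_INPUT_FLOAT:
                reg |= float_bit;
                break;
        }

        pin_reg = reg;                          /* write the merged value back */
        pthread_mutex_unlock(&pin_lock);
}

int main(void)
{
        set_pin(3, PIN_OUTPUT_HIGH);
        set_pin(5, PIN_INPUT_FLOAT);
        printf("reg=0x%08x\n", pin_reg);        /* prints 0x00200008 */
        return 0;
}
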
2304 void bnx2x_calc_fc_adv(struct bnx2x *bp) in bnx2x_calc_fc_adv() argument
2306 u8 cfg_idx = bnx2x_get_link_cfg_idx(bp); in bnx2x_calc_fc_adv()
2308 bp->port.advertising[cfg_idx] &= ~(ADVERTISED_Asym_Pause | in bnx2x_calc_fc_adv()
2310 switch (bp->link_vars.ieee_fc & in bnx2x_calc_fc_adv()
2313 bp->port.advertising[cfg_idx] |= (ADVERTISED_Asym_Pause | in bnx2x_calc_fc_adv()
2318 bp->port.advertising[cfg_idx] |= ADVERTISED_Asym_Pause; in bnx2x_calc_fc_adv()
2326 static void bnx2x_set_requested_fc(struct bnx2x *bp) in bnx2x_set_requested_fc() argument
2332 if (CHIP_IS_E1x(bp) && (bp->dev->mtu > 5000)) in bnx2x_set_requested_fc()
2333 bp->link_params.req_fc_auto_adv = BNX2X_FLOW_CTRL_TX; in bnx2x_set_requested_fc()
2335 bp->link_params.req_fc_auto_adv = BNX2X_FLOW_CTRL_BOTH; in bnx2x_set_requested_fc()
2338 static void bnx2x_init_dropless_fc(struct bnx2x *bp) in bnx2x_init_dropless_fc() argument
2342 if (!CHIP_IS_E1(bp) && bp->dropless_fc && bp->link_vars.link_up) { in bnx2x_init_dropless_fc()
2343 if (bp->link_vars.flow_ctrl & BNX2X_FLOW_CTRL_TX) in bnx2x_init_dropless_fc()
2346 REG_WR(bp, BAR_USTRORM_INTMEM + in bnx2x_init_dropless_fc()
2347 USTORM_ETH_PAUSE_ENABLED_OFFSET(BP_PORT(bp)), in bnx2x_init_dropless_fc()
2355 int bnx2x_initial_phy_init(struct bnx2x *bp, int load_mode) in bnx2x_initial_phy_init() argument
2357 int rc, cfx_idx = bnx2x_get_link_cfg_idx(bp); in bnx2x_initial_phy_init()
2358 u16 req_line_speed = bp->link_params.req_line_speed[cfx_idx]; in bnx2x_initial_phy_init()
2360 if (!BP_NOMCP(bp)) { in bnx2x_initial_phy_init()
2361 bnx2x_set_requested_fc(bp); in bnx2x_initial_phy_init()
2362 bnx2x_acquire_phy_lock(bp); in bnx2x_initial_phy_init()
2365 struct link_params *lp = &bp->link_params; in bnx2x_initial_phy_init()
2384 struct link_params *lp = &bp->link_params; in bnx2x_initial_phy_init()
2388 rc = bnx2x_phy_init(&bp->link_params, &bp->link_vars); in bnx2x_initial_phy_init()
2390 bnx2x_release_phy_lock(bp); in bnx2x_initial_phy_init()
2392 bnx2x_init_dropless_fc(bp); in bnx2x_initial_phy_init()
2394 bnx2x_calc_fc_adv(bp); in bnx2x_initial_phy_init()
2396 if (bp->link_vars.link_up) { in bnx2x_initial_phy_init()
2397 bnx2x_stats_handle(bp, STATS_EVENT_LINK_UP); in bnx2x_initial_phy_init()
2398 bnx2x_link_report(bp); in bnx2x_initial_phy_init()
2400 queue_delayed_work(bnx2x_wq, &bp->period_task, 0); in bnx2x_initial_phy_init()
2401 bp->link_params.req_line_speed[cfx_idx] = req_line_speed; in bnx2x_initial_phy_init()
2408 void bnx2x_link_set(struct bnx2x *bp) in bnx2x_link_set() argument
2410 if (!BP_NOMCP(bp)) { in bnx2x_link_set()
2411 bnx2x_acquire_phy_lock(bp); in bnx2x_link_set()
2412 bnx2x_phy_init(&bp->link_params, &bp->link_vars); in bnx2x_link_set()
2413 bnx2x_release_phy_lock(bp); in bnx2x_link_set()
2415 bnx2x_init_dropless_fc(bp); in bnx2x_link_set()
2417 bnx2x_calc_fc_adv(bp); in bnx2x_link_set()
2422 static void bnx2x__link_reset(struct bnx2x *bp) in bnx2x__link_reset() argument
2424 if (!BP_NOMCP(bp)) { in bnx2x__link_reset()
2425 bnx2x_acquire_phy_lock(bp); in bnx2x__link_reset()
2426 bnx2x_lfa_reset(&bp->link_params, &bp->link_vars); in bnx2x__link_reset()
2427 bnx2x_release_phy_lock(bp); in bnx2x__link_reset()
2432 void bnx2x_force_link_reset(struct bnx2x *bp) in bnx2x_force_link_reset() argument
2434 bnx2x_acquire_phy_lock(bp); in bnx2x_force_link_reset()
2435 bnx2x_link_reset(&bp->link_params, &bp->link_vars, 1); in bnx2x_force_link_reset()
2436 bnx2x_release_phy_lock(bp); in bnx2x_force_link_reset()
2439 u8 bnx2x_link_test(struct bnx2x *bp, u8 is_serdes) in bnx2x_link_test() argument
2443 if (!BP_NOMCP(bp)) { in bnx2x_link_test()
2444 bnx2x_acquire_phy_lock(bp); in bnx2x_link_test()
2445 rc = bnx2x_test_link(&bp->link_params, &bp->link_vars, in bnx2x_link_test()
2447 bnx2x_release_phy_lock(bp); in bnx2x_link_test()
2463 static void bnx2x_calc_vn_min(struct bnx2x *bp, in bnx2x_calc_vn_min() argument
2469 for (vn = VN_0; vn < BP_MAX_VN_NUM(bp); vn++) { in bnx2x_calc_vn_min()
2470 u32 vn_cfg = bp->mf_config[vn]; in bnx2x_calc_vn_min()
2487 if (BNX2X_IS_ETS_ENABLED(bp)) { in bnx2x_calc_vn_min()
2501 static void bnx2x_calc_vn_max(struct bnx2x *bp, int vn, in bnx2x_calc_vn_max() argument
2505 u32 vn_cfg = bp->mf_config[vn]; in bnx2x_calc_vn_max()
2510 u32 maxCfg = bnx2x_extract_max_cfg(bp, vn_cfg); in bnx2x_calc_vn_max()
2512 if (IS_MF_PERCENT_BW(bp)) { in bnx2x_calc_vn_max()
2514 vn_max_rate = (bp->link_vars.line_speed * maxCfg) / 100; in bnx2x_calc_vn_max()
2525 static int bnx2x_get_cmng_fns_mode(struct bnx2x *bp) in bnx2x_get_cmng_fns_mode() argument
2527 if (CHIP_REV_IS_SLOW(bp)) in bnx2x_get_cmng_fns_mode()
2529 if (IS_MF(bp)) in bnx2x_get_cmng_fns_mode()
2535 void bnx2x_read_mf_cfg(struct bnx2x *bp) in bnx2x_read_mf_cfg() argument
2537 int vn, n = (CHIP_MODE_IS_4_PORT(bp) ? 2 : 1); in bnx2x_read_mf_cfg()
2539 if (BP_NOMCP(bp)) in bnx2x_read_mf_cfg()
2553 for (vn = VN_0; vn < BP_MAX_VN_NUM(bp); vn++) { in bnx2x_read_mf_cfg()
2554 int /*abs*/func = n * (2 * vn + BP_PORT(bp)) + BP_PATH(bp); in bnx2x_read_mf_cfg()
2559 bp->mf_config[vn] = in bnx2x_read_mf_cfg()
2560 MF_CFG_RD(bp, func_mf_config[func].config); in bnx2x_read_mf_cfg()
2562 if (bp->mf_config[BP_VN(bp)] & FUNC_MF_CFG_FUNC_DISABLED) { in bnx2x_read_mf_cfg()
2564 bp->flags |= MF_FUNC_DIS; in bnx2x_read_mf_cfg()
2567 bp->flags &= ~MF_FUNC_DIS; in bnx2x_read_mf_cfg()
2571 static void bnx2x_cmng_fns_init(struct bnx2x *bp, u8 read_cfg, u8 cmng_type) in bnx2x_cmng_fns_init() argument
2576 input.port_rate = bp->link_vars.line_speed; in bnx2x_cmng_fns_init()
2583 bnx2x_read_mf_cfg(bp); in bnx2x_cmng_fns_init()
2586 bnx2x_calc_vn_min(bp, &input); in bnx2x_cmng_fns_init()
2589 if (bp->port.pmf) in bnx2x_cmng_fns_init()
2590 for (vn = VN_0; vn < BP_MAX_VN_NUM(bp); vn++) in bnx2x_cmng_fns_init()
2591 bnx2x_calc_vn_max(bp, vn, &input); in bnx2x_cmng_fns_init()
2597 bnx2x_init_cmng(&input, &bp->cmng); in bnx2x_cmng_fns_init()
2606 static void storm_memset_cmng(struct bnx2x *bp, in storm_memset_cmng() argument
2616 __storm_memset_struct(bp, addr, size, (u32 *)&cmng->port); in storm_memset_cmng()
2618 for (vn = VN_0; vn < BP_MAX_VN_NUM(bp); vn++) { in storm_memset_cmng()
2619 int func = func_by_vn(bp, vn); in storm_memset_cmng()
2624 __storm_memset_struct(bp, addr, size, in storm_memset_cmng()
2630 __storm_memset_struct(bp, addr, size, in storm_memset_cmng()
2636 void bnx2x_set_local_cmng(struct bnx2x *bp) in bnx2x_set_local_cmng() argument
2638 int cmng_fns = bnx2x_get_cmng_fns_mode(bp); in bnx2x_set_local_cmng()
2641 bnx2x_cmng_fns_init(bp, false, cmng_fns); in bnx2x_set_local_cmng()
2642 storm_memset_cmng(bp, &bp->cmng, BP_PORT(bp)); in bnx2x_set_local_cmng()
2651 static void bnx2x_link_attn(struct bnx2x *bp) in bnx2x_link_attn() argument
2654 bnx2x_stats_handle(bp, STATS_EVENT_STOP); in bnx2x_link_attn()
2656 bnx2x_link_update(&bp->link_params, &bp->link_vars); in bnx2x_link_attn()
2658 bnx2x_init_dropless_fc(bp); in bnx2x_link_attn()
2660 if (bp->link_vars.link_up) { in bnx2x_link_attn()
2662 if (bp->link_vars.mac_type != MAC_TYPE_EMAC) { in bnx2x_link_attn()
2665 pstats = bnx2x_sp(bp, port_stats); in bnx2x_link_attn()
2670 if (bp->state == BNX2X_STATE_OPEN) in bnx2x_link_attn()
2671 bnx2x_stats_handle(bp, STATS_EVENT_LINK_UP); in bnx2x_link_attn()
2674 if (bp->link_vars.link_up && bp->link_vars.line_speed) in bnx2x_link_attn()
2675 bnx2x_set_local_cmng(bp); in bnx2x_link_attn()
2677 __bnx2x_link_report(bp); in bnx2x_link_attn()
2679 if (IS_MF(bp)) in bnx2x_link_attn()
2680 bnx2x_link_sync_notify(bp); in bnx2x_link_attn()
2683 void bnx2x__link_status_update(struct bnx2x *bp) in bnx2x__link_status_update() argument
2685 if (bp->state != BNX2X_STATE_OPEN) in bnx2x__link_status_update()
2689 if (IS_PF(bp)) { in bnx2x__link_status_update()
2690 bnx2x_dcbx_pmf_update(bp); in bnx2x__link_status_update()
2691 bnx2x_link_status_update(&bp->link_params, &bp->link_vars); in bnx2x__link_status_update()
2692 if (bp->link_vars.link_up) in bnx2x__link_status_update()
2693 bnx2x_stats_handle(bp, STATS_EVENT_LINK_UP); in bnx2x__link_status_update()
2695 bnx2x_stats_handle(bp, STATS_EVENT_STOP); in bnx2x__link_status_update()
2697 bnx2x_link_report(bp); in bnx2x__link_status_update()
2700 bp->port.supported[0] |= (SUPPORTED_10baseT_Half | in bnx2x__link_status_update()
2712 bp->port.advertising[0] = bp->port.supported[0]; in bnx2x__link_status_update()
2714 bp->link_params.bp = bp; in bnx2x__link_status_update()
2715 bp->link_params.port = BP_PORT(bp); in bnx2x__link_status_update()
2716 bp->link_params.req_duplex[0] = DUPLEX_FULL; in bnx2x__link_status_update()
2717 bp->link_params.req_flow_ctrl[0] = BNX2X_FLOW_CTRL_NONE; in bnx2x__link_status_update()
2718 bp->link_params.req_line_speed[0] = SPEED_10000; in bnx2x__link_status_update()
2719 bp->link_params.speed_cap_mask[0] = 0x7f0000; in bnx2x__link_status_update()
2720 bp->link_params.switch_cfg = SWITCH_CFG_10G; in bnx2x__link_status_update()
2721 bp->link_vars.mac_type = MAC_TYPE_BMAC; in bnx2x__link_status_update()
2722 bp->link_vars.line_speed = SPEED_10000; in bnx2x__link_status_update()
2723 bp->link_vars.link_status = in bnx2x__link_status_update()
2726 bp->link_vars.link_up = 1; in bnx2x__link_status_update()
2727 bp->link_vars.duplex = DUPLEX_FULL; in bnx2x__link_status_update()
2728 bp->link_vars.flow_ctrl = BNX2X_FLOW_CTRL_NONE; in bnx2x__link_status_update()
2729 __bnx2x_link_report(bp); in bnx2x__link_status_update()
2731 bnx2x_sample_bulletin(bp); in bnx2x__link_status_update()
2738 bnx2x_stats_handle(bp, STATS_EVENT_LINK_UP); in bnx2x__link_status_update()
2742 static int bnx2x_afex_func_update(struct bnx2x *bp, u16 vifid, in bnx2x_afex_func_update() argument
2749 func_params.f_obj = &bp->func_obj; in bnx2x_afex_func_update()
2761 if (bnx2x_func_state_change(bp, &func_params) < 0) in bnx2x_afex_func_update()
2762 bnx2x_fw_command(bp, DRV_MSG_CODE_AFEX_VIFSET_ACK, 0); in bnx2x_afex_func_update()
2767 static int bnx2x_afex_handle_vif_list_cmd(struct bnx2x *bp, u8 cmd_type, in bnx2x_afex_handle_vif_list_cmd() argument
2781 func_params.f_obj = &bp->func_obj; in bnx2x_afex_handle_vif_list_cmd()
2798 rc = bnx2x_func_state_change(bp, &func_params); in bnx2x_afex_handle_vif_list_cmd()
2800 bnx2x_fw_command(bp, drv_msg_code, 0); in bnx2x_afex_handle_vif_list_cmd()
2805 static void bnx2x_handle_afex_cmd(struct bnx2x *bp, u32 cmd) in bnx2x_handle_afex_cmd() argument
2808 u32 func = BP_ABS_FUNC(bp); in bnx2x_handle_afex_cmd()
2818 vifid = SHMEM2_RD(bp, afex_param1_to_driver[BP_FW_MB_IDX(bp)]); in bnx2x_handle_afex_cmd()
2821 bnx2x_afex_handle_vif_list_cmd(bp, VIF_LIST_RULE_GET, vifid, 0); in bnx2x_handle_afex_cmd()
2825 vifid = SHMEM2_RD(bp, afex_param1_to_driver[BP_FW_MB_IDX(bp)]); in bnx2x_handle_afex_cmd()
2826 addrs = SHMEM2_RD(bp, afex_param2_to_driver[BP_FW_MB_IDX(bp)]); in bnx2x_handle_afex_cmd()
2830 bnx2x_afex_handle_vif_list_cmd(bp, VIF_LIST_RULE_SET, vifid, in bnx2x_handle_afex_cmd()
2835 addr_to_write = SHMEM2_RD(bp, in bnx2x_handle_afex_cmd()
2836 afex_scratchpad_addr_to_write[BP_FW_MB_IDX(bp)]); in bnx2x_handle_afex_cmd()
2837 stats_type = SHMEM2_RD(bp, in bnx2x_handle_afex_cmd()
2838 afex_param1_to_driver[BP_FW_MB_IDX(bp)]); in bnx2x_handle_afex_cmd()
2844 bnx2x_afex_collect_stats(bp, (void *)&afex_stats, stats_type); in bnx2x_handle_afex_cmd()
2848 REG_WR(bp, addr_to_write + i*sizeof(u32), in bnx2x_handle_afex_cmd()
2852 bnx2x_fw_command(bp, DRV_MSG_CODE_AFEX_STATSGET_ACK, 0); in bnx2x_handle_afex_cmd()
2856 mf_config = MF_CFG_RD(bp, func_mf_config[func].config); in bnx2x_handle_afex_cmd()
2857 bp->mf_config[BP_VN(bp)] = mf_config; in bnx2x_handle_afex_cmd()
2869 XSTORM_RATE_SHAPING_PER_VN_VARS_OFFSET(BP_FUNC(bp)); in bnx2x_handle_afex_cmd()
2871 bp->mf_config[BP_VN(bp)] = mf_config; in bnx2x_handle_afex_cmd()
2873 bnx2x_calc_vn_max(bp, BP_VN(bp), &cmng_input); in bnx2x_handle_afex_cmd()
2875 cmng_input.vnic_max_rate[BP_VN(bp)]; in bnx2x_handle_afex_cmd()
2880 __storm_memset_struct(bp, addr, size, (u32 *)&m_rs_vn); in bnx2x_handle_afex_cmd()
2884 (MF_CFG_RD(bp, func_mf_config[func].e1hov_tag) & in bnx2x_handle_afex_cmd()
2888 (MF_CFG_RD(bp, func_mf_config[func].e1hov_tag) & in bnx2x_handle_afex_cmd()
2896 (MF_CFG_RD(bp, in bnx2x_handle_afex_cmd()
2901 (MF_CFG_RD(bp, in bnx2x_handle_afex_cmd()
2907 if (bnx2x_afex_func_update(bp, vif_id, vlan_val, in bnx2x_handle_afex_cmd()
2911 bp->afex_def_vlan_tag = vlan_val; in bnx2x_handle_afex_cmd()
2912 bp->afex_vlan_mode = vlan_mode; in bnx2x_handle_afex_cmd()
2915 bnx2x_link_report(bp); in bnx2x_handle_afex_cmd()
2918 bnx2x_afex_func_update(bp, 0xFFFF, 0, 0); in bnx2x_handle_afex_cmd()
2921 bp->afex_def_vlan_tag = -1; in bnx2x_handle_afex_cmd()
2926 static void bnx2x_handle_update_svid_cmd(struct bnx2x *bp) in bnx2x_handle_update_svid_cmd() argument
2933 func_params.f_obj = &bp->func_obj; in bnx2x_handle_update_svid_cmd()
2940 if (IS_MF_UFP(bp) || IS_MF_BD(bp)) { in bnx2x_handle_update_svid_cmd()
2941 int func = BP_ABS_FUNC(bp); in bnx2x_handle_update_svid_cmd()
2945 val = MF_CFG_RD(bp, func_mf_config[func].e1hov_tag) & in bnx2x_handle_update_svid_cmd()
2948 bp->mf_ov = val; in bnx2x_handle_update_svid_cmd()
2955 REG_WR(bp, NIG_REG_LLH0_FUNC_VLAN_ID + BP_PORT(bp) * 8, in bnx2x_handle_update_svid_cmd()
2956 bp->mf_ov); in bnx2x_handle_update_svid_cmd()
2961 switch_update_params->vlan = bp->mf_ov; in bnx2x_handle_update_svid_cmd()
2963 if (bnx2x_func_state_change(bp, &func_params) < 0) { in bnx2x_handle_update_svid_cmd()
2965 bp->mf_ov); in bnx2x_handle_update_svid_cmd()
2969 bp->mf_ov); in bnx2x_handle_update_svid_cmd()
2975 bnx2x_fw_command(bp, DRV_MSG_CODE_OEM_UPDATE_SVID_OK, 0); in bnx2x_handle_update_svid_cmd()
2978 bnx2x_fw_command(bp, DRV_MSG_CODE_OEM_UPDATE_SVID_FAILURE, 0); in bnx2x_handle_update_svid_cmd()
2981 static void bnx2x_pmf_update(struct bnx2x *bp) in bnx2x_pmf_update() argument
2983 int port = BP_PORT(bp); in bnx2x_pmf_update()
2986 bp->port.pmf = 1; in bnx2x_pmf_update()
2987 DP(BNX2X_MSG_MCP, "pmf %d\n", bp->port.pmf); in bnx2x_pmf_update()
2996 queue_delayed_work(bnx2x_wq, &bp->period_task, 0); in bnx2x_pmf_update()
2998 bnx2x_dcbx_pmf_update(bp); in bnx2x_pmf_update()
3001 val = (0xff0f | (1 << (BP_VN(bp) + 4))); in bnx2x_pmf_update()
3002 if (bp->common.int_block == INT_BLOCK_HC) { in bnx2x_pmf_update()
3003 REG_WR(bp, HC_REG_TRAILING_EDGE_0 + port*8, val); in bnx2x_pmf_update()
3004 REG_WR(bp, HC_REG_LEADING_EDGE_0 + port*8, val); in bnx2x_pmf_update()
3005 } else if (!CHIP_IS_E1x(bp)) { in bnx2x_pmf_update()
3006 REG_WR(bp, IGU_REG_TRAILING_EDGE_LATCH, val); in bnx2x_pmf_update()
3007 REG_WR(bp, IGU_REG_LEADING_EDGE_LATCH, val); in bnx2x_pmf_update()
3010 bnx2x_stats_handle(bp, STATS_EVENT_PMF); in bnx2x_pmf_update()
3022 u32 bnx2x_fw_command(struct bnx2x *bp, u32 command, u32 param) in bnx2x_fw_command() argument
3024 int mb_idx = BP_FW_MB_IDX(bp); in bnx2x_fw_command()
3028 u8 delay = CHIP_REV_IS_SLOW(bp) ? 100 : 10; in bnx2x_fw_command()
3030 mutex_lock(&bp->fw_mb_mutex); in bnx2x_fw_command()
3031 seq = ++bp->fw_seq; in bnx2x_fw_command()
3032 SHMEM_WR(bp, func_mb[mb_idx].drv_mb_param, param); in bnx2x_fw_command()
3033 SHMEM_WR(bp, func_mb[mb_idx].drv_mb_header, (command | seq)); in bnx2x_fw_command()
3042 rc = SHMEM_RD(bp, func_mb[mb_idx].fw_mb_header); in bnx2x_fw_command()
3056 bnx2x_fw_dump(bp); in bnx2x_fw_command()
3059 mutex_unlock(&bp->fw_mb_mutex); in bnx2x_fw_command()
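
bnx2x_fw_command() is the driver-to-MCP mailbox handshake: under fw_mb_mutex it bumps a sequence number, writes the parameter and then the command ORed with the sequence into shared memory, and polls the firmware's reply header until the sequence field matches, dumping firmware state on timeout. A minimal standalone sketch of that sequence-matched handshake follows; the field layout, mask, and the instantly-echoing fake firmware are simplifications.

#include <stdint.h>
#include <stdio.h>

#define SEQ_MASK 0x0000ffffu            /* assumed sequence field in the mailbox header */

/* Fake shared-memory mailbox; the "firmware" acks by echoing the request. */
static uint32_t drv_mb_param, drv_mb_header, fw_mb_header;

static void fake_fw_poll(void)
{
        fw_mb_header = drv_mb_header;   /* firmware echoes cmd | seq when done */
}

static uint32_t fw_command(uint32_t command, uint32_t param)
{
        static uint32_t fw_seq;         /* would live in bp and be mutex protected */
        uint32_t seq = ++fw_seq & SEQ_MASK;
        uint32_t rc;
        int cnt = 500;

        drv_mb_param = param;
        drv_mb_header = command | seq;

        do {                            /* poll until the fw echoes our sequence */
                fake_fw_poll();
                rc = fw_mb_header;
        } while ((seq != (rc & SEQ_MASK)) && --cnt);

        if (seq != (rc & SEQ_MASK))
                return 0;               /* timed out; real code dumps firmware state */
        return rc & ~SEQ_MASK;          /* response code without the sequence */
}

int main(void)
{
        printf("resp=0x%x\n", fw_command(0x10000, 0));  /* prints resp=0x10000 */
        return 0;
}
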
3064 static void storm_memset_func_cfg(struct bnx2x *bp, in storm_memset_func_cfg() argument
3073 __storm_memset_struct(bp, addr, size, (u32 *)tcfg); in storm_memset_func_cfg()
3076 void bnx2x_func_init(struct bnx2x *bp, struct bnx2x_func_init_params *p) in bnx2x_func_init() argument
3078 if (CHIP_IS_E1x(bp)) { in bnx2x_func_init()
3081 storm_memset_func_cfg(bp, &tcfg, p->func_id); in bnx2x_func_init()
3085 storm_memset_vf_to_pf(bp, p->func_id, p->pf_id); in bnx2x_func_init()
3086 storm_memset_func_en(bp, p->func_id, 1); in bnx2x_func_init()
3090 storm_memset_spq_addr(bp, p->spq_map, p->func_id); in bnx2x_func_init()
3091 REG_WR(bp, XSEM_REG_FAST_MEMORY + in bnx2x_func_init()
3105 static unsigned long bnx2x_get_common_flags(struct bnx2x *bp, in bnx2x_get_common_flags() argument
3123 if (bp->flags & TX_SWITCHING) in bnx2x_get_common_flags()
3136 static unsigned long bnx2x_get_q_flags(struct bnx2x *bp, in bnx2x_get_q_flags() argument
3143 if (IS_MF_SD(bp)) in bnx2x_get_q_flags()
3168 if (IS_MF_AFEX(bp)) in bnx2x_get_q_flags()
3171 return flags | bnx2x_get_common_flags(bp, fp, true); in bnx2x_get_q_flags()
3174 static void bnx2x_pf_q_prep_general(struct bnx2x *bp, in bnx2x_pf_q_prep_general() argument
3185 gen_init->mtu = bp->dev->mtu; in bnx2x_pf_q_prep_general()
3192 static void bnx2x_pf_rx_q_prep(struct bnx2x *bp, in bnx2x_pf_rx_q_prep() argument
3201 pause->sge_th_lo = SGE_TH_LO(bp); in bnx2x_pf_rx_q_prep()
3202 pause->sge_th_hi = SGE_TH_HI(bp); in bnx2x_pf_rx_q_prep()
3205 WARN_ON(bp->dropless_fc && in bnx2x_pf_rx_q_prep()
3210 max_sge = SGE_PAGE_ALIGN(bp->dev->mtu) >> in bnx2x_pf_rx_q_prep()
3218 if (!CHIP_IS_E1(bp)) { in bnx2x_pf_rx_q_prep()
3219 pause->bd_th_lo = BD_TH_LO(bp); in bnx2x_pf_rx_q_prep()
3220 pause->bd_th_hi = BD_TH_HI(bp); in bnx2x_pf_rx_q_prep()
3222 pause->rcq_th_lo = RCQ_TH_LO(bp); in bnx2x_pf_rx_q_prep()
3223 pause->rcq_th_hi = RCQ_TH_HI(bp); in bnx2x_pf_rx_q_prep()
3228 WARN_ON(bp->dropless_fc && in bnx2x_pf_rx_q_prep()
3230 bp->rx_ring_size); in bnx2x_pf_rx_q_prep()
3231 WARN_ON(bp->dropless_fc && in bnx2x_pf_rx_q_prep()
3254 rxq_init->rss_engine_id = BP_FUNC(bp); in bnx2x_pf_rx_q_prep()
3255 rxq_init->mcast_engine_id = BP_FUNC(bp); in bnx2x_pf_rx_q_prep()
3262 rxq_init->max_tpa_queues = MAX_AGG_QS(bp); in bnx2x_pf_rx_q_prep()
3274 if (IS_MF_AFEX(bp)) { in bnx2x_pf_rx_q_prep()
3275 rxq_init->silent_removal_value = bp->afex_def_vlan_tag; in bnx2x_pf_rx_q_prep()
3280 static void bnx2x_pf_tx_q_prep(struct bnx2x *bp, in bnx2x_pf_tx_q_prep() argument
3293 txq_init->tss_leading_cl_id = bnx2x_fp(bp, 0, cl_id); in bnx2x_pf_tx_q_prep()
3301 static void bnx2x_pf_init(struct bnx2x *bp) in bnx2x_pf_init() argument
3306 if (!CHIP_IS_E1x(bp)) { in bnx2x_pf_init()
3309 REG_WR(bp, IGU_REG_STATISTIC_NUM_MESSAGE_SENT + in bnx2x_pf_init()
3311 (CHIP_MODE_IS_4_PORT(bp) ? in bnx2x_pf_init()
3312 BP_FUNC(bp) : BP_VN(bp))*4, 0); in bnx2x_pf_init()
3314 REG_WR(bp, IGU_REG_STATISTIC_NUM_MESSAGE_SENT + in bnx2x_pf_init()
3317 (CHIP_MODE_IS_4_PORT(bp) ? in bnx2x_pf_init()
3318 BP_FUNC(bp) : BP_VN(bp))*4, 0); in bnx2x_pf_init()
3322 func_init.pf_id = BP_FUNC(bp); in bnx2x_pf_init()
3323 func_init.func_id = BP_FUNC(bp); in bnx2x_pf_init()
3324 func_init.spq_map = bp->spq_mapping; in bnx2x_pf_init()
3325 func_init.spq_prod = bp->spq_prod_idx; in bnx2x_pf_init()
3327 bnx2x_func_init(bp, &func_init); in bnx2x_pf_init()
3329 memset(&(bp->cmng), 0, sizeof(struct cmng_struct_per_port)); in bnx2x_pf_init()
3337 bp->link_vars.line_speed = SPEED_10000; in bnx2x_pf_init()
3338 bnx2x_cmng_fns_init(bp, true, bnx2x_get_cmng_fns_mode(bp)); in bnx2x_pf_init()
3341 if (bp->port.pmf) in bnx2x_pf_init()
3342 storm_memset_cmng(bp, &bp->cmng, BP_PORT(bp)); in bnx2x_pf_init()
3345 eq_data.base_addr.hi = U64_HI(bp->eq_mapping); in bnx2x_pf_init()
3346 eq_data.base_addr.lo = U64_LO(bp->eq_mapping); in bnx2x_pf_init()
3347 eq_data.producer = bp->eq_prod; in bnx2x_pf_init()
3350 storm_memset_eq_data(bp, &eq_data, BP_FUNC(bp)); in bnx2x_pf_init()
3353 static void bnx2x_e1h_disable(struct bnx2x *bp) in bnx2x_e1h_disable() argument
3355 int port = BP_PORT(bp); in bnx2x_e1h_disable()
3357 bnx2x_tx_disable(bp); in bnx2x_e1h_disable()
3359 REG_WR(bp, NIG_REG_LLH0_FUNC_EN + port*8, 0); in bnx2x_e1h_disable()
3362 static void bnx2x_e1h_enable(struct bnx2x *bp) in bnx2x_e1h_enable() argument
3364 int port = BP_PORT(bp); in bnx2x_e1h_enable()
3366 if (!(IS_MF_UFP(bp) && BNX2X_IS_MF_SD_PROTOCOL_FCOE(bp))) in bnx2x_e1h_enable()
3367 REG_WR(bp, NIG_REG_LLH0_FUNC_EN + port * 8, 1); in bnx2x_e1h_enable()
3370 netif_tx_wake_all_queues(bp->dev); in bnx2x_e1h_enable()
3380 static void bnx2x_drv_info_ether_stat(struct bnx2x *bp) in bnx2x_drv_info_ether_stat() argument
3383 &bp->slowpath->drv_info_to_mcp.ether_stat; in bnx2x_drv_info_ether_stat()
3385 &bp->sp_objs->mac_obj; in bnx2x_drv_info_ether_stat()
3402 mac_obj->get_n_elements(bp, &bp->sp_objs[0].mac_obj, in bnx2x_drv_info_ether_stat()
3406 ether_stat->mtu_size = bp->dev->mtu; in bnx2x_drv_info_ether_stat()
3407 if (bp->dev->features & NETIF_F_RXCSUM) in bnx2x_drv_info_ether_stat()
3409 if (bp->dev->features & NETIF_F_TSO) in bnx2x_drv_info_ether_stat()
3411 ether_stat->feature_flags |= bp->common.boot_mode; in bnx2x_drv_info_ether_stat()
3413 ether_stat->promiscuous_mode = (bp->dev->flags & IFF_PROMISC) ? 1 : 0; in bnx2x_drv_info_ether_stat()
3415 ether_stat->txq_size = bp->tx_ring_size; in bnx2x_drv_info_ether_stat()
3416 ether_stat->rxq_size = bp->rx_ring_size; in bnx2x_drv_info_ether_stat()
3419 ether_stat->vf_cnt = IS_SRIOV(bp) ? bp->vfdb->sriov.nr_virtfn : 0; in bnx2x_drv_info_ether_stat()
3423 static void bnx2x_drv_info_fcoe_stat(struct bnx2x *bp) in bnx2x_drv_info_fcoe_stat() argument
3425 struct bnx2x_dcbx_app_params *app = &bp->dcbx_port_params.app; in bnx2x_drv_info_fcoe_stat()
3427 &bp->slowpath->drv_info_to_mcp.fcoe_stat; in bnx2x_drv_info_fcoe_stat()
3429 if (!CNIC_LOADED(bp)) in bnx2x_drv_info_fcoe_stat()
3432 memcpy(fcoe_stat->mac_local + MAC_PAD, bp->fip_mac, ETH_ALEN); in bnx2x_drv_info_fcoe_stat()
3438 if (!NO_FCOE(bp)) { in bnx2x_drv_info_fcoe_stat()
3440 &bp->fw_stats_data->queue_stats[FCOE_IDX(bp)]. in bnx2x_drv_info_fcoe_stat()
3444 &bp->fw_stats_data->queue_stats[FCOE_IDX(bp)]. in bnx2x_drv_info_fcoe_stat()
3448 &bp->fw_stats_data->fcoe; in bnx2x_drv_info_fcoe_stat()
3522 bnx2x_cnic_notify(bp, CNIC_CTL_FCOE_STATS_GET_CMD); in bnx2x_drv_info_fcoe_stat()
3525 static void bnx2x_drv_info_iscsi_stat(struct bnx2x *bp) in bnx2x_drv_info_iscsi_stat() argument
3527 struct bnx2x_dcbx_app_params *app = &bp->dcbx_port_params.app; in bnx2x_drv_info_iscsi_stat()
3529 &bp->slowpath->drv_info_to_mcp.iscsi_stat; in bnx2x_drv_info_iscsi_stat()
3531 if (!CNIC_LOADED(bp)) in bnx2x_drv_info_iscsi_stat()
3534 memcpy(iscsi_stat->mac_local + MAC_PAD, bp->cnic_eth_dev.iscsi_mac, in bnx2x_drv_info_iscsi_stat()
3541 bnx2x_cnic_notify(bp, CNIC_CTL_ISCSI_STATS_GET_CMD); in bnx2x_drv_info_iscsi_stat()
3549 static void bnx2x_config_mf_bw(struct bnx2x *bp) in bnx2x_config_mf_bw() argument
3555 if (!IS_MF(bp)) { in bnx2x_config_mf_bw()
3561 if (bp->link_vars.link_up) { in bnx2x_config_mf_bw()
3562 bnx2x_cmng_fns_init(bp, true, CMNG_FNS_MINMAX); in bnx2x_config_mf_bw()
3563 bnx2x_link_sync_notify(bp); in bnx2x_config_mf_bw()
3565 storm_memset_cmng(bp, &bp->cmng, BP_PORT(bp)); in bnx2x_config_mf_bw()
3568 static void bnx2x_set_mf_bw(struct bnx2x *bp) in bnx2x_set_mf_bw() argument
3570 bnx2x_config_mf_bw(bp); in bnx2x_set_mf_bw()
3571 bnx2x_fw_command(bp, DRV_MSG_CODE_SET_MF_BW_ACK, 0); in bnx2x_set_mf_bw()
3574 static void bnx2x_handle_eee_event(struct bnx2x *bp) in bnx2x_handle_eee_event() argument
3577 bnx2x_fw_command(bp, DRV_MSG_CODE_EEE_RESULTS_ACK, 0); in bnx2x_handle_eee_event()
3583 static void bnx2x_handle_drv_info_req(struct bnx2x *bp) in bnx2x_handle_drv_info_req() argument
3586 u32 drv_info_ctl = SHMEM2_RD(bp, drv_info_control); in bnx2x_handle_drv_info_req()
3592 bnx2x_fw_command(bp, DRV_MSG_CODE_DRV_INFO_NACK, 0); in bnx2x_handle_drv_info_req()
3600 mutex_lock(&bp->drv_info_mutex); in bnx2x_handle_drv_info_req()
3602 memset(&bp->slowpath->drv_info_to_mcp, 0, in bnx2x_handle_drv_info_req()
3607 bnx2x_drv_info_ether_stat(bp); in bnx2x_handle_drv_info_req()
3610 bnx2x_drv_info_fcoe_stat(bp); in bnx2x_handle_drv_info_req()
3613 bnx2x_drv_info_iscsi_stat(bp); in bnx2x_handle_drv_info_req()
3617 bnx2x_fw_command(bp, DRV_MSG_CODE_DRV_INFO_NACK, 0); in bnx2x_handle_drv_info_req()
3624 SHMEM2_WR(bp, drv_info_host_addr_lo, in bnx2x_handle_drv_info_req()
3625 U64_LO(bnx2x_sp_mapping(bp, drv_info_to_mcp))); in bnx2x_handle_drv_info_req()
3626 SHMEM2_WR(bp, drv_info_host_addr_hi, in bnx2x_handle_drv_info_req()
3627 U64_HI(bnx2x_sp_mapping(bp, drv_info_to_mcp))); in bnx2x_handle_drv_info_req()
3629 bnx2x_fw_command(bp, DRV_MSG_CODE_DRV_INFO_ACK, 0); in bnx2x_handle_drv_info_req()
3635 if (!SHMEM2_HAS(bp, mfw_drv_indication)) { in bnx2x_handle_drv_info_req()
3637 } else if (!bp->drv_info_mng_owner) { in bnx2x_handle_drv_info_req()
3638 u32 bit = MFW_DRV_IND_READ_DONE_OFFSET((BP_ABS_FUNC(bp) >> 1)); in bnx2x_handle_drv_info_req()
3641 u32 indication = SHMEM2_RD(bp, mfw_drv_indication); in bnx2x_handle_drv_info_req()
3645 SHMEM2_WR(bp, mfw_drv_indication, in bnx2x_handle_drv_info_req()
3656 bp->drv_info_mng_owner = true; in bnx2x_handle_drv_info_req()
3660 mutex_unlock(&bp->drv_info_mutex); in bnx2x_handle_drv_info_req()
3684 void bnx2x_update_mng_version(struct bnx2x *bp) in bnx2x_update_mng_version() argument
3689 int idx = BP_FW_MB_IDX(bp); in bnx2x_update_mng_version()
3692 if (!SHMEM2_HAS(bp, func_os_drv_ver)) in bnx2x_update_mng_version()
3695 mutex_lock(&bp->drv_info_mutex); in bnx2x_update_mng_version()
3697 if (bp->drv_info_mng_owner) in bnx2x_update_mng_version()
3700 if (bp->state != BNX2X_STATE_OPEN) in bnx2x_update_mng_version()
3705 if (!CNIC_LOADED(bp)) in bnx2x_update_mng_version()
3709 memset(&bp->slowpath->drv_info_to_mcp, 0, in bnx2x_update_mng_version()
3711 bnx2x_drv_info_iscsi_stat(bp); in bnx2x_update_mng_version()
3712 version = bp->slowpath->drv_info_to_mcp.iscsi_stat.version; in bnx2x_update_mng_version()
3715 memset(&bp->slowpath->drv_info_to_mcp, 0, in bnx2x_update_mng_version()
3717 bnx2x_drv_info_fcoe_stat(bp); in bnx2x_update_mng_version()
3718 version = bp->slowpath->drv_info_to_mcp.fcoe_stat.version; in bnx2x_update_mng_version()
3722 SHMEM2_WR(bp, func_os_drv_ver[idx].versions[DRV_PERS_ETHERNET], ethver); in bnx2x_update_mng_version()
3723 SHMEM2_WR(bp, func_os_drv_ver[idx].versions[DRV_PERS_ISCSI], iscsiver); in bnx2x_update_mng_version()
3724 SHMEM2_WR(bp, func_os_drv_ver[idx].versions[DRV_PERS_FCOE], fcoever); in bnx2x_update_mng_version()
3726 mutex_unlock(&bp->drv_info_mutex); in bnx2x_update_mng_version()
3732 void bnx2x_update_mfw_dump(struct bnx2x *bp) in bnx2x_update_mfw_dump() argument
3737 if (!SHMEM2_HAS(bp, drv_info)) in bnx2x_update_mfw_dump()
3741 SHMEM2_WR(bp, drv_info.epoc, (u32)ktime_get_real_seconds()); in bnx2x_update_mfw_dump()
3744 SHMEM2_WR(bp, drv_info.drv_ver, drv_ver); in bnx2x_update_mfw_dump()
3746 SHMEM2_WR(bp, drv_info.fw_ver, REG_RD(bp, XSEM_REG_PRAM)); in bnx2x_update_mfw_dump()
3749 valid_dump = SHMEM2_RD(bp, drv_info.valid_dump); in bnx2x_update_mfw_dump()
3758 static void bnx2x_oem_event(struct bnx2x *bp, u32 event) in bnx2x_oem_event() argument
3785 if (bp->mf_config[BP_VN(bp)] & FUNC_MF_CFG_FUNC_DISABLED) { in bnx2x_oem_event()
3787 bp->flags |= MF_FUNC_DIS; in bnx2x_oem_event()
3789 bnx2x_e1h_disable(bp); in bnx2x_oem_event()
3792 bp->flags &= ~MF_FUNC_DIS; in bnx2x_oem_event()
3794 bnx2x_e1h_enable(bp); in bnx2x_oem_event()
3802 bnx2x_config_mf_bw(bp); in bnx2x_oem_event()
3809 bnx2x_fw_command(bp, cmd_fail, 0); in bnx2x_oem_event()
3811 bnx2x_fw_command(bp, cmd_ok, 0); in bnx2x_oem_event()
3815 static struct eth_spe *bnx2x_sp_get_next(struct bnx2x *bp) in bnx2x_sp_get_next() argument
3817 struct eth_spe *next_spe = bp->spq_prod_bd; in bnx2x_sp_get_next()
3819 if (bp->spq_prod_bd == bp->spq_last_bd) { in bnx2x_sp_get_next()
3820 bp->spq_prod_bd = bp->spq; in bnx2x_sp_get_next()
3821 bp->spq_prod_idx = 0; in bnx2x_sp_get_next()
3824 bp->spq_prod_bd++; in bnx2x_sp_get_next()
3825 bp->spq_prod_idx++; in bnx2x_sp_get_next()
3831 static void bnx2x_sp_prod_update(struct bnx2x *bp) in bnx2x_sp_prod_update() argument
3833 int func = BP_FUNC(bp); in bnx2x_sp_prod_update()
3842 REG_WR16_RELAXED(bp, BAR_XSTRORM_INTMEM + XSTORM_SPQ_PROD_OFFSET(func), in bnx2x_sp_prod_update()
3843 bp->spq_prod_idx); in bnx2x_sp_prod_update()
3880 int bnx2x_sp_post(struct bnx2x *bp, int command, int cid, in bnx2x_sp_post() argument
3888 if (unlikely(bp->panic)) { in bnx2x_sp_post()
3894 spin_lock_bh(&bp->spq_lock); in bnx2x_sp_post()
3897 if (!atomic_read(&bp->eq_spq_left)) { in bnx2x_sp_post()
3899 spin_unlock_bh(&bp->spq_lock); in bnx2x_sp_post()
3903 } else if (!atomic_read(&bp->cq_spq_left)) { in bnx2x_sp_post()
3905 spin_unlock_bh(&bp->spq_lock); in bnx2x_sp_post()
3910 spe = bnx2x_sp_get_next(bp); in bnx2x_sp_post()
3915 HW_CID(bp, cid)); in bnx2x_sp_post()
3924 type |= ((BP_FUNC(bp) << SPE_HDR_FUNCTION_ID_SHIFT) & in bnx2x_sp_post()
3941 atomic_dec(&bp->eq_spq_left); in bnx2x_sp_post()
3943 atomic_dec(&bp->cq_spq_left); in bnx2x_sp_post()
3947 bp->spq_prod_idx, (u32)U64_HI(bp->spq_mapping), in bnx2x_sp_post()
3948 (u32)(U64_LO(bp->spq_mapping) + in bnx2x_sp_post()
3949 (void *)bp->spq_prod_bd - (void *)bp->spq), command, common, in bnx2x_sp_post()
3950 HW_CID(bp, cid), data_hi, data_lo, type, in bnx2x_sp_post()
3951 atomic_read(&bp->cq_spq_left), atomic_read(&bp->eq_spq_left)); in bnx2x_sp_post()
3953 bnx2x_sp_prod_update(bp); in bnx2x_sp_post()
3954 spin_unlock_bh(&bp->spq_lock); in bnx2x_sp_post()
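
The slow-path queue group (bnx2x_sp_get_next(), bnx2x_sp_prod_update(), bnx2x_sp_post()) is a producer ring: take the next BD, wrap back to the ring base when the last BD is reached, fill it, decrement the eq/cq spq-left credit, and publish the new producer index to the chip. The sketch below keeps only the wrap-around producer bookkeeping; the element type and ring size are placeholders.

#include <stdint.h>
#include <stdio.h>

#define SPQ_ENTRIES 8                   /* placeholder ring size */

struct spe { uint32_t data; };          /* stand-in for struct eth_spe */

struct spq {
        struct spe ring[SPQ_ENTRIES];
        struct spe *prod_bd;            /* next BD to fill           */
        struct spe *last_bd;            /* last valid BD in the ring */
        uint16_t prod_idx;              /* producer index seen by HW */
};

/* Return the BD to fill and advance the producer, wrapping like sp_get_next(). */
static struct spe *sp_get_next(struct spq *q)
{
        struct spe *next = q->prod_bd;

        if (q->prod_bd == q->last_bd) { /* wrap back to the ring base */
                q->prod_bd = q->ring;
                q->prod_idx = 0;
        } else {
                q->prod_bd++;
                q->prod_idx++;
        }
        return next;
}

int main(void)
{
        struct spq q = { { { 0 } }, 0, 0, 0 };
        int i;

        q.prod_bd = q.ring;
        q.last_bd = &q.ring[SPQ_ENTRIES - 1];

        for (i = 0; i < 10; i++)        /* post 10 entries across one wrap */
                sp_get_next(&q)->data = i;
        printf("prod_idx=%u\n", q.prod_idx);    /* prints 2 after wrapping */
        return 0;
}
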
3959 static int bnx2x_acquire_alr(struct bnx2x *bp) in bnx2x_acquire_alr() argument
3966 REG_WR(bp, MCP_REG_MCPR_ACCESS_LOCK, MCPR_ACCESS_LOCK_LOCK); in bnx2x_acquire_alr()
3967 val = REG_RD(bp, MCP_REG_MCPR_ACCESS_LOCK); in bnx2x_acquire_alr()
3982 static void bnx2x_release_alr(struct bnx2x *bp) in bnx2x_release_alr() argument
3984 REG_WR(bp, MCP_REG_MCPR_ACCESS_LOCK, 0); in bnx2x_release_alr()
3990 static u16 bnx2x_update_dsb_idx(struct bnx2x *bp) in bnx2x_update_dsb_idx() argument
3992 struct host_sp_status_block *def_sb = bp->def_status_blk; in bnx2x_update_dsb_idx()
3996 if (bp->def_att_idx != def_sb->atten_status_block.attn_bits_index) { in bnx2x_update_dsb_idx()
3997 bp->def_att_idx = def_sb->atten_status_block.attn_bits_index; in bnx2x_update_dsb_idx()
4001 if (bp->def_idx != def_sb->sp_sb.running_index) { in bnx2x_update_dsb_idx()
4002 bp->def_idx = def_sb->sp_sb.running_index; in bnx2x_update_dsb_idx()
4015 static void bnx2x_attn_int_asserted(struct bnx2x *bp, u32 asserted) in bnx2x_attn_int_asserted() argument
4017 int port = BP_PORT(bp); in bnx2x_attn_int_asserted()
4026 if (bp->attn_state & asserted) in bnx2x_attn_int_asserted()
4029 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_PORT0_ATT_MASK + port); in bnx2x_attn_int_asserted()
4030 aeu_mask = REG_RD(bp, aeu_addr); in bnx2x_attn_int_asserted()
4037 REG_WR(bp, aeu_addr, aeu_mask); in bnx2x_attn_int_asserted()
4038 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_PORT0_ATT_MASK + port); in bnx2x_attn_int_asserted()
4040 DP(NETIF_MSG_HW, "attn_state %x\n", bp->attn_state); in bnx2x_attn_int_asserted()
4041 bp->attn_state |= asserted; in bnx2x_attn_int_asserted()
4042 DP(NETIF_MSG_HW, "new state %x\n", bp->attn_state); in bnx2x_attn_int_asserted()
4047 bnx2x_acquire_phy_lock(bp); in bnx2x_attn_int_asserted()
4050 nig_mask = REG_RD(bp, nig_int_mask_addr); in bnx2x_attn_int_asserted()
4056 REG_WR(bp, nig_int_mask_addr, 0); in bnx2x_attn_int_asserted()
4058 bnx2x_link_attn(bp); in bnx2x_attn_int_asserted()
4078 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_1, 0x0); in bnx2x_attn_int_asserted()
4082 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_2, 0x0); in bnx2x_attn_int_asserted()
4086 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_3, 0x0); in bnx2x_attn_int_asserted()
4091 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_4, 0x0); in bnx2x_attn_int_asserted()
4095 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_5, 0x0); in bnx2x_attn_int_asserted()
4099 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_6, 0x0); in bnx2x_attn_int_asserted()
4105 if (bp->common.int_block == INT_BLOCK_HC) in bnx2x_attn_int_asserted()
4112 (bp->common.int_block == INT_BLOCK_HC) ? "HC" : "IGU", reg_addr); in bnx2x_attn_int_asserted()
4113 REG_WR(bp, reg_addr, asserted); in bnx2x_attn_int_asserted()
4120 if (bp->common.int_block != INT_BLOCK_HC) { in bnx2x_attn_int_asserted()
4123 igu_acked = REG_RD(bp, in bnx2x_attn_int_asserted()
4132 REG_WR(bp, nig_int_mask_addr, nig_mask); in bnx2x_attn_int_asserted()
4133 bnx2x_release_phy_lock(bp); in bnx2x_attn_int_asserted()
4137 static void bnx2x_fan_failure(struct bnx2x *bp) in bnx2x_fan_failure() argument
4139 int port = BP_PORT(bp); in bnx2x_fan_failure()
4143 SHMEM_RD(bp, in bnx2x_fan_failure()
4148 SHMEM_WR(bp, dev_info.port_hw_config[port].external_phy_config, in bnx2x_fan_failure()
4152 …netdev_err(bp->dev, "Fan Failure on Network Controller has caused the driver to shutdown the card … in bnx2x_fan_failure()
4159 bnx2x_schedule_sp_rtnl(bp, BNX2X_SP_RTNL_FAN_FAILURE, 0); in bnx2x_fan_failure()
4162 static void bnx2x_attn_int_deasserted0(struct bnx2x *bp, u32 attn) in bnx2x_attn_int_deasserted0() argument
4164 int port = BP_PORT(bp); in bnx2x_attn_int_deasserted0()
4173 val = REG_RD(bp, reg_offset); in bnx2x_attn_int_deasserted0()
4175 REG_WR(bp, reg_offset, val); in bnx2x_attn_int_deasserted0()
4180 bnx2x_hw_reset_phy(&bp->link_params); in bnx2x_attn_int_deasserted0()
4181 bnx2x_fan_failure(bp); in bnx2x_attn_int_deasserted0()
4184 if ((attn & bp->link_vars.aeu_int_mask) && bp->port.pmf) { in bnx2x_attn_int_deasserted0()
4185 bnx2x_acquire_phy_lock(bp); in bnx2x_attn_int_deasserted0()
4186 bnx2x_handle_module_detect_int(&bp->link_params); in bnx2x_attn_int_deasserted0()
4187 bnx2x_release_phy_lock(bp); in bnx2x_attn_int_deasserted0()
4192 val = REG_RD(bp, reg_offset); in bnx2x_attn_int_deasserted0()
4194 REG_WR(bp, reg_offset, val); in bnx2x_attn_int_deasserted0()
4202 static void bnx2x_attn_int_deasserted1(struct bnx2x *bp, u32 attn) in bnx2x_attn_int_deasserted1() argument
4208 val = REG_RD(bp, DORQ_REG_DORQ_INT_STS_CLR); in bnx2x_attn_int_deasserted1()
4217 int port = BP_PORT(bp); in bnx2x_attn_int_deasserted1()
4223 val = REG_RD(bp, reg_offset); in bnx2x_attn_int_deasserted1()
4225 REG_WR(bp, reg_offset, val); in bnx2x_attn_int_deasserted1()
4233 static void bnx2x_attn_int_deasserted2(struct bnx2x *bp, u32 attn) in bnx2x_attn_int_deasserted2() argument
4239 val = REG_RD(bp, CFC_REG_CFC_INT_STS_CLR); in bnx2x_attn_int_deasserted2()
4247 val = REG_RD(bp, PXP_REG_PXP_INT_STS_CLR_0); in bnx2x_attn_int_deasserted2()
4253 if (!CHIP_IS_E1x(bp)) { in bnx2x_attn_int_deasserted2()
4254 val = REG_RD(bp, PXP_REG_PXP_INT_STS_CLR_1); in bnx2x_attn_int_deasserted2()
4261 int port = BP_PORT(bp); in bnx2x_attn_int_deasserted2()
4267 val = REG_RD(bp, reg_offset); in bnx2x_attn_int_deasserted2()
4269 REG_WR(bp, reg_offset, val); in bnx2x_attn_int_deasserted2()
4277 static void bnx2x_attn_int_deasserted3(struct bnx2x *bp, u32 attn) in bnx2x_attn_int_deasserted3() argument
4284 int func = BP_FUNC(bp); in bnx2x_attn_int_deasserted3()
4286 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_12 + func*4, 0); in bnx2x_attn_int_deasserted3()
4287 bnx2x_read_mf_cfg(bp); in bnx2x_attn_int_deasserted3()
4288 bp->mf_config[BP_VN(bp)] = MF_CFG_RD(bp, in bnx2x_attn_int_deasserted3()
4289 func_mf_config[BP_ABS_FUNC(bp)].config); in bnx2x_attn_int_deasserted3()
4290 val = SHMEM_RD(bp, in bnx2x_attn_int_deasserted3()
4291 func_mb[BP_FW_MB_IDX(bp)].drv_status); in bnx2x_attn_int_deasserted3()
4295 bnx2x_oem_event(bp, in bnx2x_attn_int_deasserted3()
4300 bnx2x_set_mf_bw(bp); in bnx2x_attn_int_deasserted3()
4303 bnx2x_handle_drv_info_req(bp); in bnx2x_attn_int_deasserted3()
4306 bnx2x_schedule_iov_task(bp, in bnx2x_attn_int_deasserted3()
4309 if ((bp->port.pmf == 0) && (val & DRV_STATUS_PMF)) in bnx2x_attn_int_deasserted3()
4310 bnx2x_pmf_update(bp); in bnx2x_attn_int_deasserted3()
4312 if (bp->port.pmf && in bnx2x_attn_int_deasserted3()
4314 bp->dcbx_enabled > 0) in bnx2x_attn_int_deasserted3()
4316 bnx2x_dcbx_set_params(bp, in bnx2x_attn_int_deasserted3()
4319 bnx2x_handle_afex_cmd(bp, in bnx2x_attn_int_deasserted3()
4322 bnx2x_handle_eee_event(bp); in bnx2x_attn_int_deasserted3()
4325 bnx2x_schedule_sp_rtnl(bp, in bnx2x_attn_int_deasserted3()
4328 if (bp->link_vars.periodic_flags & in bnx2x_attn_int_deasserted3()
4331 bnx2x_acquire_phy_lock(bp); in bnx2x_attn_int_deasserted3()
4332 bp->link_vars.periodic_flags &= in bnx2x_attn_int_deasserted3()
4334 bnx2x_release_phy_lock(bp); in bnx2x_attn_int_deasserted3()
4335 if (IS_MF(bp)) in bnx2x_attn_int_deasserted3()
4336 bnx2x_link_sync_notify(bp); in bnx2x_attn_int_deasserted3()
4337 bnx2x_link_report(bp); in bnx2x_attn_int_deasserted3()
4342 bnx2x__link_status_update(bp); in bnx2x_attn_int_deasserted3()
4346 bnx2x_mc_assert(bp); in bnx2x_attn_int_deasserted3()
4347 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_10, 0); in bnx2x_attn_int_deasserted3()
4348 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_9, 0); in bnx2x_attn_int_deasserted3()
4349 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_8, 0); in bnx2x_attn_int_deasserted3()
4350 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_7, 0); in bnx2x_attn_int_deasserted3()
4356 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_11, 0); in bnx2x_attn_int_deasserted3()
4357 bnx2x_fw_dump(bp); in bnx2x_attn_int_deasserted3()
4366 val = CHIP_IS_E1(bp) ? 0 : in bnx2x_attn_int_deasserted3()
4367 REG_RD(bp, MISC_REG_GRC_TIMEOUT_ATTN); in bnx2x_attn_int_deasserted3()
4371 val = CHIP_IS_E1(bp) ? 0 : in bnx2x_attn_int_deasserted3()
4372 REG_RD(bp, MISC_REG_GRC_RSV_ATTN); in bnx2x_attn_int_deasserted3()
4375 REG_WR(bp, MISC_REG_AEU_CLR_LATCH_SIGNAL, 0x7ff); in bnx2x_attn_int_deasserted3()
4408 void bnx2x_set_reset_global(struct bnx2x *bp) in bnx2x_set_reset_global() argument
4411 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_reset_global()
4412 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_set_reset_global()
4413 REG_WR(bp, BNX2X_RECOVERY_GLOB_REG, val | BNX2X_GLOBAL_RESET_BIT); in bnx2x_set_reset_global()
4414 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_reset_global()
4422 static void bnx2x_clear_reset_global(struct bnx2x *bp) in bnx2x_clear_reset_global() argument
4425 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_clear_reset_global()
4426 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_clear_reset_global()
4427 REG_WR(bp, BNX2X_RECOVERY_GLOB_REG, val & (~BNX2X_GLOBAL_RESET_BIT)); in bnx2x_clear_reset_global()
4428 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_clear_reset_global()
4436 static bool bnx2x_reset_is_global(struct bnx2x *bp) in bnx2x_reset_is_global() argument
4438 u32 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_reset_is_global()
4449 static void bnx2x_set_reset_done(struct bnx2x *bp) in bnx2x_set_reset_done() argument
4452 u32 bit = BP_PATH(bp) ? in bnx2x_set_reset_done()
4454 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_reset_done()
4455 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_set_reset_done()
4459 REG_WR(bp, BNX2X_RECOVERY_GLOB_REG, val); in bnx2x_set_reset_done()
4461 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_reset_done()
4469 void bnx2x_set_reset_in_progress(struct bnx2x *bp) in bnx2x_set_reset_in_progress() argument
4472 u32 bit = BP_PATH(bp) ? in bnx2x_set_reset_in_progress()
4474 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_reset_in_progress()
4475 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_set_reset_in_progress()
4479 REG_WR(bp, BNX2X_RECOVERY_GLOB_REG, val); in bnx2x_set_reset_in_progress()
4480 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_reset_in_progress()
4487 bool bnx2x_reset_is_done(struct bnx2x *bp, int engine) in bnx2x_reset_is_done() argument
4489 u32 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_reset_is_done()
4502 void bnx2x_set_pf_load(struct bnx2x *bp) in bnx2x_set_pf_load() argument
4505 u32 mask = BP_PATH(bp) ? BNX2X_PATH1_LOAD_CNT_MASK : in bnx2x_set_pf_load()
4507 u32 shift = BP_PATH(bp) ? BNX2X_PATH1_LOAD_CNT_SHIFT : in bnx2x_set_pf_load()
4510 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_pf_load()
4511 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_set_pf_load()
4519 val1 |= (1 << bp->pf_num); in bnx2x_set_pf_load()
4527 REG_WR(bp, BNX2X_RECOVERY_GLOB_REG, val); in bnx2x_set_pf_load()
4528 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_set_pf_load()
4540 bool bnx2x_clear_pf_load(struct bnx2x *bp) in bnx2x_clear_pf_load() argument
4543 u32 mask = BP_PATH(bp) ? BNX2X_PATH1_LOAD_CNT_MASK : in bnx2x_clear_pf_load()
4545 u32 shift = BP_PATH(bp) ? BNX2X_PATH1_LOAD_CNT_SHIFT : in bnx2x_clear_pf_load()
4548 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_clear_pf_load()
4549 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_clear_pf_load()
4556 val1 &= ~(1 << bp->pf_num); in bnx2x_clear_pf_load()
4564 REG_WR(bp, BNX2X_RECOVERY_GLOB_REG, val); in bnx2x_clear_pf_load()
4565 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RECOVERY_REG); in bnx2x_clear_pf_load()
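
bnx2x_set_pf_load() and bnx2x_clear_pf_load() keep per-engine load bookkeeping in one shared register: each path owns a field selected by a mask and shift, each PF sets or clears its own bit inside that field, and clearing reports whether this PF was the last one loaded on its path. The sketch below models that read-modify-write with an assumed field layout (path 0 in bits 0-7, path 1 in bits 8-15); the real mask values are not shown in this listing.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PATH0_LOAD_MASK  0x000000ffu	/* assumed layout, for illustration only */
#define PATH0_LOAD_SHIFT 0
#define PATH1_LOAD_MASK  0x0000ff00u
#define PATH1_LOAD_SHIFT 8

static uint32_t recovery_reg;		/* stands in for the shared hardware register */

static void set_pf_load(int path, int pf_num)
{
	uint32_t mask  = path ? PATH1_LOAD_MASK  : PATH0_LOAD_MASK;
	uint32_t shift = path ? PATH1_LOAD_SHIFT : PATH0_LOAD_SHIFT;
	uint32_t field = (recovery_reg & mask) >> shift;

	field |= 1u << pf_num;
	recovery_reg = (recovery_reg & ~mask) | (field << shift);
}

/* Returns true when this PF was the last loaded one on its path. */
static bool clear_pf_load(int path, int pf_num)
{
	uint32_t mask  = path ? PATH1_LOAD_MASK  : PATH0_LOAD_MASK;
	uint32_t shift = path ? PATH1_LOAD_SHIFT : PATH0_LOAD_SHIFT;
	uint32_t field = (recovery_reg & mask) >> shift;

	field &= ~(1u << pf_num);
	recovery_reg = (recovery_reg & ~mask) | (field << shift);
	return field == 0;
}

int main(void)
{
	set_pf_load(0, 0);
	set_pf_load(0, 2);
	printf("last? %d\n", clear_pf_load(0, 0));	/* 0: PF2 still loaded */
	printf("last? %d\n", clear_pf_load(0, 2));	/* 1: path is now empty */
	return 0;
}

In the driver the whole sequence runs under HW_LOCK_RESOURCE_RECOVERY_REG, since several functions on both paths update the same register.
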
4574 static bool bnx2x_get_load_status(struct bnx2x *bp, int engine) in bnx2x_get_load_status() argument
4580 u32 val = REG_RD(bp, BNX2X_RECOVERY_GLOB_REG); in bnx2x_get_load_status()
4592 static void _print_parity(struct bnx2x *bp, u32 reg) in _print_parity() argument
4594 pr_cont(" [0x%08x] ", REG_RD(bp, reg)); in _print_parity()
4602 static bool bnx2x_check_blocks_with_parity0(struct bnx2x *bp, u32 sig, in bnx2x_check_blocks_with_parity0() argument
4620 _print_parity(bp, in bnx2x_check_blocks_with_parity0()
4626 _print_parity(bp, PRS_REG_PRS_PRTY_STS); in bnx2x_check_blocks_with_parity0()
4630 _print_parity(bp, in bnx2x_check_blocks_with_parity0()
4636 _print_parity(bp, SRC_REG_SRC_PRTY_STS); in bnx2x_check_blocks_with_parity0()
4640 _print_parity(bp, TCM_REG_TCM_PRTY_STS); in bnx2x_check_blocks_with_parity0()
4645 _print_parity(bp, in bnx2x_check_blocks_with_parity0()
4647 _print_parity(bp, in bnx2x_check_blocks_with_parity0()
4652 _print_parity(bp, GRCBASE_XPB + in bnx2x_check_blocks_with_parity0()
4666 static bool bnx2x_check_blocks_with_parity1(struct bnx2x *bp, u32 sig, in bnx2x_check_blocks_with_parity1() argument
4684 _print_parity(bp, PBF_REG_PBF_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4690 _print_parity(bp, QM_REG_QM_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4696 _print_parity(bp, TM_REG_TM_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4702 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4709 _print_parity(bp, XCM_REG_XCM_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4716 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4718 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4726 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4733 if (CHIP_IS_E1x(bp)) { in bnx2x_check_blocks_with_parity1()
4734 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4737 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4739 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4754 _print_parity(bp, DBG_REG_DBG_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4760 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4767 _print_parity(bp, UCM_REG_UCM_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4774 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4776 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4783 _print_parity(bp, GRCBASE_UPB + in bnx2x_check_blocks_with_parity1()
4790 _print_parity(bp, in bnx2x_check_blocks_with_parity1()
4797 _print_parity(bp, CCM_REG_CCM_PRTY_STS); in bnx2x_check_blocks_with_parity1()
4810 static bool bnx2x_check_blocks_with_parity2(struct bnx2x *bp, u32 sig, in bnx2x_check_blocks_with_parity2() argument
4828 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4830 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4835 _print_parity(bp, PXP_REG_PXP_PRTY_STS); in bnx2x_check_blocks_with_parity2()
4836 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4838 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4847 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4852 _print_parity(bp, CDU_REG_CDU_PRTY_STS); in bnx2x_check_blocks_with_parity2()
4856 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4861 if (CHIP_IS_E1x(bp)) in bnx2x_check_blocks_with_parity2()
4862 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4865 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4870 _print_parity(bp, in bnx2x_check_blocks_with_parity2()
4884 static bool bnx2x_check_blocks_with_parity3(struct bnx2x *bp, u32 sig, in bnx2x_check_blocks_with_parity3() argument
4920 REG_WR(bp, MISC_REG_AEU_CLR_LATCH_SIGNAL, in bnx2x_check_blocks_with_parity3()
4933 static bool bnx2x_check_blocks_with_parity4(struct bnx2x *bp, u32 sig, in bnx2x_check_blocks_with_parity4() argument
4951 _print_parity(bp, in bnx2x_check_blocks_with_parity4()
4956 _print_parity(bp, in bnx2x_check_blocks_with_parity4()
4969 static bool bnx2x_parity_attn(struct bnx2x *bp, bool *global, bool print, in bnx2x_parity_attn() argument
4994 netdev_err(bp->dev, in bnx2x_parity_attn()
5000 res |= bnx2x_check_blocks_with_parity0(bp, in bnx2x_parity_attn()
5002 res |= bnx2x_check_blocks_with_parity1(bp, in bnx2x_parity_attn()
5004 res |= bnx2x_check_blocks_with_parity2(bp, in bnx2x_parity_attn()
5006 res |= bnx2x_check_blocks_with_parity3(bp, in bnx2x_parity_attn()
5008 res |= bnx2x_check_blocks_with_parity4(bp, in bnx2x_parity_attn()
5025 bool bnx2x_chk_parity_attn(struct bnx2x *bp, bool *global, bool print) in bnx2x_chk_parity_attn() argument
5028 int port = BP_PORT(bp); in bnx2x_chk_parity_attn()
5030 attn.sig[0] = REG_RD(bp, in bnx2x_chk_parity_attn()
5033 attn.sig[1] = REG_RD(bp, in bnx2x_chk_parity_attn()
5036 attn.sig[2] = REG_RD(bp, in bnx2x_chk_parity_attn()
5039 attn.sig[3] = REG_RD(bp, in bnx2x_chk_parity_attn()
5045 attn.sig[3] &= ((REG_RD(bp, in bnx2x_chk_parity_attn()
5051 if (!CHIP_IS_E1x(bp)) in bnx2x_chk_parity_attn()
5052 attn.sig[4] = REG_RD(bp, in bnx2x_chk_parity_attn()
5056 return bnx2x_parity_attn(bp, global, print, attn.sig); in bnx2x_chk_parity_attn()
5059 static void bnx2x_attn_int_deasserted4(struct bnx2x *bp, u32 attn) in bnx2x_attn_int_deasserted4() argument
5064 val = REG_RD(bp, PGLUE_B_REG_PGLUE_B_INT_STS_CLR); in bnx2x_attn_int_deasserted4()
5088 val = REG_RD(bp, ATC_REG_ATC_INT_STS_CLR); in bnx2x_attn_int_deasserted4()
5112 static void bnx2x_attn_int_deasserted(struct bnx2x *bp, u32 deasserted) in bnx2x_attn_int_deasserted() argument
5115 int port = BP_PORT(bp); in bnx2x_attn_int_deasserted()
5124 bnx2x_acquire_alr(bp); in bnx2x_attn_int_deasserted()
5126 if (bnx2x_chk_parity_attn(bp, &global, true)) { in bnx2x_attn_int_deasserted()
5128 bp->recovery_state = BNX2X_RECOVERY_INIT; in bnx2x_attn_int_deasserted()
5129 schedule_delayed_work(&bp->sp_rtnl_task, 0); in bnx2x_attn_int_deasserted()
5131 bnx2x_int_disable(bp); in bnx2x_attn_int_deasserted()
5138 bnx2x_release_alr(bp); in bnx2x_attn_int_deasserted()
5142 attn.sig[0] = REG_RD(bp, MISC_REG_AEU_AFTER_INVERT_1_FUNC_0 + port*4); in bnx2x_attn_int_deasserted()
5143 attn.sig[1] = REG_RD(bp, MISC_REG_AEU_AFTER_INVERT_2_FUNC_0 + port*4); in bnx2x_attn_int_deasserted()
5144 attn.sig[2] = REG_RD(bp, MISC_REG_AEU_AFTER_INVERT_3_FUNC_0 + port*4); in bnx2x_attn_int_deasserted()
5145 attn.sig[3] = REG_RD(bp, MISC_REG_AEU_AFTER_INVERT_4_FUNC_0 + port*4); in bnx2x_attn_int_deasserted()
5146 if (!CHIP_IS_E1x(bp)) in bnx2x_attn_int_deasserted()
5148 REG_RD(bp, MISC_REG_AEU_AFTER_INVERT_5_FUNC_0 + port*4); in bnx2x_attn_int_deasserted()
5157 group_mask = &bp->attn_group[index]; in bnx2x_attn_int_deasserted()
5165 bnx2x_attn_int_deasserted4(bp, in bnx2x_attn_int_deasserted()
5167 bnx2x_attn_int_deasserted3(bp, in bnx2x_attn_int_deasserted()
5169 bnx2x_attn_int_deasserted1(bp, in bnx2x_attn_int_deasserted()
5171 bnx2x_attn_int_deasserted2(bp, in bnx2x_attn_int_deasserted()
5173 bnx2x_attn_int_deasserted0(bp, in bnx2x_attn_int_deasserted()
5178 bnx2x_release_alr(bp); in bnx2x_attn_int_deasserted()
5180 if (bp->common.int_block == INT_BLOCK_HC) in bnx2x_attn_int_deasserted()
5188 (bp->common.int_block == INT_BLOCK_HC) ? "HC" : "IGU", reg_addr); in bnx2x_attn_int_deasserted()
5189 REG_WR(bp, reg_addr, val); in bnx2x_attn_int_deasserted()
5191 if (~bp->attn_state & deasserted) in bnx2x_attn_int_deasserted()
5197 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_PORT0_ATT_MASK + port); in bnx2x_attn_int_deasserted()
5198 aeu_mask = REG_RD(bp, reg_addr); in bnx2x_attn_int_deasserted()
5205 REG_WR(bp, reg_addr, aeu_mask); in bnx2x_attn_int_deasserted()
5206 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_PORT0_ATT_MASK + port); in bnx2x_attn_int_deasserted()
5208 DP(NETIF_MSG_HW, "attn_state %x\n", bp->attn_state); in bnx2x_attn_int_deasserted()
5209 bp->attn_state &= ~deasserted; in bnx2x_attn_int_deasserted()
5210 DP(NETIF_MSG_HW, "new state %x\n", bp->attn_state); in bnx2x_attn_int_deasserted()
5213 static void bnx2x_attn_int(struct bnx2x *bp) in bnx2x_attn_int() argument
5216 u32 attn_bits = le32_to_cpu(bp->def_status_blk->atten_status_block. in bnx2x_attn_int()
5218 u32 attn_ack = le32_to_cpu(bp->def_status_blk->atten_status_block. in bnx2x_attn_int()
5220 u32 attn_state = bp->attn_state; in bnx2x_attn_int()
5235 bnx2x_attn_int_asserted(bp, asserted); in bnx2x_attn_int()
5238 bnx2x_attn_int_deasserted(bp, deasserted); in bnx2x_attn_int()
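
bnx2x_attn_int() splits the work into an asserted and a deasserted handler based on three 32-bit words: the bits the hardware currently reports (attn_bits), the bits already acknowledged (attn_ack) and the software attn_state. The exact expressions are not part of this listing; the usual derivation is that a bit is newly asserted when it is reported but neither acked nor tracked yet, and newly deasserted in the mirror case. A tiny model:

#include <stdint.h>
#include <stdio.h>

static void split_attn(uint32_t attn_bits, uint32_t attn_ack,
		       uint32_t attn_state,
		       uint32_t *asserted, uint32_t *deasserted)
{
	/* Newly raised: reported now, not acked, not tracked yet. */
	*asserted   =  attn_bits & ~attn_ack & ~attn_state;
	/* Newly cleared: no longer reported, but still acked and tracked. */
	*deasserted = ~attn_bits &  attn_ack &  attn_state;
}

int main(void)
{
	uint32_t a, d;

	/* bit 2 just fired, bit 0 just settled */
	split_attn(0x4, 0x1, 0x1, &a, &d);
	printf("asserted=0x%x deasserted=0x%x\n", a, d);	/* 0x4 / 0x1 */
	return 0;
}
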
5241 void bnx2x_igu_ack_sb(struct bnx2x *bp, u8 igu_sb_id, u8 segment, in bnx2x_igu_ack_sb() argument
5244 u32 igu_addr = bp->igu_base_addr; in bnx2x_igu_ack_sb()
5246 bnx2x_igu_ack_sb_gen(bp, igu_sb_id, segment, index, op, update, in bnx2x_igu_ack_sb()
5250 static void bnx2x_update_eq_prod(struct bnx2x *bp, u16 prod) in bnx2x_update_eq_prod() argument
5253 storm_memset_eq_prod(bp, prod, BP_FUNC(bp)); in bnx2x_update_eq_prod()
5256 static int bnx2x_cnic_handle_cfc_del(struct bnx2x *bp, u32 cid, in bnx2x_cnic_handle_cfc_del() argument
5261 if (!bp->cnic_eth_dev.starting_cid || in bnx2x_cnic_handle_cfc_del()
5262 (cid < bp->cnic_eth_dev.starting_cid && in bnx2x_cnic_handle_cfc_del()
5263 cid != bp->cnic_eth_dev.iscsi_l2_cid)) in bnx2x_cnic_handle_cfc_del()
5272 bnx2x_panic_dump(bp, false); in bnx2x_cnic_handle_cfc_del()
5274 bnx2x_cnic_cfc_comp(bp, cid, err); in bnx2x_cnic_handle_cfc_del()
5278 static void bnx2x_handle_mcast_eqe(struct bnx2x *bp) in bnx2x_handle_mcast_eqe() argument
5285 rparam.mcast_obj = &bp->mcast_obj; in bnx2x_handle_mcast_eqe()
5287 netif_addr_lock_bh(bp->dev); in bnx2x_handle_mcast_eqe()
5290 bp->mcast_obj.raw.clear_pending(&bp->mcast_obj.raw); in bnx2x_handle_mcast_eqe()
5293 if (bp->mcast_obj.check_pending(&bp->mcast_obj)) { in bnx2x_handle_mcast_eqe()
5294 rc = bnx2x_config_mcast(bp, &rparam, BNX2X_MCAST_CMD_CONT); in bnx2x_handle_mcast_eqe()
5300 netif_addr_unlock_bh(bp->dev); in bnx2x_handle_mcast_eqe()
5303 static void bnx2x_handle_classification_eqe(struct bnx2x *bp, in bnx2x_handle_classification_eqe() argument
5318 if (CNIC_LOADED(bp) && (cid == BNX2X_ISCSI_ETH_CID(bp))) in bnx2x_handle_classification_eqe()
5319 vlan_mac_obj = &bp->iscsi_l2_mac_obj; in bnx2x_handle_classification_eqe()
5321 vlan_mac_obj = &bp->sp_objs[cid].mac_obj; in bnx2x_handle_classification_eqe()
5326 vlan_mac_obj = &bp->sp_objs[cid].vlan_obj; in bnx2x_handle_classification_eqe()
5333 bnx2x_handle_mcast_eqe(bp); in bnx2x_handle_classification_eqe()
5340 rc = vlan_mac_obj->complete(bp, vlan_mac_obj, elem, &ramrod_flags); in bnx2x_handle_classification_eqe()
5348 static void bnx2x_set_iscsi_eth_rx_mode(struct bnx2x *bp, bool start);
5350 static void bnx2x_handle_rx_mode_eqe(struct bnx2x *bp) in bnx2x_handle_rx_mode_eqe() argument
5352 netif_addr_lock_bh(bp->dev); in bnx2x_handle_rx_mode_eqe()
5354 clear_bit(BNX2X_FILTER_RX_MODE_PENDING, &bp->sp_state); in bnx2x_handle_rx_mode_eqe()
5357 if (test_and_clear_bit(BNX2X_FILTER_RX_MODE_SCHED, &bp->sp_state)) in bnx2x_handle_rx_mode_eqe()
5358 bnx2x_set_storm_rx_mode(bp); in bnx2x_handle_rx_mode_eqe()
5360 &bp->sp_state)) in bnx2x_handle_rx_mode_eqe()
5361 bnx2x_set_iscsi_eth_rx_mode(bp, true); in bnx2x_handle_rx_mode_eqe()
5363 &bp->sp_state)) in bnx2x_handle_rx_mode_eqe()
5364 bnx2x_set_iscsi_eth_rx_mode(bp, false); in bnx2x_handle_rx_mode_eqe()
5366 netif_addr_unlock_bh(bp->dev); in bnx2x_handle_rx_mode_eqe()
5369 static void bnx2x_after_afex_vif_lists(struct bnx2x *bp, in bnx2x_after_afex_vif_lists() argument
5376 bnx2x_fw_command(bp, DRV_MSG_CODE_AFEX_LISTGET_ACK, in bnx2x_after_afex_vif_lists()
5381 bnx2x_fw_command(bp, DRV_MSG_CODE_AFEX_LISTSET_ACK, 0); in bnx2x_after_afex_vif_lists()
5386 static void bnx2x_after_function_update(struct bnx2x *bp) in bnx2x_after_function_update() argument
5405 if (bp->afex_vlan_mode == FUNC_MF_CFG_AFEX_VLAN_ACCESS_MODE) { in bnx2x_after_function_update()
5410 (bp->afex_def_vlan_tag & VLAN_VID_MASK); in bnx2x_after_function_update()
5414 for_each_eth_queue(bp, q) { in bnx2x_after_function_update()
5416 fp = &bp->fp[q]; in bnx2x_after_function_update()
5417 queue_params.q_obj = &bnx2x_sp_obj(bp, fp).q_obj; in bnx2x_after_function_update()
5420 rc = bnx2x_queue_state_change(bp, &queue_params); in bnx2x_after_function_update()
5426 if (!NO_FCOE(bp) && CNIC_ENABLED(bp)) { in bnx2x_after_function_update()
5427 fp = &bp->fp[FCOE_IDX(bp)]; in bnx2x_after_function_update()
5428 queue_params.q_obj = &bnx2x_sp_obj(bp, fp).q_obj; in bnx2x_after_function_update()
5435 set_bit(BNX2X_AFEX_FCOE_Q_UPDATE_PENDING, &bp->sp_state); in bnx2x_after_function_update()
5439 rc = bnx2x_queue_state_change(bp, &queue_params); in bnx2x_after_function_update()
5445 bnx2x_link_report(bp); in bnx2x_after_function_update()
5446 bnx2x_fw_command(bp, DRV_MSG_CODE_AFEX_VIFSET_ACK, 0); in bnx2x_after_function_update()
5451 struct bnx2x *bp, u32 cid) in bnx2x_cid_to_q_obj() argument
5455 if (CNIC_LOADED(bp) && (cid == BNX2X_FCOE_ETH_CID(bp))) in bnx2x_cid_to_q_obj()
5456 return &bnx2x_fcoe_sp_obj(bp, q_obj); in bnx2x_cid_to_q_obj()
5458 return &bp->sp_objs[CID_TO_FP(cid, bp)].q_obj; in bnx2x_cid_to_q_obj()
5461 static void bnx2x_eq_int(struct bnx2x *bp) in bnx2x_eq_int() argument
5470 struct bnx2x_func_sp_obj *f_obj = &bp->func_obj; in bnx2x_eq_int()
5471 struct bnx2x_raw_obj *rss_raw = &bp->rss_conf_obj.raw; in bnx2x_eq_int()
5473 hw_cons = le16_to_cpu(*bp->eq_cons_sb); in bnx2x_eq_int()
5487 sw_cons = bp->eq_cons; in bnx2x_eq_int()
5488 sw_prod = bp->eq_prod; in bnx2x_eq_int()
5491 hw_cons, sw_cons, atomic_read(&bp->eq_spq_left)); in bnx2x_eq_int()
5496 elem = &bp->eq_ring[EQ_DESC(sw_cons)]; in bnx2x_eq_int()
5498 rc = bnx2x_iov_eq_sp_event(bp, elem); in bnx2x_eq_int()
5510 bnx2x_vf_mbx_schedule(bp, in bnx2x_eq_int()
5517 bp->stats_comp++); in bnx2x_eq_int()
5534 if (CNIC_LOADED(bp) && in bnx2x_eq_int()
5535 !bnx2x_cnic_handle_cfc_del(bp, cid, elem)) in bnx2x_eq_int()
5538 q_obj = bnx2x_cid_to_q_obj(bp, cid); in bnx2x_eq_int()
5540 if (q_obj->complete_cmd(bp, q_obj, BNX2X_Q_CMD_CFC_DEL)) in bnx2x_eq_int()
5547 bnx2x_dcbx_set_params(bp, BNX2X_DCBX_STATE_TX_PAUSED); in bnx2x_eq_int()
5548 if (f_obj->complete_cmd(bp, f_obj, in bnx2x_eq_int()
5555 bnx2x_dcbx_set_params(bp, BNX2X_DCBX_STATE_TX_RELEASED); in bnx2x_eq_int()
5556 if (f_obj->complete_cmd(bp, f_obj, in bnx2x_eq_int()
5567 bp, f_obj, BNX2X_F_CMD_SWITCH_UPDATE)) in bnx2x_eq_int()
5575 f_obj->complete_cmd(bp, f_obj, in bnx2x_eq_int()
5582 bnx2x_schedule_sp_rtnl(bp, cmd, 0); in bnx2x_eq_int()
5588 f_obj->complete_cmd(bp, f_obj, in bnx2x_eq_int()
5590 bnx2x_after_afex_vif_lists(bp, elem); in bnx2x_eq_int()
5595 if (f_obj->complete_cmd(bp, f_obj, BNX2X_F_CMD_START)) in bnx2x_eq_int()
5603 if (f_obj->complete_cmd(bp, f_obj, BNX2X_F_CMD_STOP)) in bnx2x_eq_int()
5611 if (f_obj->complete_cmd(bp, f_obj, in bnx2x_eq_int()
5617 switch (opcode | bp->state) { in bnx2x_eq_int()
5640 bnx2x_handle_classification_eqe(bp, elem); in bnx2x_eq_int()
5650 bnx2x_handle_mcast_eqe(bp); in bnx2x_eq_int()
5660 bnx2x_handle_rx_mode_eqe(bp); in bnx2x_eq_int()
5665 elem->message.opcode, bp->state); in bnx2x_eq_int()
5672 atomic_add(spqe_cnt, &bp->eq_spq_left); in bnx2x_eq_int()
5674 bp->eq_cons = sw_cons; in bnx2x_eq_int()
5675 bp->eq_prod = sw_prod; in bnx2x_eq_int()
5680 bnx2x_update_eq_prod(bp, bp->eq_prod); in bnx2x_eq_int()
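
The event-queue service loop in bnx2x_eq_int() is the consumer half of the slow-path ring: read the consumer index the hardware last reported, walk the software consumer up to it, dispatch each element by opcode, then hand the freed element credits back to eq_spq_left and publish the new consumer/producer. A compact single-threaded model of that drain, with illustrative opcodes and sizes:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define EQ_DESC_CNT 64			/* illustrative */

struct eq_elem { uint32_t opcode; uint32_t cid; };

struct eq {
	struct eq_elem ring[EQ_DESC_CNT];
	unsigned int sw_cons;		/* next element to service */
	atomic_int spq_left;		/* credits handed back to the post side */
};

static void eq_drain(struct eq *q, unsigned int hw_cons)
{
	unsigned int handled = 0;

	while (q->sw_cons != hw_cons) {
		struct eq_elem *e = &q->ring[q->sw_cons % EQ_DESC_CNT];

		printf("eq: opcode %u cid %u\n", e->opcode, e->cid);
		q->sw_cons++;
		handled++;
	}

	/* Every consumed element frees one slow-path credit. */
	atomic_fetch_add(&q->spq_left, handled);
	/* The driver would now write the new consumer back to the chip. */
}

int main(void)
{
	static struct eq q;

	q.ring[0] = (struct eq_elem){ .opcode = 1, .cid = 5 };
	q.ring[1] = (struct eq_elem){ .opcode = 2, .cid = 6 };
	eq_drain(&q, 2);
	printf("spq credits returned: %d\n", atomic_load(&q.spq_left));
	return 0;
}
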
5685 struct bnx2x *bp = container_of(work, struct bnx2x, sp_task.work); in bnx2x_sp_task() local
5691 if (atomic_read(&bp->interrupt_occurred)) { in bnx2x_sp_task()
5694 u16 status = bnx2x_update_dsb_idx(bp); in bnx2x_sp_task()
5698 atomic_set(&bp->interrupt_occurred, 0); in bnx2x_sp_task()
5702 bnx2x_attn_int(bp); in bnx2x_sp_task()
5708 struct bnx2x_fastpath *fp = bnx2x_fcoe_fp(bp); in bnx2x_sp_task()
5710 if (FCOE_INIT(bp) && in bnx2x_sp_task()
5716 napi_schedule(&bnx2x_fcoe(bp, napi)); in bnx2x_sp_task()
5721 bnx2x_eq_int(bp); in bnx2x_sp_task()
5722 bnx2x_ack_sb(bp, bp->igu_dsb_id, USTORM_ID, in bnx2x_sp_task()
5723 le16_to_cpu(bp->def_idx), IGU_INT_NOP, 1); in bnx2x_sp_task()
5734 bnx2x_ack_sb(bp, bp->igu_dsb_id, ATTENTION_ID, in bnx2x_sp_task()
5735 le16_to_cpu(bp->def_att_idx), IGU_INT_ENABLE, 1); in bnx2x_sp_task()
5740 &bp->sp_state)) { in bnx2x_sp_task()
5741 bnx2x_link_report(bp); in bnx2x_sp_task()
5742 bnx2x_fw_command(bp, DRV_MSG_CODE_AFEX_VIFSET_ACK, 0); in bnx2x_sp_task()
5749 struct bnx2x *bp = netdev_priv(dev); in bnx2x_msix_sp_int() local
5751 bnx2x_ack_sb(bp, bp->igu_dsb_id, USTORM_ID, 0, in bnx2x_msix_sp_int()
5755 if (unlikely(bp->panic)) in bnx2x_msix_sp_int()
5759 if (CNIC_LOADED(bp)) { in bnx2x_msix_sp_int()
5763 c_ops = rcu_dereference(bp->cnic_ops); in bnx2x_msix_sp_int()
5765 c_ops->cnic_handler(bp->cnic_data, NULL); in bnx2x_msix_sp_int()
5772 bnx2x_schedule_sp_task(bp); in bnx2x_msix_sp_int()
5779 void bnx2x_drv_pulse(struct bnx2x *bp) in bnx2x_drv_pulse() argument
5781 SHMEM_WR(bp, func_mb[BP_FW_MB_IDX(bp)].drv_pulse_mb, in bnx2x_drv_pulse()
5782 bp->fw_drv_pulse_wr_seq); in bnx2x_drv_pulse()
5787 struct bnx2x *bp = from_timer(bp, t, timer); in bnx2x_timer() local
5789 if (!netif_running(bp->dev)) in bnx2x_timer()
5792 if (IS_PF(bp) && in bnx2x_timer()
5793 !BP_NOMCP(bp)) { in bnx2x_timer()
5794 int mb_idx = BP_FW_MB_IDX(bp); in bnx2x_timer()
5798 ++bp->fw_drv_pulse_wr_seq; in bnx2x_timer()
5799 bp->fw_drv_pulse_wr_seq &= DRV_PULSE_SEQ_MASK; in bnx2x_timer()
5800 drv_pulse = bp->fw_drv_pulse_wr_seq; in bnx2x_timer()
5801 bnx2x_drv_pulse(bp); in bnx2x_timer()
5803 mcp_pulse = (SHMEM_RD(bp, func_mb[mb_idx].mcp_pulse_mb) & in bnx2x_timer()
5815 if (bp->state == BNX2X_STATE_OPEN) in bnx2x_timer()
5816 bnx2x_stats_handle(bp, STATS_EVENT_UPDATE); in bnx2x_timer()
5819 if (IS_VF(bp)) in bnx2x_timer()
5820 bnx2x_timer_sriov(bp); in bnx2x_timer()
5822 mod_timer(&bp->timer, jiffies + bp->current_interval); in bnx2x_timer()
5833 static void bnx2x_fill(struct bnx2x *bp, u32 addr, int fill, u32 len) in bnx2x_fill() argument
5838 REG_WR(bp, addr + i, fill); in bnx2x_fill()
5841 REG_WR8(bp, addr + i, fill); in bnx2x_fill()
5845 static void bnx2x_wr_fp_sb_data(struct bnx2x *bp, in bnx2x_wr_fp_sb_data() argument
5852 REG_WR(bp, BAR_CSTRORM_INTMEM + in bnx2x_wr_fp_sb_data()
5858 static void bnx2x_zero_fp_sb(struct bnx2x *bp, int fw_sb_id) in bnx2x_zero_fp_sb() argument
5866 if (!CHIP_IS_E1x(bp)) { in bnx2x_zero_fp_sb()
5880 bnx2x_wr_fp_sb_data(bp, fw_sb_id, sb_data_p, data_size); in bnx2x_zero_fp_sb()
5882 bnx2x_fill(bp, BAR_CSTRORM_INTMEM + in bnx2x_zero_fp_sb()
5885 bnx2x_fill(bp, BAR_CSTRORM_INTMEM + in bnx2x_zero_fp_sb()
5891 static void bnx2x_wr_sp_sb_data(struct bnx2x *bp, in bnx2x_wr_sp_sb_data() argument
5894 int func = BP_FUNC(bp); in bnx2x_wr_sp_sb_data()
5897 REG_WR(bp, BAR_CSTRORM_INTMEM + in bnx2x_wr_sp_sb_data()
5903 static void bnx2x_zero_sp_sb(struct bnx2x *bp) in bnx2x_zero_sp_sb() argument
5905 int func = BP_FUNC(bp); in bnx2x_zero_sp_sb()
5912 bnx2x_wr_sp_sb_data(bp, &sp_sb_data); in bnx2x_zero_sp_sb()
5914 bnx2x_fill(bp, BAR_CSTRORM_INTMEM + in bnx2x_zero_sp_sb()
5917 bnx2x_fill(bp, BAR_CSTRORM_INTMEM + in bnx2x_zero_sp_sb()
5960 void bnx2x_init_sb(struct bnx2x *bp, dma_addr_t mapping, int vfid, in bnx2x_init_sb() argument
5971 if (CHIP_INT_MODE_IS_BC(bp)) in bnx2x_init_sb()
5976 bnx2x_zero_fp_sb(bp, fw_sb_id); in bnx2x_init_sb()
5978 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_sb()
5981 sb_data_e2.common.p_func.pf_id = BP_FUNC(bp); in bnx2x_init_sb()
5984 sb_data_e2.common.p_func.vnic_id = BP_VN(bp); in bnx2x_init_sb()
5996 sb_data_e1x.common.p_func.pf_id = BP_FUNC(bp); in bnx2x_init_sb()
5999 sb_data_e1x.common.p_func.vnic_id = BP_VN(bp); in bnx2x_init_sb()
6017 bnx2x_wr_fp_sb_data(bp, fw_sb_id, sb_data_p, data_size); in bnx2x_init_sb()
6020 static void bnx2x_update_coalesce_sb(struct bnx2x *bp, u8 fw_sb_id, in bnx2x_update_coalesce_sb() argument
6023 bnx2x_update_coalesce_sb_index(bp, fw_sb_id, HC_INDEX_ETH_RX_CQ_CONS, in bnx2x_update_coalesce_sb()
6025 bnx2x_update_coalesce_sb_index(bp, fw_sb_id, in bnx2x_update_coalesce_sb()
6028 bnx2x_update_coalesce_sb_index(bp, fw_sb_id, in bnx2x_update_coalesce_sb()
6031 bnx2x_update_coalesce_sb_index(bp, fw_sb_id, in bnx2x_update_coalesce_sb()
6036 static void bnx2x_init_def_sb(struct bnx2x *bp) in bnx2x_init_def_sb() argument
6038 struct host_sp_status_block *def_sb = bp->def_status_blk; in bnx2x_init_def_sb()
6039 dma_addr_t mapping = bp->def_status_blk_mapping; in bnx2x_init_def_sb()
6042 int port = BP_PORT(bp); in bnx2x_init_def_sb()
6043 int func = BP_FUNC(bp); in bnx2x_init_def_sb()
6050 if (CHIP_INT_MODE_IS_BC(bp)) { in bnx2x_init_def_sb()
6054 igu_sp_sb_index = bp->igu_dsb_id; in bnx2x_init_def_sb()
6063 bp->attn_state = 0; in bnx2x_init_def_sb()
6073 bp->attn_group[index].sig[sindex] = in bnx2x_init_def_sb()
6074 REG_RD(bp, reg_offset + sindex*0x4 + 0x10*index); in bnx2x_init_def_sb()
6076 if (!CHIP_IS_E1x(bp)) in bnx2x_init_def_sb()
6082 bp->attn_group[index].sig[4] = REG_RD(bp, in bnx2x_init_def_sb()
6085 bp->attn_group[index].sig[4] = 0; in bnx2x_init_def_sb()
6088 if (bp->common.int_block == INT_BLOCK_HC) { in bnx2x_init_def_sb()
6092 REG_WR(bp, reg_offset, U64_LO(section)); in bnx2x_init_def_sb()
6093 REG_WR(bp, reg_offset + 4, U64_HI(section)); in bnx2x_init_def_sb()
6094 } else if (!CHIP_IS_E1x(bp)) { in bnx2x_init_def_sb()
6095 REG_WR(bp, IGU_REG_ATTN_MSG_ADDR_L, U64_LO(section)); in bnx2x_init_def_sb()
6096 REG_WR(bp, IGU_REG_ATTN_MSG_ADDR_H, U64_HI(section)); in bnx2x_init_def_sb()
6102 bnx2x_zero_sp_sb(bp); in bnx2x_init_def_sb()
6111 sp_sb_data.p_func.vnic_id = BP_VN(bp); in bnx2x_init_def_sb()
6114 bnx2x_wr_sp_sb_data(bp, &sp_sb_data); in bnx2x_init_def_sb()
6116 bnx2x_ack_sb(bp, bp->igu_dsb_id, USTORM_ID, 0, IGU_INT_ENABLE, 0); in bnx2x_init_def_sb()
6119 void bnx2x_update_coalesce(struct bnx2x *bp) in bnx2x_update_coalesce() argument
6123 for_each_eth_queue(bp, i) in bnx2x_update_coalesce()
6124 bnx2x_update_coalesce_sb(bp, bp->fp[i].fw_sb_id, in bnx2x_update_coalesce()
6125 bp->tx_ticks, bp->rx_ticks); in bnx2x_update_coalesce()
6128 static void bnx2x_init_sp_ring(struct bnx2x *bp) in bnx2x_init_sp_ring() argument
6130 spin_lock_init(&bp->spq_lock); in bnx2x_init_sp_ring()
6131 atomic_set(&bp->cq_spq_left, MAX_SPQ_PENDING); in bnx2x_init_sp_ring()
6133 bp->spq_prod_idx = 0; in bnx2x_init_sp_ring()
6134 bp->dsb_sp_prod = BNX2X_SP_DSB_INDEX; in bnx2x_init_sp_ring()
6135 bp->spq_prod_bd = bp->spq; in bnx2x_init_sp_ring()
6136 bp->spq_last_bd = bp->spq_prod_bd + MAX_SP_DESC_CNT; in bnx2x_init_sp_ring()
6139 static void bnx2x_init_eq_ring(struct bnx2x *bp) in bnx2x_init_eq_ring() argument
6144 &bp->eq_ring[EQ_DESC_CNT_PAGE * i - 1]; in bnx2x_init_eq_ring()
6147 cpu_to_le32(U64_HI(bp->eq_mapping + in bnx2x_init_eq_ring()
6150 cpu_to_le32(U64_LO(bp->eq_mapping + in bnx2x_init_eq_ring()
6153 bp->eq_cons = 0; in bnx2x_init_eq_ring()
6154 bp->eq_prod = NUM_EQ_DESC; in bnx2x_init_eq_ring()
6155 bp->eq_cons_sb = BNX2X_EQ_INDEX; in bnx2x_init_eq_ring()
6157 atomic_set(&bp->eq_spq_left, in bnx2x_init_eq_ring()
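
bnx2x_init_eq_ring() shows the standard multi-page ring setup: the last descriptor of each page is not an event but a link whose hi/lo halves hold the DMA address of the following page, and the last page links back to the first. A sketch of that chaining arithmetic with illustrative page geometry:

#include <stdint.h>
#include <stdio.h>

union elem {
	struct { uint32_t addr_hi, addr_lo; } next_page;	/* page link */
	struct { uint32_t opcode, cid; } event;			/* real entry */
};

#define NUM_PAGES	2
#define PAGE_BYTES	4096
#define ELEMS_PER_PAGE	(PAGE_BYTES / sizeof(union elem))	/* last slot is the link */

static union elem ring[NUM_PAGES * ELEMS_PER_PAGE];

/* Make the last element of each page point at the next page's DMA address. */
static void chain_pages(uint64_t ring_dma)
{
	for (unsigned int i = 1; i <= NUM_PAGES; i++) {
		union elem *link = &ring[ELEMS_PER_PAGE * i - 1];
		/* i % NUM_PAGES wraps the last page back onto the first. */
		uint64_t next = ring_dma + (uint64_t)PAGE_BYTES * (i % NUM_PAGES);

		link->next_page.addr_hi = (uint32_t)(next >> 32);
		link->next_page.addr_lo = (uint32_t)next;
	}
}

int main(void)
{
	chain_pages(0x12340000ULL);
	printf("page 0 link -> 0x%08x%08x\n",
	       ring[ELEMS_PER_PAGE - 1].next_page.addr_hi,
	       ring[ELEMS_PER_PAGE - 1].next_page.addr_lo);
	return 0;
}

Index arithmetic on such a ring has to skip the reserved link slots; in the driver that is hidden behind macros like EQ_DESC() used in bnx2x_eq_int() above.
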
6162 static int bnx2x_set_q_rx_mode(struct bnx2x *bp, u8 cl_id, in bnx2x_set_q_rx_mode() argument
6176 ramrod_param.rx_mode_obj = &bp->rx_mode_obj; in bnx2x_set_q_rx_mode()
6177 ramrod_param.func_id = BP_FUNC(bp); in bnx2x_set_q_rx_mode()
6179 ramrod_param.pstate = &bp->sp_state; in bnx2x_set_q_rx_mode()
6182 ramrod_param.rdata = bnx2x_sp(bp, rx_mode_rdata); in bnx2x_set_q_rx_mode()
6183 ramrod_param.rdata_mapping = bnx2x_sp_mapping(bp, rx_mode_rdata); in bnx2x_set_q_rx_mode()
6185 set_bit(BNX2X_FILTER_RX_MODE_PENDING, &bp->sp_state); in bnx2x_set_q_rx_mode()
6193 rc = bnx2x_config_rx_mode(bp, &ramrod_param); in bnx2x_set_q_rx_mode()
6195 BNX2X_ERR("Set rx_mode %d failed\n", bp->rx_mode); in bnx2x_set_q_rx_mode()
6202 static int bnx2x_fill_accept_flags(struct bnx2x *bp, u32 rx_mode, in bnx2x_fill_accept_flags() argument
6227 if (bp->accept_any_vlan) { in bnx2x_fill_accept_flags()
6243 if (bp->accept_any_vlan) { in bnx2x_fill_accept_flags()
6263 if (IS_MF_SI(bp)) in bnx2x_fill_accept_flags()
6281 static int bnx2x_set_storm_rx_mode(struct bnx2x *bp) in bnx2x_set_storm_rx_mode() argument
6287 if (!NO_FCOE(bp)) in bnx2x_set_storm_rx_mode()
6291 rc = bnx2x_fill_accept_flags(bp, bp->rx_mode, &rx_accept_flags, in bnx2x_set_storm_rx_mode()
6299 return bnx2x_set_q_rx_mode(bp, bp->fp->cl_id, rx_mode_flags, in bnx2x_set_storm_rx_mode()
6304 static void bnx2x_init_internal_common(struct bnx2x *bp) in bnx2x_init_internal_common() argument
6311 REG_WR(bp, BAR_USTRORM_INTMEM + in bnx2x_init_internal_common()
6313 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_internal_common()
6314 REG_WR8(bp, BAR_CSTRORM_INTMEM + CSTORM_IGU_MODE_OFFSET, in bnx2x_init_internal_common()
6315 CHIP_INT_MODE_IS_BC(bp) ? in bnx2x_init_internal_common()
6320 static void bnx2x_init_internal(struct bnx2x *bp, u32 load_code) in bnx2x_init_internal() argument
6325 bnx2x_init_internal_common(bp); in bnx2x_init_internal()
6345 return fp->bp->igu_base_sb + fp->index + CNIC_SUPPORT(fp->bp); in bnx2x_fp_igu_sb_id()
6350 return fp->bp->base_fw_ndsb + fp->index + CNIC_SUPPORT(fp->bp); in bnx2x_fp_fw_sb_id()
6355 if (CHIP_IS_E1x(fp->bp)) in bnx2x_fp_cl_id()
6356 return BP_L_ID(fp->bp) + fp->index; in bnx2x_fp_cl_id()
6361 static void bnx2x_init_eth_fp(struct bnx2x *bp, int fp_idx) in bnx2x_init_eth_fp() argument
6363 struct bnx2x_fastpath *fp = &bp->fp[fp_idx]; in bnx2x_init_eth_fp()
6389 bnx2x_init_txdata(bp, fp->txdata_ptr[cos], in bnx2x_init_eth_fp()
6390 CID_COS_TO_TX_ONLY_CID(fp->cid, cos, bp), in bnx2x_init_eth_fp()
6391 FP_COS_TO_TXQ(fp, cos, bp), in bnx2x_init_eth_fp()
6397 if (IS_VF(bp)) in bnx2x_init_eth_fp()
6400 bnx2x_init_sb(bp, fp->status_blk_mapping, BNX2X_VF_ID_INVALID, false, in bnx2x_init_eth_fp()
6403 bnx2x_init_queue_obj(bp, &bnx2x_sp_obj(bp, fp).q_obj, fp->cl_id, cids, in bnx2x_init_eth_fp()
6404 fp->max_cos, BP_FUNC(bp), bnx2x_sp(bp, q_rdata), in bnx2x_init_eth_fp()
6405 bnx2x_sp_mapping(bp, q_rdata), q_type); in bnx2x_init_eth_fp()
6414 fp_idx, bp, fp->status_blk.e2_sb, fp->cl_id, fp->fw_sb_id, in bnx2x_init_eth_fp()
6447 static void bnx2x_init_tx_rings_cnic(struct bnx2x *bp) in bnx2x_init_tx_rings_cnic() argument
6451 for_each_tx_queue_cnic(bp, i) in bnx2x_init_tx_rings_cnic()
6452 bnx2x_init_tx_ring_one(bp->fp[i].txdata_ptr[0]); in bnx2x_init_tx_rings_cnic()
6455 static void bnx2x_init_tx_rings(struct bnx2x *bp) in bnx2x_init_tx_rings() argument
6460 for_each_eth_queue(bp, i) in bnx2x_init_tx_rings()
6461 for_each_cos_in_tx_queue(&bp->fp[i], cos) in bnx2x_init_tx_rings()
6462 bnx2x_init_tx_ring_one(bp->fp[i].txdata_ptr[cos]); in bnx2x_init_tx_rings()
6465 static void bnx2x_init_fcoe_fp(struct bnx2x *bp) in bnx2x_init_fcoe_fp() argument
6467 struct bnx2x_fastpath *fp = bnx2x_fcoe_fp(bp); in bnx2x_init_fcoe_fp()
6470 bnx2x_fcoe(bp, rx_queue) = BNX2X_NUM_ETH_QUEUES(bp); in bnx2x_init_fcoe_fp()
6471 bnx2x_fcoe(bp, cl_id) = bnx2x_cnic_eth_cl_id(bp, in bnx2x_init_fcoe_fp()
6473 bnx2x_fcoe(bp, cid) = BNX2X_FCOE_ETH_CID(bp); in bnx2x_init_fcoe_fp()
6474 bnx2x_fcoe(bp, fw_sb_id) = DEF_SB_ID; in bnx2x_init_fcoe_fp()
6475 bnx2x_fcoe(bp, igu_sb_id) = bp->igu_dsb_id; in bnx2x_init_fcoe_fp()
6476 bnx2x_fcoe(bp, rx_cons_sb) = BNX2X_FCOE_L2_RX_INDEX; in bnx2x_init_fcoe_fp()
6477 bnx2x_init_txdata(bp, bnx2x_fcoe(bp, txdata_ptr[0]), in bnx2x_init_fcoe_fp()
6478 fp->cid, FCOE_TXQ_IDX(bp), BNX2X_FCOE_L2_TX_INDEX, in bnx2x_init_fcoe_fp()
6484 bnx2x_fcoe(bp, cl_qzone_id) = bnx2x_fp_qzone_id(fp); in bnx2x_init_fcoe_fp()
6486 bnx2x_fcoe(bp, ustorm_rx_prods_offset) = in bnx2x_init_fcoe_fp()
6496 bnx2x_init_queue_obj(bp, &bnx2x_sp_obj(bp, fp).q_obj, fp->cl_id, in bnx2x_init_fcoe_fp()
6497 &fp->cid, 1, BP_FUNC(bp), bnx2x_sp(bp, q_rdata), in bnx2x_init_fcoe_fp()
6498 bnx2x_sp_mapping(bp, q_rdata), q_type); in bnx2x_init_fcoe_fp()
6502 fp->index, bp, fp->status_blk.e2_sb, fp->cl_id, fp->fw_sb_id, in bnx2x_init_fcoe_fp()
6506 void bnx2x_nic_init_cnic(struct bnx2x *bp) in bnx2x_nic_init_cnic() argument
6508 if (!NO_FCOE(bp)) in bnx2x_nic_init_cnic()
6509 bnx2x_init_fcoe_fp(bp); in bnx2x_nic_init_cnic()
6511 bnx2x_init_sb(bp, bp->cnic_sb_mapping, in bnx2x_nic_init_cnic()
6513 bnx2x_cnic_fw_sb_id(bp), bnx2x_cnic_igu_sb_id(bp)); in bnx2x_nic_init_cnic()
6517 bnx2x_init_rx_rings_cnic(bp); in bnx2x_nic_init_cnic()
6518 bnx2x_init_tx_rings_cnic(bp); in bnx2x_nic_init_cnic()
6524 void bnx2x_pre_irq_nic_init(struct bnx2x *bp) in bnx2x_pre_irq_nic_init() argument
6529 for_each_eth_queue(bp, i) in bnx2x_pre_irq_nic_init()
6530 bnx2x_init_eth_fp(bp, i); in bnx2x_pre_irq_nic_init()
6534 bnx2x_init_rx_rings(bp); in bnx2x_pre_irq_nic_init()
6535 bnx2x_init_tx_rings(bp); in bnx2x_pre_irq_nic_init()
6537 if (IS_PF(bp)) { in bnx2x_pre_irq_nic_init()
6539 bnx2x_init_mod_abs_int(bp, &bp->link_vars, bp->common.chip_id, in bnx2x_pre_irq_nic_init()
6540 bp->common.shmem_base, in bnx2x_pre_irq_nic_init()
6541 bp->common.shmem2_base, BP_PORT(bp)); in bnx2x_pre_irq_nic_init()
6544 bnx2x_init_def_sb(bp); in bnx2x_pre_irq_nic_init()
6545 bnx2x_update_dsb_idx(bp); in bnx2x_pre_irq_nic_init()
6546 bnx2x_init_sp_ring(bp); in bnx2x_pre_irq_nic_init()
6548 bnx2x_memset_stats(bp); in bnx2x_pre_irq_nic_init()
6552 void bnx2x_post_irq_nic_init(struct bnx2x *bp, u32 load_code) in bnx2x_post_irq_nic_init() argument
6554 bnx2x_init_eq_ring(bp); in bnx2x_post_irq_nic_init()
6555 bnx2x_init_internal(bp, load_code); in bnx2x_post_irq_nic_init()
6556 bnx2x_pf_init(bp); in bnx2x_post_irq_nic_init()
6557 bnx2x_stats_init(bp); in bnx2x_post_irq_nic_init()
6562 bnx2x_int_enable(bp); in bnx2x_post_irq_nic_init()
6565 bnx2x_attn_int_deasserted0(bp, in bnx2x_post_irq_nic_init()
6566 REG_RD(bp, MISC_REG_AEU_AFTER_INVERT_1_FUNC_0 + BP_PORT(bp)*4) & in bnx2x_post_irq_nic_init()
6571 static int bnx2x_gunzip_init(struct bnx2x *bp) in bnx2x_gunzip_init() argument
6573 bp->gunzip_buf = dma_alloc_coherent(&bp->pdev->dev, FW_BUF_SIZE, in bnx2x_gunzip_init()
6574 &bp->gunzip_mapping, GFP_KERNEL); in bnx2x_gunzip_init()
6575 if (bp->gunzip_buf == NULL) in bnx2x_gunzip_init()
6578 bp->strm = kmalloc(sizeof(*bp->strm), GFP_KERNEL); in bnx2x_gunzip_init()
6579 if (bp->strm == NULL) in bnx2x_gunzip_init()
6582 bp->strm->workspace = vmalloc(zlib_inflate_workspacesize()); in bnx2x_gunzip_init()
6583 if (bp->strm->workspace == NULL) in bnx2x_gunzip_init()
6589 kfree(bp->strm); in bnx2x_gunzip_init()
6590 bp->strm = NULL; in bnx2x_gunzip_init()
6593 dma_free_coherent(&bp->pdev->dev, FW_BUF_SIZE, bp->gunzip_buf, in bnx2x_gunzip_init()
6594 bp->gunzip_mapping); in bnx2x_gunzip_init()
6595 bp->gunzip_buf = NULL; in bnx2x_gunzip_init()
6602 static void bnx2x_gunzip_end(struct bnx2x *bp) in bnx2x_gunzip_end() argument
6604 if (bp->strm) { in bnx2x_gunzip_end()
6605 vfree(bp->strm->workspace); in bnx2x_gunzip_end()
6606 kfree(bp->strm); in bnx2x_gunzip_end()
6607 bp->strm = NULL; in bnx2x_gunzip_end()
6610 if (bp->gunzip_buf) { in bnx2x_gunzip_end()
6611 dma_free_coherent(&bp->pdev->dev, FW_BUF_SIZE, bp->gunzip_buf, in bnx2x_gunzip_end()
6612 bp->gunzip_mapping); in bnx2x_gunzip_end()
6613 bp->gunzip_buf = NULL; in bnx2x_gunzip_end()
6617 static int bnx2x_gunzip(struct bnx2x *bp, const u8 *zbuf, int len) in bnx2x_gunzip() argument
6634 bp->strm->next_in = (typeof(bp->strm->next_in))zbuf + n; in bnx2x_gunzip()
6635 bp->strm->avail_in = len - n; in bnx2x_gunzip()
6636 bp->strm->next_out = bp->gunzip_buf; in bnx2x_gunzip()
6637 bp->strm->avail_out = FW_BUF_SIZE; in bnx2x_gunzip()
6639 rc = zlib_inflateInit2(bp->strm, -MAX_WBITS); in bnx2x_gunzip()
6643 rc = zlib_inflate(bp->strm, Z_FINISH); in bnx2x_gunzip()
6645 netdev_err(bp->dev, "Firmware decompression error: %s\n", in bnx2x_gunzip()
6646 bp->strm->msg); in bnx2x_gunzip()
6648 bp->gunzip_outlen = (FW_BUF_SIZE - bp->strm->avail_out); in bnx2x_gunzip()
6649 if (bp->gunzip_outlen & 0x3) in bnx2x_gunzip()
6650 netdev_err(bp->dev, in bnx2x_gunzip()
6652 bp->gunzip_outlen); in bnx2x_gunzip()
6653 bp->gunzip_outlen >>= 2; in bnx2x_gunzip()
6655 zlib_inflateEnd(bp->strm); in bnx2x_gunzip()
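
bnx2x_gunzip() advances past the gzip header (the n offset above) and then runs the kernel zlib in raw-deflate mode, which is what the negative window-bits argument to zlib_inflateInit2() selects, before checking that the output length is a whole number of 32-bit words. A userspace analog using ordinary zlib; the buffer size and helper names are illustrative, and the example round-trips a small string so it is runnable on its own.

#include <stdio.h>
#include <string.h>
#include <zlib.h>

#define OUT_BUF_SIZE (64 * 1024)	/* illustrative, the driver uses FW_BUF_SIZE */

static unsigned char out_buf[OUT_BUF_SIZE];

/* Inflate a raw-deflate buffer (gzip header already stripped) into out_buf. */
static int raw_inflate(const unsigned char *in, unsigned int len,
		       unsigned long *out_len)
{
	z_stream strm;
	int rc;

	memset(&strm, 0, sizeof(strm));
	strm.next_in = (unsigned char *)in;
	strm.avail_in = len;
	strm.next_out = out_buf;
	strm.avail_out = sizeof(out_buf);

	/* Negative windowBits selects raw deflate, just like -MAX_WBITS above. */
	rc = inflateInit2(&strm, -MAX_WBITS);
	if (rc != Z_OK)
		return rc;

	rc = inflate(&strm, Z_FINISH);
	if (rc != Z_STREAM_END)
		fprintf(stderr, "decompression error: %s\n",
			strm.msg ? strm.msg : "unknown");

	*out_len = strm.total_out;
	inflateEnd(&strm);
	return rc == Z_STREAM_END ? 0 : -1;
}

int main(void)
{
	static const char msg[] = "hello, raw deflate";
	unsigned char comp[128];
	unsigned long out_len = 0;
	z_stream d;

	/* Produce a raw-deflate buffer to feed the helper above. */
	memset(&d, 0, sizeof(d));
	deflateInit2(&d, Z_DEFAULT_COMPRESSION, Z_DEFLATED, -MAX_WBITS,
		     8, Z_DEFAULT_STRATEGY);
	d.next_in = (unsigned char *)msg;
	d.avail_in = sizeof(msg);
	d.next_out = comp;
	d.avail_out = sizeof(comp);
	deflate(&d, Z_FINISH);
	deflateEnd(&d);

	if (raw_inflate(comp, sizeof(comp) - d.avail_out, &out_len) == 0)
		printf("inflated %lu bytes: %s\n", out_len, out_buf);
	return 0;
}

Link with -lz.
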
6670 static void bnx2x_lb_pckt(struct bnx2x *bp) in bnx2x_lb_pckt() argument
6678 REG_WR_DMAE(bp, NIG_REG_DEBUG_PACKET_LB, wb_write, 3); in bnx2x_lb_pckt()
6684 REG_WR_DMAE(bp, NIG_REG_DEBUG_PACKET_LB, wb_write, 3); in bnx2x_lb_pckt()
6691 static int bnx2x_int_mem_test(struct bnx2x *bp) in bnx2x_int_mem_test() argument
6697 if (CHIP_REV_IS_FPGA(bp)) in bnx2x_int_mem_test()
6699 else if (CHIP_REV_IS_EMUL(bp)) in bnx2x_int_mem_test()
6705 REG_WR(bp, TSDM_REG_ENABLE_IN1, 0x0); in bnx2x_int_mem_test()
6706 REG_WR(bp, TCM_REG_PRS_IFEN, 0x0); in bnx2x_int_mem_test()
6707 REG_WR(bp, CFC_REG_DEBUG0, 0x1); in bnx2x_int_mem_test()
6708 REG_WR(bp, NIG_REG_PRS_REQ_IN_EN, 0x0); in bnx2x_int_mem_test()
6711 REG_WR(bp, PRS_REG_CFC_SEARCH_INITIAL_CREDIT, 0x0); in bnx2x_int_mem_test()
6714 bnx2x_lb_pckt(bp); in bnx2x_int_mem_test()
6721 bnx2x_read_dmae(bp, NIG_REG_STAT2_BRB_OCTET, 2); in bnx2x_int_mem_test()
6722 val = *bnx2x_sp(bp, wb_data[0]); in bnx2x_int_mem_test()
6737 val = REG_RD(bp, PRS_REG_NUM_OF_PACKETS); in bnx2x_int_mem_test()
6750 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_CLEAR, 0x03); in bnx2x_int_mem_test()
6752 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_SET, 0x03); in bnx2x_int_mem_test()
6754 bnx2x_init_block(bp, BLOCK_BRB1, PHASE_COMMON); in bnx2x_int_mem_test()
6755 bnx2x_init_block(bp, BLOCK_PRS, PHASE_COMMON); in bnx2x_int_mem_test()
6760 REG_WR(bp, TSDM_REG_ENABLE_IN1, 0x0); in bnx2x_int_mem_test()
6761 REG_WR(bp, TCM_REG_PRS_IFEN, 0x0); in bnx2x_int_mem_test()
6762 REG_WR(bp, CFC_REG_DEBUG0, 0x1); in bnx2x_int_mem_test()
6763 REG_WR(bp, NIG_REG_PRS_REQ_IN_EN, 0x0); in bnx2x_int_mem_test()
6766 REG_WR(bp, PRS_REG_CFC_SEARCH_INITIAL_CREDIT, 0x0); in bnx2x_int_mem_test()
6770 bnx2x_lb_pckt(bp); in bnx2x_int_mem_test()
6777 bnx2x_read_dmae(bp, NIG_REG_STAT2_BRB_OCTET, 2); in bnx2x_int_mem_test()
6778 val = *bnx2x_sp(bp, wb_data[0]); in bnx2x_int_mem_test()
6791 val = REG_RD(bp, PRS_REG_NUM_OF_PACKETS); in bnx2x_int_mem_test()
6796 REG_WR(bp, PRS_REG_CFC_SEARCH_INITIAL_CREDIT, 0x1); in bnx2x_int_mem_test()
6801 val = REG_RD(bp, PRS_REG_NUM_OF_PACKETS); in bnx2x_int_mem_test()
6807 REG_RD(bp, NIG_REG_INGRESS_EOP_LB_FIFO); in bnx2x_int_mem_test()
6808 val = REG_RD(bp, NIG_REG_INGRESS_EOP_LB_EMPTY); in bnx2x_int_mem_test()
6815 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_CLEAR, 0x03); in bnx2x_int_mem_test()
6817 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_SET, 0x03); in bnx2x_int_mem_test()
6819 bnx2x_init_block(bp, BLOCK_BRB1, PHASE_COMMON); in bnx2x_int_mem_test()
6820 bnx2x_init_block(bp, BLOCK_PRS, PHASE_COMMON); in bnx2x_int_mem_test()
6821 if (!CNIC_SUPPORT(bp)) in bnx2x_int_mem_test()
6823 REG_WR(bp, PRS_REG_NIC_MODE, 1); in bnx2x_int_mem_test()
6826 REG_WR(bp, TSDM_REG_ENABLE_IN1, 0x7fffffff); in bnx2x_int_mem_test()
6827 REG_WR(bp, TCM_REG_PRS_IFEN, 0x1); in bnx2x_int_mem_test()
6828 REG_WR(bp, CFC_REG_DEBUG0, 0x0); in bnx2x_int_mem_test()
6829 REG_WR(bp, NIG_REG_PRS_REQ_IN_EN, 0x1); in bnx2x_int_mem_test()
6836 static void bnx2x_enable_blocks_attention(struct bnx2x *bp) in bnx2x_enable_blocks_attention() argument
6840 REG_WR(bp, PXP_REG_PXP_INT_MASK_0, 0); in bnx2x_enable_blocks_attention()
6841 if (!CHIP_IS_E1x(bp)) in bnx2x_enable_blocks_attention()
6842 REG_WR(bp, PXP_REG_PXP_INT_MASK_1, 0x40); in bnx2x_enable_blocks_attention()
6844 REG_WR(bp, PXP_REG_PXP_INT_MASK_1, 0); in bnx2x_enable_blocks_attention()
6845 REG_WR(bp, DORQ_REG_DORQ_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6846 REG_WR(bp, CFC_REG_CFC_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6853 REG_WR(bp, BRB1_REG_BRB1_INT_MASK, 0xFC00); in bnx2x_enable_blocks_attention()
6854 REG_WR(bp, QM_REG_QM_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6855 REG_WR(bp, TM_REG_TM_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6856 REG_WR(bp, XSDM_REG_XSDM_INT_MASK_0, 0); in bnx2x_enable_blocks_attention()
6857 REG_WR(bp, XSDM_REG_XSDM_INT_MASK_1, 0); in bnx2x_enable_blocks_attention()
6858 REG_WR(bp, XCM_REG_XCM_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6861 REG_WR(bp, USDM_REG_USDM_INT_MASK_0, 0); in bnx2x_enable_blocks_attention()
6862 REG_WR(bp, USDM_REG_USDM_INT_MASK_1, 0); in bnx2x_enable_blocks_attention()
6863 REG_WR(bp, UCM_REG_UCM_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6866 REG_WR(bp, GRCBASE_UPB + PB_REG_PB_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6867 REG_WR(bp, CSDM_REG_CSDM_INT_MASK_0, 0); in bnx2x_enable_blocks_attention()
6868 REG_WR(bp, CSDM_REG_CSDM_INT_MASK_1, 0); in bnx2x_enable_blocks_attention()
6869 REG_WR(bp, CCM_REG_CCM_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6876 if (!CHIP_IS_E1x(bp)) in bnx2x_enable_blocks_attention()
6879 REG_WR(bp, PXP2_REG_PXP2_INT_MASK_0, val); in bnx2x_enable_blocks_attention()
6881 REG_WR(bp, TSDM_REG_TSDM_INT_MASK_0, 0); in bnx2x_enable_blocks_attention()
6882 REG_WR(bp, TSDM_REG_TSDM_INT_MASK_1, 0); in bnx2x_enable_blocks_attention()
6883 REG_WR(bp, TCM_REG_TCM_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6886 if (!CHIP_IS_E1x(bp)) in bnx2x_enable_blocks_attention()
6888 REG_WR(bp, TSEM_REG_TSEM_INT_MASK_1, 0x07ff); in bnx2x_enable_blocks_attention()
6890 REG_WR(bp, CDU_REG_CDU_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6891 REG_WR(bp, DMAE_REG_DMAE_INT_MASK, 0); in bnx2x_enable_blocks_attention()
6893 REG_WR(bp, PBF_REG_PBF_INT_MASK, 0x18); /* bit 3,4 masked */ in bnx2x_enable_blocks_attention()
6896 static void bnx2x_reset_common(struct bnx2x *bp) in bnx2x_reset_common() argument
6901 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_CLEAR, in bnx2x_reset_common()
6904 if (CHIP_IS_E3(bp)) { in bnx2x_reset_common()
6909 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_2_CLEAR, val); in bnx2x_reset_common()
6912 static void bnx2x_setup_dmae(struct bnx2x *bp) in bnx2x_setup_dmae() argument
6914 bp->dmae_ready = 0; in bnx2x_setup_dmae()
6915 spin_lock_init(&bp->dmae_lock); in bnx2x_setup_dmae()
6918 static void bnx2x_init_pxp(struct bnx2x *bp) in bnx2x_init_pxp() argument
6923 pcie_capability_read_word(bp->pdev, PCI_EXP_DEVCTL, &devctl); in bnx2x_init_pxp()
6926 if (bp->mrrs == -1) in bnx2x_init_pxp()
6929 DP(NETIF_MSG_HW, "force read order to %d\n", bp->mrrs); in bnx2x_init_pxp()
6930 r_order = bp->mrrs; in bnx2x_init_pxp()
6933 bnx2x_init_pxp_arb(bp, r_order, w_order); in bnx2x_init_pxp()
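
Before programming the PXP arbiter, bnx2x_init_pxp() reads the PCIe Device Control register to learn the negotiated maximum payload size and maximum read request size (with bp->mrrs able to override the latter, as shown above). Both fields are encoded by the PCIe spec as 128 bytes shifted left by the field value; the standalone decoder below takes the register value directly instead of reading config space, and is a sketch rather than the driver's computation.

#include <stdint.h>
#include <stdio.h>

#define PCI_EXP_DEVCTL_PAYLOAD 0x00e0	/* Max_Payload_Size, bits 7:5 */
#define PCI_EXP_DEVCTL_READRQ  0x7000	/* Max_Read_Request_Size, bits 14:12 */

static unsigned int devctl_payload_bytes(uint16_t devctl)
{
	return 128u << ((devctl & PCI_EXP_DEVCTL_PAYLOAD) >> 5);
}

static unsigned int devctl_readrq_bytes(uint16_t devctl)
{
	return 128u << ((devctl & PCI_EXP_DEVCTL_READRQ) >> 12);
}

int main(void)
{
	uint16_t devctl = 0x2020;	/* example: 256B payload, 512B read request */

	printf("max payload:      %u bytes\n", devctl_payload_bytes(devctl));
	printf("max read request: %u bytes\n", devctl_readrq_bytes(devctl));
	return 0;
}
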
6936 static void bnx2x_setup_fan_failure_detection(struct bnx2x *bp) in bnx2x_setup_fan_failure_detection() argument
6942 if (BP_NOMCP(bp)) in bnx2x_setup_fan_failure_detection()
6946 val = SHMEM_RD(bp, dev_info.shared_hw_config.config2) & in bnx2x_setup_fan_failure_detection()
6961 bp, in bnx2x_setup_fan_failure_detection()
6962 bp->common.shmem_base, in bnx2x_setup_fan_failure_detection()
6963 bp->common.shmem2_base, in bnx2x_setup_fan_failure_detection()
6973 bnx2x_set_spio(bp, MISC_SPIO_SPIO5, MISC_SPIO_INPUT_HI_Z); in bnx2x_setup_fan_failure_detection()
6976 val = REG_RD(bp, MISC_REG_SPIO_INT); in bnx2x_setup_fan_failure_detection()
6978 REG_WR(bp, MISC_REG_SPIO_INT, val); in bnx2x_setup_fan_failure_detection()
6981 val = REG_RD(bp, MISC_REG_SPIO_EVENT_EN); in bnx2x_setup_fan_failure_detection()
6983 REG_WR(bp, MISC_REG_SPIO_EVENT_EN, val); in bnx2x_setup_fan_failure_detection()
6986 void bnx2x_pf_disable(struct bnx2x *bp) in bnx2x_pf_disable() argument
6988 u32 val = REG_RD(bp, IGU_REG_PF_CONFIGURATION); in bnx2x_pf_disable()
6991 REG_WR(bp, IGU_REG_PF_CONFIGURATION, val); in bnx2x_pf_disable()
6992 REG_WR(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_MASTER, 0); in bnx2x_pf_disable()
6993 REG_WR(bp, CFC_REG_WEAK_ENABLE_PF, 0); in bnx2x_pf_disable()
6996 static void bnx2x__common_init_phy(struct bnx2x *bp) in bnx2x__common_init_phy() argument
7000 if (SHMEM2_RD(bp, size) > in bnx2x__common_init_phy()
7001 (u32)offsetof(struct shmem2_region, lfa_host_addr[BP_PORT(bp)])) in bnx2x__common_init_phy()
7003 shmem_base[0] = bp->common.shmem_base; in bnx2x__common_init_phy()
7004 shmem2_base[0] = bp->common.shmem2_base; in bnx2x__common_init_phy()
7005 if (!CHIP_IS_E1x(bp)) { in bnx2x__common_init_phy()
7007 SHMEM2_RD(bp, other_shmem_base_addr); in bnx2x__common_init_phy()
7009 SHMEM2_RD(bp, other_shmem2_base_addr); in bnx2x__common_init_phy()
7011 bnx2x_acquire_phy_lock(bp); in bnx2x__common_init_phy()
7012 bnx2x_common_init_phy(bp, shmem_base, shmem2_base, in bnx2x__common_init_phy()
7013 bp->common.chip_id); in bnx2x__common_init_phy()
7014 bnx2x_release_phy_lock(bp); in bnx2x__common_init_phy()
7017 static void bnx2x_config_endianity(struct bnx2x *bp, u32 val) in bnx2x_config_endianity() argument
7019 REG_WR(bp, PXP2_REG_RQ_QM_ENDIAN_M, val); in bnx2x_config_endianity()
7020 REG_WR(bp, PXP2_REG_RQ_TM_ENDIAN_M, val); in bnx2x_config_endianity()
7021 REG_WR(bp, PXP2_REG_RQ_SRC_ENDIAN_M, val); in bnx2x_config_endianity()
7022 REG_WR(bp, PXP2_REG_RQ_CDU_ENDIAN_M, val); in bnx2x_config_endianity()
7023 REG_WR(bp, PXP2_REG_RQ_DBG_ENDIAN_M, val); in bnx2x_config_endianity()
7026 REG_WR(bp, PXP2_REG_RQ_HC_ENDIAN_M, 0); in bnx2x_config_endianity()
7028 REG_WR(bp, PXP2_REG_RD_QM_SWAP_MODE, val); in bnx2x_config_endianity()
7029 REG_WR(bp, PXP2_REG_RD_TM_SWAP_MODE, val); in bnx2x_config_endianity()
7030 REG_WR(bp, PXP2_REG_RD_SRC_SWAP_MODE, val); in bnx2x_config_endianity()
7031 REG_WR(bp, PXP2_REG_RD_CDURD_SWAP_MODE, val); in bnx2x_config_endianity()
7034 static void bnx2x_set_endianity(struct bnx2x *bp) in bnx2x_set_endianity() argument
7037 bnx2x_config_endianity(bp, 1); in bnx2x_set_endianity()
7039 bnx2x_config_endianity(bp, 0); in bnx2x_set_endianity()
7043 static void bnx2x_reset_endianity(struct bnx2x *bp) in bnx2x_reset_endianity() argument
7045 bnx2x_config_endianity(bp, 0); in bnx2x_reset_endianity()
7053 static int bnx2x_init_hw_common(struct bnx2x *bp) in bnx2x_init_hw_common() argument
7057 DP(NETIF_MSG_HW, "starting common init func %d\n", BP_ABS_FUNC(bp)); in bnx2x_init_hw_common()
7063 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RESET); in bnx2x_init_hw_common()
7065 bnx2x_reset_common(bp); in bnx2x_init_hw_common()
7066 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_SET, 0xffffffff); in bnx2x_init_hw_common()
7069 if (CHIP_IS_E3(bp)) { in bnx2x_init_hw_common()
7073 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_2_SET, val); in bnx2x_init_hw_common()
7075 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RESET); in bnx2x_init_hw_common()
7077 bnx2x_init_block(bp, BLOCK_MISC, PHASE_COMMON); in bnx2x_init_hw_common()
7079 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_common()
7089 for (abs_func_id = BP_PATH(bp); in bnx2x_init_hw_common()
7091 if (abs_func_id == BP_ABS_FUNC(bp)) { in bnx2x_init_hw_common()
7092 REG_WR(bp, in bnx2x_init_hw_common()
7098 bnx2x_pretend_func(bp, abs_func_id); in bnx2x_init_hw_common()
7100 bnx2x_pf_disable(bp); in bnx2x_init_hw_common()
7101 bnx2x_pretend_func(bp, BP_ABS_FUNC(bp)); in bnx2x_init_hw_common()
7105 bnx2x_init_block(bp, BLOCK_PXP, PHASE_COMMON); in bnx2x_init_hw_common()
7106 if (CHIP_IS_E1(bp)) { in bnx2x_init_hw_common()
7109 REG_WR(bp, PXP_REG_PXP_INT_MASK_0, 0); in bnx2x_init_hw_common()
7112 bnx2x_init_block(bp, BLOCK_PXP2, PHASE_COMMON); in bnx2x_init_hw_common()
7113 bnx2x_init_pxp(bp); in bnx2x_init_hw_common()
7114 bnx2x_set_endianity(bp); in bnx2x_init_hw_common()
7115 bnx2x_ilt_init_page_size(bp, INITOP_SET); in bnx2x_init_hw_common()
7117 if (CHIP_REV_IS_FPGA(bp) && CHIP_IS_E1H(bp)) in bnx2x_init_hw_common()
7118 REG_WR(bp, PXP2_REG_PGL_TAGS_LIMIT, 0x1); in bnx2x_init_hw_common()
7123 val = REG_RD(bp, PXP2_REG_RQ_CFG_DONE); in bnx2x_init_hw_common()
7128 val = REG_RD(bp, PXP2_REG_RD_INIT_DONE); in bnx2x_init_hw_common()
7139 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_common()
7223 bnx2x_pretend_func(bp, (BP_PATH(bp) + 6)); in bnx2x_init_hw_common()
7224 bnx2x_ilt_client_init_op_ilt(bp, &ilt, &ilt_cli, INITOP_CLEAR); in bnx2x_init_hw_common()
7225 bnx2x_pretend_func(bp, BP_ABS_FUNC(bp)); in bnx2x_init_hw_common()
7227 REG_WR(bp, PXP2_REG_RQ_DRAM_ALIGN, BNX2X_PXP_DRAM_ALIGN); in bnx2x_init_hw_common()
7228 REG_WR(bp, PXP2_REG_RQ_DRAM_ALIGN_RD, BNX2X_PXP_DRAM_ALIGN); in bnx2x_init_hw_common()
7229 REG_WR(bp, PXP2_REG_RQ_DRAM_ALIGN_SEL, 1); in bnx2x_init_hw_common()
7232 REG_WR(bp, PXP2_REG_RQ_DISABLE_INPUTS, 0); in bnx2x_init_hw_common()
7233 REG_WR(bp, PXP2_REG_RD_DISABLE_INPUTS, 0); in bnx2x_init_hw_common()
7235 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_common()
7236 int factor = CHIP_REV_IS_EMUL(bp) ? 1000 : in bnx2x_init_hw_common()
7237 (CHIP_REV_IS_FPGA(bp) ? 400 : 0); in bnx2x_init_hw_common()
7238 bnx2x_init_block(bp, BLOCK_PGLUE_B, PHASE_COMMON); in bnx2x_init_hw_common()
7240 bnx2x_init_block(bp, BLOCK_ATC, PHASE_COMMON); in bnx2x_init_hw_common()
7245 val = REG_RD(bp, ATC_REG_ATC_INIT_DONE); in bnx2x_init_hw_common()
7254 bnx2x_init_block(bp, BLOCK_DMAE, PHASE_COMMON); in bnx2x_init_hw_common()
7256 bnx2x_iov_init_dmae(bp); in bnx2x_init_hw_common()
7259 bp->dmae_ready = 1; in bnx2x_init_hw_common()
7260 bnx2x_init_fill(bp, TSEM_REG_PRAM, 0, 8, 1); in bnx2x_init_hw_common()
7262 bnx2x_init_block(bp, BLOCK_TCM, PHASE_COMMON); in bnx2x_init_hw_common()
7264 bnx2x_init_block(bp, BLOCK_UCM, PHASE_COMMON); in bnx2x_init_hw_common()
7266 bnx2x_init_block(bp, BLOCK_CCM, PHASE_COMMON); in bnx2x_init_hw_common()
7268 bnx2x_init_block(bp, BLOCK_XCM, PHASE_COMMON); in bnx2x_init_hw_common()
7270 bnx2x_read_dmae(bp, XSEM_REG_PASSIVE_BUFFER, 3); in bnx2x_init_hw_common()
7271 bnx2x_read_dmae(bp, CSEM_REG_PASSIVE_BUFFER, 3); in bnx2x_init_hw_common()
7272 bnx2x_read_dmae(bp, TSEM_REG_PASSIVE_BUFFER, 3); in bnx2x_init_hw_common()
7273 bnx2x_read_dmae(bp, USEM_REG_PASSIVE_BUFFER, 3); in bnx2x_init_hw_common()
7275 bnx2x_init_block(bp, BLOCK_QM, PHASE_COMMON); in bnx2x_init_hw_common()
7278 bnx2x_qm_init_ptr_table(bp, bp->qm_cid_count, INITOP_SET); in bnx2x_init_hw_common()
7281 REG_WR(bp, QM_REG_SOFT_RESET, 1); in bnx2x_init_hw_common()
7282 REG_WR(bp, QM_REG_SOFT_RESET, 0); in bnx2x_init_hw_common()
7284 if (CNIC_SUPPORT(bp)) in bnx2x_init_hw_common()
7285 bnx2x_init_block(bp, BLOCK_TM, PHASE_COMMON); in bnx2x_init_hw_common()
7287 bnx2x_init_block(bp, BLOCK_DORQ, PHASE_COMMON); in bnx2x_init_hw_common()
7289 if (!CHIP_REV_IS_SLOW(bp)) in bnx2x_init_hw_common()
7291 REG_WR(bp, DORQ_REG_DORQ_INT_MASK, 0); in bnx2x_init_hw_common()
7293 bnx2x_init_block(bp, BLOCK_BRB1, PHASE_COMMON); in bnx2x_init_hw_common()
7295 bnx2x_init_block(bp, BLOCK_PRS, PHASE_COMMON); in bnx2x_init_hw_common()
7296 REG_WR(bp, PRS_REG_A_PRSU_20, 0xf); in bnx2x_init_hw_common()
7298 if (!CHIP_IS_E1(bp)) in bnx2x_init_hw_common()
7299 REG_WR(bp, PRS_REG_E1HOV_MODE, bp->path_has_ovlan); in bnx2x_init_hw_common()
7301 if (!CHIP_IS_E1x(bp) && !CHIP_IS_E3B0(bp)) { in bnx2x_init_hw_common()
7302 if (IS_MF_AFEX(bp)) { in bnx2x_init_hw_common()
7306 REG_WR(bp, PRS_REG_HDRS_AFTER_BASIC, 0xE); in bnx2x_init_hw_common()
7307 REG_WR(bp, PRS_REG_MUST_HAVE_HDRS, 0xA); in bnx2x_init_hw_common()
7308 REG_WR(bp, PRS_REG_HDRS_AFTER_TAG_0, 0x6); in bnx2x_init_hw_common()
7309 REG_WR(bp, PRS_REG_TAG_ETHERTYPE_0, 0x8926); in bnx2x_init_hw_common()
7310 REG_WR(bp, PRS_REG_TAG_LEN_0, 0x4); in bnx2x_init_hw_common()
7315 REG_WR(bp, PRS_REG_HDRS_AFTER_BASIC, in bnx2x_init_hw_common()
7316 bp->path_has_ovlan ? 7 : 6); in bnx2x_init_hw_common()
7320 bnx2x_init_block(bp, BLOCK_TSDM, PHASE_COMMON); in bnx2x_init_hw_common()
7321 bnx2x_init_block(bp, BLOCK_CSDM, PHASE_COMMON); in bnx2x_init_hw_common()
7322 bnx2x_init_block(bp, BLOCK_USDM, PHASE_COMMON); in bnx2x_init_hw_common()
7323 bnx2x_init_block(bp, BLOCK_XSDM, PHASE_COMMON); in bnx2x_init_hw_common()
7325 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_common()
7327 REG_WR(bp, TSEM_REG_FAST_MEMORY + VFC_REG_MEMORIES_RST, in bnx2x_init_hw_common()
7330 REG_WR(bp, XSEM_REG_FAST_MEMORY + VFC_REG_MEMORIES_RST, in bnx2x_init_hw_common()
7337 bnx2x_init_block(bp, BLOCK_TSEM, PHASE_COMMON); in bnx2x_init_hw_common()
7338 bnx2x_init_block(bp, BLOCK_USEM, PHASE_COMMON); in bnx2x_init_hw_common()
7339 bnx2x_init_block(bp, BLOCK_CSEM, PHASE_COMMON); in bnx2x_init_hw_common()
7340 bnx2x_init_block(bp, BLOCK_XSEM, PHASE_COMMON); in bnx2x_init_hw_common()
7343 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_CLEAR, in bnx2x_init_hw_common()
7345 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_SET, in bnx2x_init_hw_common()
7348 bnx2x_init_block(bp, BLOCK_UPB, PHASE_COMMON); in bnx2x_init_hw_common()
7349 bnx2x_init_block(bp, BLOCK_XPB, PHASE_COMMON); in bnx2x_init_hw_common()
7350 bnx2x_init_block(bp, BLOCK_PBF, PHASE_COMMON); in bnx2x_init_hw_common()
7352 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_common()
7353 if (IS_MF_AFEX(bp)) { in bnx2x_init_hw_common()
7357 REG_WR(bp, PBF_REG_HDRS_AFTER_BASIC, 0xE); in bnx2x_init_hw_common()
7358 REG_WR(bp, PBF_REG_MUST_HAVE_HDRS, 0xA); in bnx2x_init_hw_common()
7359 REG_WR(bp, PBF_REG_HDRS_AFTER_TAG_0, 0x6); in bnx2x_init_hw_common()
7360 REG_WR(bp, PBF_REG_TAG_ETHERTYPE_0, 0x8926); in bnx2x_init_hw_common()
7361 REG_WR(bp, PBF_REG_TAG_LEN_0, 0x4); in bnx2x_init_hw_common()
7363 REG_WR(bp, PBF_REG_HDRS_AFTER_BASIC, in bnx2x_init_hw_common()
7364 bp->path_has_ovlan ? 7 : 6); in bnx2x_init_hw_common()
7368 REG_WR(bp, SRC_REG_SOFT_RST, 1); in bnx2x_init_hw_common()
7370 bnx2x_init_block(bp, BLOCK_SRC, PHASE_COMMON); in bnx2x_init_hw_common()
7372 if (CNIC_SUPPORT(bp)) { in bnx2x_init_hw_common()
7373 REG_WR(bp, SRC_REG_KEYSEARCH_0, 0x63285672); in bnx2x_init_hw_common()
7374 REG_WR(bp, SRC_REG_KEYSEARCH_1, 0x24b8f2cc); in bnx2x_init_hw_common()
7375 REG_WR(bp, SRC_REG_KEYSEARCH_2, 0x223aef9b); in bnx2x_init_hw_common()
7376 REG_WR(bp, SRC_REG_KEYSEARCH_3, 0x26001e3a); in bnx2x_init_hw_common()
7377 REG_WR(bp, SRC_REG_KEYSEARCH_4, 0x7ae91116); in bnx2x_init_hw_common()
7378 REG_WR(bp, SRC_REG_KEYSEARCH_5, 0x5ce5230b); in bnx2x_init_hw_common()
7379 REG_WR(bp, SRC_REG_KEYSEARCH_6, 0x298d8adf); in bnx2x_init_hw_common()
7380 REG_WR(bp, SRC_REG_KEYSEARCH_7, 0x6eb0ff09); in bnx2x_init_hw_common()
7381 REG_WR(bp, SRC_REG_KEYSEARCH_8, 0x1830f82f); in bnx2x_init_hw_common()
7382 REG_WR(bp, SRC_REG_KEYSEARCH_9, 0x01e46be7); in bnx2x_init_hw_common()
7384 REG_WR(bp, SRC_REG_SOFT_RST, 0); in bnx2x_init_hw_common()
7388 dev_alert(&bp->pdev->dev, in bnx2x_init_hw_common()
7392 bnx2x_init_block(bp, BLOCK_CDU, PHASE_COMMON); in bnx2x_init_hw_common()
7394 REG_WR(bp, CDU_REG_CDU_GLOBAL_PARAMS, val); in bnx2x_init_hw_common()
7396 bnx2x_init_block(bp, BLOCK_CFC, PHASE_COMMON); in bnx2x_init_hw_common()
7397 REG_WR(bp, CFC_REG_INIT_REG, 0x7FF); in bnx2x_init_hw_common()
7399 REG_WR(bp, CFC_REG_CFC_INT_MASK, 0); in bnx2x_init_hw_common()
7402 REG_WR(bp, CFC_REG_DEBUG0, 0x20020000); in bnx2x_init_hw_common()
7404 bnx2x_init_block(bp, BLOCK_HC, PHASE_COMMON); in bnx2x_init_hw_common()
7406 if (!CHIP_IS_E1x(bp) && BP_NOMCP(bp)) in bnx2x_init_hw_common()
7407 REG_WR(bp, IGU_REG_RESET_MEMORIES, 0x36); in bnx2x_init_hw_common()
7409 bnx2x_init_block(bp, BLOCK_IGU, PHASE_COMMON); in bnx2x_init_hw_common()
7410 bnx2x_init_block(bp, BLOCK_MISC_AEU, PHASE_COMMON); in bnx2x_init_hw_common()
7413 REG_WR(bp, 0x2814, 0xffffffff); in bnx2x_init_hw_common()
7414 REG_WR(bp, 0x3820, 0xffffffff); in bnx2x_init_hw_common()
7416 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_common()
7417 REG_WR(bp, PCICFG_OFFSET + PXPCS_TL_CONTROL_5, in bnx2x_init_hw_common()
7420 REG_WR(bp, PCICFG_OFFSET + PXPCS_TL_FUNC345_STAT, in bnx2x_init_hw_common()
7424 REG_WR(bp, PCICFG_OFFSET + PXPCS_TL_FUNC678_STAT, in bnx2x_init_hw_common()
7430 bnx2x_init_block(bp, BLOCK_NIG, PHASE_COMMON); in bnx2x_init_hw_common()
7431 if (!CHIP_IS_E1(bp)) { in bnx2x_init_hw_common()
7433 if (!CHIP_IS_E3(bp)) in bnx2x_init_hw_common()
7434 REG_WR(bp, NIG_REG_LLH_MF_MODE, IS_MF(bp)); in bnx2x_init_hw_common()
7436 if (CHIP_IS_E1H(bp)) in bnx2x_init_hw_common()
7438 REG_WR(bp, NIG_REG_LLH_E1HOV_MODE, IS_MF_SD(bp)); in bnx2x_init_hw_common()
7440 if (CHIP_REV_IS_SLOW(bp)) in bnx2x_init_hw_common()
7444 val = reg_poll(bp, CFC_REG_LL_INIT_DONE, 1, 100, 10); in bnx2x_init_hw_common()
7449 val = reg_poll(bp, CFC_REG_AC_INIT_DONE, 1, 100, 10); in bnx2x_init_hw_common()
7454 val = reg_poll(bp, CFC_REG_CAM_INIT_DONE, 1, 100, 10); in bnx2x_init_hw_common()
7459 REG_WR(bp, CFC_REG_DEBUG0, 0); in bnx2x_init_hw_common()
7461 if (CHIP_IS_E1(bp)) { in bnx2x_init_hw_common()
7464 bnx2x_read_dmae(bp, NIG_REG_STAT2_BRB_OCTET, 2); in bnx2x_init_hw_common()
7465 val = *bnx2x_sp(bp, wb_data[0]); in bnx2x_init_hw_common()
7468 if ((val == 0) && bnx2x_int_mem_test(bp)) { in bnx2x_init_hw_common()
7474 bnx2x_setup_fan_failure_detection(bp); in bnx2x_init_hw_common()
7477 REG_RD(bp, PXP2_REG_PXP2_INT_STS_CLR_0); in bnx2x_init_hw_common()
7479 bnx2x_enable_blocks_attention(bp); in bnx2x_init_hw_common()
7480 bnx2x_enable_blocks_parity(bp); in bnx2x_init_hw_common()
7482 if (!BP_NOMCP(bp)) { in bnx2x_init_hw_common()
7483 if (CHIP_IS_E1x(bp)) in bnx2x_init_hw_common()
7484 bnx2x__common_init_phy(bp); in bnx2x_init_hw_common()
7488 if (SHMEM2_HAS(bp, netproc_fw_ver)) in bnx2x_init_hw_common()
7489 SHMEM2_WR(bp, netproc_fw_ver, REG_RD(bp, XSEM_REG_PRAM)); in bnx2x_init_hw_common()
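
The ten SRC_REG_KEYSEARCH_* writes above program the searcher hash key while the block is held in soft reset (SRC_REG_SOFT_RST is raised before and dropped after). Below is a minimal user-space sketch of that assert-reset / program / release pattern; reg_wr(), the offsets, and the fake register file are illustrative stand-ins, not the driver's REG_WR or real register map — only the key words are taken from the listing.

/* Sketch of "program under soft reset": assert reset, write the ten
 * 32-bit key words to consecutive registers, release reset. */
#include <stdint.h>
#include <stdio.h>

#define SRC_SOFT_RST   0x00u   /* hypothetical offsets */
#define SRC_KEYSEARCH0 0x04u

static uint32_t regs[64];      /* fake register file */

static void reg_wr(uint32_t off, uint32_t val)
{
	regs[off / 4] = val;
	printf("WR 0x%02x <- 0x%08x\n", off, val);
}

static void src_program_key(const uint32_t key[10])
{
	int i;

	reg_wr(SRC_SOFT_RST, 1);                 /* hold block in reset */
	for (i = 0; i < 10; i++)
		reg_wr(SRC_KEYSEARCH0 + i * 4, key[i]);
	reg_wr(SRC_SOFT_RST, 0);                 /* release reset */
}

int main(void)
{
	/* same T1 hash key words as in the listing above */
	static const uint32_t key[10] = {
		0x63285672, 0x24b8f2cc, 0x223aef9b, 0x26001e3a, 0x7ae91116,
		0x5ce5230b, 0x298d8adf, 0x6eb0ff09, 0x1830f82f, 0x01e46be7,
	};

	src_program_key(key);
	return 0;
}
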
7499 static int bnx2x_init_hw_common_chip(struct bnx2x *bp) in bnx2x_init_hw_common_chip() argument
7501 int rc = bnx2x_init_hw_common(bp); in bnx2x_init_hw_common_chip()
7507 if (!BP_NOMCP(bp)) in bnx2x_init_hw_common_chip()
7508 bnx2x__common_init_phy(bp); in bnx2x_init_hw_common_chip()
7513 static int bnx2x_init_hw_port(struct bnx2x *bp) in bnx2x_init_hw_port() argument
7515 int port = BP_PORT(bp); in bnx2x_init_hw_port()
7522 REG_WR(bp, NIG_REG_MASK_INTERRUPT_PORT0 + port*4, 0); in bnx2x_init_hw_port()
7524 bnx2x_init_block(bp, BLOCK_MISC, init_phase); in bnx2x_init_hw_port()
7525 bnx2x_init_block(bp, BLOCK_PXP, init_phase); in bnx2x_init_hw_port()
7526 bnx2x_init_block(bp, BLOCK_PXP2, init_phase); in bnx2x_init_hw_port()
7533 if (!CHIP_IS_E1x(bp)) in bnx2x_init_hw_port()
7534 REG_WR(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_MASTER, 1); in bnx2x_init_hw_port()
7536 bnx2x_init_block(bp, BLOCK_ATC, init_phase); in bnx2x_init_hw_port()
7537 bnx2x_init_block(bp, BLOCK_DMAE, init_phase); in bnx2x_init_hw_port()
7538 bnx2x_init_block(bp, BLOCK_PGLUE_B, init_phase); in bnx2x_init_hw_port()
7539 bnx2x_init_block(bp, BLOCK_QM, init_phase); in bnx2x_init_hw_port()
7541 bnx2x_init_block(bp, BLOCK_TCM, init_phase); in bnx2x_init_hw_port()
7542 bnx2x_init_block(bp, BLOCK_UCM, init_phase); in bnx2x_init_hw_port()
7543 bnx2x_init_block(bp, BLOCK_CCM, init_phase); in bnx2x_init_hw_port()
7544 bnx2x_init_block(bp, BLOCK_XCM, init_phase); in bnx2x_init_hw_port()
7547 bnx2x_qm_init_cid_count(bp, bp->qm_cid_count, INITOP_SET); in bnx2x_init_hw_port()
7549 if (CNIC_SUPPORT(bp)) { in bnx2x_init_hw_port()
7550 bnx2x_init_block(bp, BLOCK_TM, init_phase); in bnx2x_init_hw_port()
7551 REG_WR(bp, TM_REG_LIN0_SCAN_TIME + port*4, 20); in bnx2x_init_hw_port()
7552 REG_WR(bp, TM_REG_LIN0_MAX_ACTIVE_CID + port*4, 31); in bnx2x_init_hw_port()
7555 bnx2x_init_block(bp, BLOCK_DORQ, init_phase); in bnx2x_init_hw_port()
7557 bnx2x_init_block(bp, BLOCK_BRB1, init_phase); in bnx2x_init_hw_port()
7559 if (CHIP_IS_E1(bp) || CHIP_IS_E1H(bp)) { in bnx2x_init_hw_port()
7561 if (IS_MF(bp)) in bnx2x_init_hw_port()
7562 low = ((bp->flags & ONE_PORT_FLAG) ? 160 : 246); in bnx2x_init_hw_port()
7563 else if (bp->dev->mtu > 4096) { in bnx2x_init_hw_port()
7564 if (bp->flags & ONE_PORT_FLAG) in bnx2x_init_hw_port()
7567 val = bp->dev->mtu; in bnx2x_init_hw_port()
7573 low = ((bp->flags & ONE_PORT_FLAG) ? 80 : 160); in bnx2x_init_hw_port()
7575 REG_WR(bp, BRB1_REG_PAUSE_LOW_THRESHOLD_0 + port*4, low); in bnx2x_init_hw_port()
7576 REG_WR(bp, BRB1_REG_PAUSE_HIGH_THRESHOLD_0 + port*4, high); in bnx2x_init_hw_port()
7579 if (CHIP_MODE_IS_4_PORT(bp)) in bnx2x_init_hw_port()
7580 REG_WR(bp, (BP_PORT(bp) ? in bnx2x_init_hw_port()
7584 bnx2x_init_block(bp, BLOCK_PRS, init_phase); in bnx2x_init_hw_port()
7585 if (CHIP_IS_E3B0(bp)) { in bnx2x_init_hw_port()
7586 if (IS_MF_AFEX(bp)) { in bnx2x_init_hw_port()
7588 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7591 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7594 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7602 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7605 (bp->path_has_ovlan ? 7 : 6)); in bnx2x_init_hw_port()
7609 bnx2x_init_block(bp, BLOCK_TSDM, init_phase); in bnx2x_init_hw_port()
7610 bnx2x_init_block(bp, BLOCK_CSDM, init_phase); in bnx2x_init_hw_port()
7611 bnx2x_init_block(bp, BLOCK_USDM, init_phase); in bnx2x_init_hw_port()
7612 bnx2x_init_block(bp, BLOCK_XSDM, init_phase); in bnx2x_init_hw_port()
7614 bnx2x_init_block(bp, BLOCK_TSEM, init_phase); in bnx2x_init_hw_port()
7615 bnx2x_init_block(bp, BLOCK_USEM, init_phase); in bnx2x_init_hw_port()
7616 bnx2x_init_block(bp, BLOCK_CSEM, init_phase); in bnx2x_init_hw_port()
7617 bnx2x_init_block(bp, BLOCK_XSEM, init_phase); in bnx2x_init_hw_port()
7619 bnx2x_init_block(bp, BLOCK_UPB, init_phase); in bnx2x_init_hw_port()
7620 bnx2x_init_block(bp, BLOCK_XPB, init_phase); in bnx2x_init_hw_port()
7622 bnx2x_init_block(bp, BLOCK_PBF, init_phase); in bnx2x_init_hw_port()
7624 if (CHIP_IS_E1x(bp)) { in bnx2x_init_hw_port()
7626 REG_WR(bp, PBF_REG_P0_PAUSE_ENABLE + port*4, 0); in bnx2x_init_hw_port()
7629 REG_WR(bp, PBF_REG_P0_ARB_THRSH + port*4, (9040/16)); in bnx2x_init_hw_port()
7631 REG_WR(bp, PBF_REG_P0_INIT_CRD + port*4, (9040/16) + 553 - 22); in bnx2x_init_hw_port()
7634 REG_WR(bp, PBF_REG_INIT_P0 + port*4, 1); in bnx2x_init_hw_port()
7636 REG_WR(bp, PBF_REG_INIT_P0 + port*4, 0); in bnx2x_init_hw_port()
7639 if (CNIC_SUPPORT(bp)) in bnx2x_init_hw_port()
7640 bnx2x_init_block(bp, BLOCK_SRC, init_phase); in bnx2x_init_hw_port()
7642 bnx2x_init_block(bp, BLOCK_CDU, init_phase); in bnx2x_init_hw_port()
7643 bnx2x_init_block(bp, BLOCK_CFC, init_phase); in bnx2x_init_hw_port()
7645 if (CHIP_IS_E1(bp)) { in bnx2x_init_hw_port()
7646 REG_WR(bp, HC_REG_LEADING_EDGE_0 + port*8, 0); in bnx2x_init_hw_port()
7647 REG_WR(bp, HC_REG_TRAILING_EDGE_0 + port*8, 0); in bnx2x_init_hw_port()
7649 bnx2x_init_block(bp, BLOCK_HC, init_phase); in bnx2x_init_hw_port()
7651 bnx2x_init_block(bp, BLOCK_IGU, init_phase); in bnx2x_init_hw_port()
7653 bnx2x_init_block(bp, BLOCK_MISC_AEU, init_phase); in bnx2x_init_hw_port()
7658 val = IS_MF(bp) ? 0xF7 : 0x7; in bnx2x_init_hw_port()
7660 val |= CHIP_IS_E1(bp) ? 0 : 0x10; in bnx2x_init_hw_port()
7661 REG_WR(bp, MISC_REG_AEU_MASK_ATTN_FUNC_0 + port*4, val); in bnx2x_init_hw_port()
7665 REG_WR(bp, reg, in bnx2x_init_hw_port()
7666 REG_RD(bp, reg) & in bnx2x_init_hw_port()
7670 REG_WR(bp, reg, in bnx2x_init_hw_port()
7671 REG_RD(bp, reg) & in bnx2x_init_hw_port()
7674 bnx2x_init_block(bp, BLOCK_NIG, init_phase); in bnx2x_init_hw_port()
7676 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_port()
7680 if (IS_MF_AFEX(bp)) in bnx2x_init_hw_port()
7681 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7685 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7688 IS_MF_SD(bp) ? 7 : 6); in bnx2x_init_hw_port()
7690 if (CHIP_IS_E3(bp)) in bnx2x_init_hw_port()
7691 REG_WR(bp, BP_PORT(bp) ? in bnx2x_init_hw_port()
7693 NIG_REG_LLH_MF_MODE, IS_MF(bp)); in bnx2x_init_hw_port()
7695 if (!CHIP_IS_E3(bp)) in bnx2x_init_hw_port()
7696 REG_WR(bp, NIG_REG_XGXS_SERDES0_MODE_SEL + port*4, 1); in bnx2x_init_hw_port()
7698 if (!CHIP_IS_E1(bp)) { in bnx2x_init_hw_port()
7700 REG_WR(bp, NIG_REG_LLH0_BRB1_DRV_MASK_MF + port*4, in bnx2x_init_hw_port()
7701 (IS_MF_SD(bp) ? 0x1 : 0x2)); in bnx2x_init_hw_port()
7703 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_port()
7705 switch (bp->mf_mode) { in bnx2x_init_hw_port()
7715 REG_WR(bp, (BP_PORT(bp) ? NIG_REG_LLH1_CLS_TYPE : in bnx2x_init_hw_port()
7719 REG_WR(bp, NIG_REG_LLFC_ENABLE_0 + port*4, 0); in bnx2x_init_hw_port()
7720 REG_WR(bp, NIG_REG_LLFC_OUT_EN_0 + port*4, 0); in bnx2x_init_hw_port()
7721 REG_WR(bp, NIG_REG_PAUSE_ENABLE_0 + port*4, 1); in bnx2x_init_hw_port()
7726 val = REG_RD(bp, MISC_REG_SPIO_EVENT_EN); in bnx2x_init_hw_port()
7730 val = REG_RD(bp, reg_addr); in bnx2x_init_hw_port()
7732 REG_WR(bp, reg_addr, val); in bnx2x_init_hw_port()
7735 if (CHIP_IS_E3B0(bp)) in bnx2x_init_hw_port()
7736 bp->flags |= PTP_SUPPORTED; in bnx2x_init_hw_port()
7741 static void bnx2x_ilt_wr(struct bnx2x *bp, u32 index, dma_addr_t addr) in bnx2x_ilt_wr() argument
7746 if (CHIP_IS_E1(bp)) in bnx2x_ilt_wr()
7753 REG_WR_DMAE(bp, reg, wb_write, 2); in bnx2x_ilt_wr()
7756 void bnx2x_igu_clear_sb_gen(struct bnx2x *bp, u8 func, u8 idu_sb_id, bool is_pf) in bnx2x_igu_clear_sb_gen() argument
7767 if (CHIP_INT_MODE_IS_BC(bp)) in bnx2x_igu_clear_sb_gen()
7781 REG_WR(bp, igu_addr_data, data); in bnx2x_igu_clear_sb_gen()
7785 REG_WR(bp, igu_addr_ctl, ctl); in bnx2x_igu_clear_sb_gen()
7789 while (!(REG_RD(bp, igu_addr_ack) & sb_bit) && --cnt) in bnx2x_igu_clear_sb_gen()
7792 if (!(REG_RD(bp, igu_addr_ack) & sb_bit)) { in bnx2x_igu_clear_sb_gen()
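
bnx2x_igu_clear_sb_gen() above finishes by busy-waiting on an ack bit with a bounded retry count — the same poll-until-set-or-timeout idiom that reg_poll() applies to the CFC *_INIT_DONE registers earlier in bnx2x_init_hw_common(). A self-contained sketch of that idiom, assuming a stubbed register read in place of REG_RD():

/* Generic poll-until-bit-set-with-timeout, modelled on the ack wait above. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

static uint32_t fake_reg;

static uint32_t reg_rd(void)
{
	/* pretend the hardware sets the bit after a few reads */
	static int reads;

	if (++reads >= 3)
		fake_reg |= 0x1;
	return fake_reg;
}

/* Return true if 'bit' became set within 'count' polls of 'delay_us' each. */
static bool poll_bit(uint32_t bit, unsigned int delay_us, unsigned int count)
{
	while (!(reg_rd() & bit) && --count)
		usleep(delay_us);
	return (reg_rd() & bit) != 0;
}

int main(void)
{
	if (!poll_bit(0x1, 100, 50))
		fprintf(stderr, "timed out waiting for init-done bit\n");
	else
		printf("init-done bit observed\n");
	return 0;
}
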
7799 static void bnx2x_igu_clear_sb(struct bnx2x *bp, u8 idu_sb_id) in bnx2x_igu_clear_sb() argument
7801 bnx2x_igu_clear_sb_gen(bp, BP_FUNC(bp), idu_sb_id, true /*PF*/); in bnx2x_igu_clear_sb()
7804 static void bnx2x_clear_func_ilt(struct bnx2x *bp, u32 func) in bnx2x_clear_func_ilt() argument
7808 bnx2x_ilt_wr(bp, i, 0); in bnx2x_clear_func_ilt()
7811 static void bnx2x_init_searcher(struct bnx2x *bp) in bnx2x_init_searcher() argument
7813 int port = BP_PORT(bp); in bnx2x_init_searcher()
7814 bnx2x_src_init_t2(bp, bp->t2, bp->t2_mapping, SRC_CONN_NUM); in bnx2x_init_searcher()
7816 REG_WR(bp, SRC_REG_NUMBER_HASH_BITS0 + port*4, SRC_HASH_BITS); in bnx2x_init_searcher()
7819 static inline int bnx2x_func_switch_update(struct bnx2x *bp, int suspend) in bnx2x_func_switch_update() argument
7830 func_params.f_obj = &bp->func_obj; in bnx2x_func_switch_update()
7840 rc = bnx2x_func_state_change(bp, &func_params); in bnx2x_func_switch_update()
7845 static int bnx2x_reset_nic_mode(struct bnx2x *bp) in bnx2x_reset_nic_mode() argument
7847 int rc, i, port = BP_PORT(bp); in bnx2x_reset_nic_mode()
7851 if (bp->mf_mode == SINGLE_FUNCTION) { in bnx2x_reset_nic_mode()
7852 bnx2x_set_rx_filter(&bp->link_params, 0); in bnx2x_reset_nic_mode()
7854 vlan_en = REG_RD(bp, port ? NIG_REG_LLH1_FUNC_EN : in bnx2x_reset_nic_mode()
7856 REG_WR(bp, port ? NIG_REG_LLH1_FUNC_EN : in bnx2x_reset_nic_mode()
7859 mac_en[i] = REG_RD(bp, port ? in bnx2x_reset_nic_mode()
7864 REG_WR(bp, port ? (NIG_REG_LLH1_FUNC_MEM_ENABLE + in bnx2x_reset_nic_mode()
7871 REG_WR(bp, port ? NIG_REG_P0_TX_MNG_HOST_ENABLE : in bnx2x_reset_nic_mode()
7879 rc = bnx2x_func_switch_update(bp, 1); in bnx2x_reset_nic_mode()
7886 REG_WR(bp, PRS_REG_NIC_MODE, 0); in bnx2x_reset_nic_mode()
7889 if (bp->mf_mode == SINGLE_FUNCTION) { in bnx2x_reset_nic_mode()
7890 bnx2x_set_rx_filter(&bp->link_params, 1); in bnx2x_reset_nic_mode()
7892 REG_WR(bp, port ? NIG_REG_LLH1_FUNC_EN : in bnx2x_reset_nic_mode()
7895 REG_WR(bp, port ? (NIG_REG_LLH1_FUNC_MEM_ENABLE + in bnx2x_reset_nic_mode()
7903 REG_WR(bp, port ? NIG_REG_P0_TX_MNG_HOST_ENABLE : in bnx2x_reset_nic_mode()
7907 rc = bnx2x_func_switch_update(bp, 0); in bnx2x_reset_nic_mode()
7917 int bnx2x_init_hw_func_cnic(struct bnx2x *bp) in bnx2x_init_hw_func_cnic() argument
7921 bnx2x_ilt_init_op_cnic(bp, INITOP_SET); in bnx2x_init_hw_func_cnic()
7923 if (CONFIGURE_NIC_MODE(bp)) { in bnx2x_init_hw_func_cnic()
7925 bnx2x_init_searcher(bp); in bnx2x_init_hw_func_cnic()
7928 rc = bnx2x_reset_nic_mode(bp); in bnx2x_init_hw_func_cnic()
7944 static void bnx2x_clean_pglue_errors(struct bnx2x *bp) in bnx2x_clean_pglue_errors() argument
7946 if (!CHIP_IS_E1x(bp)) in bnx2x_clean_pglue_errors()
7947 REG_WR(bp, PGLUE_B_REG_WAS_ERROR_PF_7_0_CLR, in bnx2x_clean_pglue_errors()
7948 1 << BP_ABS_FUNC(bp)); in bnx2x_clean_pglue_errors()
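
bnx2x_clean_pglue_errors() clears only this function's latched PGLUE "was error" bit by writing a single set bit at the absolute PF index to the *_CLR register. A tiny sketch of that clear-one-bit idiom, using an illustrative latch word rather than the real register:

/* Per-PF latch clear: only the written bit is cleared. */
#include <stdint.h>
#include <stdio.h>

static uint32_t was_error_latch = 0x05;   /* pretend PF0 and PF2 latched */

static void clear_pf_error(unsigned int abs_func)
{
	uint32_t clr = 1u << abs_func;

	/* model of write-1-to-clear: latched bits written as 1 are dropped */
	was_error_latch &= ~clr;
	printf("cleared PF%u, latch now 0x%02x\n", abs_func, was_error_latch);
}

int main(void)
{
	clear_pf_error(2);
	return 0;
}
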
7951 static int bnx2x_init_hw_func(struct bnx2x *bp) in bnx2x_init_hw_func() argument
7953 int port = BP_PORT(bp); in bnx2x_init_hw_func()
7954 int func = BP_FUNC(bp); in bnx2x_init_hw_func()
7956 struct bnx2x_ilt *ilt = BP_ILT(bp); in bnx2x_init_hw_func()
7965 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_func()
7966 rc = bnx2x_pf_flr_clnup(bp); in bnx2x_init_hw_func()
7968 bnx2x_fw_dump(bp); in bnx2x_init_hw_func()
7974 if (bp->common.int_block == INT_BLOCK_HC) { in bnx2x_init_hw_func()
7976 val = REG_RD(bp, addr); in bnx2x_init_hw_func()
7978 REG_WR(bp, addr, val); in bnx2x_init_hw_func()
7981 bnx2x_init_block(bp, BLOCK_PXP, init_phase); in bnx2x_init_hw_func()
7982 bnx2x_init_block(bp, BLOCK_PXP2, init_phase); in bnx2x_init_hw_func()
7984 ilt = BP_ILT(bp); in bnx2x_init_hw_func()
7987 if (IS_SRIOV(bp)) in bnx2x_init_hw_func()
7989 cdu_ilt_start = bnx2x_iov_init_ilt(bp, cdu_ilt_start); in bnx2x_init_hw_func()
7995 for (i = 0; i < L2_ILT_LINES(bp); i++) { in bnx2x_init_hw_func()
7996 ilt->lines[cdu_ilt_start + i].page = bp->context[i].vcxt; in bnx2x_init_hw_func()
7998 bp->context[i].cxt_mapping; in bnx2x_init_hw_func()
7999 ilt->lines[cdu_ilt_start + i].size = bp->context[i].size; in bnx2x_init_hw_func()
8002 bnx2x_ilt_init_op(bp, INITOP_SET); in bnx2x_init_hw_func()
8004 if (!CONFIGURE_NIC_MODE(bp)) { in bnx2x_init_hw_func()
8005 bnx2x_init_searcher(bp); in bnx2x_init_hw_func()
8006 REG_WR(bp, PRS_REG_NIC_MODE, 0); in bnx2x_init_hw_func()
8010 REG_WR(bp, PRS_REG_NIC_MODE, 1); in bnx2x_init_hw_func()
8014 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_func()
8020 if (!(bp->flags & USING_MSIX_FLAG)) in bnx2x_init_hw_func()
8034 REG_WR(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_MASTER, 1); in bnx2x_init_hw_func()
8036 REG_WR(bp, IGU_REG_PF_CONFIGURATION, pf_conf); in bnx2x_init_hw_func()
8039 bp->dmae_ready = 1; in bnx2x_init_hw_func()
8041 bnx2x_init_block(bp, BLOCK_PGLUE_B, init_phase); in bnx2x_init_hw_func()
8043 bnx2x_clean_pglue_errors(bp); in bnx2x_init_hw_func()
8045 bnx2x_init_block(bp, BLOCK_ATC, init_phase); in bnx2x_init_hw_func()
8046 bnx2x_init_block(bp, BLOCK_DMAE, init_phase); in bnx2x_init_hw_func()
8047 bnx2x_init_block(bp, BLOCK_NIG, init_phase); in bnx2x_init_hw_func()
8048 bnx2x_init_block(bp, BLOCK_SRC, init_phase); in bnx2x_init_hw_func()
8049 bnx2x_init_block(bp, BLOCK_MISC, init_phase); in bnx2x_init_hw_func()
8050 bnx2x_init_block(bp, BLOCK_TCM, init_phase); in bnx2x_init_hw_func()
8051 bnx2x_init_block(bp, BLOCK_UCM, init_phase); in bnx2x_init_hw_func()
8052 bnx2x_init_block(bp, BLOCK_CCM, init_phase); in bnx2x_init_hw_func()
8053 bnx2x_init_block(bp, BLOCK_XCM, init_phase); in bnx2x_init_hw_func()
8054 bnx2x_init_block(bp, BLOCK_TSEM, init_phase); in bnx2x_init_hw_func()
8055 bnx2x_init_block(bp, BLOCK_USEM, init_phase); in bnx2x_init_hw_func()
8056 bnx2x_init_block(bp, BLOCK_CSEM, init_phase); in bnx2x_init_hw_func()
8057 bnx2x_init_block(bp, BLOCK_XSEM, init_phase); in bnx2x_init_hw_func()
8059 if (!CHIP_IS_E1x(bp)) in bnx2x_init_hw_func()
8060 REG_WR(bp, QM_REG_PF_EN, 1); in bnx2x_init_hw_func()
8062 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_func()
8063 REG_WR(bp, TSEM_REG_VFPF_ERR_NUM, BNX2X_MAX_NUM_OF_VFS + func); in bnx2x_init_hw_func()
8064 REG_WR(bp, USEM_REG_VFPF_ERR_NUM, BNX2X_MAX_NUM_OF_VFS + func); in bnx2x_init_hw_func()
8065 REG_WR(bp, CSEM_REG_VFPF_ERR_NUM, BNX2X_MAX_NUM_OF_VFS + func); in bnx2x_init_hw_func()
8066 REG_WR(bp, XSEM_REG_VFPF_ERR_NUM, BNX2X_MAX_NUM_OF_VFS + func); in bnx2x_init_hw_func()
8068 bnx2x_init_block(bp, BLOCK_QM, init_phase); in bnx2x_init_hw_func()
8070 bnx2x_init_block(bp, BLOCK_TM, init_phase); in bnx2x_init_hw_func()
8071 bnx2x_init_block(bp, BLOCK_DORQ, init_phase); in bnx2x_init_hw_func()
8072 REG_WR(bp, DORQ_REG_MODE_ACT, 1); /* no dpm */ in bnx2x_init_hw_func()
8074 bnx2x_iov_init_dq(bp); in bnx2x_init_hw_func()
8076 bnx2x_init_block(bp, BLOCK_BRB1, init_phase); in bnx2x_init_hw_func()
8077 bnx2x_init_block(bp, BLOCK_PRS, init_phase); in bnx2x_init_hw_func()
8078 bnx2x_init_block(bp, BLOCK_TSDM, init_phase); in bnx2x_init_hw_func()
8079 bnx2x_init_block(bp, BLOCK_CSDM, init_phase); in bnx2x_init_hw_func()
8080 bnx2x_init_block(bp, BLOCK_USDM, init_phase); in bnx2x_init_hw_func()
8081 bnx2x_init_block(bp, BLOCK_XSDM, init_phase); in bnx2x_init_hw_func()
8082 bnx2x_init_block(bp, BLOCK_UPB, init_phase); in bnx2x_init_hw_func()
8083 bnx2x_init_block(bp, BLOCK_XPB, init_phase); in bnx2x_init_hw_func()
8084 bnx2x_init_block(bp, BLOCK_PBF, init_phase); in bnx2x_init_hw_func()
8085 if (!CHIP_IS_E1x(bp)) in bnx2x_init_hw_func()
8086 REG_WR(bp, PBF_REG_DISABLE_PF, 0); in bnx2x_init_hw_func()
8088 bnx2x_init_block(bp, BLOCK_CDU, init_phase); in bnx2x_init_hw_func()
8090 bnx2x_init_block(bp, BLOCK_CFC, init_phase); in bnx2x_init_hw_func()
8092 if (!CHIP_IS_E1x(bp)) in bnx2x_init_hw_func()
8093 REG_WR(bp, CFC_REG_WEAK_ENABLE_PF, 1); in bnx2x_init_hw_func()
8095 if (IS_MF(bp)) { in bnx2x_init_hw_func()
8096 if (!(IS_MF_UFP(bp) && BNX2X_IS_MF_SD_PROTOCOL_FCOE(bp))) { in bnx2x_init_hw_func()
8097 REG_WR(bp, NIG_REG_LLH0_FUNC_EN + port * 8, 1); in bnx2x_init_hw_func()
8098 REG_WR(bp, NIG_REG_LLH0_FUNC_VLAN_ID + port * 8, in bnx2x_init_hw_func()
8099 bp->mf_ov); in bnx2x_init_hw_func()
8103 bnx2x_init_block(bp, BLOCK_MISC_AEU, init_phase); in bnx2x_init_hw_func()
8106 if (bp->common.int_block == INT_BLOCK_HC) { in bnx2x_init_hw_func()
8107 if (CHIP_IS_E1H(bp)) { in bnx2x_init_hw_func()
8108 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_12 + func*4, 0); in bnx2x_init_hw_func()
8110 REG_WR(bp, HC_REG_LEADING_EDGE_0 + port*8, 0); in bnx2x_init_hw_func()
8111 REG_WR(bp, HC_REG_TRAILING_EDGE_0 + port*8, 0); in bnx2x_init_hw_func()
8113 bnx2x_init_block(bp, BLOCK_HC, init_phase); in bnx2x_init_hw_func()
8118 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_12 + func*4, 0); in bnx2x_init_hw_func()
8120 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_func()
8121 REG_WR(bp, IGU_REG_LEADING_EDGE_LATCH, 0); in bnx2x_init_hw_func()
8122 REG_WR(bp, IGU_REG_TRAILING_EDGE_LATCH, 0); in bnx2x_init_hw_func()
8125 bnx2x_init_block(bp, BLOCK_IGU, init_phase); in bnx2x_init_hw_func()
8127 if (!CHIP_IS_E1x(bp)) { in bnx2x_init_hw_func()
8150 num_segs = CHIP_INT_MODE_IS_BC(bp) ? in bnx2x_init_hw_func()
8152 for (sb_idx = 0; sb_idx < bp->igu_sb_cnt; sb_idx++) { in bnx2x_init_hw_func()
8153 prod_offset = (bp->igu_base_sb + sb_idx) * in bnx2x_init_hw_func()
8159 REG_WR(bp, addr, 0); in bnx2x_init_hw_func()
8162 bnx2x_ack_sb(bp, bp->igu_base_sb + sb_idx, in bnx2x_init_hw_func()
8164 bnx2x_igu_clear_sb(bp, in bnx2x_init_hw_func()
8165 bp->igu_base_sb + sb_idx); in bnx2x_init_hw_func()
8169 num_segs = CHIP_INT_MODE_IS_BC(bp) ? in bnx2x_init_hw_func()
8172 if (CHIP_MODE_IS_4_PORT(bp)) in bnx2x_init_hw_func()
8173 dsb_idx = BP_FUNC(bp); in bnx2x_init_hw_func()
8175 dsb_idx = BP_VN(bp); in bnx2x_init_hw_func()
8177 prod_offset = (CHIP_INT_MODE_IS_BC(bp) ? in bnx2x_init_hw_func()
8189 REG_WR(bp, addr, 0); in bnx2x_init_hw_func()
8192 if (CHIP_INT_MODE_IS_BC(bp)) { in bnx2x_init_hw_func()
8193 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8195 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8197 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8199 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8201 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8204 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8206 bnx2x_ack_sb(bp, bp->igu_dsb_id, in bnx2x_init_hw_func()
8209 bnx2x_igu_clear_sb(bp, bp->igu_dsb_id); in bnx2x_init_hw_func()
8213 REG_WR(bp, IGU_REG_SB_INT_BEFORE_MASK_LSB, 0); in bnx2x_init_hw_func()
8214 REG_WR(bp, IGU_REG_SB_INT_BEFORE_MASK_MSB, 0); in bnx2x_init_hw_func()
8215 REG_WR(bp, IGU_REG_SB_MASK_LSB, 0); in bnx2x_init_hw_func()
8216 REG_WR(bp, IGU_REG_SB_MASK_MSB, 0); in bnx2x_init_hw_func()
8217 REG_WR(bp, IGU_REG_PBA_STATUS_LSB, 0); in bnx2x_init_hw_func()
8218 REG_WR(bp, IGU_REG_PBA_STATUS_MSB, 0); in bnx2x_init_hw_func()
8223 REG_WR(bp, 0x2114, 0xffffffff); in bnx2x_init_hw_func()
8224 REG_WR(bp, 0x2120, 0xffffffff); in bnx2x_init_hw_func()
8226 if (CHIP_IS_E1x(bp)) { in bnx2x_init_hw_func()
8229 BP_PORT(bp) * (main_mem_size * 4); in bnx2x_init_hw_func()
8233 val = REG_RD(bp, main_mem_prty_clr); in bnx2x_init_hw_func()
8243 bnx2x_read_dmae(bp, i, main_mem_width / 4); in bnx2x_init_hw_func()
8244 bnx2x_write_dmae(bp, bnx2x_sp_mapping(bp, wb_data), in bnx2x_init_hw_func()
8248 REG_RD(bp, main_mem_prty_clr); in bnx2x_init_hw_func()
8253 REG_WR8(bp, BAR_USTRORM_INTMEM + in bnx2x_init_hw_func()
8254 USTORM_RECORD_SLOW_PATH_OFFSET(BP_FUNC(bp)), 1); in bnx2x_init_hw_func()
8255 REG_WR8(bp, BAR_TSTRORM_INTMEM + in bnx2x_init_hw_func()
8256 TSTORM_RECORD_SLOW_PATH_OFFSET(BP_FUNC(bp)), 1); in bnx2x_init_hw_func()
8257 REG_WR8(bp, BAR_CSTRORM_INTMEM + in bnx2x_init_hw_func()
8258 CSTORM_RECORD_SLOW_PATH_OFFSET(BP_FUNC(bp)), 1); in bnx2x_init_hw_func()
8259 REG_WR8(bp, BAR_XSTRORM_INTMEM + in bnx2x_init_hw_func()
8260 XSTORM_RECORD_SLOW_PATH_OFFSET(BP_FUNC(bp)), 1); in bnx2x_init_hw_func()
8263 bnx2x_phy_probe(&bp->link_params); in bnx2x_init_hw_func()
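
Earlier in bnx2x_init_hw_func(), each L2 context chunk (virtual address, DMA mapping, size) is copied into consecutive CDU ILT lines starting at cdu_ilt_start before bnx2x_ilt_init_op(..., INITOP_SET) commits the table. A simplified, self-contained model of that fill follows; the struct layouts, sizes, and addresses are assumptions for illustration, not the driver's real bnx2x_ilt/ilt_line definitions.

/* Fill consecutive ILT lines from per-function context chunks. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct ilt_line {
	void     *page;      /* kernel virtual address of the chunk */
	uint64_t  mapping;   /* DMA address programmed into the ILT */
	size_t    size;
};

struct ctx_chunk {
	void     *vcxt;
	uint64_t  cxt_mapping;
	size_t    size;
};

static void ilt_fill(struct ilt_line *lines, size_t start,
		     const struct ctx_chunk *ctx, size_t nr)
{
	size_t i;

	for (i = 0; i < nr; i++) {
		lines[start + i].page    = ctx[i].vcxt;
		lines[start + i].mapping = ctx[i].cxt_mapping;
		lines[start + i].size    = ctx[i].size;
	}
}

int main(void)
{
	static uint8_t chunk0[4096], chunk1[4096];
	static struct ilt_line lines[16];
	struct ctx_chunk ctx[2] = {
		{ chunk0, 0x10000000ull, sizeof(chunk0) },
		{ chunk1, 0x10001000ull, sizeof(chunk1) },
	};

	ilt_fill(lines, 4, ctx, 2);     /* cdu_ilt_start = 4, for example */
	printf("line 4 -> mapping 0x%llx, size %zu\n",
	       (unsigned long long)lines[4].mapping, lines[4].size);
	return 0;
}
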
8268 void bnx2x_free_mem_cnic(struct bnx2x *bp) in bnx2x_free_mem_cnic() argument
8270 bnx2x_ilt_mem_op_cnic(bp, ILT_MEMOP_FREE); in bnx2x_free_mem_cnic()
8272 if (!CHIP_IS_E1x(bp)) in bnx2x_free_mem_cnic()
8273 BNX2X_PCI_FREE(bp->cnic_sb.e2_sb, bp->cnic_sb_mapping, in bnx2x_free_mem_cnic()
8276 BNX2X_PCI_FREE(bp->cnic_sb.e1x_sb, bp->cnic_sb_mapping, in bnx2x_free_mem_cnic()
8279 BNX2X_PCI_FREE(bp->t2, bp->t2_mapping, SRC_T2_SZ); in bnx2x_free_mem_cnic()
8282 void bnx2x_free_mem(struct bnx2x *bp) in bnx2x_free_mem() argument
8286 BNX2X_PCI_FREE(bp->fw_stats, bp->fw_stats_mapping, in bnx2x_free_mem()
8287 bp->fw_stats_data_sz + bp->fw_stats_req_sz); in bnx2x_free_mem()
8289 if (IS_VF(bp)) in bnx2x_free_mem()
8292 BNX2X_PCI_FREE(bp->def_status_blk, bp->def_status_blk_mapping, in bnx2x_free_mem()
8295 BNX2X_PCI_FREE(bp->slowpath, bp->slowpath_mapping, in bnx2x_free_mem()
8298 for (i = 0; i < L2_ILT_LINES(bp); i++) in bnx2x_free_mem()
8299 BNX2X_PCI_FREE(bp->context[i].vcxt, bp->context[i].cxt_mapping, in bnx2x_free_mem()
8300 bp->context[i].size); in bnx2x_free_mem()
8301 bnx2x_ilt_mem_op(bp, ILT_MEMOP_FREE); in bnx2x_free_mem()
8303 BNX2X_FREE(bp->ilt->lines); in bnx2x_free_mem()
8305 BNX2X_PCI_FREE(bp->spq, bp->spq_mapping, BCM_PAGE_SIZE); in bnx2x_free_mem()
8307 BNX2X_PCI_FREE(bp->eq_ring, bp->eq_mapping, in bnx2x_free_mem()
8310 BNX2X_PCI_FREE(bp->t2, bp->t2_mapping, SRC_T2_SZ); in bnx2x_free_mem()
8312 bnx2x_iov_free_mem(bp); in bnx2x_free_mem()
8315 int bnx2x_alloc_mem_cnic(struct bnx2x *bp) in bnx2x_alloc_mem_cnic() argument
8317 if (!CHIP_IS_E1x(bp)) { in bnx2x_alloc_mem_cnic()
8319 bp->cnic_sb.e2_sb = BNX2X_PCI_ALLOC(&bp->cnic_sb_mapping, in bnx2x_alloc_mem_cnic()
8321 if (!bp->cnic_sb.e2_sb) in bnx2x_alloc_mem_cnic()
8324 bp->cnic_sb.e1x_sb = BNX2X_PCI_ALLOC(&bp->cnic_sb_mapping, in bnx2x_alloc_mem_cnic()
8326 if (!bp->cnic_sb.e1x_sb) in bnx2x_alloc_mem_cnic()
8330 if (CONFIGURE_NIC_MODE(bp) && !bp->t2) { in bnx2x_alloc_mem_cnic()
8332 bp->t2 = BNX2X_PCI_ALLOC(&bp->t2_mapping, SRC_T2_SZ); in bnx2x_alloc_mem_cnic()
8333 if (!bp->t2) in bnx2x_alloc_mem_cnic()
8338 bp->cnic_eth_dev.addr_drv_info_to_mcp = in bnx2x_alloc_mem_cnic()
8339 &bp->slowpath->drv_info_to_mcp; in bnx2x_alloc_mem_cnic()
8341 if (bnx2x_ilt_mem_op_cnic(bp, ILT_MEMOP_ALLOC)) in bnx2x_alloc_mem_cnic()
8347 bnx2x_free_mem_cnic(bp); in bnx2x_alloc_mem_cnic()
8352 int bnx2x_alloc_mem(struct bnx2x *bp) in bnx2x_alloc_mem() argument
8356 if (!CONFIGURE_NIC_MODE(bp) && !bp->t2) { in bnx2x_alloc_mem()
8358 bp->t2 = BNX2X_PCI_ALLOC(&bp->t2_mapping, SRC_T2_SZ); in bnx2x_alloc_mem()
8359 if (!bp->t2) in bnx2x_alloc_mem()
8363 bp->def_status_blk = BNX2X_PCI_ALLOC(&bp->def_status_blk_mapping, in bnx2x_alloc_mem()
8365 if (!bp->def_status_blk) in bnx2x_alloc_mem()
8368 bp->slowpath = BNX2X_PCI_ALLOC(&bp->slowpath_mapping, in bnx2x_alloc_mem()
8370 if (!bp->slowpath) in bnx2x_alloc_mem()
8386 context_size = sizeof(union cdu_context) * BNX2X_L2_CID_COUNT(bp); in bnx2x_alloc_mem()
8389 bp->context[i].size = min(CDU_ILT_PAGE_SZ, in bnx2x_alloc_mem()
8391 bp->context[i].vcxt = BNX2X_PCI_ALLOC(&bp->context[i].cxt_mapping, in bnx2x_alloc_mem()
8392 bp->context[i].size); in bnx2x_alloc_mem()
8393 if (!bp->context[i].vcxt) in bnx2x_alloc_mem()
8395 allocated += bp->context[i].size; in bnx2x_alloc_mem()
8397 bp->ilt->lines = kcalloc(ILT_MAX_LINES, sizeof(struct ilt_line), in bnx2x_alloc_mem()
8399 if (!bp->ilt->lines) in bnx2x_alloc_mem()
8402 if (bnx2x_ilt_mem_op(bp, ILT_MEMOP_ALLOC)) in bnx2x_alloc_mem()
8405 if (bnx2x_iov_alloc_mem(bp)) in bnx2x_alloc_mem()
8409 bp->spq = BNX2X_PCI_ALLOC(&bp->spq_mapping, BCM_PAGE_SIZE); in bnx2x_alloc_mem()
8410 if (!bp->spq) in bnx2x_alloc_mem()
8414 bp->eq_ring = BNX2X_PCI_ALLOC(&bp->eq_mapping, in bnx2x_alloc_mem()
8416 if (!bp->eq_ring) in bnx2x_alloc_mem()
8422 bnx2x_free_mem(bp); in bnx2x_alloc_mem()
8431 int bnx2x_set_mac_one(struct bnx2x *bp, const u8 *mac, in bnx2x_set_mac_one() argument
8457 rc = bnx2x_config_vlan_mac(bp, &ramrod_param); in bnx2x_set_mac_one()
8469 int bnx2x_set_vlan_one(struct bnx2x *bp, u16 vlan, in bnx2x_set_vlan_one() argument
8493 rc = bnx2x_config_vlan_mac(bp, &ramrod_param); in bnx2x_set_vlan_one()
8506 void bnx2x_clear_vlan_info(struct bnx2x *bp) in bnx2x_clear_vlan_info() argument
8511 list_for_each_entry(vlan, &bp->vlan_reg, link) in bnx2x_clear_vlan_info()
8514 bp->vlan_cnt = 0; in bnx2x_clear_vlan_info()
8517 static int bnx2x_del_all_vlans(struct bnx2x *bp) in bnx2x_del_all_vlans() argument
8519 struct bnx2x_vlan_mac_obj *vlan_obj = &bp->sp_objs[0].vlan_obj; in bnx2x_del_all_vlans()
8525 rc = vlan_obj->delete_all(bp, vlan_obj, &vlan_flags, &ramrod_flags); in bnx2x_del_all_vlans()
8529 bnx2x_clear_vlan_info(bp); in bnx2x_del_all_vlans()
8534 int bnx2x_del_all_macs(struct bnx2x *bp, in bnx2x_del_all_macs() argument
8548 rc = mac_obj->delete_all(bp, mac_obj, &vlan_mac_flags, &ramrod_flags); in bnx2x_del_all_macs()
8555 int bnx2x_set_eth_mac(struct bnx2x *bp, bool set) in bnx2x_set_eth_mac() argument
8557 if (IS_PF(bp)) { in bnx2x_set_eth_mac()
8562 return bnx2x_set_mac_one(bp, bp->dev->dev_addr, in bnx2x_set_eth_mac()
8563 &bp->sp_objs->mac_obj, set, in bnx2x_set_eth_mac()
8566 return bnx2x_vfpf_config_mac(bp, bp->dev->dev_addr, in bnx2x_set_eth_mac()
8567 bp->fp->index, set); in bnx2x_set_eth_mac()
8571 int bnx2x_setup_leading(struct bnx2x *bp) in bnx2x_setup_leading() argument
8573 if (IS_PF(bp)) in bnx2x_setup_leading()
8574 return bnx2x_setup_queue(bp, &bp->fp[0], true); in bnx2x_setup_leading()
8576 return bnx2x_vfpf_setup_q(bp, &bp->fp[0], true); in bnx2x_setup_leading()
8586 int bnx2x_set_int_mode(struct bnx2x *bp) in bnx2x_set_int_mode() argument
8590 if (IS_VF(bp) && int_mode != BNX2X_INT_MODE_MSIX) { in bnx2x_set_int_mode()
8598 rc = bnx2x_enable_msix(bp); in bnx2x_set_int_mode()
8605 if (rc && IS_VF(bp)) in bnx2x_set_int_mode()
8610 bp->num_queues, in bnx2x_set_int_mode()
8611 1 + bp->num_cnic_queues); in bnx2x_set_int_mode()
8615 bnx2x_enable_msi(bp); in bnx2x_set_int_mode()
8619 bp->num_ethernet_queues = 1; in bnx2x_set_int_mode()
8620 bp->num_queues = bp->num_ethernet_queues + bp->num_cnic_queues; in bnx2x_set_int_mode()
8631 static inline u16 bnx2x_cid_ilt_lines(struct bnx2x *bp) in bnx2x_cid_ilt_lines() argument
8633 if (IS_SRIOV(bp)) in bnx2x_cid_ilt_lines()
8635 return L2_ILT_LINES(bp); in bnx2x_cid_ilt_lines()
8638 void bnx2x_ilt_set_info(struct bnx2x *bp) in bnx2x_ilt_set_info() argument
8641 struct bnx2x_ilt *ilt = BP_ILT(bp); in bnx2x_ilt_set_info()
8644 ilt->start_line = FUNC_ILT_BASE(BP_FUNC(bp)); in bnx2x_ilt_set_info()
8653 line += bnx2x_cid_ilt_lines(bp); in bnx2x_ilt_set_info()
8655 if (CNIC_SUPPORT(bp)) in bnx2x_ilt_set_info()
8667 if (QM_INIT(bp->qm_cid_count)) { in bnx2x_ilt_set_info()
8675 line += DIV_ROUND_UP(bp->qm_cid_count * QM_QUEUES_PER_FUNC * 4, in bnx2x_ilt_set_info()
8689 if (CNIC_SUPPORT(bp)) { in bnx2x_ilt_set_info()
8739 static void bnx2x_pf_q_prep_init(struct bnx2x *bp, in bnx2x_pf_q_prep_init() argument
8757 init_params->rx.hc_rate = bp->rx_ticks ? in bnx2x_pf_q_prep_init()
8758 (1000000 / bp->rx_ticks) : 0; in bnx2x_pf_q_prep_init()
8759 init_params->tx.hc_rate = bp->tx_ticks ? in bnx2x_pf_q_prep_init()
8760 (1000000 / bp->tx_ticks) : 0; in bnx2x_pf_q_prep_init()
8786 &bp->context[cxt_index].vcxt[cxt_offset].eth; in bnx2x_pf_q_prep_init()
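
bnx2x_pf_q_prep_init() above derives the host-coalescing rate from the configured tick period: hc_rate is status-block updates per second, i.e. 1000000 / ticks (ticks in microseconds), with zero ticks meaning coalescing is disabled. A small worked sketch; the 25/50 µs inputs are just example values, not driver defaults.

/* Coalescing rate derivation used above. */
#include <stdint.h>
#include <stdio.h>

static uint32_t hc_rate_from_ticks(uint32_t ticks_us)
{
	return ticks_us ? (1000000u / ticks_us) : 0;
}

int main(void)
{
	/* e.g. rx_ticks = 25us -> 40000 updates/s, tx_ticks = 50us -> 20000 */
	printf("rx hc_rate = %u\n", hc_rate_from_ticks(25));
	printf("tx hc_rate = %u\n", hc_rate_from_ticks(50));
	printf("disabled   = %u\n", hc_rate_from_ticks(0));
	return 0;
}
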
8790 static int bnx2x_setup_tx_only(struct bnx2x *bp, struct bnx2x_fastpath *fp, in bnx2x_setup_tx_only() argument
8801 tx_only_params->flags = bnx2x_get_common_flags(bp, fp, false); in bnx2x_setup_tx_only()
8807 bnx2x_pf_q_prep_general(bp, fp, &tx_only_params->gen_params, tx_index); in bnx2x_setup_tx_only()
8810 bnx2x_pf_tx_q_prep(bp, fp, &tx_only_params->txq_params, tx_index); in bnx2x_setup_tx_only()
8819 return bnx2x_queue_state_change(bp, q_params); in bnx2x_setup_tx_only()
8833 int bnx2x_setup_queue(struct bnx2x *bp, struct bnx2x_fastpath *fp, in bnx2x_setup_queue() argument
8848 bnx2x_ack_sb(bp, fp->igu_sb_id, USTORM_ID, 0, in bnx2x_setup_queue()
8851 q_params.q_obj = &bnx2x_sp_obj(bp, fp).q_obj; in bnx2x_setup_queue()
8856 bnx2x_pf_q_prep_init(bp, fp, &q_params.params.init); in bnx2x_setup_queue()
8862 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_setup_queue()
8874 setup_params->flags = bnx2x_get_q_flags(bp, fp, leading); in bnx2x_setup_queue()
8877 bnx2x_pf_q_prep_general(bp, fp, &setup_params->gen_params, in bnx2x_setup_queue()
8880 bnx2x_pf_rx_q_prep(bp, fp, &setup_params->pause_params, in bnx2x_setup_queue()
8883 bnx2x_pf_tx_q_prep(bp, fp, &setup_params->txq_params, in bnx2x_setup_queue()
8890 bp->fcoe_init = true; in bnx2x_setup_queue()
8893 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_setup_queue()
8905 rc = bnx2x_setup_tx_only(bp, fp, &q_params, in bnx2x_setup_queue()
8917 static int bnx2x_stop_queue(struct bnx2x *bp, int index) in bnx2x_stop_queue() argument
8919 struct bnx2x_fastpath *fp = &bp->fp[index]; in bnx2x_stop_queue()
8926 q_params.q_obj = &bnx2x_sp_obj(bp, fp).q_obj; in bnx2x_stop_queue()
8947 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_stop_queue()
8956 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_stop_queue()
8963 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_stop_queue()
8972 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_stop_queue()
8980 return bnx2x_queue_state_change(bp, &q_params); in bnx2x_stop_queue()
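
bnx2x_stop_queue() above issues a fixed teardown sequence of queue state-change commands — roughly halt, per-ring terminate, then CFC delete — and returns on the first failure. A generic sketch of running such a command sequence with early exit; the command set and submit() stub are placeholders, not the driver's bnx2x_queue_state_params or bnx2x_queue_state_change().

/* Run a teardown command sequence, stopping at the first error. */
#include <stdio.h>

enum q_cmd { Q_CMD_HALT, Q_CMD_TERMINATE, Q_CMD_CFC_DEL };

static const char *q_cmd_name[] = { "HALT", "TERMINATE", "CFC_DEL" };

/* Stand-in for submitting one state-change ramrod and waiting for it. */
static int submit(enum q_cmd cmd)
{
	printf("issuing %s\n", q_cmd_name[cmd]);
	return 0;    /* 0 on success, negative errno-style value on failure */
}

static int stop_queue(void)
{
	static const enum q_cmd seq[] = {
		Q_CMD_HALT, Q_CMD_TERMINATE, Q_CMD_CFC_DEL,
	};
	unsigned int i;
	int rc;

	for (i = 0; i < sizeof(seq) / sizeof(seq[0]); i++) {
		rc = submit(seq[i]);
		if (rc)
			return rc;   /* propagate the first failure */
	}
	return 0;
}

int main(void)
{
	return stop_queue() ? 1 : 0;
}
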
8983 static void bnx2x_reset_func(struct bnx2x *bp) in bnx2x_reset_func() argument
8985 int port = BP_PORT(bp); in bnx2x_reset_func()
8986 int func = BP_FUNC(bp); in bnx2x_reset_func()
8990 REG_WR8(bp, BAR_XSTRORM_INTMEM + XSTORM_FUNC_EN_OFFSET(func), 0); in bnx2x_reset_func()
8991 REG_WR8(bp, BAR_CSTRORM_INTMEM + CSTORM_FUNC_EN_OFFSET(func), 0); in bnx2x_reset_func()
8992 REG_WR8(bp, BAR_TSTRORM_INTMEM + TSTORM_FUNC_EN_OFFSET(func), 0); in bnx2x_reset_func()
8993 REG_WR8(bp, BAR_USTRORM_INTMEM + USTORM_FUNC_EN_OFFSET(func), 0); in bnx2x_reset_func()
8996 for_each_eth_queue(bp, i) { in bnx2x_reset_func()
8997 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_reset_func()
8998 REG_WR8(bp, BAR_CSTRORM_INTMEM + in bnx2x_reset_func()
9003 if (CNIC_LOADED(bp)) in bnx2x_reset_func()
9005 REG_WR8(bp, BAR_CSTRORM_INTMEM + in bnx2x_reset_func()
9007 (bnx2x_cnic_fw_sb_id(bp)), SB_DISABLED); in bnx2x_reset_func()
9010 REG_WR8(bp, BAR_CSTRORM_INTMEM + in bnx2x_reset_func()
9015 REG_WR(bp, BAR_XSTRORM_INTMEM + XSTORM_SPQ_DATA_OFFSET(func), in bnx2x_reset_func()
9019 if (bp->common.int_block == INT_BLOCK_HC) { in bnx2x_reset_func()
9020 REG_WR(bp, HC_REG_LEADING_EDGE_0 + port*8, 0); in bnx2x_reset_func()
9021 REG_WR(bp, HC_REG_TRAILING_EDGE_0 + port*8, 0); in bnx2x_reset_func()
9023 REG_WR(bp, IGU_REG_LEADING_EDGE_LATCH, 0); in bnx2x_reset_func()
9024 REG_WR(bp, IGU_REG_TRAILING_EDGE_LATCH, 0); in bnx2x_reset_func()
9027 if (CNIC_LOADED(bp)) { in bnx2x_reset_func()
9029 REG_WR(bp, TM_REG_EN_LINEAR0_TIMER + port*4, 0); in bnx2x_reset_func()
9036 if (!REG_RD(bp, TM_REG_LIN0_SCAN_ON + port*4)) in bnx2x_reset_func()
9041 bnx2x_clear_func_ilt(bp, func); in bnx2x_reset_func()
9046 if (!CHIP_IS_E1x(bp) && BP_VN(bp) == 3) { in bnx2x_reset_func()
9054 bnx2x_ilt_boundry_init_op(bp, &ilt_cli, 0, INITOP_CLEAR); in bnx2x_reset_func()
9058 if (!CHIP_IS_E1x(bp)) in bnx2x_reset_func()
9059 bnx2x_pf_disable(bp); in bnx2x_reset_func()
9061 bp->dmae_ready = 0; in bnx2x_reset_func()
9064 static void bnx2x_reset_port(struct bnx2x *bp) in bnx2x_reset_port() argument
9066 int port = BP_PORT(bp); in bnx2x_reset_port()
9070 bnx2x__link_reset(bp); in bnx2x_reset_port()
9072 REG_WR(bp, NIG_REG_MASK_INTERRUPT_PORT0 + port*4, 0); in bnx2x_reset_port()
9075 REG_WR(bp, NIG_REG_LLH0_BRB1_DRV_MASK + port*4, 0x0); in bnx2x_reset_port()
9077 REG_WR(bp, (port ? NIG_REG_LLH1_BRB1_NOT_MCP : in bnx2x_reset_port()
9081 REG_WR(bp, MISC_REG_AEU_MASK_ATTN_FUNC_0 + port*4, 0); in bnx2x_reset_port()
9085 val = REG_RD(bp, BRB1_REG_PORT_NUM_OCC_BLOCKS_0 + port*4); in bnx2x_reset_port()
9093 static int bnx2x_reset_hw(struct bnx2x *bp, u32 load_code) in bnx2x_reset_hw() argument
9100 func_params.f_obj = &bp->func_obj; in bnx2x_reset_hw()
9105 return bnx2x_func_state_change(bp, &func_params); in bnx2x_reset_hw()
9108 static int bnx2x_func_stop(struct bnx2x *bp) in bnx2x_func_stop() argument
9115 func_params.f_obj = &bp->func_obj; in bnx2x_func_stop()
9124 rc = bnx2x_func_state_change(bp, &func_params); in bnx2x_func_stop()
9131 return bnx2x_func_state_change(bp, &func_params); in bnx2x_func_stop()
9146 u32 bnx2x_send_unload_req(struct bnx2x *bp, int unload_mode) in bnx2x_send_unload_req() argument
9149 int port = BP_PORT(bp); in bnx2x_send_unload_req()
9155 else if (bp->flags & NO_WOL_FLAG) in bnx2x_send_unload_req()
9158 else if (bp->wol) { in bnx2x_send_unload_req()
9160 const u8 *mac_addr = bp->dev->dev_addr; in bnx2x_send_unload_req()
9161 struct pci_dev *pdev = bp->pdev; in bnx2x_send_unload_req()
9168 u8 entry = (BP_VN(bp) + 1)*8; in bnx2x_send_unload_req()
9171 EMAC_WR(bp, EMAC_REG_EMAC_MAC_MATCH + entry, val); in bnx2x_send_unload_req()
9175 EMAC_WR(bp, EMAC_REG_EMAC_MAC_MATCH + entry + 4, val); in bnx2x_send_unload_req()
9188 if (!BP_NOMCP(bp)) in bnx2x_send_unload_req()
9189 reset_code = bnx2x_fw_command(bp, reset_code, 0); in bnx2x_send_unload_req()
9191 int path = BP_PATH(bp); in bnx2x_send_unload_req()
9218 void bnx2x_send_unload_done(struct bnx2x *bp, bool keep_link) in bnx2x_send_unload_done() argument
9223 if (!BP_NOMCP(bp)) in bnx2x_send_unload_done()
9224 bnx2x_fw_command(bp, DRV_MSG_CODE_UNLOAD_DONE, reset_param); in bnx2x_send_unload_done()
9227 static int bnx2x_func_wait_started(struct bnx2x *bp) in bnx2x_func_wait_started() argument
9230 int msix = (bp->flags & USING_MSIX_FLAG) ? 1 : 0; in bnx2x_func_wait_started()
9232 if (!bp->port.pmf) in bnx2x_func_wait_started()
9251 synchronize_irq(bp->msix_table[0].vector); in bnx2x_func_wait_started()
9253 synchronize_irq(bp->pdev->irq); in bnx2x_func_wait_started()
9258 while (bnx2x_func_get_state(bp, &bp->func_obj) != in bnx2x_func_wait_started()
9262 if (bnx2x_func_get_state(bp, &bp->func_obj) != in bnx2x_func_wait_started()
9277 func_params.f_obj = &bp->func_obj; in bnx2x_func_wait_started()
9283 bnx2x_func_state_change(bp, &func_params); in bnx2x_func_wait_started()
9287 return bnx2x_func_state_change(bp, &func_params); in bnx2x_func_wait_started()
9294 static void bnx2x_disable_ptp(struct bnx2x *bp) in bnx2x_disable_ptp() argument
9296 int port = BP_PORT(bp); in bnx2x_disable_ptp()
9299 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_TO_HOST : in bnx2x_disable_ptp()
9303 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_PARAM_MASK : in bnx2x_disable_ptp()
9305 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_RULE_MASK : in bnx2x_disable_ptp()
9307 REG_WR(bp, port ? NIG_REG_P1_TLLH_PTP_PARAM_MASK : in bnx2x_disable_ptp()
9309 REG_WR(bp, port ? NIG_REG_P1_TLLH_PTP_RULE_MASK : in bnx2x_disable_ptp()
9313 REG_WR(bp, port ? NIG_REG_P1_PTP_EN : in bnx2x_disable_ptp()
9318 static void bnx2x_stop_ptp(struct bnx2x *bp) in bnx2x_stop_ptp() argument
9323 cancel_work_sync(&bp->ptp_task); in bnx2x_stop_ptp()
9325 if (bp->ptp_tx_skb) { in bnx2x_stop_ptp()
9326 dev_kfree_skb_any(bp->ptp_tx_skb); in bnx2x_stop_ptp()
9327 bp->ptp_tx_skb = NULL; in bnx2x_stop_ptp()
9331 bnx2x_disable_ptp(bp); in bnx2x_stop_ptp()
9336 void bnx2x_chip_cleanup(struct bnx2x *bp, int unload_mode, bool keep_link) in bnx2x_chip_cleanup() argument
9338 int port = BP_PORT(bp); in bnx2x_chip_cleanup()
9345 for_each_tx_queue(bp, i) { in bnx2x_chip_cleanup()
9346 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_chip_cleanup()
9349 rc = bnx2x_clean_tx_queue(bp, fp->txdata_ptr[cos]); in bnx2x_chip_cleanup()
9360 rc = bnx2x_del_all_macs(bp, &bp->sp_objs[0].mac_obj, BNX2X_ETH_MAC, in bnx2x_chip_cleanup()
9366 rc = bnx2x_del_all_macs(bp, &bp->sp_objs[0].mac_obj, BNX2X_UC_LIST_MAC, in bnx2x_chip_cleanup()
9376 if (!CHIP_IS_E1x(bp)) { in bnx2x_chip_cleanup()
9378 rc = bnx2x_del_all_vlans(bp); in bnx2x_chip_cleanup()
9384 if (!CHIP_IS_E1(bp)) in bnx2x_chip_cleanup()
9385 REG_WR(bp, NIG_REG_LLH0_FUNC_EN + port*8, 0); in bnx2x_chip_cleanup()
9391 netif_addr_lock_bh(bp->dev); in bnx2x_chip_cleanup()
9393 if (test_bit(BNX2X_FILTER_RX_MODE_PENDING, &bp->sp_state)) in bnx2x_chip_cleanup()
9394 set_bit(BNX2X_FILTER_RX_MODE_SCHED, &bp->sp_state); in bnx2x_chip_cleanup()
9395 else if (bp->slowpath) in bnx2x_chip_cleanup()
9396 bnx2x_set_storm_rx_mode(bp); in bnx2x_chip_cleanup()
9399 rparam.mcast_obj = &bp->mcast_obj; in bnx2x_chip_cleanup()
9400 rc = bnx2x_config_mcast(bp, &rparam, BNX2X_MCAST_CMD_DEL); in bnx2x_chip_cleanup()
9404 netif_addr_unlock_bh(bp->dev); in bnx2x_chip_cleanup()
9406 bnx2x_iov_chip_cleanup(bp); in bnx2x_chip_cleanup()
9413 reset_code = bnx2x_send_unload_req(bp, unload_mode); in bnx2x_chip_cleanup()
9419 rc = bnx2x_func_wait_started(bp); in bnx2x_chip_cleanup()
9430 for_each_eth_queue(bp, i) in bnx2x_chip_cleanup()
9431 if (bnx2x_stop_queue(bp, i)) in bnx2x_chip_cleanup()
9438 if (CNIC_LOADED(bp)) { in bnx2x_chip_cleanup()
9439 for_each_cnic_queue(bp, i) in bnx2x_chip_cleanup()
9440 if (bnx2x_stop_queue(bp, i)) in bnx2x_chip_cleanup()
9451 if (!bnx2x_wait_sp_comp(bp, ~0x0UL)) in bnx2x_chip_cleanup()
9457 rc = bnx2x_func_stop(bp); in bnx2x_chip_cleanup()
9470 if (bp->flags & PTP_SUPPORTED) { in bnx2x_chip_cleanup()
9471 bnx2x_stop_ptp(bp); in bnx2x_chip_cleanup()
9472 if (bp->ptp_clock) { in bnx2x_chip_cleanup()
9473 ptp_clock_unregister(bp->ptp_clock); in bnx2x_chip_cleanup()
9474 bp->ptp_clock = NULL; in bnx2x_chip_cleanup()
9479 bnx2x_netif_stop(bp, 1); in bnx2x_chip_cleanup()
9481 bnx2x_del_all_napi(bp); in bnx2x_chip_cleanup()
9482 if (CNIC_LOADED(bp)) in bnx2x_chip_cleanup()
9483 bnx2x_del_all_napi_cnic(bp); in bnx2x_chip_cleanup()
9486 bnx2x_free_irq(bp); in bnx2x_chip_cleanup()
9493 if (!pci_channel_offline(bp->pdev)) { in bnx2x_chip_cleanup()
9494 rc = bnx2x_reset_hw(bp, reset_code); in bnx2x_chip_cleanup()
9500 bnx2x_send_unload_done(bp, keep_link); in bnx2x_chip_cleanup()
9503 void bnx2x_disable_close_the_gate(struct bnx2x *bp) in bnx2x_disable_close_the_gate() argument
9509 if (CHIP_IS_E1(bp)) { in bnx2x_disable_close_the_gate()
9510 int port = BP_PORT(bp); in bnx2x_disable_close_the_gate()
9514 val = REG_RD(bp, addr); in bnx2x_disable_close_the_gate()
9516 REG_WR(bp, addr, val); in bnx2x_disable_close_the_gate()
9518 val = REG_RD(bp, MISC_REG_AEU_GENERAL_MASK); in bnx2x_disable_close_the_gate()
9521 REG_WR(bp, MISC_REG_AEU_GENERAL_MASK, val); in bnx2x_disable_close_the_gate()
9526 static void bnx2x_set_234_gates(struct bnx2x *bp, bool close) in bnx2x_set_234_gates() argument
9531 if (!CHIP_IS_E1(bp)) { in bnx2x_set_234_gates()
9533 REG_WR(bp, PXP_REG_HST_DISCARD_DOORBELLS, !!close); in bnx2x_set_234_gates()
9535 REG_WR(bp, PXP_REG_HST_DISCARD_INTERNAL_WRITES, !!close); in bnx2x_set_234_gates()
9539 if (CHIP_IS_E1x(bp)) { in bnx2x_set_234_gates()
9541 val = REG_RD(bp, HC_REG_CONFIG_1); in bnx2x_set_234_gates()
9542 REG_WR(bp, HC_REG_CONFIG_1, in bnx2x_set_234_gates()
9546 val = REG_RD(bp, HC_REG_CONFIG_0); in bnx2x_set_234_gates()
9547 REG_WR(bp, HC_REG_CONFIG_0, in bnx2x_set_234_gates()
9552 val = REG_RD(bp, IGU_REG_BLOCK_CONFIGURATION); in bnx2x_set_234_gates()
9554 REG_WR(bp, IGU_REG_BLOCK_CONFIGURATION, in bnx2x_set_234_gates()
9566 static void bnx2x_clp_reset_prep(struct bnx2x *bp, u32 *magic_val) in bnx2x_clp_reset_prep() argument
9569 u32 val = MF_CFG_RD(bp, shared_mf_config.clp_mb); in bnx2x_clp_reset_prep()
9571 MF_CFG_WR(bp, shared_mf_config.clp_mb, val | SHARED_MF_CLP_MAGIC); in bnx2x_clp_reset_prep()
9580 static void bnx2x_clp_reset_done(struct bnx2x *bp, u32 magic_val) in bnx2x_clp_reset_done() argument
9583 u32 val = MF_CFG_RD(bp, shared_mf_config.clp_mb); in bnx2x_clp_reset_done()
9584 MF_CFG_WR(bp, shared_mf_config.clp_mb, in bnx2x_clp_reset_done()
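
The prep/done pair above brackets an MCP reset: prep reads the shared CLP mailbox and sets the magic bit, and done later presumably puts that bit back to its saved state. A save-bit / set / restore-bit sketch under that assumption, with a plain word standing in for the MF-config mailbox and an illustrative magic value instead of SHARED_MF_CLP_MAGIC:

/* Save/flag/restore idiom modelled on the CLP reset prep/done pair. */
#include <stdint.h>
#include <stdio.h>

#define CLP_MAGIC 0x80000000u

static uint32_t clp_mb = 0x1234;         /* stand-in mailbox word */

/* Remember the magic bit's current state and set it. */
static void clp_reset_prep(uint32_t *magic_val)
{
	uint32_t val = clp_mb;

	*magic_val = val & CLP_MAGIC;
	clp_mb = val | CLP_MAGIC;
}

/* Restore only the magic bit, leaving the rest of the word alone. */
static void clp_reset_done(uint32_t magic_val)
{
	uint32_t val = clp_mb;

	clp_mb = (val & ~CLP_MAGIC) | magic_val;
}

int main(void)
{
	uint32_t magic_val;

	clp_reset_prep(&magic_val);
	printf("during reset: 0x%08x\n", clp_mb);
	clp_reset_done(magic_val);
	printf("after reset:  0x%08x\n", clp_mb);
	return 0;
}
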
9596 static void bnx2x_reset_mcp_prep(struct bnx2x *bp, u32 *magic_val) in bnx2x_reset_mcp_prep() argument
9604 if (!CHIP_IS_E1(bp)) in bnx2x_reset_mcp_prep()
9605 bnx2x_clp_reset_prep(bp, magic_val); in bnx2x_reset_mcp_prep()
9608 shmem = REG_RD(bp, MISC_REG_SHARED_MEM_ADDR); in bnx2x_reset_mcp_prep()
9610 offsetof(struct shmem_region, validity_map[BP_PORT(bp)]); in bnx2x_reset_mcp_prep()
9614 REG_WR(bp, shmem + validity_offset, 0); in bnx2x_reset_mcp_prep()
9625 static void bnx2x_mcp_wait_one(struct bnx2x *bp) in bnx2x_mcp_wait_one() argument
9629 if (CHIP_REV_IS_SLOW(bp)) in bnx2x_mcp_wait_one()
9638 static int bnx2x_init_shmem(struct bnx2x *bp) in bnx2x_init_shmem() argument
9644 bp->common.shmem_base = REG_RD(bp, MISC_REG_SHARED_MEM_ADDR); in bnx2x_init_shmem()
9649 if (bp->common.shmem_base == 0xFFFFFFFF) { in bnx2x_init_shmem()
9650 bp->flags |= NO_MCP_FLAG; in bnx2x_init_shmem()
9654 if (bp->common.shmem_base) { in bnx2x_init_shmem()
9655 val = SHMEM_RD(bp, validity_map[BP_PORT(bp)]); in bnx2x_init_shmem()
9660 bnx2x_mcp_wait_one(bp); in bnx2x_init_shmem()
9669 static int bnx2x_reset_mcp_comp(struct bnx2x *bp, u32 magic_val) in bnx2x_reset_mcp_comp() argument
9671 int rc = bnx2x_init_shmem(bp); in bnx2x_reset_mcp_comp()
9674 if (!CHIP_IS_E1(bp)) in bnx2x_reset_mcp_comp()
9675 bnx2x_clp_reset_done(bp, magic_val); in bnx2x_reset_mcp_comp()
9680 static void bnx2x_pxp_prep(struct bnx2x *bp) in bnx2x_pxp_prep() argument
9682 if (!CHIP_IS_E1(bp)) { in bnx2x_pxp_prep()
9683 REG_WR(bp, PXP2_REG_RD_START_INIT, 0); in bnx2x_pxp_prep()
9684 REG_WR(bp, PXP2_REG_RQ_RBC_DONE, 0); in bnx2x_pxp_prep()
9698 static void bnx2x_process_kill_chip_reset(struct bnx2x *bp, bool global) in bnx2x_process_kill_chip_reset() argument
9750 if (CHIP_IS_E1(bp)) in bnx2x_process_kill_chip_reset()
9752 else if (CHIP_IS_E1H(bp)) in bnx2x_process_kill_chip_reset()
9754 else if (CHIP_IS_E2(bp)) in bnx2x_process_kill_chip_reset()
9777 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_2_CLEAR, in bnx2x_process_kill_chip_reset()
9780 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_CLEAR, in bnx2x_process_kill_chip_reset()
9785 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_2_SET, in bnx2x_process_kill_chip_reset()
9790 REG_WR(bp, GRCBASE_MISC + MISC_REGISTERS_RESET_REG_1_SET, reset_mask1); in bnx2x_process_kill_chip_reset()
9802 static int bnx2x_er_poll_igu_vq(struct bnx2x *bp) in bnx2x_er_poll_igu_vq() argument
9808 pend_bits = REG_RD(bp, IGU_REG_PENDING_BITS_STATUS); in bnx2x_er_poll_igu_vq()
9825 static int bnx2x_process_kill(struct bnx2x *bp, bool global) in bnx2x_process_kill() argument
9834 sr_cnt = REG_RD(bp, PXP2_REG_RD_SR_CNT); in bnx2x_process_kill()
9835 blk_cnt = REG_RD(bp, PXP2_REG_RD_BLK_CNT); in bnx2x_process_kill()
9836 port_is_idle_0 = REG_RD(bp, PXP2_REG_RD_PORT_IS_IDLE_0); in bnx2x_process_kill()
9837 port_is_idle_1 = REG_RD(bp, PXP2_REG_RD_PORT_IS_IDLE_1); in bnx2x_process_kill()
9838 pgl_exp_rom2 = REG_RD(bp, PXP2_REG_PGL_EXP_ROM2); in bnx2x_process_kill()
9839 if (CHIP_IS_E3(bp)) in bnx2x_process_kill()
9840 tags_63_32 = REG_RD(bp, PGLUE_B_REG_TAGS_63_32); in bnx2x_process_kill()
9846 (!CHIP_IS_E3(bp) || (tags_63_32 == 0xffffffff))) in bnx2x_process_kill()
9862 bnx2x_set_234_gates(bp, true); in bnx2x_process_kill()
9865 if (!CHIP_IS_E1x(bp) && bnx2x_er_poll_igu_vq(bp)) in bnx2x_process_kill()
9871 REG_WR(bp, MISC_REG_UNPREPARED, 0); in bnx2x_process_kill()
9882 bnx2x_reset_mcp_prep(bp, &val); in bnx2x_process_kill()
9885 bnx2x_pxp_prep(bp); in bnx2x_process_kill()
9889 bnx2x_process_kill_chip_reset(bp, global); in bnx2x_process_kill()
9893 if (!CHIP_IS_E1x(bp)) in bnx2x_process_kill()
9894 REG_WR(bp, PGLUE_B_REG_LATCHED_ERRORS_CLR, 0x7f); in bnx2x_process_kill()
9898 if (global && bnx2x_reset_mcp_comp(bp, val)) in bnx2x_process_kill()
9904 bnx2x_set_234_gates(bp, false); in bnx2x_process_kill()
9912 static int bnx2x_leader_reset(struct bnx2x *bp) in bnx2x_leader_reset() argument
9915 bool global = bnx2x_reset_is_global(bp); in bnx2x_leader_reset()
9921 if (!global && !BP_NOMCP(bp)) { in bnx2x_leader_reset()
9922 load_code = bnx2x_fw_command(bp, DRV_MSG_CODE_LOAD_REQ, in bnx2x_leader_reset()
9935 load_code = bnx2x_fw_command(bp, DRV_MSG_CODE_LOAD_DONE, 0); in bnx2x_leader_reset()
9944 if (bnx2x_process_kill(bp, global)) { in bnx2x_leader_reset()
9946 BP_PATH(bp)); in bnx2x_leader_reset()
9955 bnx2x_set_reset_done(bp); in bnx2x_leader_reset()
9957 bnx2x_clear_reset_global(bp); in bnx2x_leader_reset()
9961 if (!global && !BP_NOMCP(bp)) { in bnx2x_leader_reset()
9962 bnx2x_fw_command(bp, DRV_MSG_CODE_UNLOAD_REQ_WOL_MCP, 0); in bnx2x_leader_reset()
9963 bnx2x_fw_command(bp, DRV_MSG_CODE_UNLOAD_DONE, 0); in bnx2x_leader_reset()
9966 bp->is_leader = 0; in bnx2x_leader_reset()
9967 bnx2x_release_leader_lock(bp); in bnx2x_leader_reset()
9972 static void bnx2x_recovery_failed(struct bnx2x *bp) in bnx2x_recovery_failed() argument
9974 netdev_err(bp->dev, "Recovery has failed. Power cycle is needed.\n"); in bnx2x_recovery_failed()
9977 netif_device_detach(bp->dev); in bnx2x_recovery_failed()
9983 bnx2x_set_reset_in_progress(bp); in bnx2x_recovery_failed()
9986 bnx2x_set_power_state(bp, PCI_D3hot); in bnx2x_recovery_failed()
9988 bp->recovery_state = BNX2X_RECOVERY_FAILED; in bnx2x_recovery_failed()
9998 static void bnx2x_parity_recover(struct bnx2x *bp) in bnx2x_parity_recover() argument
10005 for (vf_idx = 0; vf_idx < bp->requested_nr_virtfn; vf_idx++) { in bnx2x_parity_recover()
10006 struct bnx2x_virtf *vf = BP_VF(bp, vf_idx); in bnx2x_parity_recover()
10014 switch (bp->recovery_state) { in bnx2x_parity_recover()
10017 is_parity = bnx2x_chk_parity_attn(bp, &global, false); in bnx2x_parity_recover()
10021 if (bnx2x_trylock_leader_lock(bp)) { in bnx2x_parity_recover()
10022 bnx2x_set_reset_in_progress(bp); in bnx2x_parity_recover()
10030 bnx2x_set_reset_global(bp); in bnx2x_parity_recover()
10032 bp->is_leader = 1; in bnx2x_parity_recover()
10037 if (bnx2x_nic_unload(bp, UNLOAD_RECOVERY, false)) in bnx2x_parity_recover()
10040 bp->recovery_state = BNX2X_RECOVERY_WAIT; in bnx2x_parity_recover()
10051 if (bp->is_leader) { in bnx2x_parity_recover()
10052 int other_engine = BP_PATH(bp) ? 0 : 1; in bnx2x_parity_recover()
10054 bnx2x_get_load_status(bp, other_engine); in bnx2x_parity_recover()
10056 bnx2x_get_load_status(bp, BP_PATH(bp)); in bnx2x_parity_recover()
10057 global = bnx2x_reset_is_global(bp); in bnx2x_parity_recover()
10072 schedule_delayed_work(&bp->sp_rtnl_task, in bnx2x_parity_recover()
10081 if (bnx2x_leader_reset(bp)) { in bnx2x_parity_recover()
10082 bnx2x_recovery_failed(bp); in bnx2x_parity_recover()
10094 if (!bnx2x_reset_is_done(bp, BP_PATH(bp))) { in bnx2x_parity_recover()
10101 if (bnx2x_trylock_leader_lock(bp)) { in bnx2x_parity_recover()
10105 bp->is_leader = 1; in bnx2x_parity_recover()
10109 schedule_delayed_work(&bp->sp_rtnl_task, in bnx2x_parity_recover()
10118 if (bnx2x_reset_is_global(bp)) { in bnx2x_parity_recover()
10120 &bp->sp_rtnl_task, in bnx2x_parity_recover()
10126 bp->eth_stats.recoverable_error; in bnx2x_parity_recover()
10128 bp->eth_stats.unrecoverable_error; in bnx2x_parity_recover()
10129 bp->recovery_state = in bnx2x_parity_recover()
10131 if (bnx2x_nic_load(bp, LOAD_NORMAL)) { in bnx2x_parity_recover()
10133 netdev_err(bp->dev, in bnx2x_parity_recover()
10136 netif_device_detach(bp->dev); in bnx2x_parity_recover()
10139 bp, PCI_D3hot); in bnx2x_parity_recover()
10142 bp->recovery_state = in bnx2x_parity_recover()
10147 bp->eth_stats.recoverable_error = in bnx2x_parity_recover()
10149 bp->eth_stats.unrecoverable_error = in bnx2x_parity_recover()
10161 static int bnx2x_udp_port_update(struct bnx2x *bp) in bnx2x_udp_port_update() argument
10174 func_params.f_obj = &bp->func_obj; in bnx2x_udp_port_update()
10181 if (bp->udp_tunnel_ports[BNX2X_UDP_PORT_GENEVE]) { in bnx2x_udp_port_update()
10182 geneve_port = bp->udp_tunnel_ports[BNX2X_UDP_PORT_GENEVE]; in bnx2x_udp_port_update()
10186 if (bp->udp_tunnel_ports[BNX2X_UDP_PORT_VXLAN]) { in bnx2x_udp_port_update()
10187 vxlan_port = bp->udp_tunnel_ports[BNX2X_UDP_PORT_VXLAN]; in bnx2x_udp_port_update()
10195 rc = bnx2x_func_state_change(bp, &func_params); in bnx2x_udp_port_update()
10209 struct bnx2x *bp = netdev_priv(netdev); in bnx2x_udp_tunnel_sync() local
10213 bp->udp_tunnel_ports[table] = be16_to_cpu(ti.port); in bnx2x_udp_tunnel_sync()
10215 return bnx2x_udp_port_update(bp); in bnx2x_udp_tunnel_sync()
10235 struct bnx2x *bp = container_of(work, struct bnx2x, sp_rtnl_task.work); in bnx2x_sp_rtnl_task() local
10239 if (!netif_running(bp->dev)) { in bnx2x_sp_rtnl_task()
10244 if (unlikely(bp->recovery_state != BNX2X_RECOVERY_DONE)) { in bnx2x_sp_rtnl_task()
10254 bp->sp_rtnl_state = 0; in bnx2x_sp_rtnl_task()
10257 bnx2x_parity_recover(bp); in bnx2x_sp_rtnl_task()
10263 if (test_and_clear_bit(BNX2X_SP_RTNL_TX_TIMEOUT, &bp->sp_rtnl_state)) { in bnx2x_sp_rtnl_task()
10274 bp->sp_rtnl_state = 0; in bnx2x_sp_rtnl_task()
10278 bp->link_vars.link_up = 0; in bnx2x_sp_rtnl_task()
10279 bp->force_link_down = true; in bnx2x_sp_rtnl_task()
10280 netif_carrier_off(bp->dev); in bnx2x_sp_rtnl_task()
10283 bnx2x_nic_unload(bp, UNLOAD_NORMAL, true); in bnx2x_sp_rtnl_task()
10288 if (bnx2x_nic_load(bp, LOAD_NORMAL) == -ENOMEM) { in bnx2x_sp_rtnl_task()
10289 bnx2x_nic_unload(bp, UNLOAD_NORMAL, true); in bnx2x_sp_rtnl_task()
10290 if (bnx2x_nic_load(bp, LOAD_NORMAL)) in bnx2x_sp_rtnl_task()
10299 if (test_and_clear_bit(BNX2X_SP_RTNL_SETUP_TC, &bp->sp_rtnl_state)) in bnx2x_sp_rtnl_task()
10300 bnx2x_setup_tc(bp->dev, bp->dcbx_port_params.ets.num_of_cos); in bnx2x_sp_rtnl_task()
10301 if (test_and_clear_bit(BNX2X_SP_RTNL_AFEX_F_UPDATE, &bp->sp_rtnl_state)) in bnx2x_sp_rtnl_task()
10302 bnx2x_after_function_update(bp); in bnx2x_sp_rtnl_task()
10308 if (test_and_clear_bit(BNX2X_SP_RTNL_FAN_FAILURE, &bp->sp_rtnl_state)) { in bnx2x_sp_rtnl_task()
10310 netif_device_detach(bp->dev); in bnx2x_sp_rtnl_task()
10311 bnx2x_close(bp->dev); in bnx2x_sp_rtnl_task()
10316 if (test_and_clear_bit(BNX2X_SP_RTNL_VFPF_MCAST, &bp->sp_rtnl_state)) { in bnx2x_sp_rtnl_task()
10319 bnx2x_vfpf_set_mcast(bp->dev); in bnx2x_sp_rtnl_task()
10322 &bp->sp_rtnl_state)){ in bnx2x_sp_rtnl_task()
10323 if (netif_carrier_ok(bp->dev)) { in bnx2x_sp_rtnl_task()
10324 bnx2x_tx_disable(bp); in bnx2x_sp_rtnl_task()
10329 if (test_and_clear_bit(BNX2X_SP_RTNL_RX_MODE, &bp->sp_rtnl_state)) { in bnx2x_sp_rtnl_task()
10331 bnx2x_set_rx_mode_inner(bp); in bnx2x_sp_rtnl_task()
10335 &bp->sp_rtnl_state)) in bnx2x_sp_rtnl_task()
10336 bnx2x_pf_set_vfs_vlan(bp); in bnx2x_sp_rtnl_task()
10338 if (test_and_clear_bit(BNX2X_SP_RTNL_TX_STOP, &bp->sp_rtnl_state)) { in bnx2x_sp_rtnl_task()
10339 bnx2x_dcbx_stop_hw_tx(bp); in bnx2x_sp_rtnl_task()
10340 bnx2x_dcbx_resume_hw_tx(bp); in bnx2x_sp_rtnl_task()
10344 &bp->sp_rtnl_state)) in bnx2x_sp_rtnl_task()
10345 bnx2x_update_mng_version(bp); in bnx2x_sp_rtnl_task()
10347 if (test_and_clear_bit(BNX2X_SP_RTNL_UPDATE_SVID, &bp->sp_rtnl_state)) in bnx2x_sp_rtnl_task()
10348 bnx2x_handle_update_svid_cmd(bp); in bnx2x_sp_rtnl_task()
10356 if (IS_SRIOV(bp) && test_and_clear_bit(BNX2X_SP_RTNL_ENABLE_SRIOV, in bnx2x_sp_rtnl_task()
10357 &bp->sp_rtnl_state)) { in bnx2x_sp_rtnl_task()
10358 bnx2x_disable_sriov(bp); in bnx2x_sp_rtnl_task()
10359 bnx2x_enable_sriov(bp); in bnx2x_sp_rtnl_task()
10365 struct bnx2x *bp = container_of(work, struct bnx2x, period_task.work); in bnx2x_period_task() local
10367 if (!netif_running(bp->dev)) in bnx2x_period_task()
10370 if (CHIP_REV_IS_SLOW(bp)) { in bnx2x_period_task()
10375 bnx2x_acquire_phy_lock(bp); in bnx2x_period_task()
10382 if (bp->port.pmf) { in bnx2x_period_task()
10383 bnx2x_period_func(&bp->link_params, &bp->link_vars); in bnx2x_period_task()
10386 queue_delayed_work(bnx2x_wq, &bp->period_task, 1*HZ); in bnx2x_period_task()
10389 bnx2x_release_phy_lock(bp); in bnx2x_period_task()
10398 static u32 bnx2x_get_pretend_reg(struct bnx2x *bp) in bnx2x_get_pretend_reg() argument
10402 return base + (BP_ABS_FUNC(bp)) * stride; in bnx2x_get_pretend_reg()
10405 static bool bnx2x_prev_unload_close_umac(struct bnx2x *bp, in bnx2x_prev_unload_close_umac() argument
10418 vals->umac_val[port] = REG_RD(bp, vals->umac_addr[port]); in bnx2x_prev_unload_close_umac()
10419 REG_WR(bp, vals->umac_addr[port], 0); in bnx2x_prev_unload_close_umac()
10424 static void bnx2x_prev_unload_close_mac(struct bnx2x *bp, in bnx2x_prev_unload_close_mac() argument
10429 u8 port = BP_PORT(bp); in bnx2x_prev_unload_close_mac()
10434 reset_reg = REG_RD(bp, MISC_REG_RESET_REG_2); in bnx2x_prev_unload_close_mac()
10436 if (!CHIP_IS_E3(bp)) { in bnx2x_prev_unload_close_mac()
10437 val = REG_RD(bp, NIG_REG_BMAC0_REGS_OUT_EN + port * 4); in bnx2x_prev_unload_close_mac()
10442 base_addr = BP_PORT(bp) ? NIG_REG_INGRESS_BMAC1_MEM in bnx2x_prev_unload_close_mac()
10444 offset = CHIP_IS_E2(bp) ? BIGMAC2_REGISTER_BMAC_CONTROL in bnx2x_prev_unload_close_mac()
10453 wb_data[0] = REG_RD(bp, base_addr + offset); in bnx2x_prev_unload_close_mac()
10454 wb_data[1] = REG_RD(bp, base_addr + offset + 0x4); in bnx2x_prev_unload_close_mac()
10459 REG_WR(bp, vals->bmac_addr, wb_data[0]); in bnx2x_prev_unload_close_mac()
10460 REG_WR(bp, vals->bmac_addr + 0x4, wb_data[1]); in bnx2x_prev_unload_close_mac()
10463 vals->emac_addr = NIG_REG_NIG_EMAC0_EN + BP_PORT(bp)*4; in bnx2x_prev_unload_close_mac()
10464 vals->emac_val = REG_RD(bp, vals->emac_addr); in bnx2x_prev_unload_close_mac()
10465 REG_WR(bp, vals->emac_addr, 0); in bnx2x_prev_unload_close_mac()
10470 base_addr = BP_PORT(bp) ? GRCBASE_XMAC1 : GRCBASE_XMAC0; in bnx2x_prev_unload_close_mac()
10471 val = REG_RD(bp, base_addr + XMAC_REG_PFC_CTRL_HI); in bnx2x_prev_unload_close_mac()
10472 REG_WR(bp, base_addr + XMAC_REG_PFC_CTRL_HI, in bnx2x_prev_unload_close_mac()
10474 REG_WR(bp, base_addr + XMAC_REG_PFC_CTRL_HI, in bnx2x_prev_unload_close_mac()
10477 vals->xmac_val = REG_RD(bp, vals->xmac_addr); in bnx2x_prev_unload_close_mac()
10478 REG_WR(bp, vals->xmac_addr, 0); in bnx2x_prev_unload_close_mac()
10482 mac_stopped |= bnx2x_prev_unload_close_umac(bp, 0, in bnx2x_prev_unload_close_mac()
10484 mac_stopped |= bnx2x_prev_unload_close_umac(bp, 1, in bnx2x_prev_unload_close_mac()
10503 static bool bnx2x_prev_is_after_undi(struct bnx2x *bp) in bnx2x_prev_is_after_undi() argument
10508 if (!(REG_RD(bp, MISC_REG_RESET_REG_1) & in bnx2x_prev_is_after_undi()
10512 if (REG_RD(bp, DORQ_REG_NORM_CID_OFST) == 0x7) { in bnx2x_prev_is_after_undi()
10520 static void bnx2x_prev_unload_undi_inc(struct bnx2x *bp, u8 inc) in bnx2x_prev_unload_undi_inc() argument
10525 if (BP_FUNC(bp) < 2) in bnx2x_prev_unload_undi_inc()
10526 addr = BNX2X_PREV_UNDI_PROD_ADDR(BP_PORT(bp)); in bnx2x_prev_unload_undi_inc()
10528 addr = BNX2X_PREV_UNDI_PROD_ADDR_H(BP_FUNC(bp) - 2); in bnx2x_prev_unload_undi_inc()
10530 tmp_reg = REG_RD(bp, addr); in bnx2x_prev_unload_undi_inc()
10535 REG_WR(bp, addr, tmp_reg); in bnx2x_prev_unload_undi_inc()
10538 BP_PORT(bp), BP_FUNC(bp), addr, bd, rcq); in bnx2x_prev_unload_undi_inc()
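
bnx2x_prev_unload_undi_inc() above bumps two 16-bit producers that share a single 32-bit register; the debug line names them bd and rcq. A small stand-alone sketch of just the unpack/increment/repack arithmetic (register access omitted):

/* Hedged model of the producer bump: bd in the high half, rcq in the low. */
#include <stdint.h>
#include <stdio.h>

static uint32_t bump_producers(uint32_t reg, uint8_t inc)
{
	uint16_t bd  = (uint16_t)(reg >> 16) + inc;     /* bd producer  */
	uint16_t rcq = (uint16_t)(reg & 0xffff) + inc;  /* rcq producer */

	return ((uint32_t)bd << 16) | rcq;
}

int main(void)
{
	printf("0x%08x\n", bump_producers(0x00100005, 1)); /* 0x00110006 */
	return 0;
}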
10541 static int bnx2x_prev_mcp_done(struct bnx2x *bp) in bnx2x_prev_mcp_done() argument
10543 u32 rc = bnx2x_fw_command(bp, DRV_MSG_CODE_UNLOAD_DONE, in bnx2x_prev_mcp_done()
10554 bnx2x_prev_path_get_entry(struct bnx2x *bp) in bnx2x_prev_path_get_entry() argument
10559 if (PCI_SLOT(bp->pdev->devfn) == tmp_list->slot && in bnx2x_prev_path_get_entry()
10560 bp->pdev->bus->number == tmp_list->bus && in bnx2x_prev_path_get_entry()
10561 BP_PATH(bp) == tmp_list->path) in bnx2x_prev_path_get_entry()
10567 static int bnx2x_prev_path_mark_eeh(struct bnx2x *bp) in bnx2x_prev_path_mark_eeh() argument
10578 tmp_list = bnx2x_prev_path_get_entry(bp); in bnx2x_prev_path_mark_eeh()
10584 BP_PATH(bp)); in bnx2x_prev_path_mark_eeh()
10592 static bool bnx2x_prev_is_path_marked(struct bnx2x *bp) in bnx2x_prev_is_path_marked() argument
10600 tmp_list = bnx2x_prev_path_get_entry(bp); in bnx2x_prev_is_path_marked()
10604 BP_PATH(bp)); in bnx2x_prev_is_path_marked()
10608 BP_PATH(bp)); in bnx2x_prev_is_path_marked()
10617 bool bnx2x_port_after_undi(struct bnx2x *bp) in bnx2x_port_after_undi() argument
10624 entry = bnx2x_prev_path_get_entry(bp); in bnx2x_port_after_undi()
10625 val = !!(entry && (entry->undi & (1 << BP_PORT(bp)))); in bnx2x_port_after_undi()
10632 static int bnx2x_prev_mark_path(struct bnx2x *bp, bool after_undi) in bnx2x_prev_mark_path() argument
10644 tmp_list = bnx2x_prev_path_get_entry(bp); in bnx2x_prev_mark_path()
10650 BP_PATH(bp)); in bnx2x_prev_mark_path()
10665 tmp_list->bus = bp->pdev->bus->number; in bnx2x_prev_mark_path()
10666 tmp_list->slot = PCI_SLOT(bp->pdev->devfn); in bnx2x_prev_mark_path()
10667 tmp_list->path = BP_PATH(bp); in bnx2x_prev_mark_path()
10669 tmp_list->undi = after_undi ? (1 << BP_PORT(bp)) : 0; in bnx2x_prev_mark_path()
10677 BP_PATH(bp)); in bnx2x_prev_mark_path()
10685 static int bnx2x_do_flr(struct bnx2x *bp) in bnx2x_do_flr() argument
10687 struct pci_dev *dev = bp->pdev; in bnx2x_do_flr()
10689 if (CHIP_IS_E1x(bp)) { in bnx2x_do_flr()
10695 if (bp->common.bc_ver < REQ_BC_VER_4_INITIATE_FLR) { in bnx2x_do_flr()
10697 bp->common.bc_ver); in bnx2x_do_flr()
10705 bnx2x_fw_command(bp, DRV_MSG_CODE_INITIATE_FLR, 0); in bnx2x_do_flr()
10710 static int bnx2x_prev_unload_uncommon(struct bnx2x *bp) in bnx2x_prev_unload_uncommon() argument
10717 if (bnx2x_prev_is_path_marked(bp)) in bnx2x_prev_unload_uncommon()
10718 return bnx2x_prev_mcp_done(bp); in bnx2x_prev_unload_uncommon()
10723 if (bnx2x_prev_is_after_undi(bp)) in bnx2x_prev_unload_uncommon()
10730 rc = bnx2x_compare_fw_ver(bp, FW_MSG_CODE_DRV_LOAD_FUNCTION, false); in bnx2x_prev_unload_uncommon()
10735 rc = bnx2x_do_flr(bp); in bnx2x_prev_unload_uncommon()
10748 rc = bnx2x_prev_mcp_done(bp); in bnx2x_prev_unload_uncommon()
10755 static int bnx2x_prev_unload_common(struct bnx2x *bp) in bnx2x_prev_unload_common() argument
10769 if (bnx2x_prev_is_path_marked(bp)) in bnx2x_prev_unload_common()
10770 return bnx2x_prev_mcp_done(bp); in bnx2x_prev_unload_common()
10772 reset_reg = REG_RD(bp, MISC_REG_RESET_REG_1); in bnx2x_prev_unload_common()
10779 bnx2x_prev_unload_close_mac(bp, &mac_vals); in bnx2x_prev_unload_common()
10782 bnx2x_set_rx_filter(&bp->link_params, 0); in bnx2x_prev_unload_common()
10783 bp->link_params.port ^= 1; in bnx2x_prev_unload_common()
10784 bnx2x_set_rx_filter(&bp->link_params, 0); in bnx2x_prev_unload_common()
10785 bp->link_params.port ^= 1; in bnx2x_prev_unload_common()
10788 if (bnx2x_prev_is_after_undi(bp)) { in bnx2x_prev_unload_common()
10791 REG_WR(bp, DORQ_REG_NORM_CID_OFST, 0); in bnx2x_prev_unload_common()
10793 REG_RD(bp, NIG_REG_NIG_INT_STS_CLR_0); in bnx2x_prev_unload_common()
10795 if (!CHIP_IS_E1x(bp)) in bnx2x_prev_unload_common()
10797 REG_WR(bp, PGLUE_B_REG_INTERNAL_PFID_ENABLE_MASTER, 0); in bnx2x_prev_unload_common()
10800 tmp_reg = REG_RD(bp, BRB1_REG_NUM_OF_FULL_BLOCKS); in bnx2x_prev_unload_common()
10804 tmp_reg = REG_RD(bp, BRB1_REG_NUM_OF_FULL_BLOCKS); in bnx2x_prev_unload_common()
10818 bnx2x_prev_unload_undi_inc(bp, 1); in bnx2x_prev_unload_common()
10828 bnx2x_reset_common(bp); in bnx2x_prev_unload_common()
10831 REG_WR(bp, mac_vals.xmac_addr, mac_vals.xmac_val); in bnx2x_prev_unload_common()
10833 REG_WR(bp, mac_vals.umac_addr[0], mac_vals.umac_val[0]); in bnx2x_prev_unload_common()
10835 REG_WR(bp, mac_vals.umac_addr[1], mac_vals.umac_val[1]); in bnx2x_prev_unload_common()
10837 REG_WR(bp, mac_vals.emac_addr, mac_vals.emac_val); in bnx2x_prev_unload_common()
10839 REG_WR(bp, mac_vals.bmac_addr, mac_vals.bmac_val[0]); in bnx2x_prev_unload_common()
10840 REG_WR(bp, mac_vals.bmac_addr + 4, mac_vals.bmac_val[1]); in bnx2x_prev_unload_common()
10843 rc = bnx2x_prev_mark_path(bp, prev_undi); in bnx2x_prev_unload_common()
10845 bnx2x_prev_mcp_done(bp); in bnx2x_prev_unload_common()
10849 return bnx2x_prev_mcp_done(bp); in bnx2x_prev_unload_common()
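
The close_mac/unload_common matches above capture each MAC enable register into mac_vals, zero it to quiesce the MAC before the common reset, and afterwards restore only the registers that were actually saved (addr != 0). A hedged stand-alone model of that save/zero/restore flow, with register I/O faked by an array:

/* Illustrative only; the real code targets BMAC/EMAC/XMAC/UMAC registers. */
#include <stdint.h>
#include <stdio.h>

static uint32_t regs[16];                       /* stand-in register file */
static uint32_t reg_rd(uint32_t a) { return regs[a]; }
static void reg_wr(uint32_t a, uint32_t v) { regs[a] = v; }

struct mac_vals { uint32_t addr, val; };        /* one saved register     */

static void close_mac(struct mac_vals *v, uint32_t enable_reg)
{
	v->addr = enable_reg;                   /* remember what we touched */
	v->val  = reg_rd(enable_reg);           /* ... and its old value    */
	reg_wr(enable_reg, 0);                  /* quiesce the MAC          */
}

int main(void)
{
	struct mac_vals emac = { 0, 0 };

	regs[3] = 0x1;                          /* left enabled by prev drv */
	close_mac(&emac, 3);
	/* ... common reset would happen here ... */
	if (emac.addr)                          /* restore only if saved    */
		reg_wr(emac.addr, emac.val);

	printf("restored: 0x%x\n", reg_rd(3));
	return 0;
}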
10852 static int bnx2x_prev_unload(struct bnx2x *bp) in bnx2x_prev_unload() argument
10861 bnx2x_clean_pglue_errors(bp); in bnx2x_prev_unload()
10864 hw_lock_reg = (BP_FUNC(bp) <= 5) ? in bnx2x_prev_unload()
10865 (MISC_REG_DRIVER_CONTROL_1 + BP_FUNC(bp) * 8) : in bnx2x_prev_unload()
10866 (MISC_REG_DRIVER_CONTROL_7 + (BP_FUNC(bp) - 6) * 8); in bnx2x_prev_unload()
10868 hw_lock_val = REG_RD(bp, hw_lock_reg); in bnx2x_prev_unload()
10872 REG_WR(bp, MCP_REG_MCPR_NVM_SW_ARB, in bnx2x_prev_unload()
10873 (MCPR_NVM_SW_ARB_ARB_REQ_CLR1 << BP_PORT(bp))); in bnx2x_prev_unload()
10877 REG_WR(bp, hw_lock_reg, 0xffffffff); in bnx2x_prev_unload()
10881 if (MCPR_ACCESS_LOCK_LOCK & REG_RD(bp, MCP_REG_MCPR_ACCESS_LOCK)) { in bnx2x_prev_unload()
10883 bnx2x_release_alr(bp); in bnx2x_prev_unload()
10889 fw = bnx2x_fw_command(bp, DRV_MSG_CODE_UNLOAD_REQ_WOL_DIS, 0); in bnx2x_prev_unload()
10902 aer = !!(bnx2x_prev_path_get_entry(bp) && in bnx2x_prev_unload()
10903 bnx2x_prev_path_get_entry(bp)->aer); in bnx2x_prev_unload()
10908 rc = bnx2x_prev_unload_common(bp); in bnx2x_prev_unload()
10913 rc = bnx2x_prev_unload_uncommon(bp); in bnx2x_prev_unload()
10926 if (bnx2x_port_after_undi(bp)) in bnx2x_prev_unload()
10927 bp->link_params.feature_config_flags |= in bnx2x_prev_unload()
10935 static void bnx2x_get_common_hwinfo(struct bnx2x *bp) in bnx2x_get_common_hwinfo() argument
10942 val = REG_RD(bp, MISC_REG_CHIP_NUM); in bnx2x_get_common_hwinfo()
10944 val = REG_RD(bp, MISC_REG_CHIP_REV); in bnx2x_get_common_hwinfo()
10950 val = REG_RD(bp, PCICFG_OFFSET + PCI_ID_VAL3); in bnx2x_get_common_hwinfo()
10952 val = REG_RD(bp, MISC_REG_BOND_ID); in bnx2x_get_common_hwinfo()
10954 bp->common.chip_id = id; in bnx2x_get_common_hwinfo()
10957 if (REG_RD(bp, MISC_REG_CHIP_TYPE) & MISC_REG_CHIP_TYPE_57811_MASK) { in bnx2x_get_common_hwinfo()
10958 if (CHIP_IS_57810(bp)) in bnx2x_get_common_hwinfo()
10959 bp->common.chip_id = (CHIP_NUM_57811 << 16) | in bnx2x_get_common_hwinfo()
10960 (bp->common.chip_id & 0x0000FFFF); in bnx2x_get_common_hwinfo()
10961 else if (CHIP_IS_57810_MF(bp)) in bnx2x_get_common_hwinfo()
10962 bp->common.chip_id = (CHIP_NUM_57811_MF << 16) | in bnx2x_get_common_hwinfo()
10963 (bp->common.chip_id & 0x0000FFFF); in bnx2x_get_common_hwinfo()
10964 bp->common.chip_id |= 0x1; in bnx2x_get_common_hwinfo()
10968 bp->db_size = (1 << BNX2X_DB_SHIFT); in bnx2x_get_common_hwinfo()
10970 if (!CHIP_IS_E1x(bp)) { in bnx2x_get_common_hwinfo()
10971 val = REG_RD(bp, MISC_REG_PORT4MODE_EN_OVWR); in bnx2x_get_common_hwinfo()
10973 val = REG_RD(bp, MISC_REG_PORT4MODE_EN); in bnx2x_get_common_hwinfo()
10978 bp->common.chip_port_mode = val ? CHIP_4_PORT_MODE : in bnx2x_get_common_hwinfo()
10981 if (CHIP_MODE_IS_4_PORT(bp)) in bnx2x_get_common_hwinfo()
10982 bp->pfid = (bp->pf_num >> 1); /* 0..3 */ in bnx2x_get_common_hwinfo()
10984 bp->pfid = (bp->pf_num & 0x6); /* 0, 2, 4, 6 */ in bnx2x_get_common_hwinfo()
10986 bp->common.chip_port_mode = CHIP_PORT_MODE_NONE; /* N/A */ in bnx2x_get_common_hwinfo()
10987 bp->pfid = bp->pf_num; /* 0..7 */ in bnx2x_get_common_hwinfo()
10990 BNX2X_DEV_INFO("pf_id: %x", bp->pfid); in bnx2x_get_common_hwinfo()
10992 bp->link_params.chip_id = bp->common.chip_id; in bnx2x_get_common_hwinfo()
10995 val = (REG_RD(bp, 0x2874) & 0x55); in bnx2x_get_common_hwinfo()
10996 if ((bp->common.chip_id & 0x1) || in bnx2x_get_common_hwinfo()
10997 (CHIP_IS_E1(bp) && val) || (CHIP_IS_E1H(bp) && (val == 0x55))) { in bnx2x_get_common_hwinfo()
10998 bp->flags |= ONE_PORT_FLAG; in bnx2x_get_common_hwinfo()
11002 val = REG_RD(bp, MCP_REG_MCPR_NVM_CFG4); in bnx2x_get_common_hwinfo()
11003 bp->common.flash_size = (BNX2X_NVRAM_1MB_SIZE << in bnx2x_get_common_hwinfo()
11006 bp->common.flash_size, bp->common.flash_size); in bnx2x_get_common_hwinfo()
11008 bnx2x_init_shmem(bp); in bnx2x_get_common_hwinfo()
11010 bp->common.shmem2_base = REG_RD(bp, (BP_PATH(bp) ? in bnx2x_get_common_hwinfo()
11014 bp->link_params.shmem_base = bp->common.shmem_base; in bnx2x_get_common_hwinfo()
11015 bp->link_params.shmem2_base = bp->common.shmem2_base; in bnx2x_get_common_hwinfo()
11016 if (SHMEM2_RD(bp, size) > in bnx2x_get_common_hwinfo()
11017 (u32)offsetof(struct shmem2_region, lfa_host_addr[BP_PORT(bp)])) in bnx2x_get_common_hwinfo()
11018 bp->link_params.lfa_base = in bnx2x_get_common_hwinfo()
11019 REG_RD(bp, bp->common.shmem2_base + in bnx2x_get_common_hwinfo()
11021 lfa_host_addr[BP_PORT(bp)])); in bnx2x_get_common_hwinfo()
11023 bp->link_params.lfa_base = 0; in bnx2x_get_common_hwinfo()
11025 bp->common.shmem_base, bp->common.shmem2_base); in bnx2x_get_common_hwinfo()
11027 if (!bp->common.shmem_base) { in bnx2x_get_common_hwinfo()
11029 bp->flags |= NO_MCP_FLAG; in bnx2x_get_common_hwinfo()
11033 bp->common.hw_config = SHMEM_RD(bp, dev_info.shared_hw_config.config); in bnx2x_get_common_hwinfo()
11034 BNX2X_DEV_INFO("hw_config 0x%08x\n", bp->common.hw_config); in bnx2x_get_common_hwinfo()
11036 bp->link_params.hw_led_mode = ((bp->common.hw_config & in bnx2x_get_common_hwinfo()
11040 bp->link_params.feature_config_flags = 0; in bnx2x_get_common_hwinfo()
11041 val = SHMEM_RD(bp, dev_info.shared_feature_config.config); in bnx2x_get_common_hwinfo()
11043 bp->link_params.feature_config_flags |= in bnx2x_get_common_hwinfo()
11046 bp->link_params.feature_config_flags &= in bnx2x_get_common_hwinfo()
11049 val = SHMEM_RD(bp, dev_info.bc_rev) >> 8; in bnx2x_get_common_hwinfo()
11050 bp->common.bc_ver = val; in bnx2x_get_common_hwinfo()
11058 bp->link_params.feature_config_flags |= in bnx2x_get_common_hwinfo()
11062 bp->link_params.feature_config_flags |= in bnx2x_get_common_hwinfo()
11065 bp->link_params.feature_config_flags |= in bnx2x_get_common_hwinfo()
11068 bp->link_params.feature_config_flags |= in bnx2x_get_common_hwinfo()
11072 bp->link_params.feature_config_flags |= in bnx2x_get_common_hwinfo()
11076 bp->flags |= (val >= REQ_BC_VER_4_PFC_STATS_SUPPORTED) ? in bnx2x_get_common_hwinfo()
11079 bp->flags |= (val >= REQ_BC_VER_4_FCOE_FEATURES) ? in bnx2x_get_common_hwinfo()
11082 bp->flags |= (val >= REQ_BC_VER_4_DCBX_ADMIN_MSG_NON_PMF) ? in bnx2x_get_common_hwinfo()
11085 bp->flags |= (val >= REQ_BC_VER_4_RMMOD_CMD) ? in bnx2x_get_common_hwinfo()
11088 boot_mode = SHMEM_RD(bp, in bnx2x_get_common_hwinfo()
11089 dev_info.port_feature_config[BP_PORT(bp)].mba_config) & in bnx2x_get_common_hwinfo()
11093 bp->common.boot_mode = FEATURE_ETH_BOOTMODE_PXE; in bnx2x_get_common_hwinfo()
11096 bp->common.boot_mode = FEATURE_ETH_BOOTMODE_ISCSI; in bnx2x_get_common_hwinfo()
11099 bp->common.boot_mode = FEATURE_ETH_BOOTMODE_FCOE; in bnx2x_get_common_hwinfo()
11102 bp->common.boot_mode = FEATURE_ETH_BOOTMODE_NONE; in bnx2x_get_common_hwinfo()
11106 pci_read_config_word(bp->pdev, bp->pdev->pm_cap + PCI_PM_PMC, &pmc); in bnx2x_get_common_hwinfo()
11107 bp->flags |= (pmc & PCI_PM_CAP_PME_D3cold) ? 0 : NO_WOL_FLAG; in bnx2x_get_common_hwinfo()
11110 (bp->flags & NO_WOL_FLAG) ? "not " : ""); in bnx2x_get_common_hwinfo()
11112 val = SHMEM_RD(bp, dev_info.shared_hw_config.part_num); in bnx2x_get_common_hwinfo()
11113 val2 = SHMEM_RD(bp, dev_info.shared_hw_config.part_num[4]); in bnx2x_get_common_hwinfo()
11114 val3 = SHMEM_RD(bp, dev_info.shared_hw_config.part_num[8]); in bnx2x_get_common_hwinfo()
11115 val4 = SHMEM_RD(bp, dev_info.shared_hw_config.part_num[12]); in bnx2x_get_common_hwinfo()
11117 dev_info(&bp->pdev->dev, "part number %X-%X-%X-%X\n", in bnx2x_get_common_hwinfo()
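
bnx2x_get_common_hwinfo() above assembles bp->common.chip_id from several small revision fields read out of separate registers. A stand-alone sketch of that field packing; the widths and bit positions used here are illustrative, not the exact hardware layout:

/* Hedged sketch: pack chip number / silicon rev / metal / bond into one id. */
#include <stdint.h>
#include <stdio.h>

static uint32_t pack_chip_id(uint16_t num, uint8_t rev, uint8_t metal,
			     uint8_t bond)
{
	uint32_t id = 0;

	id |= (uint32_t)num << 16;           /* chip number       */
	id |= (uint32_t)(rev & 0xf) << 12;   /* silicon revision  */
	id |= (uint32_t)(metal & 0xf) << 4;  /* metal revision    */
	id |= bond & 0xf;                    /* bond option       */
	return id;
}

int main(void)
{
	printf("chip_id = 0x%08x\n", pack_chip_id(0x168e, 0, 0, 0));
	return 0;
}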
11124 static int bnx2x_get_igu_cam_info(struct bnx2x *bp) in bnx2x_get_igu_cam_info() argument
11126 int pfid = BP_FUNC(bp); in bnx2x_get_igu_cam_info()
11131 bp->igu_base_sb = 0xff; in bnx2x_get_igu_cam_info()
11132 if (CHIP_INT_MODE_IS_BC(bp)) { in bnx2x_get_igu_cam_info()
11133 int vn = BP_VN(bp); in bnx2x_get_igu_cam_info()
11134 igu_sb_cnt = bp->igu_sb_cnt; in bnx2x_get_igu_cam_info()
11135 bp->igu_base_sb = (CHIP_MODE_IS_4_PORT(bp) ? pfid : vn) * in bnx2x_get_igu_cam_info()
11138 bp->igu_dsb_id = E1HVN_MAX * FP_SB_MAX_E1x + in bnx2x_get_igu_cam_info()
11139 (CHIP_MODE_IS_4_PORT(bp) ? pfid : vn); in bnx2x_get_igu_cam_info()
11147 val = REG_RD(bp, IGU_REG_MAPPING_MEMORY + igu_sb_id * 4); in bnx2x_get_igu_cam_info()
11156 bp->igu_dsb_id = igu_sb_id; in bnx2x_get_igu_cam_info()
11158 if (bp->igu_base_sb == 0xff) in bnx2x_get_igu_cam_info()
11159 bp->igu_base_sb = igu_sb_id; in bnx2x_get_igu_cam_info()
11172 bp->igu_sb_cnt = min_t(int, bp->igu_sb_cnt, igu_sb_cnt); in bnx2x_get_igu_cam_info()
11183 static void bnx2x_link_settings_supported(struct bnx2x *bp, u32 switch_cfg) in bnx2x_link_settings_supported() argument
11185 int cfg_size = 0, idx, port = BP_PORT(bp); in bnx2x_link_settings_supported()
11188 bp->port.supported[0] = 0; in bnx2x_link_settings_supported()
11189 bp->port.supported[1] = 0; in bnx2x_link_settings_supported()
11190 switch (bp->link_params.num_phys) { in bnx2x_link_settings_supported()
11192 bp->port.supported[0] = bp->link_params.phy[INT_PHY].supported; in bnx2x_link_settings_supported()
11196 bp->port.supported[0] = bp->link_params.phy[EXT_PHY1].supported; in bnx2x_link_settings_supported()
11200 if (bp->link_params.multi_phy_config & in bnx2x_link_settings_supported()
11202 bp->port.supported[1] = in bnx2x_link_settings_supported()
11203 bp->link_params.phy[EXT_PHY1].supported; in bnx2x_link_settings_supported()
11204 bp->port.supported[0] = in bnx2x_link_settings_supported()
11205 bp->link_params.phy[EXT_PHY2].supported; in bnx2x_link_settings_supported()
11207 bp->port.supported[0] = in bnx2x_link_settings_supported()
11208 bp->link_params.phy[EXT_PHY1].supported; in bnx2x_link_settings_supported()
11209 bp->port.supported[1] = in bnx2x_link_settings_supported()
11210 bp->link_params.phy[EXT_PHY2].supported; in bnx2x_link_settings_supported()
11216 if (!(bp->port.supported[0] || bp->port.supported[1])) { in bnx2x_link_settings_supported()
11218 SHMEM_RD(bp, in bnx2x_link_settings_supported()
11220 SHMEM_RD(bp, in bnx2x_link_settings_supported()
11225 if (CHIP_IS_E3(bp)) in bnx2x_link_settings_supported()
11226 bp->port.phy_addr = REG_RD(bp, MISC_REG_WC0_CTRL_PHY_ADDR); in bnx2x_link_settings_supported()
11230 bp->port.phy_addr = REG_RD( in bnx2x_link_settings_supported()
11231 bp, NIG_REG_SERDES0_CTRL_PHY_ADDR + port*0x10); in bnx2x_link_settings_supported()
11234 bp->port.phy_addr = REG_RD( in bnx2x_link_settings_supported()
11235 bp, NIG_REG_XGXS0_CTRL_PHY_ADDR + port*0x18); in bnx2x_link_settings_supported()
11239 bp->port.link_config[0]); in bnx2x_link_settings_supported()
11243 BNX2X_DEV_INFO("phy_addr 0x%x\n", bp->port.phy_addr); in bnx2x_link_settings_supported()
11246 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11248 bp->port.supported[idx] &= ~SUPPORTED_10baseT_Half; in bnx2x_link_settings_supported()
11250 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11252 bp->port.supported[idx] &= ~SUPPORTED_10baseT_Full; in bnx2x_link_settings_supported()
11254 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11256 bp->port.supported[idx] &= ~SUPPORTED_100baseT_Half; in bnx2x_link_settings_supported()
11258 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11260 bp->port.supported[idx] &= ~SUPPORTED_100baseT_Full; in bnx2x_link_settings_supported()
11262 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11264 bp->port.supported[idx] &= ~(SUPPORTED_1000baseT_Half | in bnx2x_link_settings_supported()
11267 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11269 bp->port.supported[idx] &= ~SUPPORTED_2500baseX_Full; in bnx2x_link_settings_supported()
11271 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11273 bp->port.supported[idx] &= ~SUPPORTED_10000baseT_Full; in bnx2x_link_settings_supported()
11275 if (!(bp->link_params.speed_cap_mask[idx] & in bnx2x_link_settings_supported()
11277 bp->port.supported[idx] &= ~SUPPORTED_20000baseKR2_Full; in bnx2x_link_settings_supported()
11280 BNX2X_DEV_INFO("supported 0x%x 0x%x\n", bp->port.supported[0], in bnx2x_link_settings_supported()
11281 bp->port.supported[1]); in bnx2x_link_settings_supported()
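
bnx2x_link_settings_supported() above repeatedly clears SUPPORTED_* bits whose speed is absent from the port's speed_cap_mask, so only speeds the NVRAM configuration allows are advertised. A stand-alone model of that filtering with local stand-in bit definitions (not the kernel's ethtool values):

/* Hedged sketch of capability-mask filtering of supported link modes. */
#include <stdint.h>
#include <stdio.h>

#define SUP_10_FULL    (1u << 0)
#define SUP_100_FULL   (1u << 1)
#define SUP_1000_FULL  (1u << 2)
#define SUP_10000_FULL (1u << 3)

#define CAP_10_FULL    (1u << 0)
#define CAP_100_FULL   (1u << 1)
#define CAP_1000_FULL  (1u << 2)
#define CAP_10000_FULL (1u << 3)

static uint32_t filter_supported(uint32_t supported, uint32_t cap_mask)
{
	if (!(cap_mask & CAP_10_FULL))
		supported &= ~SUP_10_FULL;
	if (!(cap_mask & CAP_100_FULL))
		supported &= ~SUP_100_FULL;
	if (!(cap_mask & CAP_1000_FULL))
		supported &= ~SUP_1000_FULL;
	if (!(cap_mask & CAP_10000_FULL))
		supported &= ~SUP_10000_FULL;
	return supported;
}

int main(void)
{
	/* PHY claims everything, NVRAM only allows 1G and 10G */
	uint32_t sup = filter_supported(0xf, CAP_1000_FULL | CAP_10000_FULL);

	printf("supported after filtering: 0x%x\n", sup); /* 0xc */
	return 0;
}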
11284 static void bnx2x_link_settings_requested(struct bnx2x *bp) in bnx2x_link_settings_requested() argument
11287 bp->port.advertising[0] = 0; in bnx2x_link_settings_requested()
11288 bp->port.advertising[1] = 0; in bnx2x_link_settings_requested()
11289 switch (bp->link_params.num_phys) { in bnx2x_link_settings_requested()
11299 bp->link_params.req_duplex[idx] = DUPLEX_FULL; in bnx2x_link_settings_requested()
11300 link_config = bp->port.link_config[idx]; in bnx2x_link_settings_requested()
11303 if (bp->port.supported[idx] & SUPPORTED_Autoneg) { in bnx2x_link_settings_requested()
11304 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11306 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11307 bp->port.supported[idx]; in bnx2x_link_settings_requested()
11308 if (bp->link_params.phy[EXT_PHY1].type == in bnx2x_link_settings_requested()
11310 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11315 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11317 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11325 if (bp->port.supported[idx] & SUPPORTED_10baseT_Full) { in bnx2x_link_settings_requested()
11326 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11328 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11334 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11340 if (bp->port.supported[idx] & SUPPORTED_10baseT_Half) { in bnx2x_link_settings_requested()
11341 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11343 bp->link_params.req_duplex[idx] = in bnx2x_link_settings_requested()
11345 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11351 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11357 if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11359 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11361 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11367 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11373 if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11375 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11377 bp->link_params.req_duplex[idx] = in bnx2x_link_settings_requested()
11379 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11385 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11391 if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11393 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11395 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11398 } else if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11400 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11402 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11407 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11413 if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11415 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11417 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11423 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11429 if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11431 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11433 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11436 } else if (bp->port.supported[idx] & in bnx2x_link_settings_requested()
11438 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11440 bp->port.advertising[idx] |= in bnx2x_link_settings_requested()
11446 bp->link_params.speed_cap_mask[idx]); in bnx2x_link_settings_requested()
11451 bp->link_params.req_line_speed[idx] = SPEED_20000; in bnx2x_link_settings_requested()
11457 bp->link_params.req_line_speed[idx] = in bnx2x_link_settings_requested()
11459 bp->port.advertising[idx] = in bnx2x_link_settings_requested()
11460 bp->port.supported[idx]; in bnx2x_link_settings_requested()
11464 bp->link_params.req_flow_ctrl[idx] = (link_config & in bnx2x_link_settings_requested()
11466 if (bp->link_params.req_flow_ctrl[idx] == in bnx2x_link_settings_requested()
11468 if (!(bp->port.supported[idx] & SUPPORTED_Autoneg)) in bnx2x_link_settings_requested()
11469 bp->link_params.req_flow_ctrl[idx] = in bnx2x_link_settings_requested()
11472 bnx2x_set_requested_fc(bp); in bnx2x_link_settings_requested()
11476 bp->link_params.req_line_speed[idx], in bnx2x_link_settings_requested()
11477 bp->link_params.req_duplex[idx], in bnx2x_link_settings_requested()
11478 bp->link_params.req_flow_ctrl[idx], in bnx2x_link_settings_requested()
11479 bp->port.advertising[idx]); in bnx2x_link_settings_requested()
11491 static void bnx2x_get_port_hwinfo(struct bnx2x *bp) in bnx2x_get_port_hwinfo() argument
11493 int port = BP_PORT(bp); in bnx2x_get_port_hwinfo()
11497 bp->link_params.bp = bp; in bnx2x_get_port_hwinfo()
11498 bp->link_params.port = port; in bnx2x_get_port_hwinfo()
11500 bp->link_params.lane_config = in bnx2x_get_port_hwinfo()
11501 SHMEM_RD(bp, dev_info.port_hw_config[port].lane_config); in bnx2x_get_port_hwinfo()
11503 bp->link_params.speed_cap_mask[0] = in bnx2x_get_port_hwinfo()
11504 SHMEM_RD(bp, in bnx2x_get_port_hwinfo()
11507 bp->link_params.speed_cap_mask[1] = in bnx2x_get_port_hwinfo()
11508 SHMEM_RD(bp, in bnx2x_get_port_hwinfo()
11511 bp->port.link_config[0] = in bnx2x_get_port_hwinfo()
11512 SHMEM_RD(bp, dev_info.port_feature_config[port].link_config); in bnx2x_get_port_hwinfo()
11514 bp->port.link_config[1] = in bnx2x_get_port_hwinfo()
11515 SHMEM_RD(bp, dev_info.port_feature_config[port].link_config2); in bnx2x_get_port_hwinfo()
11517 bp->link_params.multi_phy_config = in bnx2x_get_port_hwinfo()
11518 SHMEM_RD(bp, dev_info.port_hw_config[port].multi_phy_config); in bnx2x_get_port_hwinfo()
11522 config = SHMEM_RD(bp, dev_info.port_feature_config[port].config); in bnx2x_get_port_hwinfo()
11523 bp->wol = (!(bp->flags & NO_WOL_FLAG) && in bnx2x_get_port_hwinfo()
11527 PORT_FEAT_CFG_STORAGE_PERSONALITY_FCOE && !IS_MF(bp)) in bnx2x_get_port_hwinfo()
11528 bp->flags |= NO_ISCSI_FLAG; in bnx2x_get_port_hwinfo()
11530 PORT_FEAT_CFG_STORAGE_PERSONALITY_ISCSI && !(IS_MF(bp))) in bnx2x_get_port_hwinfo()
11531 bp->flags |= NO_FCOE_FLAG; in bnx2x_get_port_hwinfo()
11534 bp->link_params.lane_config, in bnx2x_get_port_hwinfo()
11535 bp->link_params.speed_cap_mask[0], in bnx2x_get_port_hwinfo()
11536 bp->port.link_config[0]); in bnx2x_get_port_hwinfo()
11538 bp->link_params.switch_cfg = (bp->port.link_config[0] & in bnx2x_get_port_hwinfo()
11540 bnx2x_phy_probe(&bp->link_params); in bnx2x_get_port_hwinfo()
11541 bnx2x_link_settings_supported(bp, bp->link_params.switch_cfg); in bnx2x_get_port_hwinfo()
11543 bnx2x_link_settings_requested(bp); in bnx2x_get_port_hwinfo()
11550 SHMEM_RD(bp, in bnx2x_get_port_hwinfo()
11554 bp->mdio.prtad = bp->port.phy_addr; in bnx2x_get_port_hwinfo()
11558 bp->mdio.prtad = in bnx2x_get_port_hwinfo()
11562 eee_mode = (((SHMEM_RD(bp, dev_info. in bnx2x_get_port_hwinfo()
11567 bp->link_params.eee_mode = EEE_MODE_ADV_LPI | in bnx2x_get_port_hwinfo()
11571 bp->link_params.eee_mode = 0; in bnx2x_get_port_hwinfo()
11575 void bnx2x_get_iscsi_info(struct bnx2x *bp) in bnx2x_get_iscsi_info() argument
11578 int port = BP_PORT(bp); in bnx2x_get_iscsi_info()
11579 u32 max_iscsi_conn = FW_ENCODE_32BIT_PATTERN ^ SHMEM_RD(bp, in bnx2x_get_iscsi_info()
11582 if (!CNIC_SUPPORT(bp)) { in bnx2x_get_iscsi_info()
11583 bp->flags |= no_flags; in bnx2x_get_iscsi_info()
11588 bp->cnic_eth_dev.max_iscsi_conn = in bnx2x_get_iscsi_info()
11593 bp->cnic_eth_dev.max_iscsi_conn); in bnx2x_get_iscsi_info()
11599 if (!bp->cnic_eth_dev.max_iscsi_conn) in bnx2x_get_iscsi_info()
11600 bp->flags |= no_flags; in bnx2x_get_iscsi_info()
11603 static void bnx2x_get_ext_wwn_info(struct bnx2x *bp, int func) in bnx2x_get_ext_wwn_info() argument
11606 bp->cnic_eth_dev.fcoe_wwn_port_name_hi = in bnx2x_get_ext_wwn_info()
11607 MF_CFG_RD(bp, func_ext_config[func].fcoe_wwn_port_name_upper); in bnx2x_get_ext_wwn_info()
11608 bp->cnic_eth_dev.fcoe_wwn_port_name_lo = in bnx2x_get_ext_wwn_info()
11609 MF_CFG_RD(bp, func_ext_config[func].fcoe_wwn_port_name_lower); in bnx2x_get_ext_wwn_info()
11612 bp->cnic_eth_dev.fcoe_wwn_node_name_hi = in bnx2x_get_ext_wwn_info()
11613 MF_CFG_RD(bp, func_ext_config[func].fcoe_wwn_node_name_upper); in bnx2x_get_ext_wwn_info()
11614 bp->cnic_eth_dev.fcoe_wwn_node_name_lo = in bnx2x_get_ext_wwn_info()
11615 MF_CFG_RD(bp, func_ext_config[func].fcoe_wwn_node_name_lower); in bnx2x_get_ext_wwn_info()
11618 static int bnx2x_shared_fcoe_funcs(struct bnx2x *bp) in bnx2x_shared_fcoe_funcs() argument
11622 if (IS_MF(bp)) { in bnx2x_shared_fcoe_funcs()
11626 for (fid = BP_PATH(bp); fid < E2_FUNC_MAX * 2; fid += 2) { in bnx2x_shared_fcoe_funcs()
11627 if (IS_MF_SD(bp)) { in bnx2x_shared_fcoe_funcs()
11628 u32 cfg = MF_CFG_RD(bp, in bnx2x_shared_fcoe_funcs()
11636 u32 cfg = MF_CFG_RD(bp, in bnx2x_shared_fcoe_funcs()
11646 int port, port_cnt = CHIP_MODE_IS_4_PORT(bp) ? 2 : 1; in bnx2x_shared_fcoe_funcs()
11649 u32 lic = SHMEM_RD(bp, in bnx2x_shared_fcoe_funcs()
11660 static void bnx2x_get_fcoe_info(struct bnx2x *bp) in bnx2x_get_fcoe_info() argument
11662 int port = BP_PORT(bp); in bnx2x_get_fcoe_info()
11663 int func = BP_ABS_FUNC(bp); in bnx2x_get_fcoe_info()
11664 u32 max_fcoe_conn = FW_ENCODE_32BIT_PATTERN ^ SHMEM_RD(bp, in bnx2x_get_fcoe_info()
11666 u8 num_fcoe_func = bnx2x_shared_fcoe_funcs(bp); in bnx2x_get_fcoe_info()
11668 if (!CNIC_SUPPORT(bp)) { in bnx2x_get_fcoe_info()
11669 bp->flags |= NO_FCOE_FLAG; in bnx2x_get_fcoe_info()
11674 bp->cnic_eth_dev.max_fcoe_conn = in bnx2x_get_fcoe_info()
11679 bp->cnic_eth_dev.max_fcoe_exchanges = MAX_NUM_FCOE_TASKS_PER_ENGINE; in bnx2x_get_fcoe_info()
11683 bp->cnic_eth_dev.max_fcoe_exchanges /= num_fcoe_func; in bnx2x_get_fcoe_info()
11686 if (!IS_MF(bp)) { in bnx2x_get_fcoe_info()
11688 bp->cnic_eth_dev.fcoe_wwn_port_name_hi = in bnx2x_get_fcoe_info()
11689 SHMEM_RD(bp, in bnx2x_get_fcoe_info()
11692 bp->cnic_eth_dev.fcoe_wwn_port_name_lo = in bnx2x_get_fcoe_info()
11693 SHMEM_RD(bp, in bnx2x_get_fcoe_info()
11698 bp->cnic_eth_dev.fcoe_wwn_node_name_hi = in bnx2x_get_fcoe_info()
11699 SHMEM_RD(bp, in bnx2x_get_fcoe_info()
11702 bp->cnic_eth_dev.fcoe_wwn_node_name_lo = in bnx2x_get_fcoe_info()
11703 SHMEM_RD(bp, in bnx2x_get_fcoe_info()
11706 } else if (!IS_MF_SD(bp)) { in bnx2x_get_fcoe_info()
11710 if (BNX2X_HAS_MF_EXT_PROTOCOL_FCOE(bp)) in bnx2x_get_fcoe_info()
11711 bnx2x_get_ext_wwn_info(bp, func); in bnx2x_get_fcoe_info()
11713 if (BNX2X_IS_MF_SD_PROTOCOL_FCOE(bp) && !CHIP_IS_E1x(bp)) in bnx2x_get_fcoe_info()
11714 bnx2x_get_ext_wwn_info(bp, func); in bnx2x_get_fcoe_info()
11717 BNX2X_DEV_INFO("max_fcoe_conn 0x%x\n", bp->cnic_eth_dev.max_fcoe_conn); in bnx2x_get_fcoe_info()
11723 if (!bp->cnic_eth_dev.max_fcoe_conn) { in bnx2x_get_fcoe_info()
11724 bp->flags |= NO_FCOE_FLAG; in bnx2x_get_fcoe_info()
11725 eth_zero_addr(bp->fip_mac); in bnx2x_get_fcoe_info()
11729 static void bnx2x_get_cnic_info(struct bnx2x *bp) in bnx2x_get_cnic_info() argument
11736 bnx2x_get_iscsi_info(bp); in bnx2x_get_cnic_info()
11737 bnx2x_get_fcoe_info(bp); in bnx2x_get_cnic_info()
11740 static void bnx2x_get_cnic_mac_hwinfo(struct bnx2x *bp) in bnx2x_get_cnic_mac_hwinfo() argument
11743 int func = BP_ABS_FUNC(bp); in bnx2x_get_cnic_mac_hwinfo()
11744 int port = BP_PORT(bp); in bnx2x_get_cnic_mac_hwinfo()
11745 u8 *iscsi_mac = bp->cnic_eth_dev.iscsi_mac; in bnx2x_get_cnic_mac_hwinfo()
11746 u8 *fip_mac = bp->fip_mac; in bnx2x_get_cnic_mac_hwinfo()
11748 if (IS_MF(bp)) { in bnx2x_get_cnic_mac_hwinfo()
11754 if (!IS_MF_SD(bp)) { in bnx2x_get_cnic_mac_hwinfo()
11755 u32 cfg = MF_CFG_RD(bp, func_ext_config[func].func_cfg); in bnx2x_get_cnic_mac_hwinfo()
11757 val2 = MF_CFG_RD(bp, func_ext_config[func]. in bnx2x_get_cnic_mac_hwinfo()
11759 val = MF_CFG_RD(bp, func_ext_config[func]. in bnx2x_get_cnic_mac_hwinfo()
11765 bp->flags |= NO_ISCSI_OOO_FLAG | NO_ISCSI_FLAG; in bnx2x_get_cnic_mac_hwinfo()
11769 val2 = MF_CFG_RD(bp, func_ext_config[func]. in bnx2x_get_cnic_mac_hwinfo()
11771 val = MF_CFG_RD(bp, func_ext_config[func]. in bnx2x_get_cnic_mac_hwinfo()
11777 bp->flags |= NO_FCOE_FLAG; in bnx2x_get_cnic_mac_hwinfo()
11780 bp->mf_ext_config = cfg; in bnx2x_get_cnic_mac_hwinfo()
11783 if (BNX2X_IS_MF_SD_PROTOCOL_ISCSI(bp)) { in bnx2x_get_cnic_mac_hwinfo()
11785 memcpy(iscsi_mac, bp->dev->dev_addr, ETH_ALEN); in bnx2x_get_cnic_mac_hwinfo()
11790 } else if (BNX2X_IS_MF_SD_PROTOCOL_FCOE(bp)) { in bnx2x_get_cnic_mac_hwinfo()
11792 memcpy(fip_mac, bp->dev->dev_addr, ETH_ALEN); in bnx2x_get_cnic_mac_hwinfo()
11803 if (IS_MF_FCOE_AFEX(bp)) in bnx2x_get_cnic_mac_hwinfo()
11804 eth_hw_addr_set(bp->dev, fip_mac); in bnx2x_get_cnic_mac_hwinfo()
11806 val2 = SHMEM_RD(bp, dev_info.port_hw_config[port]. in bnx2x_get_cnic_mac_hwinfo()
11808 val = SHMEM_RD(bp, dev_info.port_hw_config[port]. in bnx2x_get_cnic_mac_hwinfo()
11812 val2 = SHMEM_RD(bp, dev_info.port_hw_config[port]. in bnx2x_get_cnic_mac_hwinfo()
11814 val = SHMEM_RD(bp, dev_info.port_hw_config[port]. in bnx2x_get_cnic_mac_hwinfo()
11821 bp->flags |= NO_ISCSI_OOO_FLAG | NO_ISCSI_FLAG; in bnx2x_get_cnic_mac_hwinfo()
11827 bp->flags |= NO_FCOE_FLAG; in bnx2x_get_cnic_mac_hwinfo()
11828 eth_zero_addr(bp->fip_mac); in bnx2x_get_cnic_mac_hwinfo()
11832 static void bnx2x_get_mac_hwinfo(struct bnx2x *bp) in bnx2x_get_mac_hwinfo() argument
11835 int func = BP_ABS_FUNC(bp); in bnx2x_get_mac_hwinfo()
11836 int port = BP_PORT(bp); in bnx2x_get_mac_hwinfo()
11840 eth_hw_addr_set(bp->dev, addr); in bnx2x_get_mac_hwinfo()
11842 if (BP_NOMCP(bp)) { in bnx2x_get_mac_hwinfo()
11844 eth_hw_addr_random(bp->dev); in bnx2x_get_mac_hwinfo()
11845 } else if (IS_MF(bp)) { in bnx2x_get_mac_hwinfo()
11846 val2 = MF_CFG_RD(bp, func_mf_config[func].mac_upper); in bnx2x_get_mac_hwinfo()
11847 val = MF_CFG_RD(bp, func_mf_config[func].mac_lower); in bnx2x_get_mac_hwinfo()
11851 eth_hw_addr_set(bp->dev, addr); in bnx2x_get_mac_hwinfo()
11854 if (CNIC_SUPPORT(bp)) in bnx2x_get_mac_hwinfo()
11855 bnx2x_get_cnic_mac_hwinfo(bp); in bnx2x_get_mac_hwinfo()
11858 val2 = SHMEM_RD(bp, dev_info.port_hw_config[port].mac_upper); in bnx2x_get_mac_hwinfo()
11859 val = SHMEM_RD(bp, dev_info.port_hw_config[port].mac_lower); in bnx2x_get_mac_hwinfo()
11861 eth_hw_addr_set(bp->dev, addr); in bnx2x_get_mac_hwinfo()
11863 if (CNIC_SUPPORT(bp)) in bnx2x_get_mac_hwinfo()
11864 bnx2x_get_cnic_mac_hwinfo(bp); in bnx2x_get_mac_hwinfo()
11867 if (!BP_NOMCP(bp)) { in bnx2x_get_mac_hwinfo()
11869 val2 = SHMEM_RD(bp, dev_info.port_hw_config[port].mac_upper); in bnx2x_get_mac_hwinfo()
11870 val = SHMEM_RD(bp, dev_info.port_hw_config[port].mac_lower); in bnx2x_get_mac_hwinfo()
11871 bnx2x_set_mac_buf(bp->phys_port_id, val, val2); in bnx2x_get_mac_hwinfo()
11872 bp->flags |= HAS_PHYS_PORT_ID; in bnx2x_get_mac_hwinfo()
11875 memcpy(bp->link_params.mac_addr, bp->dev->dev_addr, ETH_ALEN); in bnx2x_get_mac_hwinfo()
11877 if (!is_valid_ether_addr(bp->dev->dev_addr)) in bnx2x_get_mac_hwinfo()
11878 dev_err(&bp->pdev->dev, in bnx2x_get_mac_hwinfo()
11881 bp->dev->dev_addr); in bnx2x_get_mac_hwinfo()
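
In bnx2x_get_mac_hwinfo() above the station address comes from a mac_upper/mac_lower pair of 32-bit shmem words. A stand-alone sketch of how such a pair expands into a 6-byte MAC; the helper name is illustrative (the driver has its own bnx2x_set_mac_buf()):

/* Hedged sketch: upper word holds the top 2 MAC bytes, lower word the rest. */
#include <stdint.h>
#include <stdio.h>

static void mac_from_words(uint8_t mac[6], uint32_t upper, uint32_t lower)
{
	mac[0] = upper >> 8;
	mac[1] = upper;
	mac[2] = lower >> 24;
	mac[3] = lower >> 16;
	mac[4] = lower >> 8;
	mac[5] = lower;
}

int main(void)
{
	uint8_t mac[6];

	mac_from_words(mac, 0x0010, 0x18a1b2c3);
	printf("%02x:%02x:%02x:%02x:%02x:%02x\n",
	       mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
	return 0;
}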
11884 static bool bnx2x_get_dropless_info(struct bnx2x *bp) in bnx2x_get_dropless_info() argument
11889 if (IS_VF(bp)) in bnx2x_get_dropless_info()
11892 if (IS_MF(bp) && !CHIP_IS_E1x(bp)) { in bnx2x_get_dropless_info()
11894 tmp = BP_ABS_FUNC(bp); in bnx2x_get_dropless_info()
11895 cfg = MF_CFG_RD(bp, func_ext_config[tmp].func_cfg); in bnx2x_get_dropless_info()
11899 tmp = BP_PORT(bp); in bnx2x_get_dropless_info()
11900 cfg = SHMEM_RD(bp, in bnx2x_get_dropless_info()
11907 static void validate_set_si_mode(struct bnx2x *bp) in validate_set_si_mode() argument
11909 u8 func = BP_ABS_FUNC(bp); in validate_set_si_mode()
11912 val = MF_CFG_RD(bp, func_mf_config[func].mac_upper); in validate_set_si_mode()
11916 bp->mf_mode = MULTI_FUNCTION_SI; in validate_set_si_mode()
11917 bp->mf_config[BP_VN(bp)] = in validate_set_si_mode()
11918 MF_CFG_RD(bp, func_mf_config[func].config); in validate_set_si_mode()
11923 static int bnx2x_get_hwinfo(struct bnx2x *bp) in bnx2x_get_hwinfo() argument
11925 int /*abs*/func = BP_ABS_FUNC(bp); in bnx2x_get_hwinfo()
11931 if (REG_RD(bp, MISC_REG_CHIP_NUM) == 0xffffffff) { in bnx2x_get_hwinfo()
11932 dev_err(&bp->pdev->dev, in bnx2x_get_hwinfo()
11937 bnx2x_get_common_hwinfo(bp); in bnx2x_get_hwinfo()
11942 if (CHIP_IS_E1x(bp)) { in bnx2x_get_hwinfo()
11943 bp->common.int_block = INT_BLOCK_HC; in bnx2x_get_hwinfo()
11945 bp->igu_dsb_id = DEF_SB_IGU_ID; in bnx2x_get_hwinfo()
11946 bp->igu_base_sb = 0; in bnx2x_get_hwinfo()
11948 bp->common.int_block = INT_BLOCK_IGU; in bnx2x_get_hwinfo()
11951 bnx2x_acquire_hw_lock(bp, HW_LOCK_RESOURCE_RESET); in bnx2x_get_hwinfo()
11953 val = REG_RD(bp, IGU_REG_BLOCK_CONFIGURATION); in bnx2x_get_hwinfo()
11961 REG_WR(bp, IGU_REG_BLOCK_CONFIGURATION, val); in bnx2x_get_hwinfo()
11962 REG_WR(bp, IGU_REG_RESET_MEMORIES, 0x7f); in bnx2x_get_hwinfo()
11964 while (tout && REG_RD(bp, IGU_REG_RESET_MEMORIES)) { in bnx2x_get_hwinfo()
11969 if (REG_RD(bp, IGU_REG_RESET_MEMORIES)) { in bnx2x_get_hwinfo()
11970 dev_err(&bp->pdev->dev, in bnx2x_get_hwinfo()
11972 bnx2x_release_hw_lock(bp, in bnx2x_get_hwinfo()
11980 bp->common.int_block |= INT_BLOCK_MODE_BW_COMP; in bnx2x_get_hwinfo()
11984 rc = bnx2x_get_igu_cam_info(bp); in bnx2x_get_hwinfo()
11985 bnx2x_release_hw_lock(bp, HW_LOCK_RESOURCE_RESET); in bnx2x_get_hwinfo()
11995 if (CHIP_IS_E1x(bp)) in bnx2x_get_hwinfo()
11996 bp->base_fw_ndsb = BP_PORT(bp) * FP_SB_MAX_E1x + BP_L_ID(bp); in bnx2x_get_hwinfo()
12002 bp->base_fw_ndsb = bp->igu_base_sb; in bnx2x_get_hwinfo()
12005 "base_fw_ndsb %d\n", bp->igu_dsb_id, bp->igu_base_sb, in bnx2x_get_hwinfo()
12006 bp->igu_sb_cnt, bp->base_fw_ndsb); in bnx2x_get_hwinfo()
12011 bp->mf_ov = 0; in bnx2x_get_hwinfo()
12012 bp->mf_mode = 0; in bnx2x_get_hwinfo()
12013 bp->mf_sub_mode = 0; in bnx2x_get_hwinfo()
12014 vn = BP_VN(bp); in bnx2x_get_hwinfo()
12016 if (!CHIP_IS_E1(bp) && !BP_NOMCP(bp)) { in bnx2x_get_hwinfo()
12018 bp->common.shmem2_base, SHMEM2_RD(bp, size), in bnx2x_get_hwinfo()
12021 if (SHMEM2_HAS(bp, mf_cfg_addr)) in bnx2x_get_hwinfo()
12022 bp->common.mf_cfg_base = SHMEM2_RD(bp, mf_cfg_addr); in bnx2x_get_hwinfo()
12024 bp->common.mf_cfg_base = bp->common.shmem_base + in bnx2x_get_hwinfo()
12035 if (bp->common.mf_cfg_base != SHMEM_MF_CFG_ADDR_NONE) { in bnx2x_get_hwinfo()
12037 val = SHMEM_RD(bp, in bnx2x_get_hwinfo()
12043 validate_set_si_mode(bp); in bnx2x_get_hwinfo()
12046 if ((!CHIP_IS_E1x(bp)) && in bnx2x_get_hwinfo()
12047 (MF_CFG_RD(bp, func_mf_config[func]. in bnx2x_get_hwinfo()
12049 (SHMEM2_HAS(bp, in bnx2x_get_hwinfo()
12051 bp->mf_mode = MULTI_FUNCTION_AFEX; in bnx2x_get_hwinfo()
12052 bp->mf_config[vn] = MF_CFG_RD(bp, in bnx2x_get_hwinfo()
12060 val = MF_CFG_RD(bp, in bnx2x_get_hwinfo()
12065 bp->mf_mode = MULTI_FUNCTION_SD; in bnx2x_get_hwinfo()
12066 bp->mf_config[vn] = MF_CFG_RD(bp, in bnx2x_get_hwinfo()
12072 bp->mf_mode = MULTI_FUNCTION_SD; in bnx2x_get_hwinfo()
12073 bp->mf_sub_mode = SUB_MF_MODE_BD; in bnx2x_get_hwinfo()
12074 bp->mf_config[vn] = in bnx2x_get_hwinfo()
12075 MF_CFG_RD(bp, in bnx2x_get_hwinfo()
12078 if (SHMEM2_HAS(bp, mtu_size)) { in bnx2x_get_hwinfo()
12079 int mtu_idx = BP_FW_MB_IDX(bp); in bnx2x_get_hwinfo()
12083 mtu = SHMEM2_RD(bp, mtu_size[mtu_idx]); in bnx2x_get_hwinfo()
12092 bp->dev->mtu = mtu_size; in bnx2x_get_hwinfo()
12096 bp->mf_mode = MULTI_FUNCTION_SD; in bnx2x_get_hwinfo()
12097 bp->mf_sub_mode = SUB_MF_MODE_UFP; in bnx2x_get_hwinfo()
12098 bp->mf_config[vn] = in bnx2x_get_hwinfo()
12099 MF_CFG_RD(bp, in bnx2x_get_hwinfo()
12103 bp->mf_config[vn] = 0; in bnx2x_get_hwinfo()
12106 val2 = SHMEM_RD(bp, in bnx2x_get_hwinfo()
12111 validate_set_si_mode(bp); in bnx2x_get_hwinfo()
12112 bp->mf_sub_mode = in bnx2x_get_hwinfo()
12117 bp->mf_config[vn] = 0; in bnx2x_get_hwinfo()
12124 bp->mf_config[vn] = 0; in bnx2x_get_hwinfo()
12130 IS_MF(bp) ? "multi" : "single"); in bnx2x_get_hwinfo()
12132 switch (bp->mf_mode) { in bnx2x_get_hwinfo()
12134 val = MF_CFG_RD(bp, func_mf_config[func].e1hov_tag) & in bnx2x_get_hwinfo()
12137 bp->mf_ov = val; in bnx2x_get_hwinfo()
12138 bp->path_has_ovlan = true; in bnx2x_get_hwinfo()
12141 func, bp->mf_ov, bp->mf_ov); in bnx2x_get_hwinfo()
12142 } else if ((bp->mf_sub_mode == SUB_MF_MODE_UFP) || in bnx2x_get_hwinfo()
12143 (bp->mf_sub_mode == SUB_MF_MODE_BD)) { in bnx2x_get_hwinfo()
12144 dev_err(&bp->pdev->dev, in bnx2x_get_hwinfo()
12147 bp->path_has_ovlan = true; in bnx2x_get_hwinfo()
12149 dev_err(&bp->pdev->dev, in bnx2x_get_hwinfo()
12164 dev_err(&bp->pdev->dev, in bnx2x_get_hwinfo()
12177 if (CHIP_MODE_IS_4_PORT(bp) && in bnx2x_get_hwinfo()
12178 !bp->path_has_ovlan && in bnx2x_get_hwinfo()
12179 !IS_MF(bp) && in bnx2x_get_hwinfo()
12180 bp->common.mf_cfg_base != SHMEM_MF_CFG_ADDR_NONE) { in bnx2x_get_hwinfo()
12181 u8 other_port = !BP_PORT(bp); in bnx2x_get_hwinfo()
12182 u8 other_func = BP_PATH(bp) + 2*other_port; in bnx2x_get_hwinfo()
12183 val = MF_CFG_RD(bp, in bnx2x_get_hwinfo()
12186 bp->path_has_ovlan = true; in bnx2x_get_hwinfo()
12191 if (CHIP_IS_E1H(bp) && IS_MF(bp)) in bnx2x_get_hwinfo()
12192 bp->igu_sb_cnt = min_t(u8, bp->igu_sb_cnt, E1H_MAX_MF_SB_COUNT); in bnx2x_get_hwinfo()
12195 bnx2x_get_port_hwinfo(bp); in bnx2x_get_hwinfo()
12198 bnx2x_get_mac_hwinfo(bp); in bnx2x_get_hwinfo()
12200 bnx2x_get_cnic_info(bp); in bnx2x_get_hwinfo()
12205 static void bnx2x_read_fwinfo(struct bnx2x *bp) in bnx2x_read_fwinfo() argument
12212 memset(bp->fw_ver, 0, sizeof(bp->fw_ver)); in bnx2x_read_fwinfo()
12214 vpd_data = pci_vpd_alloc(bp->pdev, &vpd_len); in bnx2x_read_fwinfo()
12229 if (rodi >= 0 && kw_len < sizeof(bp->fw_ver)) { in bnx2x_read_fwinfo()
12230 memcpy(bp->fw_ver, &vpd_data[rodi], kw_len); in bnx2x_read_fwinfo()
12231 bp->fw_ver[kw_len] = ' '; in bnx2x_read_fwinfo()
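
bnx2x_read_fwinfo() above pulls an identification string out of the PCI VPD read-only section. A hedged kernel-context sketch of that lookup pattern (not compilable stand-alone), assuming the pci_vpd_alloc()/pci_vpd_find_ro_info_keyword() helpers of recent kernels; the keyword chosen and the buffer handling are illustrative:

#include <linux/pci.h>

/* Hedged sketch: read the "SN" read-only VPD keyword into a string. */
static void read_vpd_sn(struct pci_dev *pdev, char *out, size_t out_len)
{
	unsigned int vpd_len, kw_len;
	void *vpd;
	int off;

	vpd = pci_vpd_alloc(pdev, &vpd_len);    /* whole VPD image */
	if (IS_ERR(vpd))
		return;

	off = pci_vpd_find_ro_info_keyword(vpd, vpd_len,
					   PCI_VPD_RO_KEYWORD_SERIALNO,
					   &kw_len);
	if (off >= 0 && kw_len < out_len) {
		memcpy(out, (char *)vpd + off, kw_len);
		out[kw_len] = '\0';
	}
	kfree(vpd);
}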
12238 static void bnx2x_set_modes_bitmap(struct bnx2x *bp) in bnx2x_set_modes_bitmap() argument
12242 if (CHIP_REV_IS_FPGA(bp)) in bnx2x_set_modes_bitmap()
12244 else if (CHIP_REV_IS_EMUL(bp)) in bnx2x_set_modes_bitmap()
12249 if (CHIP_MODE_IS_4_PORT(bp)) in bnx2x_set_modes_bitmap()
12254 if (CHIP_IS_E2(bp)) in bnx2x_set_modes_bitmap()
12256 else if (CHIP_IS_E3(bp)) { in bnx2x_set_modes_bitmap()
12258 if (CHIP_REV(bp) == CHIP_REV_Ax) in bnx2x_set_modes_bitmap()
12264 if (IS_MF(bp)) { in bnx2x_set_modes_bitmap()
12266 switch (bp->mf_mode) { in bnx2x_set_modes_bitmap()
12285 INIT_MODE_FLAGS(bp) = flags; in bnx2x_set_modes_bitmap()
12288 static int bnx2x_init_bp(struct bnx2x *bp) in bnx2x_init_bp() argument
12293 mutex_init(&bp->port.phy_mutex); in bnx2x_init_bp()
12294 mutex_init(&bp->fw_mb_mutex); in bnx2x_init_bp()
12295 mutex_init(&bp->drv_info_mutex); in bnx2x_init_bp()
12296 sema_init(&bp->stats_lock, 1); in bnx2x_init_bp()
12297 bp->drv_info_mng_owner = false; in bnx2x_init_bp()
12298 INIT_LIST_HEAD(&bp->vlan_reg); in bnx2x_init_bp()
12300 INIT_DELAYED_WORK(&bp->sp_task, bnx2x_sp_task); in bnx2x_init_bp()
12301 INIT_DELAYED_WORK(&bp->sp_rtnl_task, bnx2x_sp_rtnl_task); in bnx2x_init_bp()
12302 INIT_DELAYED_WORK(&bp->period_task, bnx2x_period_task); in bnx2x_init_bp()
12303 INIT_DELAYED_WORK(&bp->iov_task, bnx2x_iov_task); in bnx2x_init_bp()
12304 if (IS_PF(bp)) { in bnx2x_init_bp()
12305 rc = bnx2x_get_hwinfo(bp); in bnx2x_init_bp()
12311 eth_hw_addr_set(bp->dev, zero_addr); in bnx2x_init_bp()
12314 bnx2x_set_modes_bitmap(bp); in bnx2x_init_bp()
12316 rc = bnx2x_alloc_mem_bp(bp); in bnx2x_init_bp()
12320 bnx2x_read_fwinfo(bp); in bnx2x_init_bp()
12322 func = BP_FUNC(bp); in bnx2x_init_bp()
12325 if (IS_PF(bp) && !BP_NOMCP(bp)) { in bnx2x_init_bp()
12327 bp->fw_seq = in bnx2x_init_bp()
12328 SHMEM_RD(bp, func_mb[BP_FW_MB_IDX(bp)].drv_mb_header) & in bnx2x_init_bp()
12330 BNX2X_DEV_INFO("fw_seq 0x%08x\n", bp->fw_seq); in bnx2x_init_bp()
12332 rc = bnx2x_prev_unload(bp); in bnx2x_init_bp()
12334 bnx2x_free_mem_bp(bp); in bnx2x_init_bp()
12339 if (CHIP_REV_IS_FPGA(bp)) in bnx2x_init_bp()
12340 dev_err(&bp->pdev->dev, "FPGA detected\n"); in bnx2x_init_bp()
12342 if (BP_NOMCP(bp) && (func == 0)) in bnx2x_init_bp()
12343 dev_err(&bp->pdev->dev, "MCP disabled, must load devices in order!\n"); in bnx2x_init_bp()
12345 bp->disable_tpa = disable_tpa; in bnx2x_init_bp()
12346 bp->disable_tpa |= !!IS_MF_STORAGE_ONLY(bp); in bnx2x_init_bp()
12348 bp->disable_tpa |= is_kdump_kernel(); in bnx2x_init_bp()
12351 if (bp->disable_tpa) { in bnx2x_init_bp()
12352 bp->dev->hw_features &= ~(NETIF_F_LRO | NETIF_F_GRO_HW); in bnx2x_init_bp()
12353 bp->dev->features &= ~(NETIF_F_LRO | NETIF_F_GRO_HW); in bnx2x_init_bp()
12356 if (CHIP_IS_E1(bp)) in bnx2x_init_bp()
12357 bp->dropless_fc = false; in bnx2x_init_bp()
12359 bp->dropless_fc = dropless_fc | bnx2x_get_dropless_info(bp); in bnx2x_init_bp()
12361 bp->mrrs = mrrs; in bnx2x_init_bp()
12363 bp->tx_ring_size = IS_MF_STORAGE_ONLY(bp) ? 0 : MAX_TX_AVAIL; in bnx2x_init_bp()
12364 if (IS_VF(bp)) in bnx2x_init_bp()
12365 bp->rx_ring_size = MAX_RX_AVAIL; in bnx2x_init_bp()
12368 bp->tx_ticks = (50 / BNX2X_BTR) * BNX2X_BTR; in bnx2x_init_bp()
12369 bp->rx_ticks = (25 / BNX2X_BTR) * BNX2X_BTR; in bnx2x_init_bp()
12371 bp->current_interval = CHIP_REV_IS_SLOW(bp) ? 5*HZ : HZ; in bnx2x_init_bp()
12373 timer_setup(&bp->timer, bnx2x_timer, 0); in bnx2x_init_bp()
12374 bp->timer.expires = jiffies + bp->current_interval; in bnx2x_init_bp()
12376 if (SHMEM2_HAS(bp, dcbx_lldp_params_offset) && in bnx2x_init_bp()
12377 SHMEM2_HAS(bp, dcbx_lldp_dcbx_stat_offset) && in bnx2x_init_bp()
12378 SHMEM2_HAS(bp, dcbx_en) && in bnx2x_init_bp()
12379 SHMEM2_RD(bp, dcbx_lldp_params_offset) && in bnx2x_init_bp()
12380 SHMEM2_RD(bp, dcbx_lldp_dcbx_stat_offset) && in bnx2x_init_bp()
12381 SHMEM2_RD(bp, dcbx_en[BP_PORT(bp)])) { in bnx2x_init_bp()
12382 bnx2x_dcbx_set_state(bp, true, BNX2X_DCBX_ENABLED_ON_NEG_ON); in bnx2x_init_bp()
12383 bnx2x_dcbx_init_params(bp); in bnx2x_init_bp()
12385 bnx2x_dcbx_set_state(bp, false, BNX2X_DCBX_ENABLED_OFF); in bnx2x_init_bp()
12388 if (CHIP_IS_E1x(bp)) in bnx2x_init_bp()
12389 bp->cnic_base_cl_id = FP_SB_MAX_E1x; in bnx2x_init_bp()
12391 bp->cnic_base_cl_id = FP_SB_MAX_E2; in bnx2x_init_bp()
12394 if (IS_VF(bp)) in bnx2x_init_bp()
12395 bp->max_cos = 1; in bnx2x_init_bp()
12396 else if (CHIP_IS_E1x(bp)) in bnx2x_init_bp()
12397 bp->max_cos = BNX2X_MULTI_TX_COS_E1X; in bnx2x_init_bp()
12398 else if (CHIP_IS_E2(bp) || CHIP_IS_E3A0(bp)) in bnx2x_init_bp()
12399 bp->max_cos = BNX2X_MULTI_TX_COS_E2_E3A0; in bnx2x_init_bp()
12400 else if (CHIP_IS_E3B0(bp)) in bnx2x_init_bp()
12401 bp->max_cos = BNX2X_MULTI_TX_COS_E3B0; in bnx2x_init_bp()
12404 CHIP_NUM(bp), CHIP_REV(bp)); in bnx2x_init_bp()
12405 BNX2X_DEV_INFO("set bp->max_cos to %d\n", bp->max_cos); in bnx2x_init_bp()
12411 if (IS_VF(bp)) in bnx2x_init_bp()
12412 bp->min_msix_vec_cnt = 1; in bnx2x_init_bp()
12413 else if (CNIC_SUPPORT(bp)) in bnx2x_init_bp()
12414 bp->min_msix_vec_cnt = 3; in bnx2x_init_bp()
12416 bp->min_msix_vec_cnt = 2; in bnx2x_init_bp()
12417 BNX2X_DEV_INFO("bp->min_msix_vec_cnt %d", bp->min_msix_vec_cnt); in bnx2x_init_bp()
12419 bp->dump_preset_idx = 1; in bnx2x_init_bp()
12435 struct bnx2x *bp = netdev_priv(dev); in bnx2x_open() local
12438 bp->stats_init = true; in bnx2x_open()
12442 bnx2x_set_power_state(bp, PCI_D0); in bnx2x_open()
12450 if (IS_PF(bp)) { in bnx2x_open()
12451 int other_engine = BP_PATH(bp) ? 0 : 1; in bnx2x_open()
12455 other_load_status = bnx2x_get_load_status(bp, other_engine); in bnx2x_open()
12456 load_status = bnx2x_get_load_status(bp, BP_PATH(bp)); in bnx2x_open()
12457 if (!bnx2x_reset_is_done(bp, BP_PATH(bp)) || in bnx2x_open()
12458 bnx2x_chk_parity_attn(bp, &global, true)) { in bnx2x_open()
12466 bnx2x_set_reset_global(bp); in bnx2x_open()
12475 bnx2x_trylock_leader_lock(bp) && in bnx2x_open()
12476 !bnx2x_leader_reset(bp)) { in bnx2x_open()
12477 netdev_info(bp->dev, in bnx2x_open()
12483 bnx2x_set_power_state(bp, PCI_D3hot); in bnx2x_open()
12484 bp->recovery_state = BNX2X_RECOVERY_FAILED; in bnx2x_open()
12494 bp->recovery_state = BNX2X_RECOVERY_DONE; in bnx2x_open()
12495 rc = bnx2x_nic_load(bp, LOAD_OPEN); in bnx2x_open()
12505 struct bnx2x *bp = netdev_priv(dev); in bnx2x_close() local
12508 bnx2x_nic_unload(bp, UNLOAD_CLOSE, false); in bnx2x_close()
12536 static int bnx2x_init_mcast_macs_list(struct bnx2x *bp, in bnx2x_init_mcast_macs_list() argument
12543 int mc_count = netdev_mc_count(bp->dev); in bnx2x_init_mcast_macs_list()
12547 netdev_for_each_mc_addr(ha, bp->dev) { in bnx2x_init_mcast_macs_list()
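
bnx2x_init_mcast_macs_list() above walks the net_device multicast list with netdev_for_each_mc_addr() to build a driver-side list. A hedged kernel-context sketch of that walk; the destination-buffer handling is illustrative:

#include <linux/netdevice.h>
#include <linux/etherdevice.h>

/* Hedged sketch: copy each multicast group MAC into a flat caller buffer. */
static int copy_mc_list(struct net_device *dev, u8 *dst, int max_entries)
{
	struct netdev_hw_addr *ha;
	int i = 0;

	if (netdev_mc_count(dev) > max_entries)
		return -ENOSPC;

	netdev_for_each_mc_addr(ha, dev)        /* one entry per group MAC */
		ether_addr_copy(dst + (i++) * ETH_ALEN, ha->addr);

	return i;
}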
12578 static int bnx2x_set_uc_list(struct bnx2x *bp) in bnx2x_set_uc_list() argument
12581 struct net_device *dev = bp->dev; in bnx2x_set_uc_list()
12583 struct bnx2x_vlan_mac_obj *mac_obj = &bp->sp_objs->mac_obj; in bnx2x_set_uc_list()
12587 rc = bnx2x_del_all_macs(bp, mac_obj, BNX2X_UC_LIST_MAC, false); in bnx2x_set_uc_list()
12594 rc = bnx2x_set_mac_one(bp, bnx2x_uc_addr(ha), mac_obj, true, in bnx2x_set_uc_list()
12612 return bnx2x_set_mac_one(bp, NULL, mac_obj, false /* don't care */, in bnx2x_set_uc_list()
12616 static int bnx2x_set_mc_list_e1x(struct bnx2x *bp) in bnx2x_set_mc_list_e1x() argument
12619 struct net_device *dev = bp->dev; in bnx2x_set_mc_list_e1x()
12623 rparam.mcast_obj = &bp->mcast_obj; in bnx2x_set_mc_list_e1x()
12626 rc = bnx2x_config_mcast(bp, &rparam, BNX2X_MCAST_CMD_DEL); in bnx2x_set_mc_list_e1x()
12634 rc = bnx2x_init_mcast_macs_list(bp, &rparam, &mcast_group_list); in bnx2x_set_mc_list_e1x()
12639 rc = bnx2x_config_mcast(bp, &rparam, in bnx2x_set_mc_list_e1x()
12651 static int bnx2x_set_mc_list(struct bnx2x *bp) in bnx2x_set_mc_list() argument
12655 struct net_device *dev = bp->dev; in bnx2x_set_mc_list()
12659 if (CHIP_IS_E1x(bp)) in bnx2x_set_mc_list()
12660 return bnx2x_set_mc_list_e1x(bp); in bnx2x_set_mc_list()
12662 rparam.mcast_obj = &bp->mcast_obj; in bnx2x_set_mc_list()
12665 rc = bnx2x_init_mcast_macs_list(bp, &rparam, &mcast_group_list); in bnx2x_set_mc_list()
12670 rc = bnx2x_config_mcast(bp, &rparam, in bnx2x_set_mc_list()
12679 rc = bnx2x_config_mcast(bp, &rparam, BNX2X_MCAST_CMD_DEL); in bnx2x_set_mc_list()
12691 struct bnx2x *bp = netdev_priv(dev); in bnx2x_set_rx_mode() local
12693 if (bp->state != BNX2X_STATE_OPEN) { in bnx2x_set_rx_mode()
12694 DP(NETIF_MSG_IFUP, "state is %x, returning\n", bp->state); in bnx2x_set_rx_mode()
12698 bnx2x_schedule_sp_rtnl(bp, BNX2X_SP_RTNL_RX_MODE, in bnx2x_set_rx_mode()
12703 void bnx2x_set_rx_mode_inner(struct bnx2x *bp) in bnx2x_set_rx_mode_inner() argument
12707 DP(NETIF_MSG_IFUP, "dev->flags = %x\n", bp->dev->flags); in bnx2x_set_rx_mode_inner()
12709 netif_addr_lock_bh(bp->dev); in bnx2x_set_rx_mode_inner()
12711 if (bp->dev->flags & IFF_PROMISC) { in bnx2x_set_rx_mode_inner()
12713 } else if ((bp->dev->flags & IFF_ALLMULTI) || in bnx2x_set_rx_mode_inner()
12714 ((netdev_mc_count(bp->dev) > BNX2X_MAX_MULTICAST) && in bnx2x_set_rx_mode_inner()
12715 CHIP_IS_E1(bp))) { in bnx2x_set_rx_mode_inner()
12718 if (IS_PF(bp)) { in bnx2x_set_rx_mode_inner()
12720 if (bnx2x_set_mc_list(bp) < 0) in bnx2x_set_rx_mode_inner()
12724 netif_addr_unlock_bh(bp->dev); in bnx2x_set_rx_mode_inner()
12725 if (bnx2x_set_uc_list(bp) < 0) in bnx2x_set_rx_mode_inner()
12727 netif_addr_lock_bh(bp->dev); in bnx2x_set_rx_mode_inner()
12732 bnx2x_schedule_sp_rtnl(bp, in bnx2x_set_rx_mode_inner()
12737 bp->rx_mode = rx_mode; in bnx2x_set_rx_mode_inner()
12739 if (IS_MF_ISCSI_ONLY(bp)) in bnx2x_set_rx_mode_inner()
12740 bp->rx_mode = BNX2X_RX_MODE_NONE; in bnx2x_set_rx_mode_inner()
12743 if (test_bit(BNX2X_FILTER_RX_MODE_PENDING, &bp->sp_state)) { in bnx2x_set_rx_mode_inner()
12744 set_bit(BNX2X_FILTER_RX_MODE_SCHED, &bp->sp_state); in bnx2x_set_rx_mode_inner()
12745 netif_addr_unlock_bh(bp->dev); in bnx2x_set_rx_mode_inner()
12749 if (IS_PF(bp)) { in bnx2x_set_rx_mode_inner()
12750 bnx2x_set_storm_rx_mode(bp); in bnx2x_set_rx_mode_inner()
12751 netif_addr_unlock_bh(bp->dev); in bnx2x_set_rx_mode_inner()
12757 netif_addr_unlock_bh(bp->dev); in bnx2x_set_rx_mode_inner()
12758 bnx2x_vfpf_storm_rx_mode(bp); in bnx2x_set_rx_mode_inner()
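
bnx2x_set_rx_mode_inner() above derives the requested rx mode from the device flags: promiscuous wins, then all-multicast (requested explicitly, or forced on the older E1 chips when the multicast list overflows the hardware limit), otherwise normal filtering. A stand-alone model of that decision with stand-in flag and limit values:

/* Hedged sketch of the rx-mode selection order. */
#include <stdio.h>

enum rx_mode { RX_MODE_NORMAL, RX_MODE_ALLMULTI, RX_MODE_PROMISC };

#define FLAG_PROMISC  (1 << 0)
#define FLAG_ALLMULTI (1 << 1)
#define MAX_MCAST     64        /* stand-in for the hardware limit */

static enum rx_mode pick_rx_mode(unsigned int flags, int mc_count)
{
	if (flags & FLAG_PROMISC)
		return RX_MODE_PROMISC;
	if ((flags & FLAG_ALLMULTI) || mc_count > MAX_MCAST)
		return RX_MODE_ALLMULTI;
	return RX_MODE_NORMAL;
}

int main(void)
{
	printf("%d\n", pick_rx_mode(0, 200));           /* ALLMULTI */
	printf("%d\n", pick_rx_mode(FLAG_PROMISC, 0));  /* PROMISC  */
	return 0;
}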
12766 struct bnx2x *bp = netdev_priv(netdev); in bnx2x_mdio_read() local
12776 bnx2x_acquire_phy_lock(bp); in bnx2x_mdio_read()
12777 rc = bnx2x_phy_read(&bp->link_params, prtad, devad, addr, &value); in bnx2x_mdio_read()
12778 bnx2x_release_phy_lock(bp); in bnx2x_mdio_read()
12790 struct bnx2x *bp = netdev_priv(netdev); in bnx2x_mdio_write() local
12800 bnx2x_acquire_phy_lock(bp); in bnx2x_mdio_write()
12801 rc = bnx2x_phy_write(&bp->link_params, prtad, devad, addr, value); in bnx2x_mdio_write()
12802 bnx2x_release_phy_lock(bp); in bnx2x_mdio_write()
12809 struct bnx2x *bp = netdev_priv(dev); in bnx2x_ioctl() local
12817 return bnx2x_hwtstamp_ioctl(bp, ifr); in bnx2x_ioctl()
12821 return mdio_mii_ioctl(&bp->mdio, mdio, cmd); in bnx2x_ioctl()
12827 struct bnx2x *bp = netdev_priv(dev); in bnx2x_validate_addr() local
12830 if (IS_VF(bp)) in bnx2x_validate_addr()
12831 bnx2x_sample_bulletin(bp); in bnx2x_validate_addr()
12843 struct bnx2x *bp = netdev_priv(netdev); in bnx2x_get_phys_port_id() local
12845 if (!(bp->flags & HAS_PHYS_PORT_ID)) in bnx2x_get_phys_port_id()
12848 ppid->id_len = sizeof(bp->phys_port_id); in bnx2x_get_phys_port_id()
12849 memcpy(ppid->id, bp->phys_port_id, ppid->id_len); in bnx2x_get_phys_port_id()
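
bnx2x_get_phys_port_id() above follows the standard .ndo_get_phys_port_id contract: fail when no stable id is available, otherwise copy the saved id and its length into the caller's netdev_phys_item_id. A hedged kernel-context sketch of that pattern with an illustrative private struct:

#include <linux/netdevice.h>

/* Hedged sketch: everything except struct netdev_phys_item_id is made up. */
struct example_priv {
	u8   phys_port_id[ETH_ALEN];
	bool has_phys_port_id;
};

static int example_get_phys_port_id(struct net_device *dev,
				    struct netdev_phys_item_id *ppid)
{
	struct example_priv *priv = netdev_priv(dev);

	if (!priv->has_phys_port_id)
		return -EOPNOTSUPP;

	ppid->id_len = sizeof(priv->phys_port_id);
	memcpy(ppid->id, priv->phys_port_id, ppid->id_len);
	return 0;
}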
12880 static int __bnx2x_vlan_configure_vid(struct bnx2x *bp, u16 vid, bool add) in __bnx2x_vlan_configure_vid() argument
12884 if (IS_PF(bp)) { in __bnx2x_vlan_configure_vid()
12888 rc = bnx2x_set_vlan_one(bp, vid, &bp->sp_objs->vlan_obj, in __bnx2x_vlan_configure_vid()
12891 rc = bnx2x_vfpf_update_vlan(bp, vid, bp->fp->index, add); in __bnx2x_vlan_configure_vid()
12897 static int bnx2x_vlan_configure_vid_list(struct bnx2x *bp) in bnx2x_vlan_configure_vid_list() argument
12903 list_for_each_entry(vlan, &bp->vlan_reg, link) { in bnx2x_vlan_configure_vid_list()
12907 if (bp->vlan_cnt >= bp->vlan_credit) in bnx2x_vlan_configure_vid_list()
12910 rc = __bnx2x_vlan_configure_vid(bp, vlan->vid, true); in bnx2x_vlan_configure_vid_list()
12918 bp->vlan_cnt++; in bnx2x_vlan_configure_vid_list()
12924 static void bnx2x_vlan_configure(struct bnx2x *bp, bool set_rx_mode) in bnx2x_vlan_configure() argument
12928 need_accept_any_vlan = !!bnx2x_vlan_configure_vid_list(bp); in bnx2x_vlan_configure()
12930 if (bp->accept_any_vlan != need_accept_any_vlan) { in bnx2x_vlan_configure()
12931 bp->accept_any_vlan = need_accept_any_vlan; in bnx2x_vlan_configure()
12933 bp->accept_any_vlan ? "raised" : "cleared"); in bnx2x_vlan_configure()
12935 if (IS_PF(bp)) in bnx2x_vlan_configure()
12936 bnx2x_set_rx_mode_inner(bp); in bnx2x_vlan_configure()
12938 bnx2x_vfpf_storm_rx_mode(bp); in bnx2x_vlan_configure()
12943 int bnx2x_vlan_reconfigure_vid(struct bnx2x *bp) in bnx2x_vlan_reconfigure_vid() argument
12946 bnx2x_vlan_configure(bp, false); in bnx2x_vlan_reconfigure_vid()
12953 struct bnx2x *bp = netdev_priv(dev); in bnx2x_vlan_rx_add_vid() local
12964 list_add_tail(&vlan->link, &bp->vlan_reg); in bnx2x_vlan_rx_add_vid()
12967 bnx2x_vlan_configure(bp, true); in bnx2x_vlan_rx_add_vid()
12974 struct bnx2x *bp = netdev_priv(dev); in bnx2x_vlan_rx_kill_vid() local
12981 list_for_each_entry(vlan, &bp->vlan_reg, link) in bnx2x_vlan_rx_kill_vid()
12993 rc = __bnx2x_vlan_configure_vid(bp, vid, false); in bnx2x_vlan_rx_kill_vid()
12995 bp->vlan_cnt--; in bnx2x_vlan_rx_kill_vid()
13002 bnx2x_vlan_configure(bp, true); in bnx2x_vlan_rx_kill_vid()
13040 static void bnx2x_disable_pcie_error_reporting(struct bnx2x *bp) in bnx2x_disable_pcie_error_reporting() argument
13042 if (bp->flags & AER_ENABLED) { in bnx2x_disable_pcie_error_reporting()
13043 pci_disable_pcie_error_reporting(bp->pdev); in bnx2x_disable_pcie_error_reporting()
13044 bp->flags &= ~AER_ENABLED; in bnx2x_disable_pcie_error_reporting()
13048 static int bnx2x_init_dev(struct bnx2x *bp, struct pci_dev *pdev, in bnx2x_init_dev() argument
13059 bp->dev = dev; in bnx2x_init_dev()
13060 bp->pdev = pdev; in bnx2x_init_dev()
13064 dev_err(&bp->pdev->dev, in bnx2x_init_dev()
13070 dev_err(&bp->pdev->dev, in bnx2x_init_dev()
13076 if (IS_PF(bp) && !(pci_resource_flags(pdev, 2) & IORESOURCE_MEM)) { in bnx2x_init_dev()
13077 dev_err(&bp->pdev->dev, "Cannot find second PCI device base address, aborting\n"); in bnx2x_init_dev()
13093 dev_err(&bp->pdev->dev, in bnx2x_init_dev()
13102 if (IS_PF(bp)) { in bnx2x_init_dev()
13104 dev_err(&bp->pdev->dev, in bnx2x_init_dev()
13112 dev_err(&bp->pdev->dev, "Not PCI Express, aborting\n"); in bnx2x_init_dev()
13117 rc = dma_set_mask_and_coherent(&bp->pdev->dev, DMA_BIT_MASK(64)); in bnx2x_init_dev()
13119 dev_err(&bp->pdev->dev, "System does not support DMA, aborting\n"); in bnx2x_init_dev()
13129 bp->regview = pci_ioremap_bar(pdev, 0); in bnx2x_init_dev()
13130 if (!bp->regview) { in bnx2x_init_dev()
13131 dev_err(&bp->pdev->dev, in bnx2x_init_dev()
13143 bp->pf_num = PCI_FUNC(pdev->devfn); in bnx2x_init_dev()
13146 pci_read_config_dword(bp->pdev, in bnx2x_init_dev()
13148 bp->pf_num = (u8)((pci_cfg_dword & ME_REG_ABS_PF_NUM) >> in bnx2x_init_dev()
13151 BNX2X_DEV_INFO("me reg PF num: %d\n", bp->pf_num); in bnx2x_init_dev()
13154 pci_write_config_dword(bp->pdev, PCICFG_GRC_ADDRESS, in bnx2x_init_dev()
13163 bp->flags |= AER_ENABLED; in bnx2x_init_dev()
13171 if (IS_PF(bp)) { in bnx2x_init_dev()
13172 REG_WR(bp, PXP2_REG_PGL_ADDR_88_F0, 0); in bnx2x_init_dev()
13173 REG_WR(bp, PXP2_REG_PGL_ADDR_8C_F0, 0); in bnx2x_init_dev()
13174 REG_WR(bp, PXP2_REG_PGL_ADDR_90_F0, 0); in bnx2x_init_dev()
13175 REG_WR(bp, PXP2_REG_PGL_ADDR_94_F0, 0); in bnx2x_init_dev()
13178 REG_WR(bp, PXP2_REG_PGL_ADDR_88_F1, 0); in bnx2x_init_dev()
13179 REG_WR(bp, PXP2_REG_PGL_ADDR_8C_F1, 0); in bnx2x_init_dev()
13180 REG_WR(bp, PXP2_REG_PGL_ADDR_90_F1, 0); in bnx2x_init_dev()
13181 REG_WR(bp, PXP2_REG_PGL_ADDR_94_F1, 0); in bnx2x_init_dev()
13189 REG_WR(bp, in bnx2x_init_dev()
13196 bnx2x_set_ethtool_ops(bp, dev); in bnx2x_init_dev()
13222 if (IS_PF(bp)) in bnx2x_init_dev()
13229 if (IS_PF(bp)) { in bnx2x_init_dev()
13231 bp->accept_any_vlan = true; in bnx2x_init_dev()
13256 bp->mdio.prtad = MDIO_PRTAD_NONE; in bnx2x_init_dev()
13257 bp->mdio.mmds = 0; in bnx2x_init_dev()
13258 bp->mdio.mode_support = MDIO_SUPPORTS_C45 | MDIO_EMULATE_C22; in bnx2x_init_dev()
13259 bp->mdio.dev = dev; in bnx2x_init_dev()
13260 bp->mdio.mdio_read = bnx2x_mdio_read; in bnx2x_init_dev()
13261 bp->mdio.mdio_write = bnx2x_mdio_write; in bnx2x_init_dev()
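The mdio_if_info filled in at the end of bnx2x_init_dev() lets the generic MDIO layer service MII ioctls through the driver's clause-45 accessors. A hedged sketch of how such an interface is typically wired and consumed (example_* callbacks are hypothetical stubs; field names and signatures follow linux/mdio.h):

#include <linux/mdio.h>
#include <linux/mii.h>
#include <linux/netdevice.h>

static int example_mdio_read(struct net_device *dev, int prtad, int devad, u16 addr)
{
	return 0;	/* would issue a clause-45 read on the PHY here */
}

static int example_mdio_write(struct net_device *dev, int prtad, int devad,
			      u16 addr, u16 value)
{
	return 0;	/* would issue a clause-45 write on the PHY here */
}

static void example_mdio_setup(struct mdio_if_info *mdio, struct net_device *dev)
{
	mdio->prtad = MDIO_PRTAD_NONE;
	mdio->mmds = 0;
	mdio->mode_support = MDIO_SUPPORTS_C45 | MDIO_EMULATE_C22;
	mdio->dev = dev;
	mdio->mdio_read = example_mdio_read;
	mdio->mdio_write = example_mdio_write;
}

/* An ioctl handler can then forward SIOCGMIIREG and friends to the MDIO core: */
static int example_mii_ioctl(struct mdio_if_info *mdio, struct ifreq *ifr, int cmd)
{
	return mdio_mii_ioctl(mdio, if_mii(ifr), cmd);
}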
13276 static int bnx2x_check_firmware(struct bnx2x *bp) in bnx2x_check_firmware() argument
13278 const struct firmware *firmware = bp->firmware; in bnx2x_check_firmware()
13320 if (fw_ver[0] != bp->fw_major || fw_ver[1] != bp->fw_minor || in bnx2x_check_firmware()
13321 fw_ver[2] != bp->fw_rev || fw_ver[3] != bp->fw_eng) { in bnx2x_check_firmware()
13324 bp->fw_major, bp->fw_minor, bp->fw_rev, bp->fw_eng); in bnx2x_check_firmware()
13395 bp->arr = kmalloc(len, GFP_KERNEL); \
13396 if (!bp->arr) \
13398 func(bp->firmware->data + be32_to_cpu(fw_hdr->arr.offset), \
13399 (u8 *)bp->arr, len); \
13402 static int bnx2x_init_firmware(struct bnx2x *bp) in bnx2x_init_firmware() argument
13408 if (bp->firmware) in bnx2x_init_firmware()
13411 if (CHIP_IS_E1(bp)) { in bnx2x_init_firmware()
13414 } else if (CHIP_IS_E1H(bp)) { in bnx2x_init_firmware()
13417 } else if (!CHIP_IS_E1x(bp)) { in bnx2x_init_firmware()
13427 rc = request_firmware(&bp->firmware, fw_file_name, &bp->pdev->dev); in bnx2x_init_firmware()
13432 rc = request_firmware(&bp->firmware, fw_file_name_v15, &bp->pdev->dev); in bnx2x_init_firmware()
13437 bp->fw_rev = BCM_5710_FW_REVISION_VERSION_V15; in bnx2x_init_firmware()
13439 bp->fw_cap |= FW_CAP_INVALIDATE_VF_FP_HSI; in bnx2x_init_firmware()
13440 bp->fw_rev = BCM_5710_FW_REVISION_VERSION; in bnx2x_init_firmware()
13443 bp->fw_major = BCM_5710_FW_MAJOR_VERSION; in bnx2x_init_firmware()
13444 bp->fw_minor = BCM_5710_FW_MINOR_VERSION; in bnx2x_init_firmware()
13445 bp->fw_eng = BCM_5710_FW_ENGINEERING_VERSION; in bnx2x_init_firmware()
13447 rc = bnx2x_check_firmware(bp); in bnx2x_init_firmware()
13453 fw_hdr = (struct bnx2x_fw_file_hdr *)bp->firmware->data; in bnx2x_init_firmware()
13468 INIT_TSEM_INT_TABLE_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13470 INIT_TSEM_PRAM_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13472 INIT_USEM_INT_TABLE_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13474 INIT_USEM_PRAM_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13476 INIT_XSEM_INT_TABLE_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13478 INIT_XSEM_PRAM_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13480 INIT_CSEM_INT_TABLE_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13482 INIT_CSEM_PRAM_DATA(bp) = bp->firmware->data + in bnx2x_init_firmware()
13490 kfree(bp->init_ops_offsets); in bnx2x_init_firmware()
13492 kfree(bp->init_ops); in bnx2x_init_firmware()
13494 kfree(bp->init_data); in bnx2x_init_firmware()
13496 release_firmware(bp->firmware); in bnx2x_init_firmware()
13497 bp->firmware = NULL; in bnx2x_init_firmware()
13502 static void bnx2x_release_firmware(struct bnx2x *bp) in bnx2x_release_firmware() argument
13504 kfree(bp->init_ops_offsets); in bnx2x_release_firmware()
13505 kfree(bp->init_ops); in bnx2x_release_firmware()
13506 kfree(bp->init_data); in bnx2x_release_firmware()
13507 release_firmware(bp->firmware); in bnx2x_release_firmware()
13508 bp->firmware = NULL; in bnx2x_release_firmware()
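bnx2x_init_firmware()/bnx2x_release_firmware() above are built on the kernel's request_firmware() API: fetch the blob, validate its header, keep parsed pointers into ->data, and release everything on teardown. A minimal sketch of that lifecycle (example_load_fw() is illustrative, not a driver helper):

#include <linux/firmware.h>
#include <linux/device.h>
#include <linux/errno.h>

/* Hypothetical helper: load a blob, reject an obviously truncated image, hand ownership back. */
static int example_load_fw(struct device *dev, const char *name,
			   const struct firmware **fw_out)
{
	const struct firmware *fw;
	int rc;

	rc = request_firmware(&fw, name, dev);	/* may sleep; 0 on success */
	if (rc)
		return rc;

	if (fw->size < 4) {			/* too small to even hold a version header */
		release_firmware(fw);
		return -EINVAL;
	}

	*fw_out = fw;				/* caller keeps fw->data and calls release_firmware() later */
	return 0;
}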
13528 void bnx2x__init_func_obj(struct bnx2x *bp) in bnx2x__init_func_obj() argument
13531 bnx2x_setup_dmae(bp); in bnx2x__init_func_obj()
13533 bnx2x_init_func_obj(bp, &bp->func_obj, in bnx2x__init_func_obj()
13534 bnx2x_sp(bp, func_rdata), in bnx2x__init_func_obj()
13535 bnx2x_sp_mapping(bp, func_rdata), in bnx2x__init_func_obj()
13536 bnx2x_sp(bp, func_afex_rdata), in bnx2x__init_func_obj()
13537 bnx2x_sp_mapping(bp, func_afex_rdata), in bnx2x__init_func_obj()
13542 static int bnx2x_set_qm_cid_count(struct bnx2x *bp) in bnx2x_set_qm_cid_count() argument
13544 int cid_count = BNX2X_L2_MAX_CID(bp); in bnx2x_set_qm_cid_count()
13546 if (IS_SRIOV(bp)) in bnx2x_set_qm_cid_count()
13549 if (CNIC_SUPPORT(bp)) in bnx2x_set_qm_cid_count()
13649 static int bnx2x_send_update_drift_ramrod(struct bnx2x *bp, int drift_dir, in bnx2x_send_update_drift_ramrod() argument
13660 func_params.f_obj = &bp->func_obj; in bnx2x_send_update_drift_ramrod()
13671 return bnx2x_func_state_change(bp, &func_params); in bnx2x_send_update_drift_ramrod()
13676 struct bnx2x *bp = container_of(ptp, struct bnx2x, ptp_clock_info); in bnx2x_ptp_adjfreq() local
13684 if (!netif_running(bp->dev)) { in bnx2x_ptp_adjfreq()
13729 rc = bnx2x_send_update_drift_ramrod(bp, drift_dir, best_val, in bnx2x_ptp_adjfreq()
13744 struct bnx2x *bp = container_of(ptp, struct bnx2x, ptp_clock_info); in bnx2x_ptp_adjtime() local
13746 if (!netif_running(bp->dev)) { in bnx2x_ptp_adjtime()
13754 timecounter_adjtime(&bp->timecounter, delta); in bnx2x_ptp_adjtime()
13761 struct bnx2x *bp = container_of(ptp, struct bnx2x, ptp_clock_info); in bnx2x_ptp_gettime() local
13764 if (!netif_running(bp->dev)) { in bnx2x_ptp_gettime()
13770 ns = timecounter_read(&bp->timecounter); in bnx2x_ptp_gettime()
13782 struct bnx2x *bp = container_of(ptp, struct bnx2x, ptp_clock_info); in bnx2x_ptp_settime() local
13785 if (!netif_running(bp->dev)) { in bnx2x_ptp_settime()
13796 timecounter_init(&bp->timecounter, &bp->cyclecounter, ns); in bnx2x_ptp_settime()
13805 struct bnx2x *bp = container_of(ptp, struct bnx2x, ptp_clock_info); in bnx2x_ptp_enable() local
13811 void bnx2x_register_phc(struct bnx2x *bp) in bnx2x_register_phc() argument
13814 bp->ptp_clock_info.owner = THIS_MODULE; in bnx2x_register_phc()
13815 snprintf(bp->ptp_clock_info.name, 16, "%s", bp->dev->name); in bnx2x_register_phc()
13816 bp->ptp_clock_info.max_adj = BNX2X_MAX_PHC_DRIFT; /* In PPB */ in bnx2x_register_phc()
13817 bp->ptp_clock_info.n_alarm = 0; in bnx2x_register_phc()
13818 bp->ptp_clock_info.n_ext_ts = 0; in bnx2x_register_phc()
13819 bp->ptp_clock_info.n_per_out = 0; in bnx2x_register_phc()
13820 bp->ptp_clock_info.pps = 0; in bnx2x_register_phc()
13821 bp->ptp_clock_info.adjfreq = bnx2x_ptp_adjfreq; in bnx2x_register_phc()
13822 bp->ptp_clock_info.adjtime = bnx2x_ptp_adjtime; in bnx2x_register_phc()
13823 bp->ptp_clock_info.gettime64 = bnx2x_ptp_gettime; in bnx2x_register_phc()
13824 bp->ptp_clock_info.settime64 = bnx2x_ptp_settime; in bnx2x_register_phc()
13825 bp->ptp_clock_info.enable = bnx2x_ptp_enable; in bnx2x_register_phc()
13827 bp->ptp_clock = ptp_clock_register(&bp->ptp_clock_info, &bp->pdev->dev); in bnx2x_register_phc()
13828 if (IS_ERR(bp->ptp_clock)) { in bnx2x_register_phc()
13829 bp->ptp_clock = NULL; in bnx2x_register_phc()
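Once ptp_clock_register() succeeds in bnx2x_register_phc(), the PHC is visible from user space as a /dev/ptpN character device backed by a dynamic POSIX clock. A small user-space C example reading it (the /dev/ptp0 path is an assumption; FD_TO_CLOCKID mirrors the macro used by the kernel's testptp tool):

#include <fcntl.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

/* Dynamic POSIX clock encoding used for /dev/ptpN file descriptors. */
#define FD_TO_CLOCKID(fd) ((~(clockid_t)(fd) << 3) | 3)

int main(void)
{
	struct timespec ts;
	int fd = open("/dev/ptp0", O_RDONLY);	/* assumption: the bnx2x PHC is ptp0 */

	if (fd < 0) {
		perror("open /dev/ptp0");
		return 1;
	}
	if (clock_gettime(FD_TO_CLOCKID(fd), &ts) == 0)
		printf("PHC time: %lld.%09ld\n", (long long)ts.tv_sec, ts.tv_nsec);
	close(fd);
	return 0;
}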
13838 struct bnx2x *bp; in bnx2x_init_one() local
13890 dev = alloc_etherdev_mqs(sizeof(*bp), tx_count, rx_count); in bnx2x_init_one()
13894 bp = netdev_priv(dev); in bnx2x_init_one()
13896 bp->flags = 0; in bnx2x_init_one()
13898 bp->flags |= IS_VF_FLAG; in bnx2x_init_one()
13900 bp->igu_sb_cnt = max_non_def_sbs; in bnx2x_init_one()
13901 bp->igu_base_addr = IS_VF(bp) ? PXP_VF_ADDR_IGU_START : BAR_IGU_INTMEM; in bnx2x_init_one()
13902 bp->msg_enable = debug; in bnx2x_init_one()
13903 bp->cnic_support = cnic_cnt; in bnx2x_init_one()
13904 bp->cnic_probe = bnx2x_cnic_probe; in bnx2x_init_one()
13908 rc = bnx2x_init_dev(bp, pdev, dev, ent->driver_data); in bnx2x_init_one()
13915 IS_PF(bp) ? "physical" : "virtual"); in bnx2x_init_one()
13916 BNX2X_DEV_INFO("Cnic support is %s\n", CNIC_SUPPORT(bp) ? "on" : "off"); in bnx2x_init_one()
13921 rc = bnx2x_init_bp(bp); in bnx2x_init_one()
13929 if (IS_VF(bp)) { in bnx2x_init_one()
13930 bp->doorbells = bnx2x_vf_doorbells(bp); in bnx2x_init_one()
13931 rc = bnx2x_vf_pci_alloc(bp); in bnx2x_init_one()
13935 doorbell_size = BNX2X_L2_MAX_CID(bp) * (1 << BNX2X_DB_SHIFT); in bnx2x_init_one()
13937 dev_err(&bp->pdev->dev, in bnx2x_init_one()
13942 bp->doorbells = ioremap(pci_resource_start(pdev, 2), in bnx2x_init_one()
13945 if (!bp->doorbells) { in bnx2x_init_one()
13946 dev_err(&bp->pdev->dev, in bnx2x_init_one()
13952 if (IS_VF(bp)) { in bnx2x_init_one()
13953 rc = bnx2x_vfpf_acquire(bp, tx_count, rx_count); in bnx2x_init_one()
13959 if (bp->acquire_resp.pfdev_info.pf_cap & PFVF_CAP_VLAN_FILTER) { in bnx2x_init_one()
13967 rc = bnx2x_iov_init_one(bp, int_mode, BNX2X_MAX_NUM_OF_VFS); in bnx2x_init_one()
13972 bp->qm_cid_count = bnx2x_set_qm_cid_count(bp); in bnx2x_init_one()
13973 BNX2X_DEV_INFO("qm_cid_count %d\n", bp->qm_cid_count); in bnx2x_init_one()
13976 if (CHIP_IS_E1x(bp)) in bnx2x_init_one()
13977 bp->flags |= NO_FCOE_FLAG; in bnx2x_init_one()
13980 bnx2x_set_num_queues(bp); in bnx2x_init_one()
13985 rc = bnx2x_set_int_mode(bp); in bnx2x_init_one()
14000 if (!NO_FCOE(bp)) { in bnx2x_init_one()
14003 dev_addr_add(bp->dev, bp->fip_mac, NETDEV_HW_ADDR_T_SAN); in bnx2x_init_one()
14009 (CHIP_REV(bp) >> 12) + 'A', (CHIP_METAL(bp) >> 4), in bnx2x_init_one()
14010 dev->base_addr, bp->pdev->irq, dev->dev_addr); in bnx2x_init_one()
14011 pcie_print_link_status(bp->pdev); in bnx2x_init_one()
14013 if (!IS_MF_SD_STORAGE_PERSONALITY_ONLY(bp)) in bnx2x_init_one()
14014 bnx2x_set_os_driver_state(bp, OS_DRIVER_STATE_DISABLED); in bnx2x_init_one()
14019 bnx2x_free_mem_bp(bp); in bnx2x_init_one()
14022 bnx2x_disable_pcie_error_reporting(bp); in bnx2x_init_one()
14024 if (bp->regview) in bnx2x_init_one()
14025 iounmap(bp->regview); in bnx2x_init_one()
14027 if (IS_PF(bp) && bp->doorbells) in bnx2x_init_one()
14028 iounmap(bp->doorbells); in bnx2x_init_one()
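The probe path maps BAR0 as the register view and, for a PF, a slice of BAR2 for doorbells; the error path unmaps them in reverse. A condensed sketch of that mapping order (example_map_bars() is illustrative; db_size corresponds to the L2-CID-derived doorbell_size above):

#include <linux/pci.h>
#include <linux/io.h>
#include <linux/errno.h>

static int example_map_bars(struct pci_dev *pdev, void __iomem **regs,
			    void __iomem **doorbells, size_t db_size)
{
	*regs = pci_ioremap_bar(pdev, 0);	/* full BAR0 register view */
	if (!*regs)
		return -ENOMEM;

	/* The doorbell window may be smaller than BAR2; map only what is needed. */
	*doorbells = ioremap(pci_resource_start(pdev, 2), db_size);
	if (!*doorbells) {
		iounmap(*regs);
		return -ENOMEM;
	}
	return 0;
}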
14042 struct bnx2x *bp, in __bnx2x_remove() argument
14046 if (!NO_FCOE(bp)) { in __bnx2x_remove()
14048 dev_addr_del(bp->dev, bp->fip_mac, NETDEV_HW_ADDR_T_SAN); in __bnx2x_remove()
14054 bnx2x_dcbnl_update_applist(bp, true); in __bnx2x_remove()
14057 if (IS_PF(bp) && in __bnx2x_remove()
14058 !BP_NOMCP(bp) && in __bnx2x_remove()
14059 (bp->flags & BC_SUPPORTS_RMMOD_CMD)) in __bnx2x_remove()
14060 bnx2x_fw_command(bp, DRV_MSG_CODE_RMMOD, 0); in __bnx2x_remove()
14071 bnx2x_iov_remove_one(bp); in __bnx2x_remove()
14074 if (IS_PF(bp)) { in __bnx2x_remove()
14075 bnx2x_set_power_state(bp, PCI_D0); in __bnx2x_remove()
14076 bnx2x_set_os_driver_state(bp, OS_DRIVER_STATE_NOT_LOADED); in __bnx2x_remove()
14081 bnx2x_reset_endianity(bp); in __bnx2x_remove()
14085 bnx2x_disable_msi(bp); in __bnx2x_remove()
14088 if (IS_PF(bp)) in __bnx2x_remove()
14089 bnx2x_set_power_state(bp, PCI_D3hot); in __bnx2x_remove()
14092 cancel_delayed_work_sync(&bp->sp_rtnl_task); in __bnx2x_remove()
14095 if (IS_VF(bp)) in __bnx2x_remove()
14096 bnx2x_vfpf_release(bp); in __bnx2x_remove()
14100 pci_wake_from_d3(pdev, bp->wol); in __bnx2x_remove()
14104 bnx2x_disable_pcie_error_reporting(bp); in __bnx2x_remove()
14106 if (bp->regview) in __bnx2x_remove()
14107 iounmap(bp->regview); in __bnx2x_remove()
14112 if (IS_PF(bp)) { in __bnx2x_remove()
14113 if (bp->doorbells) in __bnx2x_remove()
14114 iounmap(bp->doorbells); in __bnx2x_remove()
14116 bnx2x_release_firmware(bp); in __bnx2x_remove()
14118 bnx2x_vf_pci_dealloc(bp); in __bnx2x_remove()
14120 bnx2x_free_mem_bp(bp); in __bnx2x_remove()
14134 struct bnx2x *bp; in bnx2x_remove_one() local
14140 bp = netdev_priv(dev); in bnx2x_remove_one()
14142 __bnx2x_remove(pdev, dev, bp, true); in bnx2x_remove_one()
14145 static int bnx2x_eeh_nic_unload(struct bnx2x *bp) in bnx2x_eeh_nic_unload() argument
14147 bp->state = BNX2X_STATE_CLOSING_WAIT4_HALT; in bnx2x_eeh_nic_unload()
14149 bp->rx_mode = BNX2X_RX_MODE_NONE; in bnx2x_eeh_nic_unload()
14151 if (CNIC_LOADED(bp)) in bnx2x_eeh_nic_unload()
14152 bnx2x_cnic_notify(bp, CNIC_CTL_STOP_CMD); in bnx2x_eeh_nic_unload()
14155 bnx2x_tx_disable(bp); in bnx2x_eeh_nic_unload()
14156 netdev_reset_tc(bp->dev); in bnx2x_eeh_nic_unload()
14158 del_timer_sync(&bp->timer); in bnx2x_eeh_nic_unload()
14159 cancel_delayed_work_sync(&bp->sp_task); in bnx2x_eeh_nic_unload()
14160 cancel_delayed_work_sync(&bp->period_task); in bnx2x_eeh_nic_unload()
14162 if (!down_timeout(&bp->stats_lock, HZ / 10)) { in bnx2x_eeh_nic_unload()
14163 bp->stats_state = STATS_STATE_DISABLED; in bnx2x_eeh_nic_unload()
14164 up(&bp->stats_lock); in bnx2x_eeh_nic_unload()
14167 bnx2x_save_statistics(bp); in bnx2x_eeh_nic_unload()
14169 netif_carrier_off(bp->dev); in bnx2x_eeh_nic_unload()
14186 struct bnx2x *bp = netdev_priv(dev); in bnx2x_io_error_detected() local
14200 bnx2x_eeh_nic_unload(bp); in bnx2x_io_error_detected()
14202 bnx2x_prev_path_mark_eeh(bp); in bnx2x_io_error_detected()
14221 struct bnx2x *bp = netdev_priv(dev); in bnx2x_io_slot_reset() local
14238 bnx2x_set_power_state(bp, PCI_D0); in bnx2x_io_slot_reset()
14244 if (bnx2x_init_shmem(bp)) { in bnx2x_io_slot_reset()
14249 if (IS_PF(bp) && SHMEM2_HAS(bp, drv_capabilities_flag)) { in bnx2x_io_slot_reset()
14252 v = SHMEM2_RD(bp, in bnx2x_io_slot_reset()
14253 drv_capabilities_flag[BP_FW_MB_IDX(bp)]); in bnx2x_io_slot_reset()
14254 SHMEM2_WR(bp, drv_capabilities_flag[BP_FW_MB_IDX(bp)], in bnx2x_io_slot_reset()
14257 bnx2x_drain_tx_queues(bp); in bnx2x_io_slot_reset()
14258 bnx2x_send_unload_req(bp, UNLOAD_RECOVERY); in bnx2x_io_slot_reset()
14259 bnx2x_netif_stop(bp, 1); in bnx2x_io_slot_reset()
14260 bnx2x_del_all_napi(bp); in bnx2x_io_slot_reset()
14262 if (CNIC_LOADED(bp)) in bnx2x_io_slot_reset()
14263 bnx2x_del_all_napi_cnic(bp); in bnx2x_io_slot_reset()
14265 bnx2x_free_irq(bp); in bnx2x_io_slot_reset()
14268 bnx2x_send_unload_done(bp, true); in bnx2x_io_slot_reset()
14270 bp->sp_state = 0; in bnx2x_io_slot_reset()
14271 bp->port.pmf = 0; in bnx2x_io_slot_reset()
14273 bnx2x_prev_unload(bp); in bnx2x_io_slot_reset()
14278 bnx2x_squeeze_objects(bp); in bnx2x_io_slot_reset()
14279 bnx2x_free_skbs(bp); in bnx2x_io_slot_reset()
14280 for_each_rx_queue(bp, i) in bnx2x_io_slot_reset()
14281 bnx2x_free_rx_sge_range(bp, bp->fp + i, NUM_RX_SGE); in bnx2x_io_slot_reset()
14282 bnx2x_free_fp_mem(bp); in bnx2x_io_slot_reset()
14283 bnx2x_free_mem(bp); in bnx2x_io_slot_reset()
14285 bp->state = BNX2X_STATE_CLOSED; in bnx2x_io_slot_reset()
14303 struct bnx2x *bp = netdev_priv(dev); in bnx2x_io_resume() local
14305 if (bp->recovery_state != BNX2X_RECOVERY_DONE) { in bnx2x_io_resume()
14306 netdev_err(bp->dev, "Handling parity error recovery. Try again later\n"); in bnx2x_io_resume()
14312 bp->fw_seq = SHMEM_RD(bp, func_mb[BP_FW_MB_IDX(bp)].drv_mb_header) & in bnx2x_io_resume()
14316 bnx2x_nic_load(bp, LOAD_NORMAL); in bnx2x_io_resume()
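bnx2x_io_error_detected(), bnx2x_io_slot_reset() and bnx2x_io_resume() above implement the three-stage PCI error recovery contract. A sketch of how such handlers are published to the PCI core (example_* handlers are placeholders for the driver's bnx2x_io_* callbacks):

#include <linux/pci.h>

static pci_ers_result_t example_error_detected(struct pci_dev *pdev,
					       pci_channel_state_t state)
{
	/* Quiesce the device; NEED_RESET asks the core to reset the slot next. */
	return PCI_ERS_RESULT_NEED_RESET;
}

static pci_ers_result_t example_slot_reset(struct pci_dev *pdev)
{
	/* Re-enable the device after the link reset, then let ->resume() reload it. */
	return pci_enable_device(pdev) ? PCI_ERS_RESULT_DISCONNECT
				       : PCI_ERS_RESULT_RECOVERED;
}

static void example_io_resume(struct pci_dev *pdev)
{
	/* Traffic may restart here, mirroring bnx2x_io_resume() reloading the NIC. */
}

static const struct pci_error_handlers example_err_handler = {
	.error_detected	= example_error_detected,
	.slot_reset	= example_slot_reset,
	.resume		= example_io_resume,
};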
14332 struct bnx2x *bp; in bnx2x_shutdown() local
14337 bp = netdev_priv(dev); in bnx2x_shutdown()
14338 if (!bp) in bnx2x_shutdown()
14349 __bnx2x_remove(pdev, dev, bp, false); in bnx2x_shutdown()
14408 void bnx2x_notify_link_changed(struct bnx2x *bp) in bnx2x_notify_link_changed() argument
14410 REG_WR(bp, MISC_REG_AEU_GENERAL_ATTN_12 + BP_FUNC(bp)*sizeof(u32), 1); in bnx2x_notify_link_changed()
14423 static int bnx2x_set_iscsi_eth_mac_addr(struct bnx2x *bp) in bnx2x_set_iscsi_eth_mac_addr() argument
14428 return bnx2x_set_mac_one(bp, bp->cnic_eth_dev.iscsi_mac, in bnx2x_set_iscsi_eth_mac_addr()
14429 &bp->iscsi_l2_mac_obj, true, in bnx2x_set_iscsi_eth_mac_addr()
14434 static void bnx2x_cnic_sp_post(struct bnx2x *bp, int count) in bnx2x_cnic_sp_post() argument
14440 if (unlikely(bp->panic)) in bnx2x_cnic_sp_post()
14444 spin_lock_bh(&bp->spq_lock); in bnx2x_cnic_sp_post()
14445 BUG_ON(bp->cnic_spq_pending < count); in bnx2x_cnic_sp_post()
14446 bp->cnic_spq_pending -= count; in bnx2x_cnic_sp_post()
14448 for (; bp->cnic_kwq_pending; bp->cnic_kwq_pending--) { in bnx2x_cnic_sp_post()
14449 u16 type = (le16_to_cpu(bp->cnic_kwq_cons->hdr.type) in bnx2x_cnic_sp_post()
14452 u8 cmd = (le32_to_cpu(bp->cnic_kwq_cons->hdr.conn_and_cmd_data) in bnx2x_cnic_sp_post()
14460 cxt_index = BNX2X_ISCSI_ETH_CID(bp) / in bnx2x_cnic_sp_post()
14462 cxt_offset = BNX2X_ISCSI_ETH_CID(bp) - in bnx2x_cnic_sp_post()
14464 bnx2x_set_ctx_validation(bp, in bnx2x_cnic_sp_post()
14465 &bp->context[cxt_index]. in bnx2x_cnic_sp_post()
14467 BNX2X_ISCSI_ETH_CID(bp)); in bnx2x_cnic_sp_post()
14478 if (!atomic_read(&bp->cq_spq_left)) in bnx2x_cnic_sp_post()
14481 atomic_dec(&bp->cq_spq_left); in bnx2x_cnic_sp_post()
14483 if (!atomic_read(&bp->eq_spq_left)) in bnx2x_cnic_sp_post()
14486 atomic_dec(&bp->eq_spq_left); in bnx2x_cnic_sp_post()
14489 if (bp->cnic_spq_pending >= in bnx2x_cnic_sp_post()
14490 bp->cnic_eth_dev.max_kwqe_pending) in bnx2x_cnic_sp_post()
14493 bp->cnic_spq_pending++; in bnx2x_cnic_sp_post()
14500 spe = bnx2x_sp_get_next(bp); in bnx2x_cnic_sp_post()
14501 *spe = *bp->cnic_kwq_cons; in bnx2x_cnic_sp_post()
14504 bp->cnic_spq_pending, bp->cnic_kwq_pending, count); in bnx2x_cnic_sp_post()
14506 if (bp->cnic_kwq_cons == bp->cnic_kwq_last) in bnx2x_cnic_sp_post()
14507 bp->cnic_kwq_cons = bp->cnic_kwq; in bnx2x_cnic_sp_post()
14509 bp->cnic_kwq_cons++; in bnx2x_cnic_sp_post()
14511 bnx2x_sp_prod_update(bp); in bnx2x_cnic_sp_post()
14512 spin_unlock_bh(&bp->spq_lock); in bnx2x_cnic_sp_post()
14518 struct bnx2x *bp = netdev_priv(dev); in bnx2x_cnic_sp_queue() local
14522 if (unlikely(bp->panic)) { in bnx2x_cnic_sp_queue()
14528 if ((bp->recovery_state != BNX2X_RECOVERY_DONE) && in bnx2x_cnic_sp_queue()
14529 (bp->recovery_state != BNX2X_RECOVERY_NIC_LOADING)) { in bnx2x_cnic_sp_queue()
14534 spin_lock_bh(&bp->spq_lock); in bnx2x_cnic_sp_queue()
14539 if (bp->cnic_kwq_pending == MAX_SP_DESC_CNT) in bnx2x_cnic_sp_queue()
14542 *bp->cnic_kwq_prod = *spe; in bnx2x_cnic_sp_queue()
14544 bp->cnic_kwq_pending++; in bnx2x_cnic_sp_queue()
14550 bp->cnic_kwq_pending); in bnx2x_cnic_sp_queue()
14552 if (bp->cnic_kwq_prod == bp->cnic_kwq_last) in bnx2x_cnic_sp_queue()
14553 bp->cnic_kwq_prod = bp->cnic_kwq; in bnx2x_cnic_sp_queue()
14555 bp->cnic_kwq_prod++; in bnx2x_cnic_sp_queue()
14558 spin_unlock_bh(&bp->spq_lock); in bnx2x_cnic_sp_queue()
14560 if (bp->cnic_spq_pending < bp->cnic_eth_dev.max_kwqe_pending) in bnx2x_cnic_sp_queue()
14561 bnx2x_cnic_sp_post(bp, 0); in bnx2x_cnic_sp_queue()
14566 static int bnx2x_cnic_ctl_send(struct bnx2x *bp, struct cnic_ctl_info *ctl) in bnx2x_cnic_ctl_send() argument
14571 mutex_lock(&bp->cnic_mutex); in bnx2x_cnic_ctl_send()
14572 c_ops = rcu_dereference_protected(bp->cnic_ops, in bnx2x_cnic_ctl_send()
14573 lockdep_is_held(&bp->cnic_mutex)); in bnx2x_cnic_ctl_send()
14575 rc = c_ops->cnic_ctl(bp->cnic_data, ctl); in bnx2x_cnic_ctl_send()
14576 mutex_unlock(&bp->cnic_mutex); in bnx2x_cnic_ctl_send()
14581 static int bnx2x_cnic_ctl_send_bh(struct bnx2x *bp, struct cnic_ctl_info *ctl) in bnx2x_cnic_ctl_send_bh() argument
14587 c_ops = rcu_dereference(bp->cnic_ops); in bnx2x_cnic_ctl_send_bh()
14589 rc = c_ops->cnic_ctl(bp->cnic_data, ctl); in bnx2x_cnic_ctl_send_bh()
14598 int bnx2x_cnic_notify(struct bnx2x *bp, int cmd) in bnx2x_cnic_notify() argument
14604 return bnx2x_cnic_ctl_send(bp, &ctl); in bnx2x_cnic_notify()
14607 static void bnx2x_cnic_cfc_comp(struct bnx2x *bp, int cid, u8 err) in bnx2x_cnic_cfc_comp() argument
14616 bnx2x_cnic_ctl_send_bh(bp, &ctl); in bnx2x_cnic_cfc_comp()
14617 bnx2x_cnic_sp_post(bp, 0); in bnx2x_cnic_cfc_comp()
14625 static void bnx2x_set_iscsi_eth_rx_mode(struct bnx2x *bp, bool start) in bnx2x_set_iscsi_eth_rx_mode() argument
14628 u8 cl_id = bnx2x_cnic_eth_cl_id(bp, BNX2X_ISCSI_ETH_CL_ID_IDX); in bnx2x_set_iscsi_eth_rx_mode()
14644 clear_bit(BNX2X_FILTER_ISCSI_ETH_STOP_SCHED, &bp->sp_state); in bnx2x_set_iscsi_eth_rx_mode()
14649 clear_bit(BNX2X_FILTER_ISCSI_ETH_START_SCHED, &bp->sp_state); in bnx2x_set_iscsi_eth_rx_mode()
14651 if (test_bit(BNX2X_FILTER_RX_MODE_PENDING, &bp->sp_state)) in bnx2x_set_iscsi_eth_rx_mode()
14652 set_bit(sched_state, &bp->sp_state); in bnx2x_set_iscsi_eth_rx_mode()
14655 bnx2x_set_q_rx_mode(bp, cl_id, 0, accept_flags, 0, in bnx2x_set_iscsi_eth_rx_mode()
14662 struct bnx2x *bp = netdev_priv(dev); in bnx2x_drv_ctl() local
14670 bnx2x_ilt_wr(bp, index, addr); in bnx2x_drv_ctl()
14677 bnx2x_cnic_sp_post(bp, count); in bnx2x_drv_ctl()
14683 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2x_drv_ctl()
14687 bnx2x_init_mac_obj(bp, &bp->iscsi_l2_mac_obj, in bnx2x_drv_ctl()
14689 cp->iscsi_l2_cid, BP_FUNC(bp), in bnx2x_drv_ctl()
14690 bnx2x_sp(bp, mac_rdata), in bnx2x_drv_ctl()
14691 bnx2x_sp_mapping(bp, mac_rdata), in bnx2x_drv_ctl()
14693 &bp->sp_state, BNX2X_OBJ_TYPE_RX, in bnx2x_drv_ctl()
14694 &bp->macs_pool); in bnx2x_drv_ctl()
14697 rc = bnx2x_set_iscsi_eth_mac_addr(bp); in bnx2x_drv_ctl()
14706 bnx2x_set_iscsi_eth_rx_mode(bp, true); in bnx2x_drv_ctl()
14713 if (!bnx2x_wait_sp_comp(bp, sp_bits)) in bnx2x_drv_ctl()
14725 bnx2x_set_iscsi_eth_rx_mode(bp, false); in bnx2x_drv_ctl()
14732 if (!bnx2x_wait_sp_comp(bp, sp_bits)) in bnx2x_drv_ctl()
14738 rc = bnx2x_del_all_macs(bp, &bp->iscsi_l2_mac_obj, in bnx2x_drv_ctl()
14746 atomic_add(count, &bp->cq_spq_left); in bnx2x_drv_ctl()
14753 if (CHIP_IS_E3(bp)) { in bnx2x_drv_ctl()
14754 int idx = BP_FW_MB_IDX(bp); in bnx2x_drv_ctl()
14755 u32 cap = SHMEM2_RD(bp, drv_capabilities_flag[idx]); in bnx2x_drv_ctl()
14756 int path = BP_PATH(bp); in bnx2x_drv_ctl()
14757 int port = BP_PORT(bp); in bnx2x_drv_ctl()
14767 SHMEM2_WR(bp, drv_capabilities_flag[idx], cap); in bnx2x_drv_ctl()
14770 (!SHMEM2_HAS(bp, ncsi_oem_data_addr)) || in bnx2x_drv_ctl()
14771 (!(bp->flags & BC_SUPPORTS_FCOE_FEATURES))) in bnx2x_drv_ctl()
14775 scratch_offset = SHMEM2_RD(bp, ncsi_oem_data_addr); in bnx2x_drv_ctl()
14784 REG_WR(bp, scratch_offset + i, in bnx2x_drv_ctl()
14787 bnx2x_schedule_sp_rtnl(bp, BNX2X_SP_RTNL_GET_DRV_VERSION, 0); in bnx2x_drv_ctl()
14794 if (CHIP_IS_E3(bp)) { in bnx2x_drv_ctl()
14795 int idx = BP_FW_MB_IDX(bp); in bnx2x_drv_ctl()
14798 cap = SHMEM2_RD(bp, drv_capabilities_flag[idx]); in bnx2x_drv_ctl()
14803 SHMEM2_WR(bp, drv_capabilities_flag[idx], cap); in bnx2x_drv_ctl()
14805 bnx2x_schedule_sp_rtnl(bp, BNX2X_SP_RTNL_GET_DRV_VERSION, 0); in bnx2x_drv_ctl()
14815 if (IS_MF_SD_STORAGE_PERSONALITY_ONLY(bp)) { in bnx2x_drv_ctl()
14820 bnx2x_set_os_driver_state(bp, in bnx2x_drv_ctl()
14824 bnx2x_set_os_driver_state(bp, in bnx2x_drv_ctl()
14828 bnx2x_set_os_driver_state(bp, in bnx2x_drv_ctl()
14842 struct bnx2x *bp = netdev_priv(dev); in bnx2x_get_fc_npiv() local
14848 if (!SHMEM2_HAS(bp, fc_npiv_nvram_tbl_addr[0])) in bnx2x_get_fc_npiv()
14859 offset = SHMEM2_RD(bp, fc_npiv_nvram_tbl_addr[BP_PORT(bp)]); in bnx2x_get_fc_npiv()
14867 if (bnx2x_nvram_read(bp, offset, (u8 *)tbl, sizeof(*tbl))) { in bnx2x_get_fc_npiv()
14905 void bnx2x_setup_cnic_irq_info(struct bnx2x *bp) in bnx2x_setup_cnic_irq_info() argument
14907 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2x_setup_cnic_irq_info()
14909 if (bp->flags & USING_MSIX_FLAG) { in bnx2x_setup_cnic_irq_info()
14912 cp->irq_arr[0].vector = bp->msix_table[1].vector; in bnx2x_setup_cnic_irq_info()
14917 if (!CHIP_IS_E1x(bp)) in bnx2x_setup_cnic_irq_info()
14918 cp->irq_arr[0].status_blk = (void *)bp->cnic_sb.e2_sb; in bnx2x_setup_cnic_irq_info()
14920 cp->irq_arr[0].status_blk = (void *)bp->cnic_sb.e1x_sb; in bnx2x_setup_cnic_irq_info()
14922 cp->irq_arr[0].status_blk_num = bnx2x_cnic_fw_sb_id(bp); in bnx2x_setup_cnic_irq_info()
14923 cp->irq_arr[0].status_blk_num2 = bnx2x_cnic_igu_sb_id(bp); in bnx2x_setup_cnic_irq_info()
14924 cp->irq_arr[1].status_blk = bp->def_status_blk; in bnx2x_setup_cnic_irq_info()
14931 void bnx2x_setup_cnic_info(struct bnx2x *bp) in bnx2x_setup_cnic_info() argument
14933 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2x_setup_cnic_info()
14935 cp->ctx_tbl_offset = FUNC_ILT_BASE(BP_FUNC(bp)) + in bnx2x_setup_cnic_info()
14936 bnx2x_cid_ilt_lines(bp); in bnx2x_setup_cnic_info()
14937 cp->starting_cid = bnx2x_cid_ilt_lines(bp) * ILT_PAGE_CIDS; in bnx2x_setup_cnic_info()
14938 cp->fcoe_init_cid = BNX2X_FCOE_ETH_CID(bp); in bnx2x_setup_cnic_info()
14939 cp->iscsi_l2_cid = BNX2X_ISCSI_ETH_CID(bp); in bnx2x_setup_cnic_info()
14942 BNX2X_1st_NON_L2_ETH_CID(bp), cp->starting_cid, cp->fcoe_init_cid, in bnx2x_setup_cnic_info()
14945 if (NO_ISCSI_OOO(bp)) in bnx2x_setup_cnic_info()
14952 struct bnx2x *bp = netdev_priv(dev); in bnx2x_register_cnic() local
14953 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2x_register_cnic()
14963 if (!CNIC_SUPPORT(bp)) { in bnx2x_register_cnic()
14968 if (!CNIC_LOADED(bp)) { in bnx2x_register_cnic()
14969 rc = bnx2x_load_cnic(bp); in bnx2x_register_cnic()
14976 bp->cnic_enabled = true; in bnx2x_register_cnic()
14978 bp->cnic_kwq = kzalloc(PAGE_SIZE, GFP_KERNEL); in bnx2x_register_cnic()
14979 if (!bp->cnic_kwq) in bnx2x_register_cnic()
14982 bp->cnic_kwq_cons = bp->cnic_kwq; in bnx2x_register_cnic()
14983 bp->cnic_kwq_prod = bp->cnic_kwq; in bnx2x_register_cnic()
14984 bp->cnic_kwq_last = bp->cnic_kwq + MAX_SP_DESC_CNT; in bnx2x_register_cnic()
14986 bp->cnic_spq_pending = 0; in bnx2x_register_cnic()
14987 bp->cnic_kwq_pending = 0; in bnx2x_register_cnic()
14989 bp->cnic_data = data; in bnx2x_register_cnic()
14993 cp->iro_arr = bp->iro_arr; in bnx2x_register_cnic()
14995 bnx2x_setup_cnic_irq_info(bp); in bnx2x_register_cnic()
14997 rcu_assign_pointer(bp->cnic_ops, ops); in bnx2x_register_cnic()
15000 bnx2x_schedule_sp_rtnl(bp, BNX2X_SP_RTNL_GET_DRV_VERSION, 0); in bnx2x_register_cnic()
15007 struct bnx2x *bp = netdev_priv(dev); in bnx2x_unregister_cnic() local
15008 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2x_unregister_cnic()
15010 mutex_lock(&bp->cnic_mutex); in bnx2x_unregister_cnic()
15012 RCU_INIT_POINTER(bp->cnic_ops, NULL); in bnx2x_unregister_cnic()
15013 mutex_unlock(&bp->cnic_mutex); in bnx2x_unregister_cnic()
15015 bp->cnic_enabled = false; in bnx2x_unregister_cnic()
15016 kfree(bp->cnic_kwq); in bnx2x_unregister_cnic()
15017 bp->cnic_kwq = NULL; in bnx2x_unregister_cnic()
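The cnic register/unregister pair publishes the client's ops table with rcu_assign_pointer() and tears it down with RCU_INIT_POINTER() so the softirq-side readers (the *_send_bh() path above) never dereference a stale table. A generic sketch of that publish/read/unpublish pattern, with hypothetical example_* names rather than the driver's cnic fields:

#include <linux/rcupdate.h>
#include <linux/mutex.h>
#include <linux/errno.h>

struct example_ops {
	int (*ctl)(void *data, int cmd);
};

static struct example_ops __rcu *example_ops_ptr;
static DEFINE_MUTEX(example_ops_mutex);
static void *example_ops_data;

/* Publish a callback table; readers may start seeing it immediately. */
static void example_register(struct example_ops *ops, void *data)
{
	mutex_lock(&example_ops_mutex);
	example_ops_data = data;
	rcu_assign_pointer(example_ops_ptr, ops);
	mutex_unlock(&example_ops_mutex);
}

/* Unpublish and wait for in-flight readers before the caller frees anything. */
static void example_unregister(void)
{
	mutex_lock(&example_ops_mutex);
	RCU_INIT_POINTER(example_ops_ptr, NULL);
	mutex_unlock(&example_ops_mutex);
	synchronize_rcu();
}

/* Reader path, safe in softirq context. */
static int example_call(int cmd)
{
	struct example_ops *ops;
	int rc = -ENODEV;

	rcu_read_lock();
	ops = rcu_dereference(example_ops_ptr);
	if (ops)
		rc = ops->ctl(example_ops_data, cmd);
	rcu_read_unlock();
	return rc;
}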
15024 struct bnx2x *bp = netdev_priv(dev); in bnx2x_cnic_probe() local
15025 struct cnic_eth_dev *cp = &bp->cnic_eth_dev; in bnx2x_cnic_probe()
15031 if (NO_ISCSI(bp) && NO_FCOE(bp)) in bnx2x_cnic_probe()
15035 cp->chip_id = CHIP_ID(bp); in bnx2x_cnic_probe()
15036 cp->pdev = bp->pdev; in bnx2x_cnic_probe()
15037 cp->io_base = bp->regview; in bnx2x_cnic_probe()
15038 cp->io_base2 = bp->doorbells; in bnx2x_cnic_probe()
15041 cp->ctx_tbl_offset = FUNC_ILT_BASE(BP_FUNC(bp)) + in bnx2x_cnic_probe()
15042 bnx2x_cid_ilt_lines(bp); in bnx2x_cnic_probe()
15044 cp->starting_cid = bnx2x_cid_ilt_lines(bp) * ILT_PAGE_CIDS; in bnx2x_cnic_probe()
15050 cp->fcoe_init_cid = BNX2X_FCOE_ETH_CID(bp); in bnx2x_cnic_probe()
15052 bnx2x_cnic_eth_cl_id(bp, BNX2X_ISCSI_ETH_CL_ID_IDX); in bnx2x_cnic_probe()
15053 cp->iscsi_l2_cid = BNX2X_ISCSI_ETH_CID(bp); in bnx2x_cnic_probe()
15055 if (NO_ISCSI_OOO(bp)) in bnx2x_cnic_probe()
15058 if (NO_ISCSI(bp)) in bnx2x_cnic_probe()
15061 if (NO_FCOE(bp)) in bnx2x_cnic_probe()
15075 struct bnx2x *bp = fp->bp; in bnx2x_rx_ustorm_prods_offset() local
15078 if (IS_VF(bp)) in bnx2x_rx_ustorm_prods_offset()
15079 return bnx2x_vf_ustorm_prods_offset(bp, fp); in bnx2x_rx_ustorm_prods_offset()
15080 else if (!CHIP_IS_E1x(bp)) in bnx2x_rx_ustorm_prods_offset()
15083 offset += USTORM_RX_PRODS_E1X_OFFSET(BP_PORT(bp), fp->cl_id); in bnx2x_rx_ustorm_prods_offset()
15093 int bnx2x_pretend_func(struct bnx2x *bp, u16 pretend_func_val) in bnx2x_pretend_func() argument
15097 if (CHIP_IS_E1H(bp) && pretend_func_val >= E1H_FUNC_MAX) in bnx2x_pretend_func()
15101 pretend_reg = bnx2x_get_pretend_reg(bp); in bnx2x_pretend_func()
15102 REG_WR(bp, pretend_reg, pretend_func_val); in bnx2x_pretend_func()
15103 REG_RD(bp, pretend_reg); in bnx2x_pretend_func()
15109 struct bnx2x *bp = container_of(work, struct bnx2x, ptp_task); in bnx2x_ptp_task() local
15110 int port = BP_PORT(bp); in bnx2x_ptp_task()
15122 val_seq = REG_RD(bp, port ? NIG_REG_P1_TLLH_PTP_BUF_SEQID : in bnx2x_ptp_task()
15133 timestamp = REG_RD(bp, port ? NIG_REG_P1_TLLH_PTP_BUF_TS_MSB : in bnx2x_ptp_task()
15136 timestamp |= REG_RD(bp, port ? NIG_REG_P1_TLLH_PTP_BUF_TS_LSB : in bnx2x_ptp_task()
15139 REG_WR(bp, port ? NIG_REG_P1_TLLH_PTP_BUF_SEQID : in bnx2x_ptp_task()
15141 ns = timecounter_cyc2time(&bp->timecounter, timestamp); in bnx2x_ptp_task()
15145 skb_tstamp_tx(bp->ptp_tx_skb, &shhwtstamps); in bnx2x_ptp_task()
15153 bp->eth_stats.ptp_skip_tx_ts++; in bnx2x_ptp_task()
15156 dev_kfree_skb_any(bp->ptp_tx_skb); in bnx2x_ptp_task()
15157 bp->ptp_tx_skb = NULL; in bnx2x_ptp_task()
15160 void bnx2x_set_rx_ts(struct bnx2x *bp, struct sk_buff *skb) in bnx2x_set_rx_ts() argument
15162 int port = BP_PORT(bp); in bnx2x_set_rx_ts()
15165 timestamp = REG_RD(bp, port ? NIG_REG_P1_LLH_PTP_HOST_BUF_TS_MSB : in bnx2x_set_rx_ts()
15168 timestamp |= REG_RD(bp, port ? NIG_REG_P1_LLH_PTP_HOST_BUF_TS_LSB : in bnx2x_set_rx_ts()
15172 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_HOST_BUF_SEQID : in bnx2x_set_rx_ts()
15175 ns = timecounter_cyc2time(&bp->timecounter, timestamp); in bnx2x_set_rx_ts()
15186 struct bnx2x *bp = container_of(cc, struct bnx2x, cyclecounter); in bnx2x_cyclecounter_read() local
15187 int port = BP_PORT(bp); in bnx2x_cyclecounter_read()
15191 REG_RD_DMAE(bp, port ? NIG_REG_TIMESYNC_GEN_REG + tsgen_synctime_t1 : in bnx2x_cyclecounter_read()
15201 static void bnx2x_init_cyclecounter(struct bnx2x *bp) in bnx2x_init_cyclecounter() argument
15203 memset(&bp->cyclecounter, 0, sizeof(bp->cyclecounter)); in bnx2x_init_cyclecounter()
15204 bp->cyclecounter.read = bnx2x_cyclecounter_read; in bnx2x_init_cyclecounter()
15205 bp->cyclecounter.mask = CYCLECOUNTER_MASK(64); in bnx2x_init_cyclecounter()
15206 bp->cyclecounter.shift = 0; in bnx2x_init_cyclecounter()
15207 bp->cyclecounter.mult = 1; in bnx2x_init_cyclecounter()
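bnx2x_init_cyclecounter() describes the NIG's free-running timesync counter to the kernel's cyclecounter/timecounter helpers (mult = 1, shift = 0: the counter already ticks in nanoseconds), and timecounter_cyc2time() then turns raw hardware timestamps into nanoseconds in the Rx/Tx timestamp paths above. A self-contained sketch of that pairing, using ktime_get_raw_ns() as a stand-in for the hardware counter:

#include <linux/timecounter.h>
#include <linux/timekeeping.h>

static struct cyclecounter example_cc;
static struct timecounter example_tc;

/* Stand-in for the device's free-running counter; assumption: 1 ns per tick. */
static u64 example_cc_read(const struct cyclecounter *cc)
{
	return ktime_get_raw_ns();
}

static void example_timecounter_setup(void)
{
	example_cc.read	 = example_cc_read;
	example_cc.mask	 = CYCLECOUNTER_MASK(64);
	example_cc.mult	 = 1;	/* counter already counts nanoseconds */
	example_cc.shift = 0;
	/* Start the software clock at the current wall-clock time, as bnx2x does at init. */
	timecounter_init(&example_tc, &example_cc, ktime_get_real_ns());
}

/* Convert a raw hardware timestamp into nanoseconds on the software clock. */
static u64 example_hw_ts_to_ns(u64 hw_cycles)
{
	return timecounter_cyc2time(&example_tc, hw_cycles);
}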
15210 static int bnx2x_send_reset_timesync_ramrod(struct bnx2x *bp) in bnx2x_send_reset_timesync_ramrod() argument
15220 func_params.f_obj = &bp->func_obj; in bnx2x_send_reset_timesync_ramrod()
15227 return bnx2x_func_state_change(bp, &func_params); in bnx2x_send_reset_timesync_ramrod()
15230 static int bnx2x_enable_ptp_packets(struct bnx2x *bp) in bnx2x_enable_ptp_packets() argument
15245 for_each_eth_queue(bp, i) { in bnx2x_enable_ptp_packets()
15246 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_enable_ptp_packets()
15249 q_params.q_obj = &bnx2x_sp_obj(bp, fp).q_obj; in bnx2x_enable_ptp_packets()
15252 rc = bnx2x_queue_state_change(bp, &q_params); in bnx2x_enable_ptp_packets()
15275 int bnx2x_configure_ptp_filters(struct bnx2x *bp) in bnx2x_configure_ptp_filters() argument
15277 int port = BP_PORT(bp); in bnx2x_configure_ptp_filters()
15281 if (!bp->hwtstamp_ioctl_called) in bnx2x_configure_ptp_filters()
15288 switch (bp->tx_type) { in bnx2x_configure_ptp_filters()
15290 bp->flags |= TX_TIMESTAMPING_EN; in bnx2x_configure_ptp_filters()
15291 REG_WR(bp, param, BNX2X_PTP_TX_ON_PARAM_MASK); in bnx2x_configure_ptp_filters()
15292 REG_WR(bp, rule, BNX2X_PTP_TX_ON_RULE_MASK); in bnx2x_configure_ptp_filters()
15304 switch (bp->rx_filter) { in bnx2x_configure_ptp_filters()
15310 bp->rx_filter = HWTSTAMP_FILTER_NONE; in bnx2x_configure_ptp_filters()
15315 bp->rx_filter = HWTSTAMP_FILTER_PTP_V1_L4_EVENT; in bnx2x_configure_ptp_filters()
15317 REG_WR(bp, param, BNX2X_PTP_V1_L4_PARAM_MASK); in bnx2x_configure_ptp_filters()
15318 REG_WR(bp, rule, BNX2X_PTP_V1_L4_RULE_MASK); in bnx2x_configure_ptp_filters()
15323 bp->rx_filter = HWTSTAMP_FILTER_PTP_V2_L4_EVENT; in bnx2x_configure_ptp_filters()
15325 REG_WR(bp, param, BNX2X_PTP_V2_L4_PARAM_MASK); in bnx2x_configure_ptp_filters()
15326 REG_WR(bp, rule, BNX2X_PTP_V2_L4_RULE_MASK); in bnx2x_configure_ptp_filters()
15331 bp->rx_filter = HWTSTAMP_FILTER_PTP_V2_L2_EVENT; in bnx2x_configure_ptp_filters()
15333 REG_WR(bp, param, BNX2X_PTP_V2_L2_PARAM_MASK); in bnx2x_configure_ptp_filters()
15334 REG_WR(bp, rule, BNX2X_PTP_V2_L2_RULE_MASK); in bnx2x_configure_ptp_filters()
15340 bp->rx_filter = HWTSTAMP_FILTER_PTP_V2_EVENT; in bnx2x_configure_ptp_filters()
15342 REG_WR(bp, param, BNX2X_PTP_V2_PARAM_MASK); in bnx2x_configure_ptp_filters()
15343 REG_WR(bp, rule, BNX2X_PTP_V2_RULE_MASK); in bnx2x_configure_ptp_filters()
15348 rc = bnx2x_enable_ptp_packets(bp); in bnx2x_configure_ptp_filters()
15353 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_TO_HOST : in bnx2x_configure_ptp_filters()
15359 static int bnx2x_hwtstamp_ioctl(struct bnx2x *bp, struct ifreq *ifr) in bnx2x_hwtstamp_ioctl() argument
15372 bp->hwtstamp_ioctl_called = true; in bnx2x_hwtstamp_ioctl()
15373 bp->tx_type = config.tx_type; in bnx2x_hwtstamp_ioctl()
15374 bp->rx_filter = config.rx_filter; in bnx2x_hwtstamp_ioctl()
15376 rc = bnx2x_configure_ptp_filters(bp); in bnx2x_hwtstamp_ioctl()
15380 config.rx_filter = bp->rx_filter; in bnx2x_hwtstamp_ioctl()
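bnx2x_hwtstamp_ioctl() records the requested tx_type/rx_filter, programs the PTP filters, and writes the (possibly widened) rx_filter back to the caller. From user space the request travels through the standard SIOCSHWTSTAMP ioctl; a small example (the eth0 interface name is an assumption):

#include <string.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <net/if.h>
#include <linux/net_tstamp.h>
#include <linux/sockios.h>
#include <unistd.h>

int main(void)
{
	struct hwtstamp_config cfg = {
		.tx_type   = HWTSTAMP_TX_ON,
		.rx_filter = HWTSTAMP_FILTER_PTP_V2_EVENT,
	};
	struct ifreq ifr;
	int fd = socket(AF_INET, SOCK_DGRAM, 0);

	memset(&ifr, 0, sizeof(ifr));
	strncpy(ifr.ifr_name, "eth0", IFNAMSIZ - 1);	/* assumption: the bnx2x port is eth0 */
	ifr.ifr_data = (void *)&cfg;

	if (ioctl(fd, SIOCSHWTSTAMP, &ifr) < 0)
		perror("SIOCSHWTSTAMP");
	else
		printf("rx_filter granted: %d\n", cfg.rx_filter);	/* driver may widen the filter */
	close(fd);
	return 0;
}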
15387 static int bnx2x_configure_ptp(struct bnx2x *bp) in bnx2x_configure_ptp() argument
15389 int rc, port = BP_PORT(bp); in bnx2x_configure_ptp()
15393 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_PARAM_MASK : in bnx2x_configure_ptp()
15395 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_RULE_MASK : in bnx2x_configure_ptp()
15397 REG_WR(bp, port ? NIG_REG_P1_TLLH_PTP_PARAM_MASK : in bnx2x_configure_ptp()
15399 REG_WR(bp, port ? NIG_REG_P1_TLLH_PTP_RULE_MASK : in bnx2x_configure_ptp()
15403 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_TO_HOST : in bnx2x_configure_ptp()
15407 REG_WR(bp, port ? NIG_REG_P1_PTP_EN : in bnx2x_configure_ptp()
15413 REG_WR_DMAE(bp, NIG_REG_TIMESYNC_GEN_REG + tsgen_ctrl, wb_data, 2); in bnx2x_configure_ptp()
15416 rc = bnx2x_send_reset_timesync_ramrod(bp); in bnx2x_configure_ptp()
15423 REG_WR(bp, port ? NIG_REG_P1_LLH_PTP_HOST_BUF_SEQID : in bnx2x_configure_ptp()
15425 REG_WR(bp, port ? NIG_REG_P1_TLLH_PTP_BUF_SEQID : in bnx2x_configure_ptp()
15432 void bnx2x_init_ptp(struct bnx2x *bp) in bnx2x_init_ptp() argument
15437 rc = bnx2x_configure_ptp(bp); in bnx2x_init_ptp()
15444 INIT_WORK(&bp->ptp_task, bnx2x_ptp_task); in bnx2x_init_ptp()
15450 if (!bp->timecounter_init_done) { in bnx2x_init_ptp()
15451 bnx2x_init_cyclecounter(bp); in bnx2x_init_ptp()
15452 timecounter_init(&bp->timecounter, &bp->cyclecounter, in bnx2x_init_ptp()
15454 bp->timecounter_init_done = true; in bnx2x_init_ptp()