Lines matching references to the identifier `rxr` (struct bnx2_rx_ring_info pointer) in drivers/net/bnx2.c

720 		struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;  in bnx2_free_rx_mem()  local
724 if (rxr->rx_desc_ring[j]) in bnx2_free_rx_mem()
726 rxr->rx_desc_ring[j], in bnx2_free_rx_mem()
727 rxr->rx_desc_mapping[j]); in bnx2_free_rx_mem()
728 rxr->rx_desc_ring[j] = NULL; in bnx2_free_rx_mem()
730 vfree(rxr->rx_buf_ring); in bnx2_free_rx_mem()
731 rxr->rx_buf_ring = NULL; in bnx2_free_rx_mem()
734 if (rxr->rx_pg_desc_ring[j]) in bnx2_free_rx_mem()
736 rxr->rx_pg_desc_ring[j], in bnx2_free_rx_mem()
737 rxr->rx_pg_desc_mapping[j]); in bnx2_free_rx_mem()
738 rxr->rx_pg_desc_ring[j] = NULL; in bnx2_free_rx_mem()
740 vfree(rxr->rx_pg_ring); in bnx2_free_rx_mem()
741 rxr->rx_pg_ring = NULL; in bnx2_free_rx_mem()
774 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_alloc_rx_mem() local
777 rxr->rx_buf_ring = in bnx2_alloc_rx_mem()
779 if (rxr->rx_buf_ring == NULL) in bnx2_alloc_rx_mem()
783 rxr->rx_desc_ring[j] = in bnx2_alloc_rx_mem()
786 &rxr->rx_desc_mapping[j], in bnx2_alloc_rx_mem()
788 if (rxr->rx_desc_ring[j] == NULL) in bnx2_alloc_rx_mem()
794 rxr->rx_pg_ring = vzalloc(SW_RXPG_RING_SIZE * in bnx2_alloc_rx_mem()
796 if (rxr->rx_pg_ring == NULL) in bnx2_alloc_rx_mem()
802 rxr->rx_pg_desc_ring[j] = in bnx2_alloc_rx_mem()
805 &rxr->rx_pg_desc_mapping[j], in bnx2_alloc_rx_mem()
807 if (rxr->rx_pg_desc_ring[j] == NULL) in bnx2_alloc_rx_mem()
2685 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_page() argument
2688 struct sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_alloc_rx_page()
2690 &rxr->rx_pg_desc_ring[RX_RING(index)][RX_IDX(index)]; in bnx2_alloc_rx_page()
2710 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index) in bnx2_free_rx_page() argument
2712 struct sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_free_rx_page()
2726 bnx2_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_data() argument
2729 struct sw_bd *rx_buf = &rxr->rx_buf_ring[index]; in bnx2_alloc_rx_data()
2731 struct rx_bd *rxbd = &rxr->rx_desc_ring[RX_RING(index)][RX_IDX(index)]; in bnx2_alloc_rx_data()
2752 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_alloc_rx_data()
2900 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_skb_pages() argument
2907 u16 cons = rxr->rx_pg_cons; in bnx2_reuse_rx_skb_pages()
2909 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2928 hw_prod = rxr->rx_pg_prod; in bnx2_reuse_rx_skb_pages()
2933 prod_rx_pg = &rxr->rx_pg_ring[prod]; in bnx2_reuse_rx_skb_pages()
2934 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2935 cons_bd = &rxr->rx_pg_desc_ring[RX_RING(cons)][RX_IDX(cons)]; in bnx2_reuse_rx_skb_pages()
2936 prod_bd = &rxr->rx_pg_desc_ring[RX_RING(prod)][RX_IDX(prod)]; in bnx2_reuse_rx_skb_pages()
2951 rxr->rx_pg_prod = hw_prod; in bnx2_reuse_rx_skb_pages()
2952 rxr->rx_pg_cons = cons; in bnx2_reuse_rx_skb_pages()
2956 bnx2_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_data() argument
2962 cons_rx_buf = &rxr->rx_buf_ring[cons]; in bnx2_reuse_rx_data()
2963 prod_rx_buf = &rxr->rx_buf_ring[prod]; in bnx2_reuse_rx_data()
2969 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_reuse_rx_data()
2979 cons_bd = &rxr->rx_desc_ring[RX_RING(cons)][RX_IDX(cons)]; in bnx2_reuse_rx_data()
2980 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)]; in bnx2_reuse_rx_data()
2986 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u8 *data, in bnx2_rx_skb() argument
2994 err = bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); in bnx2_rx_skb()
2996 bnx2_reuse_rx_data(bp, rxr, data, (u16) (ring_idx >> 16), prod); in bnx2_rx_skb()
3002 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_skb()
3021 u16 pg_cons = rxr->rx_pg_cons; in bnx2_rx_skb()
3022 u16 pg_prod = rxr->rx_pg_prod; in bnx2_rx_skb()
3035 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3036 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3037 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, in bnx2_rx_skb()
3050 rx_pg = &rxr->rx_pg_ring[pg_cons]; in bnx2_rx_skb()
3062 err = bnx2_alloc_rx_page(bp, rxr, in bnx2_rx_skb()
3066 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3067 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3068 bnx2_reuse_rx_skb_pages(bp, rxr, skb, in bnx2_rx_skb()
3084 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3085 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3107 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_rx_int() local
3113 sw_cons = rxr->rx_cons; in bnx2_rx_int()
3114 sw_prod = rxr->rx_prod; in bnx2_rx_int()
3131 rx_buf = &rxr->rx_buf_ring[sw_ring_cons]; in bnx2_rx_int()
3145 &rxr->rx_buf_ring[RX_RING_IDX(NEXT_RX_BD(sw_cons))]; in bnx2_rx_int()
3166 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3173 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_int()
3183 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3195 bnx2_reuse_rx_data(bp, rxr, data, in bnx2_rx_int()
3199 skb = bnx2_rx_skb(bp, rxr, data, len, hdr_len, dma_addr, in bnx2_rx_int()
3249 rxr->rx_cons = sw_cons; in bnx2_rx_int()
3250 rxr->rx_prod = sw_prod; in bnx2_rx_int()
3253 REG_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_rx_int()
3255 REG_WR16(bp, rxr->rx_bidx_addr, sw_prod); in bnx2_rx_int()
3257 REG_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_rx_int()
3348 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_has_fast_work() local
3350 if ((bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) || in bnx2_has_fast_work()
3442 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_poll_work() local
3447 if (bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) in bnx2_poll_work()
5060 struct bnx2_rx_ring_info *rxr; in bnx2_clear_ring_states() local
5066 rxr = &bnapi->rx_ring; in bnx2_clear_ring_states()
5070 rxr->rx_prod_bseq = 0; in bnx2_clear_ring_states()
5071 rxr->rx_prod = 0; in bnx2_clear_ring_states()
5072 rxr->rx_cons = 0; in bnx2_clear_ring_states()
5073 rxr->rx_pg_prod = 0; in bnx2_clear_ring_states()
5074 rxr->rx_pg_cons = 0; in bnx2_clear_ring_states()
5171 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_init_rx_ring() local
5180 bnx2_init_rxbd_rings(rxr->rx_desc_ring, rxr->rx_desc_mapping, in bnx2_init_rx_ring()
5192 bnx2_init_rxbd_rings(rxr->rx_pg_desc_ring, in bnx2_init_rx_ring()
5193 rxr->rx_pg_desc_mapping, in bnx2_init_rx_ring()
5200 val = (u64) rxr->rx_pg_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5203 val = (u64) rxr->rx_pg_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5210 val = (u64) rxr->rx_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5213 val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5216 ring_prod = prod = rxr->rx_pg_prod; in bnx2_init_rx_ring()
5218 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5226 rxr->rx_pg_prod = prod; in bnx2_init_rx_ring()
5228 ring_prod = prod = rxr->rx_prod; in bnx2_init_rx_ring()
5230 if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5238 rxr->rx_prod = prod; in bnx2_init_rx_ring()
5240 rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX; in bnx2_init_rx_ring()
5241 rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ; in bnx2_init_rx_ring()
5242 rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX; in bnx2_init_rx_ring()
5244 REG_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_init_rx_ring()
5245 REG_WR16(bp, rxr->rx_bidx_addr, prod); in bnx2_init_rx_ring()
5247 REG_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_init_rx_ring()
5408 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_skbs() local
5411 if (rxr->rx_buf_ring == NULL) in bnx2_free_rx_skbs()
5415 struct sw_bd *rx_buf = &rxr->rx_buf_ring[j]; in bnx2_free_rx_skbs()
5431 bnx2_free_rx_page(bp, rxr, j); in bnx2_free_rx_skbs()
5745 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_run_loopback() local
5750 rxr = &bnapi->rx_ring; in bnx2_run_loopback()
5826 rx_buf = &rxr->rx_buf_ring[rx_start_idx]; in bnx2_run_loopback()