Lines matching refs: r_vec

442 struct nfp_net_r_vector *r_vec = data; in nfp_net_irq_rxtx() local
448 r_vec->event_ctr++; in nfp_net_irq_rxtx()
450 napi_schedule_irqoff(&r_vec->napi); in nfp_net_irq_rxtx()
461 struct nfp_net_r_vector *r_vec = data; in nfp_ctrl_irq_rxtx() local
463 tasklet_schedule(&r_vec->tasklet); in nfp_ctrl_irq_rxtx()
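
The two handlers above are the per-vector MSI-X interrupt entry points: the data-path handler counts the event and defers all work to NAPI, while the control-vNIC handler (which has no netdev) kicks a tasklet instead. A minimal sketch of that pattern, with the handler boilerplate (prototype, return value) filled in from the standard kernel IRQ contract rather than from the listing:

static irqreturn_t nfp_net_irq_rxtx(int irq, void *data)
{
	struct nfp_net_r_vector *r_vec = data;

	/* Count the MSI-X event, then let NAPI do the real RX/TX work
	 * in softirq context.
	 */
	r_vec->event_ctr++;
	napi_schedule_irqoff(&r_vec->napi);

	return IRQ_HANDLED;
}

static irqreturn_t nfp_ctrl_irq_rxtx(int irq, void *data)
{
	struct nfp_net_r_vector *r_vec = data;

	/* Control vNIC has no NAPI context; poll it from a tasklet. */
	tasklet_schedule(&r_vec->tasklet);

	return IRQ_HANDLED;
}
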
586 nfp_net_tls_tx(struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec, in nfp_net_tls_tx() argument
609 u64_stats_update_begin(&r_vec->tx_sync); in nfp_net_tls_tx()
610 r_vec->tls_tx_fallback++; in nfp_net_tls_tx()
611 u64_stats_update_end(&r_vec->tx_sync); in nfp_net_tls_tx()
615 u64_stats_update_begin(&r_vec->tx_sync); in nfp_net_tls_tx()
616 r_vec->tls_tx_no_fallback++; in nfp_net_tls_tx()
617 u64_stats_update_end(&r_vec->tx_sync); in nfp_net_tls_tx()
626 u64_stats_update_begin(&r_vec->tx_sync); in nfp_net_tls_tx()
627 r_vec->tx_errors++; in nfp_net_tls_tx()
628 u64_stats_update_end(&r_vec->tx_sync); in nfp_net_tls_tx()
643 u64_stats_update_begin(&r_vec->tx_sync); in nfp_net_tls_tx()
645 r_vec->hw_tls_tx++; in nfp_net_tls_tx()
647 r_vec->hw_tls_tx += skb_shinfo(skb)->gso_segs; in nfp_net_tls_tx()
648 u64_stats_update_end(&r_vec->tx_sync); in nfp_net_tls_tx()
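
Every counter bump in nfp_net_tls_tx() above (tls_tx_fallback, tls_tx_no_fallback, tx_errors, hw_tls_tx) is bracketed by u64_stats_update_begin()/u64_stats_update_end() on r_vec->tx_sync, which is the usual way to keep per-vector 64-bit statistics consistent for lockless readers on 32-bit systems. A condensed sketch of the pattern; the helper name and the branch conditions are illustrative, not taken from the driver:

/* Illustrative helper: account a TLS TX attempt on this ring vector. */
static void nfp_net_tls_tx_account(struct nfp_net_r_vector *r_vec,
				   struct sk_buff *skb, bool fell_back)
{
	u64_stats_update_begin(&r_vec->tx_sync);
	if (fell_back)
		r_vec->tls_tx_fallback++;	/* offload failed, SW TLS used */
	else if (skb_is_gso(skb))
		r_vec->hw_tls_tx += skb_shinfo(skb)->gso_segs;
	else
		r_vec->hw_tls_tx++;
	u64_stats_update_end(&r_vec->tx_sync);
}
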
734 struct nfp_net_r_vector *r_vec; in nfp_net_vecs_init() local
745 r_vec = &nn->r_vecs[r]; in nfp_net_vecs_init()
746 r_vec->nfp_net = nn; in nfp_net_vecs_init()
747 r_vec->irq_entry = entry->entry; in nfp_net_vecs_init()
748 r_vec->irq_vector = entry->vector; in nfp_net_vecs_init()
751 r_vec->handler = nfp_net_irq_rxtx; in nfp_net_vecs_init()
753 r_vec->handler = nfp_ctrl_irq_rxtx; in nfp_net_vecs_init()
755 __skb_queue_head_init(&r_vec->queue); in nfp_net_vecs_init()
756 spin_lock_init(&r_vec->lock); in nfp_net_vecs_init()
757 tasklet_setup(&r_vec->tasklet, nn->dp.ops->ctrl_poll); in nfp_net_vecs_init()
758 tasklet_disable(&r_vec->tasklet); in nfp_net_vecs_init()
761 cpumask_set_cpu(r, &r_vec->affinity_mask); in nfp_net_vecs_init()
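
nfp_net_vecs_init() above walks every ring vector once at probe time and records which MSI-X entry and which IRQ handler it will use; only the control-vNIC path sets up the skb queue, the lock, and the (initially disabled) tasklet. A sketch of that loop; the loop bound (nn->max_r_vecs), the irq_entries indexing, and the NFP_NET_NON_Q_VECTORS offset are assumptions about surrounding code the listing does not show:

static void nfp_net_vecs_init(struct nfp_net *nn)
{
	struct nfp_net_r_vector *r_vec;
	int r;

	for (r = 0; r < nn->max_r_vecs; r++) {
		struct msix_entry *entry;

		entry = &nn->irq_entries[NFP_NET_NON_Q_VECTORS + r];

		r_vec = &nn->r_vecs[r];
		r_vec->nfp_net = nn;
		r_vec->irq_entry = entry->entry;
		r_vec->irq_vector = entry->vector;

		if (nn->dp.netdev) {
			/* Data vNIC: interrupts feed NAPI. */
			r_vec->handler = nfp_net_irq_rxtx;
		} else {
			/* Control vNIC: interrupts feed a tasklet. */
			r_vec->handler = nfp_ctrl_irq_rxtx;

			__skb_queue_head_init(&r_vec->queue);
			spin_lock_init(&r_vec->lock);
			tasklet_setup(&r_vec->tasklet, nn->dp.ops->ctrl_poll);
			tasklet_disable(&r_vec->tasklet);
		}

		cpumask_set_cpu(r, &r_vec->affinity_mask);
	}
}
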
766 nfp_net_napi_add(struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec, int idx) in nfp_net_napi_add() argument
769 netif_napi_add(dp->netdev, &r_vec->napi, in nfp_net_napi_add()
774 tasklet_enable(&r_vec->tasklet); in nfp_net_napi_add()
778 nfp_net_napi_del(struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec) in nfp_net_napi_del() argument
781 netif_napi_del(&r_vec->napi); in nfp_net_napi_del()
783 tasklet_disable(&r_vec->tasklet); in nfp_net_napi_del()
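
nfp_net_napi_add()/nfp_net_napi_del() above hide the data-vNIC versus control-vNIC split: with a netdev present they register or remove NAPI, otherwise they simply enable or disable the tasklet created in nfp_net_vecs_init(). A sketch, assuming the three-argument netif_napi_add() of recent kernels and using dp->ops->poll as a stand-in for whichever poll callback the driver actually selects for this vector:

static void
nfp_net_napi_add(struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec, int idx)
{
	if (dp->netdev)
		netif_napi_add(dp->netdev, &r_vec->napi, dp->ops->poll);
	else
		tasklet_enable(&r_vec->tasklet);
}

static void
nfp_net_napi_del(struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec)
{
	if (dp->netdev)
		netif_napi_del(&r_vec->napi);
	else
		tasklet_disable(&r_vec->tasklet);
}
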
788 struct nfp_net_r_vector *r_vec, int idx) in nfp_net_vector_assign_rings() argument
790 r_vec->rx_ring = idx < dp->num_rx_rings ? &dp->rx_rings[idx] : NULL; in nfp_net_vector_assign_rings()
791 r_vec->tx_ring = in nfp_net_vector_assign_rings()
794 r_vec->xdp_ring = idx < dp->num_tx_rings - dp->num_stack_tx_rings ? in nfp_net_vector_assign_rings()
797 if (nfp_net_has_xsk_pool_slow(dp, idx) || r_vec->xsk_pool) { in nfp_net_vector_assign_rings()
798 r_vec->xsk_pool = dp->xdp_prog ? dp->xsk_pools[idx] : NULL; in nfp_net_vector_assign_rings()
800 if (r_vec->xsk_pool) in nfp_net_vector_assign_rings()
801 xsk_pool_set_rxq_info(r_vec->xsk_pool, in nfp_net_vector_assign_rings()
802 &r_vec->rx_ring->xdp_rxq); in nfp_net_vector_assign_rings()
804 nfp_net_napi_del(dp, r_vec); in nfp_net_vector_assign_rings()
805 nfp_net_napi_add(dp, r_vec, idx); in nfp_net_vector_assign_rings()
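
nfp_net_vector_assign_rings() above wires each vector to its RX ring, its stack TX ring, its XDP TX ring (taken from the slots past the stack rings) and, when AF_XDP is in use, to the matching xsk_buff_pool; it then re-creates NAPI so the right poll callback is in place. Because the listing only shows lines that mention r_vec, the right-hand sides of the wrapped assignments below are reconstructions based on the visible conditions, not verbatim driver code:

static void
nfp_net_vector_assign_rings(struct nfp_net_dp *dp,
			    struct nfp_net_r_vector *r_vec, int idx)
{
	r_vec->rx_ring = idx < dp->num_rx_rings ? &dp->rx_rings[idx] : NULL;
	/* Assumed layout: stack TX rings first, then XDP TX rings. */
	r_vec->tx_ring =
		idx < dp->num_stack_tx_rings ? &dp->tx_rings[idx] : NULL;
	r_vec->xdp_ring = idx < dp->num_tx_rings - dp->num_stack_tx_rings ?
		&dp->tx_rings[dp->num_stack_tx_rings + idx] : NULL;

	if (nfp_net_has_xsk_pool_slow(dp, idx) || r_vec->xsk_pool) {
		r_vec->xsk_pool = dp->xdp_prog ? dp->xsk_pools[idx] : NULL;

		if (r_vec->xsk_pool)
			xsk_pool_set_rxq_info(r_vec->xsk_pool,
					      &r_vec->rx_ring->xdp_rxq);

		/* Re-register NAPI so the poll callback matches the new
		 * ring/pool assignment.
		 */
		nfp_net_napi_del(dp, r_vec);
		nfp_net_napi_add(dp, r_vec, idx);
	}
}
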
810 nfp_net_prepare_vector(struct nfp_net *nn, struct nfp_net_r_vector *r_vec, in nfp_net_prepare_vector() argument
815 nfp_net_napi_add(&nn->dp, r_vec, idx); in nfp_net_prepare_vector()
817 snprintf(r_vec->name, sizeof(r_vec->name), in nfp_net_prepare_vector()
819 err = request_irq(r_vec->irq_vector, r_vec->handler, 0, r_vec->name, in nfp_net_prepare_vector()
820 r_vec); in nfp_net_prepare_vector()
822 nfp_net_napi_del(&nn->dp, r_vec); in nfp_net_prepare_vector()
823 nn_err(nn, "Error requesting IRQ %d\n", r_vec->irq_vector); in nfp_net_prepare_vector()
826 disable_irq(r_vec->irq_vector); in nfp_net_prepare_vector()
828 irq_set_affinity_hint(r_vec->irq_vector, &r_vec->affinity_mask); in nfp_net_prepare_vector()
830 nn_dbg(nn, "RV%02d: irq=%03d/%03d\n", idx, r_vec->irq_vector, in nfp_net_prepare_vector()
831 r_vec->irq_entry); in nfp_net_prepare_vector()
837 nfp_net_cleanup_vector(struct nfp_net *nn, struct nfp_net_r_vector *r_vec) in nfp_net_cleanup_vector() argument
839 irq_set_affinity_hint(r_vec->irq_vector, NULL); in nfp_net_cleanup_vector()
840 nfp_net_napi_del(&nn->dp, r_vec); in nfp_net_cleanup_vector()
841 free_irq(r_vec->irq_vector, r_vec); in nfp_net_cleanup_vector()
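
nfp_net_prepare_vector() and nfp_net_cleanup_vector() above form the setup/teardown pair for one vector: add NAPI, request the MSI-X interrupt with the r_vec itself as the cookie, keep the IRQ disabled until the rings are actually brought up, and publish an affinity hint; cleanup undoes the same steps in reverse. A sketch of the pair, in which the IRQ name format string is an assumption rather than the driver's exact one:

static int
nfp_net_prepare_vector(struct nfp_net *nn, struct nfp_net_r_vector *r_vec,
		       int idx)
{
	int err;

	nfp_net_napi_add(&nn->dp, r_vec, idx);

	/* Name format is illustrative. */
	snprintf(r_vec->name, sizeof(r_vec->name), "nfp-rxtx-%d", idx);
	err = request_irq(r_vec->irq_vector, r_vec->handler, 0, r_vec->name,
			  r_vec);
	if (err) {
		nfp_net_napi_del(&nn->dp, r_vec);
		nn_err(nn, "Error requesting IRQ %d\n", r_vec->irq_vector);
		return err;
	}
	/* Keep the vector quiet until nfp_net_open_stack() enables it. */
	disable_irq(r_vec->irq_vector);

	irq_set_affinity_hint(r_vec->irq_vector, &r_vec->affinity_mask);

	nn_dbg(nn, "RV%02d: irq=%03d/%03d\n", idx, r_vec->irq_vector,
	       r_vec->irq_entry);

	return 0;
}

static void
nfp_net_cleanup_vector(struct nfp_net *nn, struct nfp_net_r_vector *r_vec)
{
	irq_set_affinity_hint(r_vec->irq_vector, NULL);
	nfp_net_napi_del(&nn->dp, r_vec);
	free_irq(r_vec->irq_vector, r_vec);
}
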
1028 struct nfp_net_r_vector *r_vec; in nfp_net_close_stack() local
1036 r_vec = &nn->r_vecs[r]; in nfp_net_close_stack()
1038 disable_irq(r_vec->irq_vector); in nfp_net_close_stack()
1039 napi_disable(&r_vec->napi); in nfp_net_close_stack()
1041 if (r_vec->rx_ring) in nfp_net_close_stack()
1042 cancel_work_sync(&r_vec->rx_dim.work); in nfp_net_close_stack()
1044 if (r_vec->tx_ring) in nfp_net_close_stack()
1045 cancel_work_sync(&r_vec->tx_dim.work); in nfp_net_close_stack()
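
The nfp_net_close_stack() lines above show the quiescing order for each vector on ifdown: mask the interrupt first, stop NAPI, then flush any pending DIM (interrupt moderation) work for the rings the vector actually owns. A sketch of that loop; the loop bound nn->dp.num_r_vecs and the surrounding netdev teardown are assumptions:

	for (r = 0; r < nn->dp.num_r_vecs; r++) {
		r_vec = &nn->r_vecs[r];

		disable_irq(r_vec->irq_vector);
		napi_disable(&r_vec->napi);

		/* DIM work is only initialised for rings this vector owns. */
		if (r_vec->rx_ring)
			cancel_work_sync(&r_vec->rx_dim.work);
		if (r_vec->tx_ring)
			cancel_work_sync(&r_vec->tx_dim.work);
	}
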
1114 struct nfp_net_r_vector *r_vec; in nfp_net_rx_dim_work() local
1122 r_vec = container_of(dim, struct nfp_net_r_vector, rx_dim); in nfp_net_rx_dim_work()
1123 nn = r_vec->nfp_net; in nfp_net_rx_dim_work()
1135 nn_writel(nn, NFP_NET_CFG_RXR_IRQ_MOD(r_vec->rx_ring->idx), value); in nfp_net_rx_dim_work()
1143 struct nfp_net_r_vector *r_vec; in nfp_net_tx_dim_work() local
1151 r_vec = container_of(dim, struct nfp_net_r_vector, tx_dim); in nfp_net_tx_dim_work()
1152 nn = r_vec->nfp_net; in nfp_net_tx_dim_work()
1164 nn_writel(nn, NFP_NET_CFG_TXR_IRQ_MOD(r_vec->tx_ring->idx), value); in nfp_net_tx_dim_work()
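
nfp_net_rx_dim_work() and nfp_net_tx_dim_work() above run from the workqueue when the DIM library decides the moderation profile should change; container_of() recovers the owning r_vec (and from it the nfp_net), and the new usec/packet values are written to the per-ring IRQ moderation register. A sketch of the RX side, in which the register encoding is an illustrative assumption:

static void nfp_net_rx_dim_work(struct work_struct *work)
{
	struct nfp_net_r_vector *r_vec;
	struct dim_cq_moder moder;
	struct nfp_net *nn;
	struct dim *dim;
	u32 value;

	dim = container_of(work, struct dim, work);
	moder = net_dim_get_rx_moderation(dim->mode, dim->profile_ix);
	r_vec = container_of(dim, struct nfp_net_r_vector, rx_dim);
	nn = r_vec->nfp_net;

	/* Encoding of the moderation register is illustrative. */
	value = (moder.pkts << 16) | moder.usec;
	nn_writel(nn, NFP_NET_CFG_RXR_IRQ_MOD(r_vec->rx_ring->idx), value);

	/* Tell the DIM library it may start a new measurement window. */
	dim->state = DIM_START_MEASURE;
}
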
1176 struct nfp_net_r_vector *r_vec; in nfp_net_open_stack() local
1180 r_vec = &nn->r_vecs[r]; in nfp_net_open_stack()
1182 if (r_vec->rx_ring) { in nfp_net_open_stack()
1183 INIT_WORK(&r_vec->rx_dim.work, nfp_net_rx_dim_work); in nfp_net_open_stack()
1184 r_vec->rx_dim.mode = DIM_CQ_PERIOD_MODE_START_FROM_EQE; in nfp_net_open_stack()
1187 if (r_vec->tx_ring) { in nfp_net_open_stack()
1188 INIT_WORK(&r_vec->tx_dim.work, nfp_net_tx_dim_work); in nfp_net_open_stack()
1189 r_vec->tx_dim.mode = DIM_CQ_PERIOD_MODE_START_FROM_EQE; in nfp_net_open_stack()
1192 napi_enable(&r_vec->napi); in nfp_net_open_stack()
1193 enable_irq(r_vec->irq_vector); in nfp_net_open_stack()
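
nfp_net_open_stack() above is the counterpart of nfp_net_close_stack(): per-ring DIM work items are (re)initialised and put into EQE mode before NAPI is enabled and the vector's interrupt is unmasked, so moderation state is never touched while the vector is down. A sketch of the per-vector portion of the loop, with the loop bound again assumed:

	for (r = 0; r < nn->dp.num_r_vecs; r++) {
		r_vec = &nn->r_vecs[r];

		if (r_vec->rx_ring) {
			INIT_WORK(&r_vec->rx_dim.work, nfp_net_rx_dim_work);
			r_vec->rx_dim.mode = DIM_CQ_PERIOD_MODE_START_FROM_EQE;
		}

		if (r_vec->tx_ring) {
			INIT_WORK(&r_vec->tx_dim.work, nfp_net_tx_dim_work);
			r_vec->tx_dim.mode = DIM_CQ_PERIOD_MODE_START_FROM_EQE;
		}

		napi_enable(&r_vec->napi);
		enable_irq(r_vec->irq_vector);
	}
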
1627 struct nfp_net_r_vector *r_vec = &nn->r_vecs[r]; in nfp_net_stat64() local
1632 start = u64_stats_fetch_begin_irq(&r_vec->rx_sync); in nfp_net_stat64()
1633 data[0] = r_vec->rx_pkts; in nfp_net_stat64()
1634 data[1] = r_vec->rx_bytes; in nfp_net_stat64()
1635 data[2] = r_vec->rx_drops; in nfp_net_stat64()
1636 } while (u64_stats_fetch_retry_irq(&r_vec->rx_sync, start)); in nfp_net_stat64()
1642 start = u64_stats_fetch_begin_irq(&r_vec->tx_sync); in nfp_net_stat64()
1643 data[0] = r_vec->tx_pkts; in nfp_net_stat64()
1644 data[1] = r_vec->tx_bytes; in nfp_net_stat64()
1645 data[2] = r_vec->tx_errors; in nfp_net_stat64()
1646 } while (u64_stats_fetch_retry_irq(&r_vec->tx_sync, start)); in nfp_net_stat64()
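
The nfp_net_stat64() lines above read each vector's RX and TX counters with the u64_stats fetch/retry loop that pairs with the update_begin()/update_end() writers seen earlier, so a reader that races with an update simply retries. A sketch of how one vector's counters are typically snapshotted and folded into struct rtnl_link_stats64 (the accumulation into stats is an assumption about the surrounding code):

		struct nfp_net_r_vector *r_vec = &nn->r_vecs[r];
		unsigned int start;
		u64 data[3];

		do {
			start = u64_stats_fetch_begin_irq(&r_vec->rx_sync);
			data[0] = r_vec->rx_pkts;
			data[1] = r_vec->rx_bytes;
			data[2] = r_vec->rx_drops;
		} while (u64_stats_fetch_retry_irq(&r_vec->rx_sync, start));
		stats->rx_packets += data[0];
		stats->rx_bytes += data[1];
		stats->rx_dropped += data[2];

		do {
			start = u64_stats_fetch_begin_irq(&r_vec->tx_sync);
			data[0] = r_vec->tx_pkts;
			data[1] = r_vec->tx_bytes;
			data[2] = r_vec->tx_errors;
		} while (u64_stats_fetch_retry_irq(&r_vec->tx_sync, start));
		stats->tx_packets += data[0];
		stats->tx_bytes += data[1];
		stats->tx_errors += data[2];
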