
Searched refs:txqs (Results 1 – 25 of 57) sorted by relevance


/linux-6.1.9/drivers/net/wireless/intel/iwlwifi/
iwl-trans.c
46 trans->txqs.tfd.addr_size = 64; in iwl_trans_alloc()
47 trans->txqs.tfd.max_tbs = IWL_TFH_NUM_TBS; in iwl_trans_alloc()
48 trans->txqs.tfd.size = sizeof(struct iwl_tfh_tfd); in iwl_trans_alloc()
50 trans->txqs.tfd.addr_size = 36; in iwl_trans_alloc()
51 trans->txqs.tfd.max_tbs = IWL_NUM_OF_TBS; in iwl_trans_alloc()
52 trans->txqs.tfd.size = sizeof(struct iwl_tfd); in iwl_trans_alloc()
82 trans->txqs.bc_tbl_size = in iwl_trans_init()
85 trans->txqs.bc_tbl_size = in iwl_trans_init()
88 trans->txqs.bc_tbl_size = sizeof(struct iwlagn_scd_bc_tbl); in iwl_trans_init()
95 trans->txqs.bc_pool = dmam_pool_create("iwlwifi:bc", trans->dev, in iwl_trans_init()
[all …]
iwl-trans.h
170 #define IWL_TRANS_MAX_FRAGS(trans) ((trans)->txqs.tfd.max_tbs - 3)
588 void (*freeze_txq_timer)(struct iwl_trans *trans, unsigned long txqs,
1055 struct iwl_trans_txqs txqs; member
1310 unsigned long txqs, in iwl_trans_freeze_txq_timer() argument
1319 trans->ops->freeze_txq_timer(trans, txqs, freeze); in iwl_trans_freeze_txq_timer()
1335 u32 txqs) in iwl_trans_wait_tx_queues_empty() argument
1346 return trans->ops->wait_tx_queues_empty(trans, txqs); in iwl_trans_wait_tx_queues_empty()
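The two iwl-trans.h helpers above, iwl_trans_freeze_txq_timer() and iwl_trans_wait_tx_queues_empty(), take their txqs parameter as a bitmask with one bit per TX queue. As a rough, hypothetical sketch only (example_drain_one_txq() and its calling context are invented for illustration, not taken from these results), a caller holding a valid struct iwl_trans could drain a single queue like so:

static void example_drain_one_txq(struct iwl_trans *trans, int txq_id)
{
        /* Sketch: pause the stuck-queue watchdog for this queue only. */
        iwl_trans_freeze_txq_timer(trans, BIT(txq_id), true);

        /* Sketch: wait until the selected TX queue has drained. */
        iwl_trans_wait_tx_queues_empty(trans, BIT(txq_id));

        /* Re-arm the watchdog once the queue is empty. */
        iwl_trans_freeze_txq_timer(trans, BIT(txq_id), false);
}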
/linux-6.1.9/drivers/net/wireless/intel/iwlwifi/pcie/
tx.c
87 txq_id != trans->txqs.cmd.q_id && in iwl_pcie_txq_inc_wr_ptr()
121 struct iwl_txq *txq = trans->txqs.txq[i]; in iwl_pcie_txq_check_wrptrs()
123 if (!test_bit(i, trans->txqs.queue_used)) in iwl_pcie_txq_check_wrptrs()
157 tfd = (u8 *)txq->tfds + trans->txqs.tfd.size * txq->write_ptr; in iwl_pcie_txq_build_tfd()
160 memset(tfd, 0, trans->txqs.tfd.size); in iwl_pcie_txq_build_tfd()
165 if (num_tbs >= trans->txqs.tfd.max_tbs) { in iwl_pcie_txq_build_tfd()
167 trans->txqs.tfd.max_tbs); in iwl_pcie_txq_build_tfd()
205 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_pcie_txq_unmap()
217 if (txq_id != trans->txqs.cmd.q_id) { in iwl_pcie_txq_unmap()
229 txq_id == trans->txqs.cmd.q_id) in iwl_pcie_txq_unmap()
[all …]
trans-gen2.c
240 if (iwl_txq_gen2_init(trans, trans->txqs.cmd.q_id, queue_size)) in iwl_pcie_gen2_nic_init()
326 memset(trans->txqs.queue_stopped, 0, in iwl_trans_pcie_gen2_fw_alive()
327 sizeof(trans->txqs.queue_stopped)); in iwl_trans_pcie_gen2_fw_alive()
328 memset(trans->txqs.queue_used, 0, sizeof(trans->txqs.queue_used)); in iwl_trans_pcie_gen2_fw_alive()
tx-gen2.c
31 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id]; in iwl_pcie_gen2_enqueue_hcmd()
143 cpu_to_le16(QUEUE_TO_SEQ(trans->txqs.cmd.q_id) | in iwl_pcie_gen2_enqueue_hcmd()
191 cmd_size, txq->write_ptr, idx, trans->txqs.cmd.q_id); in iwl_pcie_gen2_enqueue_hcmd()
trans.c
1956 trans->txqs.cmd.q_id = trans_cfg->cmd_queue; in iwl_trans_pcie_configure()
1957 trans->txqs.cmd.fifo = trans_cfg->cmd_fifo; in iwl_trans_pcie_configure()
1958 trans->txqs.cmd.wdg_timeout = trans_cfg->cmd_q_wdg_timeout; in iwl_trans_pcie_configure()
1959 trans->txqs.page_offs = trans_cfg->cb_data_offs; in iwl_trans_pcie_configure()
1960 trans->txqs.dev_cmd_offs = trans_cfg->cb_data_offs + sizeof(void *); in iwl_trans_pcie_configure()
1961 trans->txqs.queue_alloc_cmd_ver = trans_cfg->queue_alloc_cmd_ver; in iwl_trans_pcie_configure()
1980 trans->txqs.bc_table_dword = trans_cfg->bc_table_dword; in iwl_trans_pcie_configure()
2299 struct iwl_txq *txq = trans->txqs.txq[i]; in iwl_trans_pcie_block_txq_ptrs()
2301 if (i == trans->txqs.cmd.q_id) in iwl_trans_pcie_block_txq_ptrs()
2349 if (!test_bit(txq_idx, trans->txqs.queue_used)) in iwl_trans_pcie_wait_txq_empty()
[all …]
ctxt-info.c
221 cpu_to_le64(trans->txqs.txq[trans->txqs.cmd.q_id]->dma_addr); in iwl_pcie_ctxt_info_init()
ctxt-info-gen3.c
186 cpu_to_le64(trans->txqs.txq[trans->txqs.cmd.q_id]->dma_addr); in iwl_pcie_ctxt_info_gen3_init()
/linux-6.1.9/drivers/net/wireless/intel/iwlwifi/queue/
tx.c
49 WARN_ON(trans->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
57 WARN_ON(!trans->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
95 if (num_tbs > trans->txqs.tfd.max_tbs) { in iwl_txq_gen2_tfd_unmap()
167 if (le16_to_cpu(tfd->num_tbs) >= trans->txqs.tfd.max_tbs) { in iwl_txq_gen2_set_tb()
169 trans->txqs.tfd.max_tbs); in iwl_txq_gen2_set_tb()
187 page_ptr = (void *)((u8 *)skb->cb + trans->txqs.page_offs); in get_workaround_page()
285 struct iwl_tso_hdr_page *p = this_cpu_ptr(trans->txqs.tso_hdr_page); in get_page_hdr()
288 page_ptr = (void *)((u8 *)skb->cb + trans->txqs.page_offs); in get_page_hdr()
710 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_tx()
719 if (WARN_ONCE(!test_bit(txq_id, trans->txqs.queue_used), in iwl_txq_gen2_tx()
[all …]
tx.h
32 if (test_and_clear_bit(txq->id, trans->txqs.queue_stopped)) { in iwl_wake_queue()
44 return (u8 *)txq->tfds + trans->txqs.tfd.size * idx; in iwl_txq_get_tfd()
63 if (!test_and_set_bit(txq->id, trans->txqs.queue_stopped)) { in iwl_txq_stop()
178 void iwl_trans_txq_freeze_timer(struct iwl_trans *trans, unsigned long txqs,
/linux-6.1.9/drivers/net/wireless/ath/ath5k/
mac80211-ops.c
69 ath5k_tx_queue(hw, skb, &ah->txqs[qnum], control); in ath5k_tx()
746 *tx = ah->txqs[AR5K_TX_QUEUE_ID_DATA_MIN].txq_max; in ath5k_get_ringparam()
766 for (qnum = 0; qnum < ARRAY_SIZE(ah->txqs); qnum++) { in ath5k_set_ringparam()
767 if (!ah->txqs[qnum].setup) in ath5k_set_ringparam()
769 if (ah->txqs[qnum].qnum < AR5K_TX_QUEUE_ID_DATA_MIN || in ath5k_set_ringparam()
770 ah->txqs[qnum].qnum > AR5K_TX_QUEUE_ID_DATA_MAX) in ath5k_set_ringparam()
773 ah->txqs[qnum].txq_max = tx; in ath5k_set_ringparam()
774 if (ah->txqs[qnum].txq_len >= ah->txqs[qnum].txq_max) in ath5k_set_ringparam()
775 ieee80211_stop_queue(hw, ah->txqs[qnum].qnum); in ath5k_set_ringparam()
base.c
1055 txq = &ah->txqs[qnum]; in ath5k_txq_setup()
1067 return &ah->txqs[qnum]; in ath5k_txq_setup()
1161 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) { in ath5k_drain_tx_buffs()
1162 if (ah->txqs[i].setup) { in ath5k_drain_tx_buffs()
1163 txq = &ah->txqs[i]; in ath5k_drain_tx_buffs()
1186 struct ath5k_txq *txq = ah->txqs; in ath5k_txq_release()
1189 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++, txq++) in ath5k_txq_release()
1838 if (ah->txqs[i].setup && (ah->ah_txq_isr_txok_all & BIT(i))) in ath5k_tasklet_tx()
1839 ath5k_tx_processq(ah, &ah->txqs[i]); in ath5k_tasklet_tx()
2054 trace_ath5k_tx(ah, bf->skb, &ah->txqs[ah->bhalq]); in ath5k_beacon_send()
[all …]
/linux-6.1.9/drivers/net/ethernet/fungible/funeth/
funeth_main.c
350 static void free_txqs(struct funeth_txq **txqs, unsigned int nqs, in free_txqs() argument
355 for (i = start; i < nqs && txqs[i]; i++) in free_txqs()
356 txqs[i] = funeth_txq_free(txqs[i], state); in free_txqs()
359 static int alloc_txqs(struct net_device *dev, struct funeth_txq **txqs, in alloc_txqs() argument
369 state, &txqs[i]); in alloc_txqs()
371 free_txqs(txqs, nqs, start, FUN_QSTATE_DESTROYED); in alloc_txqs()
453 qset->txqs = fp->txqs; in fun_free_rings()
465 fp->txqs = NULL; in fun_free_rings()
469 free_txqs(qset->txqs, qset->ntxqs, qset->txq_start, qset->state); in fun_free_rings()
481 struct funeth_txq **xdpqs = NULL, **txqs; in fun_alloc_rings() local
[all …]
funeth.h
67 struct funeth_txq **txqs; member
88 struct funeth_txq **txqs; member
/linux-6.1.9/drivers/net/ethernet/huawei/hinic/
hinic_main.c
120 gather_tx_stats(nic_tx_stats, &nic_dev->txqs[i]); in gather_nic_stats()
134 if (nic_dev->txqs) in create_txqs()
137 nic_dev->txqs = devm_kcalloc(&netdev->dev, num_txqs, in create_txqs()
138 sizeof(*nic_dev->txqs), GFP_KERNEL); in create_txqs()
139 if (!nic_dev->txqs) in create_txqs()
147 err = hinic_init_txq(&nic_dev->txqs[i], sq, netdev); in create_txqs()
165 hinic_clean_txq(&nic_dev->txqs[i]); in create_txqs()
168 hinic_sq_debug_rem(nic_dev->txqs[j].sq); in create_txqs()
169 hinic_clean_txq(&nic_dev->txqs[j]); in create_txqs()
174 devm_kfree(&netdev->dev, nic_dev->txqs); in create_txqs()
[all …]
hinic_dev.h
93 struct hinic_txq *txqs; member
/linux-6.1.9/drivers/infiniband/hw/hfi1/
ipoib_tx.c
592 txp.txq = &priv->txqs[skb_get_queue_mapping(skb)]; in hfi1_ipoib_send()
696 priv->txqs = kcalloc_node(dev->num_tx_queues, in hfi1_ipoib_txreq_init()
700 if (!priv->txqs) in hfi1_ipoib_txreq_init()
704 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_init()
752 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_init()
761 kfree(priv->txqs); in hfi1_ipoib_txreq_init()
762 priv->txqs = NULL; in hfi1_ipoib_txreq_init()
795 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_deinit()
808 kfree(priv->txqs); in hfi1_ipoib_txreq_deinit()
809 priv->txqs = NULL; in hfi1_ipoib_txreq_deinit()
[all …]
ipoib.h
125 struct hfi1_ipoib_txq *txqs; member
/linux-6.1.9/net/
devres.c
22 unsigned int txqs, unsigned int rxqs) in devm_alloc_etherdev_mqs() argument
30 dr->ndev = alloc_etherdev_mqs(sizeof_priv, txqs, rxqs); in devm_alloc_etherdev_mqs()
/linux-6.1.9/drivers/net/ethernet/netronome/nfp/abm/
main.c
85 unsigned int txqs; in nfp_abm_spawn_repr() local
90 txqs = 1; in nfp_abm_spawn_repr()
93 txqs = alink->vnic->max_rx_rings; in nfp_abm_spawn_repr()
96 netdev = nfp_repr_alloc_mqs(app, txqs, 1); in nfp_abm_spawn_repr()
/linux-6.1.9/drivers/net/ethernet/netronome/nfp/
nfp_net_repr.h
102 nfp_repr_alloc_mqs(struct nfp_app *app, unsigned int txqs, unsigned int rxqs);
/linux-6.1.9/include/linux/
etherdevice.h
56 struct net_device *alloc_etherdev_mqs(int sizeof_priv, unsigned int txqs,
62 unsigned int txqs,
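The etherdevice.h hit above is the declaration of alloc_etherdev_mqs(), which takes the driver's private-area size plus the TX and RX queue counts. A minimal, hypothetical sketch of a caller (struct my_priv and example_alloc() are invented for illustration and are not part of the results above):

#include <linux/etherdevice.h>

struct my_priv {
        unsigned int ntxqs;     /* illustrative private state */
};

static struct net_device *example_alloc(unsigned int txqs, unsigned int rxqs)
{
        struct net_device *netdev;
        struct my_priv *priv;

        /* Allocate a multiqueue Ethernet net_device with room for my_priv. */
        netdev = alloc_etherdev_mqs(sizeof(*priv), txqs, rxqs);
        if (!netdev)
                return NULL;

        priv = netdev_priv(netdev);
        priv->ntxqs = txqs;

        /* The caller registers the device, or releases it with free_netdev(). */
        return netdev;
}

The devm_alloc_etherdev_mqs() hit in net/devres.c above is the device-managed variant of the same allocator; it ties the net_device's lifetime to a struct device so the memory is released automatically on driver detach.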
/linux-6.1.9/drivers/net/ethernet/intel/ice/
ice_base.c
770 u8 buf_len = struct_size(qg_buf, txqs, 1); in ice_vsi_cfg_txq()
786 qg_buf->txqs[0].txq_id = cpu_to_le16(pf_q); in ice_vsi_cfg_txq()
787 ice_set_ctx(hw, (u8 *)&tlan_ctx, qg_buf->txqs[0].txq_ctx, in ice_vsi_cfg_txq()
830 txq = &qg_buf->txqs[0]; in ice_vsi_cfg_txq()
/linux-6.1.9/drivers/net/can/dev/
dev.c
229 unsigned int txqs, unsigned int rxqs) in alloc_candev_mqs() argument
256 txqs, rxqs); in alloc_candev_mqs()
/linux-6.1.9/net/ethernet/
eth.c
391 struct net_device *alloc_etherdev_mqs(int sizeof_priv, unsigned int txqs, in alloc_etherdev_mqs() argument
395 ether_setup, txqs, rxqs); in alloc_etherdev_mqs()
